/* Tree lowering pass.  This pass converts the GENERIC functions-as-trees
   tree representation into the GIMPLE form.
   Copyright (C) 2002-2022 Free Software Foundation, Inc.
   Major work done by Sebastian Pop <s.pop@laposte.net>,
   Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "memmodel.h"
#include "tm_p.h"
#include "gimple.h"
#include "gimple-predict.h"
#include "tree-pass.h"		/* FIXME: only for PROP_gimple_any */
#include "ssa.h"
#include "cgraph.h"
#include "tree-pretty-print.h"
#include "diagnostic-core.h"
#include "alias.h"
#include "fold-const.h"
#include "calls.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "tree-iterator.h"
#include "tree-inline.h"
#include "langhooks.h"
#include "tree-cfg.h"
#include "tree-ssa.h"
#include "tree-hash-traits.h"
#include "omp-general.h"
#include "omp-low.h"
#include "gimple-low.h"
#include "gomp-constants.h"
#include "splay-tree.h"
#include "gimple-walk.h"
#include "langhooks-def.h"	/* FIXME: for lhd_set_decl_assembler_name */
#include "builtins.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
#include "dbgcnt.h"
#include "omp-offload.h"
#include "context.h"
#include "tree-nested.h"

/* Hash set of poisoned variables in a bind expr.  */
static hash_set<tree> *asan_poisoned_variables = NULL;

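/* Flags recorded for each variable in the VARIABLES splay tree of a
   gimplify_omp_ctx; they describe how the variable is shared,
   privatized or mapped within the region.  */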
enum gimplify_omp_var_data
{
  GOVD_SEEN = 0x000001,
  GOVD_EXPLICIT = 0x000002,
  GOVD_SHARED = 0x000004,
  GOVD_PRIVATE = 0x000008,
  GOVD_FIRSTPRIVATE = 0x000010,
  GOVD_LASTPRIVATE = 0x000020,
  GOVD_REDUCTION = 0x000040,
  GOVD_LOCAL = 0x000080,
  GOVD_MAP = 0x000100,
  GOVD_DEBUG_PRIVATE = 0x000200,
  GOVD_PRIVATE_OUTER_REF = 0x000400,
  GOVD_LINEAR = 0x000800,
  GOVD_ALIGNED = 0x001000,

  /* Flag for GOVD_MAP: don't copy back.  */
  GOVD_MAP_TO_ONLY = 0x002000,

  /* Flag for GOVD_LINEAR or GOVD_LASTPRIVATE: no outer reference.  */
  GOVD_LINEAR_LASTPRIVATE_NO_OUTER = 0x004000,

  GOVD_MAP_0LEN_ARRAY = 0x008000,

  /* Flag for GOVD_MAP: an always,to or always,tofrom mapping.  */
  GOVD_MAP_ALWAYS_TO = 0x010000,

  /* Flag for shared vars that are or might be stored to in the region.  */
  GOVD_WRITTEN = 0x020000,

  /* Flag for GOVD_MAP, if it is a forced mapping.  */
  GOVD_MAP_FORCE = 0x040000,

  /* Flag for GOVD_MAP: must be present already.  */
  GOVD_MAP_FORCE_PRESENT = 0x080000,

  /* Flag for GOVD_MAP: only allocate.  */
  GOVD_MAP_ALLOC_ONLY = 0x100000,

  /* Flag for GOVD_MAP: only copy back.  */
  GOVD_MAP_FROM_ONLY = 0x200000,

  GOVD_NONTEMPORAL = 0x400000,

  /* Flag for GOVD_LASTPRIVATE: conditional modifier.  */
  GOVD_LASTPRIVATE_CONDITIONAL = 0x800000,

  GOVD_CONDTEMP = 0x1000000,

  /* Flag for GOVD_REDUCTION: inscan seen in {in,ex}clusive clause.  */
  GOVD_REDUCTION_INSCAN = 0x2000000,

  /* Flag for GOVD_MAP: (struct) vars that have pointer attachments for
     fields.  */
  GOVD_MAP_HAS_ATTACHMENTS = 0x4000000,

  /* Flag for GOVD_FIRSTPRIVATE: OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT.  */
  GOVD_FIRSTPRIVATE_IMPLICIT = 0x8000000,

  GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE
			   | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LINEAR
			   | GOVD_LOCAL)
};


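/* The kind of OpenMP or OpenACC region a gimplify_omp_ctx describes.
   The low bits distinguish combined and other variant forms of a
   construct, as the *_COMBINED_* values below show.  */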
enum omp_region_type
{
  ORT_WORKSHARE = 0x00,
  ORT_TASKGROUP = 0x01,
  ORT_SIMD = 0x04,

  ORT_PARALLEL = 0x08,
  ORT_COMBINED_PARALLEL = ORT_PARALLEL | 1,

  ORT_TASK = 0x10,
  ORT_UNTIED_TASK = ORT_TASK | 1,
  ORT_TASKLOOP = ORT_TASK | 2,
  ORT_UNTIED_TASKLOOP = ORT_UNTIED_TASK | 2,

  ORT_TEAMS = 0x20,
  ORT_COMBINED_TEAMS = ORT_TEAMS | 1,
  ORT_HOST_TEAMS = ORT_TEAMS | 2,
  ORT_COMBINED_HOST_TEAMS = ORT_COMBINED_TEAMS | 2,

  /* Data region.  */
  ORT_TARGET_DATA = 0x40,

  /* Data region with offloading.  */
  ORT_TARGET = 0x80,
  ORT_COMBINED_TARGET = ORT_TARGET | 1,
  ORT_IMPLICIT_TARGET = ORT_TARGET | 2,

  /* OpenACC variants.  */
  ORT_ACC = 0x100,  /* A generic OpenACC region.  */
  ORT_ACC_DATA = ORT_ACC | ORT_TARGET_DATA,  /* Data construct.  */
  ORT_ACC_PARALLEL = ORT_ACC | ORT_TARGET,  /* Parallel construct */
  ORT_ACC_KERNELS = ORT_ACC | ORT_TARGET | 2,  /* Kernels construct.  */
  ORT_ACC_SERIAL = ORT_ACC | ORT_TARGET | 4,  /* Serial construct.  */
  ORT_ACC_HOST_DATA = ORT_ACC | ORT_TARGET_DATA | 2,  /* Host data.  */

  /* Dummy OpenMP region, used to disable expansion of
     DECL_VALUE_EXPRs in taskloop pre body.  */
  ORT_NONE = 0x200
};

/* Gimplify hashtable helper.  */

struct gimplify_hasher : free_ptr_hash <elt_t>
{
  static inline hashval_t hash (const elt_t *);
  static inline bool equal (const elt_t *, const elt_t *);
};

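/* A gimplification context, pushed and popped via push_gimplify_context
   and pop_gimplify_context.  TEMPS chains the temporaries created in
   this context; CONDITIONS counts the COND_EXPRs we are lexically
   inside, and CONDITIONAL_CLEANUPS collects the cleanups seen while it
   is nonzero.  */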
struct gimplify_ctx
{
  struct gimplify_ctx *prev_context;

  vec<gbind *> bind_expr_stack;
  tree temps;
  gimple_seq conditional_cleanups;
  tree exit_label;
  tree return_temp;

  vec<tree> case_labels;
  hash_set<tree> *live_switch_vars;
  /* The formal temporary table.  Should this be persistent?  */
  hash_table<gimplify_hasher> *temp_htab;

  int conditions;
  unsigned into_ssa : 1;
  unsigned allow_rhs_cond_expr : 1;
  unsigned in_cleanup_point_expr : 1;
  unsigned keep_stack : 1;
  unsigned save_stack : 1;
  unsigned in_switch_expr : 1;
};

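/* Categories of variables distinguished by the defaultmap clause, used
   to index gimplify_omp_ctx::defaultmap below.  */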
enum gimplify_defaultmap_kind
{
  GDMK_SCALAR,
  GDMK_SCALAR_TARGET, /* Scalars with Fortran's TARGET attribute;
			 implicit mapping only.  */
  GDMK_AGGREGATE,
  GDMK_ALLOCATABLE,
  GDMK_POINTER
};

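/* Gimplification context for a single OpenMP/OpenACC region.
   VARIABLES maps decls (compared by DECL_UID) to sets of
   gimplify_omp_var_data flags; DEFAULTMAP gives the default GOVD_*
   treatment for each gimplify_defaultmap_kind category.  */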
struct gimplify_omp_ctx
{
  struct gimplify_omp_ctx *outer_context;
  splay_tree variables;
  hash_set<tree> *privatized_types;
  tree clauses;
  /* Iteration variables in an OMP_FOR.  */
  vec<tree> loop_iter_var;
  location_t location;
  enum omp_clause_default_kind default_kind;
  enum omp_region_type region_type;
  enum tree_code code;
  bool combined_loop;
  bool distribute;
  bool target_firstprivatize_array_bases;
  bool add_safelen1;
  bool order_concurrent;
  bool has_depend;
  bool in_for_exprs;
  int defaultmap[5];
};

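/* The innermost gimplification context, the innermost OpenMP region
   context, and whether we are currently gimplifying code inside an
   OpenMP or OpenACC construct.  */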
static struct gimplify_ctx *gimplify_ctxp;
static struct gimplify_omp_ctx *gimplify_omp_ctxp;
static bool in_omp_construct;

/* Forward declarations.  */
static enum gimplify_status gimplify_compound_expr (tree *, gimple_seq *, bool);
static hash_map<tree, tree> *oacc_declare_returns;
static enum gimplify_status gimplify_expr (tree *, gimple_seq *, gimple_seq *,
					   bool (*) (tree), fallback_t, bool);
static void prepare_gimple_addressable (tree *, gimple_seq *);

/* Shorter alias for gimple_seq_add_stmt_without_update, for use in
   gimplify.cc only.  */

static inline void
gimplify_seq_add_stmt (gimple_seq *seq_p, gimple *gs)
{
  gimple_seq_add_stmt_without_update (seq_p, gs);
}

/* Append sequence SRC to the end of sequence *DST_P.  If *DST_P is
   NULL, a new sequence is allocated.  This function is
   similar to gimple_seq_add_seq, but does not scan the operands.
   During gimplification, we need to manipulate statement sequences
   before the def/use vectors have been constructed.  */

static void
gimplify_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
{
  gimple_stmt_iterator si;

  if (src == NULL)
    return;

  si = gsi_last (*dst_p);
  gsi_insert_seq_after_without_update (&si, src, GSI_NEW_STMT);
}


/* Pointer to a list of allocated gimplify_ctx structs to be used for pushing
   and popping gimplify contexts.  */

static struct gimplify_ctx *ctx_pool = NULL;

/* Return a gimplify context struct from the pool.  */

static inline struct gimplify_ctx *
ctx_alloc (void)
{
  struct gimplify_ctx * c = ctx_pool;

  if (c)
    ctx_pool = c->prev_context;
  else
    c = XNEW (struct gimplify_ctx);

  memset (c, '\0', sizeof (*c));
  return c;
}

/* Put gimplify context C back into the pool.  */

static inline void
ctx_free (struct gimplify_ctx *c)
{
  c->prev_context = ctx_pool;
  ctx_pool = c;
}

/* Free allocated ctx stack memory.  */

void
free_gimplify_stack (void)
{
  struct gimplify_ctx *c;

  while ((c = ctx_pool))
    {
      ctx_pool = c->prev_context;
      free (c);
    }
}


/* Set up a context for the gimplifier.  */

void
push_gimplify_context (bool in_ssa, bool rhs_cond_ok)
{
  struct gimplify_ctx *c = ctx_alloc ();

  c->prev_context = gimplify_ctxp;
  gimplify_ctxp = c;
  gimplify_ctxp->into_ssa = in_ssa;
  gimplify_ctxp->allow_rhs_cond_expr = rhs_cond_ok;
}

/* Tear down a context for the gimplifier.  If BODY is non-null, then
   put the temporaries into the outer BIND_EXPR.  Otherwise, put them
   in the local_decls.

   BODY is not a sequence, but the first tuple in a sequence.  */

void
pop_gimplify_context (gimple *body)
{
  struct gimplify_ctx *c = gimplify_ctxp;

  gcc_assert (c
	      && (!c->bind_expr_stack.exists ()
		  || c->bind_expr_stack.is_empty ()));
  c->bind_expr_stack.release ();
  gimplify_ctxp = c->prev_context;

  if (body)
    declare_vars (c->temps, body, false);
  else
    record_vars (c->temps);

  delete c->temp_htab;
  c->temp_htab = NULL;
  ctx_free (c);
}

/* Push a GIMPLE_BIND tuple onto the stack of bindings.  */

static void
gimple_push_bind_expr (gbind *bind_stmt)
{
  gimplify_ctxp->bind_expr_stack.reserve (8);
  gimplify_ctxp->bind_expr_stack.safe_push (bind_stmt);
}

/* Pop the first element off the stack of bindings.  */

static void
gimple_pop_bind_expr (void)
{
  gimplify_ctxp->bind_expr_stack.pop ();
}

/* Return the first element of the stack of bindings.  */

gbind *
gimple_current_bind_expr (void)
{
  return gimplify_ctxp->bind_expr_stack.last ();
}

/* Return the stack of bindings created during gimplification.  */

vec<gbind *>
gimple_bind_expr_stack (void)
{
  return gimplify_ctxp->bind_expr_stack;
}

/* Return true iff there is a COND_EXPR between us and the innermost
   CLEANUP_POINT_EXPR.  This info is used by gimple_push_cleanup.  */

static bool
gimple_conditional_context (void)
{
  return gimplify_ctxp->conditions > 0;
}

/* Note that we've entered a COND_EXPR.  */

static void
gimple_push_condition (void)
{
#ifdef ENABLE_GIMPLE_CHECKING
  if (gimplify_ctxp->conditions == 0)
    gcc_assert (gimple_seq_empty_p (gimplify_ctxp->conditional_cleanups));
#endif
  ++(gimplify_ctxp->conditions);
}

/* Note that we've left a COND_EXPR.  If we're back at unconditional scope
   now, add any conditional cleanups we've seen to the prequeue.  */

static void
gimple_pop_condition (gimple_seq *pre_p)
{
  int conds = --(gimplify_ctxp->conditions);

  gcc_assert (conds >= 0);
  if (conds == 0)
    {
      gimplify_seq_add_seq (pre_p, gimplify_ctxp->conditional_cleanups);
      gimplify_ctxp->conditional_cleanups = NULL;
    }
}

/* A stable comparison routine for use with splay trees and DECLs.  */

static int
splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
{
  tree a = (tree) xa;
  tree b = (tree) xb;

  return DECL_UID (a) - DECL_UID (b);
}

/* Create a new omp construct that deals with variable remapping.  */

static struct gimplify_omp_ctx *
new_omp_context (enum omp_region_type region_type)
{
  struct gimplify_omp_ctx *c;

  c = XCNEW (struct gimplify_omp_ctx);
  c->outer_context = gimplify_omp_ctxp;
  c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
  c->privatized_types = new hash_set<tree>;
  c->location = input_location;
  c->region_type = region_type;
  if ((region_type & ORT_TASK) == 0)
    c->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
  else
    c->default_kind = OMP_CLAUSE_DEFAULT_UNSPECIFIED;
  c->defaultmap[GDMK_SCALAR] = GOVD_MAP;
  c->defaultmap[GDMK_SCALAR_TARGET] = GOVD_MAP;
  c->defaultmap[GDMK_AGGREGATE] = GOVD_MAP;
  c->defaultmap[GDMK_ALLOCATABLE] = GOVD_MAP;
  c->defaultmap[GDMK_POINTER] = GOVD_MAP;

  return c;
}

/* Destroy an omp construct that deals with variable remapping.  */

static void
delete_omp_context (struct gimplify_omp_ctx *c)
{
  splay_tree_delete (c->variables);
  delete c->privatized_types;
  c->loop_iter_var.release ();
  XDELETE (c);
}

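/* Forward declarations of the OpenMP variable bookkeeping routines
   defined later in this file.  */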
static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int);
static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool);

/* Both gimplify the statement T and append it to *SEQ_P.  This function
   behaves exactly as gimplify_stmt, but you don't have to pass T as a
   reference.  */

void
gimplify_and_add (tree t, gimple_seq *seq_p)
{
  gimplify_stmt (&t, seq_p);
}

/* Gimplify statement T into sequence *SEQ_P, and return the first
   tuple in the sequence of generated tuples for this statement.
   Return NULL if gimplifying T produced no tuples.  */

static gimple *
gimplify_and_return_first (tree t, gimple_seq *seq_p)
{
  gimple_stmt_iterator last = gsi_last (*seq_p);

  gimplify_and_add (t, seq_p);

  if (!gsi_end_p (last))
    {
      gsi_next (&last);
      return gsi_stmt (last);
    }
  else
    return gimple_seq_first_stmt (*seq_p);
}

/* Returns true iff T is a valid RHS for an assignment to an un-renamed
   LHS, or for a call argument.  */

static bool
is_gimple_mem_rhs (tree t)
{
  /* If we're dealing with a renamable type, either source or dest must be
     a renamed variable.  */
  if (is_gimple_reg_type (TREE_TYPE (t)))
    return is_gimple_val (t);
  else
    return is_gimple_val (t) || is_gimple_lvalue (t);
}

/* Return true if T is a CALL_EXPR or an expression that can be
   assigned to a temporary.  Note that this predicate should only be
   used during gimplification.  See the rationale for this in
   gimplify_modify_expr.  */

static bool
is_gimple_reg_rhs_or_call (tree t)
{
  return (get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS
	  || TREE_CODE (t) == CALL_EXPR);
}

/* Return true if T is a valid memory RHS or a CALL_EXPR.  Note that
   this predicate should only be used during gimplification.  See the
   rationale for this in gimplify_modify_expr.  */

static bool
is_gimple_mem_rhs_or_call (tree t)
{
  /* If we're dealing with a renamable type, either source or dest must be
     a renamed variable.  */
  if (is_gimple_reg_type (TREE_TYPE (t)))
    return is_gimple_val (t);
  else
    return (is_gimple_val (t)
	    || is_gimple_lvalue (t)
	    || TREE_CLOBBER_P (t)
	    || TREE_CODE (t) == CALL_EXPR);
}

/* Create a temporary with a name derived from VAL.  Subroutine of
   lookup_tmp_var; nobody else should call this function.  */

static inline tree
create_tmp_from_val (tree val)
{
  /* Drop all qualifiers and address-space information from the value type.  */
  tree type = TYPE_MAIN_VARIANT (TREE_TYPE (val));
  tree var = create_tmp_var (type, get_name (val));
  return var;
}

/* Create a temporary to hold the value of VAL.  If IS_FORMAL, try to reuse
   an existing expression temporary.  */

static tree
lookup_tmp_var (tree val, bool is_formal)
{
  tree ret;

  /* If not optimizing, never really reuse a temporary.  local-alloc
     won't allocate any variable that is used in more than one basic
     block, which means it will go into memory, causing much extra
     work in reload and final and poorer code generation, outweighing
     the extra memory allocation here.  */
  if (!optimize || !is_formal || TREE_SIDE_EFFECTS (val))
    ret = create_tmp_from_val (val);
  else
    {
      elt_t elt, *elt_p;
      elt_t **slot;

      elt.val = val;
      if (!gimplify_ctxp->temp_htab)
	gimplify_ctxp->temp_htab = new hash_table<gimplify_hasher> (1000);
      slot = gimplify_ctxp->temp_htab->find_slot (&elt, INSERT);
      if (*slot == NULL)
	{
	  elt_p = XNEW (elt_t);
	  elt_p->val = val;
	  elt_p->temp = ret = create_tmp_from_val (val);
	  *slot = elt_p;
	}
      else
	{
	  elt_p = *slot;
	  ret = elt_p->temp;
	}
    }

  return ret;
}

/* Helper for get_formal_tmp_var and get_initialized_tmp_var.  */

static tree
internal_get_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
		      bool is_formal, bool allow_ssa)
{
  tree t, mod;

  /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we
     can create an INIT_EXPR and convert it into a GIMPLE_CALL below.  */
  gimplify_expr (&val, pre_p, post_p, is_gimple_reg_rhs_or_call,
		 fb_rvalue);

  if (allow_ssa
      && gimplify_ctxp->into_ssa
      && is_gimple_reg_type (TREE_TYPE (val)))
    {
      t = make_ssa_name (TYPE_MAIN_VARIANT (TREE_TYPE (val)));
      if (! gimple_in_ssa_p (cfun))
	{
	  const char *name = get_name (val);
	  if (name)
	    SET_SSA_NAME_VAR_OR_IDENTIFIER (t, create_tmp_var_name (name));
	}
    }
  else
    t = lookup_tmp_var (val, is_formal);

  mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val));

  SET_EXPR_LOCATION (mod, EXPR_LOC_OR_LOC (val, input_location));

  /* gimplify_modify_expr might want to reduce this further.  */
  gimplify_and_add (mod, pre_p);
  ggc_free (mod);

  return t;
}

/* Return a formal temporary variable initialized with VAL.  PRE_P is as
   in gimplify_expr.  Only use this function if:

   1) The value of the unfactored expression represented by VAL will not
      change between the initialization and use of the temporary, and
   2) The temporary will not be otherwise modified.

   For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
   and #2 means it is inappropriate for && temps.

   For other cases, use get_initialized_tmp_var instead.  */

tree
get_formal_tmp_var (tree val, gimple_seq *pre_p)
{
  return internal_get_tmp_var (val, pre_p, NULL, true, true);
}

/* Return a temporary variable initialized with VAL.  PRE_P and POST_P
   are as in gimplify_expr.  */

tree
get_initialized_tmp_var (tree val, gimple_seq *pre_p,
			 gimple_seq *post_p /* = NULL */,
			 bool allow_ssa /* = true */)
{
  return internal_get_tmp_var (val, pre_p, post_p, false, allow_ssa);
}

/* Declare all the variables in VARS in SCOPE.  If DEBUG_INFO is true,
   generate debug info for them; otherwise don't.  */

void
declare_vars (tree vars, gimple *gs, bool debug_info)
{
  tree last = vars;
  if (last)
    {
      tree temps, block;

      gbind *scope = as_a <gbind *> (gs);

      temps = nreverse (last);

      block = gimple_bind_block (scope);
      gcc_assert (!block || TREE_CODE (block) == BLOCK);
      if (!block || !debug_info)
	{
	  DECL_CHAIN (last) = gimple_bind_vars (scope);
	  gimple_bind_set_vars (scope, temps);
	}
      else
	{
	  /* We need to attach the nodes both to the BIND_EXPR and to its
	     associated BLOCK for debugging purposes.  The key point here
	     is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
	     is a subchain of the BIND_EXPR_VARS of the BIND_EXPR.  */
	  if (BLOCK_VARS (block))
	    BLOCK_VARS (block) = chainon (BLOCK_VARS (block), temps);
	  else
	    {
	      gimple_bind_set_vars (scope,
				    chainon (gimple_bind_vars (scope), temps));
	      BLOCK_VARS (block) = temps;
	    }
	}
    }
}

/* For VAR a VAR_DECL of variable size, try to find a constant upper bound
   for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly.  Abort if
   no such upper bound can be obtained.  */

static void
force_constant_size (tree var)
{
  /* The only attempt we make is by querying the maximum size of objects
     of the variable's type.  */

  HOST_WIDE_INT max_size;

  gcc_assert (VAR_P (var));

  max_size = max_int_size_in_bytes (TREE_TYPE (var));

  gcc_assert (max_size >= 0);

  DECL_SIZE_UNIT (var)
    = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var)), max_size);
  DECL_SIZE (var)
    = build_int_cst (TREE_TYPE (DECL_SIZE (var)), max_size * BITS_PER_UNIT);
}

/* Push the temporary variable TMP into the current binding.  */

void
gimple_add_tmp_var_fn (struct function *fn, tree tmp)
{
  gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));

  /* Later processing assumes that the object size is constant, which might
     not be true at this point.  Force the use of a constant upper bound in
     this case.  */
  if (!tree_fits_poly_uint64_p (DECL_SIZE_UNIT (tmp)))
    force_constant_size (tmp);

  DECL_CONTEXT (tmp) = fn->decl;
  DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;

  record_vars_into (tmp, fn->decl);
}

/* Push the temporary variable TMP into the current binding.  */

void
gimple_add_tmp_var (tree tmp)
{
  gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));

  /* Later processing assumes that the object size is constant, which might
     not be true at this point.  Force the use of a constant upper bound in
     this case.  */
  if (!tree_fits_poly_uint64_p (DECL_SIZE_UNIT (tmp)))
    force_constant_size (tmp);

  DECL_CONTEXT (tmp) = current_function_decl;
  DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;

  if (gimplify_ctxp)
    {
      DECL_CHAIN (tmp) = gimplify_ctxp->temps;
      gimplify_ctxp->temps = tmp;

      /* Mark temporaries local within the nearest enclosing parallel.  */
      if (gimplify_omp_ctxp)
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
	  int flag = GOVD_LOCAL | GOVD_SEEN;
	  while (ctx
		 && (ctx->region_type == ORT_WORKSHARE
		     || ctx->region_type == ORT_TASKGROUP
		     || ctx->region_type == ORT_SIMD
		     || ctx->region_type == ORT_ACC))
	    {
	      if (ctx->region_type == ORT_SIMD
		  && TREE_ADDRESSABLE (tmp)
		  && !TREE_STATIC (tmp))
		{
		  if (TREE_CODE (DECL_SIZE_UNIT (tmp)) != INTEGER_CST)
		    ctx->add_safelen1 = true;
		  else if (ctx->in_for_exprs)
		    flag = GOVD_PRIVATE;
		  else
		    flag = GOVD_PRIVATE | GOVD_SEEN;
		  break;
		}
	      ctx = ctx->outer_context;
	    }
	  if (ctx)
	    omp_add_variable (ctx, tmp, flag);
	}
    }
  else if (cfun)
    record_vars (tmp);
  else
    {
      gimple_seq body_seq;

      /* This case is for nested functions.  We need to expose the locals
	 they create.  */
      body_seq = gimple_body (current_function_decl);
      declare_vars (tmp, gimple_seq_first_stmt (body_seq), false);
    }
}



/* This page contains routines to unshare tree nodes, i.e. to duplicate tree
   nodes that are referenced more than once in GENERIC functions.  This is
   necessary because gimplification (translation into GIMPLE) is performed
   by modifying tree nodes in-place, so gimplification of a shared node in a
   first context could generate an invalid GIMPLE form in a second context.

   This is achieved with a simple mark/copy/unmark algorithm that walks the
   GENERIC representation top-down, marks nodes with TREE_VISITED the first
   time it encounters them, duplicates them if they already have TREE_VISITED
   set, and finally removes the TREE_VISITED marks it has set.

   The algorithm works only at the function level, i.e. it generates a GENERIC
   representation of a function with no nodes shared within the function when
   passed a GENERIC function (except for nodes that are allowed to be shared).

   At the global level, it is also necessary to unshare tree nodes that are
   referenced in more than one function, for the same aforementioned reason.
   This requires some cooperation from the front-end.  There are 2 strategies:

   1. Manual unsharing.  The front-end needs to call unshare_expr on every
      expression that might end up being shared across functions.

   2. Deep unsharing.  This is an extension of regular unsharing.  Instead
      of calling unshare_expr on expressions that might be shared across
      functions, the front-end pre-marks them with TREE_VISITED.  This will
      ensure that they are unshared on the first reference within functions
      when the regular unsharing algorithm runs.  The counterpart is that
      this algorithm must look deeper than for manual unsharing, which is
      specified by LANG_HOOKS_DEEP_UNSHARING.
   If there are only a few specific cases of node sharing across functions, it
   is
   probably easier for a front-end to unshare the expressions manually.  On the
   contrary, if the expressions generated at the global level are as widespread
   as expressions generated within functions, deep unsharing is very likely the
   way to go.  */
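
/* For example, if a front end reuses a single ARRAY_REF node in two
   different statements, the walk below leaves the first reference in
   place, marks it TREE_VISITED, and replaces the second reference with
   a fresh copy, so that gimplifying either statement in place cannot
   corrupt the other.  */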

/* Similar to copy_tree_r but do not copy SAVE_EXPR or TARGET_EXPR nodes.
   These nodes model computations that must be done once.  If we were to
   unshare something like SAVE_EXPR(i++), the gimplification process would
   create wrong code.  However, if DATA is non-null, it must hold a pointer
   set that is used to unshare the subtrees of these nodes.  */

static tree
mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;
  enum tree_code code = TREE_CODE (t);

  /* Do not copy SAVE_EXPR, TARGET_EXPR or BIND_EXPR nodes themselves, but
     copy their subtrees if we can make sure to do it only once.  */
  if (code == SAVE_EXPR || code == TARGET_EXPR || code == BIND_EXPR)
    {
      if (data && !((hash_set<tree> *)data)->add (t))
	;
      else
	*walk_subtrees = 0;
    }

  /* Stop at types, decls, constants like copy_tree_r.  */
  else if (TREE_CODE_CLASS (code) == tcc_type
	   || TREE_CODE_CLASS (code) == tcc_declaration
	   || TREE_CODE_CLASS (code) == tcc_constant)
    *walk_subtrees = 0;

  /* Cope with the statement expression extension.  */
  else if (code == STATEMENT_LIST)
    ;

  /* Leave the bulk of the work to copy_tree_r itself.  */
  else
    copy_tree_r (tp, walk_subtrees, NULL);

  return NULL_TREE;
}

/* Callback for walk_tree to unshare most of the shared trees rooted at *TP.
   If *TP has been visited already, then *TP is deeply copied by calling
   mostly_copy_tree_r.  DATA is passed to mostly_copy_tree_r unmodified.  */

static tree
copy_if_shared_r (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;
  enum tree_code code = TREE_CODE (t);

  /* Skip types, decls, and constants.  But we do want to look at their
     types and the bounds of types.  Mark them as visited so we properly
     unmark their subtrees on the unmark pass.  If we've already seen them,
     don't look down further.  */
  if (TREE_CODE_CLASS (code) == tcc_type
      || TREE_CODE_CLASS (code) == tcc_declaration
      || TREE_CODE_CLASS (code) == tcc_constant)
    {
      if (TREE_VISITED (t))
	*walk_subtrees = 0;
      else
	TREE_VISITED (t) = 1;
    }

  /* If this node has been visited already, unshare it and don't look
     any deeper.  */
  else if (TREE_VISITED (t))
    {
      walk_tree (tp, mostly_copy_tree_r, data, NULL);
      *walk_subtrees = 0;
    }

  /* Otherwise, mark the node as visited and keep looking.  */
  else
    TREE_VISITED (t) = 1;

  return NULL_TREE;
}

/* Unshare most of the shared trees rooted at *TP.  DATA is passed to the
   copy_if_shared_r callback unmodified.  */

void
copy_if_shared (tree *tp, void *data)
{
  walk_tree (tp, copy_if_shared_r, data, NULL);
}

/* Unshare all the trees in the body of FNDECL, as well as in the bodies of
   any nested functions.  */

static void
unshare_body (tree fndecl)
{
  struct cgraph_node *cgn = cgraph_node::get (fndecl);
  /* If the language requires deep unsharing, we need a pointer set to make
     sure we don't repeatedly unshare subtrees of unshareable nodes.  */
  hash_set<tree> *visited
    = lang_hooks.deep_unsharing ? new hash_set<tree> : NULL;

  copy_if_shared (&DECL_SAVED_TREE (fndecl), visited);
  copy_if_shared (&DECL_SIZE (DECL_RESULT (fndecl)), visited);
  copy_if_shared (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)), visited);

  delete visited;

  if (cgn)
    for (cgn = first_nested_function (cgn); cgn;
	 cgn = next_nested_function (cgn))
      unshare_body (cgn->decl);
}

/* Callback for walk_tree to unmark the visited trees rooted at *TP.
   Subtrees are walked until the first unvisited node is encountered.  */

static tree
unmark_visited_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;

  /* If this node has been visited, unmark it and keep looking.  */
  if (TREE_VISITED (t))
    TREE_VISITED (t) = 0;

  /* Otherwise, don't look any deeper.  */
  else
    *walk_subtrees = 0;

  return NULL_TREE;
}

/* Unmark the visited trees rooted at *TP.  */

static inline void
unmark_visited (tree *tp)
{
  walk_tree (tp, unmark_visited_r, NULL, NULL);
}

/* Likewise, but mark all trees as not visited.  */

static void
unvisit_body (tree fndecl)
{
  struct cgraph_node *cgn = cgraph_node::get (fndecl);

  unmark_visited (&DECL_SAVED_TREE (fndecl));
  unmark_visited (&DECL_SIZE (DECL_RESULT (fndecl)));
  unmark_visited (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)));

  if (cgn)
    for (cgn = first_nested_function (cgn);
	 cgn; cgn = next_nested_function (cgn))
      unvisit_body (cgn->decl);
}

/* Unconditionally make an unshared copy of EXPR.  This is used when using
   stored expressions which span multiple functions, such as BINFO_VTABLE,
   as the normal unsharing process can't tell that they're shared.  */

tree
unshare_expr (tree expr)
{
  walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
  return expr;
}

/* Worker for unshare_expr_without_location.  */

static tree
prune_expr_location (tree *tp, int *walk_subtrees, void *)
{
  if (EXPR_P (*tp))
    SET_EXPR_LOCATION (*tp, UNKNOWN_LOCATION);
  else
    *walk_subtrees = 0;
  return NULL_TREE;
}

/* Similar to unshare_expr but also prune all expression locations
   from EXPR.  */

tree
unshare_expr_without_location (tree expr)
{
  walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
  if (EXPR_P (expr))
    walk_tree (&expr, prune_expr_location, NULL, NULL);
  return expr;
}

/* Return the EXPR_LOCATION of EXPR, if it (maybe recursively) has
   one, OR_ELSE otherwise.  The location of a STATEMENT_LIST
   comprising at least one DEBUG_BEGIN_STMT followed by exactly one
   EXPR is the location of the EXPR.  */

static location_t
rexpr_location (tree expr, location_t or_else = UNKNOWN_LOCATION)
{
  if (!expr)
    return or_else;

  if (EXPR_HAS_LOCATION (expr))
    return EXPR_LOCATION (expr);

  if (TREE_CODE (expr) != STATEMENT_LIST)
    return or_else;

  tree_stmt_iterator i = tsi_start (expr);

  bool found = false;
  while (!tsi_end_p (i) && TREE_CODE (tsi_stmt (i)) == DEBUG_BEGIN_STMT)
    {
      found = true;
      tsi_next (&i);
    }

  if (!found || !tsi_one_before_end_p (i))
    return or_else;

  return rexpr_location (tsi_stmt (i), or_else);
}

/* Return TRUE iff EXPR (maybe recursively) has a location; see
   rexpr_location for the potential recursion.  */

static inline bool
rexpr_has_location (tree expr)
{
  return rexpr_location (expr) != UNKNOWN_LOCATION;
}


/* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
   contain statements and have a value.  Assign its value to a temporary
   and give it void_type_node.  Return the temporary, or NULL_TREE if
   WRAPPER was already void.  */

tree
voidify_wrapper_expr (tree wrapper, tree temp)
{
  tree type = TREE_TYPE (wrapper);
  if (type && !VOID_TYPE_P (type))
    {
      tree *p;

      /* Set p to point to the body of the wrapper.  Loop until we find
	 something that isn't a wrapper.  */
      for (p = &wrapper; p && *p; )
	{
	  switch (TREE_CODE (*p))
	    {
	    case BIND_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      /* For a BIND_EXPR, the body is operand 1.  */
	      p = &BIND_EXPR_BODY (*p);
	      break;

	    case CLEANUP_POINT_EXPR:
	    case TRY_FINALLY_EXPR:
	    case TRY_CATCH_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      p = &TREE_OPERAND (*p, 0);
	      break;

	    case STATEMENT_LIST:
	      {
		tree_stmt_iterator i = tsi_last (*p);
		TREE_SIDE_EFFECTS (*p) = 1;
		TREE_TYPE (*p) = void_type_node;
		p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i);
	      }
	      break;

	    case COMPOUND_EXPR:
	      /* Advance to the last statement.  Set all container types to
		 void.  */
	      for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1))
		{
		  TREE_SIDE_EFFECTS (*p) = 1;
		  TREE_TYPE (*p) = void_type_node;
		}
	      break;

	    case TRANSACTION_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      p = &TRANSACTION_EXPR_BODY (*p);
	      break;

	    default:
	      /* Assume that any tree upon which voidify_wrapper_expr is
		 directly called is a wrapper, and that its body is op0.  */
	      if (p == &wrapper)
		{
		  TREE_SIDE_EFFECTS (*p) = 1;
		  TREE_TYPE (*p) = void_type_node;
		  p = &TREE_OPERAND (*p, 0);
		  break;
		}
	      goto out;
	    }
	}

    out:
      if (p == NULL || IS_EMPTY_STMT (*p))
	temp = NULL_TREE;
      else if (temp)
	{
	  /* The wrapper is on the RHS of an assignment that we're pushing
	     down.  */
	  gcc_assert (TREE_CODE (temp) == INIT_EXPR
		      || TREE_CODE (temp) == MODIFY_EXPR);
	  TREE_OPERAND (temp, 1) = *p;
	  *p = temp;
	}
      else
	{
	  temp = create_tmp_var (type, "retval");
	  *p = build2 (INIT_EXPR, type, temp, *p);
	}

      return temp;
    }

  return NULL_TREE;
}

/* Prepare calls to builtins to SAVE and RESTORE the stack as well as
   a temporary through which they communicate.  */

static void
build_stack_save_restore (gcall **save, gcall **restore)
{
  tree tmp_var;

  *save = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_SAVE), 0);
  tmp_var = create_tmp_var (ptr_type_node, "saved_stack");
  gimple_call_set_lhs (*save, tmp_var);

  *restore
    = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_RESTORE),
			 1, tmp_var);
}

/* Generate an IFN_ASAN_MARK call that poisons the shadow memory of the
   DECL variable.  */

static tree
build_asan_poison_call_expr (tree decl)
{
  /* Do not poison variables that have size equal to zero.  */
  tree unit_size = DECL_SIZE_UNIT (decl);
  if (zerop (unit_size))
    return NULL_TREE;

  tree base = build_fold_addr_expr (decl);

  return build_call_expr_internal_loc (UNKNOWN_LOCATION, IFN_ASAN_MARK,
				       void_type_node, 3,
				       build_int_cst (integer_type_node,
						      ASAN_MARK_POISON),
				       base, unit_size);
}

/* Generate an IFN_ASAN_MARK call that poisons or unpoisons (depending
   on the POISON flag) the shadow memory of the DECL variable.  The call
   is inserted at the position identified by iterator IT; BEFORE selects
   whether it is inserted before or after that position.  */

static void
asan_poison_variable (tree decl, bool poison, gimple_stmt_iterator *it,
		      bool before)
{
  tree unit_size = DECL_SIZE_UNIT (decl);
  tree base = build_fold_addr_expr (decl);

  /* Do not poison variables that have size equal to zero.  */
  if (zerop (unit_size))
    return;

  /* All stack variables must be aligned to the shadow granularity
     (in bytes).  */
  gcc_assert (!hwasan_sanitize_p () || hwasan_sanitize_stack_p ());
  unsigned shadow_granularity
    = hwasan_sanitize_p () ? HWASAN_TAG_GRANULE_SIZE : ASAN_SHADOW_GRANULARITY;
  if (DECL_ALIGN_UNIT (decl) <= shadow_granularity)
    SET_DECL_ALIGN (decl, BITS_PER_UNIT * shadow_granularity);

  HOST_WIDE_INT flags = poison ? ASAN_MARK_POISON : ASAN_MARK_UNPOISON;

  gimple *g
    = gimple_build_call_internal (IFN_ASAN_MARK, 3,
				  build_int_cst (integer_type_node, flags),
				  base, unit_size);

  if (before)
    gsi_insert_before (it, g, GSI_NEW_STMT);
  else
    gsi_insert_after (it, g, GSI_NEW_STMT);
}

/* Generate an IFN_ASAN_MARK internal call that, depending on the POISON
   flag, either poisons or unpoisons DECL.  The created statement is
   appended to the SEQ_P gimple sequence.  */

static void
asan_poison_variable (tree decl, bool poison, gimple_seq *seq_p)
{
  gimple_stmt_iterator it = gsi_last (*seq_p);
  bool before = false;

  if (gsi_end_p (it))
    before = true;

  asan_poison_variable (decl, poison, &it, before);
}

/* Sort pair of VAR_DECLs A and B by DECL_UID.  */

static int
sort_by_decl_uid (const void *a, const void *b)
{
  const tree *t1 = (const tree *)a;
  const tree *t2 = (const tree *)b;

  int uid1 = DECL_UID (*t1);
  int uid2 = DECL_UID (*t2);

  if (uid1 < uid2)
    return -1;
  else if (uid1 > uid2)
    return 1;
  else
    return 0;
}

/* Generate an IFN_ASAN_MARK internal call for each variable in
   VARIABLES, poisoning or unpoisoning depending on the POISON flag.
   The created statements are appended to the SEQ_P gimple sequence.  */

static void
asan_poison_variables (hash_set<tree> *variables, bool poison,
		       gimple_seq *seq_p)
{
  unsigned c = variables->elements ();
  if (c == 0)
    return;

  auto_vec<tree> sorted_variables (c);

  for (hash_set<tree>::iterator it = variables->begin ();
       it != variables->end (); ++it)
    sorted_variables.safe_push (*it);

  sorted_variables.qsort (sort_by_decl_uid);

  unsigned i;
  tree var;
  FOR_EACH_VEC_ELT (sorted_variables, i, var)
    {
      asan_poison_variable (var, poison, seq_p);

      /* Add the use_after_scope_memory attribute to the variable to
	 prevent it from being rewritten into SSA form.  */
      if (!lookup_attribute (ASAN_USE_AFTER_SCOPE_ATTRIBUTE,
			     DECL_ATTRIBUTES (var)))
	DECL_ATTRIBUTES (var)
	  = tree_cons (get_identifier (ASAN_USE_AFTER_SCOPE_ATTRIBUTE),
		       integer_one_node,
		       DECL_ATTRIBUTES (var));
    }
}

/* Gimplify a BIND_EXPR.  Just voidify and recurse.  */

static enum gimplify_status
gimplify_bind_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree bind_expr = *expr_p;
  bool old_keep_stack = gimplify_ctxp->keep_stack;
  bool old_save_stack = gimplify_ctxp->save_stack;
  tree t;
  gbind *bind_stmt;
  gimple_seq body, cleanup;
  gcall *stack_save;
  location_t start_locus = 0, end_locus = 0;
  tree ret_clauses = NULL;

  tree temp = voidify_wrapper_expr (bind_expr, NULL);

  /* Mark variables seen in this bind expr.  */
  for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
    {
      if (VAR_P (t))
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;

	  /* Mark variable as local.  */
	  if (ctx && ctx->region_type != ORT_NONE && !DECL_EXTERNAL (t))
	    {
	      if (! DECL_SEEN_IN_BIND_EXPR_P (t)
		  || splay_tree_lookup (ctx->variables,
					(splay_tree_key) t) == NULL)
		{
		  int flag = GOVD_LOCAL;
		  if (ctx->region_type == ORT_SIMD
		      && TREE_ADDRESSABLE (t)
		      && !TREE_STATIC (t))
		    {
		      if (TREE_CODE (DECL_SIZE_UNIT (t)) != INTEGER_CST)
			ctx->add_safelen1 = true;
		      else
			flag = GOVD_PRIVATE;
		    }
		  omp_add_variable (ctx, t, flag | GOVD_SEEN);
		}
	      /* Static locals inside of target construct or offloaded
		 routines need to be "omp declare target".  */
	      if (TREE_STATIC (t))
		for (; ctx; ctx = ctx->outer_context)
		  if ((ctx->region_type & ORT_TARGET) != 0)
		    {
		      if (!lookup_attribute ("omp declare target",
					     DECL_ATTRIBUTES (t)))
			{
			  tree id = get_identifier ("omp declare target");
			  DECL_ATTRIBUTES (t)
			    = tree_cons (id, NULL_TREE, DECL_ATTRIBUTES (t));
			  varpool_node *node = varpool_node::get (t);
			  if (node)
			    {
			      node->offloadable = 1;
			      if (ENABLE_OFFLOADING && !DECL_EXTERNAL (t))
				{
				  g->have_offload = true;
				  if (!in_lto_p)
				    vec_safe_push (offload_vars, t);
				}
			    }
			}
		      break;
		    }
	    }

	  DECL_SEEN_IN_BIND_EXPR_P (t) = 1;

	  if (DECL_HARD_REGISTER (t) && !is_global_var (t) && cfun)
	    cfun->has_local_explicit_reg_vars = true;
	}
    }

  bind_stmt = gimple_build_bind (BIND_EXPR_VARS (bind_expr), NULL,
				 BIND_EXPR_BLOCK (bind_expr));
  gimple_push_bind_expr (bind_stmt);

  gimplify_ctxp->keep_stack = false;
  gimplify_ctxp->save_stack = false;

  /* Gimplify the body into the GIMPLE_BIND tuple's body.  */
  body = NULL;
  gimplify_stmt (&BIND_EXPR_BODY (bind_expr), &body);
  gimple_bind_set_body (bind_stmt, body);

  /* Source location wise, the cleanup code (stack_restore and clobbers)
     belongs to the end of the block, so propagate what we have.  The
     stack_save operation belongs to the beginning of block, which we can
     infer from the bind_expr directly if the block has no explicit
     assignment.  */
  if (BIND_EXPR_BLOCK (bind_expr))
    {
      end_locus = BLOCK_SOURCE_END_LOCATION (BIND_EXPR_BLOCK (bind_expr));
      start_locus = BLOCK_SOURCE_LOCATION (BIND_EXPR_BLOCK (bind_expr));
    }
  if (start_locus == 0)
    start_locus = EXPR_LOCATION (bind_expr);

  cleanup = NULL;
  stack_save = NULL;

  /* If the code both contains VLAs and calls alloca, then we cannot reclaim
     the stack space allocated to the VLAs.  */
  if (gimplify_ctxp->save_stack && !gimplify_ctxp->keep_stack)
    {
      gcall *stack_restore;

      /* Save stack on entry and restore it on exit.  Add a try_finally
	 block to achieve this.  */
      build_stack_save_restore (&stack_save, &stack_restore);

      gimple_set_location (stack_save, start_locus);
      gimple_set_location (stack_restore, end_locus);

      gimplify_seq_add_stmt (&cleanup, stack_restore);
    }

  /* Add clobbers for all variables that go out of scope.  */
  for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
    {
      if (VAR_P (t)
	  && !is_global_var (t)
	  && DECL_CONTEXT (t) == current_function_decl)
	{
	  if (!DECL_HARD_REGISTER (t)
	      && !TREE_THIS_VOLATILE (t)
	      && !DECL_HAS_VALUE_EXPR_P (t)
	      /* Only care for variables that have to be in memory.  Others
		 will be rewritten into SSA names, hence moved to the
		 top-level.  */
	      && !is_gimple_reg (t)
	      && flag_stack_reuse != SR_NONE)
	    {
	      tree clobber = build_clobber (TREE_TYPE (t), CLOBBER_EOL);
	      gimple *clobber_stmt;
	      clobber_stmt = gimple_build_assign (t, clobber);
	      gimple_set_location (clobber_stmt, end_locus);
	      gimplify_seq_add_stmt (&cleanup, clobber_stmt);
	    }

	  if (flag_openacc && oacc_declare_returns != NULL)
	    {
	      tree key = t;
	      if (DECL_HAS_VALUE_EXPR_P (key))
		{
		  key = DECL_VALUE_EXPR (key);
		  if (TREE_CODE (key) == INDIRECT_REF)
		    key = TREE_OPERAND (key, 0);
		}
	      tree *c = oacc_declare_returns->get (key);
	      if (c != NULL)
		{
		  if (ret_clauses)
		    OMP_CLAUSE_CHAIN (*c) = ret_clauses;

		  ret_clauses = unshare_expr (*c);

		  oacc_declare_returns->remove (key);

		  if (oacc_declare_returns->is_empty ())
		    {
		      delete oacc_declare_returns;
		      oacc_declare_returns = NULL;
		    }
		}
	    }
	}

      if (asan_poisoned_variables != NULL
	  && asan_poisoned_variables->contains (t))
	{
	  asan_poisoned_variables->remove (t);
	  asan_poison_variable (t, true, &cleanup);
	}

      if (gimplify_ctxp->live_switch_vars != NULL
	  && gimplify_ctxp->live_switch_vars->contains (t))
	gimplify_ctxp->live_switch_vars->remove (t);
    }

  if (ret_clauses)
    {
      gomp_target *stmt;
      gimple_stmt_iterator si = gsi_start (cleanup);

      stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
				      ret_clauses);
      gsi_insert_seq_before_without_update (&si, stmt, GSI_NEW_STMT);
    }

  if (cleanup)
    {
      gtry *gs;
      gimple_seq new_body;

      new_body = NULL;
      gs = gimple_build_try (gimple_bind_body (bind_stmt), cleanup,
			     GIMPLE_TRY_FINALLY);

      if (stack_save)
	gimplify_seq_add_stmt (&new_body, stack_save);
      gimplify_seq_add_stmt (&new_body, gs);
      gimple_bind_set_body (bind_stmt, new_body);
    }

  /* keep_stack propagates all the way up to the outermost BIND_EXPR.  */
  if (!gimplify_ctxp->keep_stack)
    gimplify_ctxp->keep_stack = old_keep_stack;
  gimplify_ctxp->save_stack = old_save_stack;

  gimple_pop_bind_expr ();

  gimplify_seq_add_stmt (pre_p, bind_stmt);

  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  *expr_p = NULL_TREE;
  return GS_ALL_DONE;
}

/* Maybe add early return predict statement to PRE_P sequence.  */

static void
maybe_add_early_return_predict_stmt (gimple_seq *pre_p)
{
  /* If we are in a conditional context, add a PREDICT statement marking
     the early-return path as unlikely.  */
1576 if (gimple_conditional_context ())
1577 {
1578 gimple *predict = gimple_build_predict (PRED_TREE_EARLY_RETURN,
1579 NOT_TAKEN);
1580 gimplify_seq_add_stmt (pre_p, predict);
1581 }
1582 }
1583
1584 /* Gimplify a RETURN_EXPR. If the expression to be returned is not a
1585 GIMPLE value, it is assigned to a new temporary and the statement is
1586 re-written to return the temporary.
1587
1588 PRE_P points to the sequence where side effects that must happen before
1589 STMT should be stored. */
1590
1591 static enum gimplify_status
gimplify_return_expr(tree stmt,gimple_seq * pre_p)1592 gimplify_return_expr (tree stmt, gimple_seq *pre_p)
1593 {
1594 greturn *ret;
1595 tree ret_expr = TREE_OPERAND (stmt, 0);
1596 tree result_decl, result;
1597
1598 if (ret_expr == error_mark_node)
1599 return GS_ERROR;
1600
1601 if (!ret_expr
1602 || TREE_CODE (ret_expr) == RESULT_DECL)
1603 {
1604 maybe_add_early_return_predict_stmt (pre_p);
1605 greturn *ret = gimple_build_return (ret_expr);
1606 copy_warning (ret, stmt);
1607 gimplify_seq_add_stmt (pre_p, ret);
1608 return GS_ALL_DONE;
1609 }
1610
1611 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
1612 result_decl = NULL_TREE;
1613 else if (TREE_CODE (ret_expr) == COMPOUND_EXPR)
1614 {
1615 /* Used in C++ for handling EH cleanup of the return value if a local
1616 cleanup throws. Assume the front-end knows what it's doing. */
1617 result_decl = DECL_RESULT (current_function_decl);
1618 /* But crash if we end up trying to modify ret_expr below. */
1619 ret_expr = NULL_TREE;
1620 }
1621 else
1622 {
1623 result_decl = TREE_OPERAND (ret_expr, 0);
1624
1625 /* See through a return by reference. */
1626 if (TREE_CODE (result_decl) == INDIRECT_REF)
1627 result_decl = TREE_OPERAND (result_decl, 0);
1628
1629 gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR
1630 || TREE_CODE (ret_expr) == INIT_EXPR)
1631 && TREE_CODE (result_decl) == RESULT_DECL);
1632 }
1633
1634 /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
1635 Recall that aggregate_value_p is FALSE for any aggregate type that is
1636 returned in registers. If we're returning values in registers, then
1637 we don't want to extend the lifetime of the RESULT_DECL, particularly
1638 across another call. In addition, for those aggregates for which
1639 hard_function_value generates a PARALLEL, we'll die during normal
1640 expansion of structure assignments; there's special code in expand_return
1641 to handle this case that does not exist in expand_expr. */
1642 if (!result_decl)
1643 result = NULL_TREE;
1644 else if (aggregate_value_p (result_decl, TREE_TYPE (current_function_decl)))
1645 {
1646 if (!poly_int_tree_p (DECL_SIZE (result_decl)))
1647 {
1648 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (result_decl)))
1649 gimplify_type_sizes (TREE_TYPE (result_decl), pre_p);
1650 /* Note that we don't use gimplify_vla_decl because the RESULT_DECL
1651 should be effectively allocated by the caller, i.e. all calls to
1652 this function must be subject to the Return Slot Optimization. */
1653 gimplify_one_sizepos (&DECL_SIZE (result_decl), pre_p);
1654 gimplify_one_sizepos (&DECL_SIZE_UNIT (result_decl), pre_p);
1655 }
1656 result = result_decl;
1657 }
1658 else if (gimplify_ctxp->return_temp)
1659 result = gimplify_ctxp->return_temp;
1660 else
1661 {
1662 result = create_tmp_reg (TREE_TYPE (result_decl));
1663
1664 /* ??? With complex control flow (usually involving abnormal edges),
1665 we can wind up warning about an uninitialized value for this. Due
1666 to how this variable is constructed and initialized, this is never
1667 true. Give up and never warn. */
1668 suppress_warning (result, OPT_Wuninitialized);
1669
1670 gimplify_ctxp->return_temp = result;
1671 }
1672
1673 /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
1674 Then gimplify the whole thing. */
1675 if (result != result_decl)
1676 TREE_OPERAND (ret_expr, 0) = result;
1677
1678 gimplify_and_add (TREE_OPERAND (stmt, 0), pre_p);
1679
1680 maybe_add_early_return_predict_stmt (pre_p);
1681 ret = gimple_build_return (result);
1682 copy_warning (ret, stmt);
1683 gimplify_seq_add_stmt (pre_p, ret);
1684
1685 return GS_ALL_DONE;
1686 }
1687
1688 /* Gimplify a variable-length array DECL. */
1689
1690 static void
gimplify_vla_decl(tree decl,gimple_seq * seq_p)1691 gimplify_vla_decl (tree decl, gimple_seq *seq_p)
1692 {
1693 /* This is a variable-sized decl. Simplify its size and mark it
1694 for deferred expansion. */
1695 tree t, addr, ptr_type;
1696
1697 gimplify_one_sizepos (&DECL_SIZE (decl), seq_p);
1698 gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), seq_p);
1699
1700 /* Don't mess with a DECL_VALUE_EXPR set by the front-end. */
1701 if (DECL_HAS_VALUE_EXPR_P (decl))
1702 return;
1703
1704 /* All occurrences of this decl in final gimplified code will be
1705 replaced by indirection. Setting DECL_VALUE_EXPR does two
1706 things: First, it lets the rest of the gimplifier know what
1707 replacement to use. Second, it lets the debug info know
1708 where to find the value. */
1709 ptr_type = build_pointer_type (TREE_TYPE (decl));
1710 addr = create_tmp_var (ptr_type, get_name (decl));
1711 DECL_IGNORED_P (addr) = 0;
1712 t = build_fold_indirect_ref (addr);
1713 TREE_THIS_NOTRAP (t) = 1;
1714 SET_DECL_VALUE_EXPR (decl, t);
1715 DECL_HAS_VALUE_EXPR_P (decl) = 1;
1716
1717 t = build_alloca_call_expr (DECL_SIZE_UNIT (decl), DECL_ALIGN (decl),
1718 max_int_size_in_bytes (TREE_TYPE (decl)));
1719 /* The call has been built for a variable-sized object. */
1720 CALL_ALLOCA_FOR_VAR_P (t) = 1;
1721 t = fold_convert (ptr_type, t);
1722 t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
1723
1724 gimplify_and_add (t, seq_p);
1725
1726 /* Record the dynamic allocation associated with DECL if requested. */
1727 if (flag_callgraph_info & CALLGRAPH_INFO_DYNAMIC_ALLOC)
1728 record_dynamic_alloc (decl);
1729 }
1730
1731 /* A helper function to be called via walk_tree. Mark all labels under *TP
1732 as being forced. To be called for DECL_INITIAL of static variables. */
1733
1734 static tree
force_labels_r(tree * tp,int * walk_subtrees,void * data ATTRIBUTE_UNUSED)1735 force_labels_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1736 {
1737 if (TYPE_P (*tp))
1738 *walk_subtrees = 0;
1739 if (TREE_CODE (*tp) == LABEL_DECL)
1740 {
1741 FORCED_LABEL (*tp) = 1;
1742 cfun->has_forced_label_in_static = 1;
1743 }
1744
1745 return NULL_TREE;
1746 }
1747
1748 /* Generate an initialization to automatic variable DECL based on INIT_TYPE.
1749 Build a call to internal const function DEFERRED_INIT:
1750 1st argument: SIZE of the DECL;
1751 2nd argument: INIT_TYPE;
1752 3rd argument: NAME of the DECL;
1753
1754 as LHS = DEFERRED_INIT (SIZE of the DECL, INIT_TYPE, NAME of the DECL). */
1755
1756 static void
gimple_add_init_for_auto_var(tree decl,enum auto_init_type init_type,gimple_seq * seq_p)1757 gimple_add_init_for_auto_var (tree decl,
1758 enum auto_init_type init_type,
1759 gimple_seq *seq_p)
1760 {
1761 gcc_assert (auto_var_p (decl));
1762 gcc_assert (init_type > AUTO_INIT_UNINITIALIZED);
1763 location_t loc = EXPR_LOCATION (decl);
1764 tree decl_size = TYPE_SIZE_UNIT (TREE_TYPE (decl));
1765
1766 tree init_type_node
1767 = build_int_cst (integer_type_node, (int) init_type);
1768
1769 tree decl_name = NULL_TREE;
1770 if (DECL_NAME (decl))
1771
1772 decl_name = build_string_literal (IDENTIFIER_LENGTH (DECL_NAME (decl)) + 1,
1773 IDENTIFIER_POINTER (DECL_NAME (decl)));
1774
1775 else
1776 {
1777 char *decl_name_anonymous = xasprintf ("D.%u", DECL_UID (decl));
1778 decl_name = build_string_literal (strlen (decl_name_anonymous) + 1,
1779 decl_name_anonymous);
1780 free (decl_name_anonymous);
1781 }
1782
1783 tree call = build_call_expr_internal_loc (loc, IFN_DEFERRED_INIT,
1784 TREE_TYPE (decl), 3,
1785 decl_size, init_type_node,
1786 decl_name);
1787
1788 gimplify_assign (decl, call, seq_p);
1789 }
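
/* For example (illustrative only): with -ftrivial-auto-var-init=zero,
   a plain 'int i;' receives

     i = .DEFERRED_INIT (4, 2, &"i"[0]);

   where 4 is the size in bytes, 2 is the numeric value of
   AUTO_INIT_ZERO and "i" is the decl name; the call is only expanded
   into a real block initialization at RTL expansion time.  */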
1790
1791 /* Generate padding initialization for automatic variable DECL.
1792 C guarantees that brace-init with fewer initializers than aggregate
1793 members will initialize the rest of the aggregate as if it were
1794 static initialization.  In turn, static initialization guarantees
1795 that padding is initialized to zero.  So, we always initialize padding
1796 to zeroes regardless of INIT_TYPE.
1797 To do the padding initialization, we insert a call to
1798 __builtin_clear_padding (&decl, 0, for_auto_init = true).
1799 Note, we add an additional dummy argument for __builtin_clear_padding,
1800 'for_auto_init', to distinguish whether this call is for automatic
1801 variable initialization or not.
1802 */
1803 static void
1804 gimple_add_padding_init_for_auto_var (tree decl, bool is_vla,
1805 gimple_seq *seq_p)
1806 {
1807 tree addr_of_decl = NULL_TREE;
1808 tree fn = builtin_decl_explicit (BUILT_IN_CLEAR_PADDING);
1809
1810 if (is_vla)
1811 {
1812 /* The temporary address variable for this vla should be
1813 created in gimplify_vla_decl. */
1814 gcc_assert (DECL_HAS_VALUE_EXPR_P (decl));
1815 gcc_assert (TREE_CODE (DECL_VALUE_EXPR (decl)) == INDIRECT_REF);
1816 addr_of_decl = TREE_OPERAND (DECL_VALUE_EXPR (decl), 0);
1817 }
1818 else
1819 {
1820 mark_addressable (decl);
1821 addr_of_decl = build_fold_addr_expr (decl);
1822 }
1823
1824 gimple *call = gimple_build_call (fn, 2, addr_of_decl,
1825 build_one_cst (TREE_TYPE (addr_of_decl)));
1826 gimplify_seq_add_stmt (seq_p, call);
1827 }
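
/* For example (illustrative only): for
   'struct { char c; int i; } s;' under -ftrivial-auto-var-init=pattern
   the gimplifier emits roughly

     s = .DEFERRED_INIT (8, 1, &"s"[0]);
     __builtin_clear_padding (&s, 1);

   so the three padding bytes between 'c' and 'i' end up zero rather
   than 0xFE.  */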
1828
1829 /* Return true if the DECL needs to be automatically initialized by the
1830 compiler. */
1831 static bool
1832 is_var_need_auto_init (tree decl)
1833 {
1834 if (auto_var_p (decl)
1835 && (TREE_CODE (decl) != VAR_DECL
1836 || !DECL_HARD_REGISTER (decl))
1837 && (flag_auto_var_init > AUTO_INIT_UNINITIALIZED)
1838 && (!lookup_attribute ("uninitialized", DECL_ATTRIBUTES (decl)))
1839 && !OPAQUE_TYPE_P (TREE_TYPE (decl))
1840 && !is_empty_type (TREE_TYPE (decl)))
1841 return true;
1842 return false;
1843 }
1844
1845 /* Gimplify a DECL_EXPR node *STMT_P by making any necessary allocation
1846 and initialization explicit. */
1847
1848 static enum gimplify_status
1849 gimplify_decl_expr (tree *stmt_p, gimple_seq *seq_p)
1850 {
1851 tree stmt = *stmt_p;
1852 tree decl = DECL_EXPR_DECL (stmt);
1853
1854 *stmt_p = NULL_TREE;
1855
1856 if (TREE_TYPE (decl) == error_mark_node)
1857 return GS_ERROR;
1858
1859 if ((TREE_CODE (decl) == TYPE_DECL
1860 || VAR_P (decl))
1861 && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl)))
1862 {
1863 gimplify_type_sizes (TREE_TYPE (decl), seq_p);
1864 if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE)
1865 gimplify_type_sizes (TREE_TYPE (TREE_TYPE (decl)), seq_p);
1866 }
1867
1868 /* ??? DECL_ORIGINAL_TYPE is streamed for LTO so it needs to be gimplified
1869 in case its size expressions contain problematic nodes like CALL_EXPR. */
1870 if (TREE_CODE (decl) == TYPE_DECL
1871 && DECL_ORIGINAL_TYPE (decl)
1872 && !TYPE_SIZES_GIMPLIFIED (DECL_ORIGINAL_TYPE (decl)))
1873 {
1874 gimplify_type_sizes (DECL_ORIGINAL_TYPE (decl), seq_p);
1875 if (TREE_CODE (DECL_ORIGINAL_TYPE (decl)) == REFERENCE_TYPE)
1876 gimplify_type_sizes (TREE_TYPE (DECL_ORIGINAL_TYPE (decl)), seq_p);
1877 }
1878
1879 if (VAR_P (decl) && !DECL_EXTERNAL (decl))
1880 {
1881 tree init = DECL_INITIAL (decl);
1882 bool is_vla = false;
1883 /* Check whether a decl has an FE-created VALUE_EXPR here BEFORE
1884 gimplify_vla_decl creates a VALUE_EXPR for a VLA decl.
1885 If the decl has a VALUE_EXPR that was created by the FE (usually the
1886 C++ FE), it's a proxy variable, and the FE already initialized its
1887 VALUE_EXPR, so we should not initialize it again. */
1888 bool decl_had_value_expr_p = DECL_HAS_VALUE_EXPR_P (decl);
1889
1890 poly_uint64 size;
1891 if (!poly_int_tree_p (DECL_SIZE_UNIT (decl), &size)
1892 || (!TREE_STATIC (decl)
1893 && flag_stack_check == GENERIC_STACK_CHECK
1894 && maybe_gt (size,
1895 (unsigned HOST_WIDE_INT) STACK_CHECK_MAX_VAR_SIZE)))
1896 {
1897 gimplify_vla_decl (decl, seq_p);
1898 is_vla = true;
1899 }
1900
1901 if (asan_poisoned_variables
1902 && !is_vla
1903 && TREE_ADDRESSABLE (decl)
1904 && !TREE_STATIC (decl)
1905 && !DECL_HAS_VALUE_EXPR_P (decl)
1906 && DECL_ALIGN (decl) <= MAX_SUPPORTED_STACK_ALIGNMENT
1907 && dbg_cnt (asan_use_after_scope)
1908 && !gimplify_omp_ctxp
1909 /* GNAT introduces temporaries to hold return values of calls in
1910 initializers of variables defined in other units, so the
1911 declaration of the variable is discarded completely. We do not
1912 want to issue poison calls for such dropped variables. */
1913 && (DECL_SEEN_IN_BIND_EXPR_P (decl)
1914 || (DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)))
1915 {
1916 asan_poisoned_variables->add (decl);
1917 asan_poison_variable (decl, false, seq_p);
1918 if (!DECL_ARTIFICIAL (decl) && gimplify_ctxp->live_switch_vars)
1919 gimplify_ctxp->live_switch_vars->add (decl);
1920 }
1921
1922 /* Some front ends do not explicitly declare all anonymous
1923 artificial variables. We compensate here by declaring the
1924 variables, though it would be better if the front ends would
1925 explicitly declare them. */
1926 if (!DECL_SEEN_IN_BIND_EXPR_P (decl)
1927 && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
1928 gimple_add_tmp_var (decl);
1929
1930 if (init && init != error_mark_node)
1931 {
1932 if (!TREE_STATIC (decl))
1933 {
1934 DECL_INITIAL (decl) = NULL_TREE;
1935 init = build2 (INIT_EXPR, void_type_node, decl, init);
1936 gimplify_and_add (init, seq_p);
1937 ggc_free (init);
1938 /* Clear TREE_READONLY if we really have an initialization. */
1939 if (!DECL_INITIAL (decl)
1940 && !omp_privatize_by_reference (decl))
1941 TREE_READONLY (decl) = 0;
1942 }
1943 else
1944 /* We must still examine initializers for static variables
1945 as they may contain a label address. */
1946 walk_tree (&init, force_labels_r, NULL, NULL);
1947 }
1948 /* When there is no explicit initializer, insert an artificial
1949 initializer for this automatic variable if the user requested
1950 one. */
1951 else if (is_var_need_auto_init (decl)
1952 && !decl_had_value_expr_p)
1953 {
1954 gimple_add_init_for_auto_var (decl,
1955 flag_auto_var_init,
1956 seq_p);
1957 /* The expansion of a call to the above .DEFERRED_INIT will apply
1958 block initialization to the whole space covered by this variable.
1959 As a result, all the padding will be initialized to zeroes
1960 for zero initialization and 0xFE byte-repeatable patterns for
1961 pattern initialization.
1962 In order to make the padding zeroes for pattern init, we
1963 should add a call to __builtin_clear_padding to clear the
1964 padding to zero, compatible with Clang's behavior.
1965 We cannot insert this call if the variable is a gimple register
1966 since __builtin_clear_padding will take the address of the
1967 variable.  As a result, if a long double/_Complex long double
1968 variable is spilled to the stack later, its padding is 0xFE. */
1969 if (flag_auto_var_init == AUTO_INIT_PATTERN
1970 && !is_gimple_reg (decl)
1971 && clear_padding_type_may_have_padding_p (TREE_TYPE (decl)))
1972 gimple_add_padding_init_for_auto_var (decl, is_vla, seq_p);
1973 }
1974 }
1975
1976 return GS_ALL_DONE;
1977 }
1978
1979 /* Gimplify a LOOP_EXPR. Normally this just involves gimplifying the body
1980 and replacing the LOOP_EXPR with goto, but if the loop contains an
1981 EXIT_EXPR, we need to append a label for it to jump to. */
1982
1983 static enum gimplify_status
1984 gimplify_loop_expr (tree *expr_p, gimple_seq *pre_p)
1985 {
1986 tree saved_label = gimplify_ctxp->exit_label;
1987 tree start_label = create_artificial_label (UNKNOWN_LOCATION);
1988
1989 gimplify_seq_add_stmt (pre_p, gimple_build_label (start_label));
1990
1991 gimplify_ctxp->exit_label = NULL_TREE;
1992
1993 gimplify_and_add (LOOP_EXPR_BODY (*expr_p), pre_p);
1994
1995 gimplify_seq_add_stmt (pre_p, gimple_build_goto (start_label));
1996
1997 if (gimplify_ctxp->exit_label)
1998 gimplify_seq_add_stmt (pre_p,
1999 gimple_build_label (gimplify_ctxp->exit_label));
2000
2001 gimplify_ctxp->exit_label = saved_label;
2002
2003 *expr_p = NULL;
2004 return GS_ALL_DONE;
2005 }
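
/* For example (illustrative only): a front end emitting
   LOOP_EXPR { body; EXIT_EXPR <cond>; } gets, roughly,

     start:
       body;
       if (cond) goto exit;
       goto start;
     exit:

   where the 'exit' label is created lazily through
   gimplify_ctxp->exit_label and is emitted only if an EXIT_EXPR
   actually used it.  */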
2006
2007 /* Gimplify a statement list onto a sequence. These may be created either
2008 by an enlightened front-end, or by shortcut_cond_expr. */
2009
2010 static enum gimplify_status
2011 gimplify_statement_list (tree *expr_p, gimple_seq *pre_p)
2012 {
2013 tree temp = voidify_wrapper_expr (*expr_p, NULL);
2014
2015 tree_stmt_iterator i = tsi_start (*expr_p);
2016
2017 while (!tsi_end_p (i))
2018 {
2019 gimplify_stmt (tsi_stmt_ptr (i), pre_p);
2020 tsi_delink (&i);
2021 }
2022
2023 if (temp)
2024 {
2025 *expr_p = temp;
2026 return GS_OK;
2027 }
2028
2029 return GS_ALL_DONE;
2030 }
2031
2032
2033 /* Emit a warning for the unreachable statement STMT if needed.
2034 Return the gimple itself when the warning is emitted, otherwise
2035 return NULL. */
2036 static gimple *
2037 emit_warn_switch_unreachable (gimple *stmt)
2038 {
2039 if (gimple_code (stmt) == GIMPLE_GOTO
2040 && TREE_CODE (gimple_goto_dest (stmt)) == LABEL_DECL
2041 && DECL_ARTIFICIAL (gimple_goto_dest (stmt)))
2042 /* Don't warn for compiler-generated gotos. These occur
2043 in Duff's devices, for example. */
2044 return NULL;
2045 else if ((flag_auto_var_init > AUTO_INIT_UNINITIALIZED)
2046 && ((gimple_call_internal_p (stmt, IFN_DEFERRED_INIT))
2047 || (gimple_call_builtin_p (stmt, BUILT_IN_CLEAR_PADDING)
2048 && (bool) TREE_INT_CST_LOW (gimple_call_arg (stmt, 1)))
2049 || (is_gimple_assign (stmt)
2050 && gimple_assign_single_p (stmt)
2051 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
2052 && gimple_call_internal_p (
2053 SSA_NAME_DEF_STMT (gimple_assign_rhs1 (stmt)),
2054 IFN_DEFERRED_INIT))))
2055 /* Don't warn for compiler-generated initializations for
2056 -ftrivial-auto-var-init.
2057 There are 3 cases:
2058 case 1: a call to .DEFERRED_INIT;
2059 case 2: a call to __builtin_clear_padding with the 2nd argument
2060 present and non-zero;
2061 case 3: a gimple assign store right after the call to .DEFERRED_INIT
2062 that has the LHS of .DEFERRED_INIT as its RHS as follows:
2063 _1 = .DEFERRED_INIT (4, 2, &"i1"[0]);
2064 i1 = _1. */
2065 return NULL;
2066 else
2067 warning_at (gimple_location (stmt), OPT_Wswitch_unreachable,
2068 "statement will never be executed");
2069 return stmt;
2070 }
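
/* For example (illustrative only): the warning above fires for

     switch (x)
       {
	 i = 1;		// warning: statement will never be executed
       case 0:
	 break;
       }

   while artificial gotos (as in Duff's device) and the stores
   generated by -ftrivial-auto-var-init stay silent.  */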
2071
2072 /* Callback for walk_gimple_seq. */
2073
2074 static tree
2075 warn_switch_unreachable_and_auto_init_r (gimple_stmt_iterator *gsi_p,
2076 bool *handled_ops_p,
2077 struct walk_stmt_info *wi)
2078 {
2079 gimple *stmt = gsi_stmt (*gsi_p);
2080 bool unreachable_issued = wi->info != NULL;
2081
2082 *handled_ops_p = true;
2083 switch (gimple_code (stmt))
2084 {
2085 case GIMPLE_TRY:
2086 /* A compiler-generated cleanup or a user-written try block.
2087 If it's empty, don't dive into it--that would result in
2088 worse location info. */
2089 if (gimple_try_eval (stmt) == NULL)
2090 {
2091 if (warn_switch_unreachable && !unreachable_issued)
2092 wi->info = emit_warn_switch_unreachable (stmt);
2093
2094 /* Stop when auto var init warning is not on. */
2095 if (!warn_trivial_auto_var_init)
2096 return integer_zero_node;
2097 }
2098 /* Fall through. */
2099 case GIMPLE_BIND:
2100 case GIMPLE_CATCH:
2101 case GIMPLE_EH_FILTER:
2102 case GIMPLE_TRANSACTION:
2103 /* Walk the sub-statements. */
2104 *handled_ops_p = false;
2105 break;
2106
2107 case GIMPLE_DEBUG:
2108 /* Ignore these. We may generate them before declarations that
2109 are never executed. If there's something to warn about,
2110 there will be non-debug stmts too, and we'll catch those. */
2111 break;
2112
2113 case GIMPLE_LABEL:
2114 /* Stop at the first label. */
2115 return integer_zero_node;
2116 case GIMPLE_CALL:
2117 if (gimple_call_internal_p (stmt, IFN_ASAN_MARK))
2118 {
2119 *handled_ops_p = false;
2120 break;
2121 }
2122 if (warn_trivial_auto_var_init
2123 && flag_auto_var_init > AUTO_INIT_UNINITIALIZED
2124 && gimple_call_internal_p (stmt, IFN_DEFERRED_INIT))
2125 {
2126 /* Get the variable name from the 3rd argument of call. */
2127 tree var_name = gimple_call_arg (stmt, 2);
2128 var_name = TREE_OPERAND (TREE_OPERAND (var_name, 0), 0);
2129 const char *var_name_str = TREE_STRING_POINTER (var_name);
2130
2131 warning_at (gimple_location (stmt), OPT_Wtrivial_auto_var_init,
2132 "%qs cannot be initialized with "
2133 "%<-ftrivial-auto-var-init%>",
2134 var_name_str);
2135 break;
2136 }
2137
2138 /* Fall through. */
2139 default:
2140 /* Check the first "real" statement (not a decl/lexical scope/...) and
2141 issue a warning if needed. */
2142 if (warn_switch_unreachable && !unreachable_issued)
2143 wi->info = emit_warn_switch_unreachable (stmt);
2144 /* Stop when auto var init warning is not on. */
2145 if (!warn_trivial_auto_var_init)
2146 return integer_zero_node;
2147 break;
2148 }
2149 return NULL_TREE;
2150 }
2151
2152
2153 /* Possibly warn about unreachable statements between switch's controlling
2154 expression and the first case.  Also warn when -ftrivial-auto-var-init
2155 cannot initialize the auto variable in such a situation.
2156 SEQ is the body of a switch expression. */
2157
2158 static void
2159 maybe_warn_switch_unreachable_and_auto_init (gimple_seq seq)
2160 {
2161 if ((!warn_switch_unreachable && !warn_trivial_auto_var_init)
2162 /* This warning doesn't play well with Fortran when optimizations
2163 are on. */
2164 || lang_GNU_Fortran ()
2165 || seq == NULL)
2166 return;
2167
2168 struct walk_stmt_info wi;
2169
2170 memset (&wi, 0, sizeof (wi));
2171 walk_gimple_seq (seq, warn_switch_unreachable_and_auto_init_r, NULL, &wi);
2172 }
2173
2174
2175 /* A label entry that pairs label and a location. */
2176 struct label_entry
2177 {
2178 tree label;
2179 location_t loc;
2180 };
2181
2182 /* Find LABEL in vector of label entries VEC. */
2183
2184 static struct label_entry *
2185 find_label_entry (const auto_vec<struct label_entry> *vec, tree label)
2186 {
2187 unsigned int i;
2188 struct label_entry *l;
2189
2190 FOR_EACH_VEC_ELT (*vec, i, l)
2191 if (l->label == label)
2192 return l;
2193 return NULL;
2194 }
2195
2196 /* Return true if LABEL, a LABEL_DECL, represents a case label
2197 in a vector of labels CASES. */
2198
2199 static bool
2200 case_label_p (const vec<tree> *cases, tree label)
2201 {
2202 unsigned int i;
2203 tree l;
2204
2205 FOR_EACH_VEC_ELT (*cases, i, l)
2206 if (CASE_LABEL (l) == label)
2207 return true;
2208 return false;
2209 }
2210
2211 /* Find the last nondebug statement in a scope STMT. */
2212
2213 static gimple *
2214 last_stmt_in_scope (gimple *stmt)
2215 {
2216 if (!stmt)
2217 return NULL;
2218
2219 switch (gimple_code (stmt))
2220 {
2221 case GIMPLE_BIND:
2222 {
2223 gbind *bind = as_a <gbind *> (stmt);
2224 stmt = gimple_seq_last_nondebug_stmt (gimple_bind_body (bind));
2225 return last_stmt_in_scope (stmt);
2226 }
2227
2228 case GIMPLE_TRY:
2229 {
2230 gtry *try_stmt = as_a <gtry *> (stmt);
2231 stmt = gimple_seq_last_nondebug_stmt (gimple_try_eval (try_stmt));
2232 gimple *last_eval = last_stmt_in_scope (stmt);
2233 if (gimple_stmt_may_fallthru (last_eval)
2234 && (last_eval == NULL
2235 || !gimple_call_internal_p (last_eval, IFN_FALLTHROUGH))
2236 && gimple_try_kind (try_stmt) == GIMPLE_TRY_FINALLY)
2237 {
2238 stmt = gimple_seq_last_nondebug_stmt (gimple_try_cleanup (try_stmt));
2239 return last_stmt_in_scope (stmt);
2240 }
2241 else
2242 return last_eval;
2243 }
2244
2245 case GIMPLE_DEBUG:
2246 gcc_unreachable ();
2247
2248 default:
2249 return stmt;
2250 }
2251 }
2252
2253 /* Collect labels that may fall through into LABELS and return the statement
2254 preceding another case label, or a user-defined label. Store a location
2255 useful to give warnings at *PREVLOC (usually the location of the returned
2256 statement or of its surrounding scope). */
2257
2258 static gimple *
2259 collect_fallthrough_labels (gimple_stmt_iterator *gsi_p,
2260 auto_vec <struct label_entry> *labels,
2261 location_t *prevloc)
2262 {
2263 gimple *prev = NULL;
2264
2265 *prevloc = UNKNOWN_LOCATION;
2266 do
2267 {
2268 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_BIND)
2269 {
2270 /* Recognize the special GIMPLE_BIND added by gimplify_switch_expr,
2271 which starts on a GIMPLE_SWITCH and ends with a break label.
2272 Handle that as a single statement that can fall through. */
2273 gbind *bind = as_a <gbind *> (gsi_stmt (*gsi_p));
2274 gimple *first = gimple_seq_first_stmt (gimple_bind_body (bind));
2275 gimple *last = gimple_seq_last_stmt (gimple_bind_body (bind));
2276 if (last
2277 && gimple_code (first) == GIMPLE_SWITCH
2278 && gimple_code (last) == GIMPLE_LABEL)
2279 {
2280 tree label = gimple_label_label (as_a <glabel *> (last));
2281 if (SWITCH_BREAK_LABEL_P (label))
2282 {
2283 prev = bind;
2284 gsi_next (gsi_p);
2285 continue;
2286 }
2287 }
2288 }
2289 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_BIND
2290 || gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_TRY)
2291 {
2292 /* Nested scope. Only look at the last statement of
2293 the innermost scope. */
2294 location_t bind_loc = gimple_location (gsi_stmt (*gsi_p));
2295 gimple *last = last_stmt_in_scope (gsi_stmt (*gsi_p));
2296 if (last)
2297 {
2298 prev = last;
2299 /* It might be a label without a location. Use the
2300 location of the scope then. */
2301 if (!gimple_has_location (prev))
2302 *prevloc = bind_loc;
2303 }
2304 gsi_next (gsi_p);
2305 continue;
2306 }
2307
2308 /* Ifs are tricky. */
2309 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_COND)
2310 {
2311 gcond *cond_stmt = as_a <gcond *> (gsi_stmt (*gsi_p));
2312 tree false_lab = gimple_cond_false_label (cond_stmt);
2313 location_t if_loc = gimple_location (cond_stmt);
2314
2315 /* If we have e.g.
2316 if (i > 1) goto <D.2259>; else goto D;
2317 we can't do much with the else-branch. */
2318 if (!DECL_ARTIFICIAL (false_lab))
2319 break;
2320
2321 /* Go on until the false label, then one step back. */
2322 for (; !gsi_end_p (*gsi_p); gsi_next (gsi_p))
2323 {
2324 gimple *stmt = gsi_stmt (*gsi_p);
2325 if (gimple_code (stmt) == GIMPLE_LABEL
2326 && gimple_label_label (as_a <glabel *> (stmt)) == false_lab)
2327 break;
2328 }
2329
2330 /* Not found? Oops. */
2331 if (gsi_end_p (*gsi_p))
2332 break;
2333
2334 /* A dead label can't fall through. */
2335 if (!UNUSED_LABEL_P (false_lab))
2336 {
2337 struct label_entry l = { false_lab, if_loc };
2338 labels->safe_push (l);
2339 }
2340
2341 /* Go to the last statement of the then branch. */
2342 gsi_prev (gsi_p);
2343
2344 /* if (i != 0) goto <D.1759>; else goto <D.1760>;
2345 <D.1759>:
2346 <stmt>;
2347 goto <D.1761>;
2348 <D.1760>:
2349 */
2350 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_GOTO
2351 && !gimple_has_location (gsi_stmt (*gsi_p)))
2352 {
2353 /* Look at the statement before, it might be
2354 attribute fallthrough, in which case don't warn. */
2355 gsi_prev (gsi_p);
2356 bool fallthru_before_dest
2357 = gimple_call_internal_p (gsi_stmt (*gsi_p), IFN_FALLTHROUGH);
2358 gsi_next (gsi_p);
2359 tree goto_dest = gimple_goto_dest (gsi_stmt (*gsi_p));
2360 if (!fallthru_before_dest)
2361 {
2362 struct label_entry l = { goto_dest, if_loc };
2363 labels->safe_push (l);
2364 }
2365 }
2366 /* This case is about
2367 if (1 != 0) goto <D.2022>; else goto <D.2023>;
2368 <D.2022>:
2369 n = n + 1; // #1
2370 <D.2023>: // #2
2371 <D.1988>: // #3
2372 where #2 is UNUSED_LABEL_P and we want to warn about #1 falling
2373 through to #3. So set PREV to #1. */
2374 else if (UNUSED_LABEL_P (false_lab))
2375 prev = gsi_stmt (*gsi_p);
2376
2377 /* And move back. */
2378 gsi_next (gsi_p);
2379 }
2380
2381 /* Remember the last statement. Skip labels that are of no interest
2382 to us. */
2383 if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_LABEL)
2384 {
2385 tree label = gimple_label_label (as_a <glabel *> (gsi_stmt (*gsi_p)));
2386 if (find_label_entry (labels, label))
2387 prev = gsi_stmt (*gsi_p);
2388 }
2389 else if (gimple_call_internal_p (gsi_stmt (*gsi_p), IFN_ASAN_MARK))
2390 ;
2391 else if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_PREDICT)
2392 ;
2393 else if (!is_gimple_debug (gsi_stmt (*gsi_p)))
2394 prev = gsi_stmt (*gsi_p);
2395 gsi_next (gsi_p);
2396 }
2397 while (!gsi_end_p (*gsi_p)
2398 /* Stop if we find a case or a user-defined label. */
2399 && (gimple_code (gsi_stmt (*gsi_p)) != GIMPLE_LABEL
2400 || !gimple_has_location (gsi_stmt (*gsi_p))));
2401
2402 if (prev && gimple_has_location (prev))
2403 *prevloc = gimple_location (prev);
2404 return prev;
2405 }
2406
2407 /* Return true if the switch fallthrough warning should occur.  LABEL is
2408 the label statement that we're falling through to. */
2409
2410 static bool
2411 should_warn_for_implicit_fallthrough (gimple_stmt_iterator *gsi_p, tree label)
2412 {
2413 gimple_stmt_iterator gsi = *gsi_p;
2414
2415 /* Don't warn if the label is marked with a "falls through" comment. */
2416 if (FALLTHROUGH_LABEL_P (label))
2417 return false;
2418
2419 /* Don't warn for non-case labels followed by a statement:
2420 case 0:
2421 foo ();
2422 label:
2423 bar ();
2424 as these are likely intentional. */
2425 if (!case_label_p (&gimplify_ctxp->case_labels, label))
2426 {
2427 tree l;
2428 while (!gsi_end_p (gsi)
2429 && gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL
2430 && (l = gimple_label_label (as_a <glabel *> (gsi_stmt (gsi))))
2431 && !case_label_p (&gimplify_ctxp->case_labels, l))
2432 gsi_next_nondebug (&gsi);
2433 if (gsi_end_p (gsi) || gimple_code (gsi_stmt (gsi)) != GIMPLE_LABEL)
2434 return false;
2435 }
2436
2437 /* Don't warn for terminated branches, i.e. when the subsequent case labels
2438 immediately breaks. */
2439 gsi = *gsi_p;
2440
2441 /* Skip all immediately following labels. */
2442 while (!gsi_end_p (gsi)
2443 && (gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL
2444 || gimple_code (gsi_stmt (gsi)) == GIMPLE_PREDICT))
2445 gsi_next_nondebug (&gsi);
2446
2447 /* { ... something; default:; } */
2448 if (gsi_end_p (gsi)
2449 /* { ... something; default: break; } or
2450 { ... something; default: goto L; } */
2451 || gimple_code (gsi_stmt (gsi)) == GIMPLE_GOTO
2452 /* { ... something; default: return; } */
2453 || gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
2454 return false;
2455
2456 return true;
2457 }
2458
2459 /* Callback for walk_gimple_seq. */
2460
2461 static tree
2462 warn_implicit_fallthrough_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
2463 struct walk_stmt_info *)
2464 {
2465 gimple *stmt = gsi_stmt (*gsi_p);
2466
2467 *handled_ops_p = true;
2468 switch (gimple_code (stmt))
2469 {
2470 case GIMPLE_TRY:
2471 case GIMPLE_BIND:
2472 case GIMPLE_CATCH:
2473 case GIMPLE_EH_FILTER:
2474 case GIMPLE_TRANSACTION:
2475 /* Walk the sub-statements. */
2476 *handled_ops_p = false;
2477 break;
2478
2479 /* Find a sequence of form:
2480
2481 GIMPLE_LABEL
2482 [...]
2483 <may fallthru stmt>
2484 GIMPLE_LABEL
2485
2486 and possibly warn. */
2487 case GIMPLE_LABEL:
2488 {
2489 /* Found a label. Skip all immediately following labels. */
2490 while (!gsi_end_p (*gsi_p)
2491 && gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_LABEL)
2492 gsi_next_nondebug (gsi_p);
2493
2494 /* There might be no more statements. */
2495 if (gsi_end_p (*gsi_p))
2496 return integer_zero_node;
2497
2498 /* Vector of labels that fall through. */
2499 auto_vec <struct label_entry> labels;
2500 location_t prevloc;
2501 gimple *prev = collect_fallthrough_labels (gsi_p, &labels, &prevloc);
2502
2503 /* There might be no more statements. */
2504 if (gsi_end_p (*gsi_p))
2505 return integer_zero_node;
2506
2507 gimple *next = gsi_stmt (*gsi_p);
2508 tree label;
2509 /* If what follows is a label, then we may have a fallthrough. */
2510 if (gimple_code (next) == GIMPLE_LABEL
2511 && gimple_has_location (next)
2512 && (label = gimple_label_label (as_a <glabel *> (next)))
2513 && prev != NULL)
2514 {
2515 struct label_entry *l;
2516 bool warned_p = false;
2517 auto_diagnostic_group d;
2518 if (!should_warn_for_implicit_fallthrough (gsi_p, label))
2519 /* Quiet. */;
2520 else if (gimple_code (prev) == GIMPLE_LABEL
2521 && (label = gimple_label_label (as_a <glabel *> (prev)))
2522 && (l = find_label_entry (&labels, label)))
2523 warned_p = warning_at (l->loc, OPT_Wimplicit_fallthrough_,
2524 "this statement may fall through");
2525 else if (!gimple_call_internal_p (prev, IFN_FALLTHROUGH)
2526 /* Try to be clever and don't warn when the statement
2527 can't actually fall through. */
2528 && gimple_stmt_may_fallthru (prev)
2529 && prevloc != UNKNOWN_LOCATION)
2530 warned_p = warning_at (prevloc,
2531 OPT_Wimplicit_fallthrough_,
2532 "this statement may fall through");
2533 if (warned_p)
2534 inform (gimple_location (next), "here");
2535
2536 /* Mark this label as processed so as to prevent multiple
2537 warnings in nested switches. */
2538 FALLTHROUGH_LABEL_P (label) = true;
2539
2540 /* So that the next warn_implicit_fallthrough_r will start looking for
2541 a new sequence starting with this label. */
2542 gsi_prev (gsi_p);
2543 }
2544 }
2545 break;
2546 default:
2547 break;
2548 }
2549 return NULL_TREE;
2550 }
2551
2552 /* Warn when a switch case falls through. */
2553
2554 static void
2555 maybe_warn_implicit_fallthrough (gimple_seq seq)
2556 {
2557 if (!warn_implicit_fallthrough)
2558 return;
2559
2560 /* This warning is meant for C/C++/ObjC/ObjC++ only. */
2561 if (!(lang_GNU_C ()
2562 || lang_GNU_CXX ()
2563 || lang_GNU_OBJC ()))
2564 return;
2565
2566 struct walk_stmt_info wi;
2567 memset (&wi, 0, sizeof (wi));
2568 walk_gimple_seq (seq, warn_implicit_fallthrough_r, NULL, &wi);
2569 }
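
/* For example (illustrative only): with -Wimplicit-fallthrough,

     switch (x)
       {
       case 0:
	 foo ();	// warning: this statement may fall through
       case 1:		// note: here
	 bar ();
	 break;
       }

   whereas no warning is issued when the fall through is annotated,
   e.g. by the C++17 [[fallthrough]] attribute or by a "falls through"
   comment, depending on the -Wimplicit-fallthrough= level.  */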
2570
2571 /* Callback for walk_gimple_seq. */
2572
2573 static tree
2574 expand_FALLTHROUGH_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
2575 struct walk_stmt_info *wi)
2576 {
2577 gimple *stmt = gsi_stmt (*gsi_p);
2578
2579 *handled_ops_p = true;
2580 switch (gimple_code (stmt))
2581 {
2582 case GIMPLE_TRY:
2583 case GIMPLE_BIND:
2584 case GIMPLE_CATCH:
2585 case GIMPLE_EH_FILTER:
2586 case GIMPLE_TRANSACTION:
2587 /* Walk the sub-statements. */
2588 *handled_ops_p = false;
2589 break;
2590 case GIMPLE_CALL:
2591 if (gimple_call_internal_p (stmt, IFN_FALLTHROUGH))
2592 {
2593 gsi_remove (gsi_p, true);
2594 if (gsi_end_p (*gsi_p))
2595 {
2596 *static_cast<location_t *>(wi->info) = gimple_location (stmt);
2597 return integer_zero_node;
2598 }
2599
2600 bool found = false;
2601 location_t loc = gimple_location (stmt);
2602
2603 gimple_stmt_iterator gsi2 = *gsi_p;
2604 stmt = gsi_stmt (gsi2);
2605 if (gimple_code (stmt) == GIMPLE_GOTO && !gimple_has_location (stmt))
2606 {
2607 /* Go on until the artificial label. */
2608 tree goto_dest = gimple_goto_dest (stmt);
2609 for (; !gsi_end_p (gsi2); gsi_next (&gsi2))
2610 {
2611 if (gimple_code (gsi_stmt (gsi2)) == GIMPLE_LABEL
2612 && gimple_label_label (as_a <glabel *> (gsi_stmt (gsi2)))
2613 == goto_dest)
2614 break;
2615 }
2616
2617 /* Not found? Stop. */
2618 if (gsi_end_p (gsi2))
2619 break;
2620
2621 /* Look one past it. */
2622 gsi_next (&gsi2);
2623 }
2624
2625 /* We're looking for a case label or default label here. */
2626 while (!gsi_end_p (gsi2))
2627 {
2628 stmt = gsi_stmt (gsi2);
2629 if (gimple_code (stmt) == GIMPLE_LABEL)
2630 {
2631 tree label = gimple_label_label (as_a <glabel *> (stmt));
2632 if (gimple_has_location (stmt) && DECL_ARTIFICIAL (label))
2633 {
2634 found = true;
2635 break;
2636 }
2637 }
2638 else if (gimple_call_internal_p (stmt, IFN_ASAN_MARK))
2639 ;
2640 else if (!is_gimple_debug (stmt))
2641 /* Anything else is not expected. */
2642 break;
2643 gsi_next (&gsi2);
2644 }
2645 if (!found)
2646 pedwarn (loc, 0, "attribute %<fallthrough%> not preceding "
2647 "a case label or default label");
2648 }
2649 break;
2650 default:
2651 break;
2652 }
2653 return NULL_TREE;
2654 }
2655
2656 /* Expand all FALLTHROUGH () calls in SEQ. */
2657
2658 static void
2659 expand_FALLTHROUGH (gimple_seq *seq_p)
2660 {
2661 struct walk_stmt_info wi;
2662 location_t loc;
2663 memset (&wi, 0, sizeof (wi));
2664 wi.info = (void *) &loc;
2665 walk_gimple_seq_mod (seq_p, expand_FALLTHROUGH_r, NULL, &wi);
2666 if (wi.callback_result == integer_zero_node)
2667 /* We've found [[fallthrough]]; at the end of a switch, which the C++
2668 standard says is ill-formed; see [dcl.attr.fallthrough]. */
2669 pedwarn (loc, 0, "attribute %<fallthrough%> not preceding "
2670 "a case label or default label");
2671 }
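
/* For example (illustrative only): in

     case 0:
       foo ();
       __attribute__((fallthrough));
     case 1:

   the front end represents the attribute as a call to the internal
   function IFN_FALLTHROUGH; the walk above deletes that call again and
   pedwarns when no case or default label follows it.  */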
2672
2673
2674 /* Gimplify a SWITCH_EXPR, and collect the vector of labels it can
2675 branch to. */
2676
2677 static enum gimplify_status
2678 gimplify_switch_expr (tree *expr_p, gimple_seq *pre_p)
2679 {
2680 tree switch_expr = *expr_p;
2681 gimple_seq switch_body_seq = NULL;
2682 enum gimplify_status ret;
2683 tree index_type = TREE_TYPE (switch_expr);
2684 if (index_type == NULL_TREE)
2685 index_type = TREE_TYPE (SWITCH_COND (switch_expr));
2686
2687 ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL, is_gimple_val,
2688 fb_rvalue);
2689 if (ret == GS_ERROR || ret == GS_UNHANDLED)
2690 return ret;
2691
2692 if (SWITCH_BODY (switch_expr))
2693 {
2694 vec<tree> labels;
2695 vec<tree> saved_labels;
2696 hash_set<tree> *saved_live_switch_vars = NULL;
2697 tree default_case = NULL_TREE;
2698 gswitch *switch_stmt;
2699
2700 /* Save old labels, get new ones from body, then restore the old
2701 labels. Save all the things from the switch body to append after. */
2702 saved_labels = gimplify_ctxp->case_labels;
2703 gimplify_ctxp->case_labels.create (8);
2704
2705 /* Do not create live_switch_vars if SWITCH_BODY is not a BIND_EXPR. */
2706 saved_live_switch_vars = gimplify_ctxp->live_switch_vars;
2707 tree_code body_type = TREE_CODE (SWITCH_BODY (switch_expr));
2708 if (body_type == BIND_EXPR || body_type == STATEMENT_LIST)
2709 gimplify_ctxp->live_switch_vars = new hash_set<tree> (4);
2710 else
2711 gimplify_ctxp->live_switch_vars = NULL;
2712
2713 bool old_in_switch_expr = gimplify_ctxp->in_switch_expr;
2714 gimplify_ctxp->in_switch_expr = true;
2715
2716 gimplify_stmt (&SWITCH_BODY (switch_expr), &switch_body_seq);
2717
2718 gimplify_ctxp->in_switch_expr = old_in_switch_expr;
2719 maybe_warn_switch_unreachable_and_auto_init (switch_body_seq);
2720 maybe_warn_implicit_fallthrough (switch_body_seq);
2721 /* Only do this for the outermost GIMPLE_SWITCH. */
2722 if (!gimplify_ctxp->in_switch_expr)
2723 expand_FALLTHROUGH (&switch_body_seq);
2724
2725 labels = gimplify_ctxp->case_labels;
2726 gimplify_ctxp->case_labels = saved_labels;
2727
2728 if (gimplify_ctxp->live_switch_vars)
2729 {
2730 gcc_assert (gimplify_ctxp->live_switch_vars->is_empty ());
2731 delete gimplify_ctxp->live_switch_vars;
2732 }
2733 gimplify_ctxp->live_switch_vars = saved_live_switch_vars;
2734
2735 preprocess_case_label_vec_for_gimple (labels, index_type,
2736 &default_case);
2737
2738 bool add_bind = false;
2739 if (!default_case)
2740 {
2741 glabel *new_default;
2742
2743 default_case
2744 = build_case_label (NULL_TREE, NULL_TREE,
2745 create_artificial_label (UNKNOWN_LOCATION));
2746 if (old_in_switch_expr)
2747 {
2748 SWITCH_BREAK_LABEL_P (CASE_LABEL (default_case)) = 1;
2749 add_bind = true;
2750 }
2751 new_default = gimple_build_label (CASE_LABEL (default_case));
2752 gimplify_seq_add_stmt (&switch_body_seq, new_default);
2753 }
2754 else if (old_in_switch_expr)
2755 {
2756 gimple *last = gimple_seq_last_stmt (switch_body_seq);
2757 if (last && gimple_code (last) == GIMPLE_LABEL)
2758 {
2759 tree label = gimple_label_label (as_a <glabel *> (last));
2760 if (SWITCH_BREAK_LABEL_P (label))
2761 add_bind = true;
2762 }
2763 }
2764
2765 switch_stmt = gimple_build_switch (SWITCH_COND (switch_expr),
2766 default_case, labels);
2767 /* For the benefit of -Wimplicit-fallthrough, if switch_body_seq
2768 ends with a GIMPLE_LABEL holding SWITCH_BREAK_LABEL_P LABEL_DECL,
2769 wrap the GIMPLE_SWITCH up to that GIMPLE_LABEL into a GIMPLE_BIND,
2770 so that we can easily find the start and end of the switch
2771 statement. */
2772 if (add_bind)
2773 {
2774 gimple_seq bind_body = NULL;
2775 gimplify_seq_add_stmt (&bind_body, switch_stmt);
2776 gimple_seq_add_seq (&bind_body, switch_body_seq);
2777 gbind *bind = gimple_build_bind (NULL_TREE, bind_body, NULL_TREE);
2778 gimple_set_location (bind, EXPR_LOCATION (switch_expr));
2779 gimplify_seq_add_stmt (pre_p, bind);
2780 }
2781 else
2782 {
2783 gimplify_seq_add_stmt (pre_p, switch_stmt);
2784 gimplify_seq_add_seq (pre_p, switch_body_seq);
2785 }
2786 labels.release ();
2787 }
2788 else
2789 gcc_unreachable ();
2790
2791 return GS_ALL_DONE;
2792 }
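
/* For example (illustrative only, label names made up):
   'switch (x) { case 1: f (); break; }' becomes roughly

     switch (x) <default: D.3, case 1: D.1>
     D.1:
       f ();
       goto D.2;		// the break
     D.3:			// artificial default label
     D.2:

   where the default label was invented above because the source had
   none.  */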
2793
2794 /* Gimplify the LABEL_EXPR pointed to by EXPR_P. */
2795
2796 static enum gimplify_status
2797 gimplify_label_expr (tree *expr_p, gimple_seq *pre_p)
2798 {
2799 gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p))
2800 == current_function_decl);
2801
2802 tree label = LABEL_EXPR_LABEL (*expr_p);
2803 glabel *label_stmt = gimple_build_label (label);
2804 gimple_set_location (label_stmt, EXPR_LOCATION (*expr_p));
2805 gimplify_seq_add_stmt (pre_p, label_stmt);
2806
2807 if (lookup_attribute ("cold", DECL_ATTRIBUTES (label)))
2808 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_COLD_LABEL,
2809 NOT_TAKEN));
2810 else if (lookup_attribute ("hot", DECL_ATTRIBUTES (label)))
2811 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_HOT_LABEL,
2812 TAKEN));
2813
2814 return GS_ALL_DONE;
2815 }
2816
2817 /* Gimplify the CASE_LABEL_EXPR pointed to by EXPR_P. */
2818
2819 static enum gimplify_status
2820 gimplify_case_label_expr (tree *expr_p, gimple_seq *pre_p)
2821 {
2822 struct gimplify_ctx *ctxp;
2823 glabel *label_stmt;
2824
2825 /* Invalid programs can play Duff's Device type games with, for example,
2826 #pragma omp parallel. At least in the C front end, we don't
2827 detect such invalid branches until after gimplification, in the
2828 diagnose_omp_blocks pass. */
2829 for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context)
2830 if (ctxp->case_labels.exists ())
2831 break;
2832
2833 tree label = CASE_LABEL (*expr_p);
2834 label_stmt = gimple_build_label (label);
2835 gimple_set_location (label_stmt, EXPR_LOCATION (*expr_p));
2836 ctxp->case_labels.safe_push (*expr_p);
2837 gimplify_seq_add_stmt (pre_p, label_stmt);
2838
2839 if (lookup_attribute ("cold", DECL_ATTRIBUTES (label)))
2840 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_COLD_LABEL,
2841 NOT_TAKEN));
2842 else if (lookup_attribute ("hot", DECL_ATTRIBUTES (label)))
2843 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_HOT_LABEL,
2844 TAKEN));
2845
2846 return GS_ALL_DONE;
2847 }
2848
2849 /* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
2850 if necessary. */
2851
2852 tree
2853 build_and_jump (tree *label_p)
2854 {
2855 if (label_p == NULL)
2856 /* If there's nowhere to jump, just fall through. */
2857 return NULL_TREE;
2858
2859 if (*label_p == NULL_TREE)
2860 {
2861 tree label = create_artificial_label (UNKNOWN_LOCATION);
2862 *label_p = label;
2863 }
2864
2865 return build1 (GOTO_EXPR, void_type_node, *label_p);
2866 }
2867
2868 /* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
2869 This also involves building a label to jump to and communicating it to
2870 gimplify_loop_expr through gimplify_ctxp->exit_label. */
2871
2872 static enum gimplify_status
2873 gimplify_exit_expr (tree *expr_p)
2874 {
2875 tree cond = TREE_OPERAND (*expr_p, 0);
2876 tree expr;
2877
2878 expr = build_and_jump (&gimplify_ctxp->exit_label);
2879 expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE);
2880 *expr_p = expr;
2881
2882 return GS_OK;
2883 }
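
/* For example (illustrative only): EXIT_EXPR <cond> becomes

     if (cond) goto exit_label;

   with exit_label being the one shared with gimplify_loop_expr
   through gimplify_ctxp->exit_label.  */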
2884
2885 /* *EXPR_P is a COMPONENT_REF being used as an rvalue. If its type is
2886 different from its canonical type, wrap the whole thing inside a
2887 NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
2888 type.
2889
2890 The canonical type of a COMPONENT_REF is the type of the field being
2891 referenced--unless the field is a bit-field which can be read directly
2892 in a smaller mode, in which case the canonical type is the
2893 sign-appropriate type corresponding to that mode. */
2894
2895 static void
2896 canonicalize_component_ref (tree *expr_p)
2897 {
2898 tree expr = *expr_p;
2899 tree type;
2900
2901 gcc_assert (TREE_CODE (expr) == COMPONENT_REF);
2902
2903 if (INTEGRAL_TYPE_P (TREE_TYPE (expr)))
2904 type = TREE_TYPE (get_unwidened (expr, NULL_TREE));
2905 else
2906 type = TREE_TYPE (TREE_OPERAND (expr, 1));
2907
2908 /* One could argue that all the stuff below is not necessary for
2909 the non-bitfield case and declare it a FE error if type
2910 adjustment would be needed. */
2911 if (TREE_TYPE (expr) != type)
2912 {
2913 #ifdef ENABLE_TYPES_CHECKING
2914 tree old_type = TREE_TYPE (expr);
2915 #endif
2916 int type_quals;
2917
2918 /* We need to preserve qualifiers and propagate them from
2919 operand 0. */
2920 type_quals = TYPE_QUALS (type)
2921 | TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr, 0)));
2922 if (TYPE_QUALS (type) != type_quals)
2923 type = build_qualified_type (TYPE_MAIN_VARIANT (type), type_quals);
2924
2925 /* Set the type of the COMPONENT_REF to the underlying type. */
2926 TREE_TYPE (expr) = type;
2927
2928 #ifdef ENABLE_TYPES_CHECKING
2929 /* It is now a FE error, if the conversion from the canonical
2930 type to the original expression type is not useless. */
2931 gcc_assert (useless_type_conversion_p (old_type, type));
2932 #endif
2933 }
2934 }
2935
2936 /* If a NOP conversion is changing a pointer to array of foo to a pointer
2937 to foo, embed that change in the ADDR_EXPR by converting
2938 T array[U];
2939 (T *)&array
2940 ==>
2941 &array[L]
2942 where L is the lower bound. For simplicity, only do this for constant
2943 lower bound.
2944 The constraint is that the type of &array[L] is trivially convertible
2945 to T *. */
2946
2947 static void
2948 canonicalize_addr_expr (tree *expr_p)
2949 {
2950 tree expr = *expr_p;
2951 tree addr_expr = TREE_OPERAND (expr, 0);
2952 tree datype, ddatype, pddatype;
2953
2954 /* We simplify only conversions from an ADDR_EXPR to a pointer type. */
2955 if (!POINTER_TYPE_P (TREE_TYPE (expr))
2956 || TREE_CODE (addr_expr) != ADDR_EXPR)
2957 return;
2958
2959 /* The addr_expr type should be a pointer to an array. */
2960 datype = TREE_TYPE (TREE_TYPE (addr_expr));
2961 if (TREE_CODE (datype) != ARRAY_TYPE)
2962 return;
2963
2964 /* The pointer to element type shall be trivially convertible to
2965 the expression pointer type. */
2966 ddatype = TREE_TYPE (datype);
2967 pddatype = build_pointer_type (ddatype);
2968 if (!useless_type_conversion_p (TYPE_MAIN_VARIANT (TREE_TYPE (expr)),
2969 pddatype))
2970 return;
2971
2972 /* The lower bound and element sizes must be constant. */
2973 if (!TYPE_SIZE_UNIT (ddatype)
2974 || TREE_CODE (TYPE_SIZE_UNIT (ddatype)) != INTEGER_CST
2975 || !TYPE_DOMAIN (datype) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype))
2976 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype))) != INTEGER_CST)
2977 return;
2978
2979 /* All checks succeeded. Build a new node to merge the cast. */
2980 *expr_p = build4 (ARRAY_REF, ddatype, TREE_OPERAND (addr_expr, 0),
2981 TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
2982 NULL_TREE, NULL_TREE);
2983 *expr_p = build1 (ADDR_EXPR, pddatype, *expr_p);
2984
2985 /* We can have stripped a required restrict qualifier above. */
2986 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
2987 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
2988 }
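
/* For example (illustrative only): with 'int a[10];', the conversion
   (int *) &a is rewritten above as &a[0], which later folding and
   aliasing machinery can handle directly.  */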
2989
2990 /* *EXPR_P is a NOP_EXPR or CONVERT_EXPR. Remove it and/or other conversions
2991 underneath as appropriate. */
2992
2993 static enum gimplify_status
2994 gimplify_conversion (tree *expr_p)
2995 {
2996 location_t loc = EXPR_LOCATION (*expr_p);
2997 gcc_assert (CONVERT_EXPR_P (*expr_p));
2998
2999 /* Then strip away all but the outermost conversion. */
3000 STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p, 0));
3001
3002 /* And remove the outermost conversion if it's useless. */
3003 if (tree_ssa_useless_type_conversion (*expr_p))
3004 *expr_p = TREE_OPERAND (*expr_p, 0);
3005
3006 /* If we still have a conversion at the toplevel,
3007 then canonicalize some constructs. */
3008 if (CONVERT_EXPR_P (*expr_p))
3009 {
3010 tree sub = TREE_OPERAND (*expr_p, 0);
3011
3012 /* If a NOP conversion is changing the type of a COMPONENT_REF
3013 expression, then canonicalize its type now in order to expose more
3014 redundant conversions. */
3015 if (TREE_CODE (sub) == COMPONENT_REF)
3016 canonicalize_component_ref (&TREE_OPERAND (*expr_p, 0));
3017
3018 /* If a NOP conversion is changing a pointer to array of foo
3019 to a pointer to foo, embed that change in the ADDR_EXPR. */
3020 else if (TREE_CODE (sub) == ADDR_EXPR)
3021 canonicalize_addr_expr (expr_p);
3022 }
3023
3024 /* If we have a conversion to a non-register type force the
3025 use of a VIEW_CONVERT_EXPR instead. */
3026 if (CONVERT_EXPR_P (*expr_p) && !is_gimple_reg_type (TREE_TYPE (*expr_p)))
3027 *expr_p = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (*expr_p),
3028 TREE_OPERAND (*expr_p, 0));
3029
3030 /* Canonicalize CONVERT_EXPR to NOP_EXPR. */
3031 if (TREE_CODE (*expr_p) == CONVERT_EXPR)
3032 TREE_SET_CODE (*expr_p, NOP_EXPR);
3033
3034 return GS_OK;
3035 }
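
/* For example (illustrative only): redundant nested conversions such
   as (int) (int) x are stripped first, and a conversion whose result
   type cannot live in a register is rewritten as a VIEW_CONVERT_EXPR
   instead of a NOP_EXPR.  */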
3036
3037 /* Gimplify a VAR_DECL or PARM_DECL. Return GS_OK if we expanded a
3038 DECL_VALUE_EXPR, and it's worth re-examining things. */
3039
3040 static enum gimplify_status
3041 gimplify_var_or_parm_decl (tree *expr_p)
3042 {
3043 tree decl = *expr_p;
3044
3045 /* ??? If this is a local variable, and it has not been seen in any
3046 outer BIND_EXPR, then it's probably the result of a duplicate
3047 declaration, for which we've already issued an error. It would
3048 be really nice if the front end wouldn't leak these at all.
3049 Currently the only known culprit is C++ destructors, as seen
3050 in g++.old-deja/g++.jason/binding.C.
3051 Another possible culprit is size expressions for variably modified
3052 types which are lost in the FE or not gimplified correctly. */
3053 if (VAR_P (decl)
3054 && !DECL_SEEN_IN_BIND_EXPR_P (decl)
3055 && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
3056 && decl_function_context (decl) == current_function_decl)
3057 {
3058 gcc_assert (seen_error ());
3059 return GS_ERROR;
3060 }
3061
3062 /* When within an OMP context, notice uses of variables. */
3063 if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true))
3064 return GS_ALL_DONE;
3065
3066 /* If the decl is an alias for another expression, substitute it now. */
3067 if (DECL_HAS_VALUE_EXPR_P (decl))
3068 {
3069 *expr_p = unshare_expr (DECL_VALUE_EXPR (decl));
3070 return GS_OK;
3071 }
3072
3073 return GS_ALL_DONE;
3074 }
3075
3076 /* Recalculate the value of the TREE_SIDE_EFFECTS flag for T. */
3077
3078 static void
3079 recalculate_side_effects (tree t)
3080 {
3081 enum tree_code code = TREE_CODE (t);
3082 int len = TREE_OPERAND_LENGTH (t);
3083 int i;
3084
3085 switch (TREE_CODE_CLASS (code))
3086 {
3087 case tcc_expression:
3088 switch (code)
3089 {
3090 case INIT_EXPR:
3091 case MODIFY_EXPR:
3092 case VA_ARG_EXPR:
3093 case PREDECREMENT_EXPR:
3094 case PREINCREMENT_EXPR:
3095 case POSTDECREMENT_EXPR:
3096 case POSTINCREMENT_EXPR:
3097 /* All of these have side-effects, no matter what their
3098 operands are. */
3099 return;
3100
3101 default:
3102 break;
3103 }
3104 /* Fall through. */
3105
3106 case tcc_comparison: /* a comparison expression */
3107 case tcc_unary: /* a unary arithmetic expression */
3108 case tcc_binary: /* a binary arithmetic expression */
3109 case tcc_reference: /* a reference */
3110 case tcc_vl_exp: /* a function call */
3111 TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t);
3112 for (i = 0; i < len; ++i)
3113 {
3114 tree op = TREE_OPERAND (t, i);
3115 if (op && TREE_SIDE_EFFECTS (op))
3116 TREE_SIDE_EFFECTS (t) = 1;
3117 }
3118 break;
3119
3120 case tcc_constant:
3121 /* No side-effects. */
3122 return;
3123
3124 default:
3125 gcc_unreachable ();
3126 }
3127 }
3128
3129 /* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
3130 node *EXPR_P.
3131
3132 compound_lval
3133 : min_lval '[' val ']'
3134 | min_lval '.' ID
3135 | compound_lval '[' val ']'
3136 | compound_lval '.' ID
3137
3138 This is not part of the original SIMPLE definition, which separates
3139 array and member references, but it seems reasonable to handle them
3140 together. Also, this way we don't run into problems with union
3141 aliasing; gcc requires that for accesses through a union to alias, the
3142 union reference must be explicit, which was not always the case when we
3143 were splitting up array and member refs.
3144
3145 PRE_P points to the sequence where side effects that must happen before
3146 *EXPR_P should be stored.
3147
3148 POST_P points to the sequence where side effects that must happen after
3149 *EXPR_P should be stored. */
3150
3151 static enum gimplify_status
3152 gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
3153 fallback_t fallback)
3154 {
3155 tree *p;
3156 enum gimplify_status ret = GS_ALL_DONE, tret;
3157 int i;
3158 location_t loc = EXPR_LOCATION (*expr_p);
3159 tree expr = *expr_p;
3160
3161 /* Create a stack of the subexpressions so later we can walk them in
3162 order from inner to outer. */
3163 auto_vec<tree, 10> expr_stack;
3164
3165 /* We can handle anything that get_inner_reference can deal with. */
3166 for (p = expr_p; ; p = &TREE_OPERAND (*p, 0))
3167 {
3168 restart:
3169 /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs. */
3170 if (TREE_CODE (*p) == INDIRECT_REF)
3171 *p = fold_indirect_ref_loc (loc, *p);
3172
3173 if (handled_component_p (*p))
3174 ;
3175 /* Expand DECL_VALUE_EXPR now. In some cases that may expose
3176 additional COMPONENT_REFs. */
3177 else if ((VAR_P (*p) || TREE_CODE (*p) == PARM_DECL)
3178 && gimplify_var_or_parm_decl (p) == GS_OK)
3179 goto restart;
3180 else
3181 break;
3182
3183 expr_stack.safe_push (*p);
3184 }
3185
3186 gcc_assert (expr_stack.length ());
3187
3188 /* Now EXPR_STACK is a stack of pointers to all the refs we've
3189 walked through and P points to the innermost expression.
3190
3191 Java requires that we elaborate nodes in source order.  That
3192 means we must gimplify the inner expression followed by each of
3193 the indices, in order. But we can't gimplify the inner
3194 expression until we deal with any variable bounds, sizes, or
3195 positions in order to deal with PLACEHOLDER_EXPRs.
3196
3197 The base expression may contain a statement expression that
3198 has declarations used in size expressions, so has to be
3199 gimplified before gimplifying the size expressions.
3200
3201 So we do this in three steps. First we deal with variable
3202 bounds, sizes, and positions, then we gimplify the base and
3203 ensure it is memory if needed, then we deal with the annotations
3204 for any variables in the components and any indices, from left
3205 to right. */
3206
3207 bool need_non_reg = false;
3208 for (i = expr_stack.length () - 1; i >= 0; i--)
3209 {
3210 tree t = expr_stack[i];
3211
3212 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3213 {
3214 /* Deal with the low bound and element type size and put them into
3215 the ARRAY_REF. If these values are set, they have already been
3216 gimplified. */
3217 if (TREE_OPERAND (t, 2) == NULL_TREE)
3218 {
3219 tree low = unshare_expr (array_ref_low_bound (t));
3220 if (!is_gimple_min_invariant (low))
3221 {
3222 TREE_OPERAND (t, 2) = low;
3223 }
3224 }
3225
3226 if (TREE_OPERAND (t, 3) == NULL_TREE)
3227 {
3228 tree elmt_size = array_ref_element_size (t);
3229 if (!is_gimple_min_invariant (elmt_size))
3230 {
3231 elmt_size = unshare_expr (elmt_size);
3232 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0)));
3233 tree factor = size_int (TYPE_ALIGN_UNIT (elmt_type));
3234
3235 /* Divide the element size by the alignment of the element
3236 type (above). */
3237 elmt_size = size_binop_loc (loc, EXACT_DIV_EXPR,
3238 elmt_size, factor);
3239
3240 TREE_OPERAND (t, 3) = elmt_size;
3241 }
3242 }
3243 need_non_reg = true;
3244 }
3245 else if (TREE_CODE (t) == COMPONENT_REF)
3246 {
3247 /* Set the field offset into T and gimplify it. */
3248 if (TREE_OPERAND (t, 2) == NULL_TREE)
3249 {
3250 tree offset = component_ref_field_offset (t);
3251 if (!is_gimple_min_invariant (offset))
3252 {
3253 offset = unshare_expr (offset);
3254 tree field = TREE_OPERAND (t, 1);
3255 tree factor
3256 = size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT);
3257
3258 /* Divide the offset by its alignment. */
3259 offset = size_binop_loc (loc, EXACT_DIV_EXPR,
3260 offset, factor);
3261
3262 TREE_OPERAND (t, 2) = offset;
3263 }
3264 }
3265 need_non_reg = true;
3266 }
3267 }
3268
3269 /* Step 2 is to gimplify the base expression. Make sure lvalue is set
3270 so as to match the min_lval predicate. Failure to do so may result
3271 in the creation of large aggregate temporaries. */
3272 tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval,
3273 fallback | fb_lvalue);
3274 ret = MIN (ret, tret);
3275
3276 /* Step 2a: if we have component references we do not support on
3277 registers then make sure the base isn't a register. Of course
3278 we can only do so if an rvalue is OK. */
3279 if (need_non_reg && (fallback & fb_rvalue))
3280 prepare_gimple_addressable (p, pre_p);
3281
3282 /* Step 3: gimplify size expressions and the indices and operands of
3283 ARRAY_REF. During this loop we also remove any useless conversions. */
3284
3285 for (; expr_stack.length () > 0; )
3286 {
3287 tree t = expr_stack.pop ();
3288
3289 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
3290 {
3291 /* Gimplify the low bound and element type size. */
3292 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
3293 is_gimple_reg, fb_rvalue);
3294 ret = MIN (ret, tret);
3295
3296 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p, post_p,
3297 is_gimple_reg, fb_rvalue);
3298 ret = MIN (ret, tret);
3299
3300 /* Gimplify the dimension. */
3301 tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
3302 is_gimple_val, fb_rvalue);
3303 ret = MIN (ret, tret);
3304 }
3305 else if (TREE_CODE (t) == COMPONENT_REF)
3306 {
3307 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
3308 is_gimple_reg, fb_rvalue);
3309 ret = MIN (ret, tret);
3310 }
3311
3312 STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t, 0));
3313
3314 /* The innermost expression P may have originally had
3315 TREE_SIDE_EFFECTS set which would have caused all the outer
3316 expressions in *EXPR_P leading to P to also have had
3317 TREE_SIDE_EFFECTS set. */
3318 recalculate_side_effects (t);
3319 }
3320
3321 /* If the outermost expression is a COMPONENT_REF, canonicalize its type. */
3322 if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF)
3323 {
3324 canonicalize_component_ref (expr_p);
3325 }
3326
3327 expr_stack.release ();
3328
3329 gcc_assert (*expr_p == expr || ret != GS_ALL_DONE);
3330
3331 return ret;
3332 }
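
/* For example (illustrative only, temporaries made up): for the
   rvalue a[i].f with a variable index, the walk above yields roughly

     i.0 = i;
     D.1 = a[i.0].f;

   and any non-constant array bounds, element sizes or field offsets
   are gimplified into the normally implicit operands 2 and 3 of the
   ARRAY_REF / COMPONENT_REF first.  */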
3333
3334 /* Gimplify the self modifying expression pointed to by EXPR_P
3335 (++, --, +=, -=).
3336
3337 PRE_P points to the list where side effects that must happen before
3338 *EXPR_P should be stored.
3339
3340 POST_P points to the list where side effects that must happen after
3341 *EXPR_P should be stored.
3342
3343 WANT_VALUE is nonzero iff we want to use the value of this expression
3344 in another expression.
3345
3346 ARITH_TYPE is the type the computation should be performed in. */
3347
3348 enum gimplify_status
3349 gimplify_self_mod_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
3350 bool want_value, tree arith_type)
3351 {
3352 enum tree_code code;
3353 tree lhs, lvalue, rhs, t1;
3354 gimple_seq post = NULL, *orig_post_p = post_p;
3355 bool postfix;
3356 enum tree_code arith_code;
3357 enum gimplify_status ret;
3358 location_t loc = EXPR_LOCATION (*expr_p);
3359
3360 code = TREE_CODE (*expr_p);
3361
3362 gcc_assert (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR
3363 || code == PREINCREMENT_EXPR || code == PREDECREMENT_EXPR);
3364
3365 /* Prefix or postfix? */
3366 if (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR)
3367 /* Faster to treat as prefix if result is not used. */
3368 postfix = want_value;
3369 else
3370 postfix = false;
3371
3372 /* For postfix, make sure the inner expression's post side effects
3373 are executed after side effects from this expression. */
3374 if (postfix)
3375 post_p = &post;
3376
3377 /* Add or subtract? */
3378 if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
3379 arith_code = PLUS_EXPR;
3380 else
3381 arith_code = MINUS_EXPR;
3382
3383 /* Gimplify the LHS into a GIMPLE lvalue. */
3384 lvalue = TREE_OPERAND (*expr_p, 0);
3385 ret = gimplify_expr (&lvalue, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
3386 if (ret == GS_ERROR)
3387 return ret;
3388
3389 /* Extract the operands to the arithmetic operation. */
3390 lhs = lvalue;
3391 rhs = TREE_OPERAND (*expr_p, 1);
3392
3393 /* For postfix operator, we evaluate the LHS to an rvalue and then use
3394 that as the result value and in the postqueue operation. */
3395 if (postfix)
3396 {
3397 ret = gimplify_expr (&lhs, pre_p, post_p, is_gimple_val, fb_rvalue);
3398 if (ret == GS_ERROR)
3399 return ret;
3400
3401 lhs = get_initialized_tmp_var (lhs, pre_p);
3402 }
3403
3404 /* For pointer increment and decrement, use POINTER_PLUS_EXPR. */
3405 if (POINTER_TYPE_P (TREE_TYPE (lhs)))
3406 {
3407 rhs = convert_to_ptrofftype_loc (loc, rhs);
3408 if (arith_code == MINUS_EXPR)
3409 rhs = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (rhs), rhs);
3410 t1 = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (*expr_p), lhs, rhs);
3411 }
3412 else
3413 t1 = fold_convert (TREE_TYPE (*expr_p),
3414 fold_build2 (arith_code, arith_type,
3415 fold_convert (arith_type, lhs),
3416 fold_convert (arith_type, rhs)));
3417
3418 if (postfix)
3419 {
3420 gimplify_assign (lvalue, t1, pre_p);
3421 gimplify_seq_add_seq (orig_post_p, post);
3422 *expr_p = lhs;
3423 return GS_ALL_DONE;
3424 }
3425 else
3426 {
3427 *expr_p = build2 (MODIFY_EXPR, TREE_TYPE (lvalue), lvalue, t1);
3428 return GS_OK;
3429 }
3430 }
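
/* For example (illustrative only, temporary made up): when its value
   is used, the postfix x++ becomes

     x.0 = x;
     x = x.0 + 1;

   and x.0 replaces the original expression; prefix ++x, or a postfix
   increment whose value is unused, is simply rewritten as x = x + 1.  */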
3431
3432 /* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR. */
3433
3434 static void
3435 maybe_with_size_expr (tree *expr_p)
3436 {
3437 tree expr = *expr_p;
3438 tree type = TREE_TYPE (expr);
3439 tree size;
3440
3441 /* If we've already wrapped this or the type is error_mark_node, we can't do
3442 anything. */
3443 if (TREE_CODE (expr) == WITH_SIZE_EXPR
3444 || type == error_mark_node)
3445 return;
3446
3447 /* If the size isn't known or is a constant, we have nothing to do. */
3448 size = TYPE_SIZE_UNIT (type);
3449 if (!size || poly_int_tree_p (size))
3450 return;
3451
3452 /* Otherwise, make a WITH_SIZE_EXPR. */
3453 size = unshare_expr (size);
3454 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, expr);
3455 *expr_p = build2 (WITH_SIZE_EXPR, type, expr, size);
3456 }
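/* Sketch: for an object whose type has non-constant size (e.g. a C99
   VLA-typed object, or a variable-sized record in Ada), the use is
   wrapped roughly as

     WITH_SIZE_EXPR <obj, n * sizeof (elt)>

   so later consumers (such as the memcpy lowering further below) still
   know how many bytes move.  The size tree shown is invented for
   illustration; the real one comes from TYPE_SIZE_UNIT.  */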
3457
3458 /* Helper for gimplify_call_expr. Gimplify a single argument *ARG_P.
3459 Store any side-effects in PRE_P. CALL_LOCATION is the location of
3460 the CALL_EXPR. If ALLOW_SSA is set the actual parameter may be
3461 gimplified to an SSA name. */
3462
3463 enum gimplify_status
3464 gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location,
3465 bool allow_ssa)
3466 {
3467 bool (*test) (tree);
3468 fallback_t fb;
3469
3470 /* In general, we allow lvalues for function arguments to avoid
3471 extra overhead of copying large aggregates out of even larger
3472 aggregates into temporaries only to copy the temporaries to
3473 the argument list. Make optimizers happy by pulling out to
3474 temporaries those types that fit in registers. */
3475 if (is_gimple_reg_type (TREE_TYPE (*arg_p)))
3476 test = is_gimple_val, fb = fb_rvalue;
3477 else
3478 {
3479 test = is_gimple_lvalue, fb = fb_either;
3480 /* Also strip a TARGET_EXPR that would force an extra copy. */
3481 if (TREE_CODE (*arg_p) == TARGET_EXPR)
3482 {
3483 tree init = TARGET_EXPR_INITIAL (*arg_p);
3484 if (init
3485 && !VOID_TYPE_P (TREE_TYPE (init)))
3486 *arg_p = init;
3487 }
3488 }
3489
3490 /* If this is a variable sized type, we must remember the size. */
3491 maybe_with_size_expr (arg_p);
3492
3493 /* FIXME diagnostics: This will mess up gcc.dg/Warray-bounds.c. */
3494 /* Make sure arguments have the same location as the function call
3495 itself. */
3496 protected_set_expr_location (*arg_p, call_location);
3497
3498 /* There is a sequence point before a function call. Side effects in
3499 the argument list must occur before the actual call. So, when
3500 gimplifying arguments, force gimplify_expr to use an internal
3501 post queue which is then appended to the end of PRE_P. */
3502 return gimplify_expr (arg_p, pre_p, NULL, test, fb, allow_ssa);
3503 }
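/* Sketch of the two cases above (temporary name invented):

     foo (a + b, big_struct);

   "a + b" has register type, so it is reduced to a GIMPLE value,

     tmp = a + b;
     foo (tmp, big_struct);

   while the aggregate "big_struct" is left as an lvalue so no extra
   copy into a temporary is made.  */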
3504
3505 /* Don't fold inside offloading or taskreg regions: it can break code by
3506 adding decl references that weren't in the source. We'll do it during
3507 omplower pass instead. */
3508
3509 static bool
3510 maybe_fold_stmt (gimple_stmt_iterator *gsi)
3511 {
3512 struct gimplify_omp_ctx *ctx;
3513 for (ctx = gimplify_omp_ctxp; ctx; ctx = ctx->outer_context)
3514 if ((ctx->region_type & (ORT_TARGET | ORT_PARALLEL | ORT_TASK)) != 0)
3515 return false;
3516 else if ((ctx->region_type & ORT_HOST_TEAMS) == ORT_HOST_TEAMS)
3517 return false;
3518 /* Delay folding of builtins until the IL is in a consistent state
3519 so the diagnostic machinery can do a better job. */
3520 if (gimple_call_builtin_p (gsi_stmt (*gsi)))
3521 return false;
3522 return fold_stmt (gsi);
3523 }
3524
3525 /* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P.
3526 WANT_VALUE is true if the result of the call is desired. */
3527
3528 static enum gimplify_status
3529 gimplify_call_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
3530 {
3531 tree fndecl, parms, p, fnptrtype;
3532 enum gimplify_status ret;
3533 int i, nargs;
3534 gcall *call;
3535 bool builtin_va_start_p = false;
3536 location_t loc = EXPR_LOCATION (*expr_p);
3537
3538 gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR);
3539
3540 /* For reliable diagnostics during inlining, it is necessary that
3541 every call_expr be annotated with file and line. */
3542 if (! EXPR_HAS_LOCATION (*expr_p))
3543 SET_EXPR_LOCATION (*expr_p, input_location);
3544
3545 /* Gimplify internal functions created in the FEs. */
3546 if (CALL_EXPR_FN (*expr_p) == NULL_TREE)
3547 {
3548 if (want_value)
3549 return GS_ALL_DONE;
3550
3551 nargs = call_expr_nargs (*expr_p);
3552 enum internal_fn ifn = CALL_EXPR_IFN (*expr_p);
3553 auto_vec<tree> vargs (nargs);
3554
3555 for (i = 0; i < nargs; i++)
3556 {
3557 gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
3558 EXPR_LOCATION (*expr_p));
3559 vargs.quick_push (CALL_EXPR_ARG (*expr_p, i));
3560 }
3561
3562 gcall *call = gimple_build_call_internal_vec (ifn, vargs);
3563 gimple_call_set_nothrow (call, TREE_NOTHROW (*expr_p));
3564 gimplify_seq_add_stmt (pre_p, call);
3565 return GS_ALL_DONE;
3566 }
3567
3568 /* This may be a call to a builtin function.
3569
3570 Builtin function calls may be transformed into different
3571 (and more efficient) builtin function calls under certain
3572 circumstances. Unfortunately, gimplification can muck things
3573 up enough that the builtin expanders are not aware that certain
3574 transformations are still valid.
3575
3576 So we attempt transformation/gimplification of the call before
3577 we gimplify the CALL_EXPR. At this time we do not manage to
3578 transform all calls in the same manner as the expanders do, but
3579 we do transform most of them. */
3580 fndecl = get_callee_fndecl (*expr_p);
3581 if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
3582 switch (DECL_FUNCTION_CODE (fndecl))
3583 {
3584 CASE_BUILT_IN_ALLOCA:
3585 /* If the call has been built for a variable-sized object, then we
3586 want to restore the stack level when the enclosing BIND_EXPR is
3587 exited to reclaim the allocated space; otherwise, we need to do
3588 precisely the opposite and preserve the latest stack level. */
3589 if (CALL_ALLOCA_FOR_VAR_P (*expr_p))
3590 gimplify_ctxp->save_stack = true;
3591 else
3592 gimplify_ctxp->keep_stack = true;
3593 break;
3594
3595 case BUILT_IN_VA_START:
3596 {
3597 builtin_va_start_p = TRUE;
3598 if (call_expr_nargs (*expr_p) < 2)
3599 {
3600 error ("too few arguments to function %<va_start%>");
3601 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
3602 return GS_OK;
3603 }
3604
3605 if (fold_builtin_next_arg (*expr_p, true))
3606 {
3607 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
3608 return GS_OK;
3609 }
3610 break;
3611 }
3612
3613 case BUILT_IN_EH_RETURN:
3614 cfun->calls_eh_return = true;
3615 break;
3616
3617 case BUILT_IN_CLEAR_PADDING:
3618 if (call_expr_nargs (*expr_p) == 1)
3619 {
3620 /* Remember the original type of the argument in an internal
3621 dummy second argument, as pointer conversions are useless
3622 in GIMPLE. Also mark this call as not for automatic
3623 initialization in the internal dummy third argument. */
3624 p = CALL_EXPR_ARG (*expr_p, 0);
3625 *expr_p
3626 = build_call_expr_loc (EXPR_LOCATION (*expr_p), fndecl, 2, p,
3627 build_zero_cst (TREE_TYPE (p)));
3628 return GS_OK;
3629 }
3630 break;
3631
3632 default:
3633 ;
3634 }
3635 if (fndecl && fndecl_built_in_p (fndecl))
3636 {
3637 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
3638 if (new_tree && new_tree != *expr_p)
3639 {
3640 /* There was a transformation of this call which computes the
3641 same value, but in a more efficient way. Return and try
3642 again. */
3643 *expr_p = new_tree;
3644 return GS_OK;
3645 }
3646 }
3647
3648 /* Remember the original function pointer type. */
3649 fnptrtype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
3650
3651 if (flag_openmp
3652 && fndecl
3653 && cfun
3654 && (cfun->curr_properties & PROP_gimple_any) == 0)
3655 {
3656 tree variant = omp_resolve_declare_variant (fndecl);
3657 if (variant != fndecl)
3658 CALL_EXPR_FN (*expr_p) = build1 (ADDR_EXPR, fnptrtype, variant);
3659 }
3660
3661 /* There is a sequence point before the call, so any side effects in
3662 the calling expression must occur before the actual call. Force
3663 gimplify_expr to use an internal post queue. */
3664 ret = gimplify_expr (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
3665 is_gimple_call_addr, fb_rvalue);
3666
3667 nargs = call_expr_nargs (*expr_p);
3668
3669 /* Get argument types for verification. */
3670 fndecl = get_callee_fndecl (*expr_p);
3671 parms = NULL_TREE;
3672 if (fndecl)
3673 parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
3674 else
3675 parms = TYPE_ARG_TYPES (TREE_TYPE (fnptrtype));
3676
3677 if (fndecl && DECL_ARGUMENTS (fndecl))
3678 p = DECL_ARGUMENTS (fndecl);
3679 else if (parms)
3680 p = parms;
3681 else
3682 p = NULL_TREE;
3683 for (i = 0; i < nargs && p; i++, p = TREE_CHAIN (p))
3684 ;
3685
3686 /* If the last argument is __builtin_va_arg_pack () and it is not
3687 passed as a named argument, decrease the number of CALL_EXPR
3688 arguments and set instead the CALL_EXPR_VA_ARG_PACK flag. */
3689 if (!p
3690 && i < nargs
3691 && TREE_CODE (CALL_EXPR_ARG (*expr_p, nargs - 1)) == CALL_EXPR)
3692 {
3693 tree last_arg = CALL_EXPR_ARG (*expr_p, nargs - 1);
3694 tree last_arg_fndecl = get_callee_fndecl (last_arg);
3695
3696 if (last_arg_fndecl
3697 && fndecl_built_in_p (last_arg_fndecl, BUILT_IN_VA_ARG_PACK))
3698 {
3699 tree call = *expr_p;
3700
3701 --nargs;
3702 *expr_p = build_call_array_loc (loc, TREE_TYPE (call),
3703 CALL_EXPR_FN (call),
3704 nargs, CALL_EXPR_ARGP (call));
3705
3706 /* Copy all CALL_EXPR flags, location and block, except
3707 CALL_EXPR_VA_ARG_PACK flag. */
3708 CALL_EXPR_STATIC_CHAIN (*expr_p) = CALL_EXPR_STATIC_CHAIN (call);
3709 CALL_EXPR_TAILCALL (*expr_p) = CALL_EXPR_TAILCALL (call);
3710 CALL_EXPR_RETURN_SLOT_OPT (*expr_p)
3711 = CALL_EXPR_RETURN_SLOT_OPT (call);
3712 CALL_FROM_THUNK_P (*expr_p) = CALL_FROM_THUNK_P (call);
3713 SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (call));
3714
3715 /* Set CALL_EXPR_VA_ARG_PACK. */
3716 CALL_EXPR_VA_ARG_PACK (*expr_p) = 1;
3717 }
3718 }
3719
3720 /* If the call returns twice then after building the CFG the call
3721 argument computations will no longer dominate the call because
3722 we add an abnormal incoming edge to the call. So do not use SSA
3723 vars there. */
3724 bool returns_twice = call_expr_flags (*expr_p) & ECF_RETURNS_TWICE;
3725
3726 /* Gimplify the function arguments. */
3727 if (nargs > 0)
3728 {
3729 for (i = (PUSH_ARGS_REVERSED ? nargs - 1 : 0);
3730 PUSH_ARGS_REVERSED ? i >= 0 : i < nargs;
3731 PUSH_ARGS_REVERSED ? i-- : i++)
3732 {
3733 enum gimplify_status t;
3734
3735 /* Avoid gimplifying the second argument to va_start, which needs to
3736 be the plain PARM_DECL. */
3737 if ((i != 1) || !builtin_va_start_p)
3738 {
3739 t = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
3740 EXPR_LOCATION (*expr_p), ! returns_twice);
3741
3742 if (t == GS_ERROR)
3743 ret = GS_ERROR;
3744 }
3745 }
3746 }
3747
3748 /* Gimplify the static chain. */
3749 if (CALL_EXPR_STATIC_CHAIN (*expr_p))
3750 {
3751 if (fndecl && !DECL_STATIC_CHAIN (fndecl))
3752 CALL_EXPR_STATIC_CHAIN (*expr_p) = NULL;
3753 else
3754 {
3755 enum gimplify_status t;
3756 t = gimplify_arg (&CALL_EXPR_STATIC_CHAIN (*expr_p), pre_p,
3757 EXPR_LOCATION (*expr_p), ! returns_twice);
3758 if (t == GS_ERROR)
3759 ret = GS_ERROR;
3760 }
3761 }
3762
3763 /* Verify the function result. */
3764 if (want_value && fndecl
3765 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fnptrtype))))
3766 {
3767 error_at (loc, "using result of function returning %<void%>");
3768 ret = GS_ERROR;
3769 }
3770
3771 /* Try this again in case gimplification exposed something. */
3772 if (ret != GS_ERROR)
3773 {
3774 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
3775
3776 if (new_tree && new_tree != *expr_p)
3777 {
3778 /* There was a transformation of this call which computes the
3779 same value, but in a more efficient way. Return and try
3780 again. */
3781 *expr_p = new_tree;
3782 return GS_OK;
3783 }
3784 }
3785 else
3786 {
3787 *expr_p = error_mark_node;
3788 return GS_ERROR;
3789 }
3790
3791 /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
3792 decl. This allows us to eliminate redundant or useless
3793 calls to "const" functions. */
3794 if (TREE_CODE (*expr_p) == CALL_EXPR)
3795 {
3796 int flags = call_expr_flags (*expr_p);
3797 if (flags & (ECF_CONST | ECF_PURE)
3798 /* An infinite loop is considered a side effect. */
3799 && !(flags & (ECF_LOOPING_CONST_OR_PURE)))
3800 TREE_SIDE_EFFECTS (*expr_p) = 0;
3801 }
3802
3803 /* If the value is not needed by the caller, emit a new GIMPLE_CALL
3804 and clear *EXPR_P. Otherwise, leave *EXPR_P in its gimplified
3805 form and delegate the creation of a GIMPLE_CALL to
3806 gimplify_modify_expr. This is always possible because when
3807 WANT_VALUE is true, the caller wants the result of this call into
3808 a temporary, which means that we will emit an INIT_EXPR in
3809 internal_get_tmp_var which will then be handled by
3810 gimplify_modify_expr. */
3811 if (!want_value)
3812 {
3813 /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we
3814 have to do is replicate it as a GIMPLE_CALL tuple. */
3815 gimple_stmt_iterator gsi;
3816 call = gimple_build_call_from_tree (*expr_p, fnptrtype);
3817 notice_special_calls (call);
3818 gimplify_seq_add_stmt (pre_p, call);
3819 gsi = gsi_last (*pre_p);
3820 maybe_fold_stmt (&gsi);
3821 *expr_p = NULL_TREE;
3822 }
3823 else
3824 /* Remember the original function type. */
3825 CALL_EXPR_FN (*expr_p) = build1 (NOP_EXPR, fnptrtype,
3826 CALL_EXPR_FN (*expr_p));
3827
3828 return ret;
3829 }
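/* Illustration (temporary names invented): a nested call such as

     result = f (g (x), h (y));

   is flattened here so each argument is a GIMPLE value and its side
   effects happen before the call:

     tmp1 = g (x);
     tmp2 = h (y);
     result = f (tmp1, tmp2);

   With !WANT_VALUE the call itself is emitted as a GIMPLE_CALL tuple
   directly and *EXPR_P is cleared.  */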
3830
3831 /* Handle shortcut semantics in the predicate operand of a COND_EXPR by
3832 rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.
3833
3834 TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
3835 condition is true or false, respectively. If null, we should generate
3836 our own to skip over the evaluation of this specific expression.
3837
3838 LOCUS is the source location of the COND_EXPR.
3839
3840 This function is the tree equivalent of do_jump.
3841
3842 shortcut_cond_r should only be called by shortcut_cond_expr. */
3843
3844 static tree
3845 shortcut_cond_r (tree pred, tree *true_label_p, tree *false_label_p,
3846 location_t locus)
3847 {
3848 tree local_label = NULL_TREE;
3849 tree t, expr = NULL;
3850
3851 /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
3852 retain the shortcut semantics. Just insert the gotos here;
3853 shortcut_cond_expr will append the real blocks later. */
3854 if (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
3855 {
3856 location_t new_locus;
3857
3858 /* Turn if (a && b) into
3859
3860 if (a); else goto no;
3861 if (b) goto yes; else goto no;
3862 (no:) */
3863
3864 if (false_label_p == NULL)
3865 false_label_p = &local_label;
3866
3867 /* Keep the original source location on the first 'if'. */
3868 t = shortcut_cond_r (TREE_OPERAND (pred, 0), NULL, false_label_p, locus);
3869 append_to_statement_list (t, &expr);
3870
3871 /* Set the source location of the && on the second 'if'. */
3872 new_locus = rexpr_location (pred, locus);
3873 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
3874 new_locus);
3875 append_to_statement_list (t, &expr);
3876 }
3877 else if (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
3878 {
3879 location_t new_locus;
3880
3881 /* Turn if (a || b) into
3882
3883 if (a) goto yes;
3884 if (b) goto yes; else goto no;
3885 (yes:) */
3886
3887 if (true_label_p == NULL)
3888 true_label_p = &local_label;
3889
3890 /* Keep the original source location on the first 'if'. */
3891 t = shortcut_cond_r (TREE_OPERAND (pred, 0), true_label_p, NULL, locus);
3892 append_to_statement_list (t, &expr);
3893
3894 /* Set the source location of the || on the second 'if'. */
3895 new_locus = rexpr_location (pred, locus);
3896 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
3897 new_locus);
3898 append_to_statement_list (t, &expr);
3899 }
3900 else if (TREE_CODE (pred) == COND_EXPR
3901 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 1)))
3902 && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 2))))
3903 {
3904 location_t new_locus;
3905
3906 /* As long as we're messing with gotos, turn if (a ? b : c) into
3907 if (a)
3908 if (b) goto yes; else goto no;
3909 else
3910 if (c) goto yes; else goto no;
3911
3912 Don't do this if one of the arms has void type, which can happen
3913 in C++ when the arm is throw. */
3914
3915 /* Keep the original source location on the first 'if'. Set the source
3916 location of the ? on the second 'if'. */
3917 new_locus = rexpr_location (pred, locus);
3918 expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (pred, 0),
3919 shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
3920 false_label_p, locus),
3921 shortcut_cond_r (TREE_OPERAND (pred, 2), true_label_p,
3922 false_label_p, new_locus));
3923 }
3924 else
3925 {
3926 expr = build3 (COND_EXPR, void_type_node, pred,
3927 build_and_jump (true_label_p),
3928 build_and_jump (false_label_p));
3929 SET_EXPR_LOCATION (expr, locus);
3930 }
3931
3932 if (local_label)
3933 {
3934 t = build1 (LABEL_EXPR, void_type_node, local_label);
3935 append_to_statement_list (t, &expr);
3936 }
3937
3938 return expr;
3939 }
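/* Worked example of the recursion above (labels invented):

     if (a && (b || c)) goto yes; else goto no;

   becomes

     if (a); else goto no;
     if (b) goto yes;
     if (c) goto yes; else goto no;

   i.e. the ANDIF arm forwards the false label and the ORIF arm forwards
   the true label, so no intermediate boolean temporaries are needed.  */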
3940
3941 /* If EXPR is a GOTO_EXPR, return it. If it is a STATEMENT_LIST, skip
3942 any of its leading DEBUG_BEGIN_STMTS and recurse on the subsequent
3943 statement, if it is the last one. Otherwise, return NULL. */
3944
3945 static tree
3946 find_goto (tree expr)
3947 {
3948 if (!expr)
3949 return NULL_TREE;
3950
3951 if (TREE_CODE (expr) == GOTO_EXPR)
3952 return expr;
3953
3954 if (TREE_CODE (expr) != STATEMENT_LIST)
3955 return NULL_TREE;
3956
3957 tree_stmt_iterator i = tsi_start (expr);
3958
3959 while (!tsi_end_p (i) && TREE_CODE (tsi_stmt (i)) == DEBUG_BEGIN_STMT)
3960 tsi_next (&i);
3961
3962 if (!tsi_one_before_end_p (i))
3963 return NULL_TREE;
3964
3965 return find_goto (tsi_stmt (i));
3966 }
3967
3968 /* Same as find_goto, except that it returns NULL if the destination
3969 is not a LABEL_DECL. */
3970
3971 static inline tree
3972 find_goto_label (tree expr)
3973 {
3974 tree dest = find_goto (expr);
3975 if (dest && TREE_CODE (GOTO_DESTINATION (dest)) == LABEL_DECL)
3976 return dest;
3977 return NULL_TREE;
3978 }
3979
3980 /* Given a conditional expression EXPR with short-circuit boolean
3981 predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the
3982 predicate apart into the equivalent sequence of conditionals. */
3983
3984 static tree
3985 shortcut_cond_expr (tree expr)
3986 {
3987 tree pred = TREE_OPERAND (expr, 0);
3988 tree then_ = TREE_OPERAND (expr, 1);
3989 tree else_ = TREE_OPERAND (expr, 2);
3990 tree true_label, false_label, end_label, t;
3991 tree *true_label_p;
3992 tree *false_label_p;
3993 bool emit_end, emit_false, jump_over_else;
3994 bool then_se = then_ && TREE_SIDE_EFFECTS (then_);
3995 bool else_se = else_ && TREE_SIDE_EFFECTS (else_);
3996
3997 /* First do simple transformations. */
3998 if (!else_se)
3999 {
4000 /* If there is no 'else', turn
4001 if (a && b) then c
4002 into
4003 if (a) if (b) then c. */
4004 while (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
4005 {
4006 /* Keep the original source location on the first 'if'. */
4007 location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
4008 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
4009 /* Set the source location of the && on the second 'if'. */
4010 if (rexpr_has_location (pred))
4011 SET_EXPR_LOCATION (expr, rexpr_location (pred));
4012 then_ = shortcut_cond_expr (expr);
4013 then_se = then_ && TREE_SIDE_EFFECTS (then_);
4014 pred = TREE_OPERAND (pred, 0);
4015 expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE);
4016 SET_EXPR_LOCATION (expr, locus);
4017 }
4018 }
4019
4020 if (!then_se)
4021 {
4022 /* If there is no 'then', turn
4023 if (a || b); else d
4024 into
4025 if (a); else if (b); else d. */
4026 while (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
4027 {
4028 /* Keep the original source location on the first 'if'. */
4029 location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
4030 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
4031 /* Set the source location of the || on the second 'if'. */
4032 if (rexpr_has_location (pred))
4033 SET_EXPR_LOCATION (expr, rexpr_location (pred));
4034 else_ = shortcut_cond_expr (expr);
4035 else_se = else_ && TREE_SIDE_EFFECTS (else_);
4036 pred = TREE_OPERAND (pred, 0);
4037 expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_);
4038 SET_EXPR_LOCATION (expr, locus);
4039 }
4040 }
4041
4042 /* If we're done, great. */
4043 if (TREE_CODE (pred) != TRUTH_ANDIF_EXPR
4044 && TREE_CODE (pred) != TRUTH_ORIF_EXPR)
4045 return expr;
4046
4047 /* Otherwise we need to mess with gotos. Change
4048 if (a) c; else d;
4049 to
4050 if (a); else goto no;
4051 c; goto end;
4052 no: d; end:
4053 and recursively gimplify the condition. */
4054
4055 true_label = false_label = end_label = NULL_TREE;
4056
4057 /* If our arms just jump somewhere, hijack those labels so we don't
4058 generate jumps to jumps. */
4059
4060 if (tree then_goto = find_goto_label (then_))
4061 {
4062 true_label = GOTO_DESTINATION (then_goto);
4063 then_ = NULL;
4064 then_se = false;
4065 }
4066
4067 if (tree else_goto = find_goto_label (else_))
4068 {
4069 false_label = GOTO_DESTINATION (else_goto);
4070 else_ = NULL;
4071 else_se = false;
4072 }
4073
4074 /* If we aren't hijacking a label for the 'then' branch, it falls through. */
4075 if (true_label)
4076 true_label_p = &true_label;
4077 else
4078 true_label_p = NULL;
4079
4080 /* The 'else' branch also needs a label if it contains interesting code. */
4081 if (false_label || else_se)
4082 false_label_p = &false_label;
4083 else
4084 false_label_p = NULL;
4085
4086 /* If there was nothing else in our arms, just forward the label(s). */
4087 if (!then_se && !else_se)
4088 return shortcut_cond_r (pred, true_label_p, false_label_p,
4089 EXPR_LOC_OR_LOC (expr, input_location));
4090
4091 /* If our last subexpression already has a terminal label, reuse it. */
4092 if (else_se)
4093 t = expr_last (else_);
4094 else if (then_se)
4095 t = expr_last (then_);
4096 else
4097 t = NULL;
4098 if (t && TREE_CODE (t) == LABEL_EXPR)
4099 end_label = LABEL_EXPR_LABEL (t);
4100
4101 /* If we don't care about jumping to the 'else' branch, jump to the end
4102 if the condition is false. */
4103 if (!false_label_p)
4104 false_label_p = &end_label;
4105
4106 /* We only want to emit these labels if we aren't hijacking them. */
4107 emit_end = (end_label == NULL_TREE);
4108 emit_false = (false_label == NULL_TREE);
4109
4110 /* We only emit the jump over the else clause if we have to--if the
4111 then clause may fall through. Otherwise we can wind up with a
4112 useless jump and a useless label at the end of gimplified code,
4113 which will cause us to think that this conditional as a whole
4114 falls through even if it doesn't. If we then inline a function
4115 which ends with such a condition, that can cause us to issue an
4116 inappropriate warning about control reaching the end of a
4117 non-void function. */
4118 jump_over_else = block_may_fallthru (then_);
4119
4120 pred = shortcut_cond_r (pred, true_label_p, false_label_p,
4121 EXPR_LOC_OR_LOC (expr, input_location));
4122
4123 expr = NULL;
4124 append_to_statement_list (pred, &expr);
4125
4126 append_to_statement_list (then_, &expr);
4127 if (else_se)
4128 {
4129 if (jump_over_else)
4130 {
4131 tree last = expr_last (expr);
4132 t = build_and_jump (&end_label);
4133 if (rexpr_has_location (last))
4134 SET_EXPR_LOCATION (t, rexpr_location (last));
4135 append_to_statement_list (t, &expr);
4136 }
4137 if (emit_false)
4138 {
4139 t = build1 (LABEL_EXPR, void_type_node, false_label);
4140 append_to_statement_list (t, &expr);
4141 }
4142 append_to_statement_list (else_, &expr);
4143 }
4144 if (emit_end && end_label)
4145 {
4146 t = build1 (LABEL_EXPR, void_type_node, end_label);
4147 append_to_statement_list (t, &expr);
4148 }
4149
4150 return expr;
4151 }
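/* End-to-end sketch (labels invented):

     if (a && b) x = 1; else x = 2;

   is rewritten here to

     if (a); else goto no;
     if (b); else goto no;
     x = 1;
     goto end;
   no:
     x = 2;
   end:

   which gimplify_cond_expr then re-gimplifies inside a conditional
   context.  */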
4152
4153 /* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE. */
4154
4155 tree
4156 gimple_boolify (tree expr)
4157 {
4158 tree type = TREE_TYPE (expr);
4159 location_t loc = EXPR_LOCATION (expr);
4160
4161 if (TREE_CODE (expr) == NE_EXPR
4162 && TREE_CODE (TREE_OPERAND (expr, 0)) == CALL_EXPR
4163 && integer_zerop (TREE_OPERAND (expr, 1)))
4164 {
4165 tree call = TREE_OPERAND (expr, 0);
4166 tree fn = get_callee_fndecl (call);
4167
4168 /* For __builtin_expect ((long) (x), y) recurse into x as well
4169 if x is truth_value_p. */
4170 if (fn
4171 && fndecl_built_in_p (fn, BUILT_IN_EXPECT)
4172 && call_expr_nargs (call) == 2)
4173 {
4174 tree arg = CALL_EXPR_ARG (call, 0);
4175 if (arg)
4176 {
4177 if (TREE_CODE (arg) == NOP_EXPR
4178 && TREE_TYPE (arg) == TREE_TYPE (call))
4179 arg = TREE_OPERAND (arg, 0);
4180 if (truth_value_p (TREE_CODE (arg)))
4181 {
4182 arg = gimple_boolify (arg);
4183 CALL_EXPR_ARG (call, 0)
4184 = fold_convert_loc (loc, TREE_TYPE (call), arg);
4185 }
4186 }
4187 }
4188 }
4189
4190 switch (TREE_CODE (expr))
4191 {
4192 case TRUTH_AND_EXPR:
4193 case TRUTH_OR_EXPR:
4194 case TRUTH_XOR_EXPR:
4195 case TRUTH_ANDIF_EXPR:
4196 case TRUTH_ORIF_EXPR:
4197 /* Also boolify the arguments of truth exprs. */
4198 TREE_OPERAND (expr, 1) = gimple_boolify (TREE_OPERAND (expr, 1));
4199 /* FALLTHRU */
4200
4201 case TRUTH_NOT_EXPR:
4202 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
4203
4204 /* These expressions always produce boolean results. */
4205 if (TREE_CODE (type) != BOOLEAN_TYPE)
4206 TREE_TYPE (expr) = boolean_type_node;
4207 return expr;
4208
4209 case ANNOTATE_EXPR:
4210 switch ((enum annot_expr_kind) TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)))
4211 {
4212 case annot_expr_ivdep_kind:
4213 case annot_expr_unroll_kind:
4214 case annot_expr_no_vector_kind:
4215 case annot_expr_vector_kind:
4216 case annot_expr_parallel_kind:
4217 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
4218 if (TREE_CODE (type) != BOOLEAN_TYPE)
4219 TREE_TYPE (expr) = boolean_type_node;
4220 return expr;
4221 default:
4222 gcc_unreachable ();
4223 }
4224
4225 default:
4226 if (COMPARISON_CLASS_P (expr))
4227 {
4228 /* These expressions always produce boolean results. */
4229 if (TREE_CODE (type) != BOOLEAN_TYPE)
4230 TREE_TYPE (expr) = boolean_type_node;
4231 return expr;
4232 }
4233 /* Other expressions that get here must have boolean values, but
4234 might need to be converted to the appropriate mode. */
4235 if (TREE_CODE (type) == BOOLEAN_TYPE)
4236 return expr;
4237 return fold_convert_loc (loc, boolean_type_node, expr);
4238 }
4239 }
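/* Sketch of boolification (result shown loosely):

     if (x & 4) ...        the bit test has integer type, so the default
                           case converts it to boolean_type_node, roughly
     if ((_Bool) (x & 4)) ...

   while comparisons like "a < b" and truth operators like "p && q" only
   get their TREE_TYPE retargeted to BOOLEAN_TYPE, with truth operands
   boolified recursively.  */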
4240
4241 /* Given a conditional expression *EXPR_P without side effects, gimplify
4242 its operands. New statements are inserted to PRE_P. */
4243
4244 static enum gimplify_status
4245 gimplify_pure_cond_expr (tree *expr_p, gimple_seq *pre_p)
4246 {
4247 tree expr = *expr_p, cond;
4248 enum gimplify_status ret, tret;
4249 enum tree_code code;
4250
4251 cond = gimple_boolify (COND_EXPR_COND (expr));
4252
4253 /* We need to handle && and || specially, as their gimplification
4254 creates pure cond_expr, thus leading to an infinite cycle otherwise. */
4255 code = TREE_CODE (cond);
4256 if (code == TRUTH_ANDIF_EXPR)
4257 TREE_SET_CODE (cond, TRUTH_AND_EXPR);
4258 else if (code == TRUTH_ORIF_EXPR)
4259 TREE_SET_CODE (cond, TRUTH_OR_EXPR);
4260 ret = gimplify_expr (&cond, pre_p, NULL, is_gimple_condexpr, fb_rvalue);
4261 COND_EXPR_COND (*expr_p) = cond;
4262
4263 tret = gimplify_expr (&COND_EXPR_THEN (expr), pre_p, NULL,
4264 is_gimple_val, fb_rvalue);
4265 ret = MIN (ret, tret);
4266 tret = gimplify_expr (&COND_EXPR_ELSE (expr), pre_p, NULL,
4267 is_gimple_val, fb_rvalue);
4268
4269 return MIN (ret, tret);
4270 }
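/* Sketch: for a side-effect-free RHS such as

     t = p ? a : b;

   this keeps the COND_EXPR as a value and only reduces its operands to
   GIMPLE values, instead of expanding control flow; && and || in the
   predicate are demoted to their non-short-circuit TRUTH_AND/TRUTH_OR
   forms, since gimplifying them would otherwise re-create a pure
   COND_EXPR and cycle forever.  */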
4271
4272 /* Return true if evaluating EXPR could trap.
4273 EXPR is GENERIC, while tree_could_trap_p can be called
4274 only on GIMPLE. */
4275
4276 bool
4277 generic_expr_could_trap_p (tree expr)
4278 {
4279 unsigned i, n;
4280
4281 if (!expr || is_gimple_val (expr))
4282 return false;
4283
4284 if (!EXPR_P (expr) || tree_could_trap_p (expr))
4285 return true;
4286
4287 n = TREE_OPERAND_LENGTH (expr);
4288 for (i = 0; i < n; i++)
4289 if (generic_expr_could_trap_p (TREE_OPERAND (expr, i)))
4290 return true;
4291
4292 return false;
4293 }
4294
4295 /* Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
4296 into
4297
4298 if (p) if (p)
4299 t1 = a; a;
4300 else or else
4301 t1 = b; b;
4302 t1;
4303
4304 The second form is used when *EXPR_P is of type void.
4305
4306 PRE_P points to the list where side effects that must happen before
4307 *EXPR_P should be stored. */
4308
4309 static enum gimplify_status
4310 gimplify_cond_expr (tree *expr_p, gimple_seq *pre_p, fallback_t fallback)
4311 {
4312 tree expr = *expr_p;
4313 tree type = TREE_TYPE (expr);
4314 location_t loc = EXPR_LOCATION (expr);
4315 tree tmp, arm1, arm2;
4316 enum gimplify_status ret;
4317 tree label_true, label_false, label_cont;
4318 bool have_then_clause_p, have_else_clause_p;
4319 gcond *cond_stmt;
4320 enum tree_code pred_code;
4321 gimple_seq seq = NULL;
4322
4323 /* If this COND_EXPR has a value, copy the values into a temporary within
4324 the arms. */
4325 if (!VOID_TYPE_P (type))
4326 {
4327 tree then_ = TREE_OPERAND (expr, 1), else_ = TREE_OPERAND (expr, 2);
4328 tree result;
4329
4330 /* If either an rvalue is ok or we do not require an lvalue, create the
4331 temporary. But we cannot do that if the type is addressable. */
4332 if (((fallback & fb_rvalue) || !(fallback & fb_lvalue))
4333 && !TREE_ADDRESSABLE (type))
4334 {
4335 if (gimplify_ctxp->allow_rhs_cond_expr
4336 /* If either branch has side effects or could trap, it can't be
4337 evaluated unconditionally. */
4338 && !TREE_SIDE_EFFECTS (then_)
4339 && !generic_expr_could_trap_p (then_)
4340 && !TREE_SIDE_EFFECTS (else_)
4341 && !generic_expr_could_trap_p (else_))
4342 return gimplify_pure_cond_expr (expr_p, pre_p);
4343
4344 tmp = create_tmp_var (type, "iftmp");
4345 result = tmp;
4346 }
4347
4348 /* Otherwise, only create and copy references to the values. */
4349 else
4350 {
4351 type = build_pointer_type (type);
4352
4353 if (!VOID_TYPE_P (TREE_TYPE (then_)))
4354 then_ = build_fold_addr_expr_loc (loc, then_);
4355
4356 if (!VOID_TYPE_P (TREE_TYPE (else_)))
4357 else_ = build_fold_addr_expr_loc (loc, else_);
4358
4359 expr
4360 = build3 (COND_EXPR, type, TREE_OPERAND (expr, 0), then_, else_);
4361
4362 tmp = create_tmp_var (type, "iftmp");
4363 result = build_simple_mem_ref_loc (loc, tmp);
4364 }
4365
4366 /* Build the new then clause, `tmp = then_;'. But don't build the
4367 assignment if the value is void; in C++ it can be if it's a throw. */
4368 if (!VOID_TYPE_P (TREE_TYPE (then_)))
4369 TREE_OPERAND (expr, 1) = build2 (INIT_EXPR, type, tmp, then_);
4370
4371 /* Similarly, build the new else clause, `tmp = else_;'. */
4372 if (!VOID_TYPE_P (TREE_TYPE (else_)))
4373 TREE_OPERAND (expr, 2) = build2 (INIT_EXPR, type, tmp, else_);
4374
4375 TREE_TYPE (expr) = void_type_node;
4376 recalculate_side_effects (expr);
4377
4378 /* Move the COND_EXPR to the prequeue. */
4379 gimplify_stmt (&expr, pre_p);
4380
4381 *expr_p = result;
4382 return GS_ALL_DONE;
4383 }
4384
4385 /* Remove any COMPOUND_EXPR so the following cases will be caught. */
4386 STRIP_TYPE_NOPS (TREE_OPERAND (expr, 0));
4387 if (TREE_CODE (TREE_OPERAND (expr, 0)) == COMPOUND_EXPR)
4388 gimplify_compound_expr (&TREE_OPERAND (expr, 0), pre_p, true);
4389
4390 /* Make sure the condition has BOOLEAN_TYPE. */
4391 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
4392
4393 /* Break apart && and || conditions. */
4394 if (TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ANDIF_EXPR
4395 || TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ORIF_EXPR)
4396 {
4397 expr = shortcut_cond_expr (expr);
4398
4399 if (expr != *expr_p)
4400 {
4401 *expr_p = expr;
4402
4403 /* We can't rely on gimplify_expr to re-gimplify the expanded
4404 form properly, as cleanups might cause the target labels to be
4405 wrapped in a TRY_FINALLY_EXPR. To prevent that, we need to
4406 set up a conditional context. */
4407 gimple_push_condition ();
4408 gimplify_stmt (expr_p, &seq);
4409 gimple_pop_condition (pre_p);
4410 gimple_seq_add_seq (pre_p, seq);
4411
4412 return GS_ALL_DONE;
4413 }
4414 }
4415
4416 /* Now do the normal gimplification. */
4417
4418 /* Gimplify condition. */
4419 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL,
4420 is_gimple_condexpr_for_cond, fb_rvalue);
4421 if (ret == GS_ERROR)
4422 return GS_ERROR;
4423 gcc_assert (TREE_OPERAND (expr, 0) != NULL_TREE);
4424
4425 gimple_push_condition ();
4426
4427 have_then_clause_p = have_else_clause_p = false;
4428 label_true = find_goto_label (TREE_OPERAND (expr, 1));
4429 if (label_true
4430 && DECL_CONTEXT (GOTO_DESTINATION (label_true)) == current_function_decl
4431 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
4432 have different locations, otherwise we end up with incorrect
4433 location information on the branches. */
4434 && (optimize
4435 || !EXPR_HAS_LOCATION (expr)
4436 || !rexpr_has_location (label_true)
4437 || EXPR_LOCATION (expr) == rexpr_location (label_true)))
4438 {
4439 have_then_clause_p = true;
4440 label_true = GOTO_DESTINATION (label_true);
4441 }
4442 else
4443 label_true = create_artificial_label (UNKNOWN_LOCATION);
4444 label_false = find_goto_label (TREE_OPERAND (expr, 2));
4445 if (label_false
4446 && DECL_CONTEXT (GOTO_DESTINATION (label_false)) == current_function_decl
4447 /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
4448 have different locations, otherwise we end up with incorrect
4449 location information on the branches. */
4450 && (optimize
4451 || !EXPR_HAS_LOCATION (expr)
4452 || !rexpr_has_location (label_false)
4453 || EXPR_LOCATION (expr) == rexpr_location (label_false)))
4454 {
4455 have_else_clause_p = true;
4456 label_false = GOTO_DESTINATION (label_false);
4457 }
4458 else
4459 label_false = create_artificial_label (UNKNOWN_LOCATION);
4460
4461 gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr), &pred_code, &arm1,
4462 &arm2);
4463 cond_stmt = gimple_build_cond (pred_code, arm1, arm2, label_true,
4464 label_false);
4465 gimple_set_location (cond_stmt, EXPR_LOCATION (expr));
4466 copy_warning (cond_stmt, COND_EXPR_COND (expr));
4467 gimplify_seq_add_stmt (&seq, cond_stmt);
4468 gimple_stmt_iterator gsi = gsi_last (seq);
4469 maybe_fold_stmt (&gsi);
4470
4471 label_cont = NULL_TREE;
4472 if (!have_then_clause_p)
4473 {
4474 /* For if (...) {} else { code; } put label_true after
4475 the else block. */
4476 if (TREE_OPERAND (expr, 1) == NULL_TREE
4477 && !have_else_clause_p
4478 && TREE_OPERAND (expr, 2) != NULL_TREE)
4479 {
4480 /* For if (0) {} else { code; } tell -Wimplicit-fallthrough
4481 handling that label_cont == label_true can be only reached
4482 through fallthrough from { code; }. */
4483 if (integer_zerop (COND_EXPR_COND (expr)))
4484 UNUSED_LABEL_P (label_true) = 1;
4485 label_cont = label_true;
4486 }
4487 else
4488 {
4489 bool then_side_effects
4490 = (TREE_OPERAND (expr, 1)
4491 && TREE_SIDE_EFFECTS (TREE_OPERAND (expr, 1)));
4492 gimplify_seq_add_stmt (&seq, gimple_build_label (label_true));
4493 have_then_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 1), &seq);
4494 /* For if (...) { code; } else {} or
4495 if (...) { code; } else goto label; or
4496 if (...) { code; return; } else { ... }
4497 label_cont isn't needed. */
4498 if (!have_else_clause_p
4499 && TREE_OPERAND (expr, 2) != NULL_TREE
4500 && gimple_seq_may_fallthru (seq))
4501 {
4502 gimple *g;
4503 label_cont = create_artificial_label (UNKNOWN_LOCATION);
4504
4505 /* For if (0) { non-side-effect-code } else { code }
4506 tell -Wimplicit-fallthrough handling that label_cont can
4507 be only reached through fallthrough from { code }. */
4508 if (integer_zerop (COND_EXPR_COND (expr)))
4509 {
4510 UNUSED_LABEL_P (label_true) = 1;
4511 if (!then_side_effects)
4512 UNUSED_LABEL_P (label_cont) = 1;
4513 }
4514
4515 g = gimple_build_goto (label_cont);
4516
4517 /* GIMPLE_COND's are very low level; they have embedded
4518 gotos. This particular embedded goto should not be marked
4519 with the location of the original COND_EXPR, as it would
4520 correspond to the COND_EXPR's condition, not the ELSE or the
4521 THEN arms. To avoid marking it with the wrong location, flag
4522 it as "no location". */
4523 gimple_set_do_not_emit_location (g);
4524
4525 gimplify_seq_add_stmt (&seq, g);
4526 }
4527 }
4528 }
4529 if (!have_else_clause_p)
4530 {
4531 /* For if (1) { code } or if (1) { code } else { non-side-effect-code }
4532 tell -Wimplicit-fallthrough handling that label_false can be only
4533 reached through fallthrough from { code }. */
4534 if (integer_nonzerop (COND_EXPR_COND (expr))
4535 && (TREE_OPERAND (expr, 2) == NULL_TREE
4536 || !TREE_SIDE_EFFECTS (TREE_OPERAND (expr, 2))))
4537 UNUSED_LABEL_P (label_false) = 1;
4538 gimplify_seq_add_stmt (&seq, gimple_build_label (label_false));
4539 have_else_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 2), &seq);
4540 }
4541 if (label_cont)
4542 gimplify_seq_add_stmt (&seq, gimple_build_label (label_cont));
4543
4544 gimple_pop_condition (pre_p);
4545 gimple_seq_add_seq (pre_p, seq);
4546
4547 if (ret == GS_ERROR)
4548 ; /* Do nothing. */
4549 else if (have_then_clause_p || have_else_clause_p)
4550 ret = GS_ALL_DONE;
4551 else
4552 {
4553 /* Both arms are empty; replace the COND_EXPR with its predicate. */
4554 expr = TREE_OPERAND (expr, 0);
4555 gimplify_stmt (&expr, pre_p);
4556 }
4557
4558 *expr_p = NULL;
4559 return ret;
4560 }
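/* Sketch of the valued case (temporary and labels invented):

     t = p ? f () : g ();

   becomes, via the "iftmp" temporary created above,

     if (p) goto T; else goto F;
   T: iftmp = f (); goto CONT;
   F: iftmp = g ();
   CONT:
     t = iftmp;

   while the void case emits only the GIMPLE_COND plus labelled arms,
   hijacking existing goto labels where possible.  */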
4561
4562 /* Prepare the node pointed to by EXPR_P, an is_gimple_addressable expression,
4563 to be marked addressable.
4564
4565 We cannot rely on such an expression being directly markable if a temporary
4566 has been created by the gimplification. In this case, we create another
4567 temporary and initialize it with a copy, which will become a store after we
4568 mark it addressable. This can happen if the front-end passed us something
4569 that it could not mark addressable yet, like a Fortran pass-by-reference
4570 parameter (int) floatvar. */
4571
4572 static void
4573 prepare_gimple_addressable (tree *expr_p, gimple_seq *seq_p)
4574 {
4575 while (handled_component_p (*expr_p))
4576 expr_p = &TREE_OPERAND (*expr_p, 0);
4577 if (is_gimple_reg (*expr_p))
4578 {
4579 /* Do not allow an SSA name as the temporary. */
4580 tree var = get_initialized_tmp_var (*expr_p, seq_p, NULL, false);
4581 DECL_NOT_GIMPLE_REG_P (var) = 1;
4582 *expr_p = var;
4583 }
4584 }
4585
4586 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
4587 a call to __builtin_memcpy. */
4588
4589 static enum gimplify_status
4590 gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value,
4591 gimple_seq *seq_p)
4592 {
4593 tree t, to, to_ptr, from, from_ptr;
4594 gcall *gs;
4595 location_t loc = EXPR_LOCATION (*expr_p);
4596
4597 to = TREE_OPERAND (*expr_p, 0);
4598 from = TREE_OPERAND (*expr_p, 1);
4599
4600 /* Mark the RHS addressable. Beware that it may not be possible to do so
4601 directly if a temporary has been created by the gimplification. */
4602 prepare_gimple_addressable (&from, seq_p);
4603
4604 mark_addressable (from);
4605 from_ptr = build_fold_addr_expr_loc (loc, from);
4606 gimplify_arg (&from_ptr, seq_p, loc);
4607
4608 mark_addressable (to);
4609 to_ptr = build_fold_addr_expr_loc (loc, to);
4610 gimplify_arg (&to_ptr, seq_p, loc);
4611
4612 t = builtin_decl_implicit (BUILT_IN_MEMCPY);
4613
4614 gs = gimple_build_call (t, 3, to_ptr, from_ptr, size);
4615 gimple_call_set_alloca_for_var (gs, true);
4616
4617 if (want_value)
4618 {
4619 /* tmp = memcpy() */
4620 t = create_tmp_var (TREE_TYPE (to_ptr));
4621 gimple_call_set_lhs (gs, t);
4622 gimplify_seq_add_stmt (seq_p, gs);
4623
4624 *expr_p = build_simple_mem_ref (t);
4625 return GS_ALL_DONE;
4626 }
4627
4628 gimplify_seq_add_stmt (seq_p, gs);
4629 *expr_p = NULL;
4630 return GS_ALL_DONE;
4631 }
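/* Sketch: an aggregate copy that cannot be done as a plain assignment,
   e.g. between two variable-sized objects A and B,

     a = b;

   is emitted as

     __builtin_memcpy (&a, &b, size);

   where SIZE was attached earlier by maybe_with_size_expr; both operands
   are marked addressable first.  */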
4632
4633 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
4634 a call to __builtin_memset. In this case we know that the RHS is
4635 a CONSTRUCTOR with an empty element list. */
4636
4637 static enum gimplify_status
4638 gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value,
4639 gimple_seq *seq_p)
4640 {
4641 tree t, from, to, to_ptr;
4642 gcall *gs;
4643 location_t loc = EXPR_LOCATION (*expr_p);
4644
4645 /* Assert our assumptions, to abort instead of producing wrong code
4646 silently if they are not met. Beware that the RHS CONSTRUCTOR might
4647 not be immediately exposed. */
4648 from = TREE_OPERAND (*expr_p, 1);
4649 if (TREE_CODE (from) == WITH_SIZE_EXPR)
4650 from = TREE_OPERAND (from, 0);
4651
4652 gcc_assert (TREE_CODE (from) == CONSTRUCTOR
4653 && vec_safe_is_empty (CONSTRUCTOR_ELTS (from)));
4654
4655 /* Now proceed. */
4656 to = TREE_OPERAND (*expr_p, 0);
4657
4658 to_ptr = build_fold_addr_expr_loc (loc, to);
4659 gimplify_arg (&to_ptr, seq_p, loc);
4660 t = builtin_decl_implicit (BUILT_IN_MEMSET);
4661
4662 gs = gimple_build_call (t, 3, to_ptr, integer_zero_node, size);
4663
4664 if (want_value)
4665 {
4666 /* tmp = memset() */
4667 t = create_tmp_var (TREE_TYPE (to_ptr));
4668 gimple_call_set_lhs (gs, t);
4669 gimplify_seq_add_stmt (seq_p, gs);
4670
4671 *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t);
4672 return GS_ALL_DONE;
4673 }
4674
4675 gimplify_seq_add_stmt (seq_p, gs);
4676 *expr_p = NULL;
4677 return GS_ALL_DONE;
4678 }
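/* Sketch: an assignment whose RHS is an empty CONSTRUCTOR, e.g.

     struct S s;
     ...
     s = (struct S) { };

   is emitted, when this path is chosen, as

     __builtin_memset (&s, 0, size);

   with SIZE supplied by the caller (typically the type size, possibly
   from a WITH_SIZE_EXPR).  */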
4679
4680 /* A subroutine of gimplify_init_ctor_preeval. Called via walk_tree,
4681 determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
4682 assignment. Return non-null if we detect a potential overlap. */
4683
4684 struct gimplify_init_ctor_preeval_data
4685 {
4686 /* The base decl of the lhs object. May be NULL, in which case we
4687 have to assume the lhs is indirect. */
4688 tree lhs_base_decl;
4689
4690 /* The alias set of the lhs object. */
4691 alias_set_type lhs_alias_set;
4692 };
4693
4694 static tree
4695 gimplify_init_ctor_preeval_1 (tree *tp, int *walk_subtrees, void *xdata)
4696 {
4697 struct gimplify_init_ctor_preeval_data *data
4698 = (struct gimplify_init_ctor_preeval_data *) xdata;
4699 tree t = *tp;
4700
4701 /* If we find the base object, obviously we have overlap. */
4702 if (data->lhs_base_decl == t)
4703 return t;
4704
4705 /* If the constructor component is indirect, determine if we have a
4706 potential overlap with the lhs. The only bits of information we
4707 have to go on at this point are addressability and alias sets. */
4708 if ((INDIRECT_REF_P (t)
4709 || TREE_CODE (t) == MEM_REF)
4710 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
4711 && alias_sets_conflict_p (data->lhs_alias_set, get_alias_set (t)))
4712 return t;
4713
4714 /* If the constructor component is a call, determine if it can hide a
4715 potential overlap with the lhs through an INDIRECT_REF like above.
4716 ??? Ugh - this is completely broken. In fact this whole analysis
4717 doesn't look conservative. */
4718 if (TREE_CODE (t) == CALL_EXPR)
4719 {
4720 tree type, fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t)));
4721
4722 for (type = TYPE_ARG_TYPES (fntype); type; type = TREE_CHAIN (type))
4723 if (POINTER_TYPE_P (TREE_VALUE (type))
4724 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
4725 && alias_sets_conflict_p (data->lhs_alias_set,
4726 get_alias_set
4727 (TREE_TYPE (TREE_VALUE (type)))))
4728 return t;
4729 }
4730
4731 if (IS_TYPE_OR_DECL_P (t))
4732 *walk_subtrees = 0;
4733 return NULL;
4734 }
4735
4736 /* A subroutine of gimplify_init_constructor. Pre-evaluate EXPR,
4737 force values that overlap with the lhs (as described by *DATA)
4738 into temporaries. */
4739
4740 static void
4741 gimplify_init_ctor_preeval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
4742 struct gimplify_init_ctor_preeval_data *data)
4743 {
4744 enum gimplify_status one;
4745
4746 /* If the value is constant, then there's nothing to pre-evaluate. */
4747 if (TREE_CONSTANT (*expr_p))
4748 {
4749 /* Ensure it does not have side effects, it might contain a reference to
4750 the object we're initializing. */
4751 gcc_assert (!TREE_SIDE_EFFECTS (*expr_p));
4752 return;
4753 }
4754
4755 /* If the type has non-trivial constructors, we can't pre-evaluate. */
4756 if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p)))
4757 return;
4758
4759 /* Recurse for nested constructors. */
4760 if (TREE_CODE (*expr_p) == CONSTRUCTOR)
4761 {
4762 unsigned HOST_WIDE_INT ix;
4763 constructor_elt *ce;
4764 vec<constructor_elt, va_gc> *v = CONSTRUCTOR_ELTS (*expr_p);
4765
4766 FOR_EACH_VEC_SAFE_ELT (v, ix, ce)
4767 gimplify_init_ctor_preeval (&ce->value, pre_p, post_p, data);
4768
4769 return;
4770 }
4771
4772 /* If this is a variable sized type, we must remember the size. */
4773 maybe_with_size_expr (expr_p);
4774
4775 /* Gimplify the constructor element to something appropriate for the rhs
4776 of a MODIFY_EXPR. Given that we know the LHS is an aggregate, we know
4777 the gimplifier will consider this a store to memory. Doing this
4778 gimplification now means that we won't have to deal with complicated
4779 language-specific trees, nor trees like SAVE_EXPR that can induce
4780 exponential search behavior. */
4781 one = gimplify_expr (expr_p, pre_p, post_p, is_gimple_mem_rhs, fb_rvalue);
4782 if (one == GS_ERROR)
4783 {
4784 *expr_p = NULL;
4785 return;
4786 }
4787
4788 /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
4789 with the lhs, since "a = { .x=a }" doesn't make sense. This will
4790 always be true for all scalars, since is_gimple_mem_rhs insists on a
4791 temporary variable for them. */
4792 if (DECL_P (*expr_p))
4793 return;
4794
4795 /* If this is of variable size, we have no choice but to assume it doesn't
4796 overlap since we can't make a temporary for it. */
4797 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p))) != INTEGER_CST)
4798 return;
4799
4800 /* Otherwise, we must search for overlap ... */
4801 if (!walk_tree (expr_p, gimplify_init_ctor_preeval_1, data, NULL))
4802 return;
4803
4804 /* ... and if found, force the value into a temporary. */
4805 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
4806 }
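/* Sketch of the overlap problem being solved (temporary invented):

     a = (struct S) { .x = a.y, .y = 0 };

   Emitting the element stores directly could clobber a.y before it is
   read, so the value is forced into a temporary first (either during
   gimplification of the scalar, or by the explicit overlap walk above):

     tmp = a.y;
     a.x = tmp;
     a.y = 0;

   Constants and bare decls are proven non-overlapping and skipped.  */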
4807
4808 /* A subroutine of gimplify_init_ctor_eval. Create a loop for
4809 a RANGE_EXPR in a CONSTRUCTOR for an array.
4810
4811 var = lower;
4812 loop_entry:
4813 object[var] = value;
4814 if (var == upper)
4815 goto loop_exit;
4816 var = var + 1;
4817 goto loop_entry;
4818 loop_exit:
4819
4820 We increment var _after_ the loop exit check because we might otherwise
4821 fail if upper == TYPE_MAX_VALUE (type for upper).
4822
4823 Note that we never have to deal with SAVE_EXPRs here, because this has
4824 already been taken care of for us, in gimplify_init_ctor_preeval(). */
4825
4826 static void gimplify_init_ctor_eval (tree, vec<constructor_elt, va_gc> *,
4827 gimple_seq *, bool);
4828
4829 static void
4830 gimplify_init_ctor_eval_range (tree object, tree lower, tree upper,
4831 tree value, tree array_elt_type,
4832 gimple_seq *pre_p, bool cleared)
4833 {
4834 tree loop_entry_label, loop_exit_label, fall_thru_label;
4835 tree var, var_type, cref, tmp;
4836
4837 loop_entry_label = create_artificial_label (UNKNOWN_LOCATION);
4838 loop_exit_label = create_artificial_label (UNKNOWN_LOCATION);
4839 fall_thru_label = create_artificial_label (UNKNOWN_LOCATION);
4840
4841 /* Create and initialize the index variable. */
4842 var_type = TREE_TYPE (upper);
4843 var = create_tmp_var (var_type);
4844 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, lower));
4845
4846 /* Add the loop entry label. */
4847 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_entry_label));
4848
4849 /* Build the reference. */
4850 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
4851 var, NULL_TREE, NULL_TREE);
4852
4853 /* If we are a constructor, just call gimplify_init_ctor_eval to do
4854 the store. Otherwise just assign value to the reference. */
4855
4856 if (TREE_CODE (value) == CONSTRUCTOR)
4857 /* NB we might have to call ourselves recursively through
4858 gimplify_init_ctor_eval if the value is a constructor. */
4859 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
4860 pre_p, cleared);
4861 else
4862 {
4863 if (gimplify_expr (&value, pre_p, NULL, is_gimple_val, fb_rvalue)
4864 != GS_ERROR)
4865 gimplify_seq_add_stmt (pre_p, gimple_build_assign (cref, value));
4866 }
4867
4868 /* We exit the loop when the index var is equal to the upper bound. */
4869 gimplify_seq_add_stmt (pre_p,
4870 gimple_build_cond (EQ_EXPR, var, upper,
4871 loop_exit_label, fall_thru_label));
4872
4873 gimplify_seq_add_stmt (pre_p, gimple_build_label (fall_thru_label));
4874
4875 /* Otherwise, increment the index var... */
4876 tmp = build2 (PLUS_EXPR, var_type, var,
4877 fold_convert (var_type, integer_one_node));
4878 gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, tmp));
4879
4880 /* ...and jump back to the loop entry. */
4881 gimplify_seq_add_stmt (pre_p, gimple_build_goto (loop_entry_label));
4882
4883 /* Add the loop exit label. */
4884 gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_exit_label));
4885 }
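/* Sketch: a GNU designated range initializer such as

     int a[100] = { [10 ... 19] = v };

   takes this path and becomes the loop shown in the comment above, with
   LOWER == 10, UPPER == 19 and "object[var] = v" as the loop body.  */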
4886
4887 /* A subroutine of gimplify_init_constructor. Generate individual
4888 MODIFY_EXPRs for a CONSTRUCTOR. OBJECT is the LHS against which the
4889 assignments should happen. ELTS is the CONSTRUCTOR_ELTS of the
4890 CONSTRUCTOR. CLEARED is true if the entire LHS object has been
4891 zeroed first. */
4892
4893 static void
4894 gimplify_init_ctor_eval (tree object, vec<constructor_elt, va_gc> *elts,
4895 gimple_seq *pre_p, bool cleared)
4896 {
4897 tree array_elt_type = NULL;
4898 unsigned HOST_WIDE_INT ix;
4899 tree purpose, value;
4900
4901 if (TREE_CODE (TREE_TYPE (object)) == ARRAY_TYPE)
4902 array_elt_type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object)));
4903
4904 FOR_EACH_CONSTRUCTOR_ELT (elts, ix, purpose, value)
4905 {
4906 tree cref;
4907
4908 /* NULL values are created above for gimplification errors. */
4909 if (value == NULL)
4910 continue;
4911
4912 if (cleared && initializer_zerop (value))
4913 continue;
4914
4915 /* ??? Here's to hoping the front end fills in all of the indices,
4916 so we don't have to figure out what's missing ourselves. */
4917 gcc_assert (purpose);
4918
4919 /* Skip zero-sized fields, unless value has side-effects. This can
4920 happen with calls to functions returning an empty type, which
4921 we shouldn't discard. As a number of downstream passes don't
4922 expect sets of empty type fields, we rely on the gimplification of
4923 the MODIFY_EXPR we make below to drop the assignment statement. */
4924 if (!TREE_SIDE_EFFECTS (value)
4925 && TREE_CODE (purpose) == FIELD_DECL
4926 && is_empty_type (TREE_TYPE (purpose)))
4927 continue;
4928
4929 /* If we have a RANGE_EXPR, we have to build a loop to assign the
4930 whole range. */
4931 if (TREE_CODE (purpose) == RANGE_EXPR)
4932 {
4933 tree lower = TREE_OPERAND (purpose, 0);
4934 tree upper = TREE_OPERAND (purpose, 1);
4935
4936 /* If the lower bound is equal to the upper bound, just treat it as
4937 if upper were the index. */
4938 if (simple_cst_equal (lower, upper))
4939 purpose = upper;
4940 else
4941 {
4942 gimplify_init_ctor_eval_range (object, lower, upper, value,
4943 array_elt_type, pre_p, cleared);
4944 continue;
4945 }
4946 }
4947
4948 if (array_elt_type)
4949 {
4950 /* Do not use bitsizetype for ARRAY_REF indices. */
4951 if (TYPE_DOMAIN (TREE_TYPE (object)))
4952 purpose
4953 = fold_convert (TREE_TYPE (TYPE_DOMAIN (TREE_TYPE (object))),
4954 purpose);
4955 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
4956 purpose, NULL_TREE, NULL_TREE);
4957 }
4958 else
4959 {
4960 gcc_assert (TREE_CODE (purpose) == FIELD_DECL);
4961 cref = build3 (COMPONENT_REF, TREE_TYPE (purpose),
4962 unshare_expr (object), purpose, NULL_TREE);
4963 }
4964
4965 if (TREE_CODE (value) == CONSTRUCTOR
4966 && TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE)
4967 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
4968 pre_p, cleared);
4969 else
4970 {
4971 tree init = build2 (INIT_EXPR, TREE_TYPE (cref), cref, value);
4972 gimplify_and_add (init, pre_p);
4973 ggc_free (init);
4974 }
4975 }
4976 }
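/* Sketch: with CLEARED true, e.g. for

     struct P q = { .y = 2 };      (other fields already zeroed)

   only the nonzero elements survive as individual assignments,

     q = {};                       (or a memset, decided by the caller)
     q.y = 2;

   while zero-valued elements are skipped by the initializer_zerop test
   above.  */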
4977
4978 /* Return the appropriate RHS predicate for this LHS. */
4979
4980 gimple_predicate
4981 rhs_predicate_for (tree lhs)
4982 {
4983 if (is_gimple_reg (lhs))
4984 return is_gimple_reg_rhs_or_call;
4985 else
4986 return is_gimple_mem_rhs_or_call;
4987 }
4988
4989 /* Return the initial guess for an appropriate RHS predicate for this LHS,
4990 before the LHS has been gimplified. */
4991
4992 static gimple_predicate
4993 initial_rhs_predicate_for (tree lhs)
4994 {
4995 if (is_gimple_reg_type (TREE_TYPE (lhs)))
4996 return is_gimple_reg_rhs_or_call;
4997 else
4998 return is_gimple_mem_rhs_or_call;
4999 }
5000
5001 /* Gimplify a C99 compound literal expression. This just means adding
5002 the DECL_EXPR before the current statement and using its anonymous
5003 decl instead. */
5004
5005 static enum gimplify_status
5006 gimplify_compound_literal_expr (tree *expr_p, gimple_seq *pre_p,
5007 bool (*gimple_test_f) (tree),
5008 fallback_t fallback)
5009 {
5010 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (*expr_p);
5011 tree decl = DECL_EXPR_DECL (decl_s);
5012 tree init = DECL_INITIAL (decl);
5013 /* Mark the decl as addressable if the compound literal
5014 expression is addressable now, otherwise it is marked too late
5015 after we gimplify the initialization expression. */
5016 if (TREE_ADDRESSABLE (*expr_p))
5017 TREE_ADDRESSABLE (decl) = 1;
5018 /* Otherwise, if we don't need an lvalue and have a literal, directly
5019 substitute it. Check that it matches the gimple predicate, as
5020 otherwise we'd generate a new temporary, and we can as well just
5021 use the decl we already have. */
5022 else if (!TREE_ADDRESSABLE (decl)
5023 && !TREE_THIS_VOLATILE (decl)
5024 && init
5025 && (fallback & fb_lvalue) == 0
5026 && gimple_test_f (init))
5027 {
5028 *expr_p = init;
5029 return GS_OK;
5030 }
5031
5032 /* If the decl is not addressable, then it is being used in some
5033 expression or on the right hand side of a statement, and it can
5034 be put into a readonly data section. */
5035 if (!TREE_ADDRESSABLE (decl) && (fallback & fb_lvalue) == 0)
5036 TREE_READONLY (decl) = 1;
5037
5038 /* This decl isn't mentioned in the enclosing block, so add it to the
5039 list of temps. FIXME it seems a bit of a kludge to say that
5040 anonymous artificial vars aren't pushed, but everything else is. */
5041 if (DECL_NAME (decl) == NULL_TREE && !DECL_SEEN_IN_BIND_EXPR_P (decl))
5042 gimple_add_tmp_var (decl);
5043
5044 gimplify_and_add (decl_s, pre_p);
5045 *expr_p = decl;
5046 return GS_OK;
5047 }
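/* Sketch: in

     int *p = (int []) { 1, 2, 3 };

   the compound literal's anonymous DECL is added to the temporaries,
   its DECL_EXPR (and hence the initialization) is gimplified into
   PRE_P, and *EXPR_P becomes the decl itself; a literal that already
   satisfies GIMPLE_TEST_F is substituted directly instead.  */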
5048
5049 /* Optimize embedded COMPOUND_LITERAL_EXPRs within a CONSTRUCTOR,
5050 return a new CONSTRUCTOR if something changed. */
5051
5052 static tree
5053 optimize_compound_literals_in_ctor (tree orig_ctor)
5054 {
5055 tree ctor = orig_ctor;
5056 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (ctor);
5057 unsigned int idx, num = vec_safe_length (elts);
5058
5059 for (idx = 0; idx < num; idx++)
5060 {
5061 tree value = (*elts)[idx].value;
5062 tree newval = value;
5063 if (TREE_CODE (value) == CONSTRUCTOR)
5064 newval = optimize_compound_literals_in_ctor (value);
5065 else if (TREE_CODE (value) == COMPOUND_LITERAL_EXPR)
5066 {
5067 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (value);
5068 tree decl = DECL_EXPR_DECL (decl_s);
5069 tree init = DECL_INITIAL (decl);
5070
5071 if (!TREE_ADDRESSABLE (value)
5072 && !TREE_ADDRESSABLE (decl)
5073 && init
5074 && TREE_CODE (init) == CONSTRUCTOR)
5075 newval = optimize_compound_literals_in_ctor (init);
5076 }
5077 if (newval == value)
5078 continue;
5079
5080 if (ctor == orig_ctor)
5081 {
5082 ctor = copy_node (orig_ctor);
5083 CONSTRUCTOR_ELTS (ctor) = vec_safe_copy (elts);
5084 elts = CONSTRUCTOR_ELTS (ctor);
5085 }
5086 (*elts)[idx].value = newval;
5087 }
5088 return ctor;
5089 }
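/* Sketch: a compound literal nested inside an initializer,

     struct T t = { .s = (struct S) { 1, 2 } };

   is flattened so the inner CONSTRUCTOR { 1, 2 } is used directly,
   avoiding a pointless temporary for the inner literal; a new outer
   CONSTRUCTOR is copied only if something actually changed.  */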
5090
5091 /* A subroutine of gimplify_modify_expr. Break out elements of a
5092 CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.
5093
5094 Note that we still need to clear any elements that don't have explicit
5095 initializers, so if not all elements are initialized we keep the
5096 original MODIFY_EXPR; we just remove all of the constructor elements.
5097
5098 If NOTIFY_TEMP_CREATION is true, do not gimplify, just return
5099 GS_ERROR if we would have to create a temporary when gimplifying
5100 this constructor. Otherwise, return GS_OK.
5101
5102 If NOTIFY_TEMP_CREATION is false, just do the gimplification. */
5103
5104 static enum gimplify_status
5105 gimplify_init_constructor (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
5106 bool want_value, bool notify_temp_creation)
5107 {
5108 tree object, ctor, type;
5109 enum gimplify_status ret;
5110 vec<constructor_elt, va_gc> *elts;
5111 bool cleared = false;
5112 bool is_empty_ctor = false;
5113 bool is_init_expr = (TREE_CODE (*expr_p) == INIT_EXPR);
5114
5115 gcc_assert (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == CONSTRUCTOR);
5116
5117 if (!notify_temp_creation)
5118 {
5119 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
5120 is_gimple_lvalue, fb_lvalue);
5121 if (ret == GS_ERROR)
5122 return ret;
5123 }
5124
5125 object = TREE_OPERAND (*expr_p, 0);
5126 ctor = TREE_OPERAND (*expr_p, 1)
5127 = optimize_compound_literals_in_ctor (TREE_OPERAND (*expr_p, 1));
5128 type = TREE_TYPE (ctor);
5129 elts = CONSTRUCTOR_ELTS (ctor);
5130 ret = GS_ALL_DONE;
5131
5132 switch (TREE_CODE (type))
5133 {
5134 case RECORD_TYPE:
5135 case UNION_TYPE:
5136 case QUAL_UNION_TYPE:
5137 case ARRAY_TYPE:
5138 {
5139 /* Use readonly data for initializers of this or smaller size
5140 regardless of the num_nonzero_elements / num_unique_nonzero_elements
5141 ratio. */
5142 const HOST_WIDE_INT min_unique_size = 64;
5143 /* If num_nonzero_elements / num_unique_nonzero_elements ratio
5144 is smaller than this, use readonly data. */
5145 const int unique_nonzero_ratio = 8;
5146 /* True if a single access of the object must be ensured. This is the
5147 case if the target is volatile, the type is non-addressable and more
5148 than one field needs to be assigned.  */
5149 const bool ensure_single_access
5150 = TREE_THIS_VOLATILE (object)
5151 && !TREE_ADDRESSABLE (type)
5152 && vec_safe_length (elts) > 1;
5153 struct gimplify_init_ctor_preeval_data preeval_data;
5154 HOST_WIDE_INT num_ctor_elements, num_nonzero_elements;
5155 HOST_WIDE_INT num_unique_nonzero_elements;
5156 bool complete_p, valid_const_initializer;
5157
5158 /* Aggregate types must lower constructors to initialization of
5159 individual elements. The exception is that a CONSTRUCTOR node
5160 with no elements indicates zero-initialization of the whole. */
5161 if (vec_safe_is_empty (elts))
5162 {
5163 if (notify_temp_creation)
5164 return GS_OK;
5165
5166 /* The var will be initialized and so appear on the lhs of an
5167 assignment, so it can't be TREE_READONLY anymore.  */
5168 if (VAR_P (object))
5169 TREE_READONLY (object) = 0;
5170
5171 is_empty_ctor = true;
5172 break;
5173 }
5174
5175 /* Fetch information about the constructor to direct later processing.
5176 We might want to make static versions of it in various cases, and
5177 can only do so if it is known to be a valid constant initializer.  */
5178 valid_const_initializer
5179 = categorize_ctor_elements (ctor, &num_nonzero_elements,
5180 &num_unique_nonzero_elements,
5181 &num_ctor_elements, &complete_p);
5182
5183 /* If a const aggregate variable is being initialized, then it
5184 should never be a loss to promote the variable to be static.  */
5185 if (valid_const_initializer
5186 && num_nonzero_elements > 1
5187 && TREE_READONLY (object)
5188 && VAR_P (object)
5189 && !DECL_REGISTER (object)
5190 && (flag_merge_constants >= 2 || !TREE_ADDRESSABLE (object))
5191 /* For ctors that have many repeated nonzero elements
5192 represented through RANGE_EXPRs, prefer initializing
5193 those through runtime loops over copies of large amounts
5194 of data from readonly data section. */
5195 && (num_unique_nonzero_elements
5196 > num_nonzero_elements / unique_nonzero_ratio
5197 || ((unsigned HOST_WIDE_INT) int_size_in_bytes (type)
5198 <= (unsigned HOST_WIDE_INT) min_unique_size)))
5199 {
5200 if (notify_temp_creation)
5201 return GS_ERROR;
5202
5203 DECL_INITIAL (object) = ctor;
5204 TREE_STATIC (object) = 1;
5205 if (!DECL_NAME (object))
5206 DECL_NAME (object) = create_tmp_var_name ("C");
5207 walk_tree (&DECL_INITIAL (object), force_labels_r, NULL, NULL);
5208
5209 /* ??? C++ doesn't automatically append a .<number> to the
5210 assembler name, and even when it does, it looks at FE private
5211 data structures to figure out what that number should be,
5212 which are not set for this variable. I suppose this is
5213 important for local statics for inline functions, which aren't
5214 "local" in the object file sense. So in order to get a unique
5215 TU-local symbol, we must invoke the lhd version now. */
5216 lhd_set_decl_assembler_name (object);
5217
5218 *expr_p = NULL_TREE;
5219 break;
5220 }
5221
5222 /* The var will be initialized and so appear on the lhs of an
5223 assignment, so it can't be TREE_READONLY anymore.  */
5224 if (VAR_P (object) && !notify_temp_creation)
5225 TREE_READONLY (object) = 0;
5226
5227 /* If there are "lots" of initialized elements, even discounting
5228 those that are not address constants (and thus *must* be
5229 computed at runtime), then partition the constructor into
5230 constant and non-constant parts. Block copy the constant
5231 parts in, then generate code for the non-constant parts. */
5232 /* TODO. There's code in cp/typeck.cc to do this. */
5233
5234 if (int_size_in_bytes (TREE_TYPE (ctor)) < 0)
5235 /* store_constructor will ignore the clearing of variable-sized
5236 objects. Initializers for such objects must explicitly set
5237 every field that needs to be set. */
5238 cleared = false;
5239 else if (!complete_p)
5240 /* If the constructor isn't complete, clear the whole object
5241 beforehand, unless CONSTRUCTOR_NO_CLEARING is set on it.
5242
5243 ??? This ought not to be needed. For any element not present
5244 in the initializer, we should simply set it to zero.  Except
5245 we'd need to *find* the elements that are not present, and that
5246 requires trickery to avoid quadratic compile-time behavior in
5247 large cases or excessive memory use in small cases. */
5248 cleared = !CONSTRUCTOR_NO_CLEARING (ctor);
5249 else if (num_ctor_elements - num_nonzero_elements
5250 > CLEAR_RATIO (optimize_function_for_speed_p (cfun))
5251 && num_nonzero_elements < num_ctor_elements / 4)
5252 /* If there are "lots" of zeros, it's more efficient to clear
5253 the memory and then set the nonzero elements. */
5254 cleared = true;
5255 else if (ensure_single_access && num_nonzero_elements == 0)
5256 /* If a single access to the target must be ensured and all elements
5257 are zero, then it's optimal to clear whatever their number. */
5258 cleared = true;
5259 else
5260 cleared = false;
5261
5262 /* If there are "lots" of initialized elements, and all of them
5263 are valid address constants, then the entire initializer can
5264 be dropped to memory, and then memcpy'd out. Don't do this
5265 for sparse arrays, though, as it's more efficient to follow
5266 the standard CONSTRUCTOR behavior of memset followed by
5267 individual element initialization. Also don't do this for small
5268 all-zero initializers (which aren't big enough to merit
5269 clearing), and don't try to make bitwise copies of
5270 TREE_ADDRESSABLE types. */
5271 if (valid_const_initializer
5272 && complete_p
5273 && !(cleared || num_nonzero_elements == 0)
5274 && !TREE_ADDRESSABLE (type))
5275 {
5276 HOST_WIDE_INT size = int_size_in_bytes (type);
5277 unsigned int align;
5278
5279 /* ??? We can still get unbounded array types, at least
5280 from the C++ front end. This seems wrong, but attempt
5281 to work around it for now. */
5282 if (size < 0)
5283 {
5284 size = int_size_in_bytes (TREE_TYPE (object));
5285 if (size >= 0)
5286 TREE_TYPE (ctor) = type = TREE_TYPE (object);
5287 }
5288
5289 /* Find the maximum alignment we can assume for the object. */
5290 /* ??? Make use of DECL_OFFSET_ALIGN. */
5291 if (DECL_P (object))
5292 align = DECL_ALIGN (object);
5293 else
5294 align = TYPE_ALIGN (type);
5295
5296 /* Do a block move either if the size is so small as to make
5297 each individual move a sub-unit move on average, or if it
5298 is so large as to make individual moves inefficient. */
5299 if (size > 0
5300 && num_nonzero_elements > 1
5301 /* For ctors that have many repeated nonzero elements
5302 represented through RANGE_EXPRs, prefer initializing
5303 those through runtime loops over copies of large amounts
5304 of data from readonly data section. */
5305 && (num_unique_nonzero_elements
5306 > num_nonzero_elements / unique_nonzero_ratio
5307 || size <= min_unique_size)
5308 && (size < num_nonzero_elements
5309 || !can_move_by_pieces (size, align)))
5310 {
5311 if (notify_temp_creation)
5312 return GS_ERROR;
5313
5314 walk_tree (&ctor, force_labels_r, NULL, NULL);
5315 ctor = tree_output_constant_def (ctor);
5316 if (!useless_type_conversion_p (type, TREE_TYPE (ctor)))
5317 ctor = build1 (VIEW_CONVERT_EXPR, type, ctor);
5318 TREE_OPERAND (*expr_p, 1) = ctor;
5319
5320 /* This is no longer an assignment of a CONSTRUCTOR, but
5321 we still may have processing to do on the LHS. So
5322 pretend we didn't do anything here to let that happen. */
5323 return GS_UNHANDLED;
5324 }
5325 }
5326
5327 /* If a single access to the target must be ensured and there are
5328 nonzero elements or the zero elements are not assigned en masse,
5329 initialize the target from a temporary. */
5330 if (ensure_single_access && (num_nonzero_elements > 0 || !cleared))
5331 {
5332 if (notify_temp_creation)
5333 return GS_ERROR;
5334
5335 tree temp = create_tmp_var (TYPE_MAIN_VARIANT (type));
5336 TREE_OPERAND (*expr_p, 0) = temp;
5337 *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
5338 *expr_p,
5339 build2 (MODIFY_EXPR, void_type_node,
5340 object, temp));
5341 return GS_OK;
5342 }
5343
5344 if (notify_temp_creation)
5345 return GS_OK;
5346
5347 /* If there are nonzero elements and if needed, pre-evaluate to capture
5348 elements overlapping with the lhs into temporaries. We must do this
5349 before clearing to fetch the values before they are zeroed-out. */
5350 if (num_nonzero_elements > 0 && TREE_CODE (*expr_p) != INIT_EXPR)
5351 {
5352 preeval_data.lhs_base_decl = get_base_address (object);
5353 if (!DECL_P (preeval_data.lhs_base_decl))
5354 preeval_data.lhs_base_decl = NULL;
5355 preeval_data.lhs_alias_set = get_alias_set (object);
5356
5357 gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p, 1),
5358 pre_p, post_p, &preeval_data);
5359 }
5360
5361 bool ctor_has_side_effects_p
5362 = TREE_SIDE_EFFECTS (TREE_OPERAND (*expr_p, 1));
5363
5364 if (cleared)
5365 {
5366 /* Zap the CONSTRUCTOR element list, which simplifies this case.
5367 Note that we still have to gimplify, in order to handle the
5368 case of variable sized types. Avoid shared tree structures. */
5369 CONSTRUCTOR_ELTS (ctor) = NULL;
5370 TREE_SIDE_EFFECTS (ctor) = 0;
5371 object = unshare_expr (object);
5372 gimplify_stmt (expr_p, pre_p);
5373 }
5374
5375 /* If we have not block cleared the object, or if there are nonzero
5376 elements in the constructor, or if the constructor has side effects,
5377 add assignments to the individual scalar fields of the object. */
5378 if (!cleared
5379 || num_nonzero_elements > 0
5380 || ctor_has_side_effects_p)
5381 gimplify_init_ctor_eval (object, elts, pre_p, cleared);
5382
5383 *expr_p = NULL_TREE;
5384 }
5385 break;
5386
5387 case COMPLEX_TYPE:
5388 {
5389 tree r, i;
5390
5391 if (notify_temp_creation)
5392 return GS_OK;
5393
5394 /* Extract the real and imaginary parts out of the ctor. */
5395 gcc_assert (elts->length () == 2);
5396 r = (*elts)[0].value;
5397 i = (*elts)[1].value;
5398 if (r == NULL || i == NULL)
5399 {
5400 tree zero = build_zero_cst (TREE_TYPE (type));
5401 if (r == NULL)
5402 r = zero;
5403 if (i == NULL)
5404 i = zero;
5405 }
5406
5407 /* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
5408 represent creation of a complex value. */
5409 if (TREE_CONSTANT (r) && TREE_CONSTANT (i))
5410 {
5411 ctor = build_complex (type, r, i);
5412 TREE_OPERAND (*expr_p, 1) = ctor;
5413 }
5414 else
5415 {
5416 ctor = build2 (COMPLEX_EXPR, type, r, i);
5417 TREE_OPERAND (*expr_p, 1) = ctor;
5418 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1),
5419 pre_p,
5420 post_p,
5421 rhs_predicate_for (TREE_OPERAND (*expr_p, 0)),
5422 fb_rvalue);
5423 }
5424 }
5425 break;
5426
5427 case VECTOR_TYPE:
5428 {
5429 unsigned HOST_WIDE_INT ix;
5430 constructor_elt *ce;
5431
5432 if (notify_temp_creation)
5433 return GS_OK;
5434
5435 /* Go ahead and simplify constant constructors to VECTOR_CST. */
5436 if (TREE_CONSTANT (ctor))
5437 {
5438 bool constant_p = true;
5439 tree value;
5440
5441 /* Even when ctor is constant, it might contain non-*_CST
5442 elements, such as addresses or trapping values like
5443 1.0/0.0 - 1.0/0.0. Such expressions don't belong
5444 in VECTOR_CST nodes. */
5445 FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
5446 if (!CONSTANT_CLASS_P (value))
5447 {
5448 constant_p = false;
5449 break;
5450 }
5451
5452 if (constant_p)
5453 {
5454 TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts);
5455 break;
5456 }
5457
5458 TREE_CONSTANT (ctor) = 0;
5459 }
5460
5461 /* Vector types use CONSTRUCTOR all the way through gimple
5462 compilation as a general initializer. */
5463 FOR_EACH_VEC_SAFE_ELT (elts, ix, ce)
5464 {
5465 enum gimplify_status tret;
5466 tret = gimplify_expr (&ce->value, pre_p, post_p, is_gimple_val,
5467 fb_rvalue);
5468 if (tret == GS_ERROR)
5469 ret = GS_ERROR;
5470 else if (TREE_STATIC (ctor)
5471 && !initializer_constant_valid_p (ce->value,
5472 TREE_TYPE (ce->value)))
5473 TREE_STATIC (ctor) = 0;
5474 }
5475 recompute_constructor_flags (ctor);
5476 if (!is_gimple_reg (TREE_OPERAND (*expr_p, 0)))
5477 TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (ctor, pre_p);
5478 }
5479 break;
5480
5481 default:
5482 /* So how did we get a CONSTRUCTOR for a scalar type? */
5483 gcc_unreachable ();
5484 }
5485
5486 if (ret == GS_ERROR)
5487 return GS_ERROR;
5488 /* If we have gimplified both sides of the initializer but have
5489 not emitted an assignment, do so now. */
5490 if (*expr_p)
5491 {
5492 tree lhs = TREE_OPERAND (*expr_p, 0);
5493 tree rhs = TREE_OPERAND (*expr_p, 1);
5494 if (want_value && object == lhs)
5495 lhs = unshare_expr (lhs);
5496 gassign *init = gimple_build_assign (lhs, rhs);
5497 gimplify_seq_add_stmt (pre_p, init);
5498 }
5499 if (want_value)
5500 {
5501 *expr_p = object;
5502 ret = GS_OK;
5503 }
5504 else
5505 {
5506 *expr_p = NULL;
5507 ret = GS_ALL_DONE;
5508 }
5509
5510 /* If the user requests to initialize automatic variables, we
5511 should initialize the padding inside the variable.  Add a call to
5512 __builtin_clear_padding (&object, 0, for_auto_init = true) to
5513 initialize the padding of the object always to zero regardless of
5514 INIT_TYPE.  Note, we will not insert this call if the aggregate
5515 variable has been completely cleared already or it's initialized
5516 with an empty constructor.  We cannot insert this call if the
5517 variable is a gimple register since __builtin_clear_padding will take
5518 the address of the variable.  As a result, if a long double/_Complex long
5519 double variable will be spilled onto the stack later, its padding cannot
5520 be cleared with __builtin_clear_padding.  We should clear its padding
5521 when it is spilled into memory.  */
5522 if (is_init_expr
5523 && !is_gimple_reg (object)
5524 && clear_padding_type_may_have_padding_p (type)
5525 && ((AGGREGATE_TYPE_P (type) && !cleared && !is_empty_ctor)
5526 || !AGGREGATE_TYPE_P (type))
5527 && is_var_need_auto_init (object))
5528 gimple_add_padding_init_for_auto_var (object, false, pre_p);
5529
5530 return ret;
5531 }
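
/* To make the aggregate case above concrete (an illustrative sketch only,
   with an invented initializer):

     struct { int a, b, c, d; } x = { .a = 1, .d = f () };

   has no valid constant initializer, so it gimplifies roughly to

     x = {};		<- the block clear, when CLEARED is chosen
     x.a = 1;
     x.d = f ();

   whereas a large constant initializer of a TREE_READONLY variable may
   instead be promoted to a static and emitted in the readonly data
   section, as handled earlier above.  */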
5532
5533 /* Given a pointer value OP0, return a simplified version of an
5534 indirection through OP0, or NULL_TREE if no simplification is
5535 possible. This may only be applied to a rhs of an expression.
5536 Note that the resulting type may be different from the type pointed
5537 to in the sense that it is still compatible from the langhooks
5538 point of view. */
5539
5540 static tree
5541 gimple_fold_indirect_ref_rhs (tree t)
5542 {
5543 return gimple_fold_indirect_ref (t);
5544 }
5545
5546 /* Subroutine of gimplify_modify_expr to do simplifications of
5547 MODIFY_EXPRs based on the code of the RHS. We loop for as long as
5548 something changes. */
5549
5550 static enum gimplify_status
5551 gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p,
5552 gimple_seq *pre_p, gimple_seq *post_p,
5553 bool want_value)
5554 {
5555 enum gimplify_status ret = GS_UNHANDLED;
5556 bool changed;
5557
5558 do
5559 {
5560 changed = false;
5561 switch (TREE_CODE (*from_p))
5562 {
5563 case VAR_DECL:
5564 /* If we're assigning from a read-only variable initialized with
5565 a constructor and not volatile, do the direct assignment from
5566 the constructor, but only if the target is not volatile either
5567 since this latter assignment might end up being done on a per
5568 field basis. However, if the target is volatile and the type
5569 is aggregate and non-addressable, gimplify_init_constructor
5570 knows that it needs to ensure a single access to the target
5571 and it will return GS_OK only in this case. */
5572 if (TREE_READONLY (*from_p)
5573 && DECL_INITIAL (*from_p)
5574 && TREE_CODE (DECL_INITIAL (*from_p)) == CONSTRUCTOR
5575 && !TREE_THIS_VOLATILE (*from_p)
5576 && (!TREE_THIS_VOLATILE (*to_p)
5577 || (AGGREGATE_TYPE_P (TREE_TYPE (*to_p))
5578 && !TREE_ADDRESSABLE (TREE_TYPE (*to_p)))))
5579 {
5580 tree old_from = *from_p;
5581 enum gimplify_status subret;
5582
5583 /* Move the constructor into the RHS. */
5584 *from_p = unshare_expr (DECL_INITIAL (*from_p));
5585
5586 /* Let's see if gimplify_init_constructor will need to put
5587 it in memory. */
5588 subret = gimplify_init_constructor (expr_p, NULL, NULL,
5589 false, true);
5590 if (subret == GS_ERROR)
5591 {
5592 /* If so, revert the change. */
5593 *from_p = old_from;
5594 }
5595 else
5596 {
5597 ret = GS_OK;
5598 changed = true;
5599 }
5600 }
5601 break;
5602 case INDIRECT_REF:
5603 {
5604 /* If we have code like
5605
5606 *(const A*)(A*)&x
5607
5608 where the type of "x" is a (possibly cv-qualified variant
5609 of "A"), treat the entire expression as identical to "x".
5610 This kind of code arises in C++ when an object is bound
5611 to a const reference, and if "x" is a TARGET_EXPR we want
5612 to take advantage of the optimization below. */
5613 bool volatile_p = TREE_THIS_VOLATILE (*from_p);
5614 tree t = gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0));
5615 if (t)
5616 {
5617 if (TREE_THIS_VOLATILE (t) != volatile_p)
5618 {
5619 if (DECL_P (t))
5620 t = build_simple_mem_ref_loc (EXPR_LOCATION (*from_p),
5621 build_fold_addr_expr (t));
5622 if (REFERENCE_CLASS_P (t))
5623 TREE_THIS_VOLATILE (t) = volatile_p;
5624 }
5625 *from_p = t;
5626 ret = GS_OK;
5627 changed = true;
5628 }
5629 break;
5630 }
5631
5632 case TARGET_EXPR:
5633 {
5634 /* If we are initializing something from a TARGET_EXPR, strip the
5635 TARGET_EXPR and initialize it directly, if possible. This can't
5636 be done if the initializer is void, since that implies that the
5637 temporary is set in some non-trivial way.
5638
5639 ??? What about code that pulls out the temp and uses it
5640 elsewhere? I think that such code never uses the TARGET_EXPR as
5641 an initializer. If I'm wrong, we'll die because the temp won't
5642 have any RTL. In that case, I guess we'll need to replace
5643 references somehow. */
5644 tree init = TARGET_EXPR_INITIAL (*from_p);
5645
5646 if (init
5647 && (TREE_CODE (*expr_p) != MODIFY_EXPR
5648 || !TARGET_EXPR_NO_ELIDE (*from_p))
5649 && !VOID_TYPE_P (TREE_TYPE (init)))
5650 {
5651 *from_p = init;
5652 ret = GS_OK;
5653 changed = true;
5654 }
5655 }
5656 break;
5657
5658 case COMPOUND_EXPR:
5659 /* Remove any COMPOUND_EXPR in the RHS so the following cases will be
5660 caught. */
5661 gimplify_compound_expr (from_p, pre_p, true);
5662 ret = GS_OK;
5663 changed = true;
5664 break;
5665
5666 case CONSTRUCTOR:
5667 /* If we already made some changes, let the front end have a
5668 crack at this before we break it down. */
5669 if (ret != GS_UNHANDLED)
5670 break;
5671
5672 /* If we're initializing from a CONSTRUCTOR, break this into
5673 individual MODIFY_EXPRs. */
5674 ret = gimplify_init_constructor (expr_p, pre_p, post_p, want_value,
5675 false);
5676 return ret;
5677
5678 case COND_EXPR:
5679 /* If we're assigning to a non-register type, push the assignment
5680 down into the branches. This is mandatory for ADDRESSABLE types,
5681 since we cannot generate temporaries for such, but it saves a
5682 copy in other cases as well. */
5683 if (!is_gimple_reg_type (TREE_TYPE (*from_p)))
5684 {
5685 /* This code should mirror the code in gimplify_cond_expr. */
5686 enum tree_code code = TREE_CODE (*expr_p);
5687 tree cond = *from_p;
5688 tree result = *to_p;
5689
5690 ret = gimplify_expr (&result, pre_p, post_p,
5691 is_gimple_lvalue, fb_lvalue);
5692 if (ret != GS_ERROR)
5693 ret = GS_OK;
5694
5695 /* If we are going to write RESULT more than once, clear
5696 TREE_READONLY flag, otherwise we might incorrectly promote
5697 the variable to static const and initialize it at compile
5698 time in one of the branches. */
5699 if (VAR_P (result)
5700 && TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node
5701 && TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
5702 TREE_READONLY (result) = 0;
5703 if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node)
5704 TREE_OPERAND (cond, 1)
5705 = build2 (code, void_type_node, result,
5706 TREE_OPERAND (cond, 1));
5707 if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
5708 TREE_OPERAND (cond, 2)
5709 = build2 (code, void_type_node, unshare_expr (result),
5710 TREE_OPERAND (cond, 2));
5711
5712 TREE_TYPE (cond) = void_type_node;
5713 recalculate_side_effects (cond);
5714
5715 if (want_value)
5716 {
5717 gimplify_and_add (cond, pre_p);
5718 *expr_p = unshare_expr (result);
5719 }
5720 else
5721 *expr_p = cond;
5722 return ret;
5723 }
5724 break;
5725
5726 case CALL_EXPR:
5727 /* For calls that return in memory, give *to_p as the CALL_EXPR's
5728 return slot so that we don't generate a temporary. */
5729 if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p)
5730 && aggregate_value_p (*from_p, *from_p))
5731 {
5732 bool use_target;
5733
5734 if (!(rhs_predicate_for (*to_p))(*from_p))
5735 /* If we need a temporary, *to_p isn't accurate. */
5736 use_target = false;
5737 /* It's OK to use the return slot directly unless it's an NRV. */
5738 else if (TREE_CODE (*to_p) == RESULT_DECL
5739 && DECL_NAME (*to_p) == NULL_TREE
5740 && needs_to_live_in_memory (*to_p))
5741 use_target = true;
5742 else if (is_gimple_reg_type (TREE_TYPE (*to_p))
5743 || (DECL_P (*to_p) && DECL_REGISTER (*to_p)))
5744 /* Don't force regs into memory. */
5745 use_target = false;
5746 else if (TREE_CODE (*expr_p) == INIT_EXPR)
5747 /* It's OK to use the target directly if it's being
5748 initialized. */
5749 use_target = true;
5750 else if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (*to_p)))
5751 != INTEGER_CST)
5752 /* Always use the target and thus RSO for variable-sized types.
5753 GIMPLE cannot deal with a variable-sized assignment
5754 embedded in a call statement. */
5755 use_target = true;
5756 else if (TREE_CODE (*to_p) != SSA_NAME
5757 && (!is_gimple_variable (*to_p)
5758 || needs_to_live_in_memory (*to_p)))
5759 /* Don't use the original target if it's already addressable;
5760 if its address escapes, and the called function uses the
5761 NRV optimization, a conforming program could see *to_p
5762 change before the called function returns; see c++/19317.
5763 When optimizing, the return_slot pass marks more functions
5764 as safe after we have escape info. */
5765 use_target = false;
5766 else
5767 use_target = true;
5768
5769 if (use_target)
5770 {
5771 CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1;
5772 mark_addressable (*to_p);
5773 }
5774 }
5775 break;
5776
5777 case WITH_SIZE_EXPR:
5778 /* Likewise for calls that return an aggregate of non-constant size,
5779 since we would not be able to generate a temporary at all. */
5780 if (TREE_CODE (TREE_OPERAND (*from_p, 0)) == CALL_EXPR)
5781 {
5782 *from_p = TREE_OPERAND (*from_p, 0);
5783 /* We don't change ret in this case because the
5784 WITH_SIZE_EXPR might have been added in
5785 gimplify_modify_expr, so returning GS_OK would lead to an
5786 infinite loop. */
5787 changed = true;
5788 }
5789 break;
5790
5791 /* If we're initializing from a container, push the initialization
5792 inside it. */
5793 case CLEANUP_POINT_EXPR:
5794 case BIND_EXPR:
5795 case STATEMENT_LIST:
5796 {
5797 tree wrap = *from_p;
5798 tree t;
5799
5800 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_min_lval,
5801 fb_lvalue);
5802 if (ret != GS_ERROR)
5803 ret = GS_OK;
5804
5805 t = voidify_wrapper_expr (wrap, *expr_p);
5806 gcc_assert (t == *expr_p);
5807
5808 if (want_value)
5809 {
5810 gimplify_and_add (wrap, pre_p);
5811 *expr_p = unshare_expr (*to_p);
5812 }
5813 else
5814 *expr_p = wrap;
5815 return GS_OK;
5816 }
5817
5818 case NOP_EXPR:
5819 /* Pull out compound literal expressions from a NOP_EXPR.
5820 Those are created in the C FE to drop qualifiers during
5821 lvalue conversion. */
5822 if ((TREE_CODE (TREE_OPERAND (*from_p, 0)) == COMPOUND_LITERAL_EXPR)
5823 && tree_ssa_useless_type_conversion (*from_p))
5824 {
5825 *from_p = TREE_OPERAND (*from_p, 0);
5826 ret = GS_OK;
5827 changed = true;
5828 }
5829 break;
5830
5831 case COMPOUND_LITERAL_EXPR:
5832 {
5833 tree complit = TREE_OPERAND (*expr_p, 1);
5834 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (complit);
5835 tree decl = DECL_EXPR_DECL (decl_s);
5836 tree init = DECL_INITIAL (decl);
5837
5838 /* struct T x = (struct T) { 0, 1, 2 } can be optimized
5839 into struct T x = { 0, 1, 2 } if the address of the
5840 compound literal has never been taken. */
5841 if (!TREE_ADDRESSABLE (complit)
5842 && !TREE_ADDRESSABLE (decl)
5843 && init)
5844 {
5845 *expr_p = copy_node (*expr_p);
5846 TREE_OPERAND (*expr_p, 1) = init;
5847 return GS_OK;
5848 }
5849 }
5850 break;
5851 default:
5852 break;
5853 }
5854 }
5855 while (changed);
5856
5857 return ret;
5858 }
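
/* A sketch of the COND_EXPR case above (variable names invented): for an
   aggregate, non-register type, the assignment

     big = flag ? a : b;

   is pushed down into the branches and becomes

     if (flag) big = a; else big = b;

   which is mandatory for addressable types and saves a temporary
   otherwise.  */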
5859
5860
5861 /* Return true if T looks like a valid GIMPLE statement. */
5862
5863 static bool
5864 is_gimple_stmt (tree t)
5865 {
5866 const enum tree_code code = TREE_CODE (t);
5867
5868 switch (code)
5869 {
5870 case NOP_EXPR:
5871 /* The only valid NOP_EXPR is the empty statement. */
5872 return IS_EMPTY_STMT (t);
5873
5874 case BIND_EXPR:
5875 case COND_EXPR:
5876 /* These are only valid if they're void. */
5877 return TREE_TYPE (t) == NULL || VOID_TYPE_P (TREE_TYPE (t));
5878
5879 case SWITCH_EXPR:
5880 case GOTO_EXPR:
5881 case RETURN_EXPR:
5882 case LABEL_EXPR:
5883 case CASE_LABEL_EXPR:
5884 case TRY_CATCH_EXPR:
5885 case TRY_FINALLY_EXPR:
5886 case EH_FILTER_EXPR:
5887 case CATCH_EXPR:
5888 case ASM_EXPR:
5889 case STATEMENT_LIST:
5890 case OACC_PARALLEL:
5891 case OACC_KERNELS:
5892 case OACC_SERIAL:
5893 case OACC_DATA:
5894 case OACC_HOST_DATA:
5895 case OACC_DECLARE:
5896 case OACC_UPDATE:
5897 case OACC_ENTER_DATA:
5898 case OACC_EXIT_DATA:
5899 case OACC_CACHE:
5900 case OMP_PARALLEL:
5901 case OMP_FOR:
5902 case OMP_SIMD:
5903 case OMP_DISTRIBUTE:
5904 case OMP_LOOP:
5905 case OACC_LOOP:
5906 case OMP_SCAN:
5907 case OMP_SCOPE:
5908 case OMP_SECTIONS:
5909 case OMP_SECTION:
5910 case OMP_SINGLE:
5911 case OMP_MASTER:
5912 case OMP_MASKED:
5913 case OMP_TASKGROUP:
5914 case OMP_ORDERED:
5915 case OMP_CRITICAL:
5916 case OMP_TASK:
5917 case OMP_TARGET:
5918 case OMP_TARGET_DATA:
5919 case OMP_TARGET_UPDATE:
5920 case OMP_TARGET_ENTER_DATA:
5921 case OMP_TARGET_EXIT_DATA:
5922 case OMP_TASKLOOP:
5923 case OMP_TEAMS:
5924 /* These are always void. */
5925 return true;
5926
5927 case CALL_EXPR:
5928 case MODIFY_EXPR:
5929 case PREDICT_EXPR:
5930 /* These are valid regardless of their type. */
5931 return true;
5932
5933 default:
5934 return false;
5935 }
5936 }
5937
5938
5939 /* Promote partial stores to COMPLEX variables to total stores. *EXPR_P is
5940 a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a gimple register.
5941
5942 IMPORTANT NOTE: This promotion is performed by introducing a load of the
5943 other, unmodified part of the complex object just before the total store.
5944 As a consequence, if the object is still uninitialized, an undefined value
5945 will be loaded into a register, which may result in a spurious exception
5946 if the register is floating-point and the value happens to be a signaling
5947 NaN for example. Then the fully-fledged complex operations lowering pass
5948 followed by a DCE pass are necessary in order to fix things up. */
5949
5950 static enum gimplify_status
5951 gimplify_modify_expr_complex_part (tree *expr_p, gimple_seq *pre_p,
5952 bool want_value)
5953 {
5954 enum tree_code code, ocode;
5955 tree lhs, rhs, new_rhs, other, realpart, imagpart;
5956
5957 lhs = TREE_OPERAND (*expr_p, 0);
5958 rhs = TREE_OPERAND (*expr_p, 1);
5959 code = TREE_CODE (lhs);
5960 lhs = TREE_OPERAND (lhs, 0);
5961
5962 ocode = code == REALPART_EXPR ? IMAGPART_EXPR : REALPART_EXPR;
5963 other = build1 (ocode, TREE_TYPE (rhs), lhs);
5964 suppress_warning (other);
5965 other = get_formal_tmp_var (other, pre_p);
5966
5967 realpart = code == REALPART_EXPR ? rhs : other;
5968 imagpart = code == REALPART_EXPR ? other : rhs;
5969
5970 if (TREE_CONSTANT (realpart) && TREE_CONSTANT (imagpart))
5971 new_rhs = build_complex (TREE_TYPE (lhs), realpart, imagpart);
5972 else
5973 new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart);
5974
5975 gimplify_seq_add_stmt (pre_p, gimple_build_assign (lhs, new_rhs));
5976 *expr_p = (want_value) ? rhs : NULL_TREE;
5977
5978 return GS_ALL_DONE;
5979 }
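
/* For instance (temporary name invented): the partial store

     __real__ z = x;

   is promoted by the routine above into the total store

     D.1 = __imag__ z;
     z = COMPLEX_EXPR <x, D.1>;

   which lets Z stay a gimple register with real operands.  */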
5980
5981 /* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.
5982
5983 modify_expr
5984 : varname '=' rhs
5985 | '*' ID '=' rhs
5986
5987 PRE_P points to the list where side effects that must happen before
5988 *EXPR_P should be stored.
5989
5990 POST_P points to the list where side effects that must happen after
5991 *EXPR_P should be stored.
5992
5993 WANT_VALUE is nonzero iff we want to use the value of this expression
5994 in another expression. */
5995
5996 static enum gimplify_status
5997 gimplify_modify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
5998 bool want_value)
5999 {
6000 tree *from_p = &TREE_OPERAND (*expr_p, 1);
6001 tree *to_p = &TREE_OPERAND (*expr_p, 0);
6002 enum gimplify_status ret = GS_UNHANDLED;
6003 gimple *assign;
6004 location_t loc = EXPR_LOCATION (*expr_p);
6005 gimple_stmt_iterator gsi;
6006
6007 gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR
6008 || TREE_CODE (*expr_p) == INIT_EXPR);
6009
6010 /* Trying to simplify a clobber using normal logic doesn't work,
6011 so handle it here. */
6012 if (TREE_CLOBBER_P (*from_p))
6013 {
6014 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
6015 if (ret == GS_ERROR)
6016 return ret;
6017 gcc_assert (!want_value);
6018 if (!VAR_P (*to_p) && TREE_CODE (*to_p) != MEM_REF)
6019 {
6020 tree addr = get_initialized_tmp_var (build_fold_addr_expr (*to_p),
6021 pre_p, post_p);
6022 *to_p = build_simple_mem_ref_loc (EXPR_LOCATION (*to_p), addr);
6023 }
6024 gimplify_seq_add_stmt (pre_p, gimple_build_assign (*to_p, *from_p));
6025 *expr_p = NULL;
6026 return GS_ALL_DONE;
6027 }
6028
6029 /* Insert pointer conversions required by the middle-end that are not
6030 required by the frontend.  This fixes middle-end type checking for,
6031 for example, gcc.dg/redecl-6.c.  */
6032 if (POINTER_TYPE_P (TREE_TYPE (*to_p)))
6033 {
6034 STRIP_USELESS_TYPE_CONVERSION (*from_p);
6035 if (!useless_type_conversion_p (TREE_TYPE (*to_p), TREE_TYPE (*from_p)))
6036 *from_p = fold_convert_loc (loc, TREE_TYPE (*to_p), *from_p);
6037 }
6038
6039 /* See if any simplifications can be done based on what the RHS is. */
6040 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
6041 want_value);
6042 if (ret != GS_UNHANDLED)
6043 return ret;
6044
6045 /* For empty types only gimplify the left hand side and right hand
6046 side as statements and throw away the assignment. Do this after
6047 gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable
6048 types properly. */
6049 if (is_empty_type (TREE_TYPE (*from_p))
6050 && !want_value
6051 /* Don't do this for calls that return addressable types, expand_call
6052 relies on those having a lhs. */
6053 && !(TREE_ADDRESSABLE (TREE_TYPE (*from_p))
6054 && TREE_CODE (*from_p) == CALL_EXPR))
6055 {
6056 gimplify_stmt (from_p, pre_p);
6057 gimplify_stmt (to_p, pre_p);
6058 *expr_p = NULL_TREE;
6059 return GS_ALL_DONE;
6060 }
6061
6062 /* If the value being copied is of variable width, compute the length
6063 of the copy into a WITH_SIZE_EXPR. Note that we need to do this
6064 before gimplifying any of the operands so that we can resolve any
6065 PLACEHOLDER_EXPRs in the size. Also note that the RTL expander uses
6066 the size of the expression to be copied, not of the destination, so
6067 that is what we must do here. */
6068 maybe_with_size_expr (from_p);
6069
6070 /* As a special case, we have to temporarily allow for assignments
6071 with a CALL_EXPR on the RHS. Since in GIMPLE a function call is
6072 a toplevel statement, when gimplifying the GENERIC expression
6073 MODIFY_EXPR <a, CALL_EXPR <foo>>, we cannot create the tuple
6074 GIMPLE_ASSIGN <a, GIMPLE_CALL <foo>>.
6075
6076 Instead, we need to create the tuple GIMPLE_CALL <a, foo>. To
6077 prevent gimplify_expr from trying to create a new temporary for
6078 foo's LHS, we tell it that it should only gimplify until it
6079 reaches the CALL_EXPR. On return from gimplify_expr, the newly
6080 created GIMPLE_CALL <foo> will be the last statement in *PRE_P
6081 and all we need to do here is set 'a' to be its LHS. */
6082
6083 /* Gimplify the RHS first for C++17 and bug 71104. */
6084 gimple_predicate initial_pred = initial_rhs_predicate_for (*to_p);
6085 ret = gimplify_expr (from_p, pre_p, post_p, initial_pred, fb_rvalue);
6086 if (ret == GS_ERROR)
6087 return ret;
6088
6089 /* Then gimplify the LHS. */
6090 /* If we gimplified the RHS to a CALL_EXPR and that call may return
6091 twice we have to make sure to gimplify into non-SSA as otherwise
6092 the abnormal edge added later will make those defs not dominate
6093 their uses.
6094 ??? Technically this applies only to the registers used in the
6095 resulting non-register *TO_P. */
6096 bool saved_into_ssa = gimplify_ctxp->into_ssa;
6097 if (saved_into_ssa
6098 && TREE_CODE (*from_p) == CALL_EXPR
6099 && call_expr_flags (*from_p) & ECF_RETURNS_TWICE)
6100 gimplify_ctxp->into_ssa = false;
6101 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
6102 gimplify_ctxp->into_ssa = saved_into_ssa;
6103 if (ret == GS_ERROR)
6104 return ret;
6105
6106 /* Now that the LHS is gimplified, re-gimplify the RHS if our initial
6107 guess for the predicate was wrong. */
6108 gimple_predicate final_pred = rhs_predicate_for (*to_p);
6109 if (final_pred != initial_pred)
6110 {
6111 ret = gimplify_expr (from_p, pre_p, post_p, final_pred, fb_rvalue);
6112 if (ret == GS_ERROR)
6113 return ret;
6114 }
6115
6116 /* In case of a va_arg internal fn wrapped in a WITH_SIZE_EXPR, add the type
6117 size as argument to the call. */
6118 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
6119 {
6120 tree call = TREE_OPERAND (*from_p, 0);
6121 tree vlasize = TREE_OPERAND (*from_p, 1);
6122
6123 if (TREE_CODE (call) == CALL_EXPR
6124 && CALL_EXPR_IFN (call) == IFN_VA_ARG)
6125 {
6126 int nargs = call_expr_nargs (call);
6127 tree type = TREE_TYPE (call);
6128 tree ap = CALL_EXPR_ARG (call, 0);
6129 tree tag = CALL_EXPR_ARG (call, 1);
6130 tree aptag = CALL_EXPR_ARG (call, 2);
6131 tree newcall = build_call_expr_internal_loc (EXPR_LOCATION (call),
6132 IFN_VA_ARG, type,
6133 nargs + 1, ap, tag,
6134 aptag, vlasize);
6135 TREE_OPERAND (*from_p, 0) = newcall;
6136 }
6137 }
6138
6139 /* Now see if the above changed *from_p to something we handle specially. */
6140 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
6141 want_value);
6142 if (ret != GS_UNHANDLED)
6143 return ret;
6144
6145 /* If we've got a variable sized assignment between two lvalues (i.e. does
6146 not involve a call), then we can make things a bit more straightforward
6147 by converting the assignment to memcpy or memset. */
6148 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
6149 {
6150 tree from = TREE_OPERAND (*from_p, 0);
6151 tree size = TREE_OPERAND (*from_p, 1);
6152
6153 if (TREE_CODE (from) == CONSTRUCTOR)
6154 return gimplify_modify_expr_to_memset (expr_p, size, want_value, pre_p);
6155
6156 if (is_gimple_addressable (from))
6157 {
6158 *from_p = from;
6159 return gimplify_modify_expr_to_memcpy (expr_p, size, want_value,
6160 pre_p);
6161 }
6162 }
6163
6164 /* Transform partial stores to non-addressable complex variables into
6165 total stores. This allows us to use real instead of virtual operands
6166 for these variables, which improves optimization. */
6167 if ((TREE_CODE (*to_p) == REALPART_EXPR
6168 || TREE_CODE (*to_p) == IMAGPART_EXPR)
6169 && is_gimple_reg (TREE_OPERAND (*to_p, 0)))
6170 return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value);
6171
6172 /* Try to alleviate the effects of the gimplification creating artificial
6173 temporaries (see for example is_gimple_reg_rhs) on the debug info, but
6174 make sure not to create DECL_DEBUG_EXPR links across functions. */
6175 if (!gimplify_ctxp->into_ssa
6176 && VAR_P (*from_p)
6177 && DECL_IGNORED_P (*from_p)
6178 && DECL_P (*to_p)
6179 && !DECL_IGNORED_P (*to_p)
6180 && decl_function_context (*to_p) == current_function_decl
6181 && decl_function_context (*from_p) == current_function_decl)
6182 {
6183 if (!DECL_NAME (*from_p) && DECL_NAME (*to_p))
6184 DECL_NAME (*from_p)
6185 = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p)));
6186 DECL_HAS_DEBUG_EXPR_P (*from_p) = 1;
6187 SET_DECL_DEBUG_EXPR (*from_p, *to_p);
6188 }
6189
6190 if (want_value && TREE_THIS_VOLATILE (*to_p))
6191 *from_p = get_initialized_tmp_var (*from_p, pre_p, post_p);
6192
6193 if (TREE_CODE (*from_p) == CALL_EXPR)
6194 {
6195 /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL
6196 instead of a GIMPLE_ASSIGN. */
6197 gcall *call_stmt;
6198 if (CALL_EXPR_FN (*from_p) == NULL_TREE)
6199 {
6200 /* Gimplify internal functions created in the FEs. */
6201 int nargs = call_expr_nargs (*from_p), i;
6202 enum internal_fn ifn = CALL_EXPR_IFN (*from_p);
6203 auto_vec<tree> vargs (nargs);
6204
6205 for (i = 0; i < nargs; i++)
6206 {
6207 gimplify_arg (&CALL_EXPR_ARG (*from_p, i), pre_p,
6208 EXPR_LOCATION (*from_p));
6209 vargs.quick_push (CALL_EXPR_ARG (*from_p, i));
6210 }
6211 call_stmt = gimple_build_call_internal_vec (ifn, vargs);
6212 gimple_call_set_nothrow (call_stmt, TREE_NOTHROW (*from_p));
6213 gimple_set_location (call_stmt, EXPR_LOCATION (*expr_p));
6214 }
6215 else
6216 {
6217 tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*from_p));
6218 CALL_EXPR_FN (*from_p) = TREE_OPERAND (CALL_EXPR_FN (*from_p), 0);
6219 STRIP_USELESS_TYPE_CONVERSION (CALL_EXPR_FN (*from_p));
6220 tree fndecl = get_callee_fndecl (*from_p);
6221 if (fndecl
6222 && fndecl_built_in_p (fndecl, BUILT_IN_EXPECT)
6223 && call_expr_nargs (*from_p) == 3)
6224 call_stmt = gimple_build_call_internal (IFN_BUILTIN_EXPECT, 3,
6225 CALL_EXPR_ARG (*from_p, 0),
6226 CALL_EXPR_ARG (*from_p, 1),
6227 CALL_EXPR_ARG (*from_p, 2));
6228 else
6229 {
6230 call_stmt = gimple_build_call_from_tree (*from_p, fnptrtype);
6231 }
6232 }
6233 notice_special_calls (call_stmt);
6234 if (!gimple_call_noreturn_p (call_stmt) || !should_remove_lhs_p (*to_p))
6235 gimple_call_set_lhs (call_stmt, *to_p);
6236 else if (TREE_CODE (*to_p) == SSA_NAME)
6237 /* The above is somewhat premature, avoid ICEing later for a
6238 SSA name w/o a definition. We may have uses in the GIMPLE IL.
6239 ??? This doesn't make it a default-def. */
6240 SSA_NAME_DEF_STMT (*to_p) = gimple_build_nop ();
6241
6242 assign = call_stmt;
6243 }
6244 else
6245 {
6246 assign = gimple_build_assign (*to_p, *from_p);
6247 gimple_set_location (assign, EXPR_LOCATION (*expr_p));
6248 if (COMPARISON_CLASS_P (*from_p))
6249 copy_warning (assign, *from_p);
6250 }
6251
6252 if (gimplify_ctxp->into_ssa && is_gimple_reg (*to_p))
6253 {
6254 /* We should have got an SSA name from the start. */
6255 gcc_assert (TREE_CODE (*to_p) == SSA_NAME
6256 || ! gimple_in_ssa_p (cfun));
6257 }
6258
6259 gimplify_seq_add_stmt (pre_p, assign);
6260 gsi = gsi_last (*pre_p);
6261 maybe_fold_stmt (&gsi);
6262
6263 if (want_value)
6264 {
6265 *expr_p = TREE_THIS_VOLATILE (*to_p) ? *from_p : unshare_expr (*to_p);
6266 return GS_OK;
6267 }
6268 else
6269 *expr_p = NULL;
6270
6271 return GS_ALL_DONE;
6272 }
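
/* A sketch of the variable-sized path above, for a language that permits
   such copies (names illustrative): an assignment A = B between two
   addressable objects of variable size is emitted as

     __builtin_memcpy (&A, &B, SIZE);

   and A = {} as a __builtin_memset, with SIZE taken from the
   WITH_SIZE_EXPR computed by maybe_with_size_expr.  */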
6273
6274 /* Gimplify a comparison between two variable-sized objects. Do this
6275 with a call to BUILT_IN_MEMCMP. */
6276
6277 static enum gimplify_status
6278 gimplify_variable_sized_compare (tree *expr_p)
6279 {
6280 location_t loc = EXPR_LOCATION (*expr_p);
6281 tree op0 = TREE_OPERAND (*expr_p, 0);
6282 tree op1 = TREE_OPERAND (*expr_p, 1);
6283 tree t, arg, dest, src, expr;
6284
6285 arg = TYPE_SIZE_UNIT (TREE_TYPE (op0));
6286 arg = unshare_expr (arg);
6287 arg = SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg, op0);
6288 src = build_fold_addr_expr_loc (loc, op1);
6289 dest = build_fold_addr_expr_loc (loc, op0);
6290 t = builtin_decl_implicit (BUILT_IN_MEMCMP);
6291 t = build_call_expr_loc (loc, t, 3, dest, src, arg);
6292
6293 expr
6294 = build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node);
6295 SET_EXPR_LOCATION (expr, loc);
6296 *expr_p = expr;
6297
6298 return GS_OK;
6299 }
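
/* e.g. (a sketch): for two variable-sized objects A and B, the test
   'A == B' is rewritten by the routine above as

     __builtin_memcmp (&A, &B, SIZE) == 0

   where SIZE is the placeholder-substituted TYPE_SIZE_UNIT of A's
   type.  */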
6300
6301 /* Gimplify a comparison between two aggregate objects of integral scalar
6302 mode as a comparison between the bitwise equivalent scalar values. */
6303
6304 static enum gimplify_status
6305 gimplify_scalar_mode_aggregate_compare (tree *expr_p)
6306 {
6307 location_t loc = EXPR_LOCATION (*expr_p);
6308 tree op0 = TREE_OPERAND (*expr_p, 0);
6309 tree op1 = TREE_OPERAND (*expr_p, 1);
6310
6311 tree type = TREE_TYPE (op0);
6312 tree scalar_type = lang_hooks.types.type_for_mode (TYPE_MODE (type), 1);
6313
6314 op0 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op0);
6315 op1 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op1);
6316
6317 *expr_p
6318 = fold_build2_loc (loc, TREE_CODE (*expr_p), TREE_TYPE (*expr_p), op0, op1);
6319
6320 return GS_OK;
6321 }
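
/* For example (assuming a four-byte struct whose mode is SImode;
   illustrative only): 's1 == s2' becomes, in effect,

     VIEW_CONVERT_EXPR<unsigned int>(s1) == VIEW_CONVERT_EXPR<unsigned int>(s2)

   i.e. one scalar comparison of the bitwise images of the operands.  */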
6322
6323 /* Gimplify an expression sequence. This function gimplifies each
6324 expression and rewrites the original expression with the last
6325 expression of the sequence in GIMPLE form.
6326
6327 PRE_P points to the list where the side effects for all the
6328 expressions in the sequence will be emitted.
6329
6330 WANT_VALUE is true when the result of the last COMPOUND_EXPR is used. */
6331
6332 static enum gimplify_status
6333 gimplify_compound_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
6334 {
6335 tree t = *expr_p;
6336
6337 do
6338 {
6339 tree *sub_p = &TREE_OPERAND (t, 0);
6340
6341 if (TREE_CODE (*sub_p) == COMPOUND_EXPR)
6342 gimplify_compound_expr (sub_p, pre_p, false);
6343 else
6344 gimplify_stmt (sub_p, pre_p);
6345
6346 t = TREE_OPERAND (t, 1);
6347 }
6348 while (TREE_CODE (t) == COMPOUND_EXPR);
6349
6350 *expr_p = t;
6351 if (want_value)
6352 return GS_OK;
6353 else
6354 {
6355 gimplify_stmt (expr_p, pre_p);
6356 return GS_ALL_DONE;
6357 }
6358 }
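
/* e.g. (illustrative): gimplifying 'x = (a = 1, b + 2)' with WANT_VALUE set
   emits 'a = 1' to PRE_P and rewrites the COMPOUND_EXPR to its final
   operand 'b + 2'.  */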
6359
6360 /* Gimplify a SAVE_EXPR node. EXPR_P points to the expression to
6361 gimplify. After gimplification, EXPR_P will point to a new temporary
6362 that holds the original value of the SAVE_EXPR node.
6363
6364 PRE_P points to the list where side effects that must happen before
6365 *EXPR_P should be stored. */
6366
6367 static enum gimplify_status
6368 gimplify_save_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6369 {
6370 enum gimplify_status ret = GS_ALL_DONE;
6371 tree val;
6372
6373 gcc_assert (TREE_CODE (*expr_p) == SAVE_EXPR);
6374 val = TREE_OPERAND (*expr_p, 0);
6375
6376 if (val && TREE_TYPE (val) == error_mark_node)
6377 return GS_ERROR;
6378
6379 /* If the SAVE_EXPR has not been resolved, then evaluate it once. */
6380 if (!SAVE_EXPR_RESOLVED_P (*expr_p))
6381 {
6382 /* The operand may be a void-valued expression. It is
6383 being executed only for its side-effects. */
6384 if (TREE_TYPE (val) == void_type_node)
6385 {
6386 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
6387 is_gimple_stmt, fb_none);
6388 val = NULL;
6389 }
6390 else
6391 /* The temporary may not be an SSA name as later abnormal and EH
6392 control flow may invalidate use/def domination. When in SSA
6393 form then assume there are no such issues and SAVE_EXPRs only
6394 appear via GENERIC foldings. */
6395 val = get_initialized_tmp_var (val, pre_p, post_p,
6396 gimple_in_ssa_p (cfun));
6397
6398 TREE_OPERAND (*expr_p, 0) = val;
6399 SAVE_EXPR_RESOLVED_P (*expr_p) = 1;
6400 }
6401
6402 *expr_p = val;
6403
6404 return ret;
6405 }
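
/* For instance (temporary name invented): the first gimplification of
   SAVE_EXPR <n * 4> emits

     D.1 = n * 4;

   marks the node resolved and rewrites it to D.1; any later occurrence of
   the same SAVE_EXPR then reuses D.1 instead of re-evaluating.  */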
6406
6407 /* Rewrite the ADDR_EXPR node pointed to by EXPR_P
6408
6409 unary_expr
6410 : ...
6411 | '&' varname
6412 ...
6413
6414 PRE_P points to the list where side effects that must happen before
6415 *EXPR_P should be stored.
6416
6417 POST_P points to the list where side effects that must happen after
6418 *EXPR_P should be stored. */
6419
6420 static enum gimplify_status
6421 gimplify_addr_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6422 {
6423 tree expr = *expr_p;
6424 tree op0 = TREE_OPERAND (expr, 0);
6425 enum gimplify_status ret;
6426 location_t loc = EXPR_LOCATION (*expr_p);
6427
6428 switch (TREE_CODE (op0))
6429 {
6430 case INDIRECT_REF:
6431 do_indirect_ref:
6432 /* Check if we are dealing with an expression of the form '&*ptr'.
6433 While the front end folds away '&*ptr' into 'ptr', these
6434 expressions may be generated internally by the compiler (e.g.,
6435 builtins like __builtin_va_end). */
6436 /* Caution: the silent array decomposition semantics we allow for
6437 ADDR_EXPR means we can't always discard the pair. */
6438 /* Gimplification of the ADDR_EXPR operand may drop
6439 cv-qualification conversions, so make sure we add them if
6440 needed. */
6441 {
6442 tree op00 = TREE_OPERAND (op0, 0);
6443 tree t_expr = TREE_TYPE (expr);
6444 tree t_op00 = TREE_TYPE (op00);
6445
6446 if (!useless_type_conversion_p (t_expr, t_op00))
6447 op00 = fold_convert_loc (loc, TREE_TYPE (expr), op00);
6448 *expr_p = op00;
6449 ret = GS_OK;
6450 }
6451 break;
6452
6453 case VIEW_CONVERT_EXPR:
6454 /* Take the address of our operand and then convert it to the type of
6455 this ADDR_EXPR.
6456
6457 ??? The interactions of VIEW_CONVERT_EXPR and aliasing is not at
6458 all clear. The impact of this transformation is even less clear. */
6459
6460 /* If the operand is a useless conversion, look through it. Doing so
6461 guarantees that the ADDR_EXPR and its operand will remain of the
6462 same type. */
6463 if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0, 0)))
6464 op0 = TREE_OPERAND (op0, 0);
6465
6466 *expr_p = fold_convert_loc (loc, TREE_TYPE (expr),
6467 build_fold_addr_expr_loc (loc,
6468 TREE_OPERAND (op0, 0)));
6469 ret = GS_OK;
6470 break;
6471
6472 case MEM_REF:
6473 if (integer_zerop (TREE_OPERAND (op0, 1)))
6474 goto do_indirect_ref;
6475
6476 /* fall through */
6477
6478 default:
6479 /* If we see a call to a declared builtin or see its address
6480 being taken (we can unify those cases here) then we can mark
6481 the builtin for implicit generation by GCC. */
6482 if (TREE_CODE (op0) == FUNCTION_DECL
6483 && fndecl_built_in_p (op0, BUILT_IN_NORMAL)
6484 && builtin_decl_declared_p (DECL_FUNCTION_CODE (op0)))
6485 set_builtin_decl_implicit_p (DECL_FUNCTION_CODE (op0), true);
6486
6487 /* We use fb_either here because the C frontend sometimes takes
6488 the address of a call that returns a struct; see
6489 gcc.dg/c99-array-lval-1.c. The gimplifier will correctly make
6490 the implied temporary explicit. */
6491
6492 /* Make the operand addressable. */
6493 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, post_p,
6494 is_gimple_addressable, fb_either);
6495 if (ret == GS_ERROR)
6496 break;
6497
6498 /* Then mark it. Beware that it may not be possible to do so directly
6499 if a temporary has been created by the gimplification. */
6500 prepare_gimple_addressable (&TREE_OPERAND (expr, 0), pre_p);
6501
6502 op0 = TREE_OPERAND (expr, 0);
6503
6504 /* For various reasons, the gimplification of the expression
6505 may have made a new INDIRECT_REF. */
6506 if (TREE_CODE (op0) == INDIRECT_REF
6507 || (TREE_CODE (op0) == MEM_REF
6508 && integer_zerop (TREE_OPERAND (op0, 1))))
6509 goto do_indirect_ref;
6510
6511 mark_addressable (TREE_OPERAND (expr, 0));
6512
6513 /* The FEs may end up building ADDR_EXPRs early on a decl with
6514 an incomplete type. Re-build ADDR_EXPRs in canonical form
6515 here. */
6516 if (!types_compatible_p (TREE_TYPE (op0), TREE_TYPE (TREE_TYPE (expr))))
6517 *expr_p = build_fold_addr_expr (op0);
6518
6519 /* Make sure TREE_CONSTANT and TREE_SIDE_EFFECTS are set properly. */
6520 recompute_tree_invariant_for_addr_expr (*expr_p);
6521
6522 /* If we re-built the ADDR_EXPR add a conversion to the original type
6523 if required. */
6524 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
6525 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
6526
6527 break;
6528 }
6529
6530 return ret;
6531 }
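
/* Two illustrative cases for the switch above: '&*p' collapses to plain
   'p' (with a qualification conversion added if needed), and
   '&VIEW_CONVERT_EXPR<T>(x)' is rewritten as '(T *) &x', i.e. the address
   of the underlying object with the pointer converted instead.  */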
6532
6533 /* Gimplify the operands of an ASM_EXPR. Input operands should be a gimple
6534 value; output operands should be a gimple lvalue. */
6535
6536 static enum gimplify_status
6537 gimplify_asm_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6538 {
6539 tree expr;
6540 int noutputs;
6541 const char **oconstraints;
6542 int i;
6543 tree link;
6544 const char *constraint;
6545 bool allows_mem, allows_reg, is_inout;
6546 enum gimplify_status ret, tret;
6547 gasm *stmt;
6548 vec<tree, va_gc> *inputs;
6549 vec<tree, va_gc> *outputs;
6550 vec<tree, va_gc> *clobbers;
6551 vec<tree, va_gc> *labels;
6552 tree link_next;
6553
6554 expr = *expr_p;
6555 noutputs = list_length (ASM_OUTPUTS (expr));
6556 oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));
6557
6558 inputs = NULL;
6559 outputs = NULL;
6560 clobbers = NULL;
6561 labels = NULL;
6562
6563 ret = GS_ALL_DONE;
6564 link_next = NULL_TREE;
6565 for (i = 0, link = ASM_OUTPUTS (expr); link; ++i, link = link_next)
6566 {
6567 bool ok;
6568 size_t constraint_len;
6569
6570 link_next = TREE_CHAIN (link);
6571
6572 oconstraints[i]
6573 = constraint
6574 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
6575 constraint_len = strlen (constraint);
6576 if (constraint_len == 0)
6577 continue;
6578
6579 ok = parse_output_constraint (&constraint, i, 0, 0,
6580 &allows_mem, &allows_reg, &is_inout);
6581 if (!ok)
6582 {
6583 ret = GS_ERROR;
6584 is_inout = false;
6585 }
6586
6587 /* If we can't make copies, we can only accept memory.
6588 Similarly for VLAs. */
6589 tree outtype = TREE_TYPE (TREE_VALUE (link));
6590 if (outtype != error_mark_node
6591 && (TREE_ADDRESSABLE (outtype)
6592 || !COMPLETE_TYPE_P (outtype)
6593 || !tree_fits_poly_uint64_p (TYPE_SIZE_UNIT (outtype))))
6594 {
6595 if (allows_mem)
6596 allows_reg = 0;
6597 else
6598 {
6599 error ("impossible constraint in %<asm%>");
6600 error ("non-memory output %d must stay in memory", i);
6601 return GS_ERROR;
6602 }
6603 }
6604
6605 if (!allows_reg && allows_mem)
6606 mark_addressable (TREE_VALUE (link));
6607
6608 tree orig = TREE_VALUE (link);
6609 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
6610 is_inout ? is_gimple_min_lval : is_gimple_lvalue,
6611 fb_lvalue | fb_mayfail);
6612 if (tret == GS_ERROR)
6613 {
6614 if (orig != error_mark_node)
6615 error ("invalid lvalue in %<asm%> output %d", i);
6616 ret = tret;
6617 }
6618
6619 /* If the constraint does not allow memory make sure we gimplify
6620 it to a register if it is not already but its base is. This
6621 happens for complex and vector components. */
6622 if (!allows_mem)
6623 {
6624 tree op = TREE_VALUE (link);
6625 if (! is_gimple_val (op)
6626 && is_gimple_reg_type (TREE_TYPE (op))
6627 && is_gimple_reg (get_base_address (op)))
6628 {
6629 tree tem = create_tmp_reg (TREE_TYPE (op));
6630 tree ass;
6631 if (is_inout)
6632 {
6633 ass = build2 (MODIFY_EXPR, TREE_TYPE (tem),
6634 tem, unshare_expr (op));
6635 gimplify_and_add (ass, pre_p);
6636 }
6637 ass = build2 (MODIFY_EXPR, TREE_TYPE (tem), op, tem);
6638 gimplify_and_add (ass, post_p);
6639
6640 TREE_VALUE (link) = tem;
6641 tret = GS_OK;
6642 }
6643 }
6644
6645 vec_safe_push (outputs, link);
6646 TREE_CHAIN (link) = NULL_TREE;
6647
6648 if (is_inout)
6649 {
6650 /* An input/output operand. To give the optimizers more
6651 flexibility, split it into separate input and output
6652 operands. */
6653 tree input;
6654 /* Buffer big enough to format a 32-bit UINT_MAX into. */
6655 char buf[11];
6656
6657 /* Turn the in/out constraint into an output constraint. */
6658 char *p = xstrdup (constraint);
6659 p[0] = '=';
6660 TREE_VALUE (TREE_PURPOSE (link)) = build_string (constraint_len, p);
6661
6662 /* And add a matching input constraint. */
6663 if (allows_reg)
6664 {
6665 sprintf (buf, "%u", i);
6666
6667 /* If there are multiple alternatives in the constraint,
6668 handle each of them individually. Those that allow register
6669 will be replaced with operand number, the others will stay
6670 unchanged. */
6671 if (strchr (p, ',') != NULL)
6672 {
6673 size_t len = 0, buflen = strlen (buf);
6674 char *beg, *end, *str, *dst;
6675
6676 for (beg = p + 1;;)
6677 {
6678 end = strchr (beg, ',');
6679 if (end == NULL)
6680 end = strchr (beg, '\0');
6681 if ((size_t) (end - beg) < buflen)
6682 len += buflen + 1;
6683 else
6684 len += end - beg + 1;
6685 if (*end)
6686 beg = end + 1;
6687 else
6688 break;
6689 }
6690
6691 str = (char *) alloca (len);
6692 for (beg = p + 1, dst = str;;)
6693 {
6694 const char *tem;
6695 bool mem_p, reg_p, inout_p;
6696
6697 end = strchr (beg, ',');
6698 if (end)
6699 *end = '\0';
6700 beg[-1] = '=';
6701 tem = beg - 1;
6702 parse_output_constraint (&tem, i, 0, 0,
6703 &mem_p, &reg_p, &inout_p);
6704 if (dst != str)
6705 *dst++ = ',';
6706 if (reg_p)
6707 {
6708 memcpy (dst, buf, buflen);
6709 dst += buflen;
6710 }
6711 else
6712 {
6713 if (end)
6714 len = end - beg;
6715 else
6716 len = strlen (beg);
6717 memcpy (dst, beg, len);
6718 dst += len;
6719 }
6720 if (end)
6721 beg = end + 1;
6722 else
6723 break;
6724 }
6725 *dst = '\0';
6726 input = build_string (dst - str, str);
6727 }
6728 else
6729 input = build_string (strlen (buf), buf);
6730 }
6731 else
6732 input = build_string (constraint_len - 1, constraint + 1);
6733
6734 free (p);
6735
6736 input = build_tree_list (build_tree_list (NULL_TREE, input),
6737 unshare_expr (TREE_VALUE (link)));
6738 ASM_INPUTS (expr) = chainon (ASM_INPUTS (expr), input);
6739 }
6740 }
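
/* e.g. (sketch): an in/out operand written '"+r" (x)' leaves the loop
   above split into the output '"=r" (x)' plus a matching input '"0" (x)'
   appended to ASM_INPUTS, 0 being the output's operand number.  */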
6741
6742 link_next = NULL_TREE;
6743 for (link = ASM_INPUTS (expr); link; ++i, link = link_next)
6744 {
6745 link_next = TREE_CHAIN (link);
6746 constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
6747 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
6748 oconstraints, &allows_mem, &allows_reg);
6749
6750 /* If we can't make copies, we can only accept memory. */
6751 tree intype = TREE_TYPE (TREE_VALUE (link));
6752 if (intype != error_mark_node
6753 && (TREE_ADDRESSABLE (intype)
6754 || !COMPLETE_TYPE_P (intype)
6755 || !tree_fits_poly_uint64_p (TYPE_SIZE_UNIT (intype))))
6756 {
6757 if (allows_mem)
6758 allows_reg = 0;
6759 else
6760 {
6761 error ("impossible constraint in %<asm%>");
6762 error ("non-memory input %d must stay in memory", i);
6763 return GS_ERROR;
6764 }
6765 }
6766
6767 /* If the operand is a memory input, it should be an lvalue. */
6768 if (!allows_reg && allows_mem)
6769 {
6770 tree inputv = TREE_VALUE (link);
6771 STRIP_NOPS (inputv);
6772 if (TREE_CODE (inputv) == PREDECREMENT_EXPR
6773 || TREE_CODE (inputv) == PREINCREMENT_EXPR
6774 || TREE_CODE (inputv) == POSTDECREMENT_EXPR
6775 || TREE_CODE (inputv) == POSTINCREMENT_EXPR
6776 || TREE_CODE (inputv) == MODIFY_EXPR)
6777 TREE_VALUE (link) = error_mark_node;
6778 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
6779 is_gimple_lvalue, fb_lvalue | fb_mayfail);
6780 if (tret != GS_ERROR)
6781 {
6782 /* Unlike output operands, memory inputs are not guaranteed
6783 to be lvalues by the FE, and while the expressions are
6784 marked addressable there, if it is e.g. a statement
6785 expression, temporaries in it might not end up being
6786 addressable. They might already be used in the IL and thus
6787 it is too late to make them addressable now. */
6788 tree x = TREE_VALUE (link);
6789 while (handled_component_p (x))
6790 x = TREE_OPERAND (x, 0);
6791 if (TREE_CODE (x) == MEM_REF
6792 && TREE_CODE (TREE_OPERAND (x, 0)) == ADDR_EXPR)
6793 x = TREE_OPERAND (TREE_OPERAND (x, 0), 0);
6794 if ((VAR_P (x)
6795 || TREE_CODE (x) == PARM_DECL
6796 || TREE_CODE (x) == RESULT_DECL)
6797 && !TREE_ADDRESSABLE (x)
6798 && is_gimple_reg (x))
6799 {
6800 warning_at (EXPR_LOC_OR_LOC (TREE_VALUE (link),
6801 input_location), 0,
6802 "memory input %d is not directly addressable",
6803 i);
6804 prepare_gimple_addressable (&TREE_VALUE (link), pre_p);
6805 }
6806 }
6807 mark_addressable (TREE_VALUE (link));
6808 if (tret == GS_ERROR)
6809 {
6810 if (inputv != error_mark_node)
6811 error_at (EXPR_LOC_OR_LOC (TREE_VALUE (link), input_location),
6812 "memory input %d is not directly addressable", i);
6813 ret = tret;
6814 }
6815 }
6816 else
6817 {
6818 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
6819 is_gimple_asm_val, fb_rvalue);
6820 if (tret == GS_ERROR)
6821 ret = tret;
6822 }
6823
6824 TREE_CHAIN (link) = NULL_TREE;
6825 vec_safe_push (inputs, link);
6826 }
6827
6828 link_next = NULL_TREE;
6829 for (link = ASM_CLOBBERS (expr); link; ++i, link = link_next)
6830 {
6831 link_next = TREE_CHAIN (link);
6832 TREE_CHAIN (link) = NULL_TREE;
6833 vec_safe_push (clobbers, link);
6834 }
6835
6836 link_next = NULL_TREE;
6837 for (link = ASM_LABELS (expr); link; ++i, link = link_next)
6838 {
6839 link_next = TREE_CHAIN (link);
6840 TREE_CHAIN (link) = NULL_TREE;
6841 vec_safe_push (labels, link);
6842 }
6843
6844 /* Do not add ASMs with errors to the gimple IL stream. */
6845 if (ret != GS_ERROR)
6846 {
6847 stmt = gimple_build_asm_vec (TREE_STRING_POINTER (ASM_STRING (expr)),
6848 inputs, outputs, clobbers, labels);
6849
6850 gimple_asm_set_volatile (stmt, ASM_VOLATILE_P (expr) || noutputs == 0);
6851 gimple_asm_set_input (stmt, ASM_INPUT_P (expr));
6852 gimple_asm_set_inline (stmt, ASM_INLINE_P (expr));
6853
6854 gimplify_seq_add_stmt (pre_p, stmt);
6855 }
6856
6857 return ret;
6858 }
6859
6860 /* Gimplify a CLEANUP_POINT_EXPR. Currently this works by adding
6861 GIMPLE_WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
6862 gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
6863 return to this function.
6864
6865 FIXME should we complexify the prequeue handling instead? Or use flags
6866 for all the cleanups and let the optimizer tighten them up? The current
6867 code seems pretty fragile; it will break on a cleanup within any
6868 non-conditional nesting. But any such nesting would be broken, anyway;
6869 we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
6870 and continues out of it. We can do that at the RTL level, though, so
6871 having an optimizer to tighten up try/finally regions would be a Good
6872 Thing. */
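/* As an illustrative sketch: a gimplified body sequence of the form

     stmt1; WCE <cleanup>; stmt2;

   is rewritten below to

     stmt1; try { stmt2; } finally { cleanup; }

   while a WCE that ends the sequence simply has its cleanup spliced
   in its place (or dropped entirely when it is EH-only).  */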
6873
6874 static enum gimplify_status
6875 gimplify_cleanup_point_expr (tree *expr_p, gimple_seq *pre_p)
6876 {
6877 gimple_stmt_iterator iter;
6878 gimple_seq body_sequence = NULL;
6879
6880 tree temp = voidify_wrapper_expr (*expr_p, NULL);
6881
6882 /* We only care about the number of conditions between the innermost
6883 CLEANUP_POINT_EXPR and the cleanup. So save and reset the count and
6884 any cleanups collected outside the CLEANUP_POINT_EXPR. */
6885 int old_conds = gimplify_ctxp->conditions;
6886 gimple_seq old_cleanups = gimplify_ctxp->conditional_cleanups;
6887 bool old_in_cleanup_point_expr = gimplify_ctxp->in_cleanup_point_expr;
6888 gimplify_ctxp->conditions = 0;
6889 gimplify_ctxp->conditional_cleanups = NULL;
6890 gimplify_ctxp->in_cleanup_point_expr = true;
6891
6892 gimplify_stmt (&TREE_OPERAND (*expr_p, 0), &body_sequence);
6893
6894 gimplify_ctxp->conditions = old_conds;
6895 gimplify_ctxp->conditional_cleanups = old_cleanups;
6896 gimplify_ctxp->in_cleanup_point_expr = old_in_cleanup_point_expr;
6897
6898 for (iter = gsi_start (body_sequence); !gsi_end_p (iter); )
6899 {
6900 gimple *wce = gsi_stmt (iter);
6901
6902 if (gimple_code (wce) == GIMPLE_WITH_CLEANUP_EXPR)
6903 {
6904 if (gsi_one_before_end_p (iter))
6905 {
6906 /* Note that gsi_insert_seq_before and gsi_remove do not
6907 scan operands, unlike some other sequence mutators. */
6908 if (!gimple_wce_cleanup_eh_only (wce))
6909 gsi_insert_seq_before_without_update (&iter,
6910 gimple_wce_cleanup (wce),
6911 GSI_SAME_STMT);
6912 gsi_remove (&iter, true);
6913 break;
6914 }
6915 else
6916 {
6917 gtry *gtry;
6918 gimple_seq seq;
6919 enum gimple_try_flags kind;
6920
6921 if (gimple_wce_cleanup_eh_only (wce))
6922 kind = GIMPLE_TRY_CATCH;
6923 else
6924 kind = GIMPLE_TRY_FINALLY;
6925 seq = gsi_split_seq_after (iter);
6926
6927 gtry = gimple_build_try (seq, gimple_wce_cleanup (wce), kind);
6928 /* Do not use gsi_replace here, as it may scan operands.
6929 We want to do a simple structural modification only. */
6930 gsi_set_stmt (&iter, gtry);
6931 iter = gsi_start (gtry->eval);
6932 }
6933 }
6934 else
6935 gsi_next (&iter);
6936 }
6937
6938 gimplify_seq_add_seq (pre_p, body_sequence);
6939 if (temp)
6940 {
6941 *expr_p = temp;
6942 return GS_OK;
6943 }
6944 else
6945 {
6946 *expr_p = NULL;
6947 return GS_ALL_DONE;
6948 }
6949 }
6950
6951 /* Insert a cleanup marker for gimplify_cleanup_point_expr. CLEANUP
6952 is the cleanup action required. EH_ONLY is true if the cleanup should
6953 only be executed if an exception is thrown, not on normal exit.
6954 If FORCE_UNCOND is true perform the cleanup unconditionally; this is
6955 only valid for clobbers. */
6956
6957 static void
6958 gimple_push_cleanup (tree var, tree cleanup, bool eh_only, gimple_seq *pre_p,
6959 bool force_uncond = false)
6960 {
6961 gimple *wce;
6962 gimple_seq cleanup_stmts = NULL;
6963
6964 /* Errors can result in improperly nested cleanups, which causes
6965 confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR. */
6966 if (seen_error ())
6967 return;
6968
6969 if (gimple_conditional_context ())
6970 {
6971 /* If we're in a conditional context, this is more complex. We only
6972 want to run the cleanup if we actually ran the initialization that
6973 necessitates it, but we want to run it after the end of the
6974 conditional context. So we wrap the try/finally around the
6975 condition and use a flag to determine whether or not to actually
6976 run the destructor. Thus
6977
6978 test ? f(A()) : 0
6979
6980 becomes (approximately)
6981
6982 flag = 0;
6983 try {
6984 if (test) { A::A(temp); flag = 1; val = f(temp); }
6985 else { val = 0; }
6986 } finally {
6987 if (flag) A::~A(temp);
6988 }
6989 val
6990 */
6991 if (force_uncond)
6992 {
6993 gimplify_stmt (&cleanup, &cleanup_stmts);
6994 wce = gimple_build_wce (cleanup_stmts);
6995 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
6996 }
6997 else
6998 {
6999 tree flag = create_tmp_var (boolean_type_node, "cleanup");
7000 gassign *ffalse = gimple_build_assign (flag, boolean_false_node);
7001 gassign *ftrue = gimple_build_assign (flag, boolean_true_node);
7002
7003 cleanup = build3 (COND_EXPR, void_type_node, flag, cleanup, NULL);
7004 gimplify_stmt (&cleanup, &cleanup_stmts);
7005 wce = gimple_build_wce (cleanup_stmts);
7006 gimple_wce_set_cleanup_eh_only (wce, eh_only);
7007
7008 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, ffalse);
7009 gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
7010 gimplify_seq_add_stmt (pre_p, ftrue);
7011
7012 /* Because of this manipulation, and the EH edges that jump
7013 threading cannot redirect, the temporary (VAR) will appear
7014 to be used uninitialized. Don't warn. */
7015 suppress_warning (var, OPT_Wuninitialized);
7016 }
7017 }
7018 else
7019 {
7020 gimplify_stmt (&cleanup, &cleanup_stmts);
7021 wce = gimple_build_wce (cleanup_stmts);
7022 gimple_wce_set_cleanup_eh_only (wce, eh_only);
7023 gimplify_seq_add_stmt (pre_p, wce);
7024 }
7025 }
7026
7027 /* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR. */
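/* Illustrative sketch, with D.1234 and f standing in for an arbitrary
   slot and initializer: TARGET_EXPR <D.1234, f ()> is lowered to the
   statement "D.1234 = f ();" emitted to *PRE_P, and the expression
   itself is then replaced by the slot D.1234.  */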
7028
7029 static enum gimplify_status
7030 gimplify_target_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
7031 {
7032 tree targ = *expr_p;
7033 tree temp = TARGET_EXPR_SLOT (targ);
7034 tree init = TARGET_EXPR_INITIAL (targ);
7035 enum gimplify_status ret;
7036
7037 bool unpoison_empty_seq = false;
7038 gimple_stmt_iterator unpoison_it;
7039
7040 if (init)
7041 {
7042 gimple_seq init_pre_p = NULL;
7043
7044 /* TARGET_EXPR temps aren't part of the enclosing block, so add the
7045 temp to the temps list. Also handle variable length TARGET_EXPRs. */
7046 if (!poly_int_tree_p (DECL_SIZE (temp)))
7047 {
7048 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (temp)))
7049 gimplify_type_sizes (TREE_TYPE (temp), &init_pre_p);
7050 /* FIXME: this is correct only when the size of the type does
7051 not depend on expressions evaluated in init. */
7052 gimplify_vla_decl (temp, &init_pre_p);
7053 }
7054 else
7055 {
7056 /* Save the location where we need to place unpoisoning. It's possible
7057 that the variable will later satisfy needs_to_live_in_memory. */
7058 unpoison_it = gsi_last (*pre_p);
7059 unpoison_empty_seq = gsi_end_p (unpoison_it);
7060
7061 gimple_add_tmp_var (temp);
7062 }
7063
7064 /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
7065 expression is supposed to initialize the slot. */
7066 if (VOID_TYPE_P (TREE_TYPE (init)))
7067 ret = gimplify_expr (&init, &init_pre_p, post_p, is_gimple_stmt,
7068 fb_none);
7069 else
7070 {
7071 tree init_expr = build2 (INIT_EXPR, void_type_node, temp, init);
7072 init = init_expr;
7073 ret = gimplify_expr (&init, &init_pre_p, post_p, is_gimple_stmt,
7074 fb_none);
7075 init = NULL;
7076 ggc_free (init_expr);
7077 }
7078 if (ret == GS_ERROR)
7079 {
7080 /* PR c++/28266 Make sure this is expanded only once. */
7081 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
7082 return GS_ERROR;
7083 }
7084
7085 if (init)
7086 gimplify_and_add (init, &init_pre_p);
7087
7088 /* Add a clobber for the temporary going out of scope, like
7089 gimplify_bind_expr. */
7090 if (gimplify_ctxp->in_cleanup_point_expr
7091 && needs_to_live_in_memory (temp))
7092 {
7093 if (flag_stack_reuse == SR_ALL)
7094 {
7095 tree clobber = build_clobber (TREE_TYPE (temp), CLOBBER_EOL);
7096 clobber = build2 (MODIFY_EXPR, TREE_TYPE (temp), temp, clobber);
7097 gimple_push_cleanup (temp, clobber, false, pre_p, true);
7098 }
7099 if (asan_poisoned_variables
7100 && DECL_ALIGN (temp) <= MAX_SUPPORTED_STACK_ALIGNMENT
7101 && !TREE_STATIC (temp)
7102 && dbg_cnt (asan_use_after_scope)
7103 && !gimplify_omp_ctxp)
7104 {
7105 tree asan_cleanup = build_asan_poison_call_expr (temp);
7106 if (asan_cleanup)
7107 {
7108 if (unpoison_empty_seq)
7109 unpoison_it = gsi_start (*pre_p);
7110
7111 asan_poison_variable (temp, false, &unpoison_it,
7112 unpoison_empty_seq);
7113 gimple_push_cleanup (temp, asan_cleanup, false, pre_p);
7114 }
7115 }
7116 }
7117
7118 gimple_seq_add_seq (pre_p, init_pre_p);
7119
7120 /* If needed, push the cleanup for the temp. */
7121 if (TARGET_EXPR_CLEANUP (targ))
7122 gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ),
7123 CLEANUP_EH_ONLY (targ), pre_p);
7124
7125 /* Only expand this once. */
7126 TREE_OPERAND (targ, 3) = init;
7127 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
7128 }
7129 else
7130 /* We should have expanded this before. */
7131 gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp));
7132
7133 *expr_p = temp;
7134 return GS_OK;
7135 }
7136
7137 /* Gimplification of expression trees. */
7138
7139 /* Gimplify an expression which appears at statement context. The
7140 corresponding GIMPLE statements are added to *SEQ_P. If *SEQ_P is
7141 NULL, a new sequence is allocated.
7142
7143 Return true if we actually added a statement to the queue. */
7144
7145 bool
7146 gimplify_stmt (tree *stmt_p, gimple_seq *seq_p)
7147 {
7148 gimple_seq_node last;
7149
7150 last = gimple_seq_last (*seq_p);
7151 gimplify_expr (stmt_p, seq_p, NULL, is_gimple_stmt, fb_none);
7152 return last != gimple_seq_last (*seq_p);
7153 }
7154
7155 /* Add FIRSTPRIVATE entries for DECL in the surrounding OpenMP parallels,
7156 to CTX. If entries already exist, force them to be some flavor of private.
7157 If there is no enclosing parallel, do nothing. */
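/* Illustrative example: for a VLA "int a[n]" referenced in a parallel
   region, the temporaries holding its DECL_SIZE / TYPE_SIZE_UNIT (and
   the saved "n") must be made firstprivate here so the size is
   available inside the region; on target regions the entry may instead
   become a to-only map, as the code below shows.  */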
7158
7159 void
7160 omp_firstprivatize_variable (struct gimplify_omp_ctx *ctx, tree decl)
7161 {
7162 splay_tree_node n;
7163
7164 if (decl == NULL || !DECL_P (decl) || ctx->region_type == ORT_NONE)
7165 return;
7166
7167 do
7168 {
7169 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7170 if (n != NULL)
7171 {
7172 if (n->value & GOVD_SHARED)
7173 n->value = GOVD_FIRSTPRIVATE | (n->value & GOVD_SEEN);
7174 else if (n->value & GOVD_MAP)
7175 n->value |= GOVD_MAP_TO_ONLY;
7176 else
7177 return;
7178 }
7179 else if ((ctx->region_type & ORT_TARGET) != 0)
7180 {
7181 if (ctx->defaultmap[GDMK_SCALAR] & GOVD_FIRSTPRIVATE)
7182 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
7183 else
7184 omp_add_variable (ctx, decl, GOVD_MAP | GOVD_MAP_TO_ONLY);
7185 }
7186 else if (ctx->region_type != ORT_WORKSHARE
7187 && ctx->region_type != ORT_TASKGROUP
7188 && ctx->region_type != ORT_SIMD
7189 && ctx->region_type != ORT_ACC
7190 && !(ctx->region_type & ORT_TARGET_DATA))
7191 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
7192
7193 ctx = ctx->outer_context;
7194 }
7195 while (ctx);
7196 }
7197
7198 /* Similarly for each of the type sizes of TYPE. */
7199
7200 static void
7201 omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
7202 {
7203 if (type == NULL || type == error_mark_node)
7204 return;
7205 type = TYPE_MAIN_VARIANT (type);
7206
7207 if (ctx->privatized_types->add (type))
7208 return;
7209
7210 switch (TREE_CODE (type))
7211 {
7212 case INTEGER_TYPE:
7213 case ENUMERAL_TYPE:
7214 case BOOLEAN_TYPE:
7215 case REAL_TYPE:
7216 case FIXED_POINT_TYPE:
7217 omp_firstprivatize_variable (ctx, TYPE_MIN_VALUE (type));
7218 omp_firstprivatize_variable (ctx, TYPE_MAX_VALUE (type));
7219 break;
7220
7221 case ARRAY_TYPE:
7222 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
7223 omp_firstprivatize_type_sizes (ctx, TYPE_DOMAIN (type));
7224 break;
7225
7226 case RECORD_TYPE:
7227 case UNION_TYPE:
7228 case QUAL_UNION_TYPE:
7229 {
7230 tree field;
7231 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
7232 if (TREE_CODE (field) == FIELD_DECL)
7233 {
7234 omp_firstprivatize_variable (ctx, DECL_FIELD_OFFSET (field));
7235 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (field));
7236 }
7237 }
7238 break;
7239
7240 case POINTER_TYPE:
7241 case REFERENCE_TYPE:
7242 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
7243 break;
7244
7245 default:
7246 break;
7247 }
7248
7249 omp_firstprivatize_variable (ctx, TYPE_SIZE (type));
7250 omp_firstprivatize_variable (ctx, TYPE_SIZE_UNIT (type));
7251 lang_hooks.types.omp_firstprivatize_type_sizes (ctx, type);
7252 }
7253
7254 /* Add an entry for DECL in the OMP context CTX with FLAGS. */
7255
7256 static void
7257 omp_add_variable (struct gimplify_omp_ctx *ctx, tree decl, unsigned int flags)
7258 {
7259 splay_tree_node n;
7260 unsigned int nflags;
7261 tree t;
7262
7263 if (error_operand_p (decl) || ctx->region_type == ORT_NONE)
7264 return;
7265
7266 /* Never elide decls whose type has TREE_ADDRESSABLE set. This means
7267 there are constructors involved somewhere. The exception is a shared
7268 clause: there is nothing privatized in that case. */
7269 if ((flags & GOVD_SHARED) == 0
7270 && (TREE_ADDRESSABLE (TREE_TYPE (decl))
7271 || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl))))
7272 flags |= GOVD_SEEN;
7273
7274 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7275 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
7276 {
7277 /* We shouldn't be re-adding the decl with the same data
7278 sharing class. */
7279 gcc_assert ((n->value & GOVD_DATA_SHARE_CLASS & flags) == 0);
7280 nflags = n->value | flags;
7281 /* The only combination of data sharing classes we should see is
7282 FIRSTPRIVATE and LASTPRIVATE. However, OpenACC permits
7283 reduction variables to be used in data sharing clauses. */
7284 gcc_assert ((ctx->region_type & ORT_ACC) != 0
7285 || ((nflags & GOVD_DATA_SHARE_CLASS)
7286 == (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE))
7287 || (flags & GOVD_DATA_SHARE_CLASS) == 0);
7288 n->value = nflags;
7289 return;
7290 }
7291
7292 /* When adding a variable-sized variable, we have to handle all sorts
7293 of additional bits of data: the pointer replacement variable, and
7294 the parameters of the type. */
7295 if (DECL_SIZE (decl) && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
7296 {
7297 /* Add the pointer replacement variable as PRIVATE if the variable
7298 replacement is private, else FIRSTPRIVATE since we'll need the
7299 address of the original variable either for SHARED, or for the
7300 copy into or out of the context. */
7301 if (!(flags & GOVD_LOCAL) && ctx->region_type != ORT_TASKGROUP)
7302 {
7303 if (flags & GOVD_MAP)
7304 nflags = GOVD_MAP | GOVD_MAP_TO_ONLY | GOVD_EXPLICIT;
7305 else if (flags & GOVD_PRIVATE)
7306 nflags = GOVD_PRIVATE;
7307 else if (((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
7308 && (flags & GOVD_FIRSTPRIVATE))
7309 || (ctx->region_type == ORT_TARGET_DATA
7310 && (flags & GOVD_DATA_SHARE_CLASS) == 0))
7311 nflags = GOVD_PRIVATE | GOVD_EXPLICIT;
7312 else
7313 nflags = GOVD_FIRSTPRIVATE;
7314 nflags |= flags & GOVD_SEEN;
7315 t = DECL_VALUE_EXPR (decl);
7316 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
7317 t = TREE_OPERAND (t, 0);
7318 gcc_assert (DECL_P (t));
7319 omp_add_variable (ctx, t, nflags);
7320 }
7321
7322 /* Add all of the variable and type parameters (which should have
7323 been gimplified to a formal temporary) as FIRSTPRIVATE. */
7324 omp_firstprivatize_variable (ctx, DECL_SIZE_UNIT (decl));
7325 omp_firstprivatize_variable (ctx, DECL_SIZE (decl));
7326 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
7327
7328 /* The variable-sized variable itself is never SHARED, only some form
7329 of PRIVATE. The sharing would take place via the pointer variable
7330 which we remapped above. */
7331 if (flags & GOVD_SHARED)
7332 flags = GOVD_SHARED | GOVD_DEBUG_PRIVATE
7333 | (flags & (GOVD_SEEN | GOVD_EXPLICIT));
7334
7335 /* We're going to make use of the TYPE_SIZE_UNIT at least in the
7336 alloca statement we generate for the variable, so make sure it
7337 is available. This isn't automatically needed for the SHARED
7338 case, since we won't be allocating local storage then.
7339 For local variables TYPE_SIZE_UNIT might not be gimplified yet,
7340 in this case omp_notice_variable will be called later
7341 on when it is gimplified. */
7342 else if (! (flags & (GOVD_LOCAL | GOVD_MAP))
7343 && DECL_P (TYPE_SIZE_UNIT (TREE_TYPE (decl))))
7344 omp_notice_variable (ctx, TYPE_SIZE_UNIT (TREE_TYPE (decl)), true);
7345 }
7346 else if ((flags & (GOVD_MAP | GOVD_LOCAL)) == 0
7347 && omp_privatize_by_reference (decl))
7348 {
7349 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
7350
7351 /* Similar to the direct variable sized case above, we'll need the
7352 size of references being privatized. */
7353 if ((flags & GOVD_SHARED) == 0)
7354 {
7355 t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
7356 if (t && DECL_P (t))
7357 omp_notice_variable (ctx, t, true);
7358 }
7359 }
7360
7361 if (n != NULL)
7362 n->value |= flags;
7363 else
7364 splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags);
7365
7366 /* For reduction clauses in OpenACC loop directives, by default create a
7367 copy clause on the enclosing parallel construct for carrying back the
7368 results. */
7369 if (ctx->region_type == ORT_ACC && (flags & GOVD_REDUCTION))
7370 {
7371 struct gimplify_omp_ctx *outer_ctx = ctx->outer_context;
7372 while (outer_ctx)
7373 {
7374 n = splay_tree_lookup (outer_ctx->variables, (splay_tree_key)decl);
7375 if (n != NULL)
7376 {
7377 /* Ignore local variables and explicitly declared clauses. */
7378 if (n->value & (GOVD_LOCAL | GOVD_EXPLICIT))
7379 break;
7380 else if (outer_ctx->region_type == ORT_ACC_KERNELS)
7381 {
7382 /* According to the OpenACC spec, such a reduction variable
7383 should already have a copy map on a kernels construct;
7384 verify that here. */
7385 gcc_assert (!(n->value & GOVD_FIRSTPRIVATE)
7386 && (n->value & GOVD_MAP));
7387 }
7388 else if (outer_ctx->region_type == ORT_ACC_PARALLEL)
7389 {
7390 /* Remove firstprivate and make it a copy map. */
7391 n->value &= ~GOVD_FIRSTPRIVATE;
7392 n->value |= GOVD_MAP;
7393 }
7394 }
7395 else if (outer_ctx->region_type == ORT_ACC_PARALLEL)
7396 {
7397 splay_tree_insert (outer_ctx->variables, (splay_tree_key)decl,
7398 GOVD_MAP | GOVD_SEEN);
7399 break;
7400 }
7401 outer_ctx = outer_ctx->outer_context;
7402 }
7403 }
7404 }
7405
7406 /* Notice a threadprivate variable DECL used in OMP context CTX.
7407 This just prints out diagnostics about threadprivate variable uses
7408 in untied tasks, target regions and order(concurrent) regions.
7409 If DECL2 is non-NULL, prevent this warning on that variable. */
7410
7411 static bool
7412 omp_notice_threadprivate_variable (struct gimplify_omp_ctx *ctx, tree decl,
7413 tree decl2)
7414 {
7415 splay_tree_node n;
7416 struct gimplify_omp_ctx *octx;
7417
7418 for (octx = ctx; octx; octx = octx->outer_context)
7419 if ((octx->region_type & ORT_TARGET) != 0
7420 || octx->order_concurrent)
7421 {
7422 n = splay_tree_lookup (octx->variables, (splay_tree_key)decl);
7423 if (n == NULL)
7424 {
7425 if (octx->order_concurrent)
7426 {
7427 error ("threadprivate variable %qE used in a region with"
7428 " %<order(concurrent)%> clause", DECL_NAME (decl));
7429 inform (octx->location, "enclosing region");
7430 }
7431 else
7432 {
7433 error ("threadprivate variable %qE used in target region",
7434 DECL_NAME (decl));
7435 inform (octx->location, "enclosing target region");
7436 }
7437 splay_tree_insert (octx->variables, (splay_tree_key)decl, 0);
7438 }
7439 if (decl2)
7440 splay_tree_insert (octx->variables, (splay_tree_key)decl2, 0);
7441 }
7442
7443 if (ctx->region_type != ORT_UNTIED_TASK)
7444 return false;
7445 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7446 if (n == NULL)
7447 {
7448 error ("threadprivate variable %qE used in untied task",
7449 DECL_NAME (decl));
7450 inform (ctx->location, "enclosing task");
7451 splay_tree_insert (ctx->variables, (splay_tree_key)decl, 0);
7452 }
7453 if (decl2)
7454 splay_tree_insert (ctx->variables, (splay_tree_key)decl2, 0);
7455 return false;
7456 }
7457
7458 /* Return true if global var DECL is device resident. */
7459
7460 static bool
7461 device_resident_p (tree decl)
7462 {
7463 tree attr = lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (decl));
7464
7465 if (!attr)
7466 return false;
7467
7468 for (tree t = TREE_VALUE (attr); t; t = TREE_PURPOSE (t))
7469 {
7470 tree c = TREE_VALUE (t);
7471 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DEVICE_RESIDENT)
7472 return true;
7473 }
7474
7475 return false;
7476 }
7477
7478 /* Return true if DECL has an ACC DECLARE attribute. */
7479
7480 static bool
7481 is_oacc_declared (tree decl)
7482 {
7483 tree t = TREE_CODE (decl) == MEM_REF ? TREE_OPERAND (decl, 0) : decl;
7484 tree declared = lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (t));
7485 return declared != NULL_TREE;
7486 }
7487
7488 /* Determine outer default flags for DECL mentioned in an OMP region
7489 but not declared in an enclosing clause.
7490
7491 ??? Some compiler-generated variables (like SAVE_EXPRs) could be
7492 remapped firstprivate instead of shared. To some extent this is
7493 addressed in omp_firstprivatize_type_sizes, but not
7494 effectively. */
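/* Illustrative example: under default(none) any variable referenced in
   the construct but listed in no data-sharing clause is diagnosed in
   the switch below; for a task without a default clause
   (OMP_CLAUSE_DEFAULT_UNSPECIFIED), PARM_DECLs and function-local
   variables fall back to firstprivate and everything else to shared,
   subject to what the outer contexts already determined.  */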
7495
7496 static unsigned
7497 omp_default_clause (struct gimplify_omp_ctx *ctx, tree decl,
7498 bool in_code, unsigned flags)
7499 {
7500 enum omp_clause_default_kind default_kind = ctx->default_kind;
7501 enum omp_clause_default_kind kind;
7502
7503 kind = lang_hooks.decls.omp_predetermined_sharing (decl);
7504 if (ctx->region_type & ORT_TASK)
7505 {
7506 tree detach_clause = omp_find_clause (ctx->clauses, OMP_CLAUSE_DETACH);
7507
7508 /* The event-handle specified by a detach clause should always be firstprivate,
7509 regardless of the current default. */
7510 if (detach_clause && OMP_CLAUSE_DECL (detach_clause) == decl)
7511 kind = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
7512 }
7513 if (kind != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
7514 default_kind = kind;
7515 else if (VAR_P (decl) && TREE_STATIC (decl) && DECL_IN_CONSTANT_POOL (decl))
7516 default_kind = OMP_CLAUSE_DEFAULT_SHARED;
7517 /* For C/C++ default({,first}private), variables with static storage duration
7518 declared in a namespace or global scope and referenced in the construct
7519 must be explicitly specified, i.e. the default acts as default(none). */
7520 else if ((default_kind == OMP_CLAUSE_DEFAULT_PRIVATE
7521 || default_kind == OMP_CLAUSE_DEFAULT_FIRSTPRIVATE)
7522 && VAR_P (decl)
7523 && is_global_var (decl)
7524 && (DECL_FILE_SCOPE_P (decl)
7525 || (DECL_CONTEXT (decl)
7526 && TREE_CODE (DECL_CONTEXT (decl)) == NAMESPACE_DECL))
7527 && !lang_GNU_Fortran ())
7528 default_kind = OMP_CLAUSE_DEFAULT_NONE;
7529
7530 switch (default_kind)
7531 {
7532 case OMP_CLAUSE_DEFAULT_NONE:
7533 {
7534 const char *rtype;
7535
7536 if (ctx->region_type & ORT_PARALLEL)
7537 rtype = "parallel";
7538 else if ((ctx->region_type & ORT_TASKLOOP) == ORT_TASKLOOP)
7539 rtype = "taskloop";
7540 else if (ctx->region_type & ORT_TASK)
7541 rtype = "task";
7542 else if (ctx->region_type & ORT_TEAMS)
7543 rtype = "teams";
7544 else
7545 gcc_unreachable ();
7546
7547 error ("%qE not specified in enclosing %qs",
7548 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rtype);
7549 inform (ctx->location, "enclosing %qs", rtype);
7550 }
7551 /* FALLTHRU */
7552 case OMP_CLAUSE_DEFAULT_SHARED:
7553 flags |= GOVD_SHARED;
7554 break;
7555 case OMP_CLAUSE_DEFAULT_PRIVATE:
7556 flags |= GOVD_PRIVATE;
7557 break;
7558 case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE:
7559 flags |= GOVD_FIRSTPRIVATE;
7560 break;
7561 case OMP_CLAUSE_DEFAULT_UNSPECIFIED:
7562 /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED. */
7563 gcc_assert ((ctx->region_type & ORT_TASK) != 0);
7564 if (struct gimplify_omp_ctx *octx = ctx->outer_context)
7565 {
7566 omp_notice_variable (octx, decl, in_code);
7567 for (; octx; octx = octx->outer_context)
7568 {
7569 splay_tree_node n2;
7570
7571 n2 = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
7572 if ((octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)) != 0
7573 && (n2 == NULL || (n2->value & GOVD_DATA_SHARE_CLASS) == 0))
7574 continue;
7575 if (n2 && (n2->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED)
7576 {
7577 flags |= GOVD_FIRSTPRIVATE;
7578 goto found_outer;
7579 }
7580 if ((octx->region_type & (ORT_PARALLEL | ORT_TEAMS)) != 0)
7581 {
7582 flags |= GOVD_SHARED;
7583 goto found_outer;
7584 }
7585 }
7586 }
7587
7588 if (TREE_CODE (decl) == PARM_DECL
7589 || (!is_global_var (decl)
7590 && DECL_CONTEXT (decl) == current_function_decl))
7591 flags |= GOVD_FIRSTPRIVATE;
7592 else
7593 flags |= GOVD_SHARED;
7594 found_outer:
7595 break;
7596
7597 default:
7598 gcc_unreachable ();
7599 }
7600
7601 return flags;
7602 }
7603
7604
7605 /* Determine outer default flags for DECL mentioned in an OACC region
7606 but not declared in an enclosing clause. */
7607
7608 static unsigned
7609 oacc_default_clause (struct gimplify_omp_ctx *ctx, tree decl, unsigned flags)
7610 {
7611 const char *rkind;
7612 bool on_device = false;
7613 bool is_private = false;
7614 bool declared = is_oacc_declared (decl);
7615 tree type = TREE_TYPE (decl);
7616
7617 if (omp_privatize_by_reference (decl))
7618 type = TREE_TYPE (type);
7619
7620 /* For Fortran COMMON blocks, only used variables in those blocks are
7621 transferred and remapped. The block itself will have a private clause to
7622 avoid transferring the data twice.
7623 The hook evaluates to false by default. For a variable in Fortran's COMMON
7624 or EQUIVALENCE block, it returns 'true' (as we have shared=false) - as only
7625 the variables in such a COMMON/EQUIVALENCE block shall be privatized, not
7626 the whole block. For C++ and Fortran, it can also be true under certain
7627 other conditions, if DECL_HAS_VALUE_EXPR. */
7628 if (RECORD_OR_UNION_TYPE_P (type))
7629 is_private = lang_hooks.decls.omp_disregard_value_expr (decl, false);
7630
7631 if ((ctx->region_type & (ORT_ACC_PARALLEL | ORT_ACC_KERNELS)) != 0
7632 && is_global_var (decl)
7633 && device_resident_p (decl)
7634 && !is_private)
7635 {
7636 on_device = true;
7637 flags |= GOVD_MAP_TO_ONLY;
7638 }
7639
7640 switch (ctx->region_type)
7641 {
7642 case ORT_ACC_KERNELS:
7643 rkind = "kernels";
7644
7645 if (is_private)
7646 flags |= GOVD_FIRSTPRIVATE;
7647 else if (AGGREGATE_TYPE_P (type))
7648 {
7649 /* Aggregates default to 'present_or_copy', or 'present'. */
7650 if (ctx->default_kind != OMP_CLAUSE_DEFAULT_PRESENT)
7651 flags |= GOVD_MAP;
7652 else
7653 flags |= GOVD_MAP | GOVD_MAP_FORCE_PRESENT;
7654 }
7655 else
7656 /* Scalars default to 'copy'. */
7657 flags |= GOVD_MAP | GOVD_MAP_FORCE;
7658
7659 break;
7660
7661 case ORT_ACC_PARALLEL:
7662 case ORT_ACC_SERIAL:
7663 rkind = ctx->region_type == ORT_ACC_PARALLEL ? "parallel" : "serial";
7664
7665 if (is_private)
7666 flags |= GOVD_FIRSTPRIVATE;
7667 else if (on_device || declared)
7668 flags |= GOVD_MAP;
7669 else if (AGGREGATE_TYPE_P (type))
7670 {
7671 /* Aggregates default to 'present_or_copy', or 'present'. */
7672 if (ctx->default_kind != OMP_CLAUSE_DEFAULT_PRESENT)
7673 flags |= GOVD_MAP;
7674 else
7675 flags |= GOVD_MAP | GOVD_MAP_FORCE_PRESENT;
7676 }
7677 else
7678 /* Scalars default to 'firstprivate'. */
7679 flags |= GOVD_FIRSTPRIVATE;
7680
7681 break;
7682
7683 default:
7684 gcc_unreachable ();
7685 }
7686
7687 if (DECL_ARTIFICIAL (decl))
7688 ; /* We can get compiler-generated decls, and should not complain
7689 about them. */
7690 else if (ctx->default_kind == OMP_CLAUSE_DEFAULT_NONE)
7691 {
7692 error ("%qE not specified in enclosing OpenACC %qs construct",
7693 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rkind);
7694 inform (ctx->location, "enclosing OpenACC %qs construct", rkind);
7695 }
7696 else if (ctx->default_kind == OMP_CLAUSE_DEFAULT_PRESENT)
7697 ; /* Handled above. */
7698 else
7699 gcc_checking_assert (ctx->default_kind == OMP_CLAUSE_DEFAULT_SHARED);
7700
7701 return flags;
7702 }
7703
7704 /* Record the fact that DECL was used within the OMP context CTX.
7705 IN_CODE is true when real code uses DECL, and false when we should
7706 merely emit default(none) errors. Return true if DECL is going to
7707 be remapped and thus DECL shouldn't be gimplified into its
7708 DECL_VALUE_EXPR (if any). */
7709
7710 static bool
7711 omp_notice_variable (struct gimplify_omp_ctx *ctx, tree decl, bool in_code)
7712 {
7713 splay_tree_node n;
7714 unsigned flags = in_code ? GOVD_SEEN : 0;
7715 bool ret = false, shared;
7716
7717 if (error_operand_p (decl))
7718 return false;
7719
7720 if (ctx->region_type == ORT_NONE)
7721 return lang_hooks.decls.omp_disregard_value_expr (decl, false);
7722
7723 if (is_global_var (decl))
7724 {
7725 /* Threadprivate variables are predetermined. */
7726 if (DECL_THREAD_LOCAL_P (decl))
7727 return omp_notice_threadprivate_variable (ctx, decl, NULL_TREE);
7728
7729 if (DECL_HAS_VALUE_EXPR_P (decl))
7730 {
7731 if (ctx->region_type & ORT_ACC)
7732 /* For OpenACC, defer expansion of the value to avoid transferring
7733 privatized common block data instead of the im-/explicitly transferred
7734 variables which are in common blocks. */
7735 ;
7736 else
7737 {
7738 tree value = get_base_address (DECL_VALUE_EXPR (decl));
7739
7740 if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value))
7741 return omp_notice_threadprivate_variable (ctx, decl, value);
7742 }
7743 }
7744
7745 if (gimplify_omp_ctxp->outer_context == NULL
7746 && VAR_P (decl)
7747 && oacc_get_fn_attrib (current_function_decl))
7748 {
7749 location_t loc = DECL_SOURCE_LOCATION (decl);
7750
7751 if (lookup_attribute ("omp declare target link",
7752 DECL_ATTRIBUTES (decl)))
7753 {
7754 error_at (loc,
7755 "%qE with %<link%> clause used in %<routine%> function",
7756 DECL_NAME (decl));
7757 return false;
7758 }
7759 else if (!lookup_attribute ("omp declare target",
7760 DECL_ATTRIBUTES (decl)))
7761 {
7762 error_at (loc,
7763 "%qE requires a %<declare%> directive for use "
7764 "in a %<routine%> function", DECL_NAME (decl));
7765 return false;
7766 }
7767 }
7768 }
7769
7770 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7771 if ((ctx->region_type & ORT_TARGET) != 0)
7772 {
7773 if (ctx->region_type & ORT_ACC)
7774 /* For OpenACC, as remarked above, defer expansion. */
7775 shared = false;
7776 else
7777 shared = true;
7778
7779 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
7780 if (n == NULL)
7781 {
7782 unsigned nflags = flags;
7783 if ((ctx->region_type & ORT_ACC) == 0)
7784 {
7785 bool is_declare_target = false;
7786 if (is_global_var (decl)
7787 && varpool_node::get_create (decl)->offloadable)
7788 {
7789 struct gimplify_omp_ctx *octx;
7790 for (octx = ctx->outer_context;
7791 octx; octx = octx->outer_context)
7792 {
7793 n = splay_tree_lookup (octx->variables,
7794 (splay_tree_key)decl);
7795 if (n
7796 && (n->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED
7797 && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
7798 break;
7799 }
7800 is_declare_target = octx == NULL;
7801 }
7802 if (!is_declare_target)
7803 {
7804 int gdmk;
7805 enum omp_clause_defaultmap_kind kind;
7806 if (lang_hooks.decls.omp_allocatable_p (decl))
7807 gdmk = GDMK_ALLOCATABLE;
7808 else if (lang_hooks.decls.omp_scalar_target_p (decl))
7809 gdmk = GDMK_SCALAR_TARGET;
7810 else if (lang_hooks.decls.omp_scalar_p (decl, false))
7811 gdmk = GDMK_SCALAR;
7812 else if (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
7813 || (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
7814 && (TREE_CODE (TREE_TYPE (TREE_TYPE (decl)))
7815 == POINTER_TYPE)))
7816 gdmk = GDMK_POINTER;
7817 else
7818 gdmk = GDMK_AGGREGATE;
7819 kind = lang_hooks.decls.omp_predetermined_mapping (decl);
7820 if (kind != OMP_CLAUSE_DEFAULTMAP_CATEGORY_UNSPECIFIED)
7821 {
7822 if (kind == OMP_CLAUSE_DEFAULTMAP_FIRSTPRIVATE)
7823 nflags |= GOVD_FIRSTPRIVATE;
7824 else if (kind == OMP_CLAUSE_DEFAULTMAP_TO)
7825 nflags |= GOVD_MAP | GOVD_MAP_TO_ONLY;
7826 else
7827 gcc_unreachable ();
7828 }
7829 else if (ctx->defaultmap[gdmk] == 0)
7830 {
7831 tree d = lang_hooks.decls.omp_report_decl (decl);
7832 error ("%qE not specified in enclosing %<target%>",
7833 DECL_NAME (d));
7834 inform (ctx->location, "enclosing %<target%>");
7835 }
7836 else if (ctx->defaultmap[gdmk]
7837 & (GOVD_MAP_0LEN_ARRAY | GOVD_FIRSTPRIVATE))
7838 nflags |= ctx->defaultmap[gdmk];
7839 else
7840 {
7841 gcc_assert (ctx->defaultmap[gdmk] & GOVD_MAP);
7842 nflags |= ctx->defaultmap[gdmk] & ~GOVD_MAP;
7843 }
7844 }
7845 }
7846
7847 struct gimplify_omp_ctx *octx = ctx->outer_context;
7848 if ((ctx->region_type & ORT_ACC) && octx)
7849 {
7850 /* Look in outer OpenACC contexts, to see if there's a
7851 data attribute for this variable. */
7852 omp_notice_variable (octx, decl, in_code);
7853
7854 for (; octx; octx = octx->outer_context)
7855 {
7856 if (!(octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)))
7857 break;
7858 splay_tree_node n2
7859 = splay_tree_lookup (octx->variables,
7860 (splay_tree_key) decl);
7861 if (n2)
7862 {
7863 if (octx->region_type == ORT_ACC_HOST_DATA)
7864 error ("variable %qE declared in enclosing "
7865 "%<host_data%> region", DECL_NAME (decl));
7866 nflags |= GOVD_MAP;
7867 if (octx->region_type == ORT_ACC_DATA
7868 && (n2->value & GOVD_MAP_0LEN_ARRAY))
7869 nflags |= GOVD_MAP_0LEN_ARRAY;
7870 goto found_outer;
7871 }
7872 }
7873 }
7874
7875 if ((nflags & ~(GOVD_MAP_TO_ONLY | GOVD_MAP_FROM_ONLY
7876 | GOVD_MAP_ALLOC_ONLY)) == flags)
7877 {
7878 tree type = TREE_TYPE (decl);
7879
7880 if (gimplify_omp_ctxp->target_firstprivatize_array_bases
7881 && omp_privatize_by_reference (decl))
7882 type = TREE_TYPE (type);
7883 if (!lang_hooks.types.omp_mappable_type (type))
7884 {
7885 error ("%qD referenced in target region does not have "
7886 "a mappable type", decl);
7887 nflags |= GOVD_MAP | GOVD_EXPLICIT;
7888 }
7889 else
7890 {
7891 if ((ctx->region_type & ORT_ACC) != 0)
7892 nflags = oacc_default_clause (ctx, decl, flags);
7893 else
7894 nflags |= GOVD_MAP;
7895 }
7896 }
7897 found_outer:
7898 omp_add_variable (ctx, decl, nflags);
7899 }
7900 else
7901 {
7902 /* If nothing changed, there's nothing left to do. */
7903 if ((n->value & flags) == flags)
7904 return ret;
7905 flags |= n->value;
7906 n->value = flags;
7907 }
7908 goto do_outer;
7909 }
7910
7911 if (n == NULL)
7912 {
7913 if (ctx->region_type == ORT_WORKSHARE
7914 || ctx->region_type == ORT_TASKGROUP
7915 || ctx->region_type == ORT_SIMD
7916 || ctx->region_type == ORT_ACC
7917 || (ctx->region_type & ORT_TARGET_DATA) != 0)
7918 goto do_outer;
7919
7920 flags = omp_default_clause (ctx, decl, in_code, flags);
7921
7922 if ((flags & GOVD_PRIVATE)
7923 && lang_hooks.decls.omp_private_outer_ref (decl))
7924 flags |= GOVD_PRIVATE_OUTER_REF;
7925
7926 omp_add_variable (ctx, decl, flags);
7927
7928 shared = (flags & GOVD_SHARED) != 0;
7929 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
7930 goto do_outer;
7931 }
7932
7933 /* Don't mark as GOVD_SEEN addressable temporaries seen only in simd
7934 lb, b or incr expressions; those shouldn't be turned into simd arrays. */
7935 if (ctx->region_type == ORT_SIMD
7936 && ctx->in_for_exprs
7937 && ((n->value & (GOVD_PRIVATE | GOVD_SEEN | GOVD_EXPLICIT))
7938 == GOVD_PRIVATE))
7939 flags &= ~GOVD_SEEN;
7940
7941 if ((n->value & (GOVD_SEEN | GOVD_LOCAL)) == 0
7942 && (flags & (GOVD_SEEN | GOVD_LOCAL)) == GOVD_SEEN
7943 && DECL_SIZE (decl))
7944 {
7945 if (TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
7946 {
7947 splay_tree_node n2;
7948 tree t = DECL_VALUE_EXPR (decl);
7949 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
7950 t = TREE_OPERAND (t, 0);
7951 gcc_assert (DECL_P (t));
7952 n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
7953 n2->value |= GOVD_SEEN;
7954 }
7955 else if (omp_privatize_by_reference (decl)
7956 && TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)))
7957 && (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))))
7958 != INTEGER_CST))
7959 {
7960 splay_tree_node n2;
7961 tree t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
7962 gcc_assert (DECL_P (t));
7963 n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
7964 if (n2)
7965 omp_notice_variable (ctx, t, true);
7966 }
7967 }
7968
7969 if (ctx->region_type & ORT_ACC)
7970 /* For OpenACC, as remarked above, defer expansion. */
7971 shared = false;
7972 else
7973 shared = ((flags | n->value) & GOVD_SHARED) != 0;
7974 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
7975
7976 /* If nothing changed, there's nothing left to do. */
7977 if ((n->value & flags) == flags)
7978 return ret;
7979 flags |= n->value;
7980 n->value = flags;
7981
7982 do_outer:
7983 /* If the variable is private in the current context, then we don't
7984 need to propagate anything to an outer context. */
7985 if ((flags & GOVD_PRIVATE) && !(flags & GOVD_PRIVATE_OUTER_REF))
7986 return ret;
7987 if ((flags & (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
7988 == (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
7989 return ret;
7990 if ((flags & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
7991 | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
7992 == (GOVD_LASTPRIVATE | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
7993 return ret;
7994 if (ctx->outer_context
7995 && omp_notice_variable (ctx->outer_context, decl, in_code))
7996 return true;
7997 return ret;
7998 }
7999
8000 /* Verify that DECL is private within CTX. If there's specific information
8001 to the contrary in the innermost scope, generate an error. */
8002
8003 static bool
8004 omp_is_private (struct gimplify_omp_ctx *ctx, tree decl, int simd)
8005 {
8006 splay_tree_node n;
8007
8008 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
8009 if (n != NULL)
8010 {
8011 if (n->value & GOVD_SHARED)
8012 {
8013 if (ctx == gimplify_omp_ctxp)
8014 {
8015 if (simd)
8016 error ("iteration variable %qE is predetermined linear",
8017 DECL_NAME (decl));
8018 else
8019 error ("iteration variable %qE should be private",
8020 DECL_NAME (decl));
8021 n->value = GOVD_PRIVATE;
8022 return true;
8023 }
8024 else
8025 return false;
8026 }
8027 else if ((n->value & GOVD_EXPLICIT) != 0
8028 && (ctx == gimplify_omp_ctxp
8029 || (ctx->region_type == ORT_COMBINED_PARALLEL
8030 && gimplify_omp_ctxp->outer_context == ctx)))
8031 {
8032 if ((n->value & GOVD_FIRSTPRIVATE) != 0)
8033 error ("iteration variable %qE should not be firstprivate",
8034 DECL_NAME (decl));
8035 else if ((n->value & GOVD_REDUCTION) != 0)
8036 error ("iteration variable %qE should not be reduction",
8037 DECL_NAME (decl));
8038 else if (simd != 1 && (n->value & GOVD_LINEAR) != 0)
8039 error ("iteration variable %qE should not be linear",
8040 DECL_NAME (decl));
8041 }
8042 return (ctx == gimplify_omp_ctxp
8043 || (ctx->region_type == ORT_COMBINED_PARALLEL
8044 && gimplify_omp_ctxp->outer_context == ctx));
8045 }
8046
8047 if (ctx->region_type != ORT_WORKSHARE
8048 && ctx->region_type != ORT_TASKGROUP
8049 && ctx->region_type != ORT_SIMD
8050 && ctx->region_type != ORT_ACC)
8051 return false;
8052 else if (ctx->outer_context)
8053 return omp_is_private (ctx->outer_context, decl, simd);
8054 return false;
8055 }
8056
8057 /* Return true if DECL is private within a parallel region
8058 that binds to the current construct's context, or appears in that
8059 parallel region's REDUCTION clause. */
8060
8061 static bool
8062 omp_check_private (struct gimplify_omp_ctx *ctx, tree decl, bool copyprivate)
8063 {
8064 splay_tree_node n;
8065
8066 do
8067 {
8068 ctx = ctx->outer_context;
8069 if (ctx == NULL)
8070 {
8071 if (is_global_var (decl))
8072 return false;
8073
8074 /* References might be private, but might be shared too. When
8075 checking for copyprivate, assume they might be private;
8076 otherwise assume they might be shared. */
8077 if (copyprivate)
8078 return true;
8079
8080 if (omp_privatize_by_reference (decl))
8081 return false;
8082
8083 /* Treat C++ privatized non-static data members outside
8084 of the privatization the same. */
8085 if (omp_member_access_dummy_var (decl))
8086 return false;
8087
8088 return true;
8089 }
8090
8091 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
8092
8093 if ((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
8094 && (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0))
8095 {
8096 if ((ctx->region_type & ORT_TARGET_DATA) != 0
8097 || n == NULL
8098 || (n->value & GOVD_MAP) == 0)
8099 continue;
8100 return false;
8101 }
8102
8103 if (n != NULL)
8104 {
8105 if ((n->value & GOVD_LOCAL) != 0
8106 && omp_member_access_dummy_var (decl))
8107 return false;
8108 return (n->value & GOVD_SHARED) == 0;
8109 }
8110
8111 if (ctx->region_type == ORT_WORKSHARE
8112 || ctx->region_type == ORT_TASKGROUP
8113 || ctx->region_type == ORT_SIMD
8114 || ctx->region_type == ORT_ACC)
8115 continue;
8116
8117 break;
8118 }
8119 while (1);
8120 return false;
8121 }
8122
8123 /* Callback for walk_tree to find a DECL_EXPR for the given DECL. */
8124
8125 static tree
8126 find_decl_expr (tree *tp, int *walk_subtrees, void *data)
8127 {
8128 tree t = *tp;
8129
8130 /* If this is the DECL_EXPR for the decl we are looking for, return it. */
8131 if (TREE_CODE (t) == DECL_EXPR && DECL_EXPR_DECL (t) == (tree) data)
8132 return t;
8133
8134 if (IS_TYPE_OR_DECL_P (t))
8135 *walk_subtrees = 0;
8136 return NULL_TREE;
8137 }
8138
8139
8140 /* Gimplify the affinity clause but effectively ignore it.
8141 Generate:
8142 var = begin;
8143 if ((step > 0) ? var <= end : var > end)
8144 locator_var_expr; */
8145
8146 static void
8147 gimplify_omp_affinity (tree *list_p, gimple_seq *pre_p)
8148 {
8149 tree last_iter = NULL_TREE;
8150 tree last_bind = NULL_TREE;
8151 tree label = NULL_TREE;
8152 tree *last_body = NULL;
8153 for (tree c = *list_p; c; c = OMP_CLAUSE_CHAIN (c))
8154 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_AFFINITY)
8155 {
8156 tree t = OMP_CLAUSE_DECL (c);
8157 if (TREE_CODE (t) == TREE_LIST
8158 && TREE_PURPOSE (t)
8159 && TREE_CODE (TREE_PURPOSE (t)) == TREE_VEC)
8160 {
8161 if (TREE_VALUE (t) == null_pointer_node)
8162 continue;
8163 if (TREE_PURPOSE (t) != last_iter)
8164 {
8165 if (last_bind)
8166 {
8167 append_to_statement_list (label, last_body);
8168 gimplify_and_add (last_bind, pre_p);
8169 last_bind = NULL_TREE;
8170 }
8171 for (tree it = TREE_PURPOSE (t); it; it = TREE_CHAIN (it))
8172 {
8173 if (gimplify_expr (&TREE_VEC_ELT (it, 1), pre_p, NULL,
8174 is_gimple_val, fb_rvalue) == GS_ERROR
8175 || gimplify_expr (&TREE_VEC_ELT (it, 2), pre_p, NULL,
8176 is_gimple_val, fb_rvalue) == GS_ERROR
8177 || gimplify_expr (&TREE_VEC_ELT (it, 3), pre_p, NULL,
8178 is_gimple_val, fb_rvalue) == GS_ERROR
8179 || (gimplify_expr (&TREE_VEC_ELT (it, 4), pre_p, NULL,
8180 is_gimple_val, fb_rvalue)
8181 == GS_ERROR))
8182 return;
8183 }
8184 last_iter = TREE_PURPOSE (t);
8185 tree block = TREE_VEC_ELT (TREE_PURPOSE (t), 5);
8186 last_bind = build3 (BIND_EXPR, void_type_node, BLOCK_VARS (block),
8187 NULL, block);
8188 last_body = &BIND_EXPR_BODY (last_bind);
8189 tree cond = NULL_TREE;
8190 location_t loc = OMP_CLAUSE_LOCATION (c);
8191 for (tree it = TREE_PURPOSE (t); it; it = TREE_CHAIN (it))
8192 {
8193 tree var = TREE_VEC_ELT (it, 0);
8194 tree begin = TREE_VEC_ELT (it, 1);
8195 tree end = TREE_VEC_ELT (it, 2);
8196 tree step = TREE_VEC_ELT (it, 3);
8197 loc = DECL_SOURCE_LOCATION (var);
8198 tree tem = build2_loc (loc, MODIFY_EXPR, void_type_node,
8199 var, begin);
8200 append_to_statement_list_force (tem, last_body);
8201
8202 tree cond1 = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
8203 step, build_zero_cst (TREE_TYPE (step)));
8204 tree cond2 = fold_build2_loc (loc, LE_EXPR, boolean_type_node,
8205 var, end);
8206 tree cond3 = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
8207 var, end);
8208 cond1 = fold_build3_loc (loc, COND_EXPR, boolean_type_node,
8209 cond1, cond2, cond3);
8210 if (cond)
8211 cond = fold_build2_loc (loc, TRUTH_AND_EXPR,
8212 boolean_type_node, cond, cond1);
8213 else
8214 cond = cond1;
8215 }
8216 tree cont_label = create_artificial_label (loc);
8217 label = build1 (LABEL_EXPR, void_type_node, cont_label);
8218 tree tem = fold_build3_loc (loc, COND_EXPR, void_type_node, cond,
8219 void_node,
8220 build_and_jump (&cont_label));
8221 append_to_statement_list_force (tem, last_body);
8222 }
8223 if (TREE_CODE (TREE_VALUE (t)) == COMPOUND_EXPR)
8224 {
8225 append_to_statement_list (TREE_OPERAND (TREE_VALUE (t), 0),
8226 last_body);
8227 TREE_VALUE (t) = TREE_OPERAND (TREE_VALUE (t), 1);
8228 }
8229 if (error_operand_p (TREE_VALUE (t)))
8230 return;
8231 append_to_statement_list_force (TREE_VALUE (t), last_body);
8232 TREE_VALUE (t) = null_pointer_node;
8233 }
8234 else
8235 {
8236 if (last_bind)
8237 {
8238 append_to_statement_list (label, last_body);
8239 gimplify_and_add (last_bind, pre_p);
8240 last_bind = NULL_TREE;
8241 }
8242 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
8243 {
8244 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
8245 NULL, is_gimple_val, fb_rvalue);
8246 OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
8247 }
8248 if (error_operand_p (OMP_CLAUSE_DECL (c)))
8249 return;
8250 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
8251 is_gimple_lvalue, fb_lvalue) == GS_ERROR)
8252 return;
8253 gimplify_and_add (OMP_CLAUSE_DECL (c), pre_p);
8254 }
8255 }
8256 if (last_bind)
8257 {
8258 append_to_statement_list (label, last_body);
8259 gimplify_and_add (last_bind, pre_p);
8260 }
8261 return;
8262 }
8263
8264 /* If *LIST_P contains any OpenMP depend clauses with iterators,
8265 lower all the depend clauses by populating the corresponding depend
8266 array. Returns 0 if there are no such depend clauses, 2 if all
8267 depend clauses should be removed, and 1 otherwise. */
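/* A sketch of the resulting array layout, derived from the code below
   rather than a documented ABI: in the legacy layout (no mutexinoutset
   or depobj entries) element 0 holds the total number of addresses and
   element 1 the out/inout count, with addresses starting at element 2;
   in the extended layout element 0 is 0, element 1 is the total,
   elements 2-4 hold the out/inout, mutexinoutset and in counts, and
   addresses start at element 5.  */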
8268
8269 static int
8270 gimplify_omp_depend (tree *list_p, gimple_seq *pre_p)
8271 {
8272 tree c;
8273 gimple *g;
8274 size_t n[4] = { 0, 0, 0, 0 };
8275 bool unused[4];
8276 tree counts[4] = { NULL_TREE, NULL_TREE, NULL_TREE, NULL_TREE };
8277 tree last_iter = NULL_TREE, last_count = NULL_TREE;
8278 size_t i, j;
8279 location_t first_loc = UNKNOWN_LOCATION;
8280
8281 for (c = *list_p; c; c = OMP_CLAUSE_CHAIN (c))
8282 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
8283 {
8284 switch (OMP_CLAUSE_DEPEND_KIND (c))
8285 {
8286 case OMP_CLAUSE_DEPEND_IN:
8287 i = 2;
8288 break;
8289 case OMP_CLAUSE_DEPEND_OUT:
8290 case OMP_CLAUSE_DEPEND_INOUT:
8291 i = 0;
8292 break;
8293 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
8294 i = 1;
8295 break;
8296 case OMP_CLAUSE_DEPEND_DEPOBJ:
8297 i = 3;
8298 break;
8299 case OMP_CLAUSE_DEPEND_SOURCE:
8300 case OMP_CLAUSE_DEPEND_SINK:
8301 continue;
8302 default:
8303 gcc_unreachable ();
8304 }
8305 tree t = OMP_CLAUSE_DECL (c);
8306 if (first_loc == UNKNOWN_LOCATION)
8307 first_loc = OMP_CLAUSE_LOCATION (c);
8308 if (TREE_CODE (t) == TREE_LIST
8309 && TREE_PURPOSE (t)
8310 && TREE_CODE (TREE_PURPOSE (t)) == TREE_VEC)
8311 {
8312 if (TREE_PURPOSE (t) != last_iter)
8313 {
8314 tree tcnt = size_one_node;
8315 for (tree it = TREE_PURPOSE (t); it; it = TREE_CHAIN (it))
8316 {
8317 if (gimplify_expr (&TREE_VEC_ELT (it, 1), pre_p, NULL,
8318 is_gimple_val, fb_rvalue) == GS_ERROR
8319 || gimplify_expr (&TREE_VEC_ELT (it, 2), pre_p, NULL,
8320 is_gimple_val, fb_rvalue) == GS_ERROR
8321 || gimplify_expr (&TREE_VEC_ELT (it, 3), pre_p, NULL,
8322 is_gimple_val, fb_rvalue) == GS_ERROR
8323 || (gimplify_expr (&TREE_VEC_ELT (it, 4), pre_p, NULL,
8324 is_gimple_val, fb_rvalue)
8325 == GS_ERROR))
8326 return 2;
8327 tree var = TREE_VEC_ELT (it, 0);
8328 tree begin = TREE_VEC_ELT (it, 1);
8329 tree end = TREE_VEC_ELT (it, 2);
8330 tree step = TREE_VEC_ELT (it, 3);
8331 tree orig_step = TREE_VEC_ELT (it, 4);
8332 tree type = TREE_TYPE (var);
8333 tree stype = TREE_TYPE (step);
8334 location_t loc = DECL_SOURCE_LOCATION (var);
8335 tree endmbegin;
8336 /* Compute count for this iterator as
8337 orig_step > 0
8338 ? (begin < end ? (end - begin + (step - 1)) / step : 0)
8339 : (begin > end ? (end - begin + (step + 1)) / step : 0)
8340 and compute product of those for the entire depend
8341 clause. */
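/* Worked instance (illustrative): begin = 0, end = 10, step = 3 with
   orig_step > 0 gives (10 - 0 + (3 - 1)) / 3 = 4 iterations, visiting
   0, 3, 6 and 9.  */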
8342 if (POINTER_TYPE_P (type))
8343 endmbegin = fold_build2_loc (loc, POINTER_DIFF_EXPR,
8344 stype, end, begin);
8345 else
8346 endmbegin = fold_build2_loc (loc, MINUS_EXPR, type,
8347 end, begin);
8348 tree stepm1 = fold_build2_loc (loc, MINUS_EXPR, stype,
8349 step,
8350 build_int_cst (stype, 1));
8351 tree stepp1 = fold_build2_loc (loc, PLUS_EXPR, stype, step,
8352 build_int_cst (stype, 1));
8353 tree pos = fold_build2_loc (loc, PLUS_EXPR, stype,
8354 unshare_expr (endmbegin),
8355 stepm1);
8356 pos = fold_build2_loc (loc, TRUNC_DIV_EXPR, stype,
8357 pos, step);
8358 tree neg = fold_build2_loc (loc, PLUS_EXPR, stype,
8359 endmbegin, stepp1);
8360 if (TYPE_UNSIGNED (stype))
8361 {
8362 neg = fold_build1_loc (loc, NEGATE_EXPR, stype, neg);
8363 step = fold_build1_loc (loc, NEGATE_EXPR, stype, step);
8364 }
8365 neg = fold_build2_loc (loc, TRUNC_DIV_EXPR, stype,
8366 neg, step);
8367 step = NULL_TREE;
8368 tree cond = fold_build2_loc (loc, LT_EXPR,
8369 boolean_type_node,
8370 begin, end);
8371 pos = fold_build3_loc (loc, COND_EXPR, stype, cond, pos,
8372 build_int_cst (stype, 0));
8373 cond = fold_build2_loc (loc, LT_EXPR, boolean_type_node,
8374 end, begin);
8375 neg = fold_build3_loc (loc, COND_EXPR, stype, cond, neg,
8376 build_int_cst (stype, 0));
8377 tree osteptype = TREE_TYPE (orig_step);
8378 cond = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
8379 orig_step,
8380 build_int_cst (osteptype, 0));
8381 tree cnt = fold_build3_loc (loc, COND_EXPR, stype,
8382 cond, pos, neg);
8383 cnt = fold_convert_loc (loc, sizetype, cnt);
8384 if (gimplify_expr (&cnt, pre_p, NULL, is_gimple_val,
8385 fb_rvalue) == GS_ERROR)
8386 return 2;
8387 tcnt = size_binop_loc (loc, MULT_EXPR, tcnt, cnt);
8388 }
8389 if (gimplify_expr (&tcnt, pre_p, NULL, is_gimple_val,
8390 fb_rvalue) == GS_ERROR)
8391 return 2;
8392 last_iter = TREE_PURPOSE (t);
8393 last_count = tcnt;
8394 }
8395 if (counts[i] == NULL_TREE)
8396 counts[i] = last_count;
8397 else
8398 counts[i] = size_binop_loc (OMP_CLAUSE_LOCATION (c),
8399 PLUS_EXPR, counts[i], last_count);
8400 }
8401 else
8402 n[i]++;
8403 }
8404 for (i = 0; i < 4; i++)
8405 if (counts[i])
8406 break;
8407 if (i == 4)
8408 return 0;
8409
8410 tree total = size_zero_node;
8411 for (i = 0; i < 4; i++)
8412 {
8413 unused[i] = counts[i] == NULL_TREE && n[i] == 0;
8414 if (counts[i] == NULL_TREE)
8415 counts[i] = size_zero_node;
8416 if (n[i])
8417 counts[i] = size_binop (PLUS_EXPR, counts[i], size_int (n[i]));
8418 if (gimplify_expr (&counts[i], pre_p, NULL, is_gimple_val,
8419 fb_rvalue) == GS_ERROR)
8420 return 2;
8421 total = size_binop (PLUS_EXPR, total, counts[i]);
8422 }
8423
8424 if (gimplify_expr (&total, pre_p, NULL, is_gimple_val, fb_rvalue)
8425 == GS_ERROR)
8426 return 2;
8427 bool is_old = unused[1] && unused[3];
8428 tree totalpx = size_binop (PLUS_EXPR, unshare_expr (total),
8429 size_int (is_old ? 1 : 4));
8430 tree type = build_array_type (ptr_type_node, build_index_type (totalpx));
8431 tree array = create_tmp_var_raw (type);
8432 TREE_ADDRESSABLE (array) = 1;
8433 if (!poly_int_tree_p (totalpx))
8434 {
8435 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (array)))
8436 gimplify_type_sizes (TREE_TYPE (array), pre_p);
8437 if (gimplify_omp_ctxp)
8438 {
8439 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
8440 while (ctx
8441 && (ctx->region_type == ORT_WORKSHARE
8442 || ctx->region_type == ORT_TASKGROUP
8443 || ctx->region_type == ORT_SIMD
8444 || ctx->region_type == ORT_ACC))
8445 ctx = ctx->outer_context;
8446 if (ctx)
8447 omp_add_variable (ctx, array, GOVD_LOCAL | GOVD_SEEN);
8448 }
8449 gimplify_vla_decl (array, pre_p);
8450 }
8451 else
8452 gimple_add_tmp_var (array);
8453 tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
8454 NULL_TREE);
8455 tree tem;
8456 if (!is_old)
8457 {
8458 tem = build2 (MODIFY_EXPR, void_type_node, r,
8459 build_int_cst (ptr_type_node, 0));
8460 gimplify_and_add (tem, pre_p);
8461 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
8462 NULL_TREE);
8463 }
8464 tem = build2 (MODIFY_EXPR, void_type_node, r,
8465 fold_convert (ptr_type_node, total));
8466 gimplify_and_add (tem, pre_p);
8467 for (i = 1; i < (is_old ? 2 : 4); i++)
8468 {
8469 r = build4 (ARRAY_REF, ptr_type_node, array, size_int (i + !is_old),
8470 NULL_TREE, NULL_TREE);
8471 tem = build2 (MODIFY_EXPR, void_type_node, r, counts[i - 1]);
8472 gimplify_and_add (tem, pre_p);
8473 }
8474
8475 tree cnts[4];
8476 for (j = 4; j; j--)
8477 if (!unused[j - 1])
8478 break;
8479 for (i = 0; i < 4; i++)
8480 {
8481 if (i && (i >= j || unused[i - 1]))
8482 {
8483 cnts[i] = cnts[i - 1];
8484 continue;
8485 }
8486 cnts[i] = create_tmp_var (sizetype);
8487 if (i == 0)
8488 g = gimple_build_assign (cnts[i], size_int (is_old ? 2 : 5));
8489 else
8490 {
8491 tree t;
8492 if (is_old)
8493 t = size_binop (PLUS_EXPR, counts[0], size_int (2));
8494 else
8495 t = size_binop (PLUS_EXPR, cnts[i - 1], counts[i - 1]);
8496 if (gimplify_expr (&t, pre_p, NULL, is_gimple_val, fb_rvalue)
8497 == GS_ERROR)
8498 return 2;
8499 g = gimple_build_assign (cnts[i], t);
8500 }
8501 gimple_seq_add_stmt (pre_p, g);
8502 }
8503
8504 last_iter = NULL_TREE;
8505 tree last_bind = NULL_TREE;
8506 tree *last_body = NULL;
8507 for (c = *list_p; c; c = OMP_CLAUSE_CHAIN (c))
8508 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
8509 {
8510 switch (OMP_CLAUSE_DEPEND_KIND (c))
8511 {
8512 case OMP_CLAUSE_DEPEND_IN:
8513 i = 2;
8514 break;
8515 case OMP_CLAUSE_DEPEND_OUT:
8516 case OMP_CLAUSE_DEPEND_INOUT:
8517 i = 0;
8518 break;
8519 case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
8520 i = 1;
8521 break;
8522 case OMP_CLAUSE_DEPEND_DEPOBJ:
8523 i = 3;
8524 break;
8525 case OMP_CLAUSE_DEPEND_SOURCE:
8526 case OMP_CLAUSE_DEPEND_SINK:
8527 continue;
8528 default:
8529 gcc_unreachable ();
8530 }
8531 tree t = OMP_CLAUSE_DECL (c);
8532 if (TREE_CODE (t) == TREE_LIST
8533 && TREE_PURPOSE (t)
8534 && TREE_CODE (TREE_PURPOSE (t)) == TREE_VEC)
8535 {
8536 if (TREE_PURPOSE (t) != last_iter)
8537 {
8538 if (last_bind)
8539 gimplify_and_add (last_bind, pre_p);
8540 tree block = TREE_VEC_ELT (TREE_PURPOSE (t), 5);
8541 last_bind = build3 (BIND_EXPR, void_type_node,
8542 BLOCK_VARS (block), NULL, block);
8543 TREE_SIDE_EFFECTS (last_bind) = 1;
8544 SET_EXPR_LOCATION (last_bind, OMP_CLAUSE_LOCATION (c));
8545 tree *p = &BIND_EXPR_BODY (last_bind);
8546 for (tree it = TREE_PURPOSE (t); it; it = TREE_CHAIN (it))
8547 {
8548 tree var = TREE_VEC_ELT (it, 0);
8549 tree begin = TREE_VEC_ELT (it, 1);
8550 tree end = TREE_VEC_ELT (it, 2);
8551 tree step = TREE_VEC_ELT (it, 3);
8552 tree orig_step = TREE_VEC_ELT (it, 4);
8553 tree type = TREE_TYPE (var);
8554 location_t loc = DECL_SOURCE_LOCATION (var);
8555 /* Emit:
8556 var = begin;
8557 goto cond_label;
8558 beg_label:
8559 ...
8560 var = var + step;
8561 cond_label:
8562 if (orig_step > 0) {
8563 if (var < end) goto beg_label;
8564 } else {
8565 if (var > end) goto beg_label;
8566 }
8567 for each iterator, with inner iterators added to
8568 the ... above. */
8569 tree beg_label = create_artificial_label (loc);
8570 tree cond_label = NULL_TREE;
8571 tem = build2_loc (loc, MODIFY_EXPR, void_type_node,
8572 var, begin);
8573 append_to_statement_list_force (tem, p);
8574 tem = build_and_jump (&cond_label);
8575 append_to_statement_list_force (tem, p);
8576 tem = build1 (LABEL_EXPR, void_type_node, beg_label);
8577 append_to_statement_list (tem, p);
8578 tree bind = build3 (BIND_EXPR, void_type_node, NULL_TREE,
8579 NULL_TREE, NULL_TREE);
8580 TREE_SIDE_EFFECTS (bind) = 1;
8581 SET_EXPR_LOCATION (bind, loc);
8582 append_to_statement_list_force (bind, p);
8583 if (POINTER_TYPE_P (type))
8584 tem = build2_loc (loc, POINTER_PLUS_EXPR, type,
8585 var, fold_convert_loc (loc, sizetype,
8586 step));
8587 else
8588 tem = build2_loc (loc, PLUS_EXPR, type, var, step);
8589 tem = build2_loc (loc, MODIFY_EXPR, void_type_node,
8590 var, tem);
8591 append_to_statement_list_force (tem, p);
8592 tem = build1 (LABEL_EXPR, void_type_node, cond_label);
8593 append_to_statement_list (tem, p);
8594 tree cond = fold_build2_loc (loc, LT_EXPR,
8595 boolean_type_node,
8596 var, end);
8597 tree pos
8598 = fold_build3_loc (loc, COND_EXPR, void_type_node,
8599 cond, build_and_jump (&beg_label),
8600 void_node);
8601 cond = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
8602 var, end);
8603 tree neg
8604 = fold_build3_loc (loc, COND_EXPR, void_type_node,
8605 cond, build_and_jump (&beg_label),
8606 void_node);
8607 tree osteptype = TREE_TYPE (orig_step);
8608 cond = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
8609 orig_step,
8610 build_int_cst (osteptype, 0));
8611 tem = fold_build3_loc (loc, COND_EXPR, void_type_node,
8612 cond, pos, neg);
8613 append_to_statement_list_force (tem, p);
8614 p = &BIND_EXPR_BODY (bind);
8615 }
8616 last_body = p;
8617 }
8618 last_iter = TREE_PURPOSE (t);
8619 if (TREE_CODE (TREE_VALUE (t)) == COMPOUND_EXPR)
8620 {
8621 append_to_statement_list (TREE_OPERAND (TREE_VALUE (t),
8622 0), last_body);
8623 TREE_VALUE (t) = TREE_OPERAND (TREE_VALUE (t), 1);
8624 }
8625 if (error_operand_p (TREE_VALUE (t)))
8626 return 2;
8627 TREE_VALUE (t) = build_fold_addr_expr (TREE_VALUE (t));
8628 r = build4 (ARRAY_REF, ptr_type_node, array, cnts[i],
8629 NULL_TREE, NULL_TREE);
8630 tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
8631 void_type_node, r, TREE_VALUE (t));
8632 append_to_statement_list_force (tem, last_body);
8633 tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
8634 void_type_node, cnts[i],
8635 size_binop (PLUS_EXPR, cnts[i], size_int (1)));
8636 append_to_statement_list_force (tem, last_body);
8637 TREE_VALUE (t) = null_pointer_node;
8638 }
8639 else
8640 {
8641 if (last_bind)
8642 {
8643 gimplify_and_add (last_bind, pre_p);
8644 last_bind = NULL_TREE;
8645 }
8646 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
8647 {
8648 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
8649 NULL, is_gimple_val, fb_rvalue);
8650 OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
8651 }
8652 if (error_operand_p (OMP_CLAUSE_DECL (c)))
8653 return 2;
8654 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (OMP_CLAUSE_DECL (c));
8655 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
8656 is_gimple_val, fb_rvalue) == GS_ERROR)
8657 return 2;
8658 r = build4 (ARRAY_REF, ptr_type_node, array, cnts[i],
8659 NULL_TREE, NULL_TREE);
8660 tem = build2 (MODIFY_EXPR, void_type_node, r, OMP_CLAUSE_DECL (c));
8661 gimplify_and_add (tem, pre_p);
8662 g = gimple_build_assign (cnts[i], size_binop (PLUS_EXPR, cnts[i],
8663 size_int (1)));
8664 gimple_seq_add_stmt (pre_p, g);
8665 }
8666 }
8667 if (last_bind)
8668 gimplify_and_add (last_bind, pre_p);
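/* A consistency check, as a sketch: after the filling pass each final
   cnts[i] must equal its start position plus counts[i].  If any counter
   differs from the expected prefix sum, the depend clauses changed
   between the counting and the filling pass, so trap at run time.  */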
8669 tree cond = boolean_false_node;
8670 if (is_old)
8671 {
8672 if (!unused[0])
8673 cond = build2_loc (first_loc, NE_EXPR, boolean_type_node, cnts[0],
8674 size_binop_loc (first_loc, PLUS_EXPR, counts[0],
8675 size_int (2)));
8676 if (!unused[2])
8677 cond = build2_loc (first_loc, TRUTH_OR_EXPR, boolean_type_node, cond,
8678 build2_loc (first_loc, NE_EXPR, boolean_type_node,
8679 cnts[2],
8680 size_binop_loc (first_loc, PLUS_EXPR,
8681 totalpx,
8682 size_int (1))));
8683 }
8684 else
8685 {
8686 tree prev = size_int (5);
8687 for (i = 0; i < 4; i++)
8688 {
8689 if (unused[i])
8690 continue;
8691 prev = size_binop_loc (first_loc, PLUS_EXPR, counts[i], prev);
8692 cond = build2_loc (first_loc, TRUTH_OR_EXPR, boolean_type_node, cond,
8693 build2_loc (first_loc, NE_EXPR, boolean_type_node,
8694 cnts[i], unshare_expr (prev)));
8695 }
8696 }
8697 tem = build3_loc (first_loc, COND_EXPR, void_type_node, cond,
8698 build_call_expr_loc (first_loc,
8699 builtin_decl_explicit (BUILT_IN_TRAP),
8700 0), void_node);
8701 gimplify_and_add (tem, pre_p);
8702 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
8703 OMP_CLAUSE_DEPEND_KIND (c) = OMP_CLAUSE_DEPEND_LAST;
8704 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
8705 OMP_CLAUSE_CHAIN (c) = *list_p;
8706 *list_p = c;
8707 return 1;
8708 }
8709
8710 /* Insert a GOMP_MAP_ALLOC or GOMP_MAP_RELEASE node following a
8711 GOMP_MAP_STRUCT mapping. C is an always_pointer mapping. STRUCT_NODE is
8712 the struct node to insert the new mapping after (when the struct node is
8713 initially created). PREV_NODE is the first of two or three mappings for a
8714 pointer, and is either:
8715 - the node before C, when a pair of mappings is used, e.g. for a C/C++
8716 array section.
8717 - not the node before C. This is true when we have a reference-to-pointer
8718 type (with a mapping for the reference and for the pointer), or for
8719 Fortran derived-type mappings with a GOMP_MAP_TO_PSET.
8720 If SCP is non-null, the new node is inserted before *SCP.
8721 If SCP is null, the new node is inserted before PREV_NODE.
8722 The return value is:
8723 - PREV_NODE, if SCP is non-null.
8724 - The newly-created ALLOC or RELEASE node, if SCP is null.
8725 - The second newly-created ALLOC or RELEASE node, if we are mapping a
8726 reference to a pointer. */
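/* For illustration (a sketch, not tied to a particular testcase): with SCP
   null and a chain

     GOMP_MAP_STRUCT (s) -> GOMP_MAP_TO (s.ptr) -> GOMP_MAP_ALWAYS_POINTER (s.ptr)
          STRUCT_NODE          PREV_NODE                    C

   the new node is chained in ahead of PREV_NODE:

     GOMP_MAP_STRUCT (s) -> GOMP_MAP_ALLOC (s.ptr) -> GOMP_MAP_TO (s.ptr) -> ...

   so the alloc/release node always precedes the mapping pair for the
   pointer.  */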
8727
8728 static tree
8729 insert_struct_comp_map (enum tree_code code, tree c, tree struct_node,
8730 tree prev_node, tree *scp)
8731 {
8732 enum gomp_map_kind mkind
8733 = (code == OMP_TARGET_EXIT_DATA || code == OACC_EXIT_DATA)
8734 ? GOMP_MAP_RELEASE : GOMP_MAP_ALLOC;
8735
8736 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c), OMP_CLAUSE_MAP);
8737 tree cl = scp ? prev_node : c2;
8738 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
8739 OMP_CLAUSE_DECL (c2) = unshare_expr (OMP_CLAUSE_DECL (c));
8740 OMP_CLAUSE_CHAIN (c2) = scp ? *scp : prev_node;
8741 if (OMP_CLAUSE_CHAIN (prev_node) != c
8742 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (prev_node)) == OMP_CLAUSE_MAP
8743 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (prev_node))
8744 == GOMP_MAP_TO_PSET))
8745 OMP_CLAUSE_SIZE (c2) = OMP_CLAUSE_SIZE (OMP_CLAUSE_CHAIN (prev_node));
8746 else
8747 OMP_CLAUSE_SIZE (c2) = TYPE_SIZE_UNIT (ptr_type_node);
8748 if (struct_node)
8749 OMP_CLAUSE_CHAIN (struct_node) = c2;
8750
8751 /* We might need to create an additional mapping if we have a reference to a
8752 pointer (in C++). Don't do this if we have something other than a
8753 GOMP_MAP_ALWAYS_POINTER though, i.e. a GOMP_MAP_TO_PSET. */
8754 if (OMP_CLAUSE_CHAIN (prev_node) != c
8755 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (prev_node)) == OMP_CLAUSE_MAP
8756 && ((OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (prev_node))
8757 == GOMP_MAP_ALWAYS_POINTER)
8758 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (prev_node))
8759 == GOMP_MAP_ATTACH_DETACH)))
8760 {
8761 tree c4 = OMP_CLAUSE_CHAIN (prev_node);
8762 tree c3 = build_omp_clause (OMP_CLAUSE_LOCATION (c), OMP_CLAUSE_MAP);
8763 OMP_CLAUSE_SET_MAP_KIND (c3, mkind);
8764 OMP_CLAUSE_DECL (c3) = unshare_expr (OMP_CLAUSE_DECL (c4));
8765 OMP_CLAUSE_SIZE (c3) = TYPE_SIZE_UNIT (ptr_type_node);
8766 OMP_CLAUSE_CHAIN (c3) = prev_node;
8767 if (!scp)
8768 OMP_CLAUSE_CHAIN (c2) = c3;
8769 else
8770 cl = c3;
8771 }
8772
8773 if (scp)
8774 *scp = c2;
8775
8776 return cl;
8777 }
8778
8779 /* Strip ARRAY_REFS or an indirect ref off BASE, find the containing object,
8780 and set *BITPOSP to the bit offset and *POFFSETP to the byte offset of the access; any variable part of the offset is returned in *OFFSETP.
8781 If BASE_REF is non-NULL and the containing object is a reference, set
8782 *BASE_REF to that reference before dereferencing the object.
8783 If BASE_REF is NULL, check that the containing object is a COMPONENT_REF or
8784 has array type, else return NULL. */
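/* E.g., as a sketch: for BASE 'x.arr[3]' the ARRAY_REFs are stripped down
   to the COMPONENT_REF 'x.arr', get_inner_reference decomposes that to the
   DECL 'x', and *POFFSETP receives the byte offset of the 'arr' field; a
   non-constant component offset would come back in *OFFSETP instead.  */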
8785
8786 static tree
8787 extract_base_bit_offset (tree base, tree *base_ref, poly_int64 *bitposp,
8788 poly_offset_int *poffsetp, tree *offsetp)
8789 {
8790 tree offset;
8791 poly_int64 bitsize, bitpos;
8792 machine_mode mode;
8793 int unsignedp, reversep, volatilep = 0;
8794 poly_offset_int poffset;
8795
8796 if (base_ref)
8797 {
8798 *base_ref = NULL_TREE;
8799
8800 while (TREE_CODE (base) == ARRAY_REF)
8801 base = TREE_OPERAND (base, 0);
8802
8803 if (TREE_CODE (base) == INDIRECT_REF)
8804 base = TREE_OPERAND (base, 0);
8805 }
8806 else
8807 {
8808 if (TREE_CODE (base) == ARRAY_REF)
8809 {
8810 while (TREE_CODE (base) == ARRAY_REF)
8811 base = TREE_OPERAND (base, 0);
8812 if (TREE_CODE (base) != COMPONENT_REF
8813 || TREE_CODE (TREE_TYPE (base)) != ARRAY_TYPE)
8814 return NULL_TREE;
8815 }
8816 else if (TREE_CODE (base) == INDIRECT_REF
8817 && TREE_CODE (TREE_OPERAND (base, 0)) == COMPONENT_REF
8818 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (base, 0)))
8819 == REFERENCE_TYPE))
8820 base = TREE_OPERAND (base, 0);
8821 }
8822
8823 base = get_inner_reference (base, &bitsize, &bitpos, &offset, &mode,
8824 &unsignedp, &reversep, &volatilep);
8825
8826 tree orig_base = base;
8827
8828 if ((TREE_CODE (base) == INDIRECT_REF
8829 || (TREE_CODE (base) == MEM_REF
8830 && integer_zerop (TREE_OPERAND (base, 1))))
8831 && DECL_P (TREE_OPERAND (base, 0))
8832 && TREE_CODE (TREE_TYPE (TREE_OPERAND (base, 0))) == REFERENCE_TYPE)
8833 base = TREE_OPERAND (base, 0);
8834
8835 if (offset && poly_int_tree_p (offset))
8836 {
8837 poffset = wi::to_poly_offset (offset);
8838 offset = NULL_TREE;
8839 }
8840 else
8841 poffset = 0;
8842
8843 if (maybe_ne (bitpos, 0))
8844 poffset += bits_to_bytes_round_down (bitpos);
8845
8846 *bitposp = bitpos;
8847 *poffsetp = poffset;
8848 *offsetp = offset;
8849
8850 /* Set *BASE_REF if BASE was a dereferenced reference variable. */
8851 if (base_ref && orig_base != base)
8852 *base_ref = orig_base;
8853
8854 return base;
8855 }
8856
8857 /* Returns true if EXPR is or contains (as a sub-component) BASE_PTR. */
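/* E.g., as a sketch: for EXPR 'a' and BASE_PTR 'a.b.c', the COMPONENT_REFs
   are stripped from BASE_PTR until it compares equal to 'a', so the
   function returns true.  */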
8858
8859 static bool
8860 is_or_contains_p (tree expr, tree base_ptr)
8861 {
8862 if ((TREE_CODE (expr) == INDIRECT_REF && TREE_CODE (base_ptr) == MEM_REF)
8863 || (TREE_CODE (expr) == MEM_REF && TREE_CODE (base_ptr) == INDIRECT_REF))
8864 return operand_equal_p (TREE_OPERAND (expr, 0),
8865 TREE_OPERAND (base_ptr, 0));
8866 while (!operand_equal_p (expr, base_ptr))
8867 {
8868 if (TREE_CODE (base_ptr) == COMPOUND_EXPR)
8869 base_ptr = TREE_OPERAND (base_ptr, 1);
8870 if (TREE_CODE (base_ptr) == COMPONENT_REF
8871 || TREE_CODE (base_ptr) == POINTER_PLUS_EXPR
8872 || TREE_CODE (base_ptr) == SAVE_EXPR)
8873 base_ptr = TREE_OPERAND (base_ptr, 0);
8874 else
8875 break;
8876 }
8877 return operand_equal_p (expr, base_ptr);
8878 }
8879
8880 /* Implement OpenMP 5.x map ordering rules for target directives. There are
8881 several rules, some of them ambiguous; hopefully we can at least
8882 collect the complexity here in one place. */
8883
8884 static void
8885 omp_target_reorder_clauses (tree *list_p)
8886 {
8887 /* Collect refs to alloc/release/delete maps. */
8888 auto_vec<tree, 32> ard;
8889 tree *cp = list_p;
8890 while (*cp != NULL_TREE)
8891 if (OMP_CLAUSE_CODE (*cp) == OMP_CLAUSE_MAP
8892 && (OMP_CLAUSE_MAP_KIND (*cp) == GOMP_MAP_ALLOC
8893 || OMP_CLAUSE_MAP_KIND (*cp) == GOMP_MAP_RELEASE
8894 || OMP_CLAUSE_MAP_KIND (*cp) == GOMP_MAP_DELETE))
8895 {
8896 /* Unlink cp and push to ard. */
8897 tree c = *cp;
8898 tree nc = OMP_CLAUSE_CHAIN (c);
8899 *cp = nc;
8900 ard.safe_push (c);
8901
8902 /* Any associated pointer type maps should also move along. */
8903 while (*cp != NULL_TREE
8904 && OMP_CLAUSE_CODE (*cp) == OMP_CLAUSE_MAP
8905 && (OMP_CLAUSE_MAP_KIND (*cp) == GOMP_MAP_FIRSTPRIVATE_REFERENCE
8906 || OMP_CLAUSE_MAP_KIND (*cp) == GOMP_MAP_FIRSTPRIVATE_POINTER
8907 || OMP_CLAUSE_MAP_KIND (*cp) == GOMP_MAP_ATTACH_DETACH
8908 || OMP_CLAUSE_MAP_KIND (*cp) == GOMP_MAP_POINTER
8909 || OMP_CLAUSE_MAP_KIND (*cp) == GOMP_MAP_ALWAYS_POINTER
8910 || OMP_CLAUSE_MAP_KIND (*cp) == GOMP_MAP_TO_PSET))
8911 {
8912 c = *cp;
8913 nc = OMP_CLAUSE_CHAIN (c);
8914 *cp = nc;
8915 ard.safe_push (c);
8916 }
8917 }
8918 else
8919 cp = &OMP_CLAUSE_CHAIN (*cp);
8920
8921 /* Link alloc/release/delete maps to the end of list. */
8922 for (unsigned int i = 0; i < ard.length (); i++)
8923 {
8924 *cp = ard[i];
8925 cp = &OMP_CLAUSE_CHAIN (ard[i]);
8926 }
8927 *cp = NULL_TREE;
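/* A sketch of the effect so far: 'map(release: p) map(to: q)' becomes
   'map(to: q) map(release: p)', with any companion pointer/reference maps
   of 'p' moved along with it.  */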
8928
8929 /* OpenMP 5.0 requires that a pointer variable is mapped before
8930 its use as a base-pointer. */
8931 auto_vec<tree *, 32> atf;
8932 for (tree *cp = list_p; *cp; cp = &OMP_CLAUSE_CHAIN (*cp))
8933 if (OMP_CLAUSE_CODE (*cp) == OMP_CLAUSE_MAP)
8934 {
8935 /* Collect alloc, to, from, to/from clause tree pointers. */
8936 gomp_map_kind k = OMP_CLAUSE_MAP_KIND (*cp);
8937 if (k == GOMP_MAP_ALLOC
8938 || k == GOMP_MAP_TO
8939 || k == GOMP_MAP_FROM
8940 || k == GOMP_MAP_TOFROM
8941 || k == GOMP_MAP_ALWAYS_TO
8942 || k == GOMP_MAP_ALWAYS_FROM
8943 || k == GOMP_MAP_ALWAYS_TOFROM)
8944 atf.safe_push (cp);
8945 }
8946
8947 for (unsigned int i = 0; i < atf.length (); i++)
8948 if (atf[i])
8949 {
8950 tree *cp = atf[i];
8951 tree decl = OMP_CLAUSE_DECL (*cp);
8952 if (TREE_CODE (decl) == INDIRECT_REF || TREE_CODE (decl) == MEM_REF)
8953 {
8954 tree base_ptr = TREE_OPERAND (decl, 0);
8955 STRIP_TYPE_NOPS (base_ptr);
8956 for (unsigned int j = i + 1; j < atf.length (); j++)
8957 if (atf[j])
8958 {
8959 tree *cp2 = atf[j];
8960 tree decl2 = OMP_CLAUSE_DECL (*cp2);
8963 if (is_or_contains_p (decl2, base_ptr))
8964 {
8965 /* Move *cp2 to before *cp. */
8966 tree c = *cp2;
8967 *cp2 = OMP_CLAUSE_CHAIN (c);
8968 OMP_CLAUSE_CHAIN (c) = *cp;
8969 *cp = c;
8970
8971 if (*cp2 != NULL_TREE
8972 && OMP_CLAUSE_CODE (*cp2) == OMP_CLAUSE_MAP
8973 && OMP_CLAUSE_MAP_KIND (*cp2) == GOMP_MAP_ALWAYS_POINTER)
8974 {
8975 tree c2 = *cp2;
8976 *cp2 = OMP_CLAUSE_CHAIN (c2);
8977 OMP_CLAUSE_CHAIN (c2) = OMP_CLAUSE_CHAIN (c);
8978 OMP_CLAUSE_CHAIN (c) = c2;
8979 }
8980
8981 atf[j] = NULL;
8982 }
8983 }
8984 }
8985 }
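/* A sketch of the base-pointer rule just applied: given
   'map(to: *p) map(to: p)', the clause mapping 'p' is moved in front of
   the clause dereferencing it, so the pointer itself is mapped first.  */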
8986
8987 /* For attach_detach map clauses, if there is another map that maps the
8988 attached/detached pointer, make sure that map is ordered before the
8989 attach_detach. */
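/* E.g., as a sketch: if a GOMP_MAP_ATTACH_DETACH node for 's.p' precedes a
   'map(tofrom: s)' clause, the mapping of 's' (which contains the
   attachment point 's.p') is moved in front of the attach_detach node.  */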
8990 atf.truncate (0);
8991 for (tree *cp = list_p; *cp; cp = &OMP_CLAUSE_CHAIN (*cp))
8992 if (OMP_CLAUSE_CODE (*cp) == OMP_CLAUSE_MAP)
8993 {
8994 /* Collect alloc, to, from, to/from clauses, and
8995 always_pointer/attach_detach clauses. */
8996 gomp_map_kind k = OMP_CLAUSE_MAP_KIND (*cp);
8997 if (k == GOMP_MAP_ALLOC
8998 || k == GOMP_MAP_TO
8999 || k == GOMP_MAP_FROM
9000 || k == GOMP_MAP_TOFROM
9001 || k == GOMP_MAP_ALWAYS_TO
9002 || k == GOMP_MAP_ALWAYS_FROM
9003 || k == GOMP_MAP_ALWAYS_TOFROM
9004 || k == GOMP_MAP_ATTACH_DETACH
9005 || k == GOMP_MAP_ALWAYS_POINTER)
9006 atf.safe_push (cp);
9007 }
9008
9009 for (unsigned int i = 0; i < atf.length (); i++)
9010 if (atf[i])
9011 {
9012 tree *cp = atf[i];
9013 tree ptr = OMP_CLAUSE_DECL (*cp);
9014 STRIP_TYPE_NOPS (ptr);
9015 if (OMP_CLAUSE_MAP_KIND (*cp) == GOMP_MAP_ATTACH_DETACH)
9016 for (unsigned int j = i + 1; j < atf.length (); j++)
9017 {
9018 tree *cp2 = atf[j];
9019 tree decl2 = OMP_CLAUSE_DECL (*cp2);
9020 if (OMP_CLAUSE_MAP_KIND (*cp2) != GOMP_MAP_ATTACH_DETACH
9021 && OMP_CLAUSE_MAP_KIND (*cp2) != GOMP_MAP_ALWAYS_POINTER
9022 && is_or_contains_p (decl2, ptr))
9023 {
9024 /* Move *cp2 to before *cp. */
9025 tree c = *cp2;
9026 *cp2 = OMP_CLAUSE_CHAIN (c);
9027 OMP_CLAUSE_CHAIN (c) = *cp;
9028 *cp = c;
9029 atf[j] = NULL;
9030
9031 /* If decl2 is of the form '*decl2_opnd0', and followed by an
9032 ALWAYS_POINTER or ATTACH_DETACH of 'decl2_opnd0', move the
9033 pointer operation along with *cp2. This can happen for C++
9034 reference sequences. */
9035 if (j + 1 < atf.length ()
9036 && (TREE_CODE (decl2) == INDIRECT_REF
9037 || TREE_CODE (decl2) == MEM_REF))
9038 {
9039 tree *cp3 = atf[j + 1];
9040 tree decl3 = OMP_CLAUSE_DECL (*cp3);
9041 tree decl2_opnd0 = TREE_OPERAND (decl2, 0);
9042 if ((OMP_CLAUSE_MAP_KIND (*cp3) == GOMP_MAP_ALWAYS_POINTER
9043 || OMP_CLAUSE_MAP_KIND (*cp3) == GOMP_MAP_ATTACH_DETACH)
9044 && operand_equal_p (decl3, decl2_opnd0))
9045 {
9046 /* Also move *cp3 to before *cp. */
9047 c = *cp3;
9048 *cp2 = OMP_CLAUSE_CHAIN (c);
9049 OMP_CLAUSE_CHAIN (c) = *cp;
9050 *cp = c;
9051 atf[j + 1] = NULL;
9052 j += 1;
9053 }
9054 }
9055 }
9056 }
9057 }
9058 }
9059
9060 /* DECL is supposed to have lastprivate semantics in the outer contexts
9061 of combined/composite constructs, starting with OCTX.
9062 Add needed lastprivate, shared or map clause if no data sharing or
9063 mapping clause is present. IMPLICIT_P is true if it is an implicit
9064 clause (IV on simd), in which case the lastprivate will not be
9065 copied to some constructs. */
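/* A sketch of the walk below: for the iteration variable of a combined
   'target teams distribute parallel for simd', the IV is lastprivate on
   the innermost constructs, is added as shared on the combined parallel
   and combined teams contexts, and is mapped on the combined target.  */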
9066
9067 static void
9068 omp_lastprivate_for_combined_outer_constructs (struct gimplify_omp_ctx *octx,
9069 tree decl, bool implicit_p)
9070 {
9071 struct gimplify_omp_ctx *orig_octx = octx;
9072 for (; octx; octx = octx->outer_context)
9073 {
9074 if ((octx->region_type == ORT_COMBINED_PARALLEL
9075 || (octx->region_type & ORT_COMBINED_TEAMS) == ORT_COMBINED_TEAMS)
9076 && splay_tree_lookup (octx->variables,
9077 (splay_tree_key) decl) == NULL)
9078 {
9079 omp_add_variable (octx, decl, GOVD_SHARED | GOVD_SEEN);
9080 continue;
9081 }
9082 if ((octx->region_type & ORT_TASK) != 0
9083 && octx->combined_loop
9084 && splay_tree_lookup (octx->variables,
9085 (splay_tree_key) decl) == NULL)
9086 {
9087 omp_add_variable (octx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
9088 continue;
9089 }
9090 if (implicit_p
9091 && octx->region_type == ORT_WORKSHARE
9092 && octx->combined_loop
9093 && splay_tree_lookup (octx->variables,
9094 (splay_tree_key) decl) == NULL
9095 && octx->outer_context
9096 && octx->outer_context->region_type == ORT_COMBINED_PARALLEL
9097 && splay_tree_lookup (octx->outer_context->variables,
9098 (splay_tree_key) decl) == NULL)
9099 {
9100 octx = octx->outer_context;
9101 omp_add_variable (octx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
9102 continue;
9103 }
9104 if ((octx->region_type == ORT_WORKSHARE || octx->region_type == ORT_ACC)
9105 && octx->combined_loop
9106 && splay_tree_lookup (octx->variables,
9107 (splay_tree_key) decl) == NULL
9108 && !omp_check_private (octx, decl, false))
9109 {
9110 omp_add_variable (octx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
9111 continue;
9112 }
9113 if (octx->region_type == ORT_COMBINED_TARGET)
9114 {
9115 splay_tree_node n = splay_tree_lookup (octx->variables,
9116 (splay_tree_key) decl);
9117 if (n == NULL)
9118 {
9119 omp_add_variable (octx, decl, GOVD_MAP | GOVD_SEEN);
9120 octx = octx->outer_context;
9121 }
9122 else if (!implicit_p
9123 && (n->value & GOVD_FIRSTPRIVATE_IMPLICIT))
9124 {
9125 n->value &= ~(GOVD_FIRSTPRIVATE
9126 | GOVD_FIRSTPRIVATE_IMPLICIT
9127 | GOVD_EXPLICIT);
9128 omp_add_variable (octx, decl, GOVD_MAP | GOVD_SEEN);
9129 octx = octx->outer_context;
9130 }
9131 }
9132 break;
9133 }
9134 if (octx && (implicit_p || octx != orig_octx))
9135 omp_notice_variable (octx, decl, true);
9136 }
9137
9138 /* Scan the OMP clauses in *LIST_P, installing mappings into a new
9139 omp context and, where needed, into enclosing omp contexts. */
9140
9141 static void
9142 gimplify_scan_omp_clauses (tree *list_p, gimple_seq *pre_p,
9143 enum omp_region_type region_type,
9144 enum tree_code code)
9145 {
9146 struct gimplify_omp_ctx *ctx, *outer_ctx;
9147 tree c;
9148 hash_map<tree_operand_hash, tree> *struct_map_to_clause = NULL;
9149 hash_map<tree_operand_hash, tree *> *struct_seen_clause = NULL;
9150 hash_set<tree> *struct_deref_set = NULL;
9151 tree *prev_list_p = NULL, *orig_list_p = list_p;
9152 int handled_depend_iterators = -1;
9153 int nowait = -1;
9154
9155 ctx = new_omp_context (region_type);
9156 ctx->code = code;
9157 outer_ctx = ctx->outer_context;
9158 if (code == OMP_TARGET)
9159 {
9160 if (!lang_GNU_Fortran ())
9161 ctx->defaultmap[GDMK_POINTER] = GOVD_MAP | GOVD_MAP_0LEN_ARRAY;
9162 ctx->defaultmap[GDMK_SCALAR] = GOVD_FIRSTPRIVATE;
9163 ctx->defaultmap[GDMK_SCALAR_TARGET] = (lang_GNU_Fortran ()
9164 ? GOVD_MAP : GOVD_FIRSTPRIVATE);
9165 }
9166 if (!lang_GNU_Fortran ())
9167 switch (code)
9168 {
9169 case OMP_TARGET:
9170 case OMP_TARGET_DATA:
9171 case OMP_TARGET_ENTER_DATA:
9172 case OMP_TARGET_EXIT_DATA:
9173 case OACC_DECLARE:
9174 case OACC_HOST_DATA:
9175 case OACC_PARALLEL:
9176 case OACC_KERNELS:
9177 ctx->target_firstprivatize_array_bases = true;
9178 default:
9179 break;
9180 }
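/* A sketch of the 'target' defaults chosen above: scalars default to
   firstprivate; for C/C++, pointers default to a zero-length array-section
   map, while Fortran keeps the plain pointer default and maps (rather than
   firstprivatizes) GDMK_SCALAR_TARGET scalars.  */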
9181
9182 if (code == OMP_TARGET
9183 || code == OMP_TARGET_DATA
9184 || code == OMP_TARGET_ENTER_DATA
9185 || code == OMP_TARGET_EXIT_DATA)
9186 omp_target_reorder_clauses (list_p);
9187
9188 while ((c = *list_p) != NULL)
9189 {
9190 bool remove = false;
9191 bool notice_outer = true;
9192 const char *check_non_private = NULL;
9193 unsigned int flags;
9194 tree decl;
9195
9196 switch (OMP_CLAUSE_CODE (c))
9197 {
9198 case OMP_CLAUSE_PRIVATE:
9199 flags = GOVD_PRIVATE | GOVD_EXPLICIT;
9200 if (lang_hooks.decls.omp_private_outer_ref (OMP_CLAUSE_DECL (c)))
9201 {
9202 flags |= GOVD_PRIVATE_OUTER_REF;
9203 OMP_CLAUSE_PRIVATE_OUTER_REF (c) = 1;
9204 }
9205 else
9206 notice_outer = false;
9207 goto do_add;
9208 case OMP_CLAUSE_SHARED:
9209 flags = GOVD_SHARED | GOVD_EXPLICIT;
9210 goto do_add;
9211 case OMP_CLAUSE_FIRSTPRIVATE:
9212 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
9213 check_non_private = "firstprivate";
9214 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
9215 {
9216 gcc_assert (code == OMP_TARGET);
9217 flags |= GOVD_FIRSTPRIVATE_IMPLICIT;
9218 }
9219 goto do_add;
9220 case OMP_CLAUSE_LASTPRIVATE:
9221 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
9222 switch (code)
9223 {
9224 case OMP_DISTRIBUTE:
9225 error_at (OMP_CLAUSE_LOCATION (c),
9226 "conditional %<lastprivate%> clause on "
9227 "%qs construct", "distribute");
9228 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) = 0;
9229 break;
9230 case OMP_TASKLOOP:
9231 error_at (OMP_CLAUSE_LOCATION (c),
9232 "conditional %<lastprivate%> clause on "
9233 "%qs construct", "taskloop");
9234 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) = 0;
9235 break;
9236 default:
9237 break;
9238 }
9239 flags = GOVD_LASTPRIVATE | GOVD_SEEN | GOVD_EXPLICIT;
9240 if (code != OMP_LOOP)
9241 check_non_private = "lastprivate";
9242 decl = OMP_CLAUSE_DECL (c);
9243 if (error_operand_p (decl))
9244 goto do_add;
9245 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
9246 && !lang_hooks.decls.omp_scalar_p (decl, true))
9247 {
9248 error_at (OMP_CLAUSE_LOCATION (c),
9249 "non-scalar variable %qD in conditional "
9250 "%<lastprivate%> clause", decl);
9251 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) = 0;
9252 }
9253 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
9254 flags |= GOVD_LASTPRIVATE_CONDITIONAL;
9255 omp_lastprivate_for_combined_outer_constructs (outer_ctx, decl,
9256 false);
9257 goto do_add;
9258 case OMP_CLAUSE_REDUCTION:
9259 if (OMP_CLAUSE_REDUCTION_TASK (c))
9260 {
9261 if (region_type == ORT_WORKSHARE || code == OMP_SCOPE)
9262 {
9263 if (nowait == -1)
9264 nowait = omp_find_clause (*list_p,
9265 OMP_CLAUSE_NOWAIT) != NULL_TREE;
9266 if (nowait
9267 && (outer_ctx == NULL
9268 || outer_ctx->region_type != ORT_COMBINED_PARALLEL))
9269 {
9270 error_at (OMP_CLAUSE_LOCATION (c),
9271 "%<task%> reduction modifier on a construct "
9272 "with a %<nowait%> clause");
9273 OMP_CLAUSE_REDUCTION_TASK (c) = 0;
9274 }
9275 }
9276 else if ((region_type & ORT_PARALLEL) != ORT_PARALLEL)
9277 {
9278 error_at (OMP_CLAUSE_LOCATION (c),
9279 "invalid %<task%> reduction modifier on construct "
9280 "other than %<parallel%>, %qs, %<sections%> or "
9281 "%<scope%>", lang_GNU_Fortran () ? "do" : "for");
9282 OMP_CLAUSE_REDUCTION_TASK (c) = 0;
9283 }
9284 }
9285 if (OMP_CLAUSE_REDUCTION_INSCAN (c))
9286 switch (code)
9287 {
9288 case OMP_SECTIONS:
9289 error_at (OMP_CLAUSE_LOCATION (c),
9290 "%<inscan%> %<reduction%> clause on "
9291 "%qs construct", "sections");
9292 OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
9293 break;
9294 case OMP_PARALLEL:
9295 error_at (OMP_CLAUSE_LOCATION (c),
9296 "%<inscan%> %<reduction%> clause on "
9297 "%qs construct", "parallel");
9298 OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
9299 break;
9300 case OMP_TEAMS:
9301 error_at (OMP_CLAUSE_LOCATION (c),
9302 "%<inscan%> %<reduction%> clause on "
9303 "%qs construct", "teams");
9304 OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
9305 break;
9306 case OMP_TASKLOOP:
9307 error_at (OMP_CLAUSE_LOCATION (c),
9308 "%<inscan%> %<reduction%> clause on "
9309 "%qs construct", "taskloop");
9310 OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
9311 break;
9312 case OMP_SCOPE:
9313 error_at (OMP_CLAUSE_LOCATION (c),
9314 "%<inscan%> %<reduction%> clause on "
9315 "%qs construct", "scope");
9316 OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
9317 break;
9318 default:
9319 break;
9320 }
9321 /* FALLTHRU */
9322 case OMP_CLAUSE_IN_REDUCTION:
9323 case OMP_CLAUSE_TASK_REDUCTION:
9324 flags = GOVD_REDUCTION | GOVD_SEEN | GOVD_EXPLICIT;
9325 /* OpenACC permits reductions on private variables. */
9326 if (!(region_type & ORT_ACC)
9327 /* taskgroup is actually not a worksharing region. */
9328 && code != OMP_TASKGROUP)
9329 check_non_private = omp_clause_code_name[OMP_CLAUSE_CODE (c)];
9330 decl = OMP_CLAUSE_DECL (c);
9331 if (TREE_CODE (decl) == MEM_REF)
9332 {
9333 tree type = TREE_TYPE (decl);
9334 bool saved_into_ssa = gimplify_ctxp->into_ssa;
9335 gimplify_ctxp->into_ssa = false;
9336 if (gimplify_expr (&TYPE_MAX_VALUE (TYPE_DOMAIN (type)), pre_p,
9337 NULL, is_gimple_val, fb_rvalue, false)
9338 == GS_ERROR)
9339 {
9340 gimplify_ctxp->into_ssa = saved_into_ssa;
9341 remove = true;
9342 break;
9343 }
9344 gimplify_ctxp->into_ssa = saved_into_ssa;
9345 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
9346 if (DECL_P (v))
9347 {
9348 omp_firstprivatize_variable (ctx, v);
9349 omp_notice_variable (ctx, v, true);
9350 }
9351 decl = TREE_OPERAND (decl, 0);
9352 if (TREE_CODE (decl) == POINTER_PLUS_EXPR)
9353 {
9354 gimplify_ctxp->into_ssa = false;
9355 if (gimplify_expr (&TREE_OPERAND (decl, 1), pre_p,
9356 NULL, is_gimple_val, fb_rvalue, false)
9357 == GS_ERROR)
9358 {
9359 gimplify_ctxp->into_ssa = saved_into_ssa;
9360 remove = true;
9361 break;
9362 }
9363 gimplify_ctxp->into_ssa = saved_into_ssa;
9364 v = TREE_OPERAND (decl, 1);
9365 if (DECL_P (v))
9366 {
9367 omp_firstprivatize_variable (ctx, v);
9368 omp_notice_variable (ctx, v, true);
9369 }
9370 decl = TREE_OPERAND (decl, 0);
9371 }
9372 if (TREE_CODE (decl) == ADDR_EXPR
9373 || TREE_CODE (decl) == INDIRECT_REF)
9374 decl = TREE_OPERAND (decl, 0);
9375 }
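/* A sketch of the effect above: for 'reduction (+: arr[2:n])' the clause
   decl is a MEM_REF; the variable bound 'n' (and any variable pointer
   offset) has just been gimplified and firstprivatized in CTX, and DECL
   has been peeled back to the underlying base variable for the
   do_add_decl handling below.  */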
9376 goto do_add_decl;
9377 case OMP_CLAUSE_LINEAR:
9378 if (gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c), pre_p, NULL,
9379 is_gimple_val, fb_rvalue) == GS_ERROR)
9380 {
9381 remove = true;
9382 break;
9383 }
9384 else
9385 {
9386 if (code == OMP_SIMD
9387 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
9388 {
9389 struct gimplify_omp_ctx *octx = outer_ctx;
9390 if (octx
9391 && octx->region_type == ORT_WORKSHARE
9392 && octx->combined_loop
9393 && !octx->distribute)
9394 {
9395 if (octx->outer_context
9396 && (octx->outer_context->region_type
9397 == ORT_COMBINED_PARALLEL))
9398 octx = octx->outer_context->outer_context;
9399 else
9400 octx = octx->outer_context;
9401 }
9402 if (octx
9403 && octx->region_type == ORT_WORKSHARE
9404 && octx->combined_loop
9405 && octx->distribute)
9406 {
9407 error_at (OMP_CLAUSE_LOCATION (c),
9408 "%<linear%> clause for variable other than "
9409 "loop iterator specified on construct "
9410 "combined with %<distribute%>");
9411 remove = true;
9412 break;
9413 }
9414 }
9415 /* For combined #pragma omp parallel for simd, we need to put
9416 lastprivate and perhaps firstprivate too on the
9417 parallel. Similarly for #pragma omp for simd. */
9418 struct gimplify_omp_ctx *octx = outer_ctx;
9419 bool taskloop_seen = false;
9420 decl = NULL_TREE;
9421 do
9422 {
9423 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
9424 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
9425 break;
9426 decl = OMP_CLAUSE_DECL (c);
9427 if (error_operand_p (decl))
9428 {
9429 decl = NULL_TREE;
9430 break;
9431 }
9432 flags = GOVD_SEEN;
9433 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
9434 flags |= GOVD_FIRSTPRIVATE;
9435 if (!OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
9436 flags |= GOVD_LASTPRIVATE;
9437 if (octx
9438 && octx->region_type == ORT_WORKSHARE
9439 && octx->combined_loop)
9440 {
9441 if (octx->outer_context
9442 && (octx->outer_context->region_type
9443 == ORT_COMBINED_PARALLEL))
9444 octx = octx->outer_context;
9445 else if (omp_check_private (octx, decl, false))
9446 break;
9447 }
9448 else if (octx
9449 && (octx->region_type & ORT_TASK) != 0
9450 && octx->combined_loop)
9451 taskloop_seen = true;
9452 else if (octx
9453 && octx->region_type == ORT_COMBINED_PARALLEL
9454 && ((ctx->region_type == ORT_WORKSHARE
9455 && octx == outer_ctx)
9456 || taskloop_seen))
9457 flags = GOVD_SEEN | GOVD_SHARED;
9458 else if (octx
9459 && ((octx->region_type & ORT_COMBINED_TEAMS)
9460 == ORT_COMBINED_TEAMS))
9461 flags = GOVD_SEEN | GOVD_SHARED;
9462 else if (octx
9463 && octx->region_type == ORT_COMBINED_TARGET)
9464 {
9465 if (flags & GOVD_LASTPRIVATE)
9466 flags = GOVD_SEEN | GOVD_MAP;
9467 }
9468 else
9469 break;
9470 splay_tree_node on
9471 = splay_tree_lookup (octx->variables,
9472 (splay_tree_key) decl);
9473 if (on && (on->value & GOVD_DATA_SHARE_CLASS) != 0)
9474 {
9475 octx = NULL;
9476 break;
9477 }
9478 omp_add_variable (octx, decl, flags);
9479 if (octx->outer_context == NULL)
9480 break;
9481 octx = octx->outer_context;
9482 }
9483 while (1);
9484 if (octx
9485 && decl
9486 && (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
9487 || !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
9488 omp_notice_variable (octx, decl, true);
9489 }
9490 flags = GOVD_LINEAR | GOVD_EXPLICIT;
9491 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
9492 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
9493 {
9494 notice_outer = false;
9495 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
9496 }
9497 goto do_add;
9498
9499 case OMP_CLAUSE_MAP:
9500 decl = OMP_CLAUSE_DECL (c);
9501 if (error_operand_p (decl))
9502 remove = true;
9503 switch (code)
9504 {
9505 case OMP_TARGET:
9506 break;
9507 case OACC_DATA:
9508 if (TREE_CODE (TREE_TYPE (decl)) != ARRAY_TYPE)
9509 break;
9510 /* FALLTHRU */
9511 case OMP_TARGET_DATA:
9512 case OMP_TARGET_ENTER_DATA:
9513 case OMP_TARGET_EXIT_DATA:
9514 case OACC_ENTER_DATA:
9515 case OACC_EXIT_DATA:
9516 case OACC_HOST_DATA:
9517 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
9518 || (OMP_CLAUSE_MAP_KIND (c)
9519 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
9520 /* For target {,enter ,exit }data only the array slice is
9521 mapped, but not the pointer to it. */
9522 remove = true;
9523 break;
9524 default:
9525 break;
9526 }
9527 /* For Fortran, not only the pointer to the data is mapped but also
9528 the address of the pointer, the array descriptor etc.; for
9529 'exit data' - and in particular for 'delete:' - having an 'alloc:'
9530 does not make sense. Likewise, for 'update' only transferring the
9531 data itself is needed as the rest has been handled in previous
9532 directives. However, for 'exit data', the array descriptor needs
9533 to be deleted; hence, we turn the MAP_TO_PSET into a MAP_DELETE.
9534
9535 NOTE: Generally, it is not safe to perform "enter data" operations
9536 on arrays where the data *or the descriptor* may go out of scope
9537 before a corresponding "exit data" operation -- and such a
9538 descriptor may be synthesized temporarily, e.g. to pass an
9539 explicit-shape array to a function expecting an assumed-shape
9540 argument. Performing "enter data" inside the called function
9541 would thus be problematic. */
9542 if (code == OMP_TARGET_EXIT_DATA
9543 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_TO_PSET)
9544 OMP_CLAUSE_SET_MAP_KIND (c, OMP_CLAUSE_MAP_KIND (*prev_list_p)
9545 == GOMP_MAP_DELETE
9546 ? GOMP_MAP_DELETE : GOMP_MAP_RELEASE);
9547 else if ((code == OMP_TARGET_EXIT_DATA || code == OMP_TARGET_UPDATE)
9548 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
9549 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_TO_PSET))
9550 remove = true;
9551
9552 if (remove)
9553 break;
9554 if (DECL_P (decl) && outer_ctx && (region_type & ORT_ACC))
9555 {
9556 struct gimplify_omp_ctx *octx;
9557 for (octx = outer_ctx; octx; octx = octx->outer_context)
9558 {
9559 if (octx->region_type != ORT_ACC_HOST_DATA)
9560 break;
9561 splay_tree_node n2
9562 = splay_tree_lookup (octx->variables,
9563 (splay_tree_key) decl);
9564 if (n2)
9565 error_at (OMP_CLAUSE_LOCATION (c), "variable %qE "
9566 "declared in enclosing %<host_data%> region",
9567 DECL_NAME (decl));
9568 }
9569 }
9570 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
9571 OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
9572 : TYPE_SIZE_UNIT (TREE_TYPE (decl));
9573 if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
9574 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
9575 {
9576 remove = true;
9577 break;
9578 }
9579 else if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
9580 || (OMP_CLAUSE_MAP_KIND (c)
9581 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
9582 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH)
9583 && TREE_CODE (OMP_CLAUSE_SIZE (c)) != INTEGER_CST)
9584 {
9585 OMP_CLAUSE_SIZE (c)
9586 = get_initialized_tmp_var (OMP_CLAUSE_SIZE (c), pre_p, NULL,
9587 false);
9588 if ((region_type & ORT_TARGET) != 0)
9589 omp_add_variable (ctx, OMP_CLAUSE_SIZE (c),
9590 GOVD_FIRSTPRIVATE | GOVD_SEEN);
9591 }
9592
9593 if (TREE_CODE (decl) == TARGET_EXPR)
9594 {
9595 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
9596 is_gimple_lvalue, fb_lvalue)
9597 == GS_ERROR)
9598 remove = true;
9599 }
9600 else if (!DECL_P (decl))
9601 {
9602 tree d = decl, *pd;
9603 if (TREE_CODE (d) == ARRAY_REF)
9604 {
9605 while (TREE_CODE (d) == ARRAY_REF)
9606 d = TREE_OPERAND (d, 0);
9607 if (TREE_CODE (d) == COMPONENT_REF
9608 && TREE_CODE (TREE_TYPE (d)) == ARRAY_TYPE)
9609 decl = d;
9610 }
9611 pd = &OMP_CLAUSE_DECL (c);
9612 if (d == decl
9613 && TREE_CODE (decl) == INDIRECT_REF
9614 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
9615 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
9616 == REFERENCE_TYPE)
9617 && (OMP_CLAUSE_MAP_KIND (c)
9618 != GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION))
9619 {
9620 pd = &TREE_OPERAND (decl, 0);
9621 decl = TREE_OPERAND (decl, 0);
9622 }
9623 bool indir_p = false;
9624 bool component_ref_p = false;
9625 tree indir_base = NULL_TREE;
9626 tree orig_decl = decl;
9627 tree decl_ref = NULL_TREE;
9628 if ((region_type & (ORT_ACC | ORT_TARGET | ORT_TARGET_DATA)) != 0
9629 && TREE_CODE (*pd) == COMPONENT_REF
9630 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH
9631 && code != OACC_UPDATE)
9632 {
9633 while (TREE_CODE (decl) == COMPONENT_REF)
9634 {
9635 decl = TREE_OPERAND (decl, 0);
9636 component_ref_p = true;
9637 if (((TREE_CODE (decl) == MEM_REF
9638 && integer_zerop (TREE_OPERAND (decl, 1)))
9639 || INDIRECT_REF_P (decl))
9640 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
9641 == POINTER_TYPE))
9642 {
9643 indir_p = true;
9644 indir_base = decl;
9645 decl = TREE_OPERAND (decl, 0);
9646 STRIP_NOPS (decl);
9647 }
9648 if (TREE_CODE (decl) == INDIRECT_REF
9649 && DECL_P (TREE_OPERAND (decl, 0))
9650 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
9651 == REFERENCE_TYPE))
9652 {
9653 decl_ref = decl;
9654 decl = TREE_OPERAND (decl, 0);
9655 }
9656 }
9657 }
9658 else if (TREE_CODE (decl) == COMPONENT_REF
9659 && (OMP_CLAUSE_MAP_KIND (c)
9660 != GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION))
9661 {
9662 component_ref_p = true;
9663 while (TREE_CODE (decl) == COMPONENT_REF)
9664 decl = TREE_OPERAND (decl, 0);
9665 if (TREE_CODE (decl) == INDIRECT_REF
9666 && DECL_P (TREE_OPERAND (decl, 0))
9667 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
9668 == REFERENCE_TYPE))
9669 decl = TREE_OPERAND (decl, 0);
9670 }
9671 if (decl != orig_decl && DECL_P (decl) && indir_p
9672 && (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
9673 || (decl_ref
9674 && TREE_CODE (TREE_TYPE (decl_ref)) == POINTER_TYPE)))
9675 {
9676 gomp_map_kind k
9677 = ((code == OACC_EXIT_DATA || code == OMP_TARGET_EXIT_DATA)
9678 ? GOMP_MAP_DETACH : GOMP_MAP_ATTACH);
9679 /* We have a dereference of a struct member. Make this an
9680 attach/detach operation, and ensure the base pointer is
9681 mapped as a FIRSTPRIVATE_POINTER. */
9682 OMP_CLAUSE_SET_MAP_KIND (c, k);
9683 flags = GOVD_MAP | GOVD_SEEN | GOVD_EXPLICIT;
9684 tree next_clause = OMP_CLAUSE_CHAIN (c);
9685 if (k == GOMP_MAP_ATTACH
9686 && code != OACC_ENTER_DATA
9687 && code != OMP_TARGET_ENTER_DATA
9688 && (!next_clause
9689 || (OMP_CLAUSE_CODE (next_clause) != OMP_CLAUSE_MAP)
9690 || (OMP_CLAUSE_MAP_KIND (next_clause)
9691 != GOMP_MAP_POINTER)
9692 || OMP_CLAUSE_DECL (next_clause) != decl)
9693 && (!struct_deref_set
9694 || !struct_deref_set->contains (decl))
9695 && (!struct_map_to_clause
9696 || !struct_map_to_clause->get (indir_base)))
9697 {
9698 if (!struct_deref_set)
9699 struct_deref_set = new hash_set<tree> ();
9700 /* As well as the attach, we also need a
9701 FIRSTPRIVATE_POINTER clause to properly map the
9702 pointer to the struct base. */
9703 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
9704 OMP_CLAUSE_MAP);
9705 OMP_CLAUSE_SET_MAP_KIND (c2, GOMP_MAP_ALLOC);
9706 OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c2)
9707 = 1;
9708 tree charptr_zero
9709 = build_int_cst (build_pointer_type (char_type_node),
9710 0);
9711 OMP_CLAUSE_DECL (c2)
9712 = build2 (MEM_REF, char_type_node,
9713 decl_ref ? decl_ref : decl, charptr_zero);
9714 OMP_CLAUSE_SIZE (c2) = size_zero_node;
9715 tree c3 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
9716 OMP_CLAUSE_MAP);
9717 OMP_CLAUSE_SET_MAP_KIND (c3,
9718 GOMP_MAP_FIRSTPRIVATE_POINTER);
9719 OMP_CLAUSE_DECL (c3) = decl;
9720 OMP_CLAUSE_SIZE (c3) = size_zero_node;
9721 tree mapgrp = *prev_list_p;
9722 *prev_list_p = c2;
9723 OMP_CLAUSE_CHAIN (c3) = mapgrp;
9724 OMP_CLAUSE_CHAIN (c2) = c3;
9725
9726 struct_deref_set->add (decl);
9727 }
9728 goto do_add_decl;
9729 }
9730 /* An "attach/detach" operation on an update directive should
9731 behave as a GOMP_MAP_ALWAYS_POINTER. Beware that
9732 unlike attach or detach map kinds, GOMP_MAP_ALWAYS_POINTER
9733 depends on the previous mapping. */
9734 if (code == OACC_UPDATE
9735 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH)
9736 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_ALWAYS_POINTER);
9737 if ((DECL_P (decl)
9738 || (component_ref_p
9739 && (INDIRECT_REF_P (decl)
9740 || TREE_CODE (decl) == MEM_REF
9741 || TREE_CODE (decl) == ARRAY_REF)))
9742 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_TO_PSET
9743 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ATTACH
9744 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_DETACH
9745 && code != OACC_UPDATE
9746 && code != OMP_TARGET_UPDATE)
9747 {
9748 if (error_operand_p (decl))
9749 {
9750 remove = true;
9751 break;
9752 }
9753
9754 tree stype = TREE_TYPE (decl);
9755 if (TREE_CODE (stype) == REFERENCE_TYPE)
9756 stype = TREE_TYPE (stype);
9757 if (TYPE_SIZE_UNIT (stype) == NULL
9758 || TREE_CODE (TYPE_SIZE_UNIT (stype)) != INTEGER_CST)
9759 {
9760 error_at (OMP_CLAUSE_LOCATION (c),
9761 "mapping field %qE of variable length "
9762 "structure", OMP_CLAUSE_DECL (c));
9763 remove = true;
9764 break;
9765 }
9766
9767 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_POINTER
9768 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH)
9769 {
9770 /* Error recovery. */
9771 if (prev_list_p == NULL)
9772 {
9773 remove = true;
9774 break;
9775 }
9776
9777 /* The prev_list_p based error recovery code below is no longer
9778 valid for OpenMP. */
9779 if (code != OMP_TARGET
9780 && code != OMP_TARGET_DATA
9781 && code != OMP_TARGET_UPDATE
9782 && code != OMP_TARGET_ENTER_DATA
9783 && code != OMP_TARGET_EXIT_DATA
9784 && OMP_CLAUSE_CHAIN (*prev_list_p) != c)
9785 {
9786 tree ch = OMP_CLAUSE_CHAIN (*prev_list_p);
9787 if (ch == NULL_TREE || OMP_CLAUSE_CHAIN (ch) != c)
9788 {
9789 remove = true;
9790 break;
9791 }
9792 }
9793 }
9794
9795 poly_offset_int offset1;
9796 poly_int64 bitpos1;
9797 tree tree_offset1;
9798 tree base_ref;
9799
9800 tree base
9801 = extract_base_bit_offset (OMP_CLAUSE_DECL (c), &base_ref,
9802 &bitpos1, &offset1,
9803 &tree_offset1);
9804
9805 bool do_map_struct = (base == decl && !tree_offset1);
9806
9807 splay_tree_node n
9808 = (DECL_P (decl)
9809 ? splay_tree_lookup (ctx->variables,
9810 (splay_tree_key) decl)
9811 : NULL);
9812 bool ptr = (OMP_CLAUSE_MAP_KIND (c)
9813 == GOMP_MAP_ALWAYS_POINTER);
9814 bool attach_detach = (OMP_CLAUSE_MAP_KIND (c)
9815 == GOMP_MAP_ATTACH_DETACH);
9816 bool attach = OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
9817 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH;
9818 bool has_attachments = false;
9819 /* For OpenACC, pointers in structs should trigger an
9820 attach action. */
9821 if (attach_detach
9822 && ((region_type & (ORT_ACC | ORT_TARGET | ORT_TARGET_DATA))
9823 || code == OMP_TARGET_ENTER_DATA
9824 || code == OMP_TARGET_EXIT_DATA))
9826 {
9827 /* Turn a GOMP_MAP_ATTACH_DETACH clause into a
9828 GOMP_MAP_ATTACH or GOMP_MAP_DETACH clause after we
9829 have detected a case that needs a GOMP_MAP_STRUCT
9830 mapping added. */
9831 gomp_map_kind k
9832 = ((code == OACC_EXIT_DATA || code == OMP_TARGET_EXIT_DATA)
9833 ? GOMP_MAP_DETACH : GOMP_MAP_ATTACH);
9834 OMP_CLAUSE_SET_MAP_KIND (c, k);
9835 has_attachments = true;
9836 }
9837
9838 /* We currently don't handle non-constant offset accesses with respect
9839 to GOMP_MAP_STRUCT elements. */
9840 if (!do_map_struct)
9841 goto skip_map_struct;
9842
9843 /* Nor for attach_detach for OpenMP. */
9844 if ((code == OMP_TARGET
9845 || code == OMP_TARGET_DATA
9846 || code == OMP_TARGET_UPDATE
9847 || code == OMP_TARGET_ENTER_DATA
9848 || code == OMP_TARGET_EXIT_DATA)
9849 && attach_detach)
9850 {
9851 if (DECL_P (decl))
9852 {
9853 if (struct_seen_clause == NULL)
9854 struct_seen_clause
9855 = new hash_map<tree_operand_hash, tree *>;
9856 if (!struct_seen_clause->get (decl))
9857 struct_seen_clause->put (decl, list_p);
9858 }
9859
9860 goto skip_map_struct;
9861 }
9862
9863 if ((DECL_P (decl)
9864 && (n == NULL || (n->value & GOVD_MAP) == 0))
9865 || (!DECL_P (decl)
9866 && (!struct_map_to_clause
9867 || struct_map_to_clause->get (decl) == NULL)))
9868 {
9869 tree l = build_omp_clause (OMP_CLAUSE_LOCATION (c),
9870 OMP_CLAUSE_MAP);
9871 gomp_map_kind k = attach ? GOMP_MAP_FORCE_PRESENT
9872 : GOMP_MAP_STRUCT;
9873
9874 OMP_CLAUSE_SET_MAP_KIND (l, k);
9875 if (base_ref)
9876 OMP_CLAUSE_DECL (l) = unshare_expr (base_ref);
9877 else
9878 {
9879 OMP_CLAUSE_DECL (l) = unshare_expr (decl);
9880 if (!DECL_P (OMP_CLAUSE_DECL (l))
9881 && (gimplify_expr (&OMP_CLAUSE_DECL (l),
9882 pre_p, NULL, is_gimple_lvalue,
9883 fb_lvalue)
9884 == GS_ERROR))
9885 {
9886 remove = true;
9887 break;
9888 }
9889 }
9890 OMP_CLAUSE_SIZE (l)
9891 = (!attach
9892 ? size_int (1)
9893 : DECL_P (OMP_CLAUSE_DECL (l))
9894 ? DECL_SIZE_UNIT (OMP_CLAUSE_DECL (l))
9895 : TYPE_SIZE_UNIT (TREE_TYPE (OMP_CLAUSE_DECL (l))));
9896 if (struct_map_to_clause == NULL)
9897 struct_map_to_clause
9898 = new hash_map<tree_operand_hash, tree>;
9899 struct_map_to_clause->put (decl, l);
9900 if (ptr || attach_detach)
9901 {
9902 tree **sc = (struct_seen_clause
9903 ? struct_seen_clause->get (decl)
9904 : NULL);
9905 tree *insert_node_pos = sc ? *sc : prev_list_p;
9906
9907 insert_struct_comp_map (code, c, l, *insert_node_pos,
9908 NULL);
9909 *insert_node_pos = l;
9910 prev_list_p = NULL;
9911 }
9912 else
9913 {
9914 OMP_CLAUSE_CHAIN (l) = c;
9915 *list_p = l;
9916 list_p = &OMP_CLAUSE_CHAIN (l);
9917 }
9918 if (base_ref && code == OMP_TARGET)
9919 {
9920 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
9921 OMP_CLAUSE_MAP);
9922 enum gomp_map_kind mkind
9923 = GOMP_MAP_FIRSTPRIVATE_REFERENCE;
9924 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
9925 OMP_CLAUSE_DECL (c2) = decl;
9926 OMP_CLAUSE_SIZE (c2) = size_zero_node;
9927 OMP_CLAUSE_CHAIN (c2) = OMP_CLAUSE_CHAIN (l);
9928 OMP_CLAUSE_CHAIN (l) = c2;
9929 }
9930 flags = GOVD_MAP | GOVD_EXPLICIT;
9931 if (GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c))
9932 || ptr
9933 || attach_detach)
9934 flags |= GOVD_SEEN;
9935 if (has_attachments)
9936 flags |= GOVD_MAP_HAS_ATTACHMENTS;
9937
9938 /* If this is a *pointer-to-struct expression, make sure a
9939 firstprivate map of the base-pointer exists. */
9940 if (component_ref_p
9941 && ((TREE_CODE (decl) == MEM_REF
9942 && integer_zerop (TREE_OPERAND (decl, 1)))
9943 || INDIRECT_REF_P (decl))
9944 && DECL_P (TREE_OPERAND (decl, 0))
9945 && !splay_tree_lookup (ctx->variables,
9946 ((splay_tree_key)
9947 TREE_OPERAND (decl, 0))))
9948 {
9949 decl = TREE_OPERAND (decl, 0);
9950 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
9951 OMP_CLAUSE_MAP);
9952 enum gomp_map_kind mkind
9953 = GOMP_MAP_FIRSTPRIVATE_POINTER;
9954 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
9955 OMP_CLAUSE_DECL (c2) = decl;
9956 OMP_CLAUSE_SIZE (c2) = size_zero_node;
9957 OMP_CLAUSE_CHAIN (c2) = OMP_CLAUSE_CHAIN (c);
9958 OMP_CLAUSE_CHAIN (c) = c2;
9959 }
9960
9961 if (DECL_P (decl))
9962 goto do_add_decl;
9963 }
9964 else if (struct_map_to_clause)
9965 {
9966 tree *osc = struct_map_to_clause->get (decl);
9967 tree *sc = NULL, *scp = NULL;
9968 if (n != NULL
9969 && (GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c))
9970 || ptr
9971 || attach_detach))
9972 n->value |= GOVD_SEEN;
9973 sc = &OMP_CLAUSE_CHAIN (*osc);
9974 if (*sc != c
9975 && (OMP_CLAUSE_MAP_KIND (*sc)
9976 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
9977 sc = &OMP_CLAUSE_CHAIN (*sc);
9978 /* Here "prev_list_p" is the end of the inserted
9979 alloc/release nodes after the struct node, OSC. */
9980 for (; *sc != c; sc = &OMP_CLAUSE_CHAIN (*sc))
9981 if ((ptr || attach_detach) && sc == prev_list_p)
9982 break;
9983 else if (TREE_CODE (OMP_CLAUSE_DECL (*sc))
9984 != COMPONENT_REF
9985 && (TREE_CODE (OMP_CLAUSE_DECL (*sc))
9986 != INDIRECT_REF)
9987 && (TREE_CODE (OMP_CLAUSE_DECL (*sc))
9988 != ARRAY_REF))
9989 break;
9990 else
9991 {
9992 tree sc_decl = OMP_CLAUSE_DECL (*sc);
9993 poly_offset_int offsetn;
9994 poly_int64 bitposn;
9995 tree tree_offsetn;
9996 tree base
9997 = extract_base_bit_offset (sc_decl, NULL,
9998 &bitposn, &offsetn,
9999 &tree_offsetn);
10000 if (base != decl)
10001 break;
10002 if (scp)
10003 continue;
10004 if ((region_type & ORT_ACC) != 0)
10005 {
10006 /* This duplicate checking code is currently only
10007 enabled for OpenACC. */
10008 tree d1 = OMP_CLAUSE_DECL (*sc);
10009 tree d2 = OMP_CLAUSE_DECL (c);
10010 while (TREE_CODE (d1) == ARRAY_REF)
10011 d1 = TREE_OPERAND (d1, 0);
10012 while (TREE_CODE (d2) == ARRAY_REF)
10013 d2 = TREE_OPERAND (d2, 0);
10014 if (TREE_CODE (d1) == INDIRECT_REF)
10015 d1 = TREE_OPERAND (d1, 0);
10016 if (TREE_CODE (d2) == INDIRECT_REF)
10017 d2 = TREE_OPERAND (d2, 0);
10018 while (TREE_CODE (d1) == COMPONENT_REF)
10019 if (TREE_CODE (d2) == COMPONENT_REF
10020 && TREE_OPERAND (d1, 1)
10021 == TREE_OPERAND (d2, 1))
10022 {
10023 d1 = TREE_OPERAND (d1, 0);
10024 d2 = TREE_OPERAND (d2, 0);
10025 }
10026 else
10027 break;
10028 if (d1 == d2)
10029 {
10030 error_at (OMP_CLAUSE_LOCATION (c),
10031 "%qE appears more than once in map "
10032 "clauses", OMP_CLAUSE_DECL (c));
10033 remove = true;
10034 break;
10035 }
10036 }
10037 if (maybe_lt (offset1, offsetn)
10038 || (known_eq (offset1, offsetn)
10039 && maybe_lt (bitpos1, bitposn)))
10040 {
10041 if (ptr || attach_detach)
10042 scp = sc;
10043 else
10044 break;
10045 }
10046 }
10047 if (remove)
10048 break;
10049 if (!attach)
10050 OMP_CLAUSE_SIZE (*osc)
10051 = size_binop (PLUS_EXPR, OMP_CLAUSE_SIZE (*osc),
10052 size_one_node);
10053 if (ptr || attach_detach)
10054 {
10055 tree cl = insert_struct_comp_map (code, c, NULL,
10056 *prev_list_p, scp);
10057 if (sc == prev_list_p)
10058 {
10059 *sc = cl;
10060 prev_list_p = NULL;
10061 }
10062 else
10063 {
10064 *prev_list_p = OMP_CLAUSE_CHAIN (c);
10065 list_p = prev_list_p;
10066 prev_list_p = NULL;
10067 OMP_CLAUSE_CHAIN (c) = *sc;
10068 *sc = cl;
10069 continue;
10070 }
10071 }
10072 else if (*sc != c)
10073 {
10074 if (gimplify_expr (pd, pre_p, NULL, is_gimple_lvalue,
10075 fb_lvalue)
10076 == GS_ERROR)
10077 {
10078 remove = true;
10079 break;
10080 }
10081 *list_p = OMP_CLAUSE_CHAIN (c);
10082 OMP_CLAUSE_CHAIN (c) = *sc;
10083 *sc = c;
10084 continue;
10085 }
10086 }
10087 skip_map_struct:
10088 ;
10089 }
10090 else if ((code == OACC_ENTER_DATA
10091 || code == OACC_EXIT_DATA
10092 || code == OACC_DATA
10093 || code == OACC_PARALLEL
10094 || code == OACC_KERNELS
10095 || code == OACC_SERIAL
10096 || code == OMP_TARGET_ENTER_DATA
10097 || code == OMP_TARGET_EXIT_DATA)
10098 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH)
10099 {
10100 gomp_map_kind k = ((code == OACC_EXIT_DATA
10101 || code == OMP_TARGET_EXIT_DATA)
10102 ? GOMP_MAP_DETACH : GOMP_MAP_ATTACH);
10103 OMP_CLAUSE_SET_MAP_KIND (c, k);
10104 }
10105
10106 if (code == OMP_TARGET && OMP_CLAUSE_MAP_IN_REDUCTION (c))
10107 {
10108 /* Don't gimplify *pd fully at this point, as the base
10109 will need to be adjusted during omp lowering. */
10110 auto_vec<tree, 10> expr_stack;
10111 tree *p = pd;
10112 while (handled_component_p (*p)
10113 || TREE_CODE (*p) == INDIRECT_REF
10114 || TREE_CODE (*p) == ADDR_EXPR
10115 || TREE_CODE (*p) == MEM_REF
10116 || TREE_CODE (*p) == NON_LVALUE_EXPR)
10117 {
10118 expr_stack.safe_push (*p);
10119 p = &TREE_OPERAND (*p, 0);
10120 }
10121 for (int i = expr_stack.length () - 1; i >= 0; i--)
10122 {
10123 tree t = expr_stack[i];
10124 if (TREE_CODE (t) == ARRAY_REF
10125 || TREE_CODE (t) == ARRAY_RANGE_REF)
10126 {
10127 if (TREE_OPERAND (t, 2) == NULL_TREE)
10128 {
10129 tree low = unshare_expr (array_ref_low_bound (t));
10130 if (!is_gimple_min_invariant (low))
10131 {
10132 TREE_OPERAND (t, 2) = low;
10133 if (gimplify_expr (&TREE_OPERAND (t, 2),
10134 pre_p, NULL,
10135 is_gimple_reg,
10136 fb_rvalue) == GS_ERROR)
10137 remove = true;
10138 }
10139 }
10140 else if (gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
10141 NULL, is_gimple_reg,
10142 fb_rvalue) == GS_ERROR)
10143 remove = true;
10144 if (TREE_OPERAND (t, 3) == NULL_TREE)
10145 {
10146 tree elmt_size = array_ref_element_size (t);
10147 if (!is_gimple_min_invariant (elmt_size))
10148 {
10149 elmt_size = unshare_expr (elmt_size);
10150 tree elmt_type
10151 = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t,
10152 0)));
10153 tree factor
10154 = size_int (TYPE_ALIGN_UNIT (elmt_type));
10155 elmt_size
10156 = size_binop (EXACT_DIV_EXPR, elmt_size,
10157 factor);
10158 TREE_OPERAND (t, 3) = elmt_size;
10159 if (gimplify_expr (&TREE_OPERAND (t, 3),
10160 pre_p, NULL,
10161 is_gimple_reg,
10162 fb_rvalue) == GS_ERROR)
10163 remove = true;
10164 }
10165 }
10166 else if (gimplify_expr (&TREE_OPERAND (t, 3), pre_p,
10167 NULL, is_gimple_reg,
10168 fb_rvalue) == GS_ERROR)
10169 remove = true;
10170 }
10171 else if (TREE_CODE (t) == COMPONENT_REF)
10172 {
10173 if (TREE_OPERAND (t, 2) == NULL_TREE)
10174 {
10175 tree offset = component_ref_field_offset (t);
10176 if (!is_gimple_min_invariant (offset))
10177 {
10178 offset = unshare_expr (offset);
10179 tree field = TREE_OPERAND (t, 1);
10180 tree factor
10181 = size_int (DECL_OFFSET_ALIGN (field)
10182 / BITS_PER_UNIT);
10183 offset = size_binop (EXACT_DIV_EXPR, offset,
10184 factor);
10185 TREE_OPERAND (t, 2) = offset;
10186 if (gimplify_expr (&TREE_OPERAND (t, 2),
10187 pre_p, NULL,
10188 is_gimple_reg,
10189 fb_rvalue) == GS_ERROR)
10190 remove = true;
10191 }
10192 }
10193 else if (gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
10194 NULL, is_gimple_reg,
10195 fb_rvalue) == GS_ERROR)
10196 remove = true;
10197 }
10198 }
10199 for (; expr_stack.length () > 0; )
10200 {
10201 tree t = expr_stack.pop ();
10202
10203 if (TREE_CODE (t) == ARRAY_REF
10204 || TREE_CODE (t) == ARRAY_RANGE_REF)
10205 {
10206 if (!is_gimple_min_invariant (TREE_OPERAND (t, 1))
10207 && gimplify_expr (&TREE_OPERAND (t, 1), pre_p,
10208 NULL, is_gimple_val,
10209 fb_rvalue) == GS_ERROR)
10210 remove = true;
10211 }
10212 }
10213 }
10214 else if (gimplify_expr (pd, pre_p, NULL, is_gimple_lvalue,
10215 fb_lvalue) == GS_ERROR)
10216 {
10217 remove = true;
10218 break;
10219 }
10220
10221 /* If this was of the form map(*pointer_to_struct), then the
10222 'pointer_to_struct' DECL should be considered deref'ed. */
10223 if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALLOC
10224 || GOMP_MAP_COPY_TO_P (OMP_CLAUSE_MAP_KIND (c))
10225 || GOMP_MAP_COPY_FROM_P (OMP_CLAUSE_MAP_KIND (c)))
10226 && INDIRECT_REF_P (orig_decl)
10227 && DECL_P (TREE_OPERAND (orig_decl, 0))
10228 && TREE_CODE (TREE_TYPE (orig_decl)) == RECORD_TYPE)
10229 {
10230 tree ptr = TREE_OPERAND (orig_decl, 0);
10231 if (!struct_deref_set || !struct_deref_set->contains (ptr))
10232 {
10233 if (!struct_deref_set)
10234 struct_deref_set = new hash_set<tree> ();
10235 struct_deref_set->add (ptr);
10236 }
10237 }
10238
10239 if (!remove
10240 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_POINTER
10241 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ATTACH_DETACH
10242 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_TO_PSET
10243 && OMP_CLAUSE_CHAIN (c)
10244 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c)) == OMP_CLAUSE_MAP
10245 && ((OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
10246 == GOMP_MAP_ALWAYS_POINTER)
10247 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
10248 == GOMP_MAP_ATTACH_DETACH)
10249 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
10250 == GOMP_MAP_TO_PSET)))
10251 prev_list_p = list_p;
10252
10253 break;
10254 }
10255 else
10256 {
10257 /* DECL_P (decl) == true */
10258 tree *sc;
10259 if (struct_map_to_clause
10260 && (sc = struct_map_to_clause->get (decl)) != NULL
10261 && OMP_CLAUSE_MAP_KIND (*sc) == GOMP_MAP_STRUCT
10262 && decl == OMP_CLAUSE_DECL (*sc))
10263 {
10264 /* We have found a map of the whole structure after a
10265 leading GOMP_MAP_STRUCT has been created, so refill the
10266 leading clause into a map of the whole structure
10267 variable, and remove the current one.
10268 TODO: we should be able to remove some of the
10269 following structure element maps if they are of a
10270 compatible TO/FROM/ALLOC type. */
10271 OMP_CLAUSE_SET_MAP_KIND (*sc, OMP_CLAUSE_MAP_KIND (c));
10272 OMP_CLAUSE_SIZE (*sc) = unshare_expr (OMP_CLAUSE_SIZE (c));
10273 remove = true;
10274 break;
10275 }
10276 }
10277 flags = GOVD_MAP | GOVD_EXPLICIT;
10278 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TO
10279 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TOFROM)
10280 flags |= GOVD_MAP_ALWAYS_TO;
10281
10282 if ((code == OMP_TARGET
10283 || code == OMP_TARGET_DATA
10284 || code == OMP_TARGET_ENTER_DATA
10285 || code == OMP_TARGET_EXIT_DATA)
10286 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH)
10287 {
10288 for (struct gimplify_omp_ctx *octx = outer_ctx; octx;
10289 octx = octx->outer_context)
10290 {
10291 splay_tree_node n
10292 = splay_tree_lookup (octx->variables,
10293 (splay_tree_key) OMP_CLAUSE_DECL (c));
10294 /* If this is contained in an outer OpenMP region as a
10295 firstprivate value, remove the attach/detach. */
10296 if (n && (n->value & GOVD_FIRSTPRIVATE))
10297 {
10298 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FIRSTPRIVATE_POINTER);
10299 goto do_add;
10300 }
10301 }
10302
10303 enum gomp_map_kind map_kind = (code == OMP_TARGET_EXIT_DATA
10304 ? GOMP_MAP_DETACH
10305 : GOMP_MAP_ATTACH);
10306 OMP_CLAUSE_SET_MAP_KIND (c, map_kind);
10307 }
10308
10309 goto do_add;
10310
10311 case OMP_CLAUSE_AFFINITY:
10312 gimplify_omp_affinity (list_p, pre_p);
10313 remove = true;
10314 break;
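/* For doacross dependences, depend(sink: ...) (e.g.
   #pragma omp ordered depend(sink: i - 1)) only needs the
   non-constant parts recorded in its sink vector gimplified, and
   depend(source) needs no processing at all; the remaining depend
   kinds have their operand lowered to an address below.  */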
10315 case OMP_CLAUSE_DEPEND:
10316 if (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
10317 {
10318 tree deps = OMP_CLAUSE_DECL (c);
10319 while (deps && TREE_CODE (deps) == TREE_LIST)
10320 {
10321 if (TREE_CODE (TREE_PURPOSE (deps)) == TRUNC_DIV_EXPR
10322 && DECL_P (TREE_OPERAND (TREE_PURPOSE (deps), 1)))
10323 gimplify_expr (&TREE_OPERAND (TREE_PURPOSE (deps), 1),
10324 pre_p, NULL, is_gimple_val, fb_rvalue);
10325 deps = TREE_CHAIN (deps);
10326 }
10327 break;
10328 }
10329 else if (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE)
10330 break;
10331 if (handled_depend_iterators == -1)
10332 handled_depend_iterators = gimplify_omp_depend (list_p, pre_p);
10333 if (handled_depend_iterators)
10334 {
10335 if (handled_depend_iterators == 2)
10336 remove = true;
10337 break;
10338 }
10339 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
10340 {
10341 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
10342 NULL, is_gimple_val, fb_rvalue);
10343 OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
10344 }
10345 if (error_operand_p (OMP_CLAUSE_DECL (c)))
10346 {
10347 remove = true;
10348 break;
10349 }
10350 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (OMP_CLAUSE_DECL (c));
10351 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
10352 is_gimple_val, fb_rvalue) == GS_ERROR)
10353 {
10354 remove = true;
10355 break;
10356 }
10357 if (code == OMP_TASK)
10358 ctx->has_depend = true;
10359 break;
10360
10361 case OMP_CLAUSE_TO:
10362 case OMP_CLAUSE_FROM:
10363 case OMP_CLAUSE__CACHE_:
10364 decl = OMP_CLAUSE_DECL (c);
10365 if (error_operand_p (decl))
10366 {
10367 remove = true;
10368 break;
10369 }
10370 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
10371 OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
10372 : TYPE_SIZE_UNIT (TREE_TYPE (decl));
10373 if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
10374 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
10375 {
10376 remove = true;
10377 break;
10378 }
10379 if (!DECL_P (decl))
10380 {
10381 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p,
10382 NULL, is_gimple_lvalue, fb_lvalue)
10383 == GS_ERROR)
10384 {
10385 remove = true;
10386 break;
10387 }
10388 break;
10389 }
10390 goto do_notice;
10391
10392 case OMP_CLAUSE_USE_DEVICE_PTR:
10393 case OMP_CLAUSE_USE_DEVICE_ADDR:
10394 flags = GOVD_EXPLICIT;
10395 goto do_add;
10396
10397 case OMP_CLAUSE_HAS_DEVICE_ADDR:
10398 decl = OMP_CLAUSE_DECL (c);
10399 while (TREE_CODE (decl) == INDIRECT_REF
10400 || TREE_CODE (decl) == ARRAY_REF)
10401 decl = TREE_OPERAND (decl, 0);
10402 flags = GOVD_EXPLICIT;
10403 goto do_add_decl;
10404
10405 case OMP_CLAUSE_IS_DEVICE_PTR:
10406 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
10407 goto do_add;
10408
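/* Shared tail for most of the clauses above: register DECL with FLAGS
   in the current context, then gimplify any reduction, lastprivate or
   linear helper statements into their own gimple sequences.  */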
10409 do_add:
10410 decl = OMP_CLAUSE_DECL (c);
10411 do_add_decl:
10412 if (error_operand_p (decl))
10413 {
10414 remove = true;
10415 break;
10416 }
10417 if (DECL_NAME (decl) == NULL_TREE && (flags & GOVD_SHARED) == 0)
10418 {
10419 tree t = omp_member_access_dummy_var (decl);
10420 if (t)
10421 {
10422 tree v = DECL_VALUE_EXPR (decl);
10423 DECL_NAME (decl) = DECL_NAME (TREE_OPERAND (v, 1));
10424 if (outer_ctx)
10425 omp_notice_variable (outer_ctx, t, true);
10426 }
10427 }
10428 if (code == OACC_DATA
10429 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
10430 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
10431 flags |= GOVD_MAP_0LEN_ARRAY;
10432 omp_add_variable (ctx, decl, flags);
10433 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
10434 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
10435 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
10436 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
10437 {
10438 struct gimplify_omp_ctx *pctx
10439 = code == OMP_TARGET ? outer_ctx : ctx;
10440 if (pctx)
10441 omp_add_variable (pctx, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
10442 GOVD_LOCAL | GOVD_SEEN);
10443 if (pctx
10444 && OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c)
10445 && walk_tree (&OMP_CLAUSE_REDUCTION_INIT (c),
10446 find_decl_expr,
10447 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
10448 NULL) == NULL_TREE)
10449 omp_add_variable (pctx,
10450 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
10451 GOVD_LOCAL | GOVD_SEEN);
10452 gimplify_omp_ctxp = pctx;
10453 push_gimplify_context ();
10454
10455 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
10456 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
10457
10458 gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c),
10459 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c));
10460 pop_gimplify_context
10461 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c)));
10462 push_gimplify_context ();
10463 gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c),
10464 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
10465 pop_gimplify_context
10466 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c)));
10467 OMP_CLAUSE_REDUCTION_INIT (c) = NULL_TREE;
10468 OMP_CLAUSE_REDUCTION_MERGE (c) = NULL_TREE;
10469
10470 gimplify_omp_ctxp = outer_ctx;
10471 }
10472 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
10473 && OMP_CLAUSE_LASTPRIVATE_STMT (c))
10474 {
10475 gimplify_omp_ctxp = ctx;
10476 push_gimplify_context ();
10477 if (TREE_CODE (OMP_CLAUSE_LASTPRIVATE_STMT (c)) != BIND_EXPR)
10478 {
10479 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
10480 NULL, NULL);
10481 TREE_SIDE_EFFECTS (bind) = 1;
10482 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LASTPRIVATE_STMT (c);
10483 OMP_CLAUSE_LASTPRIVATE_STMT (c) = bind;
10484 }
10485 gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c),
10486 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
10487 pop_gimplify_context
10488 (gimple_seq_first_stmt (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c)));
10489 OMP_CLAUSE_LASTPRIVATE_STMT (c) = NULL_TREE;
10490
10491 gimplify_omp_ctxp = outer_ctx;
10492 }
10493 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
10494 && OMP_CLAUSE_LINEAR_STMT (c))
10495 {
10496 gimplify_omp_ctxp = ctx;
10497 push_gimplify_context ();
10498 if (TREE_CODE (OMP_CLAUSE_LINEAR_STMT (c)) != BIND_EXPR)
10499 {
10500 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
10501 NULL, NULL);
10502 TREE_SIDE_EFFECTS (bind) = 1;
10503 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LINEAR_STMT (c);
10504 OMP_CLAUSE_LINEAR_STMT (c) = bind;
10505 }
10506 gimplify_and_add (OMP_CLAUSE_LINEAR_STMT (c),
10507 &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
10508 pop_gimplify_context
10509 (gimple_seq_first_stmt (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c)));
10510 OMP_CLAUSE_LINEAR_STMT (c) = NULL_TREE;
10511
10512 gimplify_omp_ctxp = outer_ctx;
10513 }
10514 if (notice_outer)
10515 goto do_notice;
10516 break;
10517
10518 case OMP_CLAUSE_COPYIN:
10519 case OMP_CLAUSE_COPYPRIVATE:
10520 decl = OMP_CLAUSE_DECL (c);
10521 if (error_operand_p (decl))
10522 {
10523 remove = true;
10524 break;
10525 }
10526 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_COPYPRIVATE
10527 && !remove
10528 && !omp_check_private (ctx, decl, true))
10529 {
10530 remove = true;
10531 if (is_global_var (decl))
10532 {
10533 if (DECL_THREAD_LOCAL_P (decl))
10534 remove = false;
10535 else if (DECL_HAS_VALUE_EXPR_P (decl))
10536 {
10537 tree value = get_base_address (DECL_VALUE_EXPR (decl));
10538
10539 if (value
10540 && DECL_P (value)
10541 && DECL_THREAD_LOCAL_P (value))
10542 remove = false;
10543 }
10544 }
10545 if (remove)
10546 error_at (OMP_CLAUSE_LOCATION (c),
10547 "copyprivate variable %qE is not threadprivate"
10548 " or private in outer context", DECL_NAME (decl));
10549 }
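/* Shared tail that notifies the enclosing contexts about DECL and
   diagnoses clauses whose variable is private in the outer context.  */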
10550 do_notice:
10551 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
10552 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
10553 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
10554 && outer_ctx
10555 && ((region_type & ORT_TASKLOOP) == ORT_TASKLOOP
10556 || (region_type == ORT_WORKSHARE
10557 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
10558 && (OMP_CLAUSE_REDUCTION_INSCAN (c)
10559 || code == OMP_LOOP)))
10560 && (outer_ctx->region_type == ORT_COMBINED_PARALLEL
10561 || (code == OMP_LOOP
10562 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
10563 && ((outer_ctx->region_type & ORT_COMBINED_TEAMS)
10564 == ORT_COMBINED_TEAMS))))
10565 {
10566 splay_tree_node on
10567 = splay_tree_lookup (outer_ctx->variables,
10568 (splay_tree_key)decl);
10569 if (on == NULL || (on->value & GOVD_DATA_SHARE_CLASS) == 0)
10570 {
10571 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
10572 && TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
10573 && (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
10574 || (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
10575 && (TREE_CODE (TREE_TYPE (TREE_TYPE (decl)))
10576 == POINTER_TYPE))))
10577 omp_firstprivatize_variable (outer_ctx, decl);
10578 else
10579 {
10580 omp_add_variable (outer_ctx, decl,
10581 GOVD_SEEN | GOVD_SHARED);
10582 if (outer_ctx->outer_context)
10583 omp_notice_variable (outer_ctx->outer_context, decl,
10584 true);
10585 }
10586 }
10587 }
10588 if (outer_ctx)
10589 omp_notice_variable (outer_ctx, decl, true);
10590 if (check_non_private
10591 && (region_type == ORT_WORKSHARE || code == OMP_SCOPE)
10592 && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
10593 || decl == OMP_CLAUSE_DECL (c)
10594 || (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
10595 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
10596 == ADDR_EXPR
10597 || (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
10598 == POINTER_PLUS_EXPR
10599 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND
10600 (OMP_CLAUSE_DECL (c), 0), 0))
10601 == ADDR_EXPR)))))
10602 && omp_check_private (ctx, decl, false))
10603 {
10604 error ("%s variable %qE is private in outer context",
10605 check_non_private, DECL_NAME (decl));
10606 remove = true;
10607 }
10608 break;
10609
10610 case OMP_CLAUSE_DETACH:
10611 flags = GOVD_FIRSTPRIVATE | GOVD_SEEN;
10612 goto do_add;
10613
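/* An if clause with a directive-name modifier must name the current
   construct, e.g. #pragma omp parallel if (task: x) is rejected with
   "expected 'parallel' 'if' clause modifier rather than 'task'".  */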
10614 case OMP_CLAUSE_IF:
10615 if (OMP_CLAUSE_IF_MODIFIER (c) != ERROR_MARK
10616 && OMP_CLAUSE_IF_MODIFIER (c) != code)
10617 {
10618 const char *p[2];
10619 for (int i = 0; i < 2; i++)
10620 switch (i ? OMP_CLAUSE_IF_MODIFIER (c) : code)
10621 {
10622 case VOID_CST: p[i] = "cancel"; break;
10623 case OMP_PARALLEL: p[i] = "parallel"; break;
10624 case OMP_SIMD: p[i] = "simd"; break;
10625 case OMP_TASK: p[i] = "task"; break;
10626 case OMP_TASKLOOP: p[i] = "taskloop"; break;
10627 case OMP_TARGET_DATA: p[i] = "target data"; break;
10628 case OMP_TARGET: p[i] = "target"; break;
10629 case OMP_TARGET_UPDATE: p[i] = "target update"; break;
10630 case OMP_TARGET_ENTER_DATA:
10631 p[i] = "target enter data"; break;
10632 case OMP_TARGET_EXIT_DATA: p[i] = "target exit data"; break;
10633 default: gcc_unreachable ();
10634 }
10635 error_at (OMP_CLAUSE_LOCATION (c),
10636 "expected %qs %<if%> clause modifier rather than %qs",
10637 p[0], p[1]);
10638 remove = true;
10639 }
10640 /* Fall through. */
10641
10642 case OMP_CLAUSE_FINAL:
10643 OMP_CLAUSE_OPERAND (c, 0)
10644 = gimple_boolify (OMP_CLAUSE_OPERAND (c, 0));
10645 /* Fall through. */
10646
10647 case OMP_CLAUSE_NUM_TEAMS:
10648 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_NUM_TEAMS
10649 && OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c)
10650 && !is_gimple_min_invariant (OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c)))
10651 {
10652 if (error_operand_p (OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c)))
10653 {
10654 remove = true;
10655 break;
10656 }
10657 OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c)
10658 = get_initialized_tmp_var (OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c),
10659 pre_p, NULL, true);
10660 }
10661 /* Fall through. */
10662
10663 case OMP_CLAUSE_SCHEDULE:
10664 case OMP_CLAUSE_NUM_THREADS:
10665 case OMP_CLAUSE_THREAD_LIMIT:
10666 case OMP_CLAUSE_DIST_SCHEDULE:
10667 case OMP_CLAUSE_DEVICE:
10668 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEVICE
10669 && OMP_CLAUSE_DEVICE_ANCESTOR (c))
10670 {
10671 if (code != OMP_TARGET)
10672 {
10673 error_at (OMP_CLAUSE_LOCATION (c),
10674 "%<device%> clause with %<ancestor%> is only "
10675 "allowed on %<target%> construct");
10676 remove = true;
10677 break;
10678 }
10679
10680 tree clauses = *orig_list_p;
10681 for (; clauses ; clauses = OMP_CLAUSE_CHAIN (clauses))
10682 if (OMP_CLAUSE_CODE (clauses) != OMP_CLAUSE_DEVICE
10683 && OMP_CLAUSE_CODE (clauses) != OMP_CLAUSE_FIRSTPRIVATE
10684 && OMP_CLAUSE_CODE (clauses) != OMP_CLAUSE_PRIVATE
10685 && OMP_CLAUSE_CODE (clauses) != OMP_CLAUSE_DEFAULTMAP
10686 && OMP_CLAUSE_CODE (clauses) != OMP_CLAUSE_MAP
10687 )
10688 {
10689 error_at (OMP_CLAUSE_LOCATION (c),
10690 "with %<ancestor%>, only the %<device%>, "
10691 "%<firstprivate%>, %<private%>, %<defaultmap%>, "
10692 "and %<map%> clauses may appear on the "
10693 "construct");
10694 remove = true;
10695 break;
10696 }
10697 }
10698 /* Fall through. */
10699
10700 case OMP_CLAUSE_PRIORITY:
10701 case OMP_CLAUSE_GRAINSIZE:
10702 case OMP_CLAUSE_NUM_TASKS:
10703 case OMP_CLAUSE_FILTER:
10704 case OMP_CLAUSE_HINT:
10705 case OMP_CLAUSE_ASYNC:
10706 case OMP_CLAUSE_WAIT:
10707 case OMP_CLAUSE_NUM_GANGS:
10708 case OMP_CLAUSE_NUM_WORKERS:
10709 case OMP_CLAUSE_VECTOR_LENGTH:
10710 case OMP_CLAUSE_WORKER:
10711 case OMP_CLAUSE_VECTOR:
10712 if (OMP_CLAUSE_OPERAND (c, 0)
10713 && !is_gimple_min_invariant (OMP_CLAUSE_OPERAND (c, 0)))
10714 {
10715 if (error_operand_p (OMP_CLAUSE_OPERAND (c, 0)))
10716 {
10717 remove = true;
10718 break;
10719 }
10720 /* All these clauses care about the value, not a particular decl,
10721 so try to force it into an SSA_NAME or a fresh temporary. */
10722 OMP_CLAUSE_OPERAND (c, 0)
10723 = get_initialized_tmp_var (OMP_CLAUSE_OPERAND (c, 0),
10724 pre_p, NULL, true);
10725 }
10726 break;
10727
10728 case OMP_CLAUSE_GANG:
10729 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
10730 is_gimple_val, fb_rvalue) == GS_ERROR)
10731 remove = true;
10732 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 1), pre_p, NULL,
10733 is_gimple_val, fb_rvalue) == GS_ERROR)
10734 remove = true;
10735 break;
10736
10737 case OMP_CLAUSE_NOWAIT:
10738 nowait = 1;
10739 break;
10740
10741 case OMP_CLAUSE_ORDERED:
10742 case OMP_CLAUSE_UNTIED:
10743 case OMP_CLAUSE_COLLAPSE:
10744 case OMP_CLAUSE_TILE:
10745 case OMP_CLAUSE_AUTO:
10746 case OMP_CLAUSE_SEQ:
10747 case OMP_CLAUSE_INDEPENDENT:
10748 case OMP_CLAUSE_MERGEABLE:
10749 case OMP_CLAUSE_PROC_BIND:
10750 case OMP_CLAUSE_SAFELEN:
10751 case OMP_CLAUSE_SIMDLEN:
10752 case OMP_CLAUSE_NOGROUP:
10753 case OMP_CLAUSE_THREADS:
10754 case OMP_CLAUSE_SIMD:
10755 case OMP_CLAUSE_BIND:
10756 case OMP_CLAUSE_IF_PRESENT:
10757 case OMP_CLAUSE_FINALIZE:
10758 break;
10759
10760 case OMP_CLAUSE_ORDER:
10761 ctx->order_concurrent = true;
10762 break;
10763
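/* A defaultmap clause updates the implicit data-mapping rules for a
   range of GDMK_* variable categories: an explicit category selects
   just that entry (scalar also covers GDMK_SCALAR_TARGET), while an
   unspecified category covers all of them.  */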
10764 case OMP_CLAUSE_DEFAULTMAP:
10765 enum gimplify_defaultmap_kind gdmkmin, gdmkmax;
10766 switch (OMP_CLAUSE_DEFAULTMAP_CATEGORY (c))
10767 {
10768 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_UNSPECIFIED:
10769 gdmkmin = GDMK_SCALAR;
10770 gdmkmax = GDMK_POINTER;
10771 break;
10772 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_SCALAR:
10773 gdmkmin = GDMK_SCALAR;
10774 gdmkmax = GDMK_SCALAR_TARGET;
10775 break;
10776 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_AGGREGATE:
10777 gdmkmin = gdmkmax = GDMK_AGGREGATE;
10778 break;
10779 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_ALLOCATABLE:
10780 gdmkmin = gdmkmax = GDMK_ALLOCATABLE;
10781 break;
10782 case OMP_CLAUSE_DEFAULTMAP_CATEGORY_POINTER:
10783 gdmkmin = gdmkmax = GDMK_POINTER;
10784 break;
10785 default:
10786 gcc_unreachable ();
10787 }
10788 for (int gdmk = gdmkmin; gdmk <= gdmkmax; gdmk++)
10789 switch (OMP_CLAUSE_DEFAULTMAP_BEHAVIOR (c))
10790 {
10791 case OMP_CLAUSE_DEFAULTMAP_ALLOC:
10792 ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_ALLOC_ONLY;
10793 break;
10794 case OMP_CLAUSE_DEFAULTMAP_TO:
10795 ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_TO_ONLY;
10796 break;
10797 case OMP_CLAUSE_DEFAULTMAP_FROM:
10798 ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_FROM_ONLY;
10799 break;
10800 case OMP_CLAUSE_DEFAULTMAP_TOFROM:
10801 ctx->defaultmap[gdmk] = GOVD_MAP;
10802 break;
10803 case OMP_CLAUSE_DEFAULTMAP_FIRSTPRIVATE:
10804 ctx->defaultmap[gdmk] = GOVD_FIRSTPRIVATE;
10805 break;
10806 case OMP_CLAUSE_DEFAULTMAP_NONE:
10807 ctx->defaultmap[gdmk] = 0;
10808 break;
10809 case OMP_CLAUSE_DEFAULTMAP_DEFAULT:
10810 switch (gdmk)
10811 {
10812 case GDMK_SCALAR:
10813 ctx->defaultmap[gdmk] = GOVD_FIRSTPRIVATE;
10814 break;
10815 case GDMK_SCALAR_TARGET:
10816 ctx->defaultmap[gdmk] = (lang_GNU_Fortran ()
10817 ? GOVD_MAP : GOVD_FIRSTPRIVATE);
10818 break;
10819 case GDMK_AGGREGATE:
10820 case GDMK_ALLOCATABLE:
10821 ctx->defaultmap[gdmk] = GOVD_MAP;
10822 break;
10823 case GDMK_POINTER:
10824 ctx->defaultmap[gdmk] = GOVD_MAP;
10825 if (!lang_GNU_Fortran ())
10826 ctx->defaultmap[gdmk] |= GOVD_MAP_0LEN_ARRAY;
10827 break;
10828 default:
10829 gcc_unreachable ();
10830 }
10831 break;
10832 default:
10833 gcc_unreachable ();
10834 }
10835 break;
10836
10837 case OMP_CLAUSE_ALIGNED:
10838 decl = OMP_CLAUSE_DECL (c);
10839 if (error_operand_p (decl))
10840 {
10841 remove = true;
10842 break;
10843 }
10844 if (gimplify_expr (&OMP_CLAUSE_ALIGNED_ALIGNMENT (c), pre_p, NULL,
10845 is_gimple_val, fb_rvalue) == GS_ERROR)
10846 {
10847 remove = true;
10848 break;
10849 }
10850 if (!is_global_var (decl)
10851 && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
10852 omp_add_variable (ctx, decl, GOVD_ALIGNED);
10853 break;
10854
10855 case OMP_CLAUSE_NONTEMPORAL:
10856 decl = OMP_CLAUSE_DECL (c);
10857 if (error_operand_p (decl))
10858 {
10859 remove = true;
10860 break;
10861 }
10862 omp_add_variable (ctx, decl, GOVD_NONTEMPORAL);
10863 break;
10864
10865 case OMP_CLAUSE_ALLOCATE:
10866 decl = OMP_CLAUSE_DECL (c);
10867 if (error_operand_p (decl))
10868 {
10869 remove = true;
10870 break;
10871 }
10872 if (gimplify_expr (&OMP_CLAUSE_ALLOCATE_ALLOCATOR (c), pre_p, NULL,
10873 is_gimple_val, fb_rvalue) == GS_ERROR)
10874 {
10875 remove = true;
10876 break;
10877 }
10878 else if (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c) == NULL_TREE
10879 || (TREE_CODE (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c))
10880 == INTEGER_CST))
10881 ;
10882 else if (code == OMP_TASKLOOP
10883 || !DECL_P (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)))
10884 OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)
10885 = get_initialized_tmp_var (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c),
10886 pre_p, NULL, false);
10887 break;
10888
10889 case OMP_CLAUSE_DEFAULT:
10890 ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
10891 break;
10892
10893 case OMP_CLAUSE_INCLUSIVE:
10894 case OMP_CLAUSE_EXCLUSIVE:
10895 decl = OMP_CLAUSE_DECL (c);
10896 {
10897 splay_tree_node n = splay_tree_lookup (outer_ctx->variables,
10898 (splay_tree_key) decl);
10899 if (n == NULL || (n->value & GOVD_REDUCTION) == 0)
10900 {
10901 error_at (OMP_CLAUSE_LOCATION (c),
10902 "%qD specified in %qs clause but not in %<inscan%> "
10903 "%<reduction%> clause on the containing construct",
10904 decl, omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
10905 remove = true;
10906 }
10907 else
10908 {
10909 n->value |= GOVD_REDUCTION_INSCAN;
10910 if (outer_ctx->region_type == ORT_SIMD
10911 && outer_ctx->outer_context
10912 && outer_ctx->outer_context->region_type == ORT_WORKSHARE)
10913 {
10914 n = splay_tree_lookup (outer_ctx->outer_context->variables,
10915 (splay_tree_key) decl);
10916 if (n && (n->value & GOVD_REDUCTION) != 0)
10917 n->value |= GOVD_REDUCTION_INSCAN;
10918 }
10919 }
10920 }
10921 break;
10922
10923 case OMP_CLAUSE_NOHOST:
10924 default:
10925 gcc_unreachable ();
10926 }
10927
10928 if (code == OACC_DATA
10929 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
10930 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
10931 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
10932 remove = true;
10933 if (remove)
10934 *list_p = OMP_CLAUSE_CHAIN (c);
10935 else
10936 list_p = &OMP_CLAUSE_CHAIN (c);
10937 }
10938
10939 ctx->clauses = *orig_list_p;
10940 gimplify_omp_ctxp = ctx;
10941 if (struct_seen_clause)
10942 delete struct_seen_clause;
10943 if (struct_map_to_clause)
10944 delete struct_map_to_clause;
10945 if (struct_deref_set)
10946 delete struct_deref_set;
10947 }
10948
10949 /* Return true if DECL is a candidate for the shared-to-firstprivate
10950 optimization. We only consider non-addressable scalars that are
10951 not too big and are not references. */
10952
10953 static bool
10954 omp_shared_to_firstprivate_optimizable_decl_p (tree decl)
10955 {
10956 if (TREE_ADDRESSABLE (decl))
10957 return false;
10958 tree type = TREE_TYPE (decl);
10959 if (!is_gimple_reg_type (type)
10960 || TREE_CODE (type) == REFERENCE_TYPE
10961 || TREE_ADDRESSABLE (type))
10962 return false;
10963 /* Don't optimize overly large decls, as each thread/task will have
10964 its own copy. */
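/* E.g. with 64-bit pointers this allows at most 4 * 64 / 8 = 32
   bytes.  */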
10965 HOST_WIDE_INT len = int_size_in_bytes (type);
10966 if (len == -1 || len > 4 * POINTER_SIZE / BITS_PER_UNIT)
10967 return false;
10968 if (omp_privatize_by_reference (decl))
10969 return false;
10970 return true;
10971 }
10972
10973 /* Helper function of omp_find_stores_op and gimplify_adjust_omp_clauses*.
10974 For a DECL that satisfies omp_shared_to_firstprivate_optimizable_decl_p,
10975 mark it as GOVD_WRITTEN in the outer contexts. */
10976
10977 static void
10978 omp_mark_stores (struct gimplify_omp_ctx *ctx, tree decl)
10979 {
10980 for (; ctx; ctx = ctx->outer_context)
10981 {
10982 splay_tree_node n = splay_tree_lookup (ctx->variables,
10983 (splay_tree_key) decl);
10984 if (n == NULL)
10985 continue;
10986 else if (n->value & GOVD_SHARED)
10987 {
10988 n->value |= GOVD_WRITTEN;
10989 return;
10990 }
10991 else if (n->value & GOVD_DATA_SHARE_CLASS)
10992 return;
10993 }
10994 }
10995
10996 /* Helper callback for walk_gimple_seq to discover possible stores
10997 to omp_shared_to_firstprivate_optimizable_decl_p decls, and to set
10998 GOVD_WRITTEN for those that are GOVD_SHARED in some outer
10999 context. */
11000
11001 static tree
11002 omp_find_stores_op (tree *tp, int *walk_subtrees, void *data)
11003 {
11004 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
11005
11006 *walk_subtrees = 0;
11007 if (!wi->is_lhs)
11008 return NULL_TREE;
11009
11010 tree op = *tp;
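/* Peel off handled components and dereferences of local addresses
   to get at the base decl that is being stored to.  */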
11011 do
11012 {
11013 if (handled_component_p (op))
11014 op = TREE_OPERAND (op, 0);
11015 else if ((TREE_CODE (op) == MEM_REF || TREE_CODE (op) == TARGET_MEM_REF)
11016 && TREE_CODE (TREE_OPERAND (op, 0)) == ADDR_EXPR)
11017 op = TREE_OPERAND (TREE_OPERAND (op, 0), 0);
11018 else
11019 break;
11020 }
11021 while (1);
11022 if (!DECL_P (op) || !omp_shared_to_firstprivate_optimizable_decl_p (op))
11023 return NULL_TREE;
11024
11025 omp_mark_stores (gimplify_omp_ctxp, op);
11026 return NULL_TREE;
11027 }
11028
11029 /* Helper callback for walk_gimple_seq to discover possible stores
11030 to omp_shared_to_firstprivate_optimizable_decl_p decls, and to set
11031 GOVD_WRITTEN for those that are GOVD_SHARED in some outer
11032 context. */
11033
11034 static tree
11035 omp_find_stores_stmt (gimple_stmt_iterator *gsi_p,
11036 bool *handled_ops_p,
11037 struct walk_stmt_info *wi)
11038 {
11039 gimple *stmt = gsi_stmt (*gsi_p);
11040 switch (gimple_code (stmt))
11041 {
11042 /* Don't recurse on OpenMP constructs for which
11043 gimplify_adjust_omp_clauses already handled the bodies,
11044 but do handle gimple_omp_for_pre_body. */
11045 case GIMPLE_OMP_FOR:
11046 *handled_ops_p = true;
11047 if (gimple_omp_for_pre_body (stmt))
11048 walk_gimple_seq (gimple_omp_for_pre_body (stmt),
11049 omp_find_stores_stmt, omp_find_stores_op, wi);
11050 break;
11051 case GIMPLE_OMP_PARALLEL:
11052 case GIMPLE_OMP_TASK:
11053 case GIMPLE_OMP_SECTIONS:
11054 case GIMPLE_OMP_SINGLE:
11055 case GIMPLE_OMP_SCOPE:
11056 case GIMPLE_OMP_TARGET:
11057 case GIMPLE_OMP_TEAMS:
11058 case GIMPLE_OMP_CRITICAL:
11059 *handled_ops_p = true;
11060 break;
11061 default:
11062 break;
11063 }
11064 return NULL_TREE;
11065 }
11066
11067 struct gimplify_adjust_omp_clauses_data
11068 {
11069 tree *list_p;
11070 gimple_seq *pre_p;
11071 };
11072
11073 /* For all variables that were not actually used within the context,
11074 remove PRIVATE, SHARED, and FIRSTPRIVATE clauses. */
11075
11076 static int
11077 gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data)
11078 {
11079 tree *list_p = ((struct gimplify_adjust_omp_clauses_data *) data)->list_p;
11080 gimple_seq *pre_p
11081 = ((struct gimplify_adjust_omp_clauses_data *) data)->pre_p;
11082 tree decl = (tree) n->key;
11083 unsigned flags = n->value;
11084 enum omp_clause_code code;
11085 tree clause;
11086 bool private_debug;
11087
11088 if (gimplify_omp_ctxp->region_type == ORT_COMBINED_PARALLEL
11089 && (flags & GOVD_LASTPRIVATE_CONDITIONAL) != 0)
11090 flags = GOVD_SHARED | GOVD_SEEN | GOVD_WRITTEN;
11091 if (flags & (GOVD_EXPLICIT | GOVD_LOCAL))
11092 return 0;
11093 if ((flags & GOVD_SEEN) == 0)
11094 return 0;
11095 if ((flags & GOVD_MAP_HAS_ATTACHMENTS) != 0)
11096 return 0;
11097 if (flags & GOVD_DEBUG_PRIVATE)
11098 {
11099 gcc_assert ((flags & GOVD_DATA_SHARE_CLASS) == GOVD_SHARED);
11100 private_debug = true;
11101 }
11102 else if (flags & GOVD_MAP)
11103 private_debug = false;
11104 else
11105 private_debug
11106 = lang_hooks.decls.omp_private_debug_clause (decl,
11107 !!(flags & GOVD_SHARED));
11108 if (private_debug)
11109 code = OMP_CLAUSE_PRIVATE;
11110 else if (flags & GOVD_MAP)
11111 {
11112 code = OMP_CLAUSE_MAP;
11113 if ((gimplify_omp_ctxp->region_type & ORT_ACC) == 0
11114 && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl))))
11115 {
11116 error ("%<_Atomic%> %qD in implicit %<map%> clause", decl);
11117 return 0;
11118 }
11119 if (VAR_P (decl)
11120 && DECL_IN_CONSTANT_POOL (decl)
11121 && !lookup_attribute ("omp declare target",
11122 DECL_ATTRIBUTES (decl)))
11123 {
11124 tree id = get_identifier ("omp declare target");
11125 DECL_ATTRIBUTES (decl)
11126 = tree_cons (id, NULL_TREE, DECL_ATTRIBUTES (decl));
11127 varpool_node *node = varpool_node::get (decl);
11128 if (node)
11129 {
11130 node->offloadable = 1;
11131 if (ENABLE_OFFLOADING)
11132 g->have_offload = true;
11133 }
11134 }
11135 }
11136 else if (flags & GOVD_SHARED)
11137 {
11138 if (is_global_var (decl))
11139 {
11140 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
11141 while (ctx != NULL)
11142 {
11143 splay_tree_node on
11144 = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
11145 if (on && (on->value & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
11146 | GOVD_PRIVATE | GOVD_REDUCTION
11147 | GOVD_LINEAR | GOVD_MAP)) != 0)
11148 break;
11149 ctx = ctx->outer_context;
11150 }
11151 if (ctx == NULL)
11152 return 0;
11153 }
11154 code = OMP_CLAUSE_SHARED;
11155 /* Don't optimize shared into firstprivate for read-only vars
11156 on tasks with a depend clause; we shouldn't try to copy them
11157 until the dependencies are satisfied. */
11158 if (gimplify_omp_ctxp->has_depend)
11159 flags |= GOVD_WRITTEN;
11160 }
11161 else if (flags & GOVD_PRIVATE)
11162 code = OMP_CLAUSE_PRIVATE;
11163 else if (flags & GOVD_FIRSTPRIVATE)
11164 {
11165 code = OMP_CLAUSE_FIRSTPRIVATE;
11166 if ((gimplify_omp_ctxp->region_type & ORT_TARGET)
11167 && (gimplify_omp_ctxp->region_type & ORT_ACC) == 0
11168 && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl))))
11169 {
11170 error ("%<_Atomic%> %qD in implicit %<firstprivate%> clause on "
11171 "%<target%> construct", decl);
11172 return 0;
11173 }
11174 }
11175 else if (flags & GOVD_LASTPRIVATE)
11176 code = OMP_CLAUSE_LASTPRIVATE;
11177 else if (flags & (GOVD_ALIGNED | GOVD_NONTEMPORAL))
11178 return 0;
11179 else if (flags & GOVD_CONDTEMP)
11180 {
11181 code = OMP_CLAUSE__CONDTEMP_;
11182 gimple_add_tmp_var (decl);
11183 }
11184 else
11185 gcc_unreachable ();
11186
11187 if (((flags & GOVD_LASTPRIVATE)
11188 || (code == OMP_CLAUSE_SHARED && (flags & GOVD_WRITTEN)))
11189 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
11190 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
11191
11192 tree chain = *list_p;
11193 clause = build_omp_clause (input_location, code);
11194 OMP_CLAUSE_DECL (clause) = decl;
11195 OMP_CLAUSE_CHAIN (clause) = chain;
11196 if (private_debug)
11197 OMP_CLAUSE_PRIVATE_DEBUG (clause) = 1;
11198 else if (code == OMP_CLAUSE_PRIVATE && (flags & GOVD_PRIVATE_OUTER_REF))
11199 OMP_CLAUSE_PRIVATE_OUTER_REF (clause) = 1;
11200 else if (code == OMP_CLAUSE_SHARED
11201 && (flags & GOVD_WRITTEN) == 0
11202 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
11203 OMP_CLAUSE_SHARED_READONLY (clause) = 1;
11204 else if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_EXPLICIT) == 0)
11205 OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (clause) = 1;
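/* For a GOVD_MAP_0LEN_ARRAY decl, emit a possibly zero-length array
   section: a GOMP_MAP_ALLOC of the dereferenced decl followed by a
   GOMP_MAP_FIRSTPRIVATE_POINTER for the base decl itself.  */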
11206 else if (code == OMP_CLAUSE_MAP && (flags & GOVD_MAP_0LEN_ARRAY) != 0)
11207 {
11208 tree nc = build_omp_clause (input_location, OMP_CLAUSE_MAP);
11209 OMP_CLAUSE_DECL (nc) = decl;
11210 if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
11211 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE)
11212 OMP_CLAUSE_DECL (clause)
11213 = build_simple_mem_ref_loc (input_location, decl);
11214 OMP_CLAUSE_DECL (clause)
11215 = build2 (MEM_REF, char_type_node, OMP_CLAUSE_DECL (clause),
11216 build_int_cst (build_pointer_type (char_type_node), 0));
11217 OMP_CLAUSE_SIZE (clause) = size_zero_node;
11218 OMP_CLAUSE_SIZE (nc) = size_zero_node;
11219 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_ALLOC);
11220 OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (clause) = 1;
11221 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
11222 OMP_CLAUSE_CHAIN (nc) = chain;
11223 OMP_CLAUSE_CHAIN (clause) = nc;
11224 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
11225 gimplify_omp_ctxp = ctx->outer_context;
11226 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (clause), 0),
11227 pre_p, NULL, is_gimple_val, fb_rvalue);
11228 gimplify_omp_ctxp = ctx;
11229 }
11230 else if (code == OMP_CLAUSE_MAP)
11231 {
11232 int kind;
11233 /* Not all combinations of these GOVD_MAP flags are actually valid. */
11234 switch (flags & (GOVD_MAP_TO_ONLY
11235 | GOVD_MAP_FORCE
11236 | GOVD_MAP_FORCE_PRESENT
11237 | GOVD_MAP_ALLOC_ONLY
11238 | GOVD_MAP_FROM_ONLY))
11239 {
11240 case 0:
11241 kind = GOMP_MAP_TOFROM;
11242 break;
11243 case GOVD_MAP_FORCE:
11244 kind = GOMP_MAP_TOFROM | GOMP_MAP_FLAG_FORCE;
11245 break;
11246 case GOVD_MAP_TO_ONLY:
11247 kind = GOMP_MAP_TO;
11248 break;
11249 case GOVD_MAP_FROM_ONLY:
11250 kind = GOMP_MAP_FROM;
11251 break;
11252 case GOVD_MAP_ALLOC_ONLY:
11253 kind = GOMP_MAP_ALLOC;
11254 break;
11255 case GOVD_MAP_TO_ONLY | GOVD_MAP_FORCE:
11256 kind = GOMP_MAP_TO | GOMP_MAP_FLAG_FORCE;
11257 break;
11258 case GOVD_MAP_FORCE_PRESENT:
11259 kind = GOMP_MAP_FORCE_PRESENT;
11260 break;
11261 default:
11262 gcc_unreachable ();
11263 }
11264 OMP_CLAUSE_SET_MAP_KIND (clause, kind);
11265 /* Setting of the implicit flag for the runtime is currently disabled for
11266 OpenACC. */
11267 if ((gimplify_omp_ctxp->region_type & ORT_ACC) == 0)
11268 OMP_CLAUSE_MAP_RUNTIME_IMPLICIT_P (clause) = 1;
11269 if (DECL_SIZE (decl)
11270 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
11271 {
11272 tree decl2 = DECL_VALUE_EXPR (decl);
11273 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
11274 decl2 = TREE_OPERAND (decl2, 0);
11275 gcc_assert (DECL_P (decl2));
11276 tree mem = build_simple_mem_ref (decl2);
11277 OMP_CLAUSE_DECL (clause) = mem;
11278 OMP_CLAUSE_SIZE (clause) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
11279 if (gimplify_omp_ctxp->outer_context)
11280 {
11281 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
11282 omp_notice_variable (ctx, decl2, true);
11283 omp_notice_variable (ctx, OMP_CLAUSE_SIZE (clause), true);
11284 }
11285 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
11286 OMP_CLAUSE_MAP);
11287 OMP_CLAUSE_DECL (nc) = decl;
11288 OMP_CLAUSE_SIZE (nc) = size_zero_node;
11289 if (gimplify_omp_ctxp->target_firstprivatize_array_bases)
11290 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
11291 else
11292 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
11293 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
11294 OMP_CLAUSE_CHAIN (clause) = nc;
11295 }
11296 else if (gimplify_omp_ctxp->target_firstprivatize_array_bases
11297 && omp_privatize_by_reference (decl))
11298 {
11299 OMP_CLAUSE_DECL (clause) = build_simple_mem_ref (decl);
11300 OMP_CLAUSE_SIZE (clause)
11301 = unshare_expr (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))));
11302 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
11303 gimplify_omp_ctxp = ctx->outer_context;
11304 gimplify_expr (&OMP_CLAUSE_SIZE (clause),
11305 pre_p, NULL, is_gimple_val, fb_rvalue);
11306 gimplify_omp_ctxp = ctx;
11307 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
11308 OMP_CLAUSE_MAP);
11309 OMP_CLAUSE_DECL (nc) = decl;
11310 OMP_CLAUSE_SIZE (nc) = size_zero_node;
11311 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_REFERENCE);
11312 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
11313 OMP_CLAUSE_CHAIN (clause) = nc;
11314 }
11315 else
11316 OMP_CLAUSE_SIZE (clause) = DECL_SIZE_UNIT (decl);
11317 }
11318 if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_LASTPRIVATE) != 0)
11319 {
11320 tree nc = build_omp_clause (input_location, OMP_CLAUSE_LASTPRIVATE);
11321 OMP_CLAUSE_DECL (nc) = decl;
11322 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (nc) = 1;
11323 OMP_CLAUSE_CHAIN (nc) = chain;
11324 OMP_CLAUSE_CHAIN (clause) = nc;
11325 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
11326 gimplify_omp_ctxp = ctx->outer_context;
11327 lang_hooks.decls.omp_finish_clause (nc, pre_p,
11328 (ctx->region_type & ORT_ACC) != 0);
11329 gimplify_omp_ctxp = ctx;
11330 }
11331 *list_p = clause;
11332 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
11333 gimplify_omp_ctxp = ctx->outer_context;
11334 /* Don't call omp_finish_clause on implicitly added OMP_CLAUSE_PRIVATE
11335 in simd. Those are only added for the local vars inside the simd body,
11336 and they don't need to be e.g. default constructible. */
11337 if (code != OMP_CLAUSE_PRIVATE || ctx->region_type != ORT_SIMD)
11338 lang_hooks.decls.omp_finish_clause (clause, pre_p,
11339 (ctx->region_type & ORT_ACC) != 0);
11340 if (gimplify_omp_ctxp)
11341 for (; clause != chain; clause = OMP_CLAUSE_CHAIN (clause))
11342 if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_MAP
11343 && DECL_P (OMP_CLAUSE_SIZE (clause)))
11344 omp_notice_variable (gimplify_omp_ctxp, OMP_CLAUSE_SIZE (clause),
11345 true);
11346 gimplify_omp_ctxp = ctx;
11347 return 0;
11348 }
11349
11350 static void
11351 gimplify_adjust_omp_clauses (gimple_seq *pre_p, gimple_seq body, tree *list_p,
11352 enum tree_code code)
11353 {
11354 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
11355 tree *orig_list_p = list_p;
11356 tree c, decl;
11357 bool has_inscan_reductions = false;
11358
11359 if (body)
11360 {
11361 struct gimplify_omp_ctx *octx;
11362 for (octx = ctx; octx; octx = octx->outer_context)
11363 if ((octx->region_type & (ORT_PARALLEL | ORT_TASK | ORT_TEAMS)) != 0)
11364 break;
11365 if (octx)
11366 {
11367 struct walk_stmt_info wi;
11368 memset (&wi, 0, sizeof (wi));
11369 walk_gimple_seq (body, omp_find_stores_stmt,
11370 omp_find_stores_op, &wi);
11371 }
11372 }
11373
11374 if (ctx->add_safelen1)
11375 {
11376 /* If there are VLAs in the body of the simd loop, prevent
11377 vectorization. */
11378 gcc_assert (ctx->region_type == ORT_SIMD);
11379 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_SAFELEN);
11380 OMP_CLAUSE_SAFELEN_EXPR (c) = integer_one_node;
11381 OMP_CLAUSE_CHAIN (c) = *list_p;
11382 *list_p = c;
11383 list_p = &OMP_CLAUSE_CHAIN (c);
11384 }
11385
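/* For a worksharing region nested directly in a combined parallel,
   replicate any lastprivate (conditional:) clauses of the parallel
   onto this construct, together with a firstprivate copy when the
   outer context also saw the decl as firstprivate.  */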
11386 if (ctx->region_type == ORT_WORKSHARE
11387 && ctx->outer_context
11388 && ctx->outer_context->region_type == ORT_COMBINED_PARALLEL)
11389 {
11390 for (c = ctx->outer_context->clauses; c; c = OMP_CLAUSE_CHAIN (c))
11391 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
11392 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
11393 {
11394 decl = OMP_CLAUSE_DECL (c);
11395 splay_tree_node n
11396 = splay_tree_lookup (ctx->outer_context->variables,
11397 (splay_tree_key) decl);
11398 gcc_checking_assert (!splay_tree_lookup (ctx->variables,
11399 (splay_tree_key) decl));
11400 omp_add_variable (ctx, decl, n->value);
11401 tree c2 = copy_node (c);
11402 OMP_CLAUSE_CHAIN (c2) = *list_p;
11403 *list_p = c2;
11404 if ((n->value & GOVD_FIRSTPRIVATE) == 0)
11405 continue;
11406 c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
11407 OMP_CLAUSE_FIRSTPRIVATE);
11408 OMP_CLAUSE_DECL (c2) = decl;
11409 OMP_CLAUSE_CHAIN (c2) = *list_p;
11410 *list_p = c2;
11411 }
11412 }
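/* Now walk the explicit clauses, dropping those whose variables were
   never actually used in the region and fixing up map kinds and
   sizes.  */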
11413 while ((c = *list_p) != NULL)
11414 {
11415 splay_tree_node n;
11416 bool remove = false;
11417
11418 switch (OMP_CLAUSE_CODE (c))
11419 {
11420 case OMP_CLAUSE_FIRSTPRIVATE:
11421 if ((ctx->region_type & ORT_TARGET)
11422 && (ctx->region_type & ORT_ACC) == 0
11423 && TYPE_ATOMIC (strip_array_types
11424 (TREE_TYPE (OMP_CLAUSE_DECL (c)))))
11425 {
11426 error_at (OMP_CLAUSE_LOCATION (c),
11427 "%<_Atomic%> %qD in %<firstprivate%> clause on "
11428 "%<target%> construct", OMP_CLAUSE_DECL (c));
11429 remove = true;
11430 break;
11431 }
11432 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
11433 {
11434 decl = OMP_CLAUSE_DECL (c);
11435 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
11436 if ((n->value & GOVD_MAP) != 0)
11437 {
11438 remove = true;
11439 break;
11440 }
11441 OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT_TARGET (c) = 0;
11442 OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c) = 0;
11443 }
11444 /* FALLTHRU */
11445 case OMP_CLAUSE_PRIVATE:
11446 case OMP_CLAUSE_SHARED:
11447 case OMP_CLAUSE_LINEAR:
11448 decl = OMP_CLAUSE_DECL (c);
11449 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
11450 remove = !(n->value & GOVD_SEEN);
11451 if ((n->value & GOVD_LASTPRIVATE_CONDITIONAL) != 0
11452 && code == OMP_PARALLEL
11453 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
11454 remove = true;
11455 if (! remove)
11456 {
11457 bool shared = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED;
11458 if ((n->value & GOVD_DEBUG_PRIVATE)
11459 || lang_hooks.decls.omp_private_debug_clause (decl, shared))
11460 {
11461 gcc_assert ((n->value & GOVD_DEBUG_PRIVATE) == 0
11462 || ((n->value & GOVD_DATA_SHARE_CLASS)
11463 == GOVD_SHARED));
11464 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_PRIVATE);
11465 OMP_CLAUSE_PRIVATE_DEBUG (c) = 1;
11466 }
11467 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
11468 && ctx->has_depend
11469 && DECL_P (decl))
11470 n->value |= GOVD_WRITTEN;
11471 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
11472 && (n->value & GOVD_WRITTEN) == 0
11473 && DECL_P (decl)
11474 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
11475 OMP_CLAUSE_SHARED_READONLY (c) = 1;
11476 else if (DECL_P (decl)
11477 && ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
11478 && (n->value & GOVD_WRITTEN) != 0)
11479 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
11480 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
11481 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
11482 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
11483 }
11484 else
11485 n->value &= ~GOVD_EXPLICIT;
11486 break;
11487
11488 case OMP_CLAUSE_LASTPRIVATE:
11489 /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
11490 accurately reflect the presence of a FIRSTPRIVATE clause. */
11491 decl = OMP_CLAUSE_DECL (c);
11492 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
11493 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
11494 = (n->value & GOVD_FIRSTPRIVATE) != 0;
11495 if (code == OMP_DISTRIBUTE
11496 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
11497 {
11498 remove = true;
11499 error_at (OMP_CLAUSE_LOCATION (c),
11500 "same variable used in %<firstprivate%> and "
11501 "%<lastprivate%> clauses on %<distribute%> "
11502 "construct");
11503 }
11504 if (!remove
11505 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
11506 && DECL_P (decl)
11507 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
11508 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
11509 if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) && code == OMP_PARALLEL)
11510 remove = true;
11511 break;
11512
11513 case OMP_CLAUSE_ALIGNED:
11514 decl = OMP_CLAUSE_DECL (c);
11515 if (!is_global_var (decl))
11516 {
11517 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
11518 remove = n == NULL || !(n->value & GOVD_SEEN);
11519 if (!remove && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
11520 {
11521 struct gimplify_omp_ctx *octx;
11522 if (n != NULL
11523 && (n->value & (GOVD_DATA_SHARE_CLASS
11524 & ~GOVD_FIRSTPRIVATE)))
11525 remove = true;
11526 else
11527 for (octx = ctx->outer_context; octx;
11528 octx = octx->outer_context)
11529 {
11530 n = splay_tree_lookup (octx->variables,
11531 (splay_tree_key) decl);
11532 if (n == NULL)
11533 continue;
11534 if (n->value & GOVD_LOCAL)
11535 break;
11536 /* We have to avoid assigning a shared variable
11537 to itself when trying to add
11538 __builtin_assume_aligned. */
11539 if (n->value & GOVD_SHARED)
11540 {
11541 remove = true;
11542 break;
11543 }
11544 }
11545 }
11546 }
11547 else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
11548 {
11549 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
11550 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
11551 remove = true;
11552 }
11553 break;
11554
11555 case OMP_CLAUSE_HAS_DEVICE_ADDR:
11556 decl = OMP_CLAUSE_DECL (c);
11557 while (TREE_CODE (decl) == INDIRECT_REF
11558 || TREE_CODE (decl) == ARRAY_REF)
11559 decl = TREE_OPERAND (decl, 0);
11560 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
11561 remove = n == NULL || !(n->value & GOVD_SEEN);
11562 break;
11563
11564 case OMP_CLAUSE_IS_DEVICE_PTR:
11565 case OMP_CLAUSE_NONTEMPORAL:
11566 decl = OMP_CLAUSE_DECL (c);
11567 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
11568 remove = n == NULL || !(n->value & GOVD_SEEN);
11569 break;
11570
11571 case OMP_CLAUSE_MAP:
11572 if (code == OMP_TARGET_EXIT_DATA
11573 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_POINTER)
11574 {
11575 remove = true;
11576 break;
11577 }
11578 decl = OMP_CLAUSE_DECL (c);
11579 /* Data clauses associated with reductions must be
11580 compatible with present_or_copy. Warn and adjust the clause
11581 if that is not the case. */
11582 if (ctx->region_type == ORT_ACC_PARALLEL
11583 || ctx->region_type == ORT_ACC_SERIAL)
11584 {
11585 tree t = DECL_P (decl) ? decl : TREE_OPERAND (decl, 0);
11586 n = NULL;
11587
11588 if (DECL_P (t))
11589 n = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
11590
11591 if (n && (n->value & GOVD_REDUCTION))
11592 {
11593 enum gomp_map_kind kind = OMP_CLAUSE_MAP_KIND (c);
11594
11595 OMP_CLAUSE_MAP_IN_REDUCTION (c) = 1;
11596 if ((kind & GOMP_MAP_TOFROM) != GOMP_MAP_TOFROM
11597 && kind != GOMP_MAP_FORCE_PRESENT
11598 && kind != GOMP_MAP_POINTER)
11599 {
11600 warning_at (OMP_CLAUSE_LOCATION (c), 0,
11601 "incompatible data clause with reduction "
11602 "on %qE; promoting to %<present_or_copy%>",
11603 DECL_NAME (t));
11604 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TOFROM);
11605 }
11606 }
11607 }
11608 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT
11609 && (code == OMP_TARGET_EXIT_DATA || code == OACC_EXIT_DATA))
11610 {
11611 remove = true;
11612 break;
11613 }
11614 if (!DECL_P (decl))
11615 {
11616 if ((ctx->region_type & ORT_TARGET) != 0
11617 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
11618 {
11619 if (TREE_CODE (decl) == INDIRECT_REF
11620 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
11621 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
11622 == REFERENCE_TYPE))
11623 decl = TREE_OPERAND (decl, 0);
11624 if (TREE_CODE (decl) == COMPONENT_REF)
11625 {
11626 while (TREE_CODE (decl) == COMPONENT_REF)
11627 decl = TREE_OPERAND (decl, 0);
11628 if (DECL_P (decl))
11629 {
11630 n = splay_tree_lookup (ctx->variables,
11631 (splay_tree_key) decl);
11632 if (!(n->value & GOVD_SEEN))
11633 remove = true;
11634 }
11635 }
11636 }
11637 break;
11638 }
11639 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
11640 if ((ctx->region_type & ORT_TARGET) != 0
11641 && !(n->value & GOVD_SEEN)
11642 && GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) == 0
11643 && (!is_global_var (decl)
11644 || !lookup_attribute ("omp declare target link",
11645 DECL_ATTRIBUTES (decl))))
11646 {
11647 remove = true;
11648 /* For struct element mappings, if the struct is never referenced
11649 in the target block and none of the mappings has an always
11650 modifier, remove all the struct element mappings, which
11651 immediately follow the GOMP_MAP_STRUCT map clause. */
11652 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT)
11653 {
11654 HOST_WIDE_INT cnt = tree_to_shwi (OMP_CLAUSE_SIZE (c));
11655 while (cnt--)
11656 OMP_CLAUSE_CHAIN (c)
11657 = OMP_CLAUSE_CHAIN (OMP_CLAUSE_CHAIN (c));
11658 }
11659 }
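/* A variable-sized decl is accessed through a pointer (its
   DECL_VALUE_EXPR is *ptr), so map the pointed-to object instead and
   add a pointer map for the base decl when one is needed.  */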
11660 else if (DECL_SIZE (decl)
11661 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST
11662 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_POINTER
11663 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
11664 && (OMP_CLAUSE_MAP_KIND (c)
11665 != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
11666 {
11667 /* For GOMP_MAP_FORCE_DEVICEPTR, we'll never enter here, because
11668 for these, TREE_CODE (DECL_SIZE (decl)) will always be
11669 INTEGER_CST. */
11670 gcc_assert (OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FORCE_DEVICEPTR);
11671
11672 tree decl2 = DECL_VALUE_EXPR (decl);
11673 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
11674 decl2 = TREE_OPERAND (decl2, 0);
11675 gcc_assert (DECL_P (decl2));
11676 tree mem = build_simple_mem_ref (decl2);
11677 OMP_CLAUSE_DECL (c) = mem;
11678 OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
11679 if (ctx->outer_context)
11680 {
11681 omp_notice_variable (ctx->outer_context, decl2, true);
11682 omp_notice_variable (ctx->outer_context,
11683 OMP_CLAUSE_SIZE (c), true);
11684 }
11685 if (((ctx->region_type & ORT_TARGET) != 0
11686 || !ctx->target_firstprivatize_array_bases)
11687 && ((n->value & GOVD_SEEN) == 0
11688 || (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE)) == 0))
11689 {
11690 tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (c),
11691 OMP_CLAUSE_MAP);
11692 OMP_CLAUSE_DECL (nc) = decl;
11693 OMP_CLAUSE_SIZE (nc) = size_zero_node;
11694 if (ctx->target_firstprivatize_array_bases)
11695 OMP_CLAUSE_SET_MAP_KIND (nc,
11696 GOMP_MAP_FIRSTPRIVATE_POINTER);
11697 else
11698 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
11699 OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (c);
11700 OMP_CLAUSE_CHAIN (c) = nc;
11701 c = nc;
11702 }
11703 }
11704 else
11705 {
11706 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
11707 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
11708 gcc_assert ((n->value & GOVD_SEEN) == 0
11709 || ((n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
11710 == 0));
11711 }
11712 break;
11713
11714 case OMP_CLAUSE_TO:
11715 case OMP_CLAUSE_FROM:
11716 case OMP_CLAUSE__CACHE_:
11717 decl = OMP_CLAUSE_DECL (c);
11718 if (!DECL_P (decl))
11719 break;
11720 if (DECL_SIZE (decl)
11721 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
11722 {
11723 tree decl2 = DECL_VALUE_EXPR (decl);
11724 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
11725 decl2 = TREE_OPERAND (decl2, 0);
11726 gcc_assert (DECL_P (decl2));
11727 tree mem = build_simple_mem_ref (decl2);
11728 OMP_CLAUSE_DECL (c) = mem;
11729 OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
11730 if (ctx->outer_context)
11731 {
11732 omp_notice_variable (ctx->outer_context, decl2, true);
11733 omp_notice_variable (ctx->outer_context,
11734 OMP_CLAUSE_SIZE (c), true);
11735 }
11736 }
11737 else if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
11738 OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
11739 break;
11740
11741 case OMP_CLAUSE_REDUCTION:
11742 if (OMP_CLAUSE_REDUCTION_INSCAN (c))
11743 {
11744 decl = OMP_CLAUSE_DECL (c);
11745 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
11746 if ((n->value & GOVD_REDUCTION_INSCAN) == 0)
11747 {
11748 remove = true;
11749 error_at (OMP_CLAUSE_LOCATION (c),
11750 "%qD specified in %<inscan%> %<reduction%> clause "
11751 "but not in %<scan%> directive clause", decl);
11752 break;
11753 }
11754 has_inscan_reductions = true;
11755 }
11756 /* FALLTHRU */
11757 case OMP_CLAUSE_IN_REDUCTION:
11758 case OMP_CLAUSE_TASK_REDUCTION:
11759 decl = OMP_CLAUSE_DECL (c);
11760 /* OpenACC reductions need a present_or_copy data clause.
11761 Add one if necessary. Emit an error when the reduction is private. */
11762 if (ctx->region_type == ORT_ACC_PARALLEL
11763 || ctx->region_type == ORT_ACC_SERIAL)
11764 {
11765 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
11766 if (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
11767 {
11768 remove = true;
11769 error_at (OMP_CLAUSE_LOCATION (c), "invalid private "
11770 "reduction on %qE", DECL_NAME (decl));
11771 }
11772 else if ((n->value & GOVD_MAP) == 0)
11773 {
11774 tree next = OMP_CLAUSE_CHAIN (c);
11775 tree nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_MAP);
11776 OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_TOFROM);
11777 OMP_CLAUSE_DECL (nc) = decl;
11778 OMP_CLAUSE_CHAIN (c) = nc;
11779 lang_hooks.decls.omp_finish_clause (nc, pre_p,
11780 (ctx->region_type
11781 & ORT_ACC) != 0);
11782 while (1)
11783 {
11784 OMP_CLAUSE_MAP_IN_REDUCTION (nc) = 1;
11785 if (OMP_CLAUSE_CHAIN (nc) == NULL)
11786 break;
11787 nc = OMP_CLAUSE_CHAIN (nc);
11788 }
11789 OMP_CLAUSE_CHAIN (nc) = next;
11790 n->value |= GOVD_MAP;
11791 }
11792 }
11793 if (DECL_P (decl)
11794 && omp_shared_to_firstprivate_optimizable_decl_p (decl))
11795 omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
11796 break;
11797
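/* An allocate clause is dropped when its variable is privatized here
   but was never actually used; otherwise make sure a non-constant
   allocator expression is noticed (firstprivate by default) in this
   context.  */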
11798 case OMP_CLAUSE_ALLOCATE:
11799 decl = OMP_CLAUSE_DECL (c);
11800 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
11801 if (n != NULL && !(n->value & GOVD_SEEN))
11802 {
11803 if ((n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE | GOVD_LINEAR))
11804 != 0
11805 && (n->value & (GOVD_REDUCTION | GOVD_LASTPRIVATE)) == 0)
11806 remove = true;
11807 }
11808 if (!remove
11809 && OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)
11810 && TREE_CODE (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)) != INTEGER_CST
11811 && ((ctx->region_type & (ORT_PARALLEL | ORT_TARGET)) != 0
11812 || (ctx->region_type & ORT_TASKLOOP) == ORT_TASK
11813 || (ctx->region_type & ORT_HOST_TEAMS) == ORT_HOST_TEAMS))
11814 {
11815 tree allocator = OMP_CLAUSE_ALLOCATE_ALLOCATOR (c);
11816 n = splay_tree_lookup (ctx->variables, (splay_tree_key) allocator);
11817 if (n == NULL)
11818 {
11819 enum omp_clause_default_kind default_kind
11820 = ctx->default_kind;
11821 ctx->default_kind = OMP_CLAUSE_DEFAULT_FIRSTPRIVATE;
11822 omp_notice_variable (ctx, OMP_CLAUSE_ALLOCATE_ALLOCATOR (c),
11823 true);
11824 ctx->default_kind = default_kind;
11825 }
11826 else
11827 omp_notice_variable (ctx, OMP_CLAUSE_ALLOCATE_ALLOCATOR (c),
11828 true);
11829 }
11830 break;
11831
11832 case OMP_CLAUSE_COPYIN:
11833 case OMP_CLAUSE_COPYPRIVATE:
11834 case OMP_CLAUSE_IF:
11835 case OMP_CLAUSE_NUM_THREADS:
11836 case OMP_CLAUSE_NUM_TEAMS:
11837 case OMP_CLAUSE_THREAD_LIMIT:
11838 case OMP_CLAUSE_DIST_SCHEDULE:
11839 case OMP_CLAUSE_DEVICE:
11840 case OMP_CLAUSE_SCHEDULE:
11841 case OMP_CLAUSE_NOWAIT:
11842 case OMP_CLAUSE_ORDERED:
11843 case OMP_CLAUSE_DEFAULT:
11844 case OMP_CLAUSE_UNTIED:
11845 case OMP_CLAUSE_COLLAPSE:
11846 case OMP_CLAUSE_FINAL:
11847 case OMP_CLAUSE_MERGEABLE:
11848 case OMP_CLAUSE_PROC_BIND:
11849 case OMP_CLAUSE_SAFELEN:
11850 case OMP_CLAUSE_SIMDLEN:
11851 case OMP_CLAUSE_DEPEND:
11852 case OMP_CLAUSE_PRIORITY:
11853 case OMP_CLAUSE_GRAINSIZE:
11854 case OMP_CLAUSE_NUM_TASKS:
11855 case OMP_CLAUSE_NOGROUP:
11856 case OMP_CLAUSE_THREADS:
11857 case OMP_CLAUSE_SIMD:
11858 case OMP_CLAUSE_FILTER:
11859 case OMP_CLAUSE_HINT:
11860 case OMP_CLAUSE_DEFAULTMAP:
11861 case OMP_CLAUSE_ORDER:
11862 case OMP_CLAUSE_BIND:
11863 case OMP_CLAUSE_DETACH:
11864 case OMP_CLAUSE_USE_DEVICE_PTR:
11865 case OMP_CLAUSE_USE_DEVICE_ADDR:
11866 case OMP_CLAUSE_ASYNC:
11867 case OMP_CLAUSE_WAIT:
11868 case OMP_CLAUSE_INDEPENDENT:
11869 case OMP_CLAUSE_NUM_GANGS:
11870 case OMP_CLAUSE_NUM_WORKERS:
11871 case OMP_CLAUSE_VECTOR_LENGTH:
11872 case OMP_CLAUSE_GANG:
11873 case OMP_CLAUSE_WORKER:
11874 case OMP_CLAUSE_VECTOR:
11875 case OMP_CLAUSE_AUTO:
11876 case OMP_CLAUSE_SEQ:
11877 case OMP_CLAUSE_TILE:
11878 case OMP_CLAUSE_IF_PRESENT:
11879 case OMP_CLAUSE_FINALIZE:
11880 case OMP_CLAUSE_INCLUSIVE:
11881 case OMP_CLAUSE_EXCLUSIVE:
11882 break;
11883
11884 case OMP_CLAUSE_NOHOST:
11885 default:
11886 gcc_unreachable ();
11887 }
11888
11889 if (remove)
11890 *list_p = OMP_CLAUSE_CHAIN (c);
11891 else
11892 list_p = &OMP_CLAUSE_CHAIN (c);
11893 }
11894
11895 /* Add in any implicit data sharing. */
11896 struct gimplify_adjust_omp_clauses_data data;
11897 if ((gimplify_omp_ctxp->region_type & ORT_ACC) == 0)
11898 {
11899 /* OpenMP. Implicit clauses are added at the start of the clause list,
11900 but after any leading non-map clauses, just before the first map clause. */
11901 tree *implicit_add_list_p = orig_list_p;
11902 while (*implicit_add_list_p
11903 && OMP_CLAUSE_CODE (*implicit_add_list_p) != OMP_CLAUSE_MAP)
11904 implicit_add_list_p = &OMP_CLAUSE_CHAIN (*implicit_add_list_p);
11905 data.list_p = implicit_add_list_p;
11906 }
11907 else
11908 /* OpenACC. */
11909 data.list_p = list_p;
11910 data.pre_p = pre_p;
11911 splay_tree_foreach (ctx->variables, gimplify_adjust_omp_clauses_1, &data);
11912
11913 if (has_inscan_reductions)
11914 for (c = *orig_list_p; c; c = OMP_CLAUSE_CHAIN (c))
11915 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
11916 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
11917 {
11918 error_at (OMP_CLAUSE_LOCATION (c),
11919 "%<inscan%> %<reduction%> clause used together with "
11920 "%<linear%> clause for a variable other than loop "
11921 "iterator");
11922 break;
11923 }
11924
11925 gimplify_omp_ctxp = ctx->outer_context;
11926 delete_omp_context (ctx);
11927 }
11928
11929 /* Return 0 if CONSTRUCTS selectors don't match the OpenMP context,
11930 1 if they do, and -1 if it is not known yet (simd is involved and
11931 won't be known until vectorization). If SCORES is non-NULL, it
11932 should point to an array of at least 2*NCONSTRUCTS+2 ints, which
11933 will be filled with the positions of the CONSTRUCTS (position -1
11934 if a construct will never match) followed by the number of
11935 constructs in the OpenMP context construct trait. If the score
11936 depends on whether the code will be in a declare simd clone or not,
11937 the function returns 2 and there will be two sets of scores, the
11938 first for the case that it is not in a declare simd clone, the second for the case that it is. */
11939
11940 int
11941 omp_construct_selector_matches (enum tree_code *constructs, int nconstructs,
11942 int *scores)
11943 {
11944 int matched = 0, cnt = 0;
11945 bool simd_seen = false;
11946 bool target_seen = false;
11947 int declare_simd_cnt = -1;
11948 auto_vec<enum tree_code, 16> codes;
11949 for (struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp; ctx;)
11950 {
11951 if (((ctx->region_type & ORT_PARALLEL) && ctx->code == OMP_PARALLEL)
11952 || ((ctx->region_type & (ORT_TARGET | ORT_IMPLICIT_TARGET | ORT_ACC))
11953 == ORT_TARGET && ctx->code == OMP_TARGET)
11954 || ((ctx->region_type & ORT_TEAMS) && ctx->code == OMP_TEAMS)
11955 || (ctx->region_type == ORT_WORKSHARE && ctx->code == OMP_FOR)
11956 || (ctx->region_type == ORT_SIMD
11957 && ctx->code == OMP_SIMD
11958 && !omp_find_clause (ctx->clauses, OMP_CLAUSE_BIND)))
11959 {
11960 ++cnt;
11961 if (scores)
11962 codes.safe_push (ctx->code);
11963 else if (matched < nconstructs && ctx->code == constructs[matched])
11964 {
11965 if (ctx->code == OMP_SIMD)
11966 {
11967 if (matched)
11968 return 0;
11969 simd_seen = true;
11970 }
11971 ++matched;
11972 }
11973 if (ctx->code == OMP_TARGET)
11974 {
11975 if (scores == NULL)
11976 return matched < nconstructs ? 0 : simd_seen ? -1 : 1;
11977 target_seen = true;
11978 break;
11979 }
11980 }
11981 else if (ctx->region_type == ORT_WORKSHARE
11982 && ctx->code == OMP_LOOP
11983 && ctx->outer_context
11984 && ctx->outer_context->region_type == ORT_COMBINED_PARALLEL
11985 && ctx->outer_context->outer_context
11986 && ctx->outer_context->outer_context->code == OMP_LOOP
11987 && ctx->outer_context->outer_context->distribute)
11988 ctx = ctx->outer_context->outer_context;
11989 ctx = ctx->outer_context;
11990 }
11991 if (!target_seen
11992 && lookup_attribute ("omp declare simd",
11993 DECL_ATTRIBUTES (current_function_decl)))
11994 {
11995 /* Declare simd is a maybe case: it is supposed to be added only to
11996 the clones created by omp-simd-clone.cc, not to the base function. */
11997 declare_simd_cnt = cnt++;
11998 if (scores)
11999 codes.safe_push (OMP_SIMD);
12000 else if (cnt == 0
12001 && constructs[0] == OMP_SIMD)
12002 {
12003 gcc_assert (matched == 0);
12004 simd_seen = true;
12005 if (++matched == nconstructs)
12006 return -1;
12007 }
12008 }
12009 if (tree attr = lookup_attribute ("omp declare variant variant",
12010 DECL_ATTRIBUTES (current_function_decl)))
12011 {
12012 enum tree_code variant_constructs[5];
12013 int variant_nconstructs = 0;
12014 if (!target_seen)
12015 variant_nconstructs
12016 = omp_constructor_traits_to_codes (TREE_VALUE (attr),
12017 variant_constructs);
12018 for (int i = 0; i < variant_nconstructs; i++)
12019 {
12020 ++cnt;
12021 if (scores)
12022 codes.safe_push (variant_constructs[i]);
12023 else if (matched < nconstructs
12024 && variant_constructs[i] == constructs[matched])
12025 {
12026 if (variant_constructs[i] == OMP_SIMD)
12027 {
12028 if (matched)
12029 return 0;
12030 simd_seen = true;
12031 }
12032 ++matched;
12033 }
12034 }
12035 }
12036 if (!target_seen
12037 && lookup_attribute ("omp declare target block",
12038 DECL_ATTRIBUTES (current_function_decl)))
12039 {
12040 if (scores)
12041 codes.safe_push (OMP_TARGET);
12042 else if (matched < nconstructs && constructs[matched] == OMP_TARGET)
12043 ++matched;
12044 }
12045 if (scores)
12046 {
12047 for (int pass = 0; pass < (declare_simd_cnt == -1 ? 1 : 2); pass++)
12048 {
12049 int j = codes.length () - 1;
12050 for (int i = nconstructs - 1; i >= 0; i--)
12051 {
12052 while (j >= 0
12053 && (pass != 0 || declare_simd_cnt != j)
12054 && constructs[i] != codes[j])
12055 --j;
12056 if (pass == 0 && declare_simd_cnt != -1 && j > declare_simd_cnt)
12057 *scores++ = j - 1;
12058 else
12059 *scores++ = j;
12060 }
12061 *scores++ = ((pass == 0 && declare_simd_cnt != -1)
12062 ? codes.length () - 1 : codes.length ());
12063 }
12064 return declare_simd_cnt == -1 ? 1 : 2;
12065 }
12066 if (matched == nconstructs)
12067 return simd_seen ? -1 : 1;
12068 return 0;
12069 }
12070
12071 /* Gimplify OACC_CACHE. */
12072
12073 static void
12074 gimplify_oacc_cache (tree *expr_p, gimple_seq *pre_p)
12075 {
12076 tree expr = *expr_p;
12077
12078 gimplify_scan_omp_clauses (&OACC_CACHE_CLAUSES (expr), pre_p, ORT_ACC,
12079 OACC_CACHE);
12080 gimplify_adjust_omp_clauses (pre_p, NULL, &OACC_CACHE_CLAUSES (expr),
12081 OACC_CACHE);
12082
12083 /* TODO: Do something sensible with this information. */
12084
12085 *expr_p = NULL_TREE;
12086 }
12087
12088 /* Helper function of gimplify_oacc_declare. Its purpose is, if required,
12089 to translate the 'kind' in CLAUSE into an 'entry' kind and an 'exit'
12090 kind. The entry kind will replace the one in CLAUSE, while the exit
12091 kind will be used in a new omp_clause and returned to the caller. */
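/* As a sketch: a GOMP_MAP_TOFROM mapping on an OpenACC 'declare'
   directive is split by this helper into a GOMP_MAP_TO entry kind
   (stored back into CLAUSE) and a GOMP_MAP_FROM exit kind (returned in
   a fresh clause), so the data is copied to the device on entry to the
   scope and copied back on exit. */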
12092
12093 static tree
12094 gimplify_oacc_declare_1 (tree clause)
12095 {
12096 HOST_WIDE_INT kind, new_op;
12097 bool ret = false;
12098 tree c = NULL;
12099
12100 kind = OMP_CLAUSE_MAP_KIND (clause);
12101
12102 switch (kind)
12103 {
12104 case GOMP_MAP_ALLOC:
12105 new_op = GOMP_MAP_RELEASE;
12106 ret = true;
12107 break;
12108
12109 case GOMP_MAP_FROM:
12110 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_FORCE_ALLOC);
12111 new_op = GOMP_MAP_FROM;
12112 ret = true;
12113 break;
12114
12115 case GOMP_MAP_TOFROM:
12116 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_TO);
12117 new_op = GOMP_MAP_FROM;
12118 ret = true;
12119 break;
12120
12121 case GOMP_MAP_DEVICE_RESIDENT:
12122 case GOMP_MAP_FORCE_DEVICEPTR:
12123 case GOMP_MAP_FORCE_PRESENT:
12124 case GOMP_MAP_LINK:
12125 case GOMP_MAP_POINTER:
12126 case GOMP_MAP_TO:
12127 break;
12128
12129 default:
12130 gcc_unreachable ();
12131 break;
12132 }
12133
12134 if (ret)
12135 {
12136 c = build_omp_clause (OMP_CLAUSE_LOCATION (clause), OMP_CLAUSE_MAP);
12137 OMP_CLAUSE_SET_MAP_KIND (c, new_op);
12138 OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (clause);
12139 }
12140
12141 return c;
12142 }
12143
12144 /* Gimplify OACC_DECLARE. */
12145
12146 static void
12147 gimplify_oacc_declare (tree *expr_p, gimple_seq *pre_p)
12148 {
12149 tree expr = *expr_p;
12150 gomp_target *stmt;
12151 tree clauses, t, decl;
12152
12153 clauses = OACC_DECLARE_CLAUSES (expr);
12154
12155 gimplify_scan_omp_clauses (&clauses, pre_p, ORT_TARGET_DATA, OACC_DECLARE);
12156 gimplify_adjust_omp_clauses (pre_p, NULL, &clauses, OACC_DECLARE);
12157
12158 for (t = clauses; t; t = OMP_CLAUSE_CHAIN (t))
12159 {
12160 decl = OMP_CLAUSE_DECL (t);
12161
12162 if (TREE_CODE (decl) == MEM_REF)
12163 decl = TREE_OPERAND (decl, 0);
12164
12165 if (VAR_P (decl) && !is_oacc_declared (decl))
12166 {
12167 tree attr = get_identifier ("oacc declare target");
12168 DECL_ATTRIBUTES (decl) = tree_cons (attr, NULL_TREE,
12169 DECL_ATTRIBUTES (decl));
12170 }
12171
12172 if (VAR_P (decl)
12173 && !is_global_var (decl)
12174 && DECL_CONTEXT (decl) == current_function_decl)
12175 {
12176 tree c = gimplify_oacc_declare_1 (t);
12177 if (c)
12178 {
12179 if (oacc_declare_returns == NULL)
12180 oacc_declare_returns = new hash_map<tree, tree>;
12181
12182 oacc_declare_returns->put (decl, c);
12183 }
12184 }
12185
12186 if (gimplify_omp_ctxp)
12187 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_SEEN);
12188 }
12189
12190 stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
12191 clauses);
12192
12193 gimplify_seq_add_stmt (pre_p, stmt);
12194
12195 *expr_p = NULL_TREE;
12196 }
12197
12198 /* Gimplify the contents of an OMP_PARALLEL statement. This involves
12199 gimplification of the body, as well as scanning the body for used
12200 variables. We need to do this scan now, because variable-sized
12201 decls will be decomposed during gimplification. */
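/* For instance, something like

     #pragma omp parallel shared(a)
     body;

   becomes a GIMPLE_OMP_PARALLEL wrapping the gimplified body with the
   adjusted clause list attached; the child function and data argument
   operands are deliberately built as NULL_TREE here and are filled in
   by the later omp lowering/expansion passes. */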
12202
12203 static void
12204 gimplify_omp_parallel (tree *expr_p, gimple_seq *pre_p)
12205 {
12206 tree expr = *expr_p;
12207 gimple *g;
12208 gimple_seq body = NULL;
12209
12210 gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p,
12211 OMP_PARALLEL_COMBINED (expr)
12212 ? ORT_COMBINED_PARALLEL
12213 : ORT_PARALLEL, OMP_PARALLEL);
12214
12215 push_gimplify_context ();
12216
12217 g = gimplify_and_return_first (OMP_PARALLEL_BODY (expr), &body);
12218 if (gimple_code (g) == GIMPLE_BIND)
12219 pop_gimplify_context (g);
12220 else
12221 pop_gimplify_context (NULL);
12222
12223 gimplify_adjust_omp_clauses (pre_p, body, &OMP_PARALLEL_CLAUSES (expr),
12224 OMP_PARALLEL);
12225
12226 g = gimple_build_omp_parallel (body,
12227 OMP_PARALLEL_CLAUSES (expr),
12228 NULL_TREE, NULL_TREE);
12229 if (OMP_PARALLEL_COMBINED (expr))
12230 gimple_omp_set_subcode (g, GF_OMP_PARALLEL_COMBINED);
12231 gimplify_seq_add_stmt (pre_p, g);
12232 *expr_p = NULL_TREE;
12233 }
12234
12235 /* Gimplify the contents of an OMP_TASK statement. This involves
12236 gimplification of the body, as well as scanning the body for used
12237 variables. We need to do this scan now, because variable-sized
12238 decls will be decomposed during gimplification. */
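/* Note that a stand-alone

     #pragma omp taskwait depend(in: x)

   is represented as an OMP_TASK with a NULL body; the code below relies
   on that to diagnose the unsupported mutexinoutset depend kind on
   taskwait, and to mark the resulting GIMPLE_OMP_TASK via
   gimple_omp_task_set_taskwait_p. */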
12239
12240 static void
12241 gimplify_omp_task (tree *expr_p, gimple_seq *pre_p)
12242 {
12243 tree expr = *expr_p;
12244 gimple *g;
12245 gimple_seq body = NULL;
12246
12247 if (OMP_TASK_BODY (expr) == NULL_TREE)
12248 for (tree c = OMP_TASK_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
12249 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
12250 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_MUTEXINOUTSET)
12251 {
12252 error_at (OMP_CLAUSE_LOCATION (c),
12253 "%<mutexinoutset%> kind in %<depend%> clause on a "
12254 "%<taskwait%> construct");
12255 break;
12256 }
12257
12258 gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr), pre_p,
12259 omp_find_clause (OMP_TASK_CLAUSES (expr),
12260 OMP_CLAUSE_UNTIED)
12261 ? ORT_UNTIED_TASK : ORT_TASK, OMP_TASK);
12262
12263 if (OMP_TASK_BODY (expr))
12264 {
12265 push_gimplify_context ();
12266
12267 g = gimplify_and_return_first (OMP_TASK_BODY (expr), &body);
12268 if (gimple_code (g) == GIMPLE_BIND)
12269 pop_gimplify_context (g);
12270 else
12271 pop_gimplify_context (NULL);
12272 }
12273
12274 gimplify_adjust_omp_clauses (pre_p, body, &OMP_TASK_CLAUSES (expr),
12275 OMP_TASK);
12276
12277 g = gimple_build_omp_task (body,
12278 OMP_TASK_CLAUSES (expr),
12279 NULL_TREE, NULL_TREE,
12280 NULL_TREE, NULL_TREE, NULL_TREE);
12281 if (OMP_TASK_BODY (expr) == NULL_TREE)
12282 gimple_omp_task_set_taskwait_p (g, true);
12283 gimplify_seq_add_stmt (pre_p, g);
12284 *expr_p = NULL_TREE;
12285 }
12286
12287 /* Helper function for gimplify_omp_for. If *TP is not a gimple constant,
12288 force it into a temporary initialized in PRE_P and add firstprivate clause
12289 to ORIG_FOR_STMT. */
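/* E.g. in a (hypothetical) loop like

     #pragma omp taskloop
     for (i = 0; i < compute_n (); i++)

   compute_n () is not a gimple constant, so its value is computed into
   a temporary before the construct and that temporary is made
   firstprivate on the taskloop, giving every generated task the value
   evaluated once up front. */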
12290
12291 static void
12292 gimplify_omp_taskloop_expr (tree type, tree *tp, gimple_seq *pre_p,
12293 tree orig_for_stmt)
12294 {
12295 if (*tp == NULL || is_gimple_constant (*tp))
12296 return;
12297
12298 *tp = get_initialized_tmp_var (*tp, pre_p, NULL, false);
12299 /* A reference-to-pointer conversion is considered useless,
12300 but it is significant for the firstprivate clause. Force it
12301 here. */
12302 if (type
12303 && TREE_CODE (type) == POINTER_TYPE
12304 && TREE_CODE (TREE_TYPE (*tp)) == REFERENCE_TYPE)
12305 {
12306 tree v = create_tmp_var (TYPE_MAIN_VARIANT (type));
12307 tree m = build2 (INIT_EXPR, TREE_TYPE (v), v, *tp);
12308 gimplify_and_add (m, pre_p);
12309 *tp = v;
12310 }
12311
12312 tree c = build_omp_clause (input_location, OMP_CLAUSE_FIRSTPRIVATE);
12313 OMP_CLAUSE_DECL (c) = *tp;
12314 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
12315 OMP_FOR_CLAUSES (orig_for_stmt) = c;
12316 }
12317
12318 /* Gimplify the gross structure of an OMP_FOR statement. */
12319
12320 static enum gimplify_status
12321 gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
12322 {
12323 tree for_stmt, orig_for_stmt, inner_for_stmt = NULL_TREE, decl, var, t;
12324 enum gimplify_status ret = GS_ALL_DONE;
12325 enum gimplify_status tret;
12326 gomp_for *gfor;
12327 gimple_seq for_body, for_pre_body;
12328 int i;
12329 bitmap has_decl_expr = NULL;
12330 enum omp_region_type ort = ORT_WORKSHARE;
12331 bool openacc = TREE_CODE (*expr_p) == OACC_LOOP;
12332
12333 orig_for_stmt = for_stmt = *expr_p;
12334
12335 bool loop_p = (omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_BIND)
12336 != NULL_TREE);
12337 if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
12338 {
12339 tree *data[4] = { NULL, NULL, NULL, NULL };
12340 gcc_assert (TREE_CODE (for_stmt) != OACC_LOOP);
12341 inner_for_stmt = walk_tree (&OMP_FOR_BODY (for_stmt),
12342 find_combined_omp_for, data, NULL);
12343 if (inner_for_stmt == NULL_TREE)
12344 {
12345 gcc_assert (seen_error ());
12346 *expr_p = NULL_TREE;
12347 return GS_ERROR;
12348 }
12349 if (data[2] && OMP_FOR_PRE_BODY (*data[2]))
12350 {
12351 append_to_statement_list_force (OMP_FOR_PRE_BODY (*data[2]),
12352 &OMP_FOR_PRE_BODY (for_stmt));
12353 OMP_FOR_PRE_BODY (*data[2]) = NULL_TREE;
12354 }
12355 if (OMP_FOR_PRE_BODY (inner_for_stmt))
12356 {
12357 append_to_statement_list_force (OMP_FOR_PRE_BODY (inner_for_stmt),
12358 &OMP_FOR_PRE_BODY (for_stmt));
12359 OMP_FOR_PRE_BODY (inner_for_stmt) = NULL_TREE;
12360 }
12361
12362 if (data[0])
12363 {
12364 /* We have some statements or variable declarations in between
12365 the composite construct directives. Move them around the
12366 inner_for_stmt. */
12367 data[0] = expr_p;
12368 for (i = 0; i < 3; i++)
12369 if (data[i])
12370 {
12371 tree t = *data[i];
12372 if (i < 2 && data[i + 1] == &OMP_BODY (t))
12373 data[i + 1] = data[i];
12374 *data[i] = OMP_BODY (t);
12375 tree body = build3 (BIND_EXPR, void_type_node, NULL_TREE,
12376 NULL_TREE, make_node (BLOCK));
12377 OMP_BODY (t) = body;
12378 append_to_statement_list_force (inner_for_stmt,
12379 &BIND_EXPR_BODY (body));
12380 *data[3] = t;
12381 data[3] = tsi_stmt_ptr (tsi_start (BIND_EXPR_BODY (body)));
12382 gcc_assert (*data[3] == inner_for_stmt);
12383 }
12384 return GS_OK;
12385 }
12386
12387 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (inner_for_stmt)); i++)
12388 if (!loop_p
12389 && OMP_FOR_ORIG_DECLS (inner_for_stmt)
12390 && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
12391 i)) == TREE_LIST
12392 && TREE_PURPOSE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
12393 i)))
12394 {
12395 tree orig = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt), i);
12396 /* Class iterators aren't allowed on OMP_SIMD, so the only
12397 case we need to solve is distribute parallel for. They are
12398 allowed on the loop construct, but that is already handled
12399 in gimplify_omp_loop. */
12400 gcc_assert (TREE_CODE (inner_for_stmt) == OMP_FOR
12401 && TREE_CODE (for_stmt) == OMP_DISTRIBUTE
12402 && data[1]);
12403 tree orig_decl = TREE_PURPOSE (orig);
12404 tree last = TREE_VALUE (orig);
12405 tree *pc;
12406 for (pc = &OMP_FOR_CLAUSES (inner_for_stmt);
12407 *pc; pc = &OMP_CLAUSE_CHAIN (*pc))
12408 if ((OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_PRIVATE
12409 || OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_LASTPRIVATE)
12410 && OMP_CLAUSE_DECL (*pc) == orig_decl)
12411 break;
12412 if (*pc == NULL_TREE)
12413 {
12414 tree *spc;
12415 for (spc = &OMP_PARALLEL_CLAUSES (*data[1]);
12416 *spc; spc = &OMP_CLAUSE_CHAIN (*spc))
12417 if (OMP_CLAUSE_CODE (*spc) == OMP_CLAUSE_PRIVATE
12418 && OMP_CLAUSE_DECL (*spc) == orig_decl)
12419 break;
12420 if (*spc)
12421 {
12422 tree c = *spc;
12423 *spc = OMP_CLAUSE_CHAIN (c);
12424 OMP_CLAUSE_CHAIN (c) = NULL_TREE;
12425 *pc = c;
12426 }
12427 }
12428 if (*pc == NULL_TREE)
12429 ;
12430 else if (OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_PRIVATE)
12431 {
12432 /* A private clause will appear only on inner_for_stmt.
12433 Change it into firstprivate, and add a private clause
12434 on for_stmt. */
12435 tree c = copy_node (*pc);
12436 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
12437 OMP_FOR_CLAUSES (for_stmt) = c;
12438 OMP_CLAUSE_CODE (*pc) = OMP_CLAUSE_FIRSTPRIVATE;
12439 lang_hooks.decls.omp_finish_clause (*pc, pre_p, openacc);
12440 }
12441 else
12442 {
12443 /* A lastprivate clause will appear on both inner_for_stmt
12444 and for_stmt. Add a firstprivate clause to
12445 inner_for_stmt. */
12446 tree c = build_omp_clause (OMP_CLAUSE_LOCATION (*pc),
12447 OMP_CLAUSE_FIRSTPRIVATE);
12448 OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (*pc);
12449 OMP_CLAUSE_CHAIN (c) = *pc;
12450 *pc = c;
12451 lang_hooks.decls.omp_finish_clause (*pc, pre_p, openacc);
12452 }
12453 tree c = build_omp_clause (UNKNOWN_LOCATION,
12454 OMP_CLAUSE_FIRSTPRIVATE);
12455 OMP_CLAUSE_DECL (c) = last;
12456 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
12457 OMP_PARALLEL_CLAUSES (*data[1]) = c;
12458 c = build_omp_clause (UNKNOWN_LOCATION,
12459 *pc ? OMP_CLAUSE_SHARED
12460 : OMP_CLAUSE_FIRSTPRIVATE);
12461 OMP_CLAUSE_DECL (c) = orig_decl;
12462 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
12463 OMP_PARALLEL_CLAUSES (*data[1]) = c;
12464 }
12465 /* Similarly, take care of C++ range-for temporaries; those should
12466 be firstprivate on OMP_PARALLEL if any. */
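/* E.g. for a range-based loop like

     #pragma omp distribute parallel for
     for (auto &x : container)

   the C++ front end introduces the artificial __for_range and __for_end
   temporaries; below, TREE_VEC_ELT (v, 0) and TREE_VEC_ELT (v, 1) refer
   to those two decls, respectively. */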
12467 if (data[1])
12468 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (inner_for_stmt)); i++)
12469 if (OMP_FOR_ORIG_DECLS (inner_for_stmt)
12470 && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
12471 i)) == TREE_LIST
12472 && TREE_CHAIN (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
12473 i)))
12474 {
12475 tree orig
12476 = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt), i);
12477 tree v = TREE_CHAIN (orig);
12478 tree c = build_omp_clause (UNKNOWN_LOCATION,
12479 OMP_CLAUSE_FIRSTPRIVATE);
12480 /* First add firstprivate clause for the __for_end artificial
12481 decl. */
12482 OMP_CLAUSE_DECL (c) = TREE_VEC_ELT (v, 1);
12483 if (TREE_CODE (TREE_TYPE (OMP_CLAUSE_DECL (c)))
12484 == REFERENCE_TYPE)
12485 OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c) = 1;
12486 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
12487 OMP_PARALLEL_CLAUSES (*data[1]) = c;
12488 if (TREE_VEC_ELT (v, 0))
12489 {
12490 /* And now the same for __for_range artificial decl if it
12491 exists. */
12492 c = build_omp_clause (UNKNOWN_LOCATION,
12493 OMP_CLAUSE_FIRSTPRIVATE);
12494 OMP_CLAUSE_DECL (c) = TREE_VEC_ELT (v, 0);
12495 if (TREE_CODE (TREE_TYPE (OMP_CLAUSE_DECL (c)))
12496 == REFERENCE_TYPE)
12497 OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c) = 1;
12498 OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
12499 OMP_PARALLEL_CLAUSES (*data[1]) = c;
12500 }
12501 }
12502 }
12503
12504 switch (TREE_CODE (for_stmt))
12505 {
12506 case OMP_FOR:
12507 if (OMP_FOR_NON_RECTANGULAR (inner_for_stmt ? inner_for_stmt : for_stmt))
12508 {
12509 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
12510 OMP_CLAUSE_SCHEDULE))
12511 error_at (EXPR_LOCATION (for_stmt),
12512 "%qs clause may not appear on non-rectangular %qs",
12513 "schedule", "for");
12514 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_ORDERED))
12515 error_at (EXPR_LOCATION (for_stmt),
12516 "%qs clause may not appear on non-rectangular %qs",
12517 "ordered", "for");
12518 }
12519 break;
12520 case OMP_DISTRIBUTE:
12521 if (OMP_FOR_NON_RECTANGULAR (inner_for_stmt ? inner_for_stmt : for_stmt)
12522 && omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
12523 OMP_CLAUSE_DIST_SCHEDULE))
12524 error_at (EXPR_LOCATION (for_stmt),
12525 "%qs clause may not appear on non-rectangular %qs",
12526 "dist_schedule", "distribute");
12527 break;
12528 case OACC_LOOP:
12529 ort = ORT_ACC;
12530 break;
12531 case OMP_TASKLOOP:
12532 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_UNTIED))
12533 ort = ORT_UNTIED_TASKLOOP;
12534 else
12535 ort = ORT_TASKLOOP;
12536 break;
12537 case OMP_SIMD:
12538 ort = ORT_SIMD;
12539 break;
12540 default:
12541 gcc_unreachable ();
12542 }
12543
12544 /* Set OMP_CLAUSE_LINEAR_NO_COPYIN flag on explicit linear
12545 clause for the IV. */
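/* E.g. for "#pragma omp simd linear(i)" where i is the sole loop
   iterator, copying the outer value of i in would be pointless, since
   the construct itself assigns the initial value to the IV. */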
12546 if (ort == ORT_SIMD && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
12547 {
12548 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), 0);
12549 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
12550 decl = TREE_OPERAND (t, 0);
12551 for (tree c = OMP_FOR_CLAUSES (for_stmt); c; c = OMP_CLAUSE_CHAIN (c))
12552 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
12553 && OMP_CLAUSE_DECL (c) == decl)
12554 {
12555 OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
12556 break;
12557 }
12558 }
12559
12560 if (TREE_CODE (for_stmt) != OMP_TASKLOOP)
12561 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt), pre_p, ort,
12562 loop_p && TREE_CODE (for_stmt) != OMP_SIMD
12563 ? OMP_LOOP : TREE_CODE (for_stmt));
12564
12565 if (TREE_CODE (for_stmt) == OMP_DISTRIBUTE)
12566 gimplify_omp_ctxp->distribute = true;
12567
12568 /* Handle OMP_FOR_INIT. */
12569 for_pre_body = NULL;
12570 if ((ort == ORT_SIMD
12571 || (inner_for_stmt && TREE_CODE (inner_for_stmt) == OMP_SIMD))
12572 && OMP_FOR_PRE_BODY (for_stmt))
12573 {
12574 has_decl_expr = BITMAP_ALLOC (NULL);
12575 if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == DECL_EXPR
12576 && TREE_CODE (DECL_EXPR_DECL (OMP_FOR_PRE_BODY (for_stmt)))
12577 == VAR_DECL)
12578 {
12579 t = OMP_FOR_PRE_BODY (for_stmt);
12580 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
12581 }
12582 else if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == STATEMENT_LIST)
12583 {
12584 tree_stmt_iterator si;
12585 for (si = tsi_start (OMP_FOR_PRE_BODY (for_stmt)); !tsi_end_p (si);
12586 tsi_next (&si))
12587 {
12588 t = tsi_stmt (si);
12589 if (TREE_CODE (t) == DECL_EXPR
12590 && TREE_CODE (DECL_EXPR_DECL (t)) == VAR_DECL)
12591 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
12592 }
12593 }
12594 }
12595 if (OMP_FOR_PRE_BODY (for_stmt))
12596 {
12597 if (TREE_CODE (for_stmt) != OMP_TASKLOOP || gimplify_omp_ctxp)
12598 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
12599 else
12600 {
12601 struct gimplify_omp_ctx ctx;
12602 memset (&ctx, 0, sizeof (ctx));
12603 ctx.region_type = ORT_NONE;
12604 gimplify_omp_ctxp = &ctx;
12605 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
12606 gimplify_omp_ctxp = NULL;
12607 }
12608 }
12609 OMP_FOR_PRE_BODY (for_stmt) = NULL_TREE;
12610
12611 if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
12612 for_stmt = inner_for_stmt;
12613
12614 /* For taskloop, we need to gimplify the start, end and step before the
12615 taskloop, outside of the taskloop omp context. */
12616 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
12617 {
12618 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
12619 {
12620 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
12621 gimple_seq *for_pre_p = (gimple_seq_empty_p (for_pre_body)
12622 ? pre_p : &for_pre_body);
12623 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
12624 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC)
12625 {
12626 tree v = TREE_OPERAND (t, 1);
12627 gimplify_omp_taskloop_expr (type, &TREE_VEC_ELT (v, 1),
12628 for_pre_p, orig_for_stmt);
12629 gimplify_omp_taskloop_expr (type, &TREE_VEC_ELT (v, 2),
12630 for_pre_p, orig_for_stmt);
12631 }
12632 else
12633 gimplify_omp_taskloop_expr (type, &TREE_OPERAND (t, 1), for_pre_p,
12634 orig_for_stmt);
12635
12636 /* Handle OMP_FOR_COND. */
12637 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
12638 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC)
12639 {
12640 tree v = TREE_OPERAND (t, 1);
12641 gimplify_omp_taskloop_expr (type, &TREE_VEC_ELT (v, 1),
12642 for_pre_p, orig_for_stmt);
12643 gimplify_omp_taskloop_expr (type, &TREE_VEC_ELT (v, 2),
12644 for_pre_p, orig_for_stmt);
12645 }
12646 else
12647 gimplify_omp_taskloop_expr (type, &TREE_OPERAND (t, 1), for_pre_p,
12648 orig_for_stmt);
12649
12650 /* Handle OMP_FOR_INCR. */
12651 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
12652 if (TREE_CODE (t) == MODIFY_EXPR)
12653 {
12654 decl = TREE_OPERAND (t, 0);
12655 t = TREE_OPERAND (t, 1);
12656 tree *tp = &TREE_OPERAND (t, 1);
12657 if (TREE_CODE (t) == PLUS_EXPR && *tp == decl)
12658 tp = &TREE_OPERAND (t, 0);
12659
12660 gimplify_omp_taskloop_expr (NULL_TREE, tp, for_pre_p,
12661 orig_for_stmt);
12662 }
12663 }
12664
12665 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (orig_for_stmt), pre_p, ort,
12666 OMP_TASKLOOP);
12667 }
12668
12669 if (orig_for_stmt != for_stmt)
12670 gimplify_omp_ctxp->combined_loop = true;
12671
12672 for_body = NULL;
12673 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
12674 == TREE_VEC_LENGTH (OMP_FOR_COND (for_stmt)));
12675 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
12676 == TREE_VEC_LENGTH (OMP_FOR_INCR (for_stmt)));
12677
12678 tree c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_ORDERED);
12679 bool is_doacross = false;
12680 if (c && OMP_CLAUSE_ORDERED_EXPR (c))
12681 {
12682 is_doacross = true;
12683 gimplify_omp_ctxp->loop_iter_var.create (TREE_VEC_LENGTH
12684 (OMP_FOR_INIT (for_stmt))
12685 * 2);
12686 }
12687 int collapse = 1, tile = 0;
12688 c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_COLLAPSE);
12689 if (c)
12690 collapse = tree_to_shwi (OMP_CLAUSE_COLLAPSE_EXPR (c));
12691 c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_TILE);
12692 if (c)
12693 tile = list_length (OMP_CLAUSE_TILE_LIST (c));
12694 c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_ALLOCATE);
12695 hash_set<tree> *allocate_uids = NULL;
12696 if (c)
12697 {
12698 allocate_uids = new hash_set<tree>;
12699 for (; c; c = OMP_CLAUSE_CHAIN (c))
12700 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_ALLOCATE)
12701 allocate_uids->add (OMP_CLAUSE_DECL (c));
12702 }
12703 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
12704 {
12705 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
12706 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
12707 decl = TREE_OPERAND (t, 0);
12708 gcc_assert (DECL_P (decl));
12709 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl))
12710 || POINTER_TYPE_P (TREE_TYPE (decl)));
12711 if (is_doacross)
12712 {
12713 if (TREE_CODE (for_stmt) == OMP_FOR && OMP_FOR_ORIG_DECLS (for_stmt))
12714 {
12715 tree orig_decl = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
12716 if (TREE_CODE (orig_decl) == TREE_LIST)
12717 {
12718 orig_decl = TREE_PURPOSE (orig_decl);
12719 if (!orig_decl)
12720 orig_decl = decl;
12721 }
12722 gimplify_omp_ctxp->loop_iter_var.quick_push (orig_decl);
12723 }
12724 else
12725 gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
12726 gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
12727 }
12728
12729 if (for_stmt == orig_for_stmt)
12730 {
12731 tree orig_decl = decl;
12732 if (OMP_FOR_ORIG_DECLS (for_stmt))
12733 {
12734 orig_decl = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
12735 if (TREE_CODE (orig_decl) == TREE_LIST)
12736 {
12737 orig_decl = TREE_PURPOSE (orig_decl);
12738 if (!orig_decl)
12739 orig_decl = decl;
12740 }
12741 }
12742 if (is_global_var (orig_decl) && DECL_THREAD_LOCAL_P (orig_decl))
12743 error_at (EXPR_LOCATION (for_stmt),
12744 "threadprivate iteration variable %qD", orig_decl);
12745 }
12746
12747 /* Make sure the iteration variable is private. */
12748 tree c = NULL_TREE;
12749 tree c2 = NULL_TREE;
12750 if (orig_for_stmt != for_stmt)
12751 {
12752 /* Preserve this information until we gimplify the inner simd. */
12753 if (has_decl_expr
12754 && bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
12755 TREE_PRIVATE (t) = 1;
12756 }
12757 else if (ort == ORT_SIMD)
12758 {
12759 splay_tree_node n = splay_tree_lookup (gimplify_omp_ctxp->variables,
12760 (splay_tree_key) decl);
12761 omp_is_private (gimplify_omp_ctxp, decl,
12762 1 + (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
12763 != 1));
12764 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
12765 {
12766 omp_notice_variable (gimplify_omp_ctxp, decl, true);
12767 if (n->value & GOVD_LASTPRIVATE_CONDITIONAL)
12768 for (tree c3 = omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
12769 OMP_CLAUSE_LASTPRIVATE);
12770 c3; c3 = omp_find_clause (OMP_CLAUSE_CHAIN (c3),
12771 OMP_CLAUSE_LASTPRIVATE))
12772 if (OMP_CLAUSE_DECL (c3) == decl)
12773 {
12774 warning_at (OMP_CLAUSE_LOCATION (c3), 0,
12775 "conditional %<lastprivate%> on loop "
12776 "iterator %qD ignored", decl);
12777 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c3) = 0;
12778 n->value &= ~GOVD_LASTPRIVATE_CONDITIONAL;
12779 }
12780 }
12781 else if (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1 && !loop_p)
12782 {
12783 c = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
12784 OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
12785 unsigned int flags = GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN;
12786 if ((has_decl_expr
12787 && bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
12788 || TREE_PRIVATE (t))
12789 {
12790 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
12791 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
12792 }
12793 struct gimplify_omp_ctx *outer
12794 = gimplify_omp_ctxp->outer_context;
12795 if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
12796 {
12797 if (outer->region_type == ORT_WORKSHARE
12798 && outer->combined_loop)
12799 {
12800 n = splay_tree_lookup (outer->variables,
12801 (splay_tree_key)decl);
12802 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
12803 {
12804 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
12805 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
12806 }
12807 else
12808 {
12809 struct gimplify_omp_ctx *octx = outer->outer_context;
12810 if (octx
12811 && octx->region_type == ORT_COMBINED_PARALLEL
12812 && octx->outer_context
12813 && (octx->outer_context->region_type
12814 == ORT_WORKSHARE)
12815 && octx->outer_context->combined_loop)
12816 {
12817 octx = octx->outer_context;
12818 n = splay_tree_lookup (octx->variables,
12819 (splay_tree_key)decl);
12820 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
12821 {
12822 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
12823 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
12824 }
12825 }
12826 }
12827 }
12828 }
12829
12830 OMP_CLAUSE_DECL (c) = decl;
12831 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
12832 OMP_FOR_CLAUSES (for_stmt) = c;
12833 omp_add_variable (gimplify_omp_ctxp, decl, flags);
12834 if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
12835 omp_lastprivate_for_combined_outer_constructs (outer, decl,
12836 true);
12837 }
12838 else
12839 {
12840 bool lastprivate
12841 = (!has_decl_expr
12842 || !bitmap_bit_p (has_decl_expr, DECL_UID (decl)));
12843 if (TREE_PRIVATE (t))
12844 lastprivate = false;
12845 if (loop_p && OMP_FOR_ORIG_DECLS (for_stmt))
12846 {
12847 tree elt = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
12848 if (TREE_CODE (elt) == TREE_LIST && TREE_PURPOSE (elt))
12849 lastprivate = false;
12850 }
12851
12852 struct gimplify_omp_ctx *outer
12853 = gimplify_omp_ctxp->outer_context;
12854 if (outer && lastprivate)
12855 omp_lastprivate_for_combined_outer_constructs (outer, decl,
12856 true);
12857
12858 c = build_omp_clause (input_location,
12859 lastprivate ? OMP_CLAUSE_LASTPRIVATE
12860 : OMP_CLAUSE_PRIVATE);
12861 OMP_CLAUSE_DECL (c) = decl;
12862 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
12863 OMP_FOR_CLAUSES (for_stmt) = c;
12864 omp_add_variable (gimplify_omp_ctxp, decl,
12865 (lastprivate ? GOVD_LASTPRIVATE : GOVD_PRIVATE)
12866 | GOVD_EXPLICIT | GOVD_SEEN);
12867 c = NULL_TREE;
12868 }
12869 }
12870 else if (omp_is_private (gimplify_omp_ctxp, decl, 0))
12871 {
12872 omp_notice_variable (gimplify_omp_ctxp, decl, true);
12873 splay_tree_node n = splay_tree_lookup (gimplify_omp_ctxp->variables,
12874 (splay_tree_key) decl);
12875 if (n && (n->value & GOVD_LASTPRIVATE_CONDITIONAL))
12876 for (tree c3 = omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
12877 OMP_CLAUSE_LASTPRIVATE);
12878 c3; c3 = omp_find_clause (OMP_CLAUSE_CHAIN (c3),
12879 OMP_CLAUSE_LASTPRIVATE))
12880 if (OMP_CLAUSE_DECL (c3) == decl)
12881 {
12882 warning_at (OMP_CLAUSE_LOCATION (c3), 0,
12883 "conditional %<lastprivate%> on loop "
12884 "iterator %qD ignored", decl);
12885 OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c3) = 0;
12886 n->value &= ~GOVD_LASTPRIVATE_CONDITIONAL;
12887 }
12888 }
12889 else
12890 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_PRIVATE | GOVD_SEEN);
12891
12892 /* If DECL is not a gimple register, create a temporary variable to act
12893 as an iteration counter. This is valid, since DECL cannot be
12894 modified in the body of the loop. Similarly for any iteration vars
12895 in simd with collapse > 1 where the iterator vars must be
12896 lastprivate. And similarly for vars mentioned in allocate clauses. */
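/* E.g. if the iterator has had its address taken and is therefore not
   a gimple register, the code below iterates on a fresh temporary VAR
   instead and emits "decl = var" at the top of the loop body, keeping
   the loop control in gimple registers. */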
12897 if (orig_for_stmt != for_stmt)
12898 var = decl;
12899 else if (!is_gimple_reg (decl)
12900 || (ort == ORT_SIMD
12901 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) > 1)
12902 || (allocate_uids && allocate_uids->contains (decl)))
12903 {
12904 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
12905 /* Make sure omp_add_variable is not called on it prematurely.
12906 We call it ourselves a few lines later. */
12907 gimplify_omp_ctxp = NULL;
12908 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
12909 gimplify_omp_ctxp = ctx;
12910 TREE_OPERAND (t, 0) = var;
12911
12912 gimplify_seq_add_stmt (&for_body, gimple_build_assign (decl, var));
12913
12914 if (ort == ORT_SIMD
12915 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
12916 {
12917 c2 = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
12918 OMP_CLAUSE_LINEAR_NO_COPYIN (c2) = 1;
12919 OMP_CLAUSE_LINEAR_NO_COPYOUT (c2) = 1;
12920 OMP_CLAUSE_DECL (c2) = var;
12921 OMP_CLAUSE_CHAIN (c2) = OMP_FOR_CLAUSES (for_stmt);
12922 OMP_FOR_CLAUSES (for_stmt) = c2;
12923 omp_add_variable (gimplify_omp_ctxp, var,
12924 GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN);
12925 if (c == NULL_TREE)
12926 {
12927 c = c2;
12928 c2 = NULL_TREE;
12929 }
12930 }
12931 else
12932 omp_add_variable (gimplify_omp_ctxp, var,
12933 GOVD_PRIVATE | GOVD_SEEN);
12934 }
12935 else
12936 var = decl;
12937
12938 gimplify_omp_ctxp->in_for_exprs = true;
12939 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC)
12940 {
12941 tree lb = TREE_OPERAND (t, 1);
12942 tret = gimplify_expr (&TREE_VEC_ELT (lb, 1), &for_pre_body, NULL,
12943 is_gimple_val, fb_rvalue, false);
12944 ret = MIN (ret, tret);
12945 tret = gimplify_expr (&TREE_VEC_ELT (lb, 2), &for_pre_body, NULL,
12946 is_gimple_val, fb_rvalue, false);
12947 }
12948 else
12949 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
12950 is_gimple_val, fb_rvalue, false);
12951 gimplify_omp_ctxp->in_for_exprs = false;
12952 ret = MIN (ret, tret);
12953 if (ret == GS_ERROR)
12954 return ret;
12955
12956 /* Handle OMP_FOR_COND. */
12957 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
12958 gcc_assert (COMPARISON_CLASS_P (t));
12959 gcc_assert (TREE_OPERAND (t, 0) == decl);
12960
12961 gimplify_omp_ctxp->in_for_exprs = true;
12962 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC)
12963 {
12964 tree ub = TREE_OPERAND (t, 1);
12965 tret = gimplify_expr (&TREE_VEC_ELT (ub, 1), &for_pre_body, NULL,
12966 is_gimple_val, fb_rvalue, false);
12967 ret = MIN (ret, tret);
12968 tret = gimplify_expr (&TREE_VEC_ELT (ub, 2), &for_pre_body, NULL,
12969 is_gimple_val, fb_rvalue, false);
12970 }
12971 else
12972 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
12973 is_gimple_val, fb_rvalue, false);
12974 gimplify_omp_ctxp->in_for_exprs = false;
12975 ret = MIN (ret, tret);
12976
12977 /* Handle OMP_FOR_INCR. */
12978 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
12979 switch (TREE_CODE (t))
12980 {
12981 case PREINCREMENT_EXPR:
12982 case POSTINCREMENT_EXPR:
12983 {
12984 tree decl = TREE_OPERAND (t, 0);
12985 /* c_omp_for_incr_canonicalize_ptr() should have been
12986 called to massage things appropriately. */
12987 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
12988
12989 if (orig_for_stmt != for_stmt)
12990 break;
12991 t = build_int_cst (TREE_TYPE (decl), 1);
12992 if (c)
12993 OMP_CLAUSE_LINEAR_STEP (c) = t;
12994 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
12995 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
12996 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
12997 break;
12998 }
12999
13000 case PREDECREMENT_EXPR:
13001 case POSTDECREMENT_EXPR:
13002 /* c_omp_for_incr_canonicalize_ptr() should have been
13003 called to massage things appropriately. */
13004 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
13005 if (orig_for_stmt != for_stmt)
13006 break;
13007 t = build_int_cst (TREE_TYPE (decl), -1);
13008 if (c)
13009 OMP_CLAUSE_LINEAR_STEP (c) = t;
13010 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
13011 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
13012 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
13013 break;
13014
13015 case MODIFY_EXPR:
13016 gcc_assert (TREE_OPERAND (t, 0) == decl);
13017 TREE_OPERAND (t, 0) = var;
13018
13019 t = TREE_OPERAND (t, 1);
13020 switch (TREE_CODE (t))
13021 {
13022 case PLUS_EXPR:
13023 if (TREE_OPERAND (t, 1) == decl)
13024 {
13025 TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0);
13026 TREE_OPERAND (t, 0) = var;
13027 break;
13028 }
13029
13030 /* Fallthru. */
13031 case MINUS_EXPR:
13032 case POINTER_PLUS_EXPR:
13033 gcc_assert (TREE_OPERAND (t, 0) == decl);
13034 TREE_OPERAND (t, 0) = var;
13035 break;
13036 default:
13037 gcc_unreachable ();
13038 }
13039
13040 gimplify_omp_ctxp->in_for_exprs = true;
13041 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
13042 is_gimple_val, fb_rvalue, false);
13043 ret = MIN (ret, tret);
13044 if (c)
13045 {
13046 tree step = TREE_OPERAND (t, 1);
13047 tree stept = TREE_TYPE (decl);
13048 if (POINTER_TYPE_P (stept))
13049 stept = sizetype;
13050 step = fold_convert (stept, step);
13051 if (TREE_CODE (t) == MINUS_EXPR)
13052 step = fold_build1 (NEGATE_EXPR, stept, step);
13053 OMP_CLAUSE_LINEAR_STEP (c) = step;
13054 if (step != TREE_OPERAND (t, 1))
13055 {
13056 tret = gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c),
13057 &for_pre_body, NULL,
13058 is_gimple_val, fb_rvalue, false);
13059 ret = MIN (ret, tret);
13060 }
13061 }
13062 gimplify_omp_ctxp->in_for_exprs = false;
13063 break;
13064
13065 default:
13066 gcc_unreachable ();
13067 }
13068
13069 if (c2)
13070 {
13071 gcc_assert (c);
13072 OMP_CLAUSE_LINEAR_STEP (c2) = OMP_CLAUSE_LINEAR_STEP (c);
13073 }
13074
13075 if ((var != decl || collapse > 1 || tile) && orig_for_stmt == for_stmt)
13076 {
13077 for (c = OMP_FOR_CLAUSES (for_stmt); c ; c = OMP_CLAUSE_CHAIN (c))
13078 if (((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
13079 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) == NULL)
13080 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
13081 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)
13082 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) == NULL))
13083 && OMP_CLAUSE_DECL (c) == decl)
13084 {
13085 if (is_doacross && (collapse == 1 || i >= collapse))
13086 t = var;
13087 else
13088 {
13089 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
13090 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
13091 gcc_assert (TREE_OPERAND (t, 0) == var);
13092 t = TREE_OPERAND (t, 1);
13093 gcc_assert (TREE_CODE (t) == PLUS_EXPR
13094 || TREE_CODE (t) == MINUS_EXPR
13095 || TREE_CODE (t) == POINTER_PLUS_EXPR);
13096 gcc_assert (TREE_OPERAND (t, 0) == var);
13097 t = build2 (TREE_CODE (t), TREE_TYPE (decl),
13098 is_doacross ? var : decl,
13099 TREE_OPERAND (t, 1));
13100 }
13101 gimple_seq *seq;
13102 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
13103 seq = &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c);
13104 else
13105 seq = &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c);
13106 push_gimplify_context ();
13107 gimplify_assign (decl, t, seq);
13108 gimple *bind = NULL;
13109 if (gimplify_ctxp->temps)
13110 {
13111 bind = gimple_build_bind (NULL_TREE, *seq, NULL_TREE);
13112 *seq = NULL;
13113 gimplify_seq_add_stmt (seq, bind);
13114 }
13115 pop_gimplify_context (bind);
13116 }
13117 }
13118 if (OMP_FOR_NON_RECTANGULAR (for_stmt) && var != decl)
13119 for (int j = i + 1; j < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); j++)
13120 {
13121 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), j);
13122 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
13123 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC
13124 && TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) == decl)
13125 TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) = var;
13126 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), j);
13127 gcc_assert (COMPARISON_CLASS_P (t));
13128 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC
13129 && TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) == decl)
13130 TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) = var;
13131 }
13132 }
13133
13134 BITMAP_FREE (has_decl_expr);
13135 delete allocate_uids;
13136
13137 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP
13138 || (loop_p && orig_for_stmt == for_stmt))
13139 {
13140 push_gimplify_context ();
13141 if (TREE_CODE (OMP_FOR_BODY (orig_for_stmt)) != BIND_EXPR)
13142 {
13143 OMP_FOR_BODY (orig_for_stmt)
13144 = build3 (BIND_EXPR, void_type_node, NULL,
13145 OMP_FOR_BODY (orig_for_stmt), NULL);
13146 TREE_SIDE_EFFECTS (OMP_FOR_BODY (orig_for_stmt)) = 1;
13147 }
13148 }
13149
13150 gimple *g = gimplify_and_return_first (OMP_FOR_BODY (orig_for_stmt),
13151 &for_body);
13152
13153 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP
13154 || (loop_p && orig_for_stmt == for_stmt))
13155 {
13156 if (gimple_code (g) == GIMPLE_BIND)
13157 pop_gimplify_context (g);
13158 else
13159 pop_gimplify_context (NULL);
13160 }
13161
13162 if (orig_for_stmt != for_stmt)
13163 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
13164 {
13165 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
13166 decl = TREE_OPERAND (t, 0);
13167 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
13168 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
13169 gimplify_omp_ctxp = ctx->outer_context;
13170 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
13171 gimplify_omp_ctxp = ctx;
13172 omp_add_variable (gimplify_omp_ctxp, var, GOVD_PRIVATE | GOVD_SEEN);
13173 TREE_OPERAND (t, 0) = var;
13174 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
13175 TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
13176 TREE_OPERAND (TREE_OPERAND (t, 1), 0) = var;
13177 if (OMP_FOR_NON_RECTANGULAR (for_stmt))
13178 for (int j = i + 1;
13179 j < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); j++)
13180 {
13181 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), j);
13182 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
13183 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC
13184 && TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) == decl)
13185 {
13186 TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
13187 TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) = var;
13188 }
13189 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), j);
13190 gcc_assert (COMPARISON_CLASS_P (t));
13191 if (TREE_CODE (TREE_OPERAND (t, 1)) == TREE_VEC
13192 && TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) == decl)
13193 {
13194 TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
13195 TREE_VEC_ELT (TREE_OPERAND (t, 1), 0) = var;
13196 }
13197 }
13198 }
13199
13200 gimplify_adjust_omp_clauses (pre_p, for_body,
13201 &OMP_FOR_CLAUSES (orig_for_stmt),
13202 TREE_CODE (orig_for_stmt));
13203
13204 int kind;
13205 switch (TREE_CODE (orig_for_stmt))
13206 {
13207 case OMP_FOR: kind = GF_OMP_FOR_KIND_FOR; break;
13208 case OMP_SIMD: kind = GF_OMP_FOR_KIND_SIMD; break;
13209 case OMP_DISTRIBUTE: kind = GF_OMP_FOR_KIND_DISTRIBUTE; break;
13210 case OMP_TASKLOOP: kind = GF_OMP_FOR_KIND_TASKLOOP; break;
13211 case OACC_LOOP: kind = GF_OMP_FOR_KIND_OACC_LOOP; break;
13212 default:
13213 gcc_unreachable ();
13214 }
13215 if (loop_p && kind == GF_OMP_FOR_KIND_SIMD)
13216 {
13217 gimplify_seq_add_seq (pre_p, for_pre_body);
13218 for_pre_body = NULL;
13219 }
13220 gfor = gimple_build_omp_for (for_body, kind, OMP_FOR_CLAUSES (orig_for_stmt),
13221 TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)),
13222 for_pre_body);
13223 if (orig_for_stmt != for_stmt)
13224 gimple_omp_for_set_combined_p (gfor, true);
13225 if (gimplify_omp_ctxp
13226 && (gimplify_omp_ctxp->combined_loop
13227 || (gimplify_omp_ctxp->region_type == ORT_COMBINED_PARALLEL
13228 && gimplify_omp_ctxp->outer_context
13229 && gimplify_omp_ctxp->outer_context->combined_loop)))
13230 {
13231 gimple_omp_for_set_combined_into_p (gfor, true);
13232 if (gimplify_omp_ctxp->combined_loop)
13233 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_SIMD);
13234 else
13235 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_FOR);
13236 }
13237
13238 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
13239 {
13240 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
13241 gimple_omp_for_set_index (gfor, i, TREE_OPERAND (t, 0));
13242 gimple_omp_for_set_initial (gfor, i, TREE_OPERAND (t, 1));
13243 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
13244 gimple_omp_for_set_cond (gfor, i, TREE_CODE (t));
13245 gimple_omp_for_set_final (gfor, i, TREE_OPERAND (t, 1));
13246 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
13247 gimple_omp_for_set_incr (gfor, i, TREE_OPERAND (t, 1));
13248 }
13249
13250 /* OMP_TASKLOOP is gimplified as two GIMPLE_OMP_FOR taskloop
13251 constructs with a GIMPLE_OMP_TASK sandwiched in between them.
13252 The outer taskloop computes the number of iterations and the counts
13253 for collapsed loops, and holds the taskloop-specific clauses.
13254 The task construct represents the effect of data sharing on the
13255 explicit task it creates, and the inner taskloop expands
13256 the static loop inside of the explicit task construct. */
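/* Schematically, the result looks roughly like

     GIMPLE_OMP_FOR (taskloop, outer clauses)    <- gforo
       GIMPLE_OMP_TASK (task clauses)            <- g
         GIMPLE_OMP_FOR (taskloop, for clauses)  <- gfor
           loop body

   with the clause redistribution performed by the switch below. */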
13257 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
13258 {
13259 tree *gfor_clauses_ptr = gimple_omp_for_clauses_ptr (gfor);
13260 tree task_clauses = NULL_TREE;
13261 tree c = *gfor_clauses_ptr;
13262 tree *gtask_clauses_ptr = &task_clauses;
13263 tree outer_for_clauses = NULL_TREE;
13264 tree *gforo_clauses_ptr = &outer_for_clauses;
13265 bitmap lastprivate_uids = NULL;
13266 if (omp_find_clause (c, OMP_CLAUSE_ALLOCATE))
13267 {
13268 c = omp_find_clause (c, OMP_CLAUSE_LASTPRIVATE);
13269 if (c)
13270 {
13271 lastprivate_uids = BITMAP_ALLOC (NULL);
13272 for (; c; c = omp_find_clause (OMP_CLAUSE_CHAIN (c),
13273 OMP_CLAUSE_LASTPRIVATE))
13274 bitmap_set_bit (lastprivate_uids,
13275 DECL_UID (OMP_CLAUSE_DECL (c)));
13276 }
13277 c = *gfor_clauses_ptr;
13278 }
13279 for (; c; c = OMP_CLAUSE_CHAIN (c))
13280 switch (OMP_CLAUSE_CODE (c))
13281 {
13282 /* These clauses are allowed on task; move them there. */
13283 case OMP_CLAUSE_SHARED:
13284 case OMP_CLAUSE_FIRSTPRIVATE:
13285 case OMP_CLAUSE_DEFAULT:
13286 case OMP_CLAUSE_IF:
13287 case OMP_CLAUSE_UNTIED:
13288 case OMP_CLAUSE_FINAL:
13289 case OMP_CLAUSE_MERGEABLE:
13290 case OMP_CLAUSE_PRIORITY:
13291 case OMP_CLAUSE_REDUCTION:
13292 case OMP_CLAUSE_IN_REDUCTION:
13293 *gtask_clauses_ptr = c;
13294 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
13295 break;
13296 case OMP_CLAUSE_PRIVATE:
13297 if (OMP_CLAUSE_PRIVATE_TASKLOOP_IV (c))
13298 {
13299 /* We want private on outer for and firstprivate
13300 on task. */
13301 *gtask_clauses_ptr
13302 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
13303 OMP_CLAUSE_FIRSTPRIVATE);
13304 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
13305 lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL,
13306 openacc);
13307 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
13308 *gforo_clauses_ptr = c;
13309 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
13310 }
13311 else
13312 {
13313 *gtask_clauses_ptr = c;
13314 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
13315 }
13316 break;
13317 /* These clauses go into outer taskloop clauses. */
13318 case OMP_CLAUSE_GRAINSIZE:
13319 case OMP_CLAUSE_NUM_TASKS:
13320 case OMP_CLAUSE_NOGROUP:
13321 *gforo_clauses_ptr = c;
13322 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
13323 break;
13324 /* Collapse clause we duplicate on both taskloops. */
13325 case OMP_CLAUSE_COLLAPSE:
13326 *gfor_clauses_ptr = c;
13327 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
13328 *gforo_clauses_ptr = copy_node (c);
13329 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
13330 break;
13331 /* For lastprivate, keep the clause on the inner taskloop, and add
13332 a shared clause on the task. If the same decl is also firstprivate,
13333 also add a firstprivate clause on the inner taskloop. */
13334 case OMP_CLAUSE_LASTPRIVATE:
13335 if (OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c))
13336 {
13337 /* For taskloop C++ lastprivate IVs, we want:
13338 1) private on outer taskloop
13339 2) firstprivate and shared on task
13340 3) lastprivate on inner taskloop */
13341 *gtask_clauses_ptr
13342 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
13343 OMP_CLAUSE_FIRSTPRIVATE);
13344 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
13345 lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL,
13346 openacc);
13347 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
13348 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c) = 1;
13349 *gforo_clauses_ptr = build_omp_clause (OMP_CLAUSE_LOCATION (c),
13350 OMP_CLAUSE_PRIVATE);
13351 OMP_CLAUSE_DECL (*gforo_clauses_ptr) = OMP_CLAUSE_DECL (c);
13352 OMP_CLAUSE_PRIVATE_TASKLOOP_IV (*gforo_clauses_ptr) = 1;
13353 TREE_TYPE (*gforo_clauses_ptr) = TREE_TYPE (c);
13354 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
13355 }
13356 *gfor_clauses_ptr = c;
13357 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
13358 *gtask_clauses_ptr
13359 = build_omp_clause (OMP_CLAUSE_LOCATION (c), OMP_CLAUSE_SHARED);
13360 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
13361 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
13362 OMP_CLAUSE_SHARED_FIRSTPRIVATE (*gtask_clauses_ptr) = 1;
13363 gtask_clauses_ptr
13364 = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
13365 break;
13366 /* The allocate clause we duplicate on the task and the inner taskloop
13367 if the decl is lastprivate, otherwise we just put it on the task. */
13368 case OMP_CLAUSE_ALLOCATE:
13369 if (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)
13370 && DECL_P (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)))
13371 {
13372 /* Additionally, put firstprivate clause on task
13373 for the allocator if it is not constant. */
13374 *gtask_clauses_ptr
13375 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
13376 OMP_CLAUSE_FIRSTPRIVATE);
13377 OMP_CLAUSE_DECL (*gtask_clauses_ptr)
13378 = OMP_CLAUSE_ALLOCATE_ALLOCATOR (c);
13379 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
13380 }
13381 if (lastprivate_uids
13382 && bitmap_bit_p (lastprivate_uids,
13383 DECL_UID (OMP_CLAUSE_DECL (c))))
13384 {
13385 *gfor_clauses_ptr = c;
13386 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
13387 *gtask_clauses_ptr = copy_node (c);
13388 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
13389 }
13390 else
13391 {
13392 *gtask_clauses_ptr = c;
13393 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
13394 }
13395 break;
13396 default:
13397 gcc_unreachable ();
13398 }
13399 *gfor_clauses_ptr = NULL_TREE;
13400 *gtask_clauses_ptr = NULL_TREE;
13401 *gforo_clauses_ptr = NULL_TREE;
13402 BITMAP_FREE (lastprivate_uids);
13403 gimple_set_location (gfor, input_location);
13404 g = gimple_build_bind (NULL_TREE, gfor, NULL_TREE);
13405 g = gimple_build_omp_task (g, task_clauses, NULL_TREE, NULL_TREE,
13406 NULL_TREE, NULL_TREE, NULL_TREE);
13407 gimple_set_location (g, input_location);
13408 gimple_omp_task_set_taskloop_p (g, true);
13409 g = gimple_build_bind (NULL_TREE, g, NULL_TREE);
13410 gomp_for *gforo
13411 = gimple_build_omp_for (g, GF_OMP_FOR_KIND_TASKLOOP, outer_for_clauses,
13412 gimple_omp_for_collapse (gfor),
13413 gimple_omp_for_pre_body (gfor));
13414 gimple_omp_for_set_pre_body (gfor, NULL);
13415 gimple_omp_for_set_combined_p (gforo, true);
13416 gimple_omp_for_set_combined_into_p (gfor, true);
13417 for (i = 0; i < (int) gimple_omp_for_collapse (gfor); i++)
13418 {
13419 tree type = TREE_TYPE (gimple_omp_for_index (gfor, i));
13420 tree v = create_tmp_var (type);
13421 gimple_omp_for_set_index (gforo, i, v);
13422 t = unshare_expr (gimple_omp_for_initial (gfor, i));
13423 gimple_omp_for_set_initial (gforo, i, t);
13424 gimple_omp_for_set_cond (gforo, i,
13425 gimple_omp_for_cond (gfor, i));
13426 t = unshare_expr (gimple_omp_for_final (gfor, i));
13427 gimple_omp_for_set_final (gforo, i, t);
13428 t = unshare_expr (gimple_omp_for_incr (gfor, i));
13429 gcc_assert (TREE_OPERAND (t, 0) == gimple_omp_for_index (gfor, i));
13430 TREE_OPERAND (t, 0) = v;
13431 gimple_omp_for_set_incr (gforo, i, t);
13432 t = build_omp_clause (input_location, OMP_CLAUSE_PRIVATE);
13433 OMP_CLAUSE_DECL (t) = v;
13434 OMP_CLAUSE_CHAIN (t) = gimple_omp_for_clauses (gforo);
13435 gimple_omp_for_set_clauses (gforo, t);
13436 if (OMP_FOR_NON_RECTANGULAR (for_stmt))
13437 {
13438 tree *p1 = NULL, *p2 = NULL;
13439 t = gimple_omp_for_initial (gforo, i);
13440 if (TREE_CODE (t) == TREE_VEC)
13441 p1 = &TREE_VEC_ELT (t, 0);
13442 t = gimple_omp_for_final (gforo, i);
13443 if (TREE_CODE (t) == TREE_VEC)
13444 {
13445 if (p1)
13446 p2 = &TREE_VEC_ELT (t, 0);
13447 else
13448 p1 = &TREE_VEC_ELT (t, 0);
13449 }
13450 if (p1)
13451 {
13452 int j;
13453 for (j = 0; j < i; j++)
13454 if (*p1 == gimple_omp_for_index (gfor, j))
13455 {
13456 *p1 = gimple_omp_for_index (gforo, j);
13457 if (p2)
13458 *p2 = *p1;
13459 break;
13460 }
13461 gcc_assert (j < i);
13462 }
13463 }
13464 }
13465 gimplify_seq_add_stmt (pre_p, gforo);
13466 }
13467 else
13468 gimplify_seq_add_stmt (pre_p, gfor);
13469
13470 if (TREE_CODE (orig_for_stmt) == OMP_FOR)
13471 {
13472 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
13473 unsigned lastprivate_conditional = 0;
13474 while (ctx
13475 && (ctx->region_type == ORT_TARGET_DATA
13476 || ctx->region_type == ORT_TASKGROUP))
13477 ctx = ctx->outer_context;
13478 if (ctx && (ctx->region_type & ORT_PARALLEL) != 0)
13479 for (tree c = gimple_omp_for_clauses (gfor);
13480 c; c = OMP_CLAUSE_CHAIN (c))
13481 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
13482 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
13483 ++lastprivate_conditional;
13484 if (lastprivate_conditional)
13485 {
13486 struct omp_for_data fd;
13487 omp_extract_for_data (gfor, &fd, NULL);
13488 tree type = build_array_type_nelts (unsigned_type_for (fd.iter_type),
13489 lastprivate_conditional);
13490 tree var = create_tmp_var_raw (type);
13491 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_);
13492 OMP_CLAUSE_DECL (c) = var;
13493 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (gfor);
13494 gimple_omp_for_set_clauses (gfor, c);
13495 omp_add_variable (ctx, var, GOVD_CONDTEMP | GOVD_SEEN);
13496 }
13497 }
13498 else if (TREE_CODE (orig_for_stmt) == OMP_SIMD)
13499 {
13500 unsigned lastprivate_conditional = 0;
13501 for (tree c = gimple_omp_for_clauses (gfor); c; c = OMP_CLAUSE_CHAIN (c))
13502 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
13503 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
13504 ++lastprivate_conditional;
13505 if (lastprivate_conditional)
13506 {
13507 struct omp_for_data fd;
13508 omp_extract_for_data (gfor, &fd, NULL);
13509 tree type = unsigned_type_for (fd.iter_type);
13510 while (lastprivate_conditional--)
13511 {
13512 tree c = build_omp_clause (UNKNOWN_LOCATION,
13513 OMP_CLAUSE__CONDTEMP_);
13514 OMP_CLAUSE_DECL (c) = create_tmp_var (type);
13515 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (gfor);
13516 gimple_omp_for_set_clauses (gfor, c);
13517 }
13518 }
13519 }
13520
13521 if (ret != GS_ALL_DONE)
13522 return GS_ERROR;
13523 *expr_p = NULL_TREE;
13524 return GS_ALL_DONE;
13525 }
13526
13527 /* Helper for gimplify_omp_loop, called through walk_tree. */
13528
13529 static tree
13530 note_no_context_vars (tree *tp, int *, void *data)
13531 {
13532 if (VAR_P (*tp)
13533 && DECL_CONTEXT (*tp) == NULL_TREE
13534 && !is_global_var (*tp))
13535 {
13536 vec<tree> *d = (vec<tree> *) data;
13537 d->safe_push (*tp);
13538 DECL_CONTEXT (*tp) = current_function_decl;
13539 }
13540 return NULL_TREE;
13541 }
13542
13543 /* Gimplify the gross structure of an OMP_LOOP statement. */
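/* In short: the loop construct is rewritten into an OMP_SIMD and then,
   depending on the effective bind kind, wrapped in additional enclosing
   constructs by the `last'-controlled passes below: none for
   bind(thread), one level for bind(parallel), two for bind(teams). */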
13544
13545 static enum gimplify_status
13546 gimplify_omp_loop (tree *expr_p, gimple_seq *pre_p)
13547 {
13548 tree for_stmt = *expr_p;
13549 tree clauses = OMP_FOR_CLAUSES (for_stmt);
13550 struct gimplify_omp_ctx *octx = gimplify_omp_ctxp;
13551 enum omp_clause_bind_kind kind = OMP_CLAUSE_BIND_THREAD;
13552 int i;
13553
13554 /* If order is not present, the behavior is as if order(concurrent)
13555 appeared. */
13556 tree order = omp_find_clause (clauses, OMP_CLAUSE_ORDER);
13557 if (order == NULL_TREE)
13558 {
13559 order = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_ORDER);
13560 OMP_CLAUSE_CHAIN (order) = clauses;
13561 OMP_FOR_CLAUSES (for_stmt) = clauses = order;
13562 }
13563
13564 tree bind = omp_find_clause (clauses, OMP_CLAUSE_BIND);
13565 if (bind == NULL_TREE)
13566 {
13567 if (!flag_openmp) /* flag_openmp_simd */
13568 ;
13569 else if (octx && (octx->region_type & ORT_TEAMS) != 0)
13570 kind = OMP_CLAUSE_BIND_TEAMS;
13571 else if (octx && (octx->region_type & ORT_PARALLEL) != 0)
13572 kind = OMP_CLAUSE_BIND_PARALLEL;
13573 else
13574 {
13575 for (; octx; octx = octx->outer_context)
13576 {
13577 if ((octx->region_type & ORT_ACC) != 0
13578 || octx->region_type == ORT_NONE
13579 || octx->region_type == ORT_IMPLICIT_TARGET)
13580 continue;
13581 break;
13582 }
13583 if (octx == NULL && !in_omp_construct)
13584 error_at (EXPR_LOCATION (for_stmt),
13585 "%<bind%> clause not specified on a %<loop%> "
13586 "construct not nested inside another OpenMP construct");
13587 }
13588 bind = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_BIND);
13589 OMP_CLAUSE_CHAIN (bind) = clauses;
13590 OMP_CLAUSE_BIND_KIND (bind) = kind;
13591 OMP_FOR_CLAUSES (for_stmt) = bind;
13592 }
13593 else
13594 switch (OMP_CLAUSE_BIND_KIND (bind))
13595 {
13596 case OMP_CLAUSE_BIND_THREAD:
13597 break;
13598 case OMP_CLAUSE_BIND_PARALLEL:
13599 if (!flag_openmp) /* flag_openmp_simd */
13600 {
13601 OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
13602 break;
13603 }
13604 for (; octx; octx = octx->outer_context)
13605 if (octx->region_type == ORT_SIMD
13606 && omp_find_clause (octx->clauses, OMP_CLAUSE_BIND) == NULL_TREE)
13607 {
13608 error_at (EXPR_LOCATION (for_stmt),
13609 "%<bind(parallel)%> on a %<loop%> construct nested "
13610 "inside %<simd%> construct");
13611 OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
13612 break;
13613 }
13614 kind = OMP_CLAUSE_BIND_PARALLEL;
13615 break;
13616 case OMP_CLAUSE_BIND_TEAMS:
13617 if (!flag_openmp) /* flag_openmp_simd */
13618 {
13619 OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
13620 break;
13621 }
13622 if ((octx
13623 && octx->region_type != ORT_IMPLICIT_TARGET
13624 && octx->region_type != ORT_NONE
13625 && (octx->region_type & ORT_TEAMS) == 0)
13626 || in_omp_construct)
13627 {
13628 error_at (EXPR_LOCATION (for_stmt),
13629 "%<bind(teams)%> on a %<loop%> region not strictly "
13630 "nested inside of a %<teams%> region");
13631 OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
13632 break;
13633 }
13634 kind = OMP_CLAUSE_BIND_TEAMS;
13635 break;
13636 default:
13637 gcc_unreachable ();
13638 }
13639
13640 for (tree *pc = &OMP_FOR_CLAUSES (for_stmt); *pc; )
13641 switch (OMP_CLAUSE_CODE (*pc))
13642 {
13643 case OMP_CLAUSE_REDUCTION:
13644 if (OMP_CLAUSE_REDUCTION_INSCAN (*pc))
13645 {
13646 error_at (OMP_CLAUSE_LOCATION (*pc),
13647 "%<inscan%> %<reduction%> clause on "
13648 "%qs construct", "loop");
13649 OMP_CLAUSE_REDUCTION_INSCAN (*pc) = 0;
13650 }
13651 if (OMP_CLAUSE_REDUCTION_TASK (*pc))
13652 {
13653 error_at (OMP_CLAUSE_LOCATION (*pc),
13654 "invalid %<task%> reduction modifier on construct "
13655 "other than %<parallel%>, %qs or %<sections%>",
13656 lang_GNU_Fortran () ? "do" : "for");
13657 OMP_CLAUSE_REDUCTION_TASK (*pc) = 0;
13658 }
13659 pc = &OMP_CLAUSE_CHAIN (*pc);
13660 break;
13661 case OMP_CLAUSE_LASTPRIVATE:
13662 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
13663 {
13664 tree t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
13665 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
13666 if (OMP_CLAUSE_DECL (*pc) == TREE_OPERAND (t, 0))
13667 break;
13668 if (OMP_FOR_ORIG_DECLS (for_stmt)
13669 && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt),
13670 i)) == TREE_LIST
13671 && TREE_PURPOSE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt),
13672 i)))
13673 {
13674 tree orig = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
13675 if (OMP_CLAUSE_DECL (*pc) == TREE_PURPOSE (orig))
13676 break;
13677 }
13678 }
13679 if (i == TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)))
13680 {
13681 error_at (OMP_CLAUSE_LOCATION (*pc),
13682 "%<lastprivate%> clause on a %<loop%> construct refers "
13683 "to a variable %qD which is not the loop iterator",
13684 OMP_CLAUSE_DECL (*pc));
13685 *pc = OMP_CLAUSE_CHAIN (*pc);
13686 break;
13687 }
13688 pc = &OMP_CLAUSE_CHAIN (*pc);
13689 break;
13690 default:
13691 pc = &OMP_CLAUSE_CHAIN (*pc);
13692 break;
13693 }
13694
13695 TREE_SET_CODE (for_stmt, OMP_SIMD);
13696
13697 int last;
13698 switch (kind)
13699 {
13700 case OMP_CLAUSE_BIND_THREAD: last = 0; break;
13701 case OMP_CLAUSE_BIND_PARALLEL: last = 1; break;
13702 case OMP_CLAUSE_BIND_TEAMS: last = 2; break;
13703 }
13704 for (int pass = 1; pass <= last; pass++)
13705 {
13706 if (pass == 2)
13707 {
13708 tree bind = build3 (BIND_EXPR, void_type_node, NULL, NULL,
13709 make_node (BLOCK));
13710 append_to_statement_list (*expr_p, &BIND_EXPR_BODY (bind));
13711 *expr_p = make_node (OMP_PARALLEL);
13712 TREE_TYPE (*expr_p) = void_type_node;
13713 OMP_PARALLEL_BODY (*expr_p) = bind;
13714 OMP_PARALLEL_COMBINED (*expr_p) = 1;
13715 SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (for_stmt));
13716 tree *pc = &OMP_PARALLEL_CLAUSES (*expr_p);
13717 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
13718 if (OMP_FOR_ORIG_DECLS (for_stmt)
13719 && (TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i))
13720 == TREE_LIST))
13721 {
13722 tree elt = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
13723 if (TREE_PURPOSE (elt) && TREE_VALUE (elt))
13724 {
13725 *pc = build_omp_clause (UNKNOWN_LOCATION,
13726 OMP_CLAUSE_FIRSTPRIVATE);
13727 OMP_CLAUSE_DECL (*pc) = TREE_VALUE (elt);
13728 pc = &OMP_CLAUSE_CHAIN (*pc);
13729 }
13730 }
13731 }
13732 tree t = make_node (pass == 2 ? OMP_DISTRIBUTE : OMP_FOR);
13733 tree *pc = &OMP_FOR_CLAUSES (t);
13734 TREE_TYPE (t) = void_type_node;
13735 OMP_FOR_BODY (t) = *expr_p;
13736 SET_EXPR_LOCATION (t, EXPR_LOCATION (for_stmt));
13737 for (tree c = OMP_FOR_CLAUSES (for_stmt); c; c = OMP_CLAUSE_CHAIN (c))
13738 switch (OMP_CLAUSE_CODE (c))
13739 {
13740 case OMP_CLAUSE_BIND:
13741 case OMP_CLAUSE_ORDER:
13742 case OMP_CLAUSE_COLLAPSE:
13743 *pc = copy_node (c);
13744 pc = &OMP_CLAUSE_CHAIN (*pc);
13745 break;
13746 case OMP_CLAUSE_PRIVATE:
13747 case OMP_CLAUSE_FIRSTPRIVATE:
13748 /* Only needed on innermost. */
13749 break;
13750 case OMP_CLAUSE_LASTPRIVATE:
13751 if (OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c) && pass != last)
13752 {
13753 *pc = build_omp_clause (OMP_CLAUSE_LOCATION (c),
13754 OMP_CLAUSE_FIRSTPRIVATE);
13755 OMP_CLAUSE_DECL (*pc) = OMP_CLAUSE_DECL (c);
13756 lang_hooks.decls.omp_finish_clause (*pc, NULL, false);
13757 pc = &OMP_CLAUSE_CHAIN (*pc);
13758 }
13759 *pc = copy_node (c);
13760 OMP_CLAUSE_LASTPRIVATE_STMT (*pc) = NULL_TREE;
13761 TREE_TYPE (*pc) = unshare_expr (TREE_TYPE (c));
13762 if (OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c))
13763 {
13764 if (pass != last)
13765 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (*pc) = 1;
13766 else
13767 lang_hooks.decls.omp_finish_clause (*pc, NULL, false);
13768 OMP_CLAUSE_LASTPRIVATE_LOOP_IV (*pc) = 0;
13769 }
13770 pc = &OMP_CLAUSE_CHAIN (*pc);
13771 break;
13772 case OMP_CLAUSE_REDUCTION:
13773 *pc = copy_node (c);
13774 OMP_CLAUSE_DECL (*pc) = unshare_expr (OMP_CLAUSE_DECL (c));
13775 TREE_TYPE (*pc) = unshare_expr (TREE_TYPE (c));
13776 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (*pc))
13777 {
13778 auto_vec<tree> no_context_vars;
13779 int walk_subtrees = 0;
13780 note_no_context_vars (&OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
13781 &walk_subtrees, &no_context_vars);
13782 if (tree p = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c))
13783 note_no_context_vars (&p, &walk_subtrees, &no_context_vars);
13784 walk_tree_without_duplicates (&OMP_CLAUSE_REDUCTION_INIT (c),
13785 note_no_context_vars,
13786 &no_context_vars);
13787 walk_tree_without_duplicates (&OMP_CLAUSE_REDUCTION_MERGE (c),
13788 note_no_context_vars,
13789 &no_context_vars);
13790
13791 OMP_CLAUSE_REDUCTION_PLACEHOLDER (*pc)
13792 = copy_node (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c));
13793 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc))
13794 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc)
13795 = copy_node (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c));
13796
13797 hash_map<tree, tree> decl_map;
13798 decl_map.put (OMP_CLAUSE_DECL (c), OMP_CLAUSE_DECL (c));
13799 decl_map.put (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
13800 OMP_CLAUSE_REDUCTION_PLACEHOLDER (*pc));
13801 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc))
13802 decl_map.put (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
13803 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc));
13804
13805 copy_body_data id;
13806 memset (&id, 0, sizeof (id));
13807 id.src_fn = current_function_decl;
13808 id.dst_fn = current_function_decl;
13809 id.src_cfun = cfun;
13810 id.decl_map = &decl_map;
13811 id.copy_decl = copy_decl_no_change;
13812 id.transform_call_graph_edges = CB_CGE_DUPLICATE;
13813 id.transform_new_cfg = true;
13814 id.transform_return_to_modify = false;
13815 id.eh_lp_nr = 0;
13816 walk_tree (&OMP_CLAUSE_REDUCTION_INIT (*pc), copy_tree_body_r,
13817 &id, NULL);
13818 walk_tree (&OMP_CLAUSE_REDUCTION_MERGE (*pc), copy_tree_body_r,
13819 &id, NULL);
13820
13821 for (tree d : no_context_vars)
13822 {
13823 DECL_CONTEXT (d) = NULL_TREE;
13824 DECL_CONTEXT (*decl_map.get (d)) = NULL_TREE;
13825 }
13826 }
13827 else
13828 {
13829 OMP_CLAUSE_REDUCTION_INIT (*pc)
13830 = unshare_expr (OMP_CLAUSE_REDUCTION_INIT (c));
13831 OMP_CLAUSE_REDUCTION_MERGE (*pc)
13832 = unshare_expr (OMP_CLAUSE_REDUCTION_MERGE (c));
13833 }
13834 pc = &OMP_CLAUSE_CHAIN (*pc);
13835 break;
13836 default:
13837 gcc_unreachable ();
13838 }
13839 *pc = NULL_TREE;
13840 *expr_p = t;
13841 }
13842 return gimplify_expr (expr_p, pre_p, NULL, is_gimple_stmt, fb_none);
13843 }
13844
13845
13846 /* Helper function of optimize_target_teams, find OMP_TEAMS inside
13847 of OMP_TARGET's body. */
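/* E.g. (a sketch) for

     #pragma omp target
     {
       #pragma omp teams
       ;
     }

   the walk looks through BIND_EXPRs and STATEMENT_LISTs only and
   returns the OMP_TEAMS node; any other statement kind stops the
   search, as the teams construct must be strictly nested.  */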
13848
13849 static tree
13850 find_omp_teams (tree *tp, int *walk_subtrees, void *)
13851 {
13852 *walk_subtrees = 0;
13853 switch (TREE_CODE (*tp))
13854 {
13855 case OMP_TEAMS:
13856 return *tp;
13857 case BIND_EXPR:
13858 case STATEMENT_LIST:
13859 *walk_subtrees = 1;
13860 break;
13861 default:
13862 break;
13863 }
13864 return NULL_TREE;
13865 }
13866
13867 /* Helper function of optimize_target_teams, determine if the expression
13868 can be computed safely before the target construct on the host. */
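/* E.g. (a sketch) with an integral N that is firstprivate on the
   target, an expression such as N * 4 - 1 is computable on the host,
   whereas foo (N) or *p falls to the default case below and is
   rejected.  */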
13869
13870 static tree
13871 computable_teams_clause (tree *tp, int *walk_subtrees, void *)
13872 {
13873 splay_tree_node n;
13874
13875 if (TYPE_P (*tp))
13876 {
13877 *walk_subtrees = 0;
13878 return NULL_TREE;
13879 }
13880 switch (TREE_CODE (*tp))
13881 {
13882 case VAR_DECL:
13883 case PARM_DECL:
13884 case RESULT_DECL:
13885 *walk_subtrees = 0;
13886 if (error_operand_p (*tp)
13887 || !INTEGRAL_TYPE_P (TREE_TYPE (*tp))
13888 || DECL_HAS_VALUE_EXPR_P (*tp)
13889 || DECL_THREAD_LOCAL_P (*tp)
13890 || TREE_SIDE_EFFECTS (*tp)
13891 || TREE_THIS_VOLATILE (*tp))
13892 return *tp;
13893 if (is_global_var (*tp)
13894 && (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (*tp))
13895 || lookup_attribute ("omp declare target link",
13896 DECL_ATTRIBUTES (*tp))))
13897 return *tp;
13898 if (VAR_P (*tp)
13899 && !DECL_SEEN_IN_BIND_EXPR_P (*tp)
13900 && !is_global_var (*tp)
13901 && decl_function_context (*tp) == current_function_decl)
13902 return *tp;
13903 n = splay_tree_lookup (gimplify_omp_ctxp->variables,
13904 (splay_tree_key) *tp);
13905 if (n == NULL)
13906 {
13907 if (gimplify_omp_ctxp->defaultmap[GDMK_SCALAR] & GOVD_FIRSTPRIVATE)
13908 return NULL_TREE;
13909 return *tp;
13910 }
13911 else if (n->value & GOVD_LOCAL)
13912 return *tp;
13913 else if (n->value & GOVD_FIRSTPRIVATE)
13914 return NULL_TREE;
13915 else if ((n->value & (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
13916 == (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
13917 return NULL_TREE;
13918 return *tp;
13919 case INTEGER_CST:
13920 if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
13921 return *tp;
13922 return NULL_TREE;
13923 case TARGET_EXPR:
13924 if (TARGET_EXPR_INITIAL (*tp)
13925 || TREE_CODE (TARGET_EXPR_SLOT (*tp)) != VAR_DECL)
13926 return *tp;
13927 return computable_teams_clause (&TARGET_EXPR_SLOT (*tp),
13928 walk_subtrees, NULL);
13929 /* Allow some reasonable subset of integral arithmetic. */
13930 case PLUS_EXPR:
13931 case MINUS_EXPR:
13932 case MULT_EXPR:
13933 case TRUNC_DIV_EXPR:
13934 case CEIL_DIV_EXPR:
13935 case FLOOR_DIV_EXPR:
13936 case ROUND_DIV_EXPR:
13937 case TRUNC_MOD_EXPR:
13938 case CEIL_MOD_EXPR:
13939 case FLOOR_MOD_EXPR:
13940 case ROUND_MOD_EXPR:
13941 case RDIV_EXPR:
13942 case EXACT_DIV_EXPR:
13943 case MIN_EXPR:
13944 case MAX_EXPR:
13945 case LSHIFT_EXPR:
13946 case RSHIFT_EXPR:
13947 case BIT_IOR_EXPR:
13948 case BIT_XOR_EXPR:
13949 case BIT_AND_EXPR:
13950 case NEGATE_EXPR:
13951 case ABS_EXPR:
13952 case BIT_NOT_EXPR:
13953 case NON_LVALUE_EXPR:
13954 CASE_CONVERT:
13955 if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
13956 return *tp;
13957 return NULL_TREE;
13958 /* And disallow anything else, except for comparisons. */
13959 default:
13960 if (COMPARISON_CLASS_P (*tp))
13961 return NULL_TREE;
13962 return *tp;
13963 }
13964 }
13965
13966 /* Try to determine if the num_teams and/or thread_limit expressions
13967 can have their values determined already before entering the
13968 target construct.
13969 INTEGER_CSTs trivially can; so can integral decls that are
13970 firstprivate (explicitly or implicitly) or explicitly mapped with
13971 map(always, to:) or map(always, tofrom:) on the target region,
13972 and expressions involving simple arithmetic on those. Function
13973 calls and dereferences are not ok, etc.
13974 Add NUM_TEAMS and THREAD_LIMIT clauses to the OMP_CLAUSES of
13975 EXPR based on what we find:
13976 0 stands for clause not specified at all, use implementation default
13977 -1 stands for value that can't be determined easily before entering
13978 the target construct.
13979 If teams construct is not present at all, use 1 for num_teams
13980 and 0 for thread_limit (only one team is involved, and the thread
13981 limit is implementation defined). */
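/* A sketch of the effect (illustrative, not verbatim output): for

     #pragma omp target
     #pragma omp teams num_teams(8) thread_limit(n)

   with a host-computable n, the target construct gains num_teams(8)
   and thread_limit(n) clauses evaluated on the host; had n been e.g.
   a function call, thread_limit(-1) would be recorded instead.  */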
13982
13983 static void
13984 optimize_target_teams (tree target, gimple_seq *pre_p)
13985 {
13986 tree body = OMP_BODY (target);
13987 tree teams = walk_tree (&body, find_omp_teams, NULL, NULL);
13988 tree num_teams_lower = NULL_TREE;
13989 tree num_teams_upper = integer_zero_node;
13990 tree thread_limit = integer_zero_node;
13991 location_t num_teams_loc = EXPR_LOCATION (target);
13992 location_t thread_limit_loc = EXPR_LOCATION (target);
13993 tree c, *p, expr;
13994 struct gimplify_omp_ctx *target_ctx = gimplify_omp_ctxp;
13995
13996 if (teams == NULL_TREE)
13997 num_teams_upper = integer_one_node;
13998 else
13999 for (c = OMP_TEAMS_CLAUSES (teams); c; c = OMP_CLAUSE_CHAIN (c))
14000 {
14001 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_NUM_TEAMS)
14002 {
14003 p = &num_teams_upper;
14004 num_teams_loc = OMP_CLAUSE_LOCATION (c);
14005 if (OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c))
14006 {
14007 expr = OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c);
14008 if (TREE_CODE (expr) == INTEGER_CST)
14009 num_teams_lower = expr;
14010 else if (walk_tree (&expr, computable_teams_clause,
14011 NULL, NULL))
14012 num_teams_lower = integer_minus_one_node;
14013 else
14014 {
14015 num_teams_lower = expr;
14016 gimplify_omp_ctxp = gimplify_omp_ctxp->outer_context;
14017 if (gimplify_expr (&num_teams_lower, pre_p, NULL,
14018 is_gimple_val, fb_rvalue, false)
14019 == GS_ERROR)
14020 {
14021 gimplify_omp_ctxp = target_ctx;
14022 num_teams_lower = integer_minus_one_node;
14023 }
14024 else
14025 {
14026 gimplify_omp_ctxp = target_ctx;
14027 if (!DECL_P (expr) && TREE_CODE (expr) != TARGET_EXPR)
14028 OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c)
14029 = num_teams_lower;
14030 }
14031 }
14032 }
14033 }
14034 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREAD_LIMIT)
14035 {
14036 p = &thread_limit;
14037 thread_limit_loc = OMP_CLAUSE_LOCATION (c);
14038 }
14039 else
14040 continue;
14041 expr = OMP_CLAUSE_OPERAND (c, 0);
14042 if (TREE_CODE (expr) == INTEGER_CST)
14043 {
14044 *p = expr;
14045 continue;
14046 }
14047 if (walk_tree (&expr, computable_teams_clause, NULL, NULL))
14048 {
14049 *p = integer_minus_one_node;
14050 continue;
14051 }
14052 *p = expr;
14053 gimplify_omp_ctxp = gimplify_omp_ctxp->outer_context;
14054 if (gimplify_expr (p, pre_p, NULL, is_gimple_val, fb_rvalue, false)
14055 == GS_ERROR)
14056 {
14057 gimplify_omp_ctxp = target_ctx;
14058 *p = integer_minus_one_node;
14059 continue;
14060 }
14061 gimplify_omp_ctxp = target_ctx;
14062 if (!DECL_P (expr) && TREE_CODE (expr) != TARGET_EXPR)
14063 OMP_CLAUSE_OPERAND (c, 0) = *p;
14064 }
14065 if (!omp_find_clause (OMP_TARGET_CLAUSES (target), OMP_CLAUSE_THREAD_LIMIT))
14066 {
14067 c = build_omp_clause (thread_limit_loc, OMP_CLAUSE_THREAD_LIMIT);
14068 OMP_CLAUSE_THREAD_LIMIT_EXPR (c) = thread_limit;
14069 OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
14070 OMP_TARGET_CLAUSES (target) = c;
14071 }
14072 c = build_omp_clause (num_teams_loc, OMP_CLAUSE_NUM_TEAMS);
14073 OMP_CLAUSE_NUM_TEAMS_UPPER_EXPR (c) = num_teams_upper;
14074 OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (c) = num_teams_lower;
14075 OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
14076 OMP_TARGET_CLAUSES (target) = c;
14077 }
14078
14079 /* Gimplify the gross structure of several OMP constructs. */
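/* For example (a sketch), "#pragma omp single" maps to ORT_WORKSHARE
   and is emitted as a GIMPLE_OMP_SINGLE statement, while
   "#pragma omp target data" additionally has its gimplified body
   wrapped in a GIMPLE_TRY_FINALLY whose cleanup calls
   GOMP_target_end_data.  */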
14080
14081 static void
14082 gimplify_omp_workshare (tree *expr_p, gimple_seq *pre_p)
14083 {
14084 tree expr = *expr_p;
14085 gimple *stmt;
14086 gimple_seq body = NULL;
14087 enum omp_region_type ort;
14088
14089 switch (TREE_CODE (expr))
14090 {
14091 case OMP_SECTIONS:
14092 case OMP_SINGLE:
14093 ort = ORT_WORKSHARE;
14094 break;
14095 case OMP_SCOPE:
14096 ort = ORT_TASKGROUP;
14097 break;
14098 case OMP_TARGET:
14099 ort = OMP_TARGET_COMBINED (expr) ? ORT_COMBINED_TARGET : ORT_TARGET;
14100 break;
14101 case OACC_KERNELS:
14102 ort = ORT_ACC_KERNELS;
14103 break;
14104 case OACC_PARALLEL:
14105 ort = ORT_ACC_PARALLEL;
14106 break;
14107 case OACC_SERIAL:
14108 ort = ORT_ACC_SERIAL;
14109 break;
14110 case OACC_DATA:
14111 ort = ORT_ACC_DATA;
14112 break;
14113 case OMP_TARGET_DATA:
14114 ort = ORT_TARGET_DATA;
14115 break;
14116 case OMP_TEAMS:
14117 ort = OMP_TEAMS_COMBINED (expr) ? ORT_COMBINED_TEAMS : ORT_TEAMS;
14118 if (gimplify_omp_ctxp == NULL
14119 || gimplify_omp_ctxp->region_type == ORT_IMPLICIT_TARGET)
14120 ort = (enum omp_region_type) (ort | ORT_HOST_TEAMS);
14121 break;
14122 case OACC_HOST_DATA:
14123 ort = ORT_ACC_HOST_DATA;
14124 break;
14125 default:
14126 gcc_unreachable ();
14127 }
14128
14129 bool save_in_omp_construct = in_omp_construct;
14130 if ((ort & ORT_ACC) == 0)
14131 in_omp_construct = false;
14132 gimplify_scan_omp_clauses (&OMP_CLAUSES (expr), pre_p, ort,
14133 TREE_CODE (expr));
14134 if (TREE_CODE (expr) == OMP_TARGET)
14135 optimize_target_teams (expr, pre_p);
14136 if ((ort & (ORT_TARGET | ORT_TARGET_DATA)) != 0
14137 || (ort & ORT_HOST_TEAMS) == ORT_HOST_TEAMS)
14138 {
14139 push_gimplify_context ();
14140 gimple *g = gimplify_and_return_first (OMP_BODY (expr), &body);
14141 if (gimple_code (g) == GIMPLE_BIND)
14142 pop_gimplify_context (g);
14143 else
14144 pop_gimplify_context (NULL);
14145 if ((ort & ORT_TARGET_DATA) != 0)
14146 {
14147 enum built_in_function end_ix;
14148 switch (TREE_CODE (expr))
14149 {
14150 case OACC_DATA:
14151 case OACC_HOST_DATA:
14152 end_ix = BUILT_IN_GOACC_DATA_END;
14153 break;
14154 case OMP_TARGET_DATA:
14155 end_ix = BUILT_IN_GOMP_TARGET_END_DATA;
14156 break;
14157 default:
14158 gcc_unreachable ();
14159 }
14160 tree fn = builtin_decl_explicit (end_ix);
14161 g = gimple_build_call (fn, 0);
14162 gimple_seq cleanup = NULL;
14163 gimple_seq_add_stmt (&cleanup, g);
14164 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
14165 body = NULL;
14166 gimple_seq_add_stmt (&body, g);
14167 }
14168 }
14169 else
14170 gimplify_and_add (OMP_BODY (expr), &body);
14171 gimplify_adjust_omp_clauses (pre_p, body, &OMP_CLAUSES (expr),
14172 TREE_CODE (expr));
14173 in_omp_construct = save_in_omp_construct;
14174
14175 switch (TREE_CODE (expr))
14176 {
14177 case OACC_DATA:
14178 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_DATA,
14179 OMP_CLAUSES (expr));
14180 break;
14181 case OACC_HOST_DATA:
14182 if (omp_find_clause (OMP_CLAUSES (expr), OMP_CLAUSE_IF_PRESENT))
14183 {
14184 for (tree c = OMP_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
14185 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR)
14186 OMP_CLAUSE_USE_DEVICE_PTR_IF_PRESENT (c) = 1;
14187 }
14188
14189 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_HOST_DATA,
14190 OMP_CLAUSES (expr));
14191 break;
14192 case OACC_KERNELS:
14193 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_KERNELS,
14194 OMP_CLAUSES (expr));
14195 break;
14196 case OACC_PARALLEL:
14197 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_PARALLEL,
14198 OMP_CLAUSES (expr));
14199 break;
14200 case OACC_SERIAL:
14201 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_SERIAL,
14202 OMP_CLAUSES (expr));
14203 break;
14204 case OMP_SECTIONS:
14205 stmt = gimple_build_omp_sections (body, OMP_CLAUSES (expr));
14206 break;
14207 case OMP_SINGLE:
14208 stmt = gimple_build_omp_single (body, OMP_CLAUSES (expr));
14209 break;
14210 case OMP_SCOPE:
14211 stmt = gimple_build_omp_scope (body, OMP_CLAUSES (expr));
14212 break;
14213 case OMP_TARGET:
14214 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_REGION,
14215 OMP_CLAUSES (expr));
14216 break;
14217 case OMP_TARGET_DATA:
14218 /* Put use_device_{ptr,addr} clauses last, as map clauses are supposed
14219 to be evaluated before the use_device_{ptr,addr} clauses if they
14220 refer to the same variables. */
14221 {
14222 tree use_device_clauses;
14223 tree *pc, *uc = &use_device_clauses;
14224 for (pc = &OMP_CLAUSES (expr); *pc; )
14225 if (OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_USE_DEVICE_PTR
14226 || OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_USE_DEVICE_ADDR)
14227 {
14228 *uc = *pc;
14229 *pc = OMP_CLAUSE_CHAIN (*pc);
14230 uc = &OMP_CLAUSE_CHAIN (*uc);
14231 }
14232 else
14233 pc = &OMP_CLAUSE_CHAIN (*pc);
14234 *uc = NULL_TREE;
14235 *pc = use_device_clauses;
14236 stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_DATA,
14237 OMP_CLAUSES (expr));
14238 }
14239 break;
14240 case OMP_TEAMS:
14241 stmt = gimple_build_omp_teams (body, OMP_CLAUSES (expr));
14242 if ((ort & ORT_HOST_TEAMS) == ORT_HOST_TEAMS)
14243 gimple_omp_teams_set_host (as_a <gomp_teams *> (stmt), true);
14244 break;
14245 default:
14246 gcc_unreachable ();
14247 }
14248
14249 gimplify_seq_add_stmt (pre_p, stmt);
14250 *expr_p = NULL_TREE;
14251 }
14252
14253 /* Gimplify the gross structure of OpenACC enter/exit data, update, and OpenMP
14254 target update constructs. */
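/* A sketch: "#pragma omp target update to(x)" becomes a standalone
   GIMPLE_OMP_TARGET with kind GF_OMP_TARGET_KIND_UPDATE carrying the
   clause for x, and an OpenACC "exit data" region with the finalize
   clause has e.g. its GOMP_MAP_RELEASE mappings rewritten to
   GOMP_MAP_DELETE below.  */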
14255
14256 static void
14257 gimplify_omp_target_update (tree *expr_p, gimple_seq *pre_p)
14258 {
14259 tree expr = *expr_p;
14260 int kind;
14261 gomp_target *stmt;
14262 enum omp_region_type ort = ORT_WORKSHARE;
14263
14264 switch (TREE_CODE (expr))
14265 {
14266 case OACC_ENTER_DATA:
14267 kind = GF_OMP_TARGET_KIND_OACC_ENTER_DATA;
14268 ort = ORT_ACC;
14269 break;
14270 case OACC_EXIT_DATA:
14271 kind = GF_OMP_TARGET_KIND_OACC_EXIT_DATA;
14272 ort = ORT_ACC;
14273 break;
14274 case OACC_UPDATE:
14275 kind = GF_OMP_TARGET_KIND_OACC_UPDATE;
14276 ort = ORT_ACC;
14277 break;
14278 case OMP_TARGET_UPDATE:
14279 kind = GF_OMP_TARGET_KIND_UPDATE;
14280 break;
14281 case OMP_TARGET_ENTER_DATA:
14282 kind = GF_OMP_TARGET_KIND_ENTER_DATA;
14283 break;
14284 case OMP_TARGET_EXIT_DATA:
14285 kind = GF_OMP_TARGET_KIND_EXIT_DATA;
14286 break;
14287 default:
14288 gcc_unreachable ();
14289 }
14290 gimplify_scan_omp_clauses (&OMP_STANDALONE_CLAUSES (expr), pre_p,
14291 ort, TREE_CODE (expr));
14292 gimplify_adjust_omp_clauses (pre_p, NULL, &OMP_STANDALONE_CLAUSES (expr),
14293 TREE_CODE (expr));
14294 if (TREE_CODE (expr) == OACC_UPDATE
14295 && omp_find_clause (OMP_STANDALONE_CLAUSES (expr),
14296 OMP_CLAUSE_IF_PRESENT))
14297 {
14298 /* The runtime uses GOMP_MAP_{TO,FROM} to denote the if_present
14299 clause. */
14300 for (tree c = OMP_STANDALONE_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
14301 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP)
14302 switch (OMP_CLAUSE_MAP_KIND (c))
14303 {
14304 case GOMP_MAP_FORCE_TO:
14305 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TO);
14306 break;
14307 case GOMP_MAP_FORCE_FROM:
14308 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FROM);
14309 break;
14310 default:
14311 break;
14312 }
14313 }
14314 else if (TREE_CODE (expr) == OACC_EXIT_DATA
14315 && omp_find_clause (OMP_STANDALONE_CLAUSES (expr),
14316 OMP_CLAUSE_FINALIZE))
14317 {
14318 /* Use GOMP_MAP_DELETE/GOMP_MAP_FORCE_FROM to denote "finalize"
14319 semantics. */
14320 bool have_clause = false;
14321 for (tree c = OMP_STANDALONE_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
14322 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP)
14323 switch (OMP_CLAUSE_MAP_KIND (c))
14324 {
14325 case GOMP_MAP_FROM:
14326 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FORCE_FROM);
14327 have_clause = true;
14328 break;
14329 case GOMP_MAP_RELEASE:
14330 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_DELETE);
14331 have_clause = true;
14332 break;
14333 case GOMP_MAP_TO_PSET:
14334 /* Fortran arrays with descriptors must map that descriptor when
14335 doing standalone "attach" operations (in OpenACC). In that
14336 case GOMP_MAP_TO_PSET appears by itself with no preceding
14337 clause (see trans-openmp.cc:gfc_trans_omp_clauses). */
14338 break;
14339 case GOMP_MAP_POINTER:
14340 /* TODO PR92929: we may see these here, but they'll always follow
14341 one of the clauses above, and will be handled by libgomp as
14342 one group, so no handling required here. */
14343 gcc_assert (have_clause);
14344 break;
14345 case GOMP_MAP_DETACH:
14346 OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FORCE_DETACH);
14347 have_clause = false;
14348 break;
14349 case GOMP_MAP_STRUCT:
14350 have_clause = false;
14351 break;
14352 default:
14353 gcc_unreachable ();
14354 }
14355 }
14356 stmt = gimple_build_omp_target (NULL, kind, OMP_STANDALONE_CLAUSES (expr));
14357
14358 gimplify_seq_add_stmt (pre_p, stmt);
14359 *expr_p = NULL_TREE;
14360 }
14361
14362 /* A subroutine of gimplify_omp_atomic. The front end is supposed to have
14363 stabilized the lhs of the atomic operation as *ADDR. Return true if
14364 EXPR is this stabilized form. */
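/* E.g. (a sketch) for "#pragma omp atomic" applied to "x += 1", the
   front end stabilizes the lhs as *&x, so an EXPR of the form
   *(int *) &x matches an ADDR of &x here, modulo useless type
   conversions.  */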
14365
14366 static bool
14367 goa_lhs_expr_p (tree expr, tree addr)
14368 {
14369 /* Also include casts to other type variants. The C front end is fond
14370 of adding these for e.g. volatile variables. This is like
14371 STRIP_TYPE_NOPS but includes the main variant lookup. */
14372 STRIP_USELESS_TYPE_CONVERSION (expr);
14373
14374 if (TREE_CODE (expr) == INDIRECT_REF)
14375 {
14376 expr = TREE_OPERAND (expr, 0);
14377 while (expr != addr
14378 && (CONVERT_EXPR_P (expr)
14379 || TREE_CODE (expr) == NON_LVALUE_EXPR)
14380 && TREE_CODE (expr) == TREE_CODE (addr)
14381 && types_compatible_p (TREE_TYPE (expr), TREE_TYPE (addr)))
14382 {
14383 expr = TREE_OPERAND (expr, 0);
14384 addr = TREE_OPERAND (addr, 0);
14385 }
14386 if (expr == addr)
14387 return true;
14388 return (TREE_CODE (addr) == ADDR_EXPR
14389 && TREE_CODE (expr) == ADDR_EXPR
14390 && TREE_OPERAND (addr, 0) == TREE_OPERAND (expr, 0));
14391 }
14392 if (TREE_CODE (addr) == ADDR_EXPR && expr == TREE_OPERAND (addr, 0))
14393 return true;
14394 return false;
14395 }
14396
14397 /* Walk *EXPR_P and replace appearances of *LHS_ADDR with LHS_VAR. If an
14398 expression does not involve the lhs, evaluate it into a temporary.
14399 Return 1 if the lhs appeared as a subexpression, 0 if it did not,
14400 or -1 if an error was encountered. */
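/* A sketch: for "#pragma omp atomic  x = x + foo ()", the appearance
   of x (i.e. *LHS_ADDR) in the rhs is replaced by LHS_VAR, while
   foo (), which does not involve the lhs, is gimplified into a
   temporary by the fallback code at the end, so that only the final
   addition remains between the atomic load and store.  */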
14401
14402 static int
14403 goa_stabilize_expr (tree *expr_p, gimple_seq *pre_p, tree lhs_addr,
14404 tree lhs_var, tree &target_expr, bool rhs, int depth)
14405 {
14406 tree expr = *expr_p;
14407 int saw_lhs = 0;
14408
14409 if (goa_lhs_expr_p (expr, lhs_addr))
14410 {
14411 if (pre_p)
14412 *expr_p = lhs_var;
14413 return 1;
14414 }
14415 if (is_gimple_val (expr))
14416 return 0;
14417
14418 /* Maximum depth of lhs in expression is for the
14419 __builtin_clear_padding (...), __builtin_clear_padding (...),
14420 __builtin_memcmp (&TARGET_EXPR <lhs, >, ...) == 0 ? ... : lhs; */
14421 if (++depth > 7)
14422 goto finish;
14423
14424 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
14425 {
14426 case tcc_binary:
14427 case tcc_comparison:
14428 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p, lhs_addr,
14429 lhs_var, target_expr, true, depth);
14430 /* FALLTHRU */
14431 case tcc_unary:
14432 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p, lhs_addr,
14433 lhs_var, target_expr, true, depth);
14434 break;
14435 case tcc_expression:
14436 switch (TREE_CODE (expr))
14437 {
14438 case TRUTH_ANDIF_EXPR:
14439 case TRUTH_ORIF_EXPR:
14440 case TRUTH_AND_EXPR:
14441 case TRUTH_OR_EXPR:
14442 case TRUTH_XOR_EXPR:
14443 case BIT_INSERT_EXPR:
14444 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
14445 lhs_addr, lhs_var, target_expr, true,
14446 depth);
14447 /* FALLTHRU */
14448 case TRUTH_NOT_EXPR:
14449 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
14450 lhs_addr, lhs_var, target_expr, true,
14451 depth);
14452 break;
14453 case MODIFY_EXPR:
14454 if (pre_p && !goa_stabilize_expr (expr_p, NULL, lhs_addr, lhs_var,
14455 target_expr, true, depth))
14456 break;
14457 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
14458 lhs_addr, lhs_var, target_expr, true,
14459 depth);
14460 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
14461 lhs_addr, lhs_var, target_expr, false,
14462 depth);
14463 break;
14465 case ADDR_EXPR:
14466 if (pre_p && !goa_stabilize_expr (expr_p, NULL, lhs_addr, lhs_var,
14467 target_expr, true, depth))
14468 break;
14469 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
14470 lhs_addr, lhs_var, target_expr, false,
14471 depth);
14472 break;
14473 case COMPOUND_EXPR:
14474 /* Break out any preevaluations from cp_build_modify_expr. */
14475 for (; TREE_CODE (expr) == COMPOUND_EXPR;
14476 expr = TREE_OPERAND (expr, 1))
14477 {
14478 /* Special-case __builtin_clear_padding call before
14479 __builtin_memcmp. */
14480 if (TREE_CODE (TREE_OPERAND (expr, 0)) == CALL_EXPR)
14481 {
14482 tree fndecl = get_callee_fndecl (TREE_OPERAND (expr, 0));
14483 if (fndecl
14484 && fndecl_built_in_p (fndecl, BUILT_IN_CLEAR_PADDING)
14485 && VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
14486 && (!pre_p
14487 || goa_stabilize_expr (&TREE_OPERAND (expr, 0), NULL,
14488 lhs_addr, lhs_var,
14489 target_expr, true, depth)))
14490 {
14491 if (pre_p)
14492 *expr_p = expr;
14493 saw_lhs = goa_stabilize_expr (&TREE_OPERAND (expr, 0),
14494 pre_p, lhs_addr, lhs_var,
14495 target_expr, true, depth);
14496 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1),
14497 pre_p, lhs_addr, lhs_var,
14498 target_expr, rhs, depth);
14499 return saw_lhs;
14500 }
14501 }
14502
14503 if (pre_p)
14504 gimplify_stmt (&TREE_OPERAND (expr, 0), pre_p);
14505 }
14506 if (!pre_p)
14507 return goa_stabilize_expr (&expr, pre_p, lhs_addr, lhs_var,
14508 target_expr, rhs, depth);
14509 *expr_p = expr;
14510 return goa_stabilize_expr (expr_p, pre_p, lhs_addr, lhs_var,
14511 target_expr, rhs, depth);
14512 case COND_EXPR:
14513 if (!goa_stabilize_expr (&TREE_OPERAND (expr, 0), NULL, lhs_addr,
14514 lhs_var, target_expr, true, depth))
14515 break;
14516 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
14517 lhs_addr, lhs_var, target_expr, true,
14518 depth);
14519 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
14520 lhs_addr, lhs_var, target_expr, true,
14521 depth);
14522 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 2), pre_p,
14523 lhs_addr, lhs_var, target_expr, true,
14524 depth);
14525 break;
14526 case TARGET_EXPR:
14527 if (TARGET_EXPR_INITIAL (expr))
14528 {
14529 if (pre_p && !goa_stabilize_expr (expr_p, NULL, lhs_addr,
14530 lhs_var, target_expr, true,
14531 depth))
14532 break;
14533 if (expr == target_expr)
14534 saw_lhs = 1;
14535 else
14536 {
14537 saw_lhs = goa_stabilize_expr (&TARGET_EXPR_INITIAL (expr),
14538 pre_p, lhs_addr, lhs_var,
14539 target_expr, true, depth);
14540 if (saw_lhs && target_expr == NULL_TREE && pre_p)
14541 target_expr = expr;
14542 }
14543 }
14544 break;
14545 default:
14546 break;
14547 }
14548 break;
14549 case tcc_reference:
14550 if (TREE_CODE (expr) == BIT_FIELD_REF
14551 || TREE_CODE (expr) == VIEW_CONVERT_EXPR)
14552 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
14553 lhs_addr, lhs_var, target_expr, true,
14554 depth);
14555 break;
14556 case tcc_vl_exp:
14557 if (TREE_CODE (expr) == CALL_EXPR)
14558 {
14559 if (tree fndecl = get_callee_fndecl (expr))
14560 if (fndecl_built_in_p (fndecl, BUILT_IN_CLEAR_PADDING)
14561 || fndecl_built_in_p (fndecl, BUILT_IN_MEMCMP))
14562 {
14563 int nargs = call_expr_nargs (expr);
14564 for (int i = 0; i < nargs; i++)
14565 saw_lhs |= goa_stabilize_expr (&CALL_EXPR_ARG (expr, i),
14566 pre_p, lhs_addr, lhs_var,
14567 target_expr, true, depth);
14568 }
14569 }
14570 break;
14571 default:
14572 break;
14573 }
14574
14575 finish:
14576 if (saw_lhs == 0 && pre_p)
14577 {
14578 enum gimplify_status gs;
14579 if (TREE_CODE (expr) == CALL_EXPR && VOID_TYPE_P (TREE_TYPE (expr)))
14580 {
14581 gimplify_stmt (&expr, pre_p);
14582 return saw_lhs;
14583 }
14584 else if (rhs)
14585 gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_val, fb_rvalue);
14586 else
14587 gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_lvalue, fb_lvalue);
14588 if (gs != GS_ALL_DONE)
14589 saw_lhs = -1;
14590 }
14591
14592 return saw_lhs;
14593 }
14594
14595 /* Gimplify an OMP_ATOMIC statement. */
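/* A rough sketch of the result for "#pragma omp atomic  x += n":

     tmp = GIMPLE_OMP_ATOMIC_LOAD <&x>
     tmp2 = tmp + n
     GIMPLE_OMP_ATOMIC_STORE <tmp2>

   where tmp is the TMP_LOAD temporary created below; the capture
   forms additionally mark the load or store as needing its value.  */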
14596
14597 static enum gimplify_status
14598 gimplify_omp_atomic (tree *expr_p, gimple_seq *pre_p)
14599 {
14600 tree addr = TREE_OPERAND (*expr_p, 0);
14601 tree rhs = TREE_CODE (*expr_p) == OMP_ATOMIC_READ
14602 ? NULL : TREE_OPERAND (*expr_p, 1);
14603 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
14604 tree tmp_load;
14605 gomp_atomic_load *loadstmt;
14606 gomp_atomic_store *storestmt;
14607 tree target_expr = NULL_TREE;
14608
14609 tmp_load = create_tmp_reg (type);
14610 if (rhs
14611 && goa_stabilize_expr (&rhs, pre_p, addr, tmp_load, target_expr,
14612 true, 0) < 0)
14613 return GS_ERROR;
14614
14615 if (gimplify_expr (&addr, pre_p, NULL, is_gimple_val, fb_rvalue)
14616 != GS_ALL_DONE)
14617 return GS_ERROR;
14618
14619 loadstmt = gimple_build_omp_atomic_load (tmp_load, addr,
14620 OMP_ATOMIC_MEMORY_ORDER (*expr_p));
14621 gimplify_seq_add_stmt (pre_p, loadstmt);
14622 if (rhs)
14623 {
14624 /* BIT_INSERT_EXPR is not valid for non-integral bitfield
14625 representatives. Use BIT_FIELD_REF on the lhs instead. */
14626 tree rhsarg = rhs;
14627 if (TREE_CODE (rhs) == COND_EXPR)
14628 rhsarg = TREE_OPERAND (rhs, 1);
14629 if (TREE_CODE (rhsarg) == BIT_INSERT_EXPR
14630 && !INTEGRAL_TYPE_P (TREE_TYPE (tmp_load)))
14631 {
14632 tree bitpos = TREE_OPERAND (rhsarg, 2);
14633 tree op1 = TREE_OPERAND (rhsarg, 1);
14634 tree bitsize;
14635 tree tmp_store = tmp_load;
14636 if (TREE_CODE (*expr_p) == OMP_ATOMIC_CAPTURE_OLD)
14637 tmp_store = get_initialized_tmp_var (tmp_load, pre_p);
14638 if (INTEGRAL_TYPE_P (TREE_TYPE (op1)))
14639 bitsize = bitsize_int (TYPE_PRECISION (TREE_TYPE (op1)));
14640 else
14641 bitsize = TYPE_SIZE (TREE_TYPE (op1));
14642 gcc_assert (TREE_OPERAND (rhsarg, 0) == tmp_load);
14643 tree t = build2_loc (EXPR_LOCATION (rhsarg),
14644 MODIFY_EXPR, void_type_node,
14645 build3_loc (EXPR_LOCATION (rhsarg),
14646 BIT_FIELD_REF, TREE_TYPE (op1),
14647 tmp_store, bitsize, bitpos), op1);
14648 if (TREE_CODE (rhs) == COND_EXPR)
14649 t = build3_loc (EXPR_LOCATION (rhs), COND_EXPR, void_type_node,
14650 TREE_OPERAND (rhs, 0), t, void_node);
14651 gimplify_and_add (t, pre_p);
14652 rhs = tmp_store;
14653 }
14654 bool save_allow_rhs_cond_expr = gimplify_ctxp->allow_rhs_cond_expr;
14655 if (TREE_CODE (rhs) == COND_EXPR)
14656 gimplify_ctxp->allow_rhs_cond_expr = true;
14657 enum gimplify_status gs = gimplify_expr (&rhs, pre_p, NULL,
14658 is_gimple_val, fb_rvalue);
14659 gimplify_ctxp->allow_rhs_cond_expr = save_allow_rhs_cond_expr;
14660 if (gs != GS_ALL_DONE)
14661 return GS_ERROR;
14662 }
14663
14664 if (TREE_CODE (*expr_p) == OMP_ATOMIC_READ)
14665 rhs = tmp_load;
14666 storestmt
14667 = gimple_build_omp_atomic_store (rhs, OMP_ATOMIC_MEMORY_ORDER (*expr_p));
14668 if (TREE_CODE (*expr_p) != OMP_ATOMIC_READ && OMP_ATOMIC_WEAK (*expr_p))
14669 {
14670 gimple_omp_atomic_set_weak (loadstmt);
14671 gimple_omp_atomic_set_weak (storestmt);
14672 }
14673 gimplify_seq_add_stmt (pre_p, storestmt);
14674 switch (TREE_CODE (*expr_p))
14675 {
14676 case OMP_ATOMIC_READ:
14677 case OMP_ATOMIC_CAPTURE_OLD:
14678 *expr_p = tmp_load;
14679 gimple_omp_atomic_set_need_value (loadstmt);
14680 break;
14681 case OMP_ATOMIC_CAPTURE_NEW:
14682 *expr_p = rhs;
14683 gimple_omp_atomic_set_need_value (storestmt);
14684 break;
14685 default:
14686 *expr_p = NULL;
14687 break;
14688 }
14689
14690 return GS_ALL_DONE;
14691 }
14692
14693 /* Gimplify a TRANSACTION_EXPR. This involves gimplification of the
14694 body, and adding some EH bits. */
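/* A sketch: "__transaction_atomic { body }" becomes a
   GIMPLE_TRANSACTION tuple holding the gimplified body, with
   GTMA_IS_OUTER or GTMA_IS_RELAXED set for the
   "__transaction_atomic [[outer]]" and "__transaction_relaxed"
   variants respectively.  */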
14695
14696 static enum gimplify_status
14697 gimplify_transaction (tree *expr_p, gimple_seq *pre_p)
14698 {
14699 tree expr = *expr_p, temp, tbody = TRANSACTION_EXPR_BODY (expr);
14700 gimple *body_stmt;
14701 gtransaction *trans_stmt;
14702 gimple_seq body = NULL;
14703 int subcode = 0;
14704
14705 /* Wrap the transaction body in a BIND_EXPR so we have a context
14706 in which to put decls for OMP. */
14707 if (TREE_CODE (tbody) != BIND_EXPR)
14708 {
14709 tree bind = build3 (BIND_EXPR, void_type_node, NULL, tbody, NULL);
14710 TREE_SIDE_EFFECTS (bind) = 1;
14711 SET_EXPR_LOCATION (bind, EXPR_LOCATION (tbody));
14712 TRANSACTION_EXPR_BODY (expr) = bind;
14713 }
14714
14715 push_gimplify_context ();
14716 temp = voidify_wrapper_expr (*expr_p, NULL);
14717
14718 body_stmt = gimplify_and_return_first (TRANSACTION_EXPR_BODY (expr), &body);
14719 pop_gimplify_context (body_stmt);
14720
14721 trans_stmt = gimple_build_transaction (body);
14722 if (TRANSACTION_EXPR_OUTER (expr))
14723 subcode = GTMA_IS_OUTER;
14724 else if (TRANSACTION_EXPR_RELAXED (expr))
14725 subcode = GTMA_IS_RELAXED;
14726 gimple_transaction_set_subcode (trans_stmt, subcode);
14727
14728 gimplify_seq_add_stmt (pre_p, trans_stmt);
14729
14730 if (temp)
14731 {
14732 *expr_p = temp;
14733 return GS_OK;
14734 }
14735
14736 *expr_p = NULL_TREE;
14737 return GS_ALL_DONE;
14738 }
14739
14740 /* Gimplify an OMP_ORDERED construct. EXPR is the tree version. BODY
14741 is the OMP_BODY of the original EXPR (which has already been
14742 gimplified so it's not present in the EXPR).
14743
14744 Return the gimplified GIMPLE_OMP_ORDERED tuple. */
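/* E.g. (a sketch) inside "#pragma omp for ordered(2)" with iteration
   variables i and j, a nested "#pragma omp ordered depend(sink: i-1, j)"
   must name exactly those two iterators in order; the checks below
   diagnose wrong or missing iterators and reject mixing source and
   sink clauses on one construct.  */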
14745
14746 static gimple *
14747 gimplify_omp_ordered (tree expr, gimple_seq body)
14748 {
14749 tree c, decls;
14750 int failures = 0;
14751 unsigned int i;
14752 tree source_c = NULL_TREE;
14753 tree sink_c = NULL_TREE;
14754
14755 if (gimplify_omp_ctxp)
14756 {
14757 for (c = OMP_ORDERED_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
14758 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
14759 && gimplify_omp_ctxp->loop_iter_var.is_empty ()
14760 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK
14761 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE))
14762 {
14763 error_at (OMP_CLAUSE_LOCATION (c),
14764 "%<ordered%> construct with %<depend%> clause must be "
14765 "closely nested inside a loop with %<ordered%> clause "
14766 "with a parameter");
14767 failures++;
14768 }
14769 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
14770 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
14771 {
14772 bool fail = false;
14773 for (decls = OMP_CLAUSE_DECL (c), i = 0;
14774 decls && TREE_CODE (decls) == TREE_LIST;
14775 decls = TREE_CHAIN (decls), ++i)
14776 if (i >= gimplify_omp_ctxp->loop_iter_var.length () / 2)
14777 continue;
14778 else if (TREE_VALUE (decls)
14779 != gimplify_omp_ctxp->loop_iter_var[2 * i])
14780 {
14781 error_at (OMP_CLAUSE_LOCATION (c),
14782 "variable %qE is not an iteration "
14783 "of outermost loop %d, expected %qE",
14784 TREE_VALUE (decls), i + 1,
14785 gimplify_omp_ctxp->loop_iter_var[2 * i]);
14786 fail = true;
14787 failures++;
14788 }
14789 else
14790 TREE_VALUE (decls)
14791 = gimplify_omp_ctxp->loop_iter_var[2 * i + 1];
14792 if (!fail && i != gimplify_omp_ctxp->loop_iter_var.length () / 2)
14793 {
14794 error_at (OMP_CLAUSE_LOCATION (c),
14795 "number of variables in %<depend%> clause with "
14796 "%<sink%> modifier does not match number of "
14797 "iteration variables");
14798 failures++;
14799 }
14800 sink_c = c;
14801 }
14802 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
14803 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE)
14804 {
14805 if (source_c)
14806 {
14807 error_at (OMP_CLAUSE_LOCATION (c),
14808 "more than one %<depend%> clause with %<source%> "
14809 "modifier on an %<ordered%> construct");
14810 failures++;
14811 }
14812 else
14813 source_c = c;
14814 }
14815 }
14816 if (source_c && sink_c)
14817 {
14818 error_at (OMP_CLAUSE_LOCATION (source_c),
14819 "%<depend%> clause with %<source%> modifier specified "
14820 "together with %<depend%> clauses with %<sink%> modifier "
14821 "on the same construct");
14822 failures++;
14823 }
14824
14825 if (failures)
14826 return gimple_build_nop ();
14827 return gimple_build_omp_ordered (body, OMP_ORDERED_CLAUSES (expr));
14828 }
14829
14830 /* Convert the GENERIC expression tree *EXPR_P to GIMPLE. If the
14831 expression produces a value to be used as an operand inside a GIMPLE
14832 statement, the value will be stored back in *EXPR_P. This value will
14833 be a tree of class tcc_declaration, tcc_constant, tcc_reference or
14834 an SSA_NAME. The corresponding sequence of GIMPLE statements is
14835 emitted in PRE_P and POST_P.
14836
14837 Additionally, this process may overwrite parts of the input
14838 expression during gimplification. Ideally, it should be
14839 possible to do non-destructive gimplification.
14840
14841 EXPR_P points to the GENERIC expression to convert to GIMPLE. If
14842 the expression needs to evaluate to a value to be used as
14843 an operand in a GIMPLE statement, this value will be stored in
14844 *EXPR_P on exit. This happens when the caller specifies one
14845 of fb_lvalue or fb_rvalue fallback flags.
14846
14847 PRE_P will contain the sequence of GIMPLE statements corresponding
14848 to the evaluation of EXPR and all the side-effects that must
14849 be executed before the main expression. On exit, the last
14850 statement of PRE_P is the core statement being gimplified. For
14851 instance, when gimplifying 'if (++a)' the last statement in
14852 PRE_P will be 'if (t.1)' where t.1 is the result of
14853 pre-incrementing 'a'.
14854
14855 POST_P will contain the sequence of GIMPLE statements corresponding
14856 to the evaluation of all the side-effects that must be executed
14857 after the main expression. If this is NULL, the post
14858 side-effects are stored at the end of PRE_P.
14859
14860 The reason why the output is split in two is to handle post
14861 side-effects explicitly. In some cases, an expression may have
14862 inner and outer post side-effects which need to be emitted in
14863 an order different from the one given by the recursive
14864 traversal. For instance, for the expression (*p--)++ the post
14865 side-effects of '--' must actually occur *after* the post
14866 side-effects of '++'. However, gimplification will first visit
14867 the inner expression, so if a separate POST sequence was not
14868 used, the resulting sequence would be:
14869
14870 1 t.1 = *p
14871 2 p = p - 1
14872 3 t.2 = t.1 + 1
14873 4 *p = t.2
14874
14875 However, the post-decrement operation in line #2 must not be
14876 evaluated until after the store to *p at line #4, so the
14877 correct sequence should be:
14878
14879 1 t.1 = *p
14880 2 t.2 = t.1 + 1
14881 3 *p = t.2
14882 4 p = p - 1
14883
14884 So, by specifying a separate post queue, it is possible
14885 to emit the post side-effects in the correct order.
14886 If POST_P is NULL, an internal queue will be used. Before
14887 returning to the caller, the sequence POST_P is appended to
14888 the main output sequence PRE_P.
14889
14890 GIMPLE_TEST_F points to a function that takes a tree T and
14891 returns nonzero if T is in the GIMPLE form requested by the
14892 caller. The GIMPLE predicates are in gimple.cc.
14893
14894 FALLBACK tells the function what sort of a temporary we want if
14895 gimplification cannot produce an expression that complies with
14896 GIMPLE_TEST_F.
14897
14898 fb_none means that no temporary should be generated
14899 fb_rvalue means that an rvalue is OK to generate
14900 fb_lvalue means that an lvalue is OK to generate
14901 fb_either means that either is OK, but an lvalue is preferable.
14902 fb_mayfail means that gimplification may fail (in which case
14903 GS_ERROR will be returned)
14904
14905 The return value is either GS_ERROR or GS_ALL_DONE, since this
14906 function iterates until EXPR is completely gimplified or an error
14907 occurs. */
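/* A typical internal use looks like this (an illustrative sketch,
   not a quote of any particular caller):

     enum gimplify_status gs
       = gimplify_expr (&TREE_OPERAND (t, 0), pre_p, post_p,
			is_gimple_val, fb_rvalue);

   which forces the operand into a form acceptable to is_gimple_val,
   emitting any needed temporaries and side-effects into PRE_P.  */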
14908
14909 enum gimplify_status
14910 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
14911 bool (*gimple_test_f) (tree), fallback_t fallback)
14912 {
14913 tree tmp;
14914 gimple_seq internal_pre = NULL;
14915 gimple_seq internal_post = NULL;
14916 tree save_expr;
14917 bool is_statement;
14918 location_t saved_location;
14919 enum gimplify_status ret;
14920 gimple_stmt_iterator pre_last_gsi, post_last_gsi;
14921 tree label;
14922
14923 save_expr = *expr_p;
14924 if (save_expr == NULL_TREE)
14925 return GS_ALL_DONE;
14926
14927 /* If we are gimplifying a top-level statement, PRE_P must be valid. */
14928 is_statement = gimple_test_f == is_gimple_stmt;
14929 if (is_statement)
14930 gcc_assert (pre_p);
14931
14932 /* Consistency checks. */
14933 if (gimple_test_f == is_gimple_reg)
14934 gcc_assert (fallback & (fb_rvalue | fb_lvalue));
14935 else if (gimple_test_f == is_gimple_val
14936 || gimple_test_f == is_gimple_call_addr
14937 || gimple_test_f == is_gimple_condexpr
14938 || gimple_test_f == is_gimple_condexpr_for_cond
14939 || gimple_test_f == is_gimple_mem_rhs
14940 || gimple_test_f == is_gimple_mem_rhs_or_call
14941 || gimple_test_f == is_gimple_reg_rhs
14942 || gimple_test_f == is_gimple_reg_rhs_or_call
14943 || gimple_test_f == is_gimple_asm_val
14944 || gimple_test_f == is_gimple_mem_ref_addr)
14945 gcc_assert (fallback & fb_rvalue);
14946 else if (gimple_test_f == is_gimple_min_lval
14947 || gimple_test_f == is_gimple_lvalue)
14948 gcc_assert (fallback & fb_lvalue);
14949 else if (gimple_test_f == is_gimple_addressable)
14950 gcc_assert (fallback & fb_either);
14951 else if (gimple_test_f == is_gimple_stmt)
14952 gcc_assert (fallback == fb_none);
14953 else
14954 {
14955 /* We should have recognized the GIMPLE_TEST_F predicate to
14956 know what kind of fallback to use in case a temporary is
14957 needed to hold the value or address of *EXPR_P. */
14958 gcc_unreachable ();
14959 }
14960
14961 /* We used to check the predicate here and return immediately if it
14962 succeeds. This is wrong; the design is for gimplification to be
14963 idempotent, and for the predicates to only test for valid forms, not
14964 whether they are fully simplified. */
14965 if (pre_p == NULL)
14966 pre_p = &internal_pre;
14967
14968 if (post_p == NULL)
14969 post_p = &internal_post;
14970
14971 /* Remember the last statements added to PRE_P and POST_P. Every
14972 new statement added by the gimplification helpers needs to be
14973 annotated with location information. To centralize the
14974 responsibility, we remember the last statement that had been
14975 added to both queues before gimplifying *EXPR_P. If
14976 gimplification produces new statements in PRE_P and POST_P, those
14977 statements will be annotated with the same location information
14978 as *EXPR_P. */
14979 pre_last_gsi = gsi_last (*pre_p);
14980 post_last_gsi = gsi_last (*post_p);
14981
14982 saved_location = input_location;
14983 if (save_expr != error_mark_node
14984 && EXPR_HAS_LOCATION (*expr_p))
14985 input_location = EXPR_LOCATION (*expr_p);
14986
14987 /* Loop over the specific gimplifiers until the toplevel node
14988 remains the same. */
14989 do
14990 {
14991 /* Strip away as many useless type conversions as possible
14992 at the toplevel. */
14993 STRIP_USELESS_TYPE_CONVERSION (*expr_p);
14994
14995 /* Remember the expr. */
14996 save_expr = *expr_p;
14997
14998 /* Die, die, die, my darling. */
14999 if (error_operand_p (save_expr))
15000 {
15001 ret = GS_ERROR;
15002 break;
15003 }
15004
15005 /* Do any language-specific gimplification. */
15006 ret = ((enum gimplify_status)
15007 lang_hooks.gimplify_expr (expr_p, pre_p, post_p));
15008 if (ret == GS_OK)
15009 {
15010 if (*expr_p == NULL_TREE)
15011 break;
15012 if (*expr_p != save_expr)
15013 continue;
15014 }
15015 else if (ret != GS_UNHANDLED)
15016 break;
15017
15018 /* Make sure that all the cases set 'ret' appropriately. */
15019 ret = GS_UNHANDLED;
15020 switch (TREE_CODE (*expr_p))
15021 {
15022 /* First deal with the special cases. */
15023
15024 case POSTINCREMENT_EXPR:
15025 case POSTDECREMENT_EXPR:
15026 case PREINCREMENT_EXPR:
15027 case PREDECREMENT_EXPR:
15028 ret = gimplify_self_mod_expr (expr_p, pre_p, post_p,
15029 fallback != fb_none,
15030 TREE_TYPE (*expr_p));
15031 break;
15032
15033 case VIEW_CONVERT_EXPR:
15034 if ((fallback & fb_rvalue)
15035 && is_gimple_reg_type (TREE_TYPE (*expr_p))
15036 && is_gimple_reg_type (TREE_TYPE (TREE_OPERAND (*expr_p, 0))))
15037 {
15038 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
15039 post_p, is_gimple_val, fb_rvalue);
15040 recalculate_side_effects (*expr_p);
15041 break;
15042 }
15043 /* Fallthru. */
15044
15045 case ARRAY_REF:
15046 case ARRAY_RANGE_REF:
15047 case REALPART_EXPR:
15048 case IMAGPART_EXPR:
15049 case COMPONENT_REF:
15050 ret = gimplify_compound_lval (expr_p, pre_p, post_p,
15051 fallback ? fallback : fb_rvalue);
15052 break;
15053
15054 case COND_EXPR:
15055 ret = gimplify_cond_expr (expr_p, pre_p, fallback);
15056
15057 /* C99 code may assign to an array in a structure value of a
15058 conditional expression, and this has undefined behavior
15059 only on execution, so create a temporary if an lvalue is
15060 required. */
15061 if (fallback == fb_lvalue)
15062 {
15063 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
15064 mark_addressable (*expr_p);
15065 ret = GS_OK;
15066 }
15067 break;
15068
15069 case CALL_EXPR:
15070 ret = gimplify_call_expr (expr_p, pre_p, fallback != fb_none);
15071
15072 /* C99 code may assign to an array in a structure returned
15073 from a function, and this has undefined behavior only on
15074 execution, so create a temporary if an lvalue is
15075 required. */
15076 if (fallback == fb_lvalue)
15077 {
15078 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
15079 mark_addressable (*expr_p);
15080 ret = GS_OK;
15081 }
15082 break;
15083
15084 case TREE_LIST:
15085 gcc_unreachable ();
15086
15087 case COMPOUND_EXPR:
15088 ret = gimplify_compound_expr (expr_p, pre_p, fallback != fb_none);
15089 break;
15090
15091 case COMPOUND_LITERAL_EXPR:
15092 ret = gimplify_compound_literal_expr (expr_p, pre_p,
15093 gimple_test_f, fallback);
15094 break;
15095
15096 case MODIFY_EXPR:
15097 case INIT_EXPR:
15098 ret = gimplify_modify_expr (expr_p, pre_p, post_p,
15099 fallback != fb_none);
15100 break;
15101
15102 case TRUTH_ANDIF_EXPR:
15103 case TRUTH_ORIF_EXPR:
15104 {
15105 /* Preserve the original type of the expression and the
15106 source location of the outer expression. */
15107 tree org_type = TREE_TYPE (*expr_p);
15108 *expr_p = gimple_boolify (*expr_p);
15109 *expr_p = build3_loc (input_location, COND_EXPR,
15110 org_type, *expr_p,
15111 fold_convert_loc
15112 (input_location,
15113 org_type, boolean_true_node),
15114 fold_convert_loc
15115 (input_location,
15116 org_type, boolean_false_node));
15117 ret = GS_OK;
15118 break;
15119 }
15120
15121 case TRUTH_NOT_EXPR:
15122 {
15123 tree type = TREE_TYPE (*expr_p);
15124 /* The parsers are careful to generate TRUTH_NOT_EXPR
15125 only with operands that are always zero or one.
15126 We do not fold here but handle the only interesting case
15127 manually, as fold may re-introduce the TRUTH_NOT_EXPR. */
15128 *expr_p = gimple_boolify (*expr_p);
15129 if (TYPE_PRECISION (TREE_TYPE (*expr_p)) == 1)
15130 *expr_p = build1_loc (input_location, BIT_NOT_EXPR,
15131 TREE_TYPE (*expr_p),
15132 TREE_OPERAND (*expr_p, 0));
15133 else
15134 *expr_p = build2_loc (input_location, BIT_XOR_EXPR,
15135 TREE_TYPE (*expr_p),
15136 TREE_OPERAND (*expr_p, 0),
15137 build_int_cst (TREE_TYPE (*expr_p), 1));
15138 if (!useless_type_conversion_p (type, TREE_TYPE (*expr_p)))
15139 *expr_p = fold_convert_loc (input_location, type, *expr_p);
15140 ret = GS_OK;
15141 break;
15142 }
15143
15144 case ADDR_EXPR:
15145 ret = gimplify_addr_expr (expr_p, pre_p, post_p);
15146 break;
15147
15148 case ANNOTATE_EXPR:
15149 {
15150 tree cond = TREE_OPERAND (*expr_p, 0);
15151 tree kind = TREE_OPERAND (*expr_p, 1);
15152 tree data = TREE_OPERAND (*expr_p, 2);
15153 tree type = TREE_TYPE (cond);
15154 if (!INTEGRAL_TYPE_P (type))
15155 {
15156 *expr_p = cond;
15157 ret = GS_OK;
15158 break;
15159 }
15160 tree tmp = create_tmp_var (type);
15161 gimplify_arg (&cond, pre_p, EXPR_LOCATION (*expr_p));
15162 gcall *call
15163 = gimple_build_call_internal (IFN_ANNOTATE, 3, cond, kind, data);
15164 gimple_call_set_lhs (call, tmp);
15165 gimplify_seq_add_stmt (pre_p, call);
15166 *expr_p = tmp;
15167 ret = GS_ALL_DONE;
15168 break;
15169 }
15170
15171 case VA_ARG_EXPR:
15172 ret = gimplify_va_arg_expr (expr_p, pre_p, post_p);
15173 break;
15174
15175 CASE_CONVERT:
15176 if (IS_EMPTY_STMT (*expr_p))
15177 {
15178 ret = GS_ALL_DONE;
15179 break;
15180 }
15181
15182 if (VOID_TYPE_P (TREE_TYPE (*expr_p))
15183 || fallback == fb_none)
15184 {
15185 /* Just strip a conversion to void (or in void context) and
15186 try again. */
15187 *expr_p = TREE_OPERAND (*expr_p, 0);
15188 ret = GS_OK;
15189 break;
15190 }
15191
15192 ret = gimplify_conversion (expr_p);
15193 if (ret == GS_ERROR)
15194 break;
15195 if (*expr_p != save_expr)
15196 break;
15197 /* FALLTHRU */
15198
15199 case FIX_TRUNC_EXPR:
15200 /* unary_expr: ... | '(' cast ')' val | ... */
15201 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
15202 is_gimple_val, fb_rvalue);
15203 recalculate_side_effects (*expr_p);
15204 break;
15205
15206 case INDIRECT_REF:
15207 {
15208 bool volatilep = TREE_THIS_VOLATILE (*expr_p);
15209 bool notrap = TREE_THIS_NOTRAP (*expr_p);
15210 tree saved_ptr_type = TREE_TYPE (TREE_OPERAND (*expr_p, 0));
15211
15212 *expr_p = fold_indirect_ref_loc (input_location, *expr_p);
15213 if (*expr_p != save_expr)
15214 {
15215 ret = GS_OK;
15216 break;
15217 }
15218
15219 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
15220 is_gimple_reg, fb_rvalue);
15221 if (ret == GS_ERROR)
15222 break;
15223
15224 recalculate_side_effects (*expr_p);
15225 *expr_p = fold_build2_loc (input_location, MEM_REF,
15226 TREE_TYPE (*expr_p),
15227 TREE_OPERAND (*expr_p, 0),
15228 build_int_cst (saved_ptr_type, 0));
15229 TREE_THIS_VOLATILE (*expr_p) = volatilep;
15230 TREE_THIS_NOTRAP (*expr_p) = notrap;
15231 ret = GS_OK;
15232 break;
15233 }
15234
15235 /* We arrive here through the various re-gimplification paths. */
15236 case MEM_REF:
15237 /* First try re-folding the whole thing. */
15238 tmp = fold_binary (MEM_REF, TREE_TYPE (*expr_p),
15239 TREE_OPERAND (*expr_p, 0),
15240 TREE_OPERAND (*expr_p, 1));
15241 if (tmp)
15242 {
15243 REF_REVERSE_STORAGE_ORDER (tmp)
15244 = REF_REVERSE_STORAGE_ORDER (*expr_p);
15245 *expr_p = tmp;
15246 recalculate_side_effects (*expr_p);
15247 ret = GS_OK;
15248 break;
15249 }
15250 /* Avoid re-gimplifying the address operand if it is already
15251 in suitable form. Re-gimplifying would mark the address
15252 operand addressable. Always gimplify when not in SSA form
15253 as we still may have to gimplify decls with value-exprs. */
15254 if (!gimplify_ctxp || !gimple_in_ssa_p (cfun)
15255 || !is_gimple_mem_ref_addr (TREE_OPERAND (*expr_p, 0)))
15256 {
15257 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
15258 is_gimple_mem_ref_addr, fb_rvalue);
15259 if (ret == GS_ERROR)
15260 break;
15261 }
15262 recalculate_side_effects (*expr_p);
15263 ret = GS_ALL_DONE;
15264 break;
15265
15266 /* Constants need not be gimplified. */
15267 case INTEGER_CST:
15268 case REAL_CST:
15269 case FIXED_CST:
15270 case STRING_CST:
15271 case COMPLEX_CST:
15272 case VECTOR_CST:
15273 /* Drop the overflow flag on constants, we do not want
15274 that in the GIMPLE IL. */
15275 if (TREE_OVERFLOW_P (*expr_p))
15276 *expr_p = drop_tree_overflow (*expr_p);
15277 ret = GS_ALL_DONE;
15278 break;
15279
15280 case CONST_DECL:
15281 /* If we require an lvalue, such as for ADDR_EXPR, retain the
15282 CONST_DECL node. Otherwise the decl is replaceable by its
15283 value. */
15284 /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either. */
15285 if (fallback & fb_lvalue)
15286 ret = GS_ALL_DONE;
15287 else
15288 {
15289 *expr_p = DECL_INITIAL (*expr_p);
15290 ret = GS_OK;
15291 }
15292 break;
15293
15294 case DECL_EXPR:
15295 ret = gimplify_decl_expr (expr_p, pre_p);
15296 break;
15297
15298 case BIND_EXPR:
15299 ret = gimplify_bind_expr (expr_p, pre_p);
15300 break;
15301
15302 case LOOP_EXPR:
15303 ret = gimplify_loop_expr (expr_p, pre_p);
15304 break;
15305
15306 case SWITCH_EXPR:
15307 ret = gimplify_switch_expr (expr_p, pre_p);
15308 break;
15309
15310 case EXIT_EXPR:
15311 ret = gimplify_exit_expr (expr_p);
15312 break;
15313
15314 case GOTO_EXPR:
15315 /* If the target is not a LABEL_DECL, then it is a computed jump
15316 and the target needs to be gimplified. */
15317 if (TREE_CODE (GOTO_DESTINATION (*expr_p)) != LABEL_DECL)
15318 {
15319 ret = gimplify_expr (&GOTO_DESTINATION (*expr_p), pre_p,
15320 NULL, is_gimple_val, fb_rvalue);
15321 if (ret == GS_ERROR)
15322 break;
15323 }
15324 gimplify_seq_add_stmt (pre_p,
15325 gimple_build_goto (GOTO_DESTINATION (*expr_p)));
15326 ret = GS_ALL_DONE;
15327 break;
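/* A GNU computed goto such as "goto *dispatch[i];" takes the branch
   above: the destination is an expression rather than a LABEL_DECL,
   so it is first gimplified into a register.  */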
15328
15329 case PREDICT_EXPR:
15330 gimplify_seq_add_stmt (pre_p,
15331 gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p),
15332 PREDICT_EXPR_OUTCOME (*expr_p)));
15333 ret = GS_ALL_DONE;
15334 break;
15335
15336 case LABEL_EXPR:
15337 ret = gimplify_label_expr (expr_p, pre_p);
15338 label = LABEL_EXPR_LABEL (*expr_p);
15339 gcc_assert (decl_function_context (label) == current_function_decl);
15340
15341 /* If the label is used in a goto statement, or the address of the
15342 label is taken, we need to unpoison all variables that were seen
15343 so far. Doing so prevents us from reporting false positives. */
15344 if (asan_poisoned_variables
15345 && asan_used_labels != NULL
15346 && asan_used_labels->contains (label)
15347 && !gimplify_omp_ctxp)
15348 asan_poison_variables (asan_poisoned_variables, false, pre_p);
15349 break;
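/* E.g. when a backward goto can reach this label, a variable that was
   poisoned on a path that left its scope may still be marked poisoned
   when control arrives here again, so everything tracked so far is
   conservatively unpoisoned to avoid such false positives.  */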
15350
15351 case CASE_LABEL_EXPR:
15352 ret = gimplify_case_label_expr (expr_p, pre_p);
15353
15354 if (gimplify_ctxp->live_switch_vars)
15355 asan_poison_variables (gimplify_ctxp->live_switch_vars, false,
15356 pre_p);
15357 break;
15358
15359 case RETURN_EXPR:
15360 ret = gimplify_return_expr (*expr_p, pre_p);
15361 break;
15362
15363 case CONSTRUCTOR:
15364 /* Don't reduce this in place; let gimplify_init_constructor work its
15365 magic. But if we're just elaborating this for side effects, just
15366 gimplify any element that has side-effects. */
15367 if (fallback == fb_none)
15368 {
15369 unsigned HOST_WIDE_INT ix;
15370 tree val;
15371 tree temp = NULL_TREE;
15372 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (*expr_p), ix, val)
15373 if (TREE_SIDE_EFFECTS (val))
15374 append_to_statement_list (val, &temp);
15375
15376 *expr_p = temp;
15377 ret = temp ? GS_OK : GS_ALL_DONE;
15378 }
15379 /* C99 code may assign to an array in a constructed
15380 structure or union, and this has undefined behavior only
15381 on execution, so create a temporary if an lvalue is
15382 required. */
15383 else if (fallback == fb_lvalue)
15384 {
15385 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
15386 mark_addressable (*expr_p);
15387 ret = GS_OK;
15388 }
15389 else
15390 ret = GS_ALL_DONE;
15391 break;
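/* An example of the fb_lvalue path is C99 code such as

       (struct S){ .a = { 0 } }.a[i] = x;

   where the compound literal's CONSTRUCTOR must be materialized in an
   addressable temporary so that the store has an object to refer to.  */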
15392
15393 /* The following are special cases that are not handled by the
15394 original GIMPLE grammar. */
15395
15396 /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
15397 eliminated. */
15398 case SAVE_EXPR:
15399 ret = gimplify_save_expr (expr_p, pre_p, post_p);
15400 break;
15401
15402 case BIT_FIELD_REF:
15403 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
15404 post_p, is_gimple_lvalue, fb_either);
15405 recalculate_side_effects (*expr_p);
15406 break;
15407
15408 case TARGET_MEM_REF:
15409 {
15410 enum gimplify_status r0 = GS_ALL_DONE, r1 = GS_ALL_DONE;
15411
15412 if (TMR_BASE (*expr_p))
15413 r0 = gimplify_expr (&TMR_BASE (*expr_p), pre_p,
15414 post_p, is_gimple_mem_ref_addr, fb_either);
15415 if (TMR_INDEX (*expr_p))
15416 r1 = gimplify_expr (&TMR_INDEX (*expr_p), pre_p,
15417 post_p, is_gimple_val, fb_rvalue);
15418 if (TMR_INDEX2 (*expr_p))
15419 r1 = gimplify_expr (&TMR_INDEX2 (*expr_p), pre_p,
15420 post_p, is_gimple_val, fb_rvalue);
15421 /* TMR_STEP and TMR_OFFSET are always integer constants. */
15422 ret = MIN (r0, r1);
15423 }
15424 break;
15425
15426 case NON_LVALUE_EXPR:
15427 /* This should have been stripped above. */
15428 gcc_unreachable ();
15429
15430 case ASM_EXPR:
15431 ret = gimplify_asm_expr (expr_p, pre_p, post_p);
15432 break;
15433
15434 case TRY_FINALLY_EXPR:
15435 case TRY_CATCH_EXPR:
15436 {
15437 gimple_seq eval, cleanup;
15438 gtry *try_;
15439
15440 /* Calls to destructors are generated automatically in the FINALLY/CATCH
15441 block. They should have location UNKNOWN_LOCATION. However,
15442 gimplify_call_expr will reset such call stmts to input_location
15443 if it finds the stmt's location is unknown. To prevent that
15444 resetting for destructors, we set input_location to unknown.
15445 Note that this only affects the destructor calls in the FINALLY/CATCH
15446 block; input_location is automatically restored to its original
15447 value by the end of gimplify_expr. */
15448 input_location = UNKNOWN_LOCATION;
15449 eval = cleanup = NULL;
15450 gimplify_and_add (TREE_OPERAND (*expr_p, 0), &eval);
15451 if (TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
15452 && TREE_CODE (TREE_OPERAND (*expr_p, 1)) == EH_ELSE_EXPR)
15453 {
15454 gimple_seq n = NULL, e = NULL;
15455 gimplify_and_add (TREE_OPERAND (TREE_OPERAND (*expr_p, 1),
15456 0), &n);
15457 gimplify_and_add (TREE_OPERAND (TREE_OPERAND (*expr_p, 1),
15458 1), &e);
15459 if (!gimple_seq_empty_p (n) && !gimple_seq_empty_p (e))
15460 {
15461 geh_else *stmt = gimple_build_eh_else (n, e);
15462 gimple_seq_add_stmt (&cleanup, stmt);
15463 }
15464 }
15465 else
15466 gimplify_and_add (TREE_OPERAND (*expr_p, 1), &cleanup);
15467 /* Don't create bogus GIMPLE_TRY with empty cleanup. */
15468 if (gimple_seq_empty_p (cleanup))
15469 {
15470 gimple_seq_add_seq (pre_p, eval);
15471 ret = GS_ALL_DONE;
15472 break;
15473 }
15474 try_ = gimple_build_try (eval, cleanup,
15475 TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
15476 ? GIMPLE_TRY_FINALLY
15477 : GIMPLE_TRY_CATCH);
15478 if (EXPR_HAS_LOCATION (save_expr))
15479 gimple_set_location (try_, EXPR_LOCATION (save_expr));
15480 else if (LOCATION_LOCUS (saved_location) != UNKNOWN_LOCATION)
15481 gimple_set_location (try_, saved_location);
15482 if (TREE_CODE (*expr_p) == TRY_CATCH_EXPR)
15483 gimple_try_set_catch_is_cleanup (try_,
15484 TRY_CATCH_IS_CLEANUP (*expr_p));
15485 gimplify_seq_add_stmt (pre_p, try_);
15486 ret = GS_ALL_DONE;
15487 break;
15488 }
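/* Schematically, for a C++ block "{ T d; body; }" where T has a
   destructor, the TRY_FINALLY_EXPR built by the front end becomes

       try
         {
           <body>
         }
       finally
         {
           T::~T (&d);
         }

   emitted as a single GIMPLE_TRY tuple on PRE_P.  */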
15489
15490 case CLEANUP_POINT_EXPR:
15491 ret = gimplify_cleanup_point_expr (expr_p, pre_p);
15492 break;
15493
15494 case TARGET_EXPR:
15495 ret = gimplify_target_expr (expr_p, pre_p, post_p);
15496 break;
15497
15498 case CATCH_EXPR:
15499 {
15500 gimple *c;
15501 gimple_seq handler = NULL;
15502 gimplify_and_add (CATCH_BODY (*expr_p), &handler);
15503 c = gimple_build_catch (CATCH_TYPES (*expr_p), handler);
15504 gimplify_seq_add_stmt (pre_p, c);
15505 ret = GS_ALL_DONE;
15506 break;
15507 }
15508
15509 case EH_FILTER_EXPR:
15510 {
15511 gimple *ehf;
15512 gimple_seq failure = NULL;
15513
15514 gimplify_and_add (EH_FILTER_FAILURE (*expr_p), &failure);
15515 ehf = gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p), failure);
15516 copy_warning (ehf, *expr_p);
15517 gimplify_seq_add_stmt (pre_p, ehf);
15518 ret = GS_ALL_DONE;
15519 break;
15520 }
15521
15522 case OBJ_TYPE_REF:
15523 {
15524 enum gimplify_status r0, r1;
15525 r0 = gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p), pre_p,
15526 post_p, is_gimple_val, fb_rvalue);
15527 r1 = gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p), pre_p,
15528 post_p, is_gimple_val, fb_rvalue);
15529 TREE_SIDE_EFFECTS (*expr_p) = 0;
15530 ret = MIN (r0, r1);
15531 }
15532 break;
15533
15534 case LABEL_DECL:
15535 /* We get here when taking the address of a label. We mark
15536 the label as "forced"; meaning it can never be removed and
15537 it is a potential target for any computed goto. */
15538 FORCED_LABEL (*expr_p) = 1;
15539 ret = GS_ALL_DONE;
15540 break;
15541
15542 case STATEMENT_LIST:
15543 ret = gimplify_statement_list (expr_p, pre_p);
15544 break;
15545
15546 case WITH_SIZE_EXPR:
15547 {
15548 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
15549 post_p == &internal_post ? NULL : post_p,
15550 gimple_test_f, fallback);
15551 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
15552 is_gimple_val, fb_rvalue);
15553 ret = GS_ALL_DONE;
15554 }
15555 break;
15556
15557 case VAR_DECL:
15558 case PARM_DECL:
15559 ret = gimplify_var_or_parm_decl (expr_p);
15560 break;
15561
15562 case RESULT_DECL:
15563 /* When within an OMP context, notice uses of variables. */
15564 if (gimplify_omp_ctxp)
15565 omp_notice_variable (gimplify_omp_ctxp, *expr_p, true);
15566 ret = GS_ALL_DONE;
15567 break;
15568
15569 case DEBUG_EXPR_DECL:
15570 gcc_unreachable ();
15571
15572 case DEBUG_BEGIN_STMT:
15573 gimplify_seq_add_stmt (pre_p,
15574 gimple_build_debug_begin_stmt
15575 (TREE_BLOCK (*expr_p),
15576 EXPR_LOCATION (*expr_p)));
15577 ret = GS_ALL_DONE;
15578 *expr_p = NULL;
15579 break;
15580
15581 case SSA_NAME:
15582 /* Allow callbacks into the gimplifier during optimization. */
15583 ret = GS_ALL_DONE;
15584 break;
15585
15586 case OMP_PARALLEL:
15587 gimplify_omp_parallel (expr_p, pre_p);
15588 ret = GS_ALL_DONE;
15589 break;
15590
15591 case OMP_TASK:
15592 gimplify_omp_task (expr_p, pre_p);
15593 ret = GS_ALL_DONE;
15594 break;
15595
15596 case OMP_SIMD:
15597 {
15598 /* Temporarily disable into_ssa, as scan_omp_simd
15599 (which calls copy_gimple_seq_and_replace_locals) can't properly
15600 deal with SSA_NAMEs defined outside of the body. */
15601 bool saved_into_ssa = gimplify_ctxp->into_ssa;
15602 gimplify_ctxp->into_ssa = false;
15603 ret = gimplify_omp_for (expr_p, pre_p);
15604 gimplify_ctxp->into_ssa = saved_into_ssa;
15605 break;
15606 }
15607
15608 case OMP_FOR:
15609 case OMP_DISTRIBUTE:
15610 case OMP_TASKLOOP:
15611 case OACC_LOOP:
15612 ret = gimplify_omp_for (expr_p, pre_p);
15613 break;
15614
15615 case OMP_LOOP:
15616 ret = gimplify_omp_loop (expr_p, pre_p);
15617 break;
15618
15619 case OACC_CACHE:
15620 gimplify_oacc_cache (expr_p, pre_p);
15621 ret = GS_ALL_DONE;
15622 break;
15623
15624 case OACC_DECLARE:
15625 gimplify_oacc_declare (expr_p, pre_p);
15626 ret = GS_ALL_DONE;
15627 break;
15628
15629 case OACC_HOST_DATA:
15630 case OACC_DATA:
15631 case OACC_KERNELS:
15632 case OACC_PARALLEL:
15633 case OACC_SERIAL:
15634 case OMP_SCOPE:
15635 case OMP_SECTIONS:
15636 case OMP_SINGLE:
15637 case OMP_TARGET:
15638 case OMP_TARGET_DATA:
15639 case OMP_TEAMS:
15640 gimplify_omp_workshare (expr_p, pre_p);
15641 ret = GS_ALL_DONE;
15642 break;
15643
15644 case OACC_ENTER_DATA:
15645 case OACC_EXIT_DATA:
15646 case OACC_UPDATE:
15647 case OMP_TARGET_UPDATE:
15648 case OMP_TARGET_ENTER_DATA:
15649 case OMP_TARGET_EXIT_DATA:
15650 gimplify_omp_target_update (expr_p, pre_p);
15651 ret = GS_ALL_DONE;
15652 break;
15653
15654 case OMP_SECTION:
15655 case OMP_MASTER:
15656 case OMP_MASKED:
15657 case OMP_ORDERED:
15658 case OMP_CRITICAL:
15659 case OMP_SCAN:
15660 {
15661 gimple_seq body = NULL;
15662 gimple *g;
15663 bool saved_in_omp_construct = in_omp_construct;
15664
15665 in_omp_construct = true;
15666 gimplify_and_add (OMP_BODY (*expr_p), &body);
15667 in_omp_construct = saved_in_omp_construct;
15668 switch (TREE_CODE (*expr_p))
15669 {
15670 case OMP_SECTION:
15671 g = gimple_build_omp_section (body);
15672 break;
15673 case OMP_MASTER:
15674 g = gimple_build_omp_master (body);
15675 break;
15676 case OMP_ORDERED:
15677 g = gimplify_omp_ordered (*expr_p, body);
15678 break;
15679 case OMP_MASKED:
15680 gimplify_scan_omp_clauses (&OMP_MASKED_CLAUSES (*expr_p),
15681 pre_p, ORT_WORKSHARE, OMP_MASKED);
15682 gimplify_adjust_omp_clauses (pre_p, body,
15683 &OMP_MASKED_CLAUSES (*expr_p),
15684 OMP_MASKED);
15685 g = gimple_build_omp_masked (body,
15686 OMP_MASKED_CLAUSES (*expr_p));
15687 break;
15688 case OMP_CRITICAL:
15689 gimplify_scan_omp_clauses (&OMP_CRITICAL_CLAUSES (*expr_p),
15690 pre_p, ORT_WORKSHARE, OMP_CRITICAL);
15691 gimplify_adjust_omp_clauses (pre_p, body,
15692 &OMP_CRITICAL_CLAUSES (*expr_p),
15693 OMP_CRITICAL);
15694 g = gimple_build_omp_critical (body,
15695 OMP_CRITICAL_NAME (*expr_p),
15696 OMP_CRITICAL_CLAUSES (*expr_p));
15697 break;
15698 case OMP_SCAN:
15699 gimplify_scan_omp_clauses (&OMP_SCAN_CLAUSES (*expr_p),
15700 pre_p, ORT_WORKSHARE, OMP_SCAN);
15701 gimplify_adjust_omp_clauses (pre_p, body,
15702 &OMP_SCAN_CLAUSES (*expr_p),
15703 OMP_SCAN);
15704 g = gimple_build_omp_scan (body, OMP_SCAN_CLAUSES (*expr_p));
15705 break;
15706 default:
15707 gcc_unreachable ();
15708 }
15709 gimplify_seq_add_stmt (pre_p, g);
15710 ret = GS_ALL_DONE;
15711 break;
15712 }
15713
15714 case OMP_TASKGROUP:
15715 {
15716 gimple_seq body = NULL;
15717
15718 tree *pclauses = &OMP_TASKGROUP_CLAUSES (*expr_p);
15719 bool saved_in_omp_construct = in_omp_construct;
15720 gimplify_scan_omp_clauses (pclauses, pre_p, ORT_TASKGROUP,
15721 OMP_TASKGROUP);
15722 gimplify_adjust_omp_clauses (pre_p, NULL, pclauses, OMP_TASKGROUP);
15723
15724 in_omp_construct = true;
15725 gimplify_and_add (OMP_BODY (*expr_p), &body);
15726 in_omp_construct = saved_in_omp_construct;
15727 gimple_seq cleanup = NULL;
15728 tree fn = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_END);
15729 gimple *g = gimple_build_call (fn, 0);
15730 gimple_seq_add_stmt (&cleanup, g);
15731 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
15732 body = NULL;
15733 gimple_seq_add_stmt (&body, g);
15734 g = gimple_build_omp_taskgroup (body, *pclauses);
15735 gimplify_seq_add_stmt (pre_p, g);
15736 ret = GS_ALL_DONE;
15737 break;
15738 }
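/* The resulting shape is a GIMPLE_OMP_TASKGROUP whose body is a
   GIMPLE_TRY_FINALLY with the user's code as the protected sequence
   and a call to GOMP_taskgroup_end as the cleanup, so the runtime
   call is made on every exit from the construct.  */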
15739
15740 case OMP_ATOMIC:
15741 case OMP_ATOMIC_READ:
15742 case OMP_ATOMIC_CAPTURE_OLD:
15743 case OMP_ATOMIC_CAPTURE_NEW:
15744 ret = gimplify_omp_atomic (expr_p, pre_p);
15745 break;
15746
15747 case TRANSACTION_EXPR:
15748 ret = gimplify_transaction (expr_p, pre_p);
15749 break;
15750
15751 case TRUTH_AND_EXPR:
15752 case TRUTH_OR_EXPR:
15753 case TRUTH_XOR_EXPR:
15754 {
15755 tree orig_type = TREE_TYPE (*expr_p);
15756 tree new_type, xop0, xop1;
15757 *expr_p = gimple_boolify (*expr_p);
15758 new_type = TREE_TYPE (*expr_p);
15759 if (!useless_type_conversion_p (orig_type, new_type))
15760 {
15761 *expr_p = fold_convert_loc (input_location, orig_type, *expr_p);
15762 ret = GS_OK;
15763 break;
15764 }
15765
15766 /* Boolified binary truth expressions are semantically equivalent
15767 to bitwise binary expressions. Canonicalize them to the
15768 bitwise variant. */
15769 switch (TREE_CODE (*expr_p))
15770 {
15771 case TRUTH_AND_EXPR:
15772 TREE_SET_CODE (*expr_p, BIT_AND_EXPR);
15773 break;
15774 case TRUTH_OR_EXPR:
15775 TREE_SET_CODE (*expr_p, BIT_IOR_EXPR);
15776 break;
15777 case TRUTH_XOR_EXPR:
15778 TREE_SET_CODE (*expr_p, BIT_XOR_EXPR);
15779 break;
15780 default:
15781 break;
15782 }
15783 /* Now make sure that the operands have types compatible with
15784 the expression's new_type. */
15785 xop0 = TREE_OPERAND (*expr_p, 0);
15786 xop1 = TREE_OPERAND (*expr_p, 1);
15787 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop0)))
15788 TREE_OPERAND (*expr_p, 0) = fold_convert_loc (input_location,
15789 new_type,
15790 xop0);
15791 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop1)))
15792 TREE_OPERAND (*expr_p, 1) = fold_convert_loc (input_location,
15793 new_type,
15794 xop1);
15795 /* Continue classified as tcc_binary. */
15796 goto expr_2;
15797 }
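/* For example, after boolification a TRUTH_AND_EXPR of two _Bool
   operands is recoded in place as its bitwise equivalent:

       TRUTH_AND_EXPR <a, b>  ==>  BIT_AND_EXPR <a, b>

   This is valid because both operands are known to be 0 or 1, and it
   lets the expr_2 path below emit an ordinary binary assignment.  */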
15798
15799 case VEC_COND_EXPR:
15800 goto expr_3;
15801
15802 case VEC_PERM_EXPR:
15803 /* Classified as tcc_expression. */
15804 goto expr_3;
15805
15806 case BIT_INSERT_EXPR:
15807 /* Argument 3 is a constant. */
15808 goto expr_2;
15809
15810 case POINTER_PLUS_EXPR:
15811 {
15812 enum gimplify_status r0, r1;
15813 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
15814 post_p, is_gimple_val, fb_rvalue);
15815 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
15816 post_p, is_gimple_val, fb_rvalue);
15817 recalculate_side_effects (*expr_p);
15818 ret = MIN (r0, r1);
15819 break;
15820 }
15821
15822 default:
15823 switch (TREE_CODE_CLASS (TREE_CODE (*expr_p)))
15824 {
15825 case tcc_comparison:
15826 /* Handle comparison of aggregate objects of non-scalar mode
15827 with a call to memcmp. It would be nice to only have to do
15828 this for variable-sized objects, but then we'd have to allow
15829 the same nest of reference nodes we allow for MODIFY_EXPR and
15830 that's too complex.
15831
15832 Compare scalar mode aggregates as scalar mode values. Using
15833 memcmp for them would be very inefficient at best, and is
15834 plain wrong if bitfields are involved. */
15835 if (error_operand_p (TREE_OPERAND (*expr_p, 1)))
15836 ret = GS_ERROR;
15837 else
15838 {
15839 tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1));
15840
15841 /* Vector comparisons need no boolification. */
15842 if (TREE_CODE (type) == VECTOR_TYPE)
15843 goto expr_2;
15844 else if (!AGGREGATE_TYPE_P (type))
15845 {
15846 tree org_type = TREE_TYPE (*expr_p);
15847 *expr_p = gimple_boolify (*expr_p);
15848 if (!useless_type_conversion_p (org_type,
15849 TREE_TYPE (*expr_p)))
15850 {
15851 *expr_p = fold_convert_loc (input_location,
15852 org_type, *expr_p);
15853 ret = GS_OK;
15854 }
15855 else
15856 goto expr_2;
15857 }
15858 else if (TYPE_MODE (type) != BLKmode)
15859 ret = gimplify_scalar_mode_aggregate_compare (expr_p);
15860 else
15861 ret = gimplify_variable_sized_compare (expr_p);
15862 }
15863 break;
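/* E.g. an equality test of two BLKmode structures S1 and S2 is
   lowered by gimplify_variable_sized_compare to roughly

       memcmp (&s1, &s2, sizeof (s1)) == 0

   while an aggregate whose mode is scalar (a struct that fits in an
   integer mode) is compared as a value of that mode by
   gimplify_scalar_mode_aggregate_compare.  */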
15864
15865 /* If *EXPR_P does not need to be special-cased, handle it
15866 according to its class. */
15867 case tcc_unary:
15868 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
15869 post_p, is_gimple_val, fb_rvalue);
15870 break;
15871
15872 case tcc_binary:
15873 expr_2:
15874 {
15875 enum gimplify_status r0, r1;
15876
15877 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
15878 post_p, is_gimple_val, fb_rvalue);
15879 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
15880 post_p, is_gimple_val, fb_rvalue);
15881
15882 ret = MIN (r0, r1);
15883 break;
15884 }
15885
15886 expr_3:
15887 {
15888 enum gimplify_status r0, r1, r2;
15889
15890 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
15891 post_p, is_gimple_val, fb_rvalue);
15892 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
15893 post_p, is_gimple_val, fb_rvalue);
15894 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
15895 post_p, is_gimple_val, fb_rvalue);
15896
15897 ret = MIN (MIN (r0, r1), r2);
15898 break;
15899 }
15900
15901 case tcc_declaration:
15902 case tcc_constant:
15903 ret = GS_ALL_DONE;
15904 goto dont_recalculate;
15905
15906 default:
15907 gcc_unreachable ();
15908 }
15909
15910 recalculate_side_effects (*expr_p);
15911
15912 dont_recalculate:
15913 break;
15914 }
15915
15916 gcc_assert (*expr_p || ret != GS_OK);
15917 }
15918 while (ret == GS_OK);
15919
15920 /* If we encountered an error_mark somewhere nested inside, either
15921 stub out the statement or propagate the error back out. */
15922 if (ret == GS_ERROR)
15923 {
15924 if (is_statement)
15925 *expr_p = NULL;
15926 goto out;
15927 }
15928
15929 /* This was only valid as a return value from the langhook, which
15930 we handled. Make sure it doesn't escape from any other context. */
15931 gcc_assert (ret != GS_UNHANDLED);
15932
15933 if (fallback == fb_none && *expr_p && !is_gimple_stmt (*expr_p))
15934 {
15935 /* We aren't looking for a value, and we don't have a valid
15936 statement. If it doesn't have side-effects, throw it away.
15937 We can also get here with code such as "*&&L;", where L is
15938 a LABEL_DECL that is marked as FORCED_LABEL. */
15939 if (TREE_CODE (*expr_p) == LABEL_DECL
15940 || !TREE_SIDE_EFFECTS (*expr_p))
15941 *expr_p = NULL;
15942 else if (!TREE_THIS_VOLATILE (*expr_p))
15943 {
15944 /* This is probably a _REF that contains something nested that
15945 has side effects. Recurse through the operands to find it. */
15946 enum tree_code code = TREE_CODE (*expr_p);
15947
15948 switch (code)
15949 {
15950 case COMPONENT_REF:
15951 case REALPART_EXPR:
15952 case IMAGPART_EXPR:
15953 case VIEW_CONVERT_EXPR:
15954 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
15955 gimple_test_f, fallback);
15956 break;
15957
15958 case ARRAY_REF:
15959 case ARRAY_RANGE_REF:
15960 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
15961 gimple_test_f, fallback);
15962 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
15963 gimple_test_f, fallback);
15964 break;
15965
15966 default:
15967 /* Anything else with side-effects must be converted to
15968 a valid statement before we get here. */
15969 gcc_unreachable ();
15970 }
15971
15972 *expr_p = NULL;
15973 }
15974 else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p))
15975 && TYPE_MODE (TREE_TYPE (*expr_p)) != BLKmode
15976 && !is_empty_type (TREE_TYPE (*expr_p)))
15977 {
15978 /* Historically, the compiler has treated a bare reference
15979 to a non-BLKmode volatile lvalue as forcing a load. */
15980 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p));
15981
15982 /* Normally, we do not want to create a temporary for a
15983 TREE_ADDRESSABLE type because such a type should not be
15984 copied by bitwise-assignment. However, we make an
15985 exception here, as all we are doing here is ensuring that
15986 we read the bytes that make up the type. We use
15987 create_tmp_var_raw because create_tmp_var will abort when
15988 given a TREE_ADDRESSABLE type. */
15989 tree tmp = create_tmp_var_raw (type, "vol");
15990 gimple_add_tmp_var (tmp);
15991 gimplify_assign (tmp, *expr_p, pre_p);
15992 *expr_p = NULL;
15993 }
15994 else
15995 /* We can't do anything useful with a volatile reference to
15996 an incomplete type, so just throw it away. Likewise for
15997 a BLKmode type, since any implicit inner load should
15998 already have been turned into an explicit one by the
15999 gimplification process. */
16000 *expr_p = NULL;
16001 }
16002
16003 /* If we are gimplifying at the statement level, we're done. Tack
16004 everything together and return. */
16005 if (fallback == fb_none || is_statement)
16006 {
16007 /* Since *EXPR_P has been converted into a GIMPLE tuple, clear
16008 it out for GC to reclaim it. */
16009 *expr_p = NULL_TREE;
16010
16011 if (!gimple_seq_empty_p (internal_pre)
16012 || !gimple_seq_empty_p (internal_post))
16013 {
16014 gimplify_seq_add_seq (&internal_pre, internal_post);
16015 gimplify_seq_add_seq (pre_p, internal_pre);
16016 }
16017
16018 /* The result of gimplifying *EXPR_P is going to be the last few
16019 statements in *PRE_P and *POST_P. Add location information
16020 to all the statements that were added by the gimplification
16021 helpers. */
16022 if (!gimple_seq_empty_p (*pre_p))
16023 annotate_all_with_location_after (*pre_p, pre_last_gsi, input_location);
16024
16025 if (!gimple_seq_empty_p (*post_p))
16026 annotate_all_with_location_after (*post_p, post_last_gsi,
16027 input_location);
16028
16029 goto out;
16030 }
16031
16032 #ifdef ENABLE_GIMPLE_CHECKING
16033 if (*expr_p)
16034 {
16035 enum tree_code code = TREE_CODE (*expr_p);
16036 /* These expressions should already be in gimple IR form. */
16037 gcc_assert (code != MODIFY_EXPR
16038 && code != ASM_EXPR
16039 && code != BIND_EXPR
16040 && code != CATCH_EXPR
16041 && (code != COND_EXPR || gimplify_ctxp->allow_rhs_cond_expr)
16042 && code != EH_FILTER_EXPR
16043 && code != GOTO_EXPR
16044 && code != LABEL_EXPR
16045 && code != LOOP_EXPR
16046 && code != SWITCH_EXPR
16047 && code != TRY_FINALLY_EXPR
16048 && code != EH_ELSE_EXPR
16049 && code != OACC_PARALLEL
16050 && code != OACC_KERNELS
16051 && code != OACC_SERIAL
16052 && code != OACC_DATA
16053 && code != OACC_HOST_DATA
16054 && code != OACC_DECLARE
16055 && code != OACC_UPDATE
16056 && code != OACC_ENTER_DATA
16057 && code != OACC_EXIT_DATA
16058 && code != OACC_CACHE
16059 && code != OMP_CRITICAL
16060 && code != OMP_FOR
16061 && code != OACC_LOOP
16062 && code != OMP_MASTER
16063 && code != OMP_MASKED
16064 && code != OMP_TASKGROUP
16065 && code != OMP_ORDERED
16066 && code != OMP_PARALLEL
16067 && code != OMP_SCAN
16068 && code != OMP_SECTIONS
16069 && code != OMP_SECTION
16070 && code != OMP_SINGLE
16071 && code != OMP_SCOPE);
16072 }
16073 #endif
16074
16075 /* Otherwise we're gimplifying a subexpression, so the resulting
16076 value is interesting. If it's a valid operand that matches
16077 GIMPLE_TEST_F, we're done. Unless we are handling some
16078 post-effects internally; if that's the case, we need to copy into
16079 a temporary before adding the post-effects to POST_P. */
16080 if (gimple_seq_empty_p (internal_post) && (*gimple_test_f) (*expr_p))
16081 goto out;
16082
16083 /* Otherwise, we need to create a new temporary for the gimplified
16084 expression. */
16085
16086 /* We can't return an lvalue if we have an internal postqueue. The
16087 object the lvalue refers to would (probably) be modified by the
16088 postqueue; we need to copy the value out first, which means an
16089 rvalue. */
16090 if ((fallback & fb_lvalue)
16091 && gimple_seq_empty_p (internal_post)
16092 && is_gimple_addressable (*expr_p))
16093 {
16094 /* An lvalue will do. Take the address of the expression, store it
16095 in a temporary, and replace the expression with a MEM_REF of
16096 that temporary. */
16097 tree ref_alias_type = reference_alias_ptr_type (*expr_p);
16098 unsigned int ref_align = get_object_alignment (*expr_p);
16099 tree ref_type = TREE_TYPE (*expr_p);
16100 tmp = build_fold_addr_expr_loc (input_location, *expr_p);
16101 gimplify_expr (&tmp, pre_p, post_p, is_gimple_reg, fb_rvalue);
16102 if (TYPE_ALIGN (ref_type) != ref_align)
16103 ref_type = build_aligned_type (ref_type, ref_align);
16104 *expr_p = build2 (MEM_REF, ref_type,
16105 tmp, build_zero_cst (ref_alias_type));
16106 }
16107 else if ((fallback & fb_rvalue) && is_gimple_reg_rhs_or_call (*expr_p))
16108 {
16109 /* An rvalue will do. Assign the gimplified expression into a
16110 new temporary TMP and replace the original expression with
16111 TMP. First, make sure that the expression has a type so that
16112 it can be assigned into a temporary. */
16113 gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p)));
16114 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
16115 }
16116 else
16117 {
16118 #ifdef ENABLE_GIMPLE_CHECKING
16119 if (!(fallback & fb_mayfail))
16120 {
16121 fprintf (stderr, "gimplification failed:\n");
16122 print_generic_expr (stderr, *expr_p);
16123 debug_tree (*expr_p);
16124 internal_error ("gimplification failed");
16125 }
16126 #endif
16127 gcc_assert (fallback & fb_mayfail);
16128
16129 /* If this is an asm statement, and the user asked for the
16130 impossible, don't die. Fail and let gimplify_asm_expr
16131 issue an error. */
16132 ret = GS_ERROR;
16133 goto out;
16134 }
16135
16136 /* Make sure the temporary matches our predicate. */
16137 gcc_assert ((*gimple_test_f) (*expr_p));
16138
16139 if (!gimple_seq_empty_p (internal_post))
16140 {
16141 annotate_all_with_location (internal_post, input_location);
16142 gimplify_seq_add_seq (pre_p, internal_post);
16143 }
16144
16145 out:
16146 input_location = saved_location;
16147 return ret;
16148 }
16149
16150 /* Like gimplify_expr, but make sure the gimplified result is not itself
16151 an SSA name (but a decl where it would otherwise be one). Temporaries
16152 required for evaluating *EXPR_P may still be SSA names. */
16153
16154 static enum gimplify_status
16155 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
16156 bool (*gimple_test_f) (tree), fallback_t fallback,
16157 bool allow_ssa)
16158 {
16159 enum gimplify_status ret = gimplify_expr (expr_p, pre_p, post_p,
16160 gimple_test_f, fallback);
16161 if (! allow_ssa
16162 && TREE_CODE (*expr_p) == SSA_NAME)
16163 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, NULL, false);
16164 return ret;
16165 }
16166
16167 /* Look through TYPE for variable-sized objects and gimplify each such
16168 size that we find. Add to LIST_P any statements generated. */
16169
16170 void
16171 gimplify_type_sizes (tree type, gimple_seq *list_p)
16172 {
16173 if (type == NULL || type == error_mark_node)
16174 return;
16175
16176 const bool ignored_p
16177 = TYPE_NAME (type)
16178 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
16179 && DECL_IGNORED_P (TYPE_NAME (type));
16180 tree t;
16181
16182 /* We first do the main variant, then copy into any other variants. */
16183 type = TYPE_MAIN_VARIANT (type);
16184
16185 /* Avoid infinite recursion. */
16186 if (TYPE_SIZES_GIMPLIFIED (type))
16187 return;
16188
16189 TYPE_SIZES_GIMPLIFIED (type) = 1;
16190
16191 switch (TREE_CODE (type))
16192 {
16193 case INTEGER_TYPE:
16194 case ENUMERAL_TYPE:
16195 case BOOLEAN_TYPE:
16196 case REAL_TYPE:
16197 case FIXED_POINT_TYPE:
16198 gimplify_one_sizepos (&TYPE_MIN_VALUE (type), list_p);
16199 gimplify_one_sizepos (&TYPE_MAX_VALUE (type), list_p);
16200
16201 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
16202 {
16203 TYPE_MIN_VALUE (t) = TYPE_MIN_VALUE (type);
16204 TYPE_MAX_VALUE (t) = TYPE_MAX_VALUE (type);
16205 }
16206 break;
16207
16208 case ARRAY_TYPE:
16209 /* These types may not have declarations, so handle them here. */
16210 gimplify_type_sizes (TREE_TYPE (type), list_p);
16211 gimplify_type_sizes (TYPE_DOMAIN (type), list_p);
16212 /* Ensure VLA bounds aren't removed; at -O0 they should be variables
16213 with assigned stack slots, and at -O1+ with -g they should be
16214 tracked by VTA. */
16215 if (!ignored_p
16216 && TYPE_DOMAIN (type)
16217 && INTEGRAL_TYPE_P (TYPE_DOMAIN (type)))
16218 {
16219 t = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
16220 if (t && VAR_P (t) && DECL_ARTIFICIAL (t))
16221 DECL_IGNORED_P (t) = 0;
16222 t = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
16223 if (t && VAR_P (t) && DECL_ARTIFICIAL (t))
16224 DECL_IGNORED_P (t) = 0;
16225 }
16226 break;
16227
16228 case RECORD_TYPE:
16229 case UNION_TYPE:
16230 case QUAL_UNION_TYPE:
16231 for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
16232 if (TREE_CODE (field) == FIELD_DECL)
16233 {
16234 gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), list_p);
16235 /* Likewise, ensure variable offsets aren't removed. */
16236 if (!ignored_p
16237 && (t = DECL_FIELD_OFFSET (field))
16238 && VAR_P (t)
16239 && DECL_ARTIFICIAL (t))
16240 DECL_IGNORED_P (t) = 0;
16241 gimplify_one_sizepos (&DECL_SIZE (field), list_p);
16242 gimplify_one_sizepos (&DECL_SIZE_UNIT (field), list_p);
16243 gimplify_type_sizes (TREE_TYPE (field), list_p);
16244 }
16245 break;
16246
16247 case POINTER_TYPE:
16248 case REFERENCE_TYPE:
16249 /* We used to recurse on the pointed-to type here, which turned out to
16250 be incorrect because its definition might refer to variables not
16251 yet initialized at this point if a forward declaration is involved.
16252
16253 It was actually useful for anonymous pointed-to types to ensure
16254 that the sizes evaluation dominates every possible later use of the
16255 values. Restricting to such types here would be safe since there
16256 is no possible forward declaration around, but would introduce an
16257 undesirable middle-end semantic to anonymity. We then defer to
16258 front-ends the responsibility of ensuring that the sizes are
16259 evaluated both early and late enough, e.g. by attaching artificial
16260 type declarations to the tree. */
16261 break;
16262
16263 default:
16264 break;
16265 }
16266
16267 gimplify_one_sizepos (&TYPE_SIZE (type), list_p);
16268 gimplify_one_sizepos (&TYPE_SIZE_UNIT (type), list_p);
16269
16270 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
16271 {
16272 TYPE_SIZE (t) = TYPE_SIZE (type);
16273 TYPE_SIZE_UNIT (t) = TYPE_SIZE_UNIT (type);
16274 TYPE_SIZES_GIMPLIFIED (t) = 1;
16275 }
16276 }
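/* For example, for a C99 VLA declaration "char a[n]" the variable
   domain bound and the variable TYPE_SIZE / TYPE_SIZE_UNIT of the
   array type are gimplified here into temporaries, so each size is
   evaluated exactly once; the results are then copied to all type
   variants, which are marked TYPE_SIZES_GIMPLIFIED to stop the
   recursion.  */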
16277
16278 /* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
16279 a size or position, has had all of its SAVE_EXPRs evaluated.
16280 We add any required statements to *STMT_P. */
16281
16282 void
16283 gimplify_one_sizepos (tree *expr_p, gimple_seq *stmt_p)
16284 {
16285 tree expr = *expr_p;
16286
16287 /* We don't do anything if the value isn't there, is constant, or contains
16288 a PLACEHOLDER_EXPR. We also don't want to do anything if it's already
16289 a VAR_DECL. If it's a VAR_DECL from another function, the gimplifier
16290 will want to replace it with a new variable, but that will cause problems
16291 if this type is from outside the function. It's OK to have that here. */
16292 if (expr == NULL_TREE
16293 || is_gimple_constant (expr)
16294 || TREE_CODE (expr) == VAR_DECL
16295 || CONTAINS_PLACEHOLDER_P (expr))
16296 return;
16297
16298 *expr_p = unshare_expr (expr);
16299
16300 /* SSA names in decl/type fields are a bad idea - they'll get reclaimed
16301 if the def vanishes. */
16302 gimplify_expr (expr_p, stmt_p, NULL, is_gimple_val, fb_rvalue, false);
16303
16304 /* If expr wasn't already is_gimple_sizepos or is_gimple_constant from the
16305 FE, ensure that it is a VAR_DECL; otherwise we might handle some decls
16306 via gimplify_vla_decl even when all their sizes are INTEGER_CSTs. */
16307 if (is_gimple_constant (*expr_p))
16308 *expr_p = get_initialized_tmp_var (*expr_p, stmt_p, NULL, false);
16309 }
16310
16311 /* Gimplify the body of FNDECL and return a GIMPLE_BIND node
16312 containing the sequence of corresponding GIMPLE statements. If DO_PARMS
16313 is true, also gimplify the parameters. */
16314
16315 gbind *
16316 gimplify_body (tree fndecl, bool do_parms)
16317 {
16318 location_t saved_location = input_location;
16319 gimple_seq parm_stmts, parm_cleanup = NULL, seq;
16320 gimple *outer_stmt;
16321 gbind *outer_bind;
16322
16323 timevar_push (TV_TREE_GIMPLIFY);
16324
16325 init_tree_ssa (cfun);
16326
16327 /* Initialize for optimize_insn_for_s{ize,peed}_p possibly called during
16328 gimplification. */
16329 default_rtl_profile ();
16330
16331 gcc_assert (gimplify_ctxp == NULL);
16332 push_gimplify_context (true);
16333
16334 if (flag_openacc || flag_openmp)
16335 {
16336 gcc_assert (gimplify_omp_ctxp == NULL);
16337 if (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (fndecl)))
16338 gimplify_omp_ctxp = new_omp_context (ORT_IMPLICIT_TARGET);
16339 }
16340
16341 /* Unshare most shared trees in the body and in that of any nested functions.
16342 It would seem we don't have to do this for nested functions because
16343 they are supposed to be output and then the outer function gimplified
16344 first, but the g++ front end doesn't always do it that way. */
16345 unshare_body (fndecl);
16346 unvisit_body (fndecl);
16347
16348 /* Make sure input_location isn't set to something weird. */
16349 input_location = DECL_SOURCE_LOCATION (fndecl);
16350
16351 /* Resolve callee-copies. This has to be done before processing
16352 the body so that DECL_VALUE_EXPR gets processed correctly. */
16353 parm_stmts = do_parms ? gimplify_parameters (&parm_cleanup) : NULL;
16354
16355 /* Gimplify the function's body. */
16356 seq = NULL;
16357 gimplify_stmt (&DECL_SAVED_TREE (fndecl), &seq);
16358 outer_stmt = gimple_seq_first_nondebug_stmt (seq);
16359 if (!outer_stmt)
16360 {
16361 outer_stmt = gimple_build_nop ();
16362 gimplify_seq_add_stmt (&seq, outer_stmt);
16363 }
16364
16365 /* The body must contain exactly one statement, a GIMPLE_BIND. If this is
16366 not the case, wrap everything in a GIMPLE_BIND to make it so. */
16367 if (gimple_code (outer_stmt) == GIMPLE_BIND
16368 && (gimple_seq_first_nondebug_stmt (seq)
16369 == gimple_seq_last_nondebug_stmt (seq)))
16370 {
16371 outer_bind = as_a <gbind *> (outer_stmt);
16372 if (gimple_seq_first_stmt (seq) != outer_stmt
16373 || gimple_seq_last_stmt (seq) != outer_stmt)
16374 {
16375 /* If there are debug stmts before or after outer_stmt, move them
16376 inside of outer_bind body. */
16377 gimple_stmt_iterator gsi = gsi_for_stmt (outer_stmt, &seq);
16378 gimple_seq second_seq = NULL;
16379 if (gimple_seq_first_stmt (seq) != outer_stmt
16380 && gimple_seq_last_stmt (seq) != outer_stmt)
16381 {
16382 second_seq = gsi_split_seq_after (gsi);
16383 gsi_remove (&gsi, false);
16384 }
16385 else if (gimple_seq_first_stmt (seq) != outer_stmt)
16386 gsi_remove (&gsi, false);
16387 else
16388 {
16389 gsi_remove (&gsi, false);
16390 second_seq = seq;
16391 seq = NULL;
16392 }
16393 gimple_seq_add_seq_without_update (&seq,
16394 gimple_bind_body (outer_bind));
16395 gimple_seq_add_seq_without_update (&seq, second_seq);
16396 gimple_bind_set_body (outer_bind, seq);
16397 }
16398 }
16399 else
16400 outer_bind = gimple_build_bind (NULL_TREE, seq, NULL);
16401
16402 DECL_SAVED_TREE (fndecl) = NULL_TREE;
16403
16404 /* If we had callee-copies statements, insert them at the beginning
16405 of the function and clear DECL_VALUE_EXPR_P on the parameters. */
16406 if (!gimple_seq_empty_p (parm_stmts))
16407 {
16408 tree parm;
16409
16410 gimplify_seq_add_seq (&parm_stmts, gimple_bind_body (outer_bind));
16411 if (parm_cleanup)
16412 {
16413 gtry *g = gimple_build_try (parm_stmts, parm_cleanup,
16414 GIMPLE_TRY_FINALLY);
16415 parm_stmts = NULL;
16416 gimple_seq_add_stmt (&parm_stmts, g);
16417 }
16418 gimple_bind_set_body (outer_bind, parm_stmts);
16419
16420 for (parm = DECL_ARGUMENTS (current_function_decl);
16421 parm; parm = DECL_CHAIN (parm))
16422 if (DECL_HAS_VALUE_EXPR_P (parm))
16423 {
16424 DECL_HAS_VALUE_EXPR_P (parm) = 0;
16425 DECL_IGNORED_P (parm) = 0;
16426 }
16427 }
16428
16429 if ((flag_openacc || flag_openmp || flag_openmp_simd)
16430 && gimplify_omp_ctxp)
16431 {
16432 delete_omp_context (gimplify_omp_ctxp);
16433 gimplify_omp_ctxp = NULL;
16434 }
16435
16436 pop_gimplify_context (outer_bind);
16437 gcc_assert (gimplify_ctxp == NULL);
16438
16439 if (flag_checking && !seen_error ())
16440 verify_gimple_in_seq (gimple_bind_body (outer_bind));
16441
16442 timevar_pop (TV_TREE_GIMPLIFY);
16443 input_location = saved_location;
16444
16445 return outer_bind;
16446 }
16447
16448 typedef char *char_p; /* For DEF_VEC_P. */
16449
16450 /* Return whether we should exclude FNDECL from instrumentation. */
16451
16452 static bool
16453 flag_instrument_functions_exclude_p (tree fndecl)
16454 {
16455 vec<char_p> *v;
16456
16457 v = (vec<char_p> *) flag_instrument_functions_exclude_functions;
16458 if (v && v->length () > 0)
16459 {
16460 const char *name;
16461 int i;
16462 char *s;
16463
16464 name = lang_hooks.decl_printable_name (fndecl, 1);
16465 FOR_EACH_VEC_ELT (*v, i, s)
16466 if (strstr (name, s) != NULL)
16467 return true;
16468 }
16469
16470 v = (vec<char_p> *) flag_instrument_functions_exclude_files;
16471 if (v && v->length () > 0)
16472 {
16473 const char *name;
16474 int i;
16475 char *s;
16476
16477 name = DECL_SOURCE_FILE (fndecl);
16478 FOR_EACH_VEC_ELT (*v, i, s)
16479 if (strstr (name, s) != NULL)
16480 return true;
16481 }
16482
16483 return false;
16484 }
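/* E.g. with -finstrument-functions-exclude-function-list=foo,bar the
   first vector holds {"foo", "bar"}, and any function whose printable
   name contains one of those strings as a substring is excluded, as is
   any function whose source file matches an entry of
   -finstrument-functions-exclude-file-list in the same way.  */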
16485
16486 /* Entry point to the gimplification pass. FNDECL is the FUNCTION_DECL
16487 node for the function we want to gimplify.
16488
16489 The resulting sequence of GIMPLE statements replaces the tree
16490 body of FNDECL via gimple_set_body. */
16491
16492 void
16493 gimplify_function_tree (tree fndecl)
16494 {
16495 gimple_seq seq;
16496 gbind *bind;
16497
16498 gcc_assert (!gimple_body (fndecl));
16499
16500 if (DECL_STRUCT_FUNCTION (fndecl))
16501 push_cfun (DECL_STRUCT_FUNCTION (fndecl));
16502 else
16503 push_struct_function (fndecl);
16504
16505 /* Tentatively set PROP_gimple_lva here, and reset it in gimplify_va_arg_expr
16506 if necessary. */
16507 cfun->curr_properties |= PROP_gimple_lva;
16508
16509 if (asan_sanitize_use_after_scope ())
16510 asan_poisoned_variables = new hash_set<tree> ();
16511 bind = gimplify_body (fndecl, true);
16512 if (asan_poisoned_variables)
16513 {
16514 delete asan_poisoned_variables;
16515 asan_poisoned_variables = NULL;
16516 }
16517
16518 /* The tree body of the function is no longer needed, replace it
16519 with the new GIMPLE body. */
16520 seq = NULL;
16521 gimple_seq_add_stmt (&seq, bind);
16522 gimple_set_body (fndecl, seq);
16523
16524 /* If we're instrumenting function entry/exit, then prepend the call to
16525 the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
16526 catch the exit hook. */
16527 /* ??? Add some way to ignore exceptions for this TFE. */
16528 if (flag_instrument_function_entry_exit
16529 && !DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl)
16530 /* Do not instrument extern inline functions. */
16531 && !(DECL_DECLARED_INLINE_P (fndecl)
16532 && DECL_EXTERNAL (fndecl)
16533 && DECL_DISREGARD_INLINE_LIMITS (fndecl))
16534 && !flag_instrument_functions_exclude_p (fndecl))
16535 {
16536 tree x;
16537 gbind *new_bind;
16538 gimple *tf;
16539 gimple_seq cleanup = NULL, body = NULL;
16540 tree tmp_var, this_fn_addr;
16541 gcall *call;
16542
16543 /* The instrumentation hooks aren't going to call the instrumented
16544 function and the address they receive is expected to be matchable
16545 against symbol addresses. Make sure we don't create a trampoline,
16546 in case the current function is nested. */
16547 this_fn_addr = build_fold_addr_expr (current_function_decl);
16548 TREE_NO_TRAMPOLINE (this_fn_addr) = 1;
16549
16550 x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
16551 call = gimple_build_call (x, 1, integer_zero_node);
16552 tmp_var = create_tmp_var (ptr_type_node, "return_addr");
16553 gimple_call_set_lhs (call, tmp_var);
16554 gimplify_seq_add_stmt (&cleanup, call);
16555 x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_EXIT);
16556 call = gimple_build_call (x, 2, this_fn_addr, tmp_var);
16557 gimplify_seq_add_stmt (&cleanup, call);
16558 tf = gimple_build_try (seq, cleanup, GIMPLE_TRY_FINALLY);
16559
16560 x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
16561 call = gimple_build_call (x, 1, integer_zero_node);
16562 tmp_var = create_tmp_var (ptr_type_node, "return_addr");
16563 gimple_call_set_lhs (call, tmp_var);
16564 gimplify_seq_add_stmt (&body, call);
16565 x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_ENTER);
16566 call = gimple_build_call (x, 2, this_fn_addr, tmp_var);
16567 gimplify_seq_add_stmt (&body, call);
16568 gimplify_seq_add_stmt (&body, tf);
16569 new_bind = gimple_build_bind (NULL, body, NULL);
16570
16571 /* Replace the current function body with the body
16572 wrapped in the try/finally TF. */
16573 seq = NULL;
16574 gimple_seq_add_stmt (&seq, new_bind);
16575 gimple_set_body (fndecl, seq);
16576 bind = new_bind;
16577 }
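/* At this point the instrumented body has the shape

       return_addr = __builtin_return_address (0);
       __cyg_profile_func_enter (this_fn_addr, return_addr);
       try
         {
           <original body>
         }
       finally
         {
           return_addr = __builtin_return_address (0);
           __cyg_profile_func_exit (this_fn_addr, return_addr);
         }

   so the exit hook runs on every normal exit from the function.  */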
16578
16579 if (sanitize_flags_p (SANITIZE_THREAD)
16580 && param_tsan_instrument_func_entry_exit)
16581 {
16582 gcall *call = gimple_build_call_internal (IFN_TSAN_FUNC_EXIT, 0);
16583 gimple *tf = gimple_build_try (seq, call, GIMPLE_TRY_FINALLY);
16584 gbind *new_bind = gimple_build_bind (NULL, tf, NULL);
16585 /* Replace the current function body with the body
16586 wrapped in the try/finally TF. */
16587 seq = NULL;
16588 gimple_seq_add_stmt (&seq, new_bind);
16589 gimple_set_body (fndecl, seq);
16590 }
16591
16592 DECL_SAVED_TREE (fndecl) = NULL_TREE;
16593 cfun->curr_properties |= PROP_gimple_any;
16594
16595 pop_cfun ();
16596
16597 dump_function (TDI_gimple, fndecl);
16598 }
16599
16600 /* Return a dummy expression of type TYPE in order to keep going after an
16601 error. */
16602
16603 static tree
16604 dummy_object (tree type)
16605 {
16606 tree t = build_int_cst (build_pointer_type (type), 0);
16607 return build2 (MEM_REF, type, t, t);
16608 }
16609
16610 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
16611 builtin function, but a very special sort of operator. */
16612
16613 enum gimplify_status
16614 gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p,
16615 gimple_seq *post_p ATTRIBUTE_UNUSED)
16616 {
16617 tree promoted_type, have_va_type;
16618 tree valist = TREE_OPERAND (*expr_p, 0);
16619 tree type = TREE_TYPE (*expr_p);
16620 tree t, tag, aptag;
16621 location_t loc = EXPR_LOCATION (*expr_p);
16622
16623 /* Verify that valist is of the proper type. */
16624 have_va_type = TREE_TYPE (valist);
16625 if (have_va_type == error_mark_node)
16626 return GS_ERROR;
16627 have_va_type = targetm.canonical_va_list_type (have_va_type);
16628 if (have_va_type == NULL_TREE
16629 && POINTER_TYPE_P (TREE_TYPE (valist)))
16630 /* Handle 'Case 1: Not an array type' from c-common.cc/build_va_arg. */
16631 have_va_type
16632 = targetm.canonical_va_list_type (TREE_TYPE (TREE_TYPE (valist)));
16633 gcc_assert (have_va_type != NULL_TREE);
16634
16635 /* Generate a diagnostic for requesting data of a type that cannot
16636 be passed through `...' due to type promotion at the call site. */
16637 if ((promoted_type = lang_hooks.types.type_promotes_to (type))
16638 != type)
16639 {
16640 static bool gave_help;
16641 bool warned;
16642 /* Use the expansion point to handle cases such as passing bool (defined
16643 in a system header) through `...'. */
16644 location_t xloc
16645 = expansion_point_location_if_in_system_header (loc);
16646
16647 /* Unfortunately, this is merely undefined, rather than a constraint
16648 violation, so we cannot make this an error. If this call is never
16649 executed, the program is still strictly conforming. */
16650 auto_diagnostic_group d;
16651 warned = warning_at (xloc, 0,
16652 "%qT is promoted to %qT when passed through %<...%>",
16653 type, promoted_type);
16654 if (!gave_help && warned)
16655 {
16656 gave_help = true;
16657 inform (xloc, "(so you should pass %qT not %qT to %<va_arg%>)",
16658 promoted_type, type);
16659 }
16660
16661 /* We can, however, treat "undefined" any way we please.
16662 Call abort to encourage the user to fix the program. */
16663 if (warned)
16664 inform (xloc, "if this code is reached, the program will abort");
16665 /* Before the abort, allow the evaluation of the va_list
16666 expression to exit or longjmp. */
16667 gimplify_and_add (valist, pre_p);
16668 t = build_call_expr_loc (loc,
16669 builtin_decl_implicit (BUILT_IN_TRAP), 0);
16670 gimplify_and_add (t, pre_p);
16671
16672 /* This is dead code, but go ahead and finish so that the
16673 mode of the result comes out right. */
16674 *expr_p = dummy_object (type);
16675 return GS_ALL_DONE;
16676 }
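/* For example, "va_arg (ap, char)" is diagnosed above: a char
   argument is promoted to int when passed through "...", so the
   requested type can never match the promoted one; likewise float
   promotes to double.  */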
16677
16678 tag = build_int_cst (build_pointer_type (type), 0);
16679 aptag = build_int_cst (TREE_TYPE (valist), 0);
16680
16681 *expr_p = build_call_expr_internal_loc (loc, IFN_VA_ARG, type, 3,
16682 valist, tag, aptag);
16683
16684 /* Clear the tentatively set PROP_gimple_lva, to indicate that IFN_VA_ARG
16685 needs to be expanded. */
16686 cfun->curr_properties &= ~PROP_gimple_lva;
16687
16688 return GS_OK;
16689 }
16690
16691 /* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.
16692
16693 DST/SRC are the destination and source respectively. You can pass
16694 ungimplified trees in DST or SRC, in which case they will be
16695 converted to a gimple operand if necessary.
16696
16697 This function returns the newly created GIMPLE_ASSIGN tuple. */
16698
16699 gimple *
16700 gimplify_assign (tree dst, tree src, gimple_seq *seq_p)
16701 {
16702 tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
16703 gimplify_and_add (t, seq_p);
16704 ggc_free (t);
16705 return gimple_seq_last_stmt (*seq_p);
16706 }
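/* A typical use builds an assignment from ungimplified operands:

       gimple_seq seq = NULL;
       gimplify_assign (dst, build2 (PLUS_EXPR, type, a, b), &seq);

   which appends whatever statements are needed to evaluate the RHS
   and returns the final GIMPLE_ASSIGN appended to SEQ.  */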
16707
16708 inline hashval_t
16709 gimplify_hasher::hash (const elt_t *p)
16710 {
16711 tree t = p->val;
16712 return iterative_hash_expr (t, 0);
16713 }
16714
16715 inline bool
16716 gimplify_hasher::equal (const elt_t *p1, const elt_t *p2)
16717 {
16718 tree t1 = p1->val;
16719 tree t2 = p2->val;
16720 enum tree_code code = TREE_CODE (t1);
16721
16722 if (TREE_CODE (t2) != code
16723 || TREE_TYPE (t1) != TREE_TYPE (t2))
16724 return false;
16725
16726 if (!operand_equal_p (t1, t2, 0))
16727 return false;
16728
16729 /* Only allow them to compare equal if they also hash equal; otherwise
16730 results are nondeterministic, and we fail bootstrap comparison. */
16731 gcc_checking_assert (hash (p1) == hash (p2));
16732
16733 return true;
16734 }
16735