1 /* Lowering pass for OMP directives. Converts OMP directives into explicit
2 calls to the runtime library (libgomp), data marshalling to implement data
3 sharing and copying clauses, offloading to accelerators, and more.
4
5 Contributed by Diego Novillo <dnovillo@redhat.com>
6
7 Copyright (C) 2005-2022 Free Software Foundation, Inc.
8
9 This file is part of GCC.
10
11 GCC is free software; you can redistribute it and/or modify it under
12 the terms of the GNU General Public License as published by the Free
13 Software Foundation; either version 3, or (at your option) any later
14 version.
15
16 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
17 WARRANTY; without even the implied warranty of MERCHANTABILITY or
18 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
19 for more details.
20
21 You should have received a copy of the GNU General Public License
22 along with GCC; see the file COPYING3. If not see
23 <http://www.gnu.org/licenses/>. */
24
25 #include "config.h"
26 #include "system.h"
27 #include "coretypes.h"
28 #include "backend.h"
29 #include "target.h"
30 #include "tree.h"
31 #include "gimple.h"
32 #include "tree-pass.h"
33 #include "ssa.h"
34 #include "cgraph.h"
35 #include "pretty-print.h"
36 #include "diagnostic-core.h"
37 #include "fold-const.h"
38 #include "stor-layout.h"
39 #include "internal-fn.h"
40 #include "gimple-fold.h"
41 #include "gimplify.h"
42 #include "gimple-iterator.h"
43 #include "gimplify-me.h"
44 #include "gimple-walk.h"
45 #include "tree-iterator.h"
46 #include "tree-inline.h"
47 #include "langhooks.h"
48 #include "tree-dfa.h"
49 #include "tree-ssa.h"
50 #include "splay-tree.h"
51 #include "omp-general.h"
52 #include "omp-low.h"
53 #include "gimple-low.h"
54 #include "alloc-pool.h"
55 #include "symbol-summary.h"
56 #include "tree-nested.h"
57 #include "context.h"
58 #include "gomp-constants.h"
59 #include "gimple-pretty-print.h"
60 #include "stringpool.h"
61 #include "attribs.h"
62 #include "omp-offload.h"
63
64 /* Lowering of OMP parallel and workshare constructs proceeds in two
65 phases. The first phase scans the function looking for OMP statements
66 and then for variables that must be replaced to satisfy data sharing
67 clauses. The second phase expands code for the constructs, as well as
68 re-gimplifying things when variables have been replaced with complex
69 expressions.
70
71 Final code generation is done by pass_expand_omp. The flowgraph is
72 scanned for regions which are then moved to a new
73 function, to be invoked by the thread library, or offloaded. */
74
/* Context structure.  Used to store information about each parallel
   directive in the code.  One omp_context is created per OMP construct
   encountered during scanning; contexts form a tree mirroring construct
   nesting (see the OUTER field).  */

struct omp_context
{
  /* This field must be at the beginning, as we do "inheritance": Some
     callback functions for tree-inline.cc (e.g., omp_copy_decl)
     receive a copy_body_data pointer that is up-casted to an
     omp_context pointer.  */
  copy_body_data cb;

  /* The tree of contexts corresponding to the encountered constructs.  */
  struct omp_context *outer;
  /* The OMP construct statement this context was created for.  */
  gimple *stmt;

  /* Map variables to fields in a structure that allows communication
     between sending and receiving threads.  */
  splay_tree field_map;
  tree record_type;
  tree sender_decl;
  tree receiver_decl;

  /* These are used just by task contexts, if task firstprivate fn is
     needed.  srecord_type is used to communicate from the thread
     that encountered the task construct to task firstprivate fn,
     record_type is allocated by GOMP_task, initialized by task firstprivate
     fn and passed to the task body fn.  */
  splay_tree sfield_map;
  tree srecord_type;

  /* A chain of variables to add to the top-level block surrounding the
     construct.  In the case of a parallel, this is in the child function.  */
  tree block_vars;

  /* Label to which GOMP_cancel{,llation_point} and explicit and implicit
     barriers should jump to during omplower pass.  */
  tree cancel_label;

  /* The sibling GIMPLE_OMP_FOR simd with _simt_ clause or NULL
     otherwise.  */
  gimple *simt_stmt;

  /* For task reductions registered in this context, a vector containing
     the length of the private copies block (if constant, otherwise NULL)
     and then offsets (if constant, otherwise NULL) for each entry.  */
  vec<tree> task_reductions;

  /* A hash map from the reduction clauses to the registered array
     elts.  */
  hash_map<tree, unsigned> *task_reduction_map;

  /* And a hash map from the lastprivate(conditional:) variables to their
     corresponding tracking loop iteration variables.  */
  hash_map<tree, tree> *lastprivate_conditional_map;

  /* And a hash map from the allocate variables to their corresponding
     allocators.  */
  hash_map<tree, tree> *allocate_map;

  /* A tree_list of the reduction clauses in this context.  This is
    only used for checking the consistency of OpenACC reduction
    clauses in scan_omp_for and is not guaranteed to contain a valid
    value outside of this function.  */
  tree local_reduction_clauses;

  /* A tree_list of the reduction clauses in outer contexts.  This is
    only used for checking the consistency of OpenACC reduction
    clauses in scan_omp_for and is not guaranteed to contain a valid
    value outside of this function.  */
  tree outer_reduction_clauses;

  /* Nesting depth of this context.  Used to beautify error messages re
     invalid gotos.  The outermost ctx is depth 1, with depth 0 being
     reserved for the main body of the function.  */
  int depth;

  /* True if this parallel directive is nested within another.  */
  bool is_nested;

  /* True if this construct can be cancelled.  */
  bool cancellable;

  /* True if lower_omp_1 should look up lastprivate conditional in parent
     context.  */
  bool combined_into_simd_safelen1;

  /* True if there is nested scan context with inclusive clause.  */
  bool scan_inclusive;

  /* True if there is nested scan context with exclusive clause.  */
  bool scan_exclusive;

  /* True in the second simd loop of for simd with inscan reductions.  */
  bool for_simd_scan_phase;

  /* True if there is order(concurrent) clause on the construct.  */
  bool order_concurrent;

  /* True if there is bind clause on the construct (i.e. a loop construct).  */
  bool loop_p;

  /* Only used for omp target contexts.  True if a teams construct is
     strictly nested in it.  */
  bool teams_nested_p;

  /* Only used for omp target contexts.  True if an OpenMP construct other
     than teams is strictly nested in it.  */
  bool nonteams_nested_p;

  /* Candidates for adjusting OpenACC privatization level.  */
  vec<tree> oacc_privatization_candidates;
};
187
/* Splay tree mapping each OMP statement to its omp_context (populated by
   new_omp_context).  */
static splay_tree all_contexts;
/* Nesting depth counters, presumably maintained while scanning task/parallel
   regions and target regions respectively -- TODO confirm against the
   scanning routines.  */
static int taskreg_nesting_level;
static int target_nesting_level;
/* DECL_UIDs of variables that were made addressable during this pass (see
   use_pointer_for_field); uses of them may need regimplification.  */
static bitmap make_addressable_vars;
/* DECL_UIDs of global variables first seen as non-addressable; for these the
   answer of use_pointer_for_field stays stable for the whole pass even if
   they are made addressable later.  See PR91216.  */
static bitmap global_nonaddressable_vars;
/* Contexts of task/parallel/teams regions collected during scanning.  */
static vec<omp_context *> taskreg_contexts;
/* Task statements whose copy functions still need finalization (see
   finalize_task_copyfn).  */
static vec<gomp_task *> task_cpyfns;

static void scan_omp (gimple_seq *, omp_context *);
static tree scan_omp_1_op (tree *, int *, void *);
198
/* Case labels shared by several walk_gimple_* callbacks: for these purely
   structural containers there is nothing to do at this level, so tell the
   walker to descend into their sub-statements.  */
#define WALK_SUBSTMTS  \
    case GIMPLE_BIND: \
    case GIMPLE_TRY: \
    case GIMPLE_CATCH: \
    case GIMPLE_EH_FILTER: \
    case GIMPLE_TRANSACTION: \
      /* The sub-statements for these should be walked.  */ \
      *handled_ops_p = false; \
      break;
208
209 /* Return whether CTX represents an OpenACC 'parallel' or 'serial' construct.
210 (This doesn't include OpenACC 'kernels' decomposed parts.) */
211
212 static bool
is_oacc_parallel_or_serial(omp_context * ctx)213 is_oacc_parallel_or_serial (omp_context *ctx)
214 {
215 enum gimple_code outer_type = gimple_code (ctx->stmt);
216 return ((outer_type == GIMPLE_OMP_TARGET)
217 && ((gimple_omp_target_kind (ctx->stmt)
218 == GF_OMP_TARGET_KIND_OACC_PARALLEL)
219 || (gimple_omp_target_kind (ctx->stmt)
220 == GF_OMP_TARGET_KIND_OACC_SERIAL)));
221 }
222
223 /* Return whether CTX represents an OpenACC 'kernels' construct.
224 (This doesn't include OpenACC 'kernels' decomposed parts.) */
225
226 static bool
is_oacc_kernels(omp_context * ctx)227 is_oacc_kernels (omp_context *ctx)
228 {
229 enum gimple_code outer_type = gimple_code (ctx->stmt);
230 return ((outer_type == GIMPLE_OMP_TARGET)
231 && (gimple_omp_target_kind (ctx->stmt)
232 == GF_OMP_TARGET_KIND_OACC_KERNELS));
233 }
234
235 /* Return whether CTX represents an OpenACC 'kernels' decomposed part. */
236
237 static bool
is_oacc_kernels_decomposed_part(omp_context * ctx)238 is_oacc_kernels_decomposed_part (omp_context *ctx)
239 {
240 enum gimple_code outer_type = gimple_code (ctx->stmt);
241 return ((outer_type == GIMPLE_OMP_TARGET)
242 && ((gimple_omp_target_kind (ctx->stmt)
243 == GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED)
244 || (gimple_omp_target_kind (ctx->stmt)
245 == GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE)
246 || (gimple_omp_target_kind (ctx->stmt)
247 == GF_OMP_TARGET_KIND_OACC_DATA_KERNELS)));
248 }
249
250 /* Return true if STMT corresponds to an OpenMP target region. */
251 static bool
is_omp_target(gimple * stmt)252 is_omp_target (gimple *stmt)
253 {
254 if (gimple_code (stmt) == GIMPLE_OMP_TARGET)
255 {
256 int kind = gimple_omp_target_kind (stmt);
257 return (kind == GF_OMP_TARGET_KIND_REGION
258 || kind == GF_OMP_TARGET_KIND_DATA
259 || kind == GF_OMP_TARGET_KIND_ENTER_DATA
260 || kind == GF_OMP_TARGET_KIND_EXIT_DATA);
261 }
262 return false;
263 }
264
265 /* If DECL is the artificial dummy VAR_DECL created for non-static
266 data member privatization, return the underlying "this" parameter,
267 otherwise return NULL. */
268
269 tree
omp_member_access_dummy_var(tree decl)270 omp_member_access_dummy_var (tree decl)
271 {
272 if (!VAR_P (decl)
273 || !DECL_ARTIFICIAL (decl)
274 || !DECL_IGNORED_P (decl)
275 || !DECL_HAS_VALUE_EXPR_P (decl)
276 || !lang_hooks.decls.omp_disregard_value_expr (decl, false))
277 return NULL_TREE;
278
279 tree v = DECL_VALUE_EXPR (decl);
280 if (TREE_CODE (v) != COMPONENT_REF)
281 return NULL_TREE;
282
283 while (1)
284 switch (TREE_CODE (v))
285 {
286 case COMPONENT_REF:
287 case MEM_REF:
288 case INDIRECT_REF:
289 CASE_CONVERT:
290 case POINTER_PLUS_EXPR:
291 v = TREE_OPERAND (v, 0);
292 continue;
293 case PARM_DECL:
294 if (DECL_CONTEXT (v) == current_function_decl
295 && DECL_ARTIFICIAL (v)
296 && TREE_CODE (TREE_TYPE (v)) == POINTER_TYPE)
297 return v;
298 return NULL_TREE;
299 default:
300 return NULL_TREE;
301 }
302 }
303
304 /* Helper for unshare_and_remap, called through walk_tree. */
305
306 static tree
unshare_and_remap_1(tree * tp,int * walk_subtrees,void * data)307 unshare_and_remap_1 (tree *tp, int *walk_subtrees, void *data)
308 {
309 tree *pair = (tree *) data;
310 if (*tp == pair[0])
311 {
312 *tp = unshare_expr (pair[1]);
313 *walk_subtrees = 0;
314 }
315 else if (IS_TYPE_OR_DECL_P (*tp))
316 *walk_subtrees = 0;
317 return NULL_TREE;
318 }
319
320 /* Return unshare_expr (X) with all occurrences of FROM
321 replaced with TO. */
322
323 static tree
unshare_and_remap(tree x,tree from,tree to)324 unshare_and_remap (tree x, tree from, tree to)
325 {
326 tree pair[2] = { from, to };
327 x = unshare_expr (x);
328 walk_tree (&x, unshare_and_remap_1, pair, NULL);
329 return x;
330 }
331
332 /* Convenience function for calling scan_omp_1_op on tree operands. */
333
334 static inline tree
scan_omp_op(tree * tp,omp_context * ctx)335 scan_omp_op (tree *tp, omp_context *ctx)
336 {
337 struct walk_stmt_info wi;
338
339 memset (&wi, 0, sizeof (wi));
340 wi.info = ctx;
341 wi.want_locations = true;
342
343 return walk_tree (tp, scan_omp_1_op, &wi, NULL);
344 }
345
346 static void lower_omp (gimple_seq *, omp_context *);
347 static tree lookup_decl_in_outer_ctx (tree, omp_context *);
348 static tree maybe_lookup_decl_in_outer_ctx (tree, omp_context *);
349
350 /* Return true if CTX is for an omp parallel. */
351
352 static inline bool
is_parallel_ctx(omp_context * ctx)353 is_parallel_ctx (omp_context *ctx)
354 {
355 return gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL;
356 }
357
358
359 /* Return true if CTX is for an omp task. */
360
361 static inline bool
is_task_ctx(omp_context * ctx)362 is_task_ctx (omp_context *ctx)
363 {
364 return gimple_code (ctx->stmt) == GIMPLE_OMP_TASK;
365 }
366
367
368 /* Return true if CTX is for an omp taskloop. */
369
370 static inline bool
is_taskloop_ctx(omp_context * ctx)371 is_taskloop_ctx (omp_context *ctx)
372 {
373 return gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
374 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP;
375 }
376
377
378 /* Return true if CTX is for a host omp teams. */
379
380 static inline bool
is_host_teams_ctx(omp_context * ctx)381 is_host_teams_ctx (omp_context *ctx)
382 {
383 return gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
384 && gimple_omp_teams_host (as_a <gomp_teams *> (ctx->stmt));
385 }
386
387 /* Return true if CTX is for an omp parallel or omp task or host omp teams
388 (the last one is strictly not a task region in OpenMP speak, but we
389 need to treat it similarly). */
390
391 static inline bool
is_taskreg_ctx(omp_context * ctx)392 is_taskreg_ctx (omp_context *ctx)
393 {
394 return is_parallel_ctx (ctx) || is_task_ctx (ctx) || is_host_teams_ctx (ctx);
395 }
396
397 /* Return true if EXPR is variable sized. */
398
399 static inline bool
is_variable_sized(const_tree expr)400 is_variable_sized (const_tree expr)
401 {
402 return !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
403 }
404
405 /* Lookup variables. The "maybe" form
406 allows for the variable form to not have been entered, otherwise we
407 assert that the variable must have been entered. */
408
409 static inline tree
lookup_decl(tree var,omp_context * ctx)410 lookup_decl (tree var, omp_context *ctx)
411 {
412 tree *n = ctx->cb.decl_map->get (var);
413 return *n;
414 }
415
416 static inline tree
maybe_lookup_decl(const_tree var,omp_context * ctx)417 maybe_lookup_decl (const_tree var, omp_context *ctx)
418 {
419 tree *n = ctx->cb.decl_map->get (const_cast<tree> (var));
420 return n ? *n : NULL_TREE;
421 }
422
423 static inline tree
lookup_field(tree var,omp_context * ctx)424 lookup_field (tree var, omp_context *ctx)
425 {
426 splay_tree_node n;
427 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) var);
428 return (tree) n->value;
429 }
430
431 static inline tree
lookup_sfield(splay_tree_key key,omp_context * ctx)432 lookup_sfield (splay_tree_key key, omp_context *ctx)
433 {
434 splay_tree_node n;
435 n = splay_tree_lookup (ctx->sfield_map
436 ? ctx->sfield_map : ctx->field_map, key);
437 return (tree) n->value;
438 }
439
440 static inline tree
lookup_sfield(tree var,omp_context * ctx)441 lookup_sfield (tree var, omp_context *ctx)
442 {
443 return lookup_sfield ((splay_tree_key) var, ctx);
444 }
445
446 static inline tree
maybe_lookup_field(splay_tree_key key,omp_context * ctx)447 maybe_lookup_field (splay_tree_key key, omp_context *ctx)
448 {
449 splay_tree_node n;
450 n = splay_tree_lookup (ctx->field_map, key);
451 return n ? (tree) n->value : NULL_TREE;
452 }
453
454 static inline tree
maybe_lookup_field(tree var,omp_context * ctx)455 maybe_lookup_field (tree var, omp_context *ctx)
456 {
457 return maybe_lookup_field ((splay_tree_key) var, ctx);
458 }
459
/* Return true if DECL should be copied by pointer.  SHARED_CTX is
   the parallel context if DECL is to be shared.  When SHARED_CTX is
   non-NULL this may also mark the outer copy of DECL addressable (and
   record it in make_addressable_vars) as a side effect.  */

static bool
use_pointer_for_field (tree decl, omp_context *shared_ctx)
{
  /* Aggregates and atomics are always passed by reference.  */
  if (AGGREGATE_TYPE_P (TREE_TYPE (decl))
      || TYPE_ATOMIC (TREE_TYPE (decl)))
    return true;

  /* We can only use copy-in/copy-out semantics for shared variables
     when we know the value is not accessible from an outer scope.  */
  if (shared_ctx)
    {
      gcc_assert (!is_gimple_omp_oacc (shared_ctx->stmt));

      /* ??? Trivially accessible from anywhere.  But why would we even
	 be passing an address in this case?  Should we simply assert
	 this to be false, or should we have a cleanup pass that removes
	 these from the list of mappings?  */
      if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, shared_ctx)))
	return true;

      /* For variables with DECL_HAS_VALUE_EXPR_P set, we cannot tell
	 without analyzing the expression whether or not its location
	 is accessible to anyone else.  In the case of nested parallel
	 regions it certainly may be.  */
      if (TREE_CODE (decl) != RESULT_DECL && DECL_HAS_VALUE_EXPR_P (decl))
	return true;

      /* Do not use copy-in/copy-out for variables that have their
	 address taken.  */
      if (is_global_var (decl))
	{
	  /* For file scope vars, track whether we've seen them as
	     non-addressable initially and in that case, keep the same
	     answer for the duration of the pass, even when they are made
	     addressable later on e.g. through reduction expansion.  Global
	     variables which weren't addressable before the pass will not
	     have their privatized copies address taken.  See PR91216.  */
	  if (!TREE_ADDRESSABLE (decl))
	    {
	      if (!global_nonaddressable_vars)
		global_nonaddressable_vars = BITMAP_ALLOC (NULL);
	      bitmap_set_bit (global_nonaddressable_vars, DECL_UID (decl));
	    }
	  else if (!global_nonaddressable_vars
		   || !bitmap_bit_p (global_nonaddressable_vars,
				     DECL_UID (decl)))
	    return true;
	}
      else if (TREE_ADDRESSABLE (decl))
	return true;

      /* lower_send_shared_vars only uses copy-in, but not copy-out
	 for these.  */
      if (TREE_READONLY (decl)
	  || ((TREE_CODE (decl) == RESULT_DECL
	       || TREE_CODE (decl) == PARM_DECL)
	      && DECL_BY_REFERENCE (decl)))
	return false;

      /* Disallow copy-in/out in nested parallel if
	 decl is shared in outer parallel, otherwise
	 each thread could store the shared variable
	 in its own copy-in location, making the
	 variable no longer really shared.  */
      if (shared_ctx->is_nested)
	{
	  omp_context *up;

	  /* Find the innermost enclosing taskreg or offloaded target
	     context that has a mapping for DECL.  */
	  for (up = shared_ctx->outer; up; up = up->outer)
	    if ((is_taskreg_ctx (up)
		 || (gimple_code (up->stmt) == GIMPLE_OMP_TARGET
		     && is_gimple_omp_offloaded (up->stmt)))
		&& maybe_lookup_decl (decl, up))
	      break;

	  if (up)
	    {
	      tree c;

	      /* Check whether DECL is actually mapped (target) or shared
		 (parallel/task/teams) on that enclosing construct.  */
	      if (gimple_code (up->stmt) == GIMPLE_OMP_TARGET)
		{
		  for (c = gimple_omp_target_clauses (up->stmt);
		       c; c = OMP_CLAUSE_CHAIN (c))
		    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
			&& OMP_CLAUSE_DECL (c) == decl)
		      break;
		}
	      else
		for (c = gimple_omp_taskreg_clauses (up->stmt);
		     c; c = OMP_CLAUSE_CHAIN (c))
		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
		      && OMP_CLAUSE_DECL (c) == decl)
		    break;

	      /* DECL is shared/mapped in an enclosing region: pass it by
		 reference, marking the outer copy addressable below.  */
	      if (c)
		goto maybe_mark_addressable_and_ret;
	    }
	}

      /* For tasks avoid using copy-in/out.  As tasks can be
	 deferred or executed in different thread, when GOMP_task
	 returns, the task hasn't necessarily terminated.  */
      if (is_task_ctx (shared_ctx))
	{
	  tree outer;
	maybe_mark_addressable_and_ret:
	  outer = maybe_lookup_decl_in_outer_ctx (decl, shared_ctx);
	  if (is_gimple_reg (outer) && !omp_member_access_dummy_var (outer))
	    {
	      /* Taking address of OUTER in lower_send_shared_vars
		 might need regimplification of everything that uses the
		 variable.  */
	      if (!make_addressable_vars)
		make_addressable_vars = BITMAP_ALLOC (NULL);
	      bitmap_set_bit (make_addressable_vars, DECL_UID (outer));
	      TREE_ADDRESSABLE (outer) = 1;
	    }
	  return true;
	}
    }

  return false;
}
586
587 /* Construct a new automatic decl similar to VAR. */
588
589 static tree
omp_copy_decl_2(tree var,tree name,tree type,omp_context * ctx)590 omp_copy_decl_2 (tree var, tree name, tree type, omp_context *ctx)
591 {
592 tree copy = copy_var_decl (var, name, type);
593
594 DECL_CONTEXT (copy) = current_function_decl;
595
596 if (ctx)
597 {
598 DECL_CHAIN (copy) = ctx->block_vars;
599 ctx->block_vars = copy;
600 }
601 else
602 record_vars (copy);
603
604 /* If VAR is listed in make_addressable_vars, it wasn't
605 originally addressable, but was only later made so.
606 We don't need to take address of privatizations
607 from that var. */
608 if (TREE_ADDRESSABLE (var)
609 && ((make_addressable_vars
610 && bitmap_bit_p (make_addressable_vars, DECL_UID (var)))
611 || (global_nonaddressable_vars
612 && bitmap_bit_p (global_nonaddressable_vars, DECL_UID (var)))))
613 TREE_ADDRESSABLE (copy) = 0;
614
615 return copy;
616 }
617
618 static tree
omp_copy_decl_1(tree var,omp_context * ctx)619 omp_copy_decl_1 (tree var, omp_context *ctx)
620 {
621 return omp_copy_decl_2 (var, DECL_NAME (var), TREE_TYPE (var), ctx);
622 }
623
624 /* Build tree nodes to access the field for VAR on the receiver side. */
625
626 static tree
build_receiver_ref(tree var,bool by_ref,omp_context * ctx)627 build_receiver_ref (tree var, bool by_ref, omp_context *ctx)
628 {
629 tree x, field = lookup_field (var, ctx);
630
631 /* If the receiver record type was remapped in the child function,
632 remap the field into the new record type. */
633 x = maybe_lookup_field (field, ctx);
634 if (x != NULL)
635 field = x;
636
637 x = build_simple_mem_ref (ctx->receiver_decl);
638 TREE_THIS_NOTRAP (x) = 1;
639 x = omp_build_component_ref (x, field);
640 if (by_ref)
641 {
642 x = build_simple_mem_ref (x);
643 TREE_THIS_NOTRAP (x) = 1;
644 }
645
646 return x;
647 }
648
/* Build tree nodes to access VAR in the scope outer to CTX.  In the case
   of a parallel, this is a component reference; for workshare constructs
   this is some variable.  CODE, if not OMP_CLAUSE_ERROR, identifies the
   clause kind the reference is built for and selects special handling
   for private and lastprivate clauses.  */

static tree
build_outer_var_ref (tree var, omp_context *ctx,
		     enum omp_clause_code code = OMP_CLAUSE_ERROR)
{
  tree x;
  omp_context *outer = ctx->outer;
  /* Skip enclosing contexts that do not contribute data environments for
     this lookup: taskgroups always, and scope constructs that have no
     mapping for VAR.  */
  for (; outer; outer = outer->outer)
    {
      if (gimple_code (outer->stmt) == GIMPLE_OMP_TASKGROUP)
	continue;
      if (gimple_code (outer->stmt) == GIMPLE_OMP_SCOPE
	  && !maybe_lookup_decl (var, outer))
	continue;
      break;
    }

  if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
    x = var;
  else if (is_variable_sized (var))
    {
      /* Variable-sized vars are accessed through a pointer held in their
	 DECL_VALUE_EXPR; build the outer ref for that pointer and
	 dereference it.  */
      x = TREE_OPERAND (DECL_VALUE_EXPR (var), 0);
      x = build_outer_var_ref (x, ctx, code);
      x = build_simple_mem_ref (x);
    }
  else if (is_taskreg_ctx (ctx))
    {
      bool by_ref = use_pointer_for_field (var, NULL);
      x = build_receiver_ref (var, by_ref, ctx);
    }
  else if ((gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
	    && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
	   || ctx->loop_p
	   || (code == OMP_CLAUSE_PRIVATE
	       && (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
		   || gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS
		   || gimple_code (ctx->stmt) == GIMPLE_OMP_SINGLE)))
    {
      /* #pragma omp simd isn't a worksharing construct, and can reference
	 even private vars in its linear etc. clauses.
	 Similarly for OMP_CLAUSE_PRIVATE with outer ref, that can refer
	 to private vars in all worksharing constructs.  */
      x = NULL_TREE;
      if (outer && is_taskreg_ctx (outer))
	x = lookup_decl (var, outer);
      else if (outer)
	x = maybe_lookup_decl_in_outer_ctx (var, ctx);
      if (x == NULL_TREE)
	x = var;
    }
  else if (code == OMP_CLAUSE_LASTPRIVATE && is_taskloop_ctx (ctx))
    {
      gcc_assert (outer);
      /* Taskloop fields are keyed on &DECL_UID rather than the decl
	 itself (see install_var_field's mask bit 8).  */
      splay_tree_node n
	= splay_tree_lookup (outer->field_map,
			     (splay_tree_key) &DECL_UID (var));
      if (n == NULL)
	{
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, outer)))
	    x = var;
	  else
	    x = lookup_decl (var, outer);
	}
      else
	{
	  tree field = (tree) n->value;
	  /* If the receiver record type was remapped in the child function,
	     remap the field into the new record type.  */
	  x = maybe_lookup_field (field, outer);
	  if (x != NULL)
	    field = x;

	  x = build_simple_mem_ref (outer->receiver_decl);
	  x = omp_build_component_ref (x, field);
	  if (use_pointer_for_field (var, outer))
	    x = build_simple_mem_ref (x);
	}
    }
  else if (outer)
    x = lookup_decl (var, outer);
  else if (omp_privatize_by_reference (var))
    /* This can happen with orphaned constructs.  If var is reference, it is
       possible it is shared and as such valid.  */
    x = var;
  else if (omp_member_access_dummy_var (var))
    x = var;
  else
    gcc_unreachable ();

  if (x == var)
    {
      /* For member-access dummy vars, substitute the underlying "this"
	 parameter (remapped to the outer context if needed) into a copy
	 of the value expression.  */
      tree t = omp_member_access_dummy_var (var);
      if (t)
	{
	  x = DECL_VALUE_EXPR (var);
	  tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
	  if (o != t)
	    x = unshare_and_remap (x, t, o);
	  else
	    x = unshare_expr (x);
	}
    }

  if (omp_privatize_by_reference (var))
    x = build_simple_mem_ref (x);

  return x;
}
760
761 /* Build tree nodes to access the field for VAR on the sender side. */
762
763 static tree
build_sender_ref(splay_tree_key key,omp_context * ctx)764 build_sender_ref (splay_tree_key key, omp_context *ctx)
765 {
766 tree field = lookup_sfield (key, ctx);
767 return omp_build_component_ref (ctx->sender_decl, field);
768 }
769
770 static tree
build_sender_ref(tree var,omp_context * ctx)771 build_sender_ref (tree var, omp_context *ctx)
772 {
773 return build_sender_ref ((splay_tree_key) var, ctx);
774 }
775
/* Add a new field for VAR inside the structure CTX->SENDER_DECL.  MASK is
   a bitmask controlling the installation: bit 0 enters the field into
   CTX->field_map/record_type, bit 1 into CTX->sfield_map/srecord_type;
   bit 2 wraps an array type in a double pointer; bit 3 keys the field on
   &DECL_UID (VAR) instead of VAR; bit 4 keys it on &DECL_NAME (VAR) and
   uses the language's array-data type; bit 5 suppresses the
   by-reference type stripping (checked together with bits 0-1 below).
   BY_REF requests a pointer-typed field.  */

static void
install_var_field (tree var, bool by_ref, int mask, omp_context *ctx)
{
  tree field, type, sfield = NULL_TREE;
  splay_tree_key key = (splay_tree_key) var;

  /* Alternative keys so the same VAR can have several distinct fields.  */
  if ((mask & 16) != 0)
    {
      key = (splay_tree_key) &DECL_NAME (var);
      gcc_checking_assert (key != (splay_tree_key) var);
    }
  if ((mask & 8) != 0)
    {
      key = (splay_tree_key) &DECL_UID (var);
      gcc_checking_assert (key != (splay_tree_key) var);
    }
  gcc_assert ((mask & 1) == 0
	      || !splay_tree_lookup (ctx->field_map, key));
  gcc_assert ((mask & 2) == 0 || !ctx->sfield_map
	      || !splay_tree_lookup (ctx->sfield_map, key));
  gcc_assert ((mask & 3) == 3
	      || !is_gimple_omp_oacc (ctx->stmt));

  type = TREE_TYPE (var);
  if ((mask & 16) != 0)
    type = lang_hooks.decls.omp_array_data (var, true);

  /* Prevent redeclaring the var in the split-off function with a restrict
     pointer type.  Note that we only clear type itself, restrict qualifiers in
     the pointed-to type will be ignored by points-to analysis.  */
  if (POINTER_TYPE_P (type)
      && TYPE_RESTRICT (type))
    type = build_qualified_type (type, TYPE_QUALS (type) & ~TYPE_QUAL_RESTRICT);

  if (mask & 4)
    {
      gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
      type = build_pointer_type (build_pointer_type (type));
    }
  else if (by_ref)
    type = build_pointer_type (type);
  else if ((mask & (32 | 3)) == 1
	   && omp_privatize_by_reference (var))
    type = TREE_TYPE (type);

  field = build_decl (DECL_SOURCE_LOCATION (var),
		      FIELD_DECL, DECL_NAME (var), type);

  /* Remember what variable this field was created for.  This does have a
     side effect of making dwarf2out ignore this member, so for helpful
     debugging we clear it later in delete_omp_context.  */
  DECL_ABSTRACT_ORIGIN (field) = var;
  if ((mask & 16) == 0 && type == TREE_TYPE (var))
    {
      SET_DECL_ALIGN (field, DECL_ALIGN (var));
      DECL_USER_ALIGN (field) = DECL_USER_ALIGN (var);
      TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (var);
    }
  else
    SET_DECL_ALIGN (field, TYPE_ALIGN (type));

  if ((mask & 3) == 3)
    {
      /* Field goes into both record types.  */
      insert_field_into_struct (ctx->record_type, field);
      if (ctx->srecord_type)
	{
	  sfield = build_decl (DECL_SOURCE_LOCATION (var),
			       FIELD_DECL, DECL_NAME (var), type);
	  DECL_ABSTRACT_ORIGIN (sfield) = var;
	  SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
	  DECL_USER_ALIGN (sfield) = DECL_USER_ALIGN (field);
	  TREE_THIS_VOLATILE (sfield) = TREE_THIS_VOLATILE (field);
	  insert_field_into_struct (ctx->srecord_type, sfield);
	}
    }
  else
    {
      if (ctx->srecord_type == NULL_TREE)
	{
	  tree t;

	  /* Lazily create srecord_type, mirroring all fields already
	     present in record_type.  */
	  ctx->srecord_type = lang_hooks.types.make_type (RECORD_TYPE);
	  ctx->sfield_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
	  for (t = TYPE_FIELDS (ctx->record_type); t ; t = TREE_CHAIN (t))
	    {
	      sfield = build_decl (DECL_SOURCE_LOCATION (t),
				   FIELD_DECL, DECL_NAME (t), TREE_TYPE (t));
	      DECL_ABSTRACT_ORIGIN (sfield) = DECL_ABSTRACT_ORIGIN (t);
	      insert_field_into_struct (ctx->srecord_type, sfield);
	      splay_tree_insert (ctx->sfield_map,
				 (splay_tree_key) DECL_ABSTRACT_ORIGIN (t),
				 (splay_tree_value) sfield);
	    }
	}
      sfield = field;
      insert_field_into_struct ((mask & 1) ? ctx->record_type
				: ctx->srecord_type, field);
    }

  if (mask & 1)
    splay_tree_insert (ctx->field_map, key, (splay_tree_value) field);
  if ((mask & 2) && ctx->sfield_map)
    splay_tree_insert (ctx->sfield_map, key, (splay_tree_value) sfield);
}
883
884 static tree
install_var_local(tree var,omp_context * ctx)885 install_var_local (tree var, omp_context *ctx)
886 {
887 tree new_var = omp_copy_decl_1 (var, ctx);
888 insert_decl_map (&ctx->cb, var, new_var);
889 return new_var;
890 }
891
892 /* Adjust the replacement for DECL in CTX for the new context. This means
893 copying the DECL_VALUE_EXPR, and fixing up the type. */
894
895 static void
fixup_remapped_decl(tree decl,omp_context * ctx,bool private_debug)896 fixup_remapped_decl (tree decl, omp_context *ctx, bool private_debug)
897 {
898 tree new_decl, size;
899
900 new_decl = lookup_decl (decl, ctx);
901
902 TREE_TYPE (new_decl) = remap_type (TREE_TYPE (decl), &ctx->cb);
903
904 if ((!TREE_CONSTANT (DECL_SIZE (new_decl)) || private_debug)
905 && DECL_HAS_VALUE_EXPR_P (decl))
906 {
907 tree ve = DECL_VALUE_EXPR (decl);
908 walk_tree (&ve, copy_tree_body_r, &ctx->cb, NULL);
909 SET_DECL_VALUE_EXPR (new_decl, ve);
910 DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
911 }
912
913 if (!TREE_CONSTANT (DECL_SIZE (new_decl)))
914 {
915 size = remap_decl (DECL_SIZE (decl), &ctx->cb);
916 if (size == error_mark_node)
917 size = TYPE_SIZE (TREE_TYPE (new_decl));
918 DECL_SIZE (new_decl) = size;
919
920 size = remap_decl (DECL_SIZE_UNIT (decl), &ctx->cb);
921 if (size == error_mark_node)
922 size = TYPE_SIZE_UNIT (TREE_TYPE (new_decl));
923 DECL_SIZE_UNIT (new_decl) = size;
924 }
925 }
926
/* The callback for remap_decl.  Search all containing contexts for a
   mapping of the variable; this avoids having to duplicate the splay
   tree ahead of time.  We know a mapping doesn't already exist in the
   given context.  Create new mappings to implement default semantics.  */

static tree
omp_copy_decl (tree var, copy_body_data *cb)
{
  /* CB is really the containing omp_context; see the comment on the cb
     field of struct omp_context.  */
  omp_context *ctx = (omp_context *) cb;
  tree new_var;

  if (TREE_CODE (var) == LABEL_DECL)
    {
      /* Labels whose address is taken or that are reachable from nested
	 functions must not be duplicated.  */
      if (FORCED_LABEL (var) || DECL_NONLOCAL (var))
	return var;
      new_var = create_artificial_label (DECL_SOURCE_LOCATION (var));
      DECL_CONTEXT (new_var) = current_function_decl;
      insert_decl_map (&ctx->cb, var, new_var);
      return new_var;
    }

  /* Walk outwards through non-taskreg contexts, returning the first
     existing mapping found on the way.  */
  while (!is_taskreg_ctx (ctx))
    {
      ctx = ctx->outer;
      if (ctx == NULL)
	return var;
      new_var = maybe_lookup_decl (var, ctx);
      if (new_var)
	return new_var;
    }

  if (is_global_var (var) || decl_function_context (var) != ctx->cb.src_fn)
    return var;

  /* A local variable of the source function with no mapping in any
     enclosing taskreg context: this should not happen.  */
  return error_mark_node;
}
963
/* Create a new context, with OUTER_CTX being the surrounding context.
   The new context is registered in ALL_CONTEXTS keyed on STMT.  */

static omp_context *
new_omp_context (gimple *stmt, omp_context *outer_ctx)
{
  omp_context *ctx = XCNEW (omp_context);

  splay_tree_insert (all_contexts, (splay_tree_key) stmt,
		     (splay_tree_value) ctx);
  ctx->stmt = stmt;

  if (outer_ctx)
    {
      /* Inherit the copy-body setup from the enclosing context.  */
      ctx->outer = outer_ctx;
      ctx->cb = outer_ctx->cb;
      ctx->cb.block = NULL;
      ctx->depth = outer_ctx->depth + 1;
    }
  else
    {
      /* Outermost construct: initialize the copy-body data from scratch,
	 remapping within the current function.  */
      ctx->cb.src_fn = current_function_decl;
      ctx->cb.dst_fn = current_function_decl;
      ctx->cb.src_node = cgraph_node::get (current_function_decl);
      gcc_checking_assert (ctx->cb.src_node);
      ctx->cb.dst_node = ctx->cb.src_node;
      ctx->cb.src_cfun = cfun;
      ctx->cb.copy_decl = omp_copy_decl;
      ctx->cb.eh_lp_nr = 0;
      ctx->cb.transform_call_graph_edges = CB_CGE_MOVE;
      ctx->cb.adjust_array_error_bounds = true;
      ctx->cb.dont_remap_vla_if_no_change = true;
      ctx->depth = 1;
    }

  /* Each context gets its own decl map regardless of nesting.  */
  ctx->cb.decl_map = new hash_map<tree, tree>;

  return ctx;
}
1002
1003 static gimple_seq maybe_catch_exception (gimple_seq);
1004
1005 /* Finalize task copyfn. */
1006 
1007 static void
finalize_task_copyfn(gomp_task * task_stmt)1008 finalize_task_copyfn (gomp_task *task_stmt)
1009 {
1010 struct function *child_cfun;
1011 tree child_fn;
1012 gimple_seq seq = NULL, new_seq;
1013 gbind *bind;
1014 
/* Nothing to do if the task has no copy function.  */
1015 child_fn = gimple_omp_task_copy_fn (task_stmt);
1016 if (child_fn == NULL_TREE)
1017 return;
1018 
1019 child_cfun = DECL_STRUCT_FUNCTION (child_fn);
/* Propagate the parent's pass properties so the child starts lowering
   from the same state.  */
1020 DECL_STRUCT_FUNCTION (child_fn)->curr_properties = cfun->curr_properties;
1021 
/* Gimplify the copyfn body inside its own cfun; the bind becomes the
   whole body sequence.  */
1022 push_cfun (child_cfun);
1023 bind = gimplify_body (child_fn, false);
1024 gimple_seq_add_stmt (&seq, bind);
1025 new_seq = maybe_catch_exception (seq);
/* If exception wrapping changed the sequence, re-wrap it in a fresh
   GIMPLE_BIND so the body stays a single bind statement.  */
1026 if (new_seq != seq)
1027 {
1028 bind = gimple_build_bind (NULL, new_seq, NULL);
1029 seq = NULL;
1030 gimple_seq_add_stmt (&seq, bind);
1031 }
1032 gimple_set_body (child_fn, seq);
1033 pop_cfun ();
1034 
1035 /* Inform the callgraph about the new function. */
1036 cgraph_node *node = cgraph_node::get_create (child_fn);
1037 node->parallelized_function = 1;
1038 cgraph_node::add_new_function (child_fn, false);
1039 }
1040
1041 /* Destroy a omp_context data structures. Called through the splay tree
1042 value delete callback. */
1043
1044 static void
delete_omp_context(splay_tree_value value)1045 delete_omp_context (splay_tree_value value)
1046 {
1047 omp_context *ctx = (omp_context *) value;
1048
1049 delete ctx->cb.decl_map;
1050
1051 if (ctx->field_map)
1052 splay_tree_delete (ctx->field_map);
1053 if (ctx->sfield_map)
1054 splay_tree_delete (ctx->sfield_map);
1055
1056 /* We hijacked DECL_ABSTRACT_ORIGIN earlier. We need to clear it before
1057 it produces corrupt debug information. */
1058 if (ctx->record_type)
1059 {
1060 tree t;
1061 for (t = TYPE_FIELDS (ctx->record_type); t ; t = DECL_CHAIN (t))
1062 DECL_ABSTRACT_ORIGIN (t) = NULL;
1063 }
1064 if (ctx->srecord_type)
1065 {
1066 tree t;
1067 for (t = TYPE_FIELDS (ctx->srecord_type); t ; t = DECL_CHAIN (t))
1068 DECL_ABSTRACT_ORIGIN (t) = NULL;
1069 }
1070
1071 if (ctx->task_reduction_map)
1072 {
1073 ctx->task_reductions.release ();
1074 delete ctx->task_reduction_map;
1075 }
1076
1077 delete ctx->lastprivate_conditional_map;
1078 delete ctx->allocate_map;
1079
1080 XDELETE (ctx);
1081 }
1082
1083 /* Fix up RECEIVER_DECL with a type that has been remapped to the child
1084 context. */
1085 
1086 static void
fixup_child_record_type(omp_context * ctx)1087 fixup_child_record_type (omp_context *ctx)
1088 {
1089 tree f, type = ctx->record_type;
1090 
1091 if (!ctx->receiver_decl)
1092 return;
1093 /* ??? It isn't sufficient to just call remap_type here, because
1094 variably_modified_type_p doesn't work the way we expect for
1095 record types. Testing each field for whether it needs remapping
1096 and creating a new record by hand works, however. */
1097 for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
1098 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
1099 break;
/* F non-null means at least one field has a variably modified type and
   the whole record must be rebuilt with remapped field types.  */
1100 if (f)
1101 {
1102 tree name, new_fields = NULL;
1103 
/* Build a fresh RECORD_TYPE reusing the old record's name, located at
   the receiver decl.  */
1104 type = lang_hooks.types.make_type (RECORD_TYPE);
1105 name = DECL_NAME (TYPE_NAME (ctx->record_type));
1106 name = build_decl (DECL_SOURCE_LOCATION (ctx->receiver_decl),
1107 TYPE_DECL, name, type);
1108 TYPE_NAME (type) = name;
1109 
1110 for (f = TYPE_FIELDS (ctx->record_type); f ; f = DECL_CHAIN (f))
1111 {
/* Copy each field, remap its type and size expressions into the child
   context, and chain it (in reverse; see nreverse below).  */
1112 tree new_f = copy_node (f);
1113 DECL_CONTEXT (new_f) = type;
1114 TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &ctx->cb);
1115 DECL_CHAIN (new_f) = new_fields;
1116 walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &ctx->cb, NULL);
1117 walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r,
1118 &ctx->cb, NULL);
1119 walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
1120 &ctx->cb, NULL);
1121 new_fields = new_f;
1122 
1123 /* Arrange to be able to look up the receiver field
1124 given the sender field. */
1125 splay_tree_insert (ctx->field_map, (splay_tree_key) f,
1126 (splay_tree_value) new_f);
1127 }
1128 TYPE_FIELDS (type) = nreverse (new_fields);
1129 layout_type (type);
1130 }
1131 
1132 /* In a target region we never modify any of the pointers in *.omp_data_i,
1133 so attempt to help the optimizers. */
1134 if (is_gimple_omp_offloaded (ctx->stmt))
1135 type = build_qualified_type (type, TYPE_QUAL_CONST);
1136 
/* The receiver is a restrict-qualified reference to the (possibly
   rebuilt, possibly const) record type.  */
1137 TREE_TYPE (ctx->receiver_decl)
1138 = build_qualified_type (build_reference_type (type), TYPE_QUAL_RESTRICT);
1139 }
1140
1141 /* Instantiate decls as necessary in CTX to satisfy the data sharing
1142 specified by CLAUSES. */
1143 
1144 static void
scan_sharing_clauses(tree clauses,omp_context * ctx)1145 scan_sharing_clauses (tree clauses, omp_context *ctx)
1146 {
1147 tree c, decl;
1148 bool scan_array_reductions = false;
1149 
/* Pre-pass: record every 'allocate' clause that names a non-default
   allocator or an explicit alignment, so later clause handling can
   consult/prune ctx->allocate_map.  */
1150 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1151 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_ALLOCATE
1152 && (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c) == NULL_TREE
1153 /* omp_default_mem_alloc is 1 */
1154 || !integer_onep (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c))
1155 || OMP_CLAUSE_ALLOCATE_ALIGN (c) != NULL_TREE))
1156 {
1157 if (ctx->allocate_map == NULL)
1158 ctx->allocate_map = new hash_map<tree, tree>;
1159 tree val = integer_zero_node;
1160 if (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c))
1161 val = OMP_CLAUSE_ALLOCATE_ALLOCATOR (c);
1162 if (OMP_CLAUSE_ALLOCATE_ALIGN (c))
1163 val = build_tree_list (val, OMP_CLAUSE_ALLOCATE_ALIGN (c));
1164 ctx->allocate_map->put (OMP_CLAUSE_DECL (c), val);
1165 }
1166 
/* First main pass: for each clause, install record fields (for the
   sender/receiver structs) and/or local copies of the decls.  */
1167 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1168 {
1169 bool by_ref;
1170 
1171 switch (OMP_CLAUSE_CODE (c))
1172 {
1173 case OMP_CLAUSE_PRIVATE:
1174 decl = OMP_CLAUSE_DECL (c);
1175 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
1176 goto do_private;
1177 else if (!is_variable_sized (decl))
1178 install_var_local (decl, ctx);
1179 break;
1180 
1181 case OMP_CLAUSE_SHARED:
1182 decl = OMP_CLAUSE_DECL (c);
/* A shared decl is never task-allocated; drop any allocate entry.  */
1183 if (ctx->allocate_map && ctx->allocate_map->get (decl))
1184 ctx->allocate_map->remove (decl);
1185 /* Ignore shared directives in teams construct inside of
1186 target construct. */
1187 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
1188 && !is_host_teams_ctx (ctx))
1189 {
1190 /* Global variables don't need to be copied,
1191 the receiver side will use them directly. */
1192 tree odecl = maybe_lookup_decl_in_outer_ctx (decl, ctx);
1193 if (is_global_var (odecl))
1194 break;
1195 insert_decl_map (&ctx->cb, decl, odecl);
1196 break;
1197 }
1198 gcc_assert (is_taskreg_ctx (ctx));
1199 gcc_assert (!COMPLETE_TYPE_P (TREE_TYPE (decl))
1200 || !is_variable_sized (decl));
1201 /* Global variables don't need to be copied,
1202 the receiver side will use them directly. */
1203 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1204 break;
1205 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
1206 {
1207 use_pointer_for_field (decl, ctx);
1208 break;
1209 }
1210 by_ref = use_pointer_for_field (decl, NULL);
1211 if ((! TREE_READONLY (decl) && !OMP_CLAUSE_SHARED_READONLY (c))
1212 || TREE_ADDRESSABLE (decl)
1213 || by_ref
1214 || omp_privatize_by_reference (decl))
1215 {
1216 by_ref = use_pointer_for_field (decl, ctx);
1217 install_var_field (decl, by_ref, 3, ctx);
1218 install_var_local (decl, ctx);
1219 break;
1220 }
1221 /* We don't need to copy const scalar vars back. */
1222 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_FIRSTPRIVATE);
1223 goto do_private;
1224 
1225 case OMP_CLAUSE_REDUCTION:
1226 /* Collect 'reduction' clauses on OpenACC compute construct. */
1227 if (is_gimple_omp_oacc (ctx->stmt)
1228 && is_gimple_omp_offloaded (ctx->stmt))
1229 {
1230 /* No 'reduction' clauses on OpenACC 'kernels'. */
1231 gcc_checking_assert (!is_oacc_kernels (ctx));
1232 /* Likewise, on OpenACC 'kernels' decomposed parts. */
1233 gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx));
1234 
1235 ctx->local_reduction_clauses
1236 = tree_cons (NULL, c, ctx->local_reduction_clauses);
1237 }
1238 /* FALLTHRU */
1239 
1240 case OMP_CLAUSE_IN_REDUCTION:
1241 decl = OMP_CLAUSE_DECL (c);
1242 if (ctx->allocate_map
1243 && ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
1244 && (OMP_CLAUSE_REDUCTION_INSCAN (c)
1245 || OMP_CLAUSE_REDUCTION_TASK (c)))
1246 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
1247 || is_task_ctx (ctx)))
1248 {
1249 /* For now. */
1250 if (ctx->allocate_map->get (decl))
1251 ctx->allocate_map->remove (decl);
1252 }
/* Array-section reductions are expressed as MEM_REFs; dig out the
   underlying base variable T.  */
1253 if (TREE_CODE (decl) == MEM_REF)
1254 {
1255 tree t = TREE_OPERAND (decl, 0);
1256 if (TREE_CODE (t) == POINTER_PLUS_EXPR)
1257 t = TREE_OPERAND (t, 0);
1258 if (TREE_CODE (t) == INDIRECT_REF
1259 || TREE_CODE (t) == ADDR_EXPR)
1260 t = TREE_OPERAND (t, 0);
1261 if (is_omp_target (ctx->stmt))
1262 {
1263 if (is_variable_sized (t))
1264 {
1265 gcc_assert (DECL_HAS_VALUE_EXPR_P (t));
1266 t = DECL_VALUE_EXPR (t);
1267 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
1268 t = TREE_OPERAND (t, 0);
1269 gcc_assert (DECL_P (t));
1270 }
/* For target regions, record the remapped decl keyed by the address
   of DECL_CONTEXT — a unique per-decl key distinct from field maps
   keyed by the decl itself.  */
1271 tree at = t;
1272 if (ctx->outer)
1273 scan_omp_op (&at, ctx->outer);
1274 tree nt = omp_copy_decl_1 (at, ctx->outer);
1275 splay_tree_insert (ctx->field_map,
1276 (splay_tree_key) &DECL_CONTEXT (t),
1277 (splay_tree_value) nt);
1278 if (at != t)
1279 splay_tree_insert (ctx->field_map,
1280 (splay_tree_key) &DECL_CONTEXT (at),
1281 (splay_tree_value) nt);
1282 break;
1283 }
1284 install_var_local (t, ctx);
1285 if (is_taskreg_ctx (ctx)
1286 && (!is_global_var (maybe_lookup_decl_in_outer_ctx (t, ctx))
1287 || (is_task_ctx (ctx)
1288 && (TREE_CODE (TREE_TYPE (t)) == POINTER_TYPE
1289 || (TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE
1290 && (TREE_CODE (TREE_TYPE (TREE_TYPE (t)))
1291 == POINTER_TYPE)))))
1292 && !is_variable_sized (t)
1293 && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
1294 || (!OMP_CLAUSE_REDUCTION_TASK (c)
1295 && !is_task_ctx (ctx))))
1296 {
1297 by_ref = use_pointer_for_field (t, NULL);
1298 if (is_task_ctx (ctx)
1299 && TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE
1300 && TREE_CODE (TREE_TYPE (TREE_TYPE (t))) == POINTER_TYPE)
1301 {
1302 install_var_field (t, false, 1, ctx);
1303 install_var_field (t, by_ref, 2, ctx);
1304 }
1305 else
1306 install_var_field (t, by_ref, 3, ctx);
1307 }
1308 break;
1309 }
1310 if (is_omp_target (ctx->stmt))
1311 {
1312 tree at = decl;
1313 if (ctx->outer)
1314 scan_omp_op (&at, ctx->outer);
1315 tree nt = omp_copy_decl_1 (at, ctx->outer);
1316 splay_tree_insert (ctx->field_map,
1317 (splay_tree_key) &DECL_CONTEXT (decl),
1318 (splay_tree_value) nt);
1319 if (at != decl)
1320 splay_tree_insert (ctx->field_map,
1321 (splay_tree_key) &DECL_CONTEXT (at),
1322 (splay_tree_value) nt);
1323 break;
1324 }
1325 if (is_task_ctx (ctx)
1326 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
1327 && OMP_CLAUSE_REDUCTION_TASK (c)
1328 && is_parallel_ctx (ctx)))
1329 {
1330 /* Global variables don't need to be copied,
1331 the receiver side will use them directly. */
1332 if (!is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1333 {
1334 by_ref = use_pointer_for_field (decl, ctx);
1335 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION)
1336 install_var_field (decl, by_ref, 3, ctx);
1337 }
1338 install_var_local (decl, ctx);
1339 break;
1340 }
1341 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
1342 && OMP_CLAUSE_REDUCTION_TASK (c))
1343 {
1344 install_var_local (decl, ctx);
1345 break;
1346 }
1347 goto do_private;
1348 
1349 case OMP_CLAUSE_LASTPRIVATE:
1350 /* Let the corresponding firstprivate clause create
1351 the variable. */
1352 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
1353 break;
1354 /* FALLTHRU */
1355 
1356 case OMP_CLAUSE_FIRSTPRIVATE:
1357 case OMP_CLAUSE_LINEAR:
1358 decl = OMP_CLAUSE_DECL (c);
1359 do_private:
1360 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
1361 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR
1362 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR)
1363 && is_gimple_omp_offloaded (ctx->stmt))
1364 {
1365 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
1366 {
1367 by_ref = !omp_privatize_by_reference (decl);
1368 install_var_field (decl, by_ref, 3, ctx);
1369 }
1370 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR)
1371 {
1372 if (TREE_CODE (decl) == INDIRECT_REF)
1373 decl = TREE_OPERAND (decl, 0);
1374 install_var_field (decl, true, 3, ctx);
1375 }
1376 else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1377 install_var_field (decl, true, 3, ctx);
1378 else
1379 install_var_field (decl, false, 3, ctx);
1380 }
1381 if (is_variable_sized (decl))
1382 {
1383 if (is_task_ctx (ctx))
1384 {
1385 if (ctx->allocate_map
1386 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
1387 {
1388 /* For now. */
1389 if (ctx->allocate_map->get (decl))
1390 ctx->allocate_map->remove (decl);
1391 }
1392 install_var_field (decl, false, 1, ctx);
1393 }
1394 break;
1395 }
1396 else if (is_taskreg_ctx (ctx))
1397 {
1398 bool global
1399 = is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx));
1400 by_ref = use_pointer_for_field (decl, NULL);
1401 
1402 if (is_task_ctx (ctx)
1403 && (global || by_ref || omp_privatize_by_reference (decl)))
1404 {
1405 if (ctx->allocate_map
1406 && ctx->allocate_map->get (decl))
1407 install_var_field (decl, by_ref, 32 | 1, ctx);
1408 else
1409 install_var_field (decl, false, 1, ctx);
1410 if (!global)
1411 install_var_field (decl, by_ref, 2, ctx);
1412 }
1413 else if (!global)
1414 install_var_field (decl, by_ref, 3, ctx);
1415 }
1416 install_var_local (decl, ctx);
1417 break;
1418 
1419 case OMP_CLAUSE_USE_DEVICE_PTR:
1420 case OMP_CLAUSE_USE_DEVICE_ADDR:
1421 decl = OMP_CLAUSE_DECL (c);
1422 
1423 /* Fortran array descriptors. */
1424 if (lang_hooks.decls.omp_array_data (decl, true))
1425 install_var_field (decl, false, 19, ctx);
1426 else if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
1427 && !omp_privatize_by_reference (decl)
1428 && !omp_is_allocatable_or_ptr (decl))
1429 || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1430 install_var_field (decl, true, 11, ctx);
1431 else
1432 install_var_field (decl, false, 11, ctx);
/* Variable-sized decls carry a DECL_VALUE_EXPR of the form *ptr;
   the underlying pointer needs a local copy too.  */
1433 if (DECL_SIZE (decl)
1434 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1435 {
1436 tree decl2 = DECL_VALUE_EXPR (decl);
1437 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1438 decl2 = TREE_OPERAND (decl2, 0);
1439 gcc_assert (DECL_P (decl2));
1440 install_var_local (decl2, ctx);
1441 }
1442 install_var_local (decl, ctx);
1443 break;
1444 
1445 case OMP_CLAUSE_HAS_DEVICE_ADDR:
1446 decl = OMP_CLAUSE_DECL (c);
1447 while (TREE_CODE (decl) == INDIRECT_REF
1448 || TREE_CODE (decl) == ARRAY_REF)
1449 decl = TREE_OPERAND (decl, 0);
1450 goto do_private;
1451 
1452 case OMP_CLAUSE_IS_DEVICE_PTR:
1453 decl = OMP_CLAUSE_DECL (c);
1454 goto do_private;
1455 
1456 case OMP_CLAUSE__LOOPTEMP_:
1457 case OMP_CLAUSE__REDUCTEMP_:
1458 gcc_assert (is_taskreg_ctx (ctx));
1459 decl = OMP_CLAUSE_DECL (c);
1460 install_var_field (decl, false, 3, ctx);
1461 install_var_local (decl, ctx);
1462 break;
1463 
1464 case OMP_CLAUSE_COPYPRIVATE:
1465 case OMP_CLAUSE_COPYIN:
1466 decl = OMP_CLAUSE_DECL (c);
1467 by_ref = use_pointer_for_field (decl, NULL);
1468 install_var_field (decl, by_ref, 3, ctx);
1469 break;
1470 
/* Clauses whose single operand is an expression that may reference
   outer-context variables: remap it in the outer context.  */
1471 case OMP_CLAUSE_FINAL:
1472 case OMP_CLAUSE_IF:
1473 case OMP_CLAUSE_NUM_THREADS:
1474 case OMP_CLAUSE_NUM_TEAMS:
1475 case OMP_CLAUSE_THREAD_LIMIT:
1476 case OMP_CLAUSE_DEVICE:
1477 case OMP_CLAUSE_SCHEDULE:
1478 case OMP_CLAUSE_DIST_SCHEDULE:
1479 case OMP_CLAUSE_DEPEND:
1480 case OMP_CLAUSE_PRIORITY:
1481 case OMP_CLAUSE_GRAINSIZE:
1482 case OMP_CLAUSE_NUM_TASKS:
1483 case OMP_CLAUSE_NUM_GANGS:
1484 case OMP_CLAUSE_NUM_WORKERS:
1485 case OMP_CLAUSE_VECTOR_LENGTH:
1486 case OMP_CLAUSE_DETACH:
1487 case OMP_CLAUSE_FILTER:
1488 if (ctx->outer)
1489 scan_omp_op (&OMP_CLAUSE_OPERAND (c, 0), ctx->outer);
1490 break;
1491 
1492 case OMP_CLAUSE_TO:
1493 case OMP_CLAUSE_FROM:
1494 case OMP_CLAUSE_MAP:
1495 if (ctx->outer)
1496 scan_omp_op (&OMP_CLAUSE_SIZE (c), ctx->outer);
1497 decl = OMP_CLAUSE_DECL (c);
1498 /* If requested, make 'decl' addressable. */
1499 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1500 && OMP_CLAUSE_MAP_DECL_MAKE_ADDRESSABLE (c))
1501 {
1502 gcc_checking_assert (DECL_P (decl));
1503 
1504 bool decl_addressable = TREE_ADDRESSABLE (decl);
1505 if (!decl_addressable)
1506 {
1507 if (!make_addressable_vars)
1508 make_addressable_vars = BITMAP_ALLOC (NULL);
1509 bitmap_set_bit (make_addressable_vars, DECL_UID (decl));
1510 TREE_ADDRESSABLE (decl) = 1;
1511 }
1512 
1513 if (dump_enabled_p ())
1514 {
1515 location_t loc = OMP_CLAUSE_LOCATION (c);
1516 const dump_user_location_t d_u_loc
1517 = dump_user_location_t::from_location_t (loc);
1518 /* PR100695 "Format decoder, quoting in 'dump_printf' etc." */
1519 #if __GNUC__ >= 10
1520 # pragma GCC diagnostic push
1521 # pragma GCC diagnostic ignored "-Wformat"
1522 #endif
1523 if (!decl_addressable)
1524 dump_printf_loc (MSG_NOTE, d_u_loc,
1525 "variable %<%T%>"
1526 " made addressable\n",
1527 decl);
1528 else
1529 dump_printf_loc (MSG_NOTE, d_u_loc,
1530 "variable %<%T%>"
1531 " already made addressable\n",
1532 decl);
1533 #if __GNUC__ >= 10
1534 # pragma GCC diagnostic pop
1535 #endif
1536 }
1537 
1538 /* Done. */
1539 OMP_CLAUSE_MAP_DECL_MAKE_ADDRESSABLE (c) = 0;
1540 }
1541 /* Global variables with "omp declare target" attribute
1542 don't need to be copied, the receiver side will use them
1543 directly. However, global variables with "omp declare target link"
1544 attribute need to be copied. Or when ALWAYS modifier is used. */
1545 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1546 && DECL_P (decl)
1547 && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
1548 && (OMP_CLAUSE_MAP_KIND (c)
1549 != GOMP_MAP_FIRSTPRIVATE_REFERENCE)
1550 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ATTACH
1551 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_DETACH)
1552 || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1553 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_TO
1554 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_FROM
1555 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_TOFROM
1556 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_TO_PSET
1557 && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
1558 && varpool_node::get_create (decl)->offloadable
1559 && !lookup_attribute ("omp declare target link",
1560 DECL_ATTRIBUTES (decl)))
1561 break;
1562 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1563 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER)
1564 {
1565 /* Ignore GOMP_MAP_POINTER kind for arrays in regions that are
1566 not offloaded; there is nothing to map for those. */
1567 if (!is_gimple_omp_offloaded (ctx->stmt)
1568 && !POINTER_TYPE_P (TREE_TYPE (decl))
1569 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
1570 break;
1571 }
1572 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1573 && DECL_P (decl)
1574 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
1575 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)
1576 && is_omp_target (ctx->stmt))
1577 {
1578 /* If this is an offloaded region, an attach operation should
1579 only exist when the pointer variable is mapped in a prior
1580 clause. */
1581 if (is_gimple_omp_offloaded (ctx->stmt))
1582 gcc_assert
1583 (maybe_lookup_decl (decl, ctx)
1584 || (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
1585 && lookup_attribute ("omp declare target",
1586 DECL_ATTRIBUTES (decl))));
1587 
1588 /* By itself, attach/detach is generated as part of pointer
1589 variable mapping and should not create new variables in the
1590 offloaded region, however sender refs for it must be created
1591 for its address to be passed to the runtime. */
1592 tree field
1593 = build_decl (OMP_CLAUSE_LOCATION (c),
1594 FIELD_DECL, NULL_TREE, ptr_type_node);
1595 SET_DECL_ALIGN (field, TYPE_ALIGN (ptr_type_node));
1596 insert_field_into_struct (ctx->record_type, field);
1597 /* To not clash with a map of the pointer variable itself,
1598 attach/detach maps have their field looked up by the *clause*
1599 tree expression, not the decl. */
1600 gcc_assert (!splay_tree_lookup (ctx->field_map,
1601 (splay_tree_key) c));
1602 splay_tree_insert (ctx->field_map, (splay_tree_key) c,
1603 (splay_tree_value) field);
1604 break;
1605 }
1606 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1607 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
1608 || (OMP_CLAUSE_MAP_KIND (c)
1609 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
1610 {
1611 if (TREE_CODE (decl) == COMPONENT_REF
1612 || (TREE_CODE (decl) == INDIRECT_REF
1613 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
1614 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
1615 == REFERENCE_TYPE)))
1616 break;
1617 if (DECL_SIZE (decl)
1618 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1619 {
1620 tree decl2 = DECL_VALUE_EXPR (decl);
1621 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1622 decl2 = TREE_OPERAND (decl2, 0);
1623 gcc_assert (DECL_P (decl2));
1624 install_var_local (decl2, ctx);
1625 }
1626 install_var_local (decl, ctx);
1627 break;
1628 }
1629 if (DECL_P (decl))
1630 {
1631 if (DECL_SIZE (decl)
1632 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1633 {
1634 tree decl2 = DECL_VALUE_EXPR (decl);
1635 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1636 decl2 = TREE_OPERAND (decl2, 0);
1637 gcc_assert (DECL_P (decl2));
1638 install_var_field (decl2, true, 3, ctx);
1639 install_var_local (decl2, ctx);
1640 install_var_local (decl, ctx);
1641 }
1642 else
1643 {
1644 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1645 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
1646 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
1647 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1648 install_var_field (decl, true, 7, ctx);
1649 else
1650 install_var_field (decl, true, 3, ctx);
1651 if (is_gimple_omp_offloaded (ctx->stmt)
1652 && !(is_gimple_omp_oacc (ctx->stmt)
1653 && OMP_CLAUSE_MAP_IN_REDUCTION (c)))
1654 install_var_local (decl, ctx);
1655 }
1656 }
1657 else
1658 {
/* Non-decl map operand (e.g. an array section): either mark the
   zero-bias pair, or synthesize a pointer-sized sender field keyed
   by the expression itself.  */
1659 tree base = get_base_address (decl);
1660 tree nc = OMP_CLAUSE_CHAIN (c);
1661 if (DECL_P (base)
1662 && nc != NULL_TREE
1663 && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
1664 && OMP_CLAUSE_DECL (nc) == base
1665 && OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_POINTER
1666 && integer_zerop (OMP_CLAUSE_SIZE (nc)))
1667 {
1668 OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c) = 1;
1669 OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (nc) = 1;
1670 }
1671 else
1672 {
1673 if (ctx->outer)
1674 {
1675 scan_omp_op (&OMP_CLAUSE_DECL (c), ctx->outer);
1676 decl = OMP_CLAUSE_DECL (c);
1677 }
1678 gcc_assert (!splay_tree_lookup (ctx->field_map,
1679 (splay_tree_key) decl));
1680 tree field
1681 = build_decl (OMP_CLAUSE_LOCATION (c),
1682 FIELD_DECL, NULL_TREE, ptr_type_node);
1683 SET_DECL_ALIGN (field, TYPE_ALIGN (ptr_type_node));
1684 insert_field_into_struct (ctx->record_type, field);
1685 splay_tree_insert (ctx->field_map, (splay_tree_key) decl,
1686 (splay_tree_value) field);
1687 }
1688 }
1689 break;
1690 
1691 case OMP_CLAUSE_ORDER:
1692 ctx->order_concurrent = true;
1693 break;
1694 
1695 case OMP_CLAUSE_BIND:
1696 ctx->loop_p = true;
1697 break;
1698 
/* Clauses that need no decl/field installation in this pass.  */
1699 case OMP_CLAUSE_NOWAIT:
1700 case OMP_CLAUSE_ORDERED:
1701 case OMP_CLAUSE_COLLAPSE:
1702 case OMP_CLAUSE_UNTIED:
1703 case OMP_CLAUSE_MERGEABLE:
1704 case OMP_CLAUSE_PROC_BIND:
1705 case OMP_CLAUSE_SAFELEN:
1706 case OMP_CLAUSE_SIMDLEN:
1707 case OMP_CLAUSE_THREADS:
1708 case OMP_CLAUSE_SIMD:
1709 case OMP_CLAUSE_NOGROUP:
1710 case OMP_CLAUSE_DEFAULTMAP:
1711 case OMP_CLAUSE_ASYNC:
1712 case OMP_CLAUSE_WAIT:
1713 case OMP_CLAUSE_GANG:
1714 case OMP_CLAUSE_WORKER:
1715 case OMP_CLAUSE_VECTOR:
1716 case OMP_CLAUSE_INDEPENDENT:
1717 case OMP_CLAUSE_AUTO:
1718 case OMP_CLAUSE_SEQ:
1719 case OMP_CLAUSE_TILE:
1720 case OMP_CLAUSE__SIMT_:
1721 case OMP_CLAUSE_DEFAULT:
1722 case OMP_CLAUSE_NONTEMPORAL:
1723 case OMP_CLAUSE_IF_PRESENT:
1724 case OMP_CLAUSE_FINALIZE:
1725 case OMP_CLAUSE_TASK_REDUCTION:
1726 case OMP_CLAUSE_ALLOCATE:
1727 break;
1728 
1729 case OMP_CLAUSE_ALIGNED:
1730 decl = OMP_CLAUSE_DECL (c);
1731 if (is_global_var (decl)
1732 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1733 install_var_local (decl, ctx);
1734 break;
1735 
1736 case OMP_CLAUSE__CONDTEMP_:
1737 decl = OMP_CLAUSE_DECL (c);
1738 if (is_parallel_ctx (ctx))
1739 {
1740 install_var_field (decl, false, 3, ctx);
1741 install_var_local (decl, ctx);
1742 }
1743 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
1744 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD
1745 && !OMP_CLAUSE__CONDTEMP__ITER (c))
1746 install_var_local (decl, ctx);
1747 break;
1748 
1749 case OMP_CLAUSE__CACHE_:
1750 case OMP_CLAUSE_NOHOST:
1751 default:
1752 gcc_unreachable ();
1753 }
1754 }
1755 
/* Second pass: now that fields/locals exist, fix up remapped decls and
   note clauses whose attached GIMPLE sequences still need scanning.  */
1756 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1757 {
1758 switch (OMP_CLAUSE_CODE (c))
1759 {
1760 case OMP_CLAUSE_LASTPRIVATE:
1761 /* Let the corresponding firstprivate clause create
1762 the variable. */
1763 if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
1764 scan_array_reductions = true;
1765 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
1766 break;
1767 /* FALLTHRU */
1768 
1769 case OMP_CLAUSE_FIRSTPRIVATE:
1770 case OMP_CLAUSE_PRIVATE:
1771 case OMP_CLAUSE_LINEAR:
1772 case OMP_CLAUSE_HAS_DEVICE_ADDR:
1773 case OMP_CLAUSE_IS_DEVICE_PTR:
1774 decl = OMP_CLAUSE_DECL (c);
1775 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR)
1776 {
1777 while (TREE_CODE (decl) == INDIRECT_REF
1778 || TREE_CODE (decl) == ARRAY_REF)
1779 decl = TREE_OPERAND (decl, 0);
1780 }
1781 
1782 if (is_variable_sized (decl))
1783 {
1784 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
1785 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR
1786 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR)
1787 && is_gimple_omp_offloaded (ctx->stmt))
1788 {
1789 tree decl2 = DECL_VALUE_EXPR (decl);
1790 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1791 decl2 = TREE_OPERAND (decl2, 0);
1792 gcc_assert (DECL_P (decl2));
1793 install_var_local (decl2, ctx);
1794 fixup_remapped_decl (decl2, ctx, false);
1795 }
1796 install_var_local (decl, ctx);
1797 }
1798 fixup_remapped_decl (decl, ctx,
1799 OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
1800 && OMP_CLAUSE_PRIVATE_DEBUG (c));
1801 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
1802 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
1803 scan_array_reductions = true;
1804 break;
1805 
1806 case OMP_CLAUSE_REDUCTION:
1807 case OMP_CLAUSE_IN_REDUCTION:
1808 decl = OMP_CLAUSE_DECL (c);
1809 if (TREE_CODE (decl) != MEM_REF && !is_omp_target (ctx->stmt))
1810 {
1811 if (is_variable_sized (decl))
1812 install_var_local (decl, ctx);
1813 fixup_remapped_decl (decl, ctx, false);
1814 }
1815 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
1816 scan_array_reductions = true;
1817 break;
1818 
1819 case OMP_CLAUSE_TASK_REDUCTION:
1820 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
1821 scan_array_reductions = true;
1822 break;
1823 
1824 case OMP_CLAUSE_SHARED:
1825 /* Ignore shared directives in teams construct inside of
1826 target construct. */
1827 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
1828 && !is_host_teams_ctx (ctx))
1829 break;
1830 decl = OMP_CLAUSE_DECL (c);
1831 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1832 break;
1833 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
1834 {
1835 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl,
1836 ctx->outer)))
1837 break;
1838 bool by_ref = use_pointer_for_field (decl, ctx);
1839 install_var_field (decl, by_ref, 11, ctx);
1840 break;
1841 }
1842 fixup_remapped_decl (decl, ctx, false);
1843 break;
1844 
1845 case OMP_CLAUSE_MAP:
1846 if (!is_gimple_omp_offloaded (ctx->stmt))
1847 break;
1848 decl = OMP_CLAUSE_DECL (c);
1849 if (DECL_P (decl)
1850 && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
1851 && (OMP_CLAUSE_MAP_KIND (c)
1852 != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
1853 || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1854 && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
1855 && varpool_node::get_create (decl)->offloadable)
1856 break;
1857 if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
1858 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)
1859 && is_omp_target (ctx->stmt)
1860 && !is_gimple_omp_offloaded (ctx->stmt))
1861 break;
1862 if (DECL_P (decl))
1863 {
1864 if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
1865 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
1866 && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
1867 && !COMPLETE_TYPE_P (TREE_TYPE (decl)))
1868 {
1869 tree new_decl = lookup_decl (decl, ctx);
1870 TREE_TYPE (new_decl)
1871 = remap_type (TREE_TYPE (decl), &ctx->cb);
1872 }
1873 else if (DECL_SIZE (decl)
1874 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1875 {
1876 tree decl2 = DECL_VALUE_EXPR (decl);
1877 gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1878 decl2 = TREE_OPERAND (decl2, 0);
1879 gcc_assert (DECL_P (decl2));
1880 fixup_remapped_decl (decl2, ctx, false);
1881 fixup_remapped_decl (decl, ctx, true);
1882 }
1883 else
1884 fixup_remapped_decl (decl, ctx, false);
1885 }
1886 break;
1887 
/* Clauses requiring no fixup work in the second pass.  */
1888 case OMP_CLAUSE_COPYPRIVATE:
1889 case OMP_CLAUSE_COPYIN:
1890 case OMP_CLAUSE_DEFAULT:
1891 case OMP_CLAUSE_IF:
1892 case OMP_CLAUSE_NUM_THREADS:
1893 case OMP_CLAUSE_NUM_TEAMS:
1894 case OMP_CLAUSE_THREAD_LIMIT:
1895 case OMP_CLAUSE_DEVICE:
1896 case OMP_CLAUSE_SCHEDULE:
1897 case OMP_CLAUSE_DIST_SCHEDULE:
1898 case OMP_CLAUSE_NOWAIT:
1899 case OMP_CLAUSE_ORDERED:
1900 case OMP_CLAUSE_COLLAPSE:
1901 case OMP_CLAUSE_UNTIED:
1902 case OMP_CLAUSE_FINAL:
1903 case OMP_CLAUSE_MERGEABLE:
1904 case OMP_CLAUSE_PROC_BIND:
1905 case OMP_CLAUSE_SAFELEN:
1906 case OMP_CLAUSE_SIMDLEN:
1907 case OMP_CLAUSE_ALIGNED:
1908 case OMP_CLAUSE_DEPEND:
1909 case OMP_CLAUSE_DETACH:
1910 case OMP_CLAUSE_ALLOCATE:
1911 case OMP_CLAUSE__LOOPTEMP_:
1912 case OMP_CLAUSE__REDUCTEMP_:
1913 case OMP_CLAUSE_TO:
1914 case OMP_CLAUSE_FROM:
1915 case OMP_CLAUSE_PRIORITY:
1916 case OMP_CLAUSE_GRAINSIZE:
1917 case OMP_CLAUSE_NUM_TASKS:
1918 case OMP_CLAUSE_THREADS:
1919 case OMP_CLAUSE_SIMD:
1920 case OMP_CLAUSE_NOGROUP:
1921 case OMP_CLAUSE_DEFAULTMAP:
1922 case OMP_CLAUSE_ORDER:
1923 case OMP_CLAUSE_BIND:
1924 case OMP_CLAUSE_USE_DEVICE_PTR:
1925 case OMP_CLAUSE_USE_DEVICE_ADDR:
1926 case OMP_CLAUSE_NONTEMPORAL:
1927 case OMP_CLAUSE_ASYNC:
1928 case OMP_CLAUSE_WAIT:
1929 case OMP_CLAUSE_NUM_GANGS:
1930 case OMP_CLAUSE_NUM_WORKERS:
1931 case OMP_CLAUSE_VECTOR_LENGTH:
1932 case OMP_CLAUSE_GANG:
1933 case OMP_CLAUSE_WORKER:
1934 case OMP_CLAUSE_VECTOR:
1935 case OMP_CLAUSE_INDEPENDENT:
1936 case OMP_CLAUSE_AUTO:
1937 case OMP_CLAUSE_SEQ:
1938 case OMP_CLAUSE_TILE:
1939 case OMP_CLAUSE__SIMT_:
1940 case OMP_CLAUSE_IF_PRESENT:
1941 case OMP_CLAUSE_FINALIZE:
1942 case OMP_CLAUSE_FILTER:
1943 case OMP_CLAUSE__CONDTEMP_:
1944 break;
1945 
1946 case OMP_CLAUSE__CACHE_:
1947 case OMP_CLAUSE_NOHOST:
1948 default:
1949 gcc_unreachable ();
1950 }
1951 }
1952 
/* Finally scan the GIMPLE sequences attached to reduction/lastprivate/
   linear clauses.  For target regions the sequences are scanned in the
   outer context, since they execute there.  */
1953 gcc_checking_assert (!scan_array_reductions
1954 || !is_gimple_omp_oacc (ctx->stmt));
1955 if (scan_array_reductions)
1956 {
1957 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1958 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
1959 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
1960 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
1961 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
1962 {
1963 omp_context *rctx = ctx;
1964 if (is_omp_target (ctx->stmt))
1965 rctx = ctx->outer;
1966 scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c), rctx);
1967 scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), rctx);
1968 }
1969 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
1970 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
1971 scan_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
1972 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
1973 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
1974 scan_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
1975 }
1976 }
1977
1978 /* Create a new name for omp child function. Returns an identifier. */
1979
1980 static tree
create_omp_child_function_name(bool task_copy)1981 create_omp_child_function_name (bool task_copy)
1982 {
1983 return clone_function_name_numbered (current_function_decl,
1984 task_copy ? "_omp_cpyfn" : "_omp_fn");
1985 }
1986
1987 /* Return true if CTX may belong to offloaded code: either if current function
1988 is offloaded, or any enclosing context corresponds to a target region. */
1989
1990 static bool
omp_maybe_offloaded_ctx(omp_context * ctx)1991 omp_maybe_offloaded_ctx (omp_context *ctx)
1992 {
1993 if (cgraph_node::get (current_function_decl)->offloadable)
1994 return true;
1995 for (; ctx; ctx = ctx->outer)
1996 if (is_gimple_omp_offloaded (ctx->stmt))
1997 return true;
1998 return false;
1999 }
2000
2001 /* Build a decl for the omp child function. It'll not contain a body
2002 yet, just the bare decl. */
2003
2004 static void
create_omp_child_function(omp_context * ctx,bool task_copy)2005 create_omp_child_function (omp_context *ctx, bool task_copy)
2006 {
2007   tree decl, type, name, t;
2008 
2009   name = create_omp_child_function_name (task_copy);
     /* A task copy function takes two pointers (destination and source
	task data blocks); the ordinary child function takes only the
	single .omp_data_i pointer.  Both return void.  */
2010   if (task_copy)
2011     type = build_function_type_list (void_type_node, ptr_type_node,
2012 				     ptr_type_node, NULL_TREE);
2013   else
2014     type = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
2015 
2016   decl = build_decl (gimple_location (ctx->stmt), FUNCTION_DECL, name, type);
2017 
     /* OpenACC constructs never use a task copy function.  */
2018   gcc_checking_assert (!is_gimple_omp_oacc (ctx->stmt)
2019 		       || !task_copy);
2020   if (!task_copy)
2021     ctx->cb.dst_fn = decl;
2022   else
2023     gimple_omp_task_set_copy_fn (ctx->stmt, decl);
2024 
     /* The child function is a compiler-generated, non-public,
	never-inlined function with a body to be filled in later.  */
2025   TREE_STATIC (decl) = 1;
2026   TREE_USED (decl) = 1;
2027   DECL_ARTIFICIAL (decl) = 1;
2028   DECL_IGNORED_P (decl) = 0;
2029   TREE_PUBLIC (decl) = 0;
2030   DECL_UNINLINABLE (decl) = 1;
2031   DECL_EXTERNAL (decl) = 0;
2032   DECL_CONTEXT (decl) = NULL_TREE;
2033   DECL_INITIAL (decl) = make_node (BLOCK);
2034   BLOCK_SUPERCONTEXT (DECL_INITIAL (decl)) = decl;
     /* Start from the parent function's attribute list ...  */
2035   DECL_ATTRIBUTES (decl) = DECL_ATTRIBUTES (current_function_decl);
2036   /* Remove omp declare simd attribute from the new attributes. */
2037   if (tree a = lookup_attribute ("omp declare simd", DECL_ATTRIBUTES (decl)))
2038     {
       /* Find the last "omp declare simd" attribute; the tail after it
	  can be shared with the parent, while the nodes before it are
	  copied so the unwanted attributes can be unlinked without
	  modifying the parent's list.  */
2039       while (tree a2 = lookup_attribute ("omp declare simd", TREE_CHAIN (a)))
2040 	a = a2;
2041       a = TREE_CHAIN (a);
2042       for (tree *p = &DECL_ATTRIBUTES (decl); *p != a;)
2043 	if (is_attribute_p ("omp declare simd", get_attribute_name (*p)))
2044 	  *p = TREE_CHAIN (*p);
2045 	else
2046 	  {
2047 	    tree chain = TREE_CHAIN (*p);
2048 	    *p = copy_node (*p);
2049 	    p = &TREE_CHAIN (*p);
2050 	    *p = chain;
2051 	  }
2052     }
     /* Inherit optimization/target options and versioning state.  */
2053   DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)
2054     = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (current_function_decl);
2055   DECL_FUNCTION_SPECIFIC_TARGET (decl)
2056     = DECL_FUNCTION_SPECIFIC_TARGET (current_function_decl);
2057   DECL_FUNCTION_VERSIONED (decl)
2058     = DECL_FUNCTION_VERSIONED (current_function_decl);
2059 
2060   if (omp_maybe_offloaded_ctx (ctx))
2061     {
2062       cgraph_node::get_create (decl)->offloadable = 1;
2063       if (ENABLE_OFFLOADING)
2064 	g->have_offload = true;
2065     }
2066 
2067   if (cgraph_node::get_create (decl)->offloadable)
2068     {
       /* The entry point of an offloaded region is marked specially;
	  any other offloadable function is "omp declare target".  */
2069       const char *target_attr = (is_gimple_omp_offloaded (ctx->stmt)
2070 				 ? "omp target entrypoint"
2071 				 : "omp declare target");
2072       if (lookup_attribute ("omp declare target",
2073 			    DECL_ATTRIBUTES (current_function_decl)))
2074 	{
2075 	  if (is_gimple_omp_offloaded (ctx->stmt))
2076 	    DECL_ATTRIBUTES (decl)
2077 	      = remove_attribute ("omp declare target",
2078 				  copy_list (DECL_ATTRIBUTES (decl)));
2079 	  else
2080 	    target_attr = NULL;
2081 	}
2082       if (target_attr)
2083 	DECL_ATTRIBUTES (decl)
2084 	  = tree_cons (get_identifier (target_attr),
2085 		       NULL_TREE, DECL_ATTRIBUTES (decl));
2086     }
2087 
     /* void return value.  */
2088   t = build_decl (DECL_SOURCE_LOCATION (decl),
2089 		  RESULT_DECL, NULL_TREE, void_type_node);
2090   DECL_ARTIFICIAL (t) = 1;
2091   DECL_IGNORED_P (t) = 1;
2092   DECL_CONTEXT (t) = decl;
2093   DECL_RESULT (decl) = t;
2094 
     /* The .omp_data_i argument: pointer to the marshalled data block.
	Note DECL_CONTEXT is set to the *parent* function here; it is
	presumably fixed up when the body is moved into the child —
	TODO confirm against the expansion pass.  */
2095   tree data_name = get_identifier (".omp_data_i");
2096   t = build_decl (DECL_SOURCE_LOCATION (decl), PARM_DECL, data_name,
2097 		  ptr_type_node);
2098   DECL_ARTIFICIAL (t) = 1;
2099   DECL_NAMELESS (t) = 1;
2100   DECL_ARG_TYPE (t) = ptr_type_node;
2101   DECL_CONTEXT (t) = current_function_decl;
2102   TREE_USED (t) = 1;
2103   TREE_READONLY (t) = 1;
2104   DECL_ARGUMENTS (decl) = t;
2105   if (!task_copy)
2106     ctx->receiver_decl = t;
2107   else
2108     {
       /* Task copy functions additionally receive the .omp_data_o
	  source argument, chained in front of .omp_data_i.  */
2109       t = build_decl (DECL_SOURCE_LOCATION (decl),
2110 		      PARM_DECL, get_identifier (".omp_data_o"),
2111 		      ptr_type_node);
2112       DECL_ARTIFICIAL (t) = 1;
2113       DECL_NAMELESS (t) = 1;
2114       DECL_ARG_TYPE (t) = ptr_type_node;
2115       DECL_CONTEXT (t) = current_function_decl;
2116       TREE_USED (t) = 1;
2117       TREE_ADDRESSABLE (t) = 1;
2118       DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
2119       DECL_ARGUMENTS (decl) = t;
2120     }
2121 
2122   /* Allocate memory for the function structure. The call to
2123      allocate_struct_function clobbers CFUN, so we need to restore
2124      it afterward. */
2125   push_struct_function (decl);
2126   cfun->function_end_locus = gimple_location (ctx->stmt);
2127   init_tree_ssa (cfun);
2128   pop_cfun ();
2129 }
2130
2131 /* Callback for walk_gimple_seq. Check if combined parallel
2132 contains gimple_omp_for_combined_into_p OMP_FOR. */
2133
2134 tree
omp_find_combined_for(gimple_stmt_iterator * gsi_p,bool * handled_ops_p,struct walk_stmt_info * wi)2135 omp_find_combined_for (gimple_stmt_iterator *gsi_p,
2136 bool *handled_ops_p,
2137 struct walk_stmt_info *wi)
2138 {
2139 gimple *stmt = gsi_stmt (*gsi_p);
2140
2141 *handled_ops_p = true;
2142 switch (gimple_code (stmt))
2143 {
2144 WALK_SUBSTMTS;
2145
2146 case GIMPLE_OMP_FOR:
2147 if (gimple_omp_for_combined_into_p (stmt)
2148 && gimple_omp_for_kind (stmt)
2149 == *(const enum gf_mask *) (wi->info))
2150 {
2151 wi->info = stmt;
2152 return integer_zero_node;
2153 }
2154 break;
2155 default:
2156 break;
2157 }
2158 return NULL;
2159 }
2160
2161 /* Add _LOOPTEMP_/_REDUCTEMP_ clauses on OpenMP parallel or task. */
2162
2163 static void
add_taskreg_looptemp_clauses(enum gf_mask msk,gimple * stmt,omp_context * outer_ctx)2164 add_taskreg_looptemp_clauses (enum gf_mask msk, gimple *stmt,
2165 			      omp_context *outer_ctx)
2166 {
2167   struct walk_stmt_info wi;
2168 
2169   memset (&wi, 0, sizeof (wi));
2170   wi.val_only = true;
2171   wi.info = (void *) &msk;
     /* Look inside the body for a GIMPLE_OMP_FOR of kind MSK that has
	been combined into STMT; the callback overwrites wi.info with
	the loop statement when found.  */
2172   walk_gimple_seq (gimple_omp_body (stmt), omp_find_combined_for, NULL, &wi);
2173   if (wi.info != (void *) &msk)
2174     {
2175       gomp_for *for_stmt = as_a <gomp_for *> ((gimple *) wi.info);
2176       struct omp_for_data fd;
2177       omp_extract_for_data (for_stmt, &fd, NULL);
2178       /* We need two temporaries with fd.loop.v type (istart/iend)
2179 	 and then (fd.collapse - 1) temporaries with the same
2180 	 type for count2 ... countN-1 vars if not constant. */
2181       size_t count = 2, i;
2182       tree type = fd.iter_type;
2183       if (fd.collapse > 1
2184 	  && TREE_CODE (fd.loop.n2) != INTEGER_CST)
2185 	{
2186 	  count += fd.collapse - 1;
2187 	  /* If there are lastprivate clauses on the inner
2188 	     GIMPLE_OMP_FOR, add one more temporaries for the total number
2189 	     of iterations (product of count1 ... countN-1). */
2190 	  if (omp_find_clause (gimple_omp_for_clauses (for_stmt),
2191 			       OMP_CLAUSE_LASTPRIVATE)
2192 	      || (msk == GF_OMP_FOR_KIND_FOR
2193 		  && omp_find_clause (gimple_omp_parallel_clauses (stmt),
2194 				      OMP_CLAUSE_LASTPRIVATE)))
2195 	    {
2196 	      tree temp = create_tmp_var (type);
2197 	      tree c = build_omp_clause (UNKNOWN_LOCATION,
2198 					 OMP_CLAUSE__LOOPTEMP_);
             /* Map the temp to itself so copy_var_decl keeps it.  */
2199 	      insert_decl_map (&outer_ctx->cb, temp, temp);
2200 	      OMP_CLAUSE_DECL (c) = temp;
2201 	      OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
2202 	      gimple_omp_taskreg_set_clauses (stmt, c);
2203 	    }
         /* A non-rectangular loop nest where the two non-rectangular
	    indices are adjacent and the inner index is signed needs
	    three extra temporaries of the index type (plus one more
	    of the iterator type counted via COUNT).  */
2204 	  if (fd.non_rect
2205 	      && fd.last_nonrect == fd.first_nonrect + 1)
2206 	    if (tree v = gimple_omp_for_index (for_stmt, fd.last_nonrect))
2207 	      if (!TYPE_UNSIGNED (TREE_TYPE (v)))
2208 		{
2209 		  v = gimple_omp_for_index (for_stmt, fd.first_nonrect);
2210 		  tree type2 = TREE_TYPE (v);
2211 		  count++;
2212 		  for (i = 0; i < 3; i++)
2213 		    {
2214 		      tree temp = create_tmp_var (type2);
2215 		      tree c = build_omp_clause (UNKNOWN_LOCATION,
2216 						 OMP_CLAUSE__LOOPTEMP_);
2217 		      insert_decl_map (&outer_ctx->cb, temp, temp);
2218 		      OMP_CLAUSE_DECL (c) = temp;
2219 		      OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
2220 		      gimple_omp_taskreg_set_clauses (stmt, c);
2221 		    }
2222 		}
2223 	}
       /* Add the COUNT _looptemp_ clauses of the iterator type.  */
2224       for (i = 0; i < count; i++)
2225 	{
2226 	  tree temp = create_tmp_var (type);
2227 	  tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
2228 	  insert_decl_map (&outer_ctx->cb, temp, temp);
2229 	  OMP_CLAUSE_DECL (c) = temp;
2230 	  OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
2231 	  gimple_omp_taskreg_set_clauses (stmt, c);
2232 	}
2233     }
   /* A taskloop with reductions additionally needs a _reductemp_
      pointer for the runtime's reduction bookkeeping.  */
2234   if (msk == GF_OMP_FOR_KIND_TASKLOOP
2235       && omp_find_clause (gimple_omp_task_clauses (stmt),
2236 			  OMP_CLAUSE_REDUCTION))
2237     {
2238       tree type = build_pointer_type (pointer_sized_int_node);
2239       tree temp = create_tmp_var (type);
2240       tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
2241       insert_decl_map (&outer_ctx->cb, temp, temp);
2242       OMP_CLAUSE_DECL (c) = temp;
2243       OMP_CLAUSE_CHAIN (c) = gimple_omp_task_clauses (stmt);
2244       gimple_omp_task_set_clauses (stmt, c);
2245     }
2246 }
2247
2248 /* Scan an OpenMP parallel directive. */
2249
2250 static void
scan_omp_parallel(gimple_stmt_iterator * gsi,omp_context * outer_ctx)2251 scan_omp_parallel (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
2252 {
2253   omp_context *ctx;
2254   tree name;
2255   gomp_parallel *stmt = as_a <gomp_parallel *> (gsi_stmt (*gsi));
2256 
2257   /* Ignore parallel directives with empty bodies, unless there
2258      are copyin clauses. */
2259   if (optimize > 0
2260       && empty_body_p (gimple_omp_body (stmt))
2261       && omp_find_clause (gimple_omp_parallel_clauses (stmt),
2262 			  OMP_CLAUSE_COPYIN) == NULL)
2263     {
2264       gsi_replace (gsi, gimple_build_nop (), false);
2265       return;
2266     }
2267 
   /* Combined parallel+workshare constructs need _looptemp_ (and
      possibly _reductemp_) clauses for the inner loop.  */
2268   if (gimple_omp_parallel_combined_p (stmt))
2269     add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_FOR, stmt, outer_ctx);
   /* If any reduction clause uses the task modifier, prepend a single
      _REDUCTEMP_ clause.  NB: the inner C declared below intentionally
      shadows the loop variable C.  */
2270   for (tree c = omp_find_clause (gimple_omp_parallel_clauses (stmt),
2271 				 OMP_CLAUSE_REDUCTION);
2272        c; c = omp_find_clause (OMP_CLAUSE_CHAIN (c), OMP_CLAUSE_REDUCTION))
2273     if (OMP_CLAUSE_REDUCTION_TASK (c))
2274       {
2275 	tree type = build_pointer_type (pointer_sized_int_node);
2276 	tree temp = create_tmp_var (type);
2277 	tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
2278 	if (outer_ctx)
2279 	  insert_decl_map (&outer_ctx->cb, temp, temp);
2280 	OMP_CLAUSE_DECL (c) = temp;
2281 	OMP_CLAUSE_CHAIN (c) = gimple_omp_parallel_clauses (stmt);
2282 	gimple_omp_parallel_set_clauses (stmt, c);
2283 	break;
2284       }
2285     else if (OMP_CLAUSE_CHAIN (c) == NULL_TREE)
2286       break;
2287 
2288   ctx = new_omp_context (stmt, outer_ctx);
   /* Record the context; its data record is laid out later by
      finish_taskreg_scan, once the whole body has been scanned.  */
2289   taskreg_contexts.safe_push (ctx);
2290   if (taskreg_nesting_level > 1)
2291     ctx->is_nested = true;
2292   ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
   /* .omp_data_s is the record that marshals shared/privatized
      variables into the child function.  */
2293   ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2294   name = create_tmp_var_name (".omp_data_s");
2295   name = build_decl (gimple_location (stmt),
2296 		     TYPE_DECL, name, ctx->record_type);
2297   DECL_ARTIFICIAL (name) = 1;
2298   DECL_NAMELESS (name) = 1;
2299   TYPE_NAME (ctx->record_type) = name;
2300   TYPE_ARTIFICIAL (ctx->record_type) = 1;
2301   create_omp_child_function (ctx, false);
2302   gimple_omp_parallel_set_child_fn (stmt, ctx->cb.dst_fn);
2303 
2304   scan_sharing_clauses (gimple_omp_parallel_clauses (stmt), ctx);
2305   scan_omp (gimple_omp_body_ptr (stmt), ctx);
2306 
   /* No fields were added: nothing needs to be marshalled.  */
2307   if (TYPE_FIELDS (ctx->record_type) == NULL)
2308     ctx->record_type = ctx->receiver_decl = NULL;
2309 }
2310
2311 /* Scan an OpenMP task directive. */
2312
2313 static void
scan_omp_task(gimple_stmt_iterator * gsi,omp_context * outer_ctx)2314 scan_omp_task (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
2315 {
2316   omp_context *ctx;
2317   tree name, t;
2318   gomp_task *stmt = as_a <gomp_task *> (gsi_stmt (*gsi));
2319 
2320   /* Ignore task directives with empty bodies, unless they have depend
2321      clause. */
2322   if (optimize > 0
2323       && gimple_omp_body (stmt)
2324       && empty_body_p (gimple_omp_body (stmt))
2325       && !omp_find_clause (gimple_omp_task_clauses (stmt), OMP_CLAUSE_DEPEND))
2326     {
2327       gsi_replace (gsi, gimple_build_nop (), false);
2328       return;
2329     }
2330 
   /* Taskloops need _looptemp_ (and possibly _reductemp_) clauses for
      the GOMP_taskloop interface.  */
2331   if (gimple_omp_task_taskloop_p (stmt))
2332     add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_TASKLOOP, stmt, outer_ctx);
2333 
2334   ctx = new_omp_context (stmt, outer_ctx);
2335 
   /* A taskwait-with-depend has no body; only its clauses need
      scanning — no child function or data record.  */
2336   if (gimple_omp_task_taskwait_p (stmt))
2337     {
2338       scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);
2339       return;
2340     }
2341 
   /* Record the context; the data record layout is finished later by
      finish_taskreg_scan.  */
2342   taskreg_contexts.safe_push (ctx);
2343   if (taskreg_nesting_level > 1)
2344     ctx->is_nested = true;
2345   ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2346   ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2347   name = create_tmp_var_name (".omp_data_s");
2348   name = build_decl (gimple_location (stmt),
2349 		     TYPE_DECL, name, ctx->record_type);
2350   DECL_ARTIFICIAL (name) = 1;
2351   DECL_NAMELESS (name) = 1;
2352   TYPE_NAME (ctx->record_type) = name;
2353   TYPE_ARTIFICIAL (ctx->record_type) = 1;
2354   create_omp_child_function (ctx, false);
2355   gimple_omp_task_set_child_fn (stmt, ctx->cb.dst_fn);
2356 
2357   scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);
2358 
   /* If clause scanning created a sender-side record (.omp_data_a),
      the task also needs a task copy function.  */
2359   if (ctx->srecord_type)
2360     {
2361       name = create_tmp_var_name (".omp_data_a");
2362       name = build_decl (gimple_location (stmt),
2363 			 TYPE_DECL, name, ctx->srecord_type);
2364       DECL_ARTIFICIAL (name) = 1;
2365       DECL_NAMELESS (name) = 1;
2366       TYPE_NAME (ctx->srecord_type) = name;
2367       TYPE_ARTIFICIAL (ctx->srecord_type) = 1;
2368       create_omp_child_function (ctx, true);
2369     }
2370 
2371   scan_omp (gimple_omp_body_ptr (stmt), ctx);
2372 
2373   if (TYPE_FIELDS (ctx->record_type) == NULL)
2374     {
       /* Empty data record: tell the runtime there is nothing to
	  copy (size 0, alignment 1).  */
2375       ctx->record_type = ctx->receiver_decl = NULL;
2376       t = build_int_cst (long_integer_type_node, 0);
2377       gimple_omp_task_set_arg_size (stmt, t);
2378       t = build_int_cst (long_integer_type_node, 1);
2379       gimple_omp_task_set_arg_align (stmt, t);
2380     }
2381 }
2382
2383 /* Helper function for finish_taskreg_scan, called through walk_tree.
2384 If maybe_lookup_decl_in_outer_context returns non-NULL for some
2385 tree, replace it in the expression. */
2386
2387 static tree
finish_taskreg_remap(tree * tp,int * walk_subtrees,void * data)2388 finish_taskreg_remap (tree *tp, int *walk_subtrees, void *data)
2389 {
2390 if (VAR_P (*tp))
2391 {
2392 omp_context *ctx = (omp_context *) data;
2393 tree t = maybe_lookup_decl_in_outer_ctx (*tp, ctx);
2394 if (t != *tp)
2395 {
2396 if (DECL_HAS_VALUE_EXPR_P (t))
2397 t = unshare_expr (DECL_VALUE_EXPR (t));
2398 *tp = t;
2399 }
2400 *walk_subtrees = 0;
2401 }
2402 else if (IS_TYPE_OR_DECL_P (*tp))
2403 *walk_subtrees = 0;
2404 return NULL_TREE;
2405 }
2406
2407 /* If any decls have been made addressable during scan_omp,
2408 adjust their fields if needed, and layout record types
2409 of parallel/task constructs. */
2410
2411 static void
finish_taskreg_scan(omp_context * ctx)2412 finish_taskreg_scan (omp_context *ctx)
2413 {
   /* Contexts with no marshalled data were already cleared.  */
2414   if (ctx->record_type == NULL_TREE)
2415     return;
2416 
2417   /* If any make_addressable_vars were needed, verify all
2418      OMP_CLAUSE_SHARED clauses on GIMPLE_OMP_{PARALLEL,TASK,TEAMS}
2419      statements if use_pointer_for_field hasn't changed
2420      because of that. If it did, update field types now. */
2421   if (make_addressable_vars)
2422     {
2423       tree c;
2424 
2425       for (c = gimple_omp_taskreg_clauses (ctx->stmt);
2426 	   c; c = OMP_CLAUSE_CHAIN (c))
2427 	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
2428 	    && !OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
2429 	  {
2430 	    tree decl = OMP_CLAUSE_DECL (c);
2431 
2432 	    /* Global variables don't need to be copied,
2433 	       the receiver side will use them directly. */
2434 	    if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
2435 	      continue;
2436 	    if (!bitmap_bit_p (make_addressable_vars, DECL_UID (decl))
2437 		|| !use_pointer_for_field (decl, ctx))
2438 	      continue;
2439 	    tree field = lookup_field (decl, ctx);
           /* Already a pointer to the right type: nothing to do.  */
2440 	    if (TREE_CODE (TREE_TYPE (field)) == POINTER_TYPE
2441 		&& TREE_TYPE (TREE_TYPE (field)) == TREE_TYPE (decl))
2442 	      continue;
           /* The field was created by value but must now be passed
	      by reference; retype it and reset value-specific
	      volatility/alignment.  */
2443 	    TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
2444 	    TREE_THIS_VOLATILE (field) = 0;
2445 	    DECL_USER_ALIGN (field) = 0;
2446 	    SET_DECL_ALIGN (field, TYPE_ALIGN (TREE_TYPE (field)));
2447 	    if (TYPE_ALIGN (ctx->record_type) < DECL_ALIGN (field))
2448 	      SET_TYPE_ALIGN (ctx->record_type, DECL_ALIGN (field));
2449 	    if (ctx->srecord_type)
2450 	      {
               /* Keep the sender-side record in sync.  */
2451 		tree sfield = lookup_sfield (decl, ctx);
2452 		TREE_TYPE (sfield) = TREE_TYPE (field);
2453 		TREE_THIS_VOLATILE (sfield) = 0;
2454 		DECL_USER_ALIGN (sfield) = 0;
2455 		SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
2456 		if (TYPE_ALIGN (ctx->srecord_type) < DECL_ALIGN (sfield))
2457 		  SET_TYPE_ALIGN (ctx->srecord_type, DECL_ALIGN (sfield));
2458 	      }
2459 	  }
2460     }
2461 
2462   if (gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL)
2463     {
2464       tree clauses = gimple_omp_parallel_clauses (ctx->stmt);
2465       tree c = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
2466       if (c)
2467 	{
2468 	  /* Move the _reductemp_ clause first. GOMP_parallel_reductions
2469 	     expects to find it at the start of data. */
2470 	  tree f = lookup_field (OMP_CLAUSE_DECL (c), ctx);
2471 	  tree *p = &TYPE_FIELDS (ctx->record_type);
         /* Unlink F from wherever it is and re-chain it first.  */
2472 	  while (*p)
2473 	    if (*p == f)
2474 	      {
2475 		*p = DECL_CHAIN (*p);
2476 		break;
2477 	      }
2478 	    else
2479 	      p = &DECL_CHAIN (*p);
2480 	  DECL_CHAIN (f) = TYPE_FIELDS (ctx->record_type);
2481 	  TYPE_FIELDS (ctx->record_type) = f;
2482 	}
2483       layout_type (ctx->record_type);
2484       fixup_child_record_type (ctx);
2485     }
2486   else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
2487     {
2488       layout_type (ctx->record_type);
2489       fixup_child_record_type (ctx);
2490     }
2491   else
2492     {
     /* Otherwise this is a GIMPLE_OMP_TASK.  */
2493       location_t loc = gimple_location (ctx->stmt);
2494       tree *p, vla_fields = NULL_TREE, *q = &vla_fields;
2495       tree detach_clause
2496 	= omp_find_clause (gimple_omp_task_clauses (ctx->stmt),
2497 			   OMP_CLAUSE_DETACH);
2498       /* Move VLA fields to the end. */
2499       p = &TYPE_FIELDS (ctx->record_type);
2500       while (*p)
2501 	if (!TYPE_SIZE_UNIT (TREE_TYPE (*p))
2502 	    || ! TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (*p))))
2503 	  {
2504 	    *q = *p;
2505 	    *p = TREE_CHAIN (*p);
2506 	    TREE_CHAIN (*q) = NULL_TREE;
2507 	    q = &TREE_CHAIN (*q);
2508 	  }
2509 	else
2510 	  p = &DECL_CHAIN (*p);
2511       *p = vla_fields;
2512       if (gimple_omp_task_taskloop_p (ctx->stmt))
2513 	{
2514 	  /* Move fields corresponding to first and second _looptemp_
2515 	     clause first. There are filled by GOMP_taskloop
2516 	     and thus need to be in specific positions. */
2517 	  tree clauses = gimple_omp_task_clauses (ctx->stmt);
2518 	  tree c1 = omp_find_clause (clauses, OMP_CLAUSE__LOOPTEMP_);
2519 	  tree c2 = omp_find_clause (OMP_CLAUSE_CHAIN (c1),
2520 				     OMP_CLAUSE__LOOPTEMP_);
2521 	  tree c3 = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
2522 	  tree f1 = lookup_field (OMP_CLAUSE_DECL (c1), ctx);
2523 	  tree f2 = lookup_field (OMP_CLAUSE_DECL (c2), ctx);
2524 	  tree f3 = c3 ? lookup_field (OMP_CLAUSE_DECL (c3), ctx) : NULL_TREE;
         /* Unlink f1/f2/f3 wherever they currently are ...  */
2525 	  p = &TYPE_FIELDS (ctx->record_type);
2526 	  while (*p)
2527 	    if (*p == f1 || *p == f2 || *p == f3)
2528 	      *p = DECL_CHAIN (*p);
2529 	    else
2530 	      p = &DECL_CHAIN (*p);
         /* ... and re-chain them at the front as f1, f2[, f3].  */
2531 	  DECL_CHAIN (f1) = f2;
2532 	  if (c3)
2533 	    {
2534 	      DECL_CHAIN (f2) = f3;
2535 	      DECL_CHAIN (f3) = TYPE_FIELDS (ctx->record_type);
2536 	    }
2537 	  else
2538 	    DECL_CHAIN (f2) = TYPE_FIELDS (ctx->record_type);
2539 	  TYPE_FIELDS (ctx->record_type) = f1;
2540 	  if (ctx->srecord_type)
2541 	    {
             /* Mirror the same reordering in the sender record.  */
2542 	      f1 = lookup_sfield (OMP_CLAUSE_DECL (c1), ctx);
2543 	      f2 = lookup_sfield (OMP_CLAUSE_DECL (c2), ctx);
2544 	      if (c3)
2545 		f3 = lookup_sfield (OMP_CLAUSE_DECL (c3), ctx);
2546 	      p = &TYPE_FIELDS (ctx->srecord_type);
2547 	      while (*p)
2548 		if (*p == f1 || *p == f2 || *p == f3)
2549 		  *p = DECL_CHAIN (*p);
2550 		else
2551 		  p = &DECL_CHAIN (*p);
2552 	      DECL_CHAIN (f1) = f2;
2553 	      DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
2554 	      if (c3)
2555 		{
2556 		  DECL_CHAIN (f2) = f3;
2557 		  DECL_CHAIN (f3) = TYPE_FIELDS (ctx->srecord_type);
2558 		}
2559 	      else
2560 		DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
2561 	      TYPE_FIELDS (ctx->srecord_type) = f1;
2562 	    }
2563 	}
2564       if (detach_clause)
2565 	{
2566 	  tree c, field;
2567 
2568 	  /* Look for a firstprivate clause with the detach event handle. */
2569 	  for (c = gimple_omp_taskreg_clauses (ctx->stmt);
2570 	       c; c = OMP_CLAUSE_CHAIN (c))
2571 	    {
2572 	      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
2573 		continue;
2574 	      if (maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_DECL (c), ctx)
2575 		  == OMP_CLAUSE_DECL (detach_clause))
2576 		break;
2577 	    }
2578 
2579 	  gcc_assert (c);
2580 	  field = lookup_field (OMP_CLAUSE_DECL (c), ctx);
2581 
2582 	  /* Move field corresponding to the detach clause first.
2583 	     This is filled by GOMP_task and needs to be in a
2584 	     specific position. */
2585 	  p = &TYPE_FIELDS (ctx->record_type);
2586 	  while (*p)
2587 	    if (*p == field)
2588 	      *p = DECL_CHAIN (*p);
2589 	    else
2590 	      p = &DECL_CHAIN (*p);
2591 	  DECL_CHAIN (field) = TYPE_FIELDS (ctx->record_type);
2592 	  TYPE_FIELDS (ctx->record_type) = field;
2593 	  if (ctx->srecord_type)
2594 	    {
2595 	      field = lookup_sfield (OMP_CLAUSE_DECL (c), ctx);
2596 	      p = &TYPE_FIELDS (ctx->srecord_type);
2597 	      while (*p)
2598 		if (*p == field)
2599 		  *p = DECL_CHAIN (*p);
2600 		else
2601 		  p = &DECL_CHAIN (*p);
2602 	      DECL_CHAIN (field) = TYPE_FIELDS (ctx->srecord_type);
2603 	      TYPE_FIELDS (ctx->srecord_type) = field;
2604 	    }
2605 	}
2606       layout_type (ctx->record_type);
2607       fixup_child_record_type (ctx);
2608       if (ctx->srecord_type)
2609 	layout_type (ctx->srecord_type);
     /* Record the (possibly variable) size and the alignment of the
	data block for the GOMP_task call.  */
2610       tree t = fold_convert_loc (loc, long_integer_type_node,
2611 				 TYPE_SIZE_UNIT (ctx->record_type));
2612       if (TREE_CODE (t) != INTEGER_CST)
2613 	{
         /* A VLA-dependent size may reference privatized variables;
	    remap them so the expression is valid in this context.  */
2614 	  t = unshare_expr (t);
2615 	  walk_tree (&t, finish_taskreg_remap, ctx, NULL);
2616 	}
2617       gimple_omp_task_set_arg_size (ctx->stmt, t);
2618       t = build_int_cst (long_integer_type_node,
2619 			 TYPE_ALIGN_UNIT (ctx->record_type));
2620       gimple_omp_task_set_arg_align (ctx->stmt, t);
2621     }
2622 }
2623
2624 /* Find the enclosing offload context. */
2625
2626 static omp_context *
enclosing_target_ctx(omp_context * ctx)2627 enclosing_target_ctx (omp_context *ctx)
2628 {
2629 for (; ctx; ctx = ctx->outer)
2630 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET)
2631 break;
2632
2633 return ctx;
2634 }
2635
2636 /* Return whether CTX's parent compute construct is an OpenACC 'kernels'
2637 construct.
2638 (This doesn't include OpenACC 'kernels' decomposed parts.) */
2639
2640 static bool
ctx_in_oacc_kernels_region(omp_context * ctx)2641 ctx_in_oacc_kernels_region (omp_context *ctx)
2642 {
2643 for (;ctx != NULL; ctx = ctx->outer)
2644 {
2645 gimple *stmt = ctx->stmt;
2646 if (gimple_code (stmt) == GIMPLE_OMP_TARGET
2647 && gimple_omp_target_kind (stmt) == GF_OMP_TARGET_KIND_OACC_KERNELS)
2648 return true;
2649 }
2650
2651 return false;
2652 }
2653
2654 /* Check the parallelism clauses inside a OpenACC 'kernels' region.
2655 (This doesn't include OpenACC 'kernels' decomposed parts.)
2656 Until kernels handling moves to use the same loop indirection
2657 scheme as parallel, we need to do this checking early. */
2658
2659 static unsigned
check_oacc_kernel_gwv(gomp_for * stmt,omp_context * ctx)2660 check_oacc_kernel_gwv (gomp_for *stmt, omp_context *ctx)
2661 {
2662 bool checking = true;
2663 unsigned outer_mask = 0;
2664 unsigned this_mask = 0;
2665 bool has_seq = false, has_auto = false;
2666
2667 if (ctx->outer)
2668 outer_mask = check_oacc_kernel_gwv (NULL, ctx->outer);
2669 if (!stmt)
2670 {
2671 checking = false;
2672 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR)
2673 return outer_mask;
2674 stmt = as_a <gomp_for *> (ctx->stmt);
2675 }
2676
2677 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2678 {
2679 switch (OMP_CLAUSE_CODE (c))
2680 {
2681 case OMP_CLAUSE_GANG:
2682 this_mask |= GOMP_DIM_MASK (GOMP_DIM_GANG);
2683 break;
2684 case OMP_CLAUSE_WORKER:
2685 this_mask |= GOMP_DIM_MASK (GOMP_DIM_WORKER);
2686 break;
2687 case OMP_CLAUSE_VECTOR:
2688 this_mask |= GOMP_DIM_MASK (GOMP_DIM_VECTOR);
2689 break;
2690 case OMP_CLAUSE_SEQ:
2691 has_seq = true;
2692 break;
2693 case OMP_CLAUSE_AUTO:
2694 has_auto = true;
2695 break;
2696 default:
2697 break;
2698 }
2699 }
2700
2701 if (checking)
2702 {
2703 if (has_seq && (this_mask || has_auto))
2704 error_at (gimple_location (stmt), "%<seq%> overrides other"
2705 " OpenACC loop specifiers");
2706 else if (has_auto && this_mask)
2707 error_at (gimple_location (stmt), "%<auto%> conflicts with other"
2708 " OpenACC loop specifiers");
2709
2710 if (this_mask & outer_mask)
2711 error_at (gimple_location (stmt), "inner loop uses same"
2712 " OpenACC parallelism as containing loop");
2713 }
2714
2715 return outer_mask | this_mask;
2716 }
2717
2718 /* Scan a GIMPLE_OMP_FOR. */
2719
2720 static omp_context *
scan_omp_for(gomp_for * stmt,omp_context * outer_ctx)2721 scan_omp_for (gomp_for *stmt, omp_context *outer_ctx)
2722 {
2723   omp_context *ctx;
2724   size_t i;
2725   tree clauses = gimple_omp_for_clauses (stmt);
2726 
2727   ctx = new_omp_context (stmt, outer_ctx);
2728 
2729   if (is_gimple_omp_oacc (stmt))
2730     {
2731       omp_context *tgt = enclosing_target_ctx (outer_ctx);
2732 
       /* Outside of 'kernels' regions, gang/worker/vector clauses
	  must not carry an argument expression here; diagnose any
	  that do.  */
2733       if (!(tgt && is_oacc_kernels (tgt)))
2734 	for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
2735 	  {
2736 	    tree c_op0;
2737 	    switch (OMP_CLAUSE_CODE (c))
2738 	      {
2739 	      case OMP_CLAUSE_GANG:
2740 		c_op0 = OMP_CLAUSE_GANG_EXPR (c);
2741 		break;
2742 
2743 	      case OMP_CLAUSE_WORKER:
2744 		c_op0 = OMP_CLAUSE_WORKER_EXPR (c);
2745 		break;
2746 
2747 	      case OMP_CLAUSE_VECTOR:
2748 		c_op0 = OMP_CLAUSE_VECTOR_EXPR (c);
2749 		break;
2750 
2751 	      default:
2752 		continue;
2753 	      }
2754 
2755 	    if (c_op0)
2756 	      {
2757 		/* By construction, this is impossible for OpenACC 'kernels'
2758 		   decomposed parts. */
2759 		gcc_assert (!(tgt && is_oacc_kernels_decomposed_part (tgt)));
2760 
2761 		error_at (OMP_CLAUSE_LOCATION (c),
2762 			  "argument not permitted on %qs clause",
2763 			  omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
2764 		if (tgt)
2765 		  inform (gimple_location (tgt->stmt),
2766 			  "enclosing parent compute construct");
2767 		else if (oacc_get_fn_attrib (current_function_decl))
2768 		  inform (DECL_SOURCE_LOCATION (current_function_decl),
2769 			  "enclosing routine");
2770 		else
2771 		  gcc_unreachable ();
2772 	      }
2773 	  }
2774 
2775       if (tgt && is_oacc_kernels (tgt))
2776 	check_oacc_kernel_gwv (stmt, ctx);
2777 
2778       /* Collect all variables named in reductions on this loop. Ensure
2779 	 that, if this loop has a reduction on some variable v, and there is
2780 	 a reduction on v somewhere in an outer context, then there is a
2781 	 reduction on v on all intervening loops as well. */
2782       tree local_reduction_clauses = NULL;
2783       for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2784 	{
2785 	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
2786 	    local_reduction_clauses
2787 	      = tree_cons (NULL, c, local_reduction_clauses);
2788 	}
       /* Lazily compute the accumulated outer reduction clause list.  */
2789       if (ctx->outer_reduction_clauses == NULL && ctx->outer != NULL)
2790 	ctx->outer_reduction_clauses
2791 	  = chainon (unshare_expr (ctx->outer->local_reduction_clauses),
2792 		     ctx->outer->outer_reduction_clauses);
2793       tree outer_reduction_clauses = ctx->outer_reduction_clauses;
2794       tree local_iter = local_reduction_clauses;
2795       for (; local_iter; local_iter = TREE_CHAIN (local_iter))
2796 	{
2797 	  tree local_clause = TREE_VALUE (local_iter);
2798 	  tree local_var = OMP_CLAUSE_DECL (local_clause);
2799 	  tree_code local_op = OMP_CLAUSE_REDUCTION_CODE (local_clause);
2800 	  bool have_outer_reduction = false;
2801 	  tree ctx_iter = outer_reduction_clauses;
2802 	  for (; ctx_iter; ctx_iter = TREE_CHAIN (ctx_iter))
2803 	    {
2804 	      tree outer_clause = TREE_VALUE (ctx_iter);
2805 	      tree outer_var = OMP_CLAUSE_DECL (outer_clause);
2806 	      tree_code outer_op = OMP_CLAUSE_REDUCTION_CODE (outer_clause);
             /* Same variable reduced with a different operator in an
		outer loop: warn.  */
2807 	      if (outer_var == local_var && outer_op != local_op)
2808 		{
2809 		  warning_at (OMP_CLAUSE_LOCATION (local_clause), 0,
2810 			      "conflicting reduction operations for %qE",
2811 			      local_var);
2812 		  inform (OMP_CLAUSE_LOCATION (outer_clause),
2813 			  "location of the previous reduction for %qE",
2814 			  outer_var);
2815 		}
2816 	      if (outer_var == local_var)
2817 		{
2818 		  have_outer_reduction = true;
2819 		  break;
2820 		}
2821 	    }
2822 	  if (have_outer_reduction)
2823 	    {
2824 	      /* There is a reduction on outer_var both on this loop and on
2825 		 some enclosing loop. Walk up the context tree until such a
2826 		 loop with a reduction on outer_var is found, and complain
2827 		 about all intervening loops that do not have such a
2828 		 reduction. */
2829 	      struct omp_context *curr_loop = ctx->outer;
2830 	      bool found = false;
2831 	      while (curr_loop != NULL)
2832 		{
2833 		  tree curr_iter = curr_loop->local_reduction_clauses;
2834 		  for (; curr_iter; curr_iter = TREE_CHAIN (curr_iter))
2835 		    {
2836 		      tree curr_clause = TREE_VALUE (curr_iter);
2837 		      tree curr_var = OMP_CLAUSE_DECL (curr_clause);
2838 		      if (curr_var == local_var)
2839 			{
2840 			  found = true;
2841 			  break;
2842 			}
2843 		    }
2844 		  if (!found)
2845 		    warning_at (gimple_location (curr_loop->stmt), 0,
2846 				"nested loop in reduction needs "
2847 				"reduction clause for %qE",
2848 				local_var);
2849 		  else
2850 		    break;
2851 		  curr_loop = curr_loop->outer;
2852 		}
2853 	    }
2854 	}
2855       ctx->local_reduction_clauses = local_reduction_clauses;
2856       ctx->outer_reduction_clauses
2857 	= chainon (unshare_expr (ctx->local_reduction_clauses),
2858 		   ctx->outer_reduction_clauses);
2859 
2860       if (tgt && is_oacc_kernels (tgt))
2861 	{
2862 	  /* Strip out reductions, as they are not handled yet. */
2863 	  tree *prev_ptr = &clauses;
2864 
2865 	  while (tree probe = *prev_ptr)
2866 	    {
2867 	      tree *next_ptr = &OMP_CLAUSE_CHAIN (probe);
2868 
2869 	      if (OMP_CLAUSE_CODE (probe) == OMP_CLAUSE_REDUCTION)
2870 		*prev_ptr = *next_ptr;
2871 	      else
2872 		prev_ptr = next_ptr;
2873 	    }
2874 
2875 	  gimple_omp_for_set_clauses (stmt, clauses);
2876 	}
2877     }
2878 
2879   scan_sharing_clauses (clauses, ctx);
2880 
   /* Scan the pre-body, each collapsed dimension's control operands,
      and finally the loop body itself.  */
2881   scan_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
2882   for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
2883     {
2884       scan_omp_op (gimple_omp_for_index_ptr (stmt, i), ctx);
2885       scan_omp_op (gimple_omp_for_initial_ptr (stmt, i), ctx);
2886       scan_omp_op (gimple_omp_for_final_ptr (stmt, i), ctx);
2887       scan_omp_op (gimple_omp_for_incr_ptr (stmt, i), ctx);
2888     }
2889   scan_omp (gimple_omp_body_ptr (stmt), ctx);
2890   return ctx;
2891 }
2892
2893 /* Duplicate #pragma omp simd, one for SIMT, another one for SIMD. */
2894
2895 static void
scan_omp_simd(gimple_stmt_iterator * gsi,gomp_for * stmt,omp_context * outer_ctx)2896 scan_omp_simd (gimple_stmt_iterator *gsi, gomp_for *stmt,
2897 	       omp_context *outer_ctx)
2898 {
   /* Replace STMT with a bind of the shape:
	cond = IFN_GOMP_USE_SIMT ();
	if (cond != 0) goto lab1; else goto lab2;
	lab1: <SIMT copy of the loop>; goto lab3;
	lab2: <original loop>;
	lab3:
      so a later pass can pick the appropriate version.  */
2899   gbind *bind = gimple_build_bind (NULL, NULL, NULL);
2900   gsi_replace (gsi, bind, false);
2901   gimple_seq seq = NULL;
2902   gimple *g = gimple_build_call_internal (IFN_GOMP_USE_SIMT, 0);
2903   tree cond = create_tmp_var_raw (integer_type_node);
2904   DECL_CONTEXT (cond) = current_function_decl;
2905   DECL_SEEN_IN_BIND_EXPR_P (cond) = 1;
2906   gimple_bind_set_vars (bind, cond);
2907   gimple_call_set_lhs (g, cond);
2908   gimple_seq_add_stmt (&seq, g);
2909   tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
2910   tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
2911   tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
2912   g = gimple_build_cond (NE_EXPR, cond, integer_zero_node, lab1, lab2);
2913   gimple_seq_add_stmt (&seq, g);
2914   g = gimple_build_label (lab1);
2915   gimple_seq_add_stmt (&seq, g);
   /* The SIMT copy gets an extra _simt_ clause so the two versions
      can be told apart later.  */
2916   gimple_seq new_seq = copy_gimple_seq_and_replace_locals (stmt);
2917   gomp_for *new_stmt = as_a <gomp_for *> (new_seq);
2918   tree clause = build_omp_clause (gimple_location (stmt), OMP_CLAUSE__SIMT_);
2919   OMP_CLAUSE_CHAIN (clause) = gimple_omp_for_clauses (new_stmt);
2920   gimple_omp_for_set_clauses (new_stmt, clause);
2921   gimple_seq_add_stmt (&seq, new_stmt);
2922   g = gimple_build_goto (lab3);
2923   gimple_seq_add_stmt (&seq, g);
2924   g = gimple_build_label (lab2);
2925   gimple_seq_add_stmt (&seq, g);
2926   gimple_seq_add_stmt (&seq, stmt);
2927   g = gimple_build_label (lab3);
2928   gimple_seq_add_stmt (&seq, g);
2929   gimple_bind_set_body (bind, seq);
2930   update_stmt (bind);
   /* Scan both copies, and link the SIMD version to its SIMT twin.  */
2931   scan_omp_for (new_stmt, outer_ctx);
2932   scan_omp_for (stmt, outer_ctx)->simt_stmt = new_stmt;
2933 }
2934
2935 static tree omp_find_scan (gimple_stmt_iterator *, bool *,
2936 struct walk_stmt_info *);
2937 static omp_context *maybe_lookup_ctx (gimple *);
2938
2939 /* Duplicate #pragma omp simd, one for the scan input phase loop and one
2940 for scan phase loop. */
2941
static void
scan_omp_simd_scan (gimple_stmt_iterator *gsi, gomp_for *stmt,
		    omp_context *outer_ctx)
{
  /* Duplicate the simd loop STMT into an input-phase copy and a
     scan-phase copy, each wrapped in its own GIMPLE_OMP_SCAN stmt:
     the original loop becomes the body of INPUT_STMT, and a deep copy
     becomes the body of SCAN_STMT (which carries an inclusive clause).
     The copy is taken while the input phase half of the loop body is
     detached, and the copy's own input phase half is dropped, so the
     two loops end up with complementary halves around the inner
     GIMPLE_OMP_SCAN separator.  */

  /* The only change between inclusive and exclusive scan will be
     within the first simd loop, so just use inclusive in the
     worksharing loop.  */
  outer_ctx->scan_inclusive = true;
  tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_INCLUSIVE);
  OMP_CLAUSE_DECL (c) = integer_zero_node;

  gomp_scan *input_stmt = gimple_build_omp_scan (NULL, NULL_TREE);
  gomp_scan *scan_stmt = gimple_build_omp_scan (NULL, c);
  gsi_replace (gsi, input_stmt, false);
  gimple_seq input_body = NULL;
  gimple_seq_add_stmt (&input_body, stmt);
  gsi_insert_after (gsi, scan_stmt, GSI_NEW_STMT);

  /* Find the inner GIMPLE_OMP_SCAN separating the input phase from
     the scan phase inside the original loop body.  */
  gimple_stmt_iterator input1_gsi = gsi_none ();
  struct walk_stmt_info wi;
  memset (&wi, 0, sizeof (wi));
  wi.val_only = true;
  wi.info = (void *) &input1_gsi;
  walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), omp_find_scan, NULL, &wi);
  gcc_assert (!gsi_end_p (input1_gsi));

  gimple *input_stmt1 = gsi_stmt (input1_gsi);
  gsi_next (&input1_gsi);
  gimple *scan_stmt1 = gsi_stmt (input1_gsi);
  gcc_assert (scan_stmt1 && gimple_code (scan_stmt1) == GIMPLE_OMP_SCAN);
  /* For an exclusive scan the phases appear in the opposite order.  */
  c = gimple_omp_scan_clauses (as_a <gomp_scan *> (scan_stmt1));
  if (c && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_EXCLUSIVE)
    std::swap (input_stmt1, scan_stmt1);

  /* Temporarily detach the input phase body so that the copy below
     does not include it.  */
  gimple_seq input_body1 = gimple_omp_body (input_stmt1);
  gimple_omp_set_body (input_stmt1, NULL);

  gimple_seq scan_body = copy_gimple_seq_and_replace_locals (stmt);
  gomp_for *new_stmt = as_a <gomp_for *> (scan_body);

  /* Reattach the input phase body to the original loop and clear the
     scan phase body there; the original loop is the input phase
     loop.  */
  gimple_omp_set_body (input_stmt1, input_body1);
  gimple_omp_set_body (scan_stmt1, NULL);

  /* Locate the inner GIMPLE_OMP_SCAN in the copy as well.  */
  gimple_stmt_iterator input2_gsi = gsi_none ();
  memset (&wi, 0, sizeof (wi));
  wi.val_only = true;
  wi.info = (void *) &input2_gsi;
  walk_gimple_seq_mod (gimple_omp_body_ptr (new_stmt), omp_find_scan,
		       NULL, &wi);
  gcc_assert (!gsi_end_p (input2_gsi));

  gimple *input_stmt2 = gsi_stmt (input2_gsi);
  gsi_next (&input2_gsi);
  gimple *scan_stmt2 = gsi_stmt (input2_gsi);
  gcc_assert (scan_stmt2 && gimple_code (scan_stmt2) == GIMPLE_OMP_SCAN);
  if (c && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_EXCLUSIVE)
    std::swap (input_stmt2, scan_stmt2);

  /* The copy keeps only the scan phase half (its input phase body was
     empty at copy time, and is cleared here explicitly).  */
  gimple_omp_set_body (input_stmt2, NULL);

  gimple_omp_set_body (input_stmt, input_body);
  gimple_omp_set_body (scan_stmt, scan_body);

  omp_context *ctx = new_omp_context (input_stmt, outer_ctx);
  scan_omp (gimple_omp_body_ptr (input_stmt), ctx);

  ctx = new_omp_context (scan_stmt, outer_ctx);
  scan_omp (gimple_omp_body_ptr (scan_stmt), ctx);

  /* Mark the copied loop's context as the scan phase loop.  */
  maybe_lookup_ctx (new_stmt)->for_simd_scan_phase = true;
}
3013
3014 /* Scan an OpenMP sections directive. */
3015
3016 static void
scan_omp_sections(gomp_sections * stmt,omp_context * outer_ctx)3017 scan_omp_sections (gomp_sections *stmt, omp_context *outer_ctx)
3018 {
3019 omp_context *ctx;
3020
3021 ctx = new_omp_context (stmt, outer_ctx);
3022 scan_sharing_clauses (gimple_omp_sections_clauses (stmt), ctx);
3023 scan_omp (gimple_omp_body_ptr (stmt), ctx);
3024 }
3025
3026 /* Scan an OpenMP single directive. */
3027
3028 static void
scan_omp_single(gomp_single * stmt,omp_context * outer_ctx)3029 scan_omp_single (gomp_single *stmt, omp_context *outer_ctx)
3030 {
3031 omp_context *ctx;
3032 tree name;
3033
3034 ctx = new_omp_context (stmt, outer_ctx);
3035 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
3036 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
3037 name = create_tmp_var_name (".omp_copy_s");
3038 name = build_decl (gimple_location (stmt),
3039 TYPE_DECL, name, ctx->record_type);
3040 TYPE_NAME (ctx->record_type) = name;
3041
3042 scan_sharing_clauses (gimple_omp_single_clauses (stmt), ctx);
3043 scan_omp (gimple_omp_body_ptr (stmt), ctx);
3044
3045 if (TYPE_FIELDS (ctx->record_type) == NULL)
3046 ctx->record_type = NULL;
3047 else
3048 layout_type (ctx->record_type);
3049 }
3050
3051 /* Scan a GIMPLE_OMP_TARGET. */
3052
static void
scan_omp_target (gomp_target *stmt, omp_context *outer_ctx)
{
  omp_context *ctx;
  tree name;
  bool offloaded = is_gimple_omp_offloaded (stmt);
  tree clauses = gimple_omp_target_clauses (stmt);

  ctx = new_omp_context (stmt, outer_ctx);
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  /* Build an artificial record type (.omp_data_t); its fields, if
     any, are added by scan_sharing_clauses below.  */
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  name = create_tmp_var_name (".omp_data_t");
  name = build_decl (gimple_location (stmt),
		     TYPE_DECL, name, ctx->record_type);
  DECL_ARTIFICIAL (name) = 1;
  DECL_NAMELESS (name) = 1;
  TYPE_NAME (ctx->record_type) = name;
  TYPE_ARTIFICIAL (ctx->record_type) = 1;

  /* Only offloaded target regions get outlined into a child
     function; data-only constructs (e.g. target data) do not.  */
  if (offloaded)
    {
      create_omp_child_function (ctx, false);
      gimple_omp_target_set_child_fn (stmt, ctx->cb.dst_fn);
    }

  scan_sharing_clauses (clauses, ctx);
  scan_omp (gimple_omp_body_ptr (stmt), ctx);

  if (TYPE_FIELDS (ctx->record_type) == NULL)
    /* Nothing to pass: drop the record and receiver.  */
    ctx->record_type = ctx->receiver_decl = NULL;
  else
    {
      /* Fields were chained in reverse; restore source order before
	 layout.  */
      TYPE_FIELDS (ctx->record_type)
	= nreverse (TYPE_FIELDS (ctx->record_type));
      if (flag_checking)
	{
	  /* Verify every field was created with the same alignment.  */
	  unsigned int align = DECL_ALIGN (TYPE_FIELDS (ctx->record_type));
	  for (tree field = TYPE_FIELDS (ctx->record_type);
	       field;
	       field = DECL_CHAIN (field))
	    gcc_assert (DECL_ALIGN (field) == align);
	}
      layout_type (ctx->record_type);
      if (offloaded)
	fixup_child_record_type (ctx);
    }

  if (ctx->teams_nested_p && ctx->nonteams_nested_p)
    {
      /* A target region containing a teams construct must not also
	 contain directives outside the teams construct; replace the
	 body with an empty bind to avoid cascading errors.  */
      error_at (gimple_location (stmt),
		"%<target%> construct with nested %<teams%> construct "
		"contains directives outside of the %<teams%> construct");
      gimple_omp_set_body (stmt, gimple_build_bind (NULL, NULL, NULL));
    }
}
3108
3109 /* Scan an OpenMP teams directive. */
3110
static void
scan_omp_teams (gomp_teams *stmt, omp_context *outer_ctx)
{
  omp_context *ctx = new_omp_context (stmt, outer_ctx);

  /* Non-host teams need no outlining here: just record the clauses
     and scan the body.  */
  if (!gimple_omp_teams_host (stmt))
    {
      scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
      scan_omp (gimple_omp_body_ptr (stmt), ctx);
      return;
    }
  /* Host teams are outlined like parallel/task regions: register the
     context for later fixup and build a .omp_data_s record plus a
     child function.  */
  taskreg_contexts.safe_push (ctx);
  gcc_assert (taskreg_nesting_level == 1);
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  tree name = create_tmp_var_name (".omp_data_s");
  name = build_decl (gimple_location (stmt),
		     TYPE_DECL, name, ctx->record_type);
  DECL_ARTIFICIAL (name) = 1;
  DECL_NAMELESS (name) = 1;
  TYPE_NAME (ctx->record_type) = name;
  TYPE_ARTIFICIAL (ctx->record_type) = 1;
  create_omp_child_function (ctx, false);
  gimple_omp_teams_set_child_fn (stmt, ctx->cb.dst_fn);

  scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
  scan_omp (gimple_omp_body_ptr (stmt), ctx);

  /* If no data needs to be shared, drop the record and receiver.  */
  if (TYPE_FIELDS (ctx->record_type) == NULL)
    ctx->record_type = ctx->receiver_decl = NULL;
}
3142
3143 /* Check nesting restrictions. */
/* Check nesting restrictions: return true if STMT (an OMP directive or
   a GOMP builtin call), appearing inside context CTX (NULL when not
   nested in any construct), satisfies the OpenMP/OpenACC nesting
   rules; otherwise emit a diagnostic and return false.  */
static bool
check_omp_nesting_restrictions (gimple *stmt, omp_context *ctx)
{
  tree c;

  /* No nesting of non-OpenACC STMT (that is, an OpenMP one, or a GOMP builtin)
     inside an OpenACC CTX.  */
  if (gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
      || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE)
    /* ..., except for the atomic codes that OpenACC shares with OpenMP.  */
    ;
  else if (!(is_gimple_omp (stmt)
	     && is_gimple_omp_oacc (stmt)))
    {
      if (oacc_get_fn_attrib (cfun->decl) != NULL)
	{
	  error_at (gimple_location (stmt),
		    "non-OpenACC construct inside of OpenACC routine");
	  return false;
	}
      else
	/* Walk outward looking for an enclosing OpenACC construct.  */
	for (omp_context *octx = ctx; octx != NULL; octx = octx->outer)
	  if (is_gimple_omp (octx->stmt)
	      && is_gimple_omp_oacc (octx->stmt))
	    {
	      error_at (gimple_location (stmt),
			"non-OpenACC construct inside of OpenACC region");
	      return false;
	    }
    }

  if (ctx != NULL)
    {
      /* Inside a target region with device(ancestor:...) no OpenMP
	 constructs are allowed at all; also track whether the target
	 body contains a teams construct vs. anything else.  */
      if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET
	  && gimple_omp_target_kind (ctx->stmt) == GF_OMP_TARGET_KIND_REGION)
	{
	  c = omp_find_clause (gimple_omp_target_clauses (ctx->stmt),
			       OMP_CLAUSE_DEVICE);
	  if (c && OMP_CLAUSE_DEVICE_ANCESTOR (c))
	    {
	      error_at (gimple_location (stmt),
			"OpenMP constructs are not allowed in target region "
			"with %<ancestor%>");
	      return false;
	    }

	  if (gimple_code (stmt) == GIMPLE_OMP_TEAMS && !ctx->teams_nested_p)
	    ctx->teams_nested_p = true;
	  else
	    ctx->nonteams_nested_p = true;
	}
      /* A scan directly inside a for is checked against the for's
	 context.  */
      if (gimple_code (ctx->stmt) == GIMPLE_OMP_SCAN
	  && ctx->outer
	  && gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
	ctx = ctx->outer;
      if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
	  && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD
	  && !ctx->loop_p)
	{
	  c = NULL_TREE;
	  if (ctx->order_concurrent
	      && (gimple_code (stmt) == GIMPLE_OMP_ORDERED
		  || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
		  || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE))
	    {
	      error_at (gimple_location (stmt),
			"OpenMP constructs other than %<parallel%>, %<loop%>"
			" or %<simd%> may not be nested inside a region with"
			" the %<order(concurrent)%> clause");
	      return false;
	    }
	  if (gimple_code (stmt) == GIMPLE_OMP_ORDERED)
	    {
	      c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
	      if (omp_find_clause (c, OMP_CLAUSE_SIMD))
		{
		  /* ordered simd (with threads) is only OK directly in a
		     simd region, or in the simd part of a combined
		     for simd.  */
		  if (omp_find_clause (c, OMP_CLAUSE_THREADS)
		      && (ctx->outer == NULL
			  || !gimple_omp_for_combined_into_p (ctx->stmt)
			  || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR
			  || (gimple_omp_for_kind (ctx->outer->stmt)
			      != GF_OMP_FOR_KIND_FOR)
			  || !gimple_omp_for_combined_p (ctx->outer->stmt)))
		    {
		      error_at (gimple_location (stmt),
				"%<ordered simd threads%> must be closely "
				"nested inside of %<%s simd%> region",
				lang_GNU_Fortran () ? "do" : "for");
		      return false;
		    }
		  return true;
		}
	    }
	  else if (gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
		   || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE
		   || gimple_code (stmt) == GIMPLE_OMP_SCAN)
	    return true;
	  /* NOTE(review): within this branch ctx->stmt is already known
	     to be a GF_OMP_FOR_KIND_SIMD for, so the second conjunct
	     below is always true; presumably gimple_omp_for_kind (stmt)
	     was intended — verify against upstream before changing.  */
	  else if (gimple_code (stmt) == GIMPLE_OMP_FOR
		   && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
	    return true;
	  error_at (gimple_location (stmt),
		    "OpenMP constructs other than "
		    "%<ordered simd%>, %<simd%>, %<loop%> or %<atomic%> may "
		    "not be nested inside %<simd%> region");
	  return false;
	}
      else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
	{
	  /* Only distribute, loop (for with a bind clause) or parallel
	     may be strictly nested inside teams.  */
	  if ((gimple_code (stmt) != GIMPLE_OMP_FOR
	       || (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_DISTRIBUTE
		   && omp_find_clause (gimple_omp_for_clauses (stmt),
				       OMP_CLAUSE_BIND) == NULL_TREE))
	      && gimple_code (stmt) != GIMPLE_OMP_PARALLEL)
	    {
	      error_at (gimple_location (stmt),
			"only %<distribute%>, %<parallel%> or %<loop%> "
			"regions are allowed to be strictly nested inside "
			"%<teams%> region");
	      return false;
	    }
	}
      else if (ctx->order_concurrent
	       && gimple_code (stmt) != GIMPLE_OMP_PARALLEL
	       && (gimple_code (stmt) != GIMPLE_OMP_FOR
		   || gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_SIMD)
	       && gimple_code (stmt) != GIMPLE_OMP_SCAN)
	{
	  if (ctx->loop_p)
	    error_at (gimple_location (stmt),
		      "OpenMP constructs other than %<parallel%>, %<loop%> or "
		      "%<simd%> may not be nested inside a %<loop%> region");
	  else
	    error_at (gimple_location (stmt),
		      "OpenMP constructs other than %<parallel%>, %<loop%> or "
		      "%<simd%> may not be nested inside a region with "
		      "the %<order(concurrent)%> clause");
	  return false;
	}
    }
  switch (gimple_code (stmt))
    {
    case GIMPLE_OMP_FOR:
      if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_SIMD)
	return true;
      if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_DISTRIBUTE)
	{
	  if (ctx != NULL && gimple_code (ctx->stmt) != GIMPLE_OMP_TEAMS)
	    {
	      error_at (gimple_location (stmt),
			"%<distribute%> region must be strictly nested "
			"inside %<teams%> construct");
	      return false;
	    }
	  return true;
	}
      /* We split taskloop into task and nested taskloop in it.  */
      if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP)
	return true;
      /* For now, hope this will change and loop bind(parallel) will not
	 be allowed in lots of contexts.  */
      if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR
	  && omp_find_clause (gimple_omp_for_clauses (stmt), OMP_CLAUSE_BIND))
	return true;
      if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_OACC_LOOP)
	{
	  /* An OpenACC loop must be inside an OpenACC compute region
	     (or an OpenACC routine when orphaned).  */
	  bool ok = false;

	  if (ctx)
	    switch (gimple_code (ctx->stmt))
	      {
	      case GIMPLE_OMP_FOR:
		ok = (gimple_omp_for_kind (ctx->stmt)
		      == GF_OMP_FOR_KIND_OACC_LOOP);
		break;

	      case GIMPLE_OMP_TARGET:
		switch (gimple_omp_target_kind (ctx->stmt))
		  {
		  case GF_OMP_TARGET_KIND_OACC_PARALLEL:
		  case GF_OMP_TARGET_KIND_OACC_KERNELS:
		  case GF_OMP_TARGET_KIND_OACC_SERIAL:
		  case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED:
		  case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE:
		    ok = true;
		    break;

		  default:
		    break;
		  }
		/* FALLTHRU */

	      default:
		break;
	      }
	  else if (oacc_get_fn_attrib (current_function_decl))
	    ok = true;
	  if (!ok)
	    {
	      error_at (gimple_location (stmt),
			"OpenACC loop directive must be associated with"
			" an OpenACC compute region");
	      return false;
	    }
	}
      /* FALLTHRU */
    case GIMPLE_CALL:
      /* The GOMP_cancel/GOMP_cancellation_point builtins: argument 0
	 selects the construct being cancelled (1 parallel, 2 for,
	 4 sections, 8 taskgroup).  */
      if (is_gimple_call (stmt)
	  && (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
	      == BUILT_IN_GOMP_CANCEL
	      || DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
		 == BUILT_IN_GOMP_CANCELLATION_POINT))
	{
	  const char *bad = NULL;
	  const char *kind = NULL;
	  const char *construct
	    = (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
	       == BUILT_IN_GOMP_CANCEL)
	      ? "cancel"
	      : "cancellation point";
	  if (ctx == NULL)
	    {
	      error_at (gimple_location (stmt), "orphaned %qs construct",
			construct);
	      return false;
	    }
	  switch (tree_fits_shwi_p (gimple_call_arg (stmt, 0))
		  ? tree_to_shwi (gimple_call_arg (stmt, 0))
		  : 0)
	    {
	    case 1:
	      if (gimple_code (ctx->stmt) != GIMPLE_OMP_PARALLEL)
		bad = "parallel";
	      else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
		       == BUILT_IN_GOMP_CANCEL
		       && !integer_zerop (gimple_call_arg (stmt, 1)))
		ctx->cancellable = true;
	      kind = "parallel";
	      break;
	    case 2:
	      if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
		  || gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR)
		bad = "for";
	      else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
		       == BUILT_IN_GOMP_CANCEL
		       && !integer_zerop (gimple_call_arg (stmt, 1)))
		{
		  ctx->cancellable = true;
		  if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
				       OMP_CLAUSE_NOWAIT))
		    warning_at (gimple_location (stmt), 0,
				"%<cancel for%> inside "
				"%<nowait%> for construct");
		  if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
				       OMP_CLAUSE_ORDERED))
		    warning_at (gimple_location (stmt), 0,
				"%<cancel for%> inside "
				"%<ordered%> for construct");
		}
	      kind = "for";
	      break;
	    case 4:
	      if (gimple_code (ctx->stmt) != GIMPLE_OMP_SECTIONS
		  && gimple_code (ctx->stmt) != GIMPLE_OMP_SECTION)
		bad = "sections";
	      else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
		       == BUILT_IN_GOMP_CANCEL
		       && !integer_zerop (gimple_call_arg (stmt, 1)))
		{
		  if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
		    {
		      ctx->cancellable = true;
		      if (omp_find_clause (gimple_omp_sections_clauses
								(ctx->stmt),
					   OMP_CLAUSE_NOWAIT))
			warning_at (gimple_location (stmt), 0,
				    "%<cancel sections%> inside "
				    "%<nowait%> sections construct");
		    }
		  else
		    {
		      /* Inside a section, the cancellable flag lives on
			 the enclosing sections context.  */
		      gcc_assert (ctx->outer
				  && gimple_code (ctx->outer->stmt)
				     == GIMPLE_OMP_SECTIONS);
		      ctx->outer->cancellable = true;
		      if (omp_find_clause (gimple_omp_sections_clauses
							(ctx->outer->stmt),
					   OMP_CLAUSE_NOWAIT))
			warning_at (gimple_location (stmt), 0,
				    "%<cancel sections%> inside "
				    "%<nowait%> sections construct");
		    }
		}
	      kind = "sections";
	      break;
	    case 8:
	      if (!is_task_ctx (ctx)
		  && (!is_taskloop_ctx (ctx)
		      || ctx->outer == NULL
		      || !is_task_ctx (ctx->outer)))
		bad = "task";
	      else
		{
		  /* Make sure a taskgroup region encloses the task
		     before hitting a parallel/teams/target boundary.  */
		  for (omp_context *octx = ctx->outer;
		       octx; octx = octx->outer)
		    {
		      switch (gimple_code (octx->stmt))
			{
			case GIMPLE_OMP_TASKGROUP:
			  break;
			case GIMPLE_OMP_TARGET:
			  if (gimple_omp_target_kind (octx->stmt)
			      != GF_OMP_TARGET_KIND_REGION)
			    continue;
			  /* FALLTHRU */
			case GIMPLE_OMP_PARALLEL:
			case GIMPLE_OMP_TEAMS:
			  error_at (gimple_location (stmt),
				    "%<%s taskgroup%> construct not closely "
				    "nested inside of %<taskgroup%> region",
				    construct);
			  return false;
			case GIMPLE_OMP_TASK:
			  if (gimple_omp_task_taskloop_p (octx->stmt)
			      && octx->outer
			      && is_taskloop_ctx (octx->outer))
			    {
			      tree clauses
				= gimple_omp_for_clauses (octx->outer->stmt);
			      if (!omp_find_clause (clauses, OMP_CLAUSE_NOGROUP))
				break;
			    }
			  continue;
			default:
			  continue;
			}
		      break;
		    }
		  ctx->cancellable = true;
		}
	      kind = "taskgroup";
	      break;
	    default:
	      error_at (gimple_location (stmt), "invalid arguments");
	      return false;
	    }
	  if (bad)
	    {
	      error_at (gimple_location (stmt),
			"%<%s %s%> construct not closely nested inside of %qs",
			construct, kind, bad);
	      return false;
	    }
	}
      /* FALLTHRU */
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
      /* Worksharing regions (and barriers) may not be closely nested
	 inside another worksharing, critical, ordered, master/masked
	 or explicit task region; parallel/teams/target stop the
	 search.  */
      for (; ctx != NULL; ctx = ctx->outer)
	switch (gimple_code (ctx->stmt))
	  {
	  case GIMPLE_OMP_FOR:
	    if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
		&& gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
	      break;
	    /* FALLTHRU */
	  case GIMPLE_OMP_SECTIONS:
	  case GIMPLE_OMP_SINGLE:
	  case GIMPLE_OMP_ORDERED:
	  case GIMPLE_OMP_MASTER:
	  case GIMPLE_OMP_MASKED:
	  case GIMPLE_OMP_TASK:
	  case GIMPLE_OMP_CRITICAL:
	    if (is_gimple_call (stmt))
	      {
		if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
		    != BUILT_IN_GOMP_BARRIER)
		  return true;
		error_at (gimple_location (stmt),
			  "barrier region may not be closely nested inside "
			  "of work-sharing, %<loop%>, %<critical%>, "
			  "%<ordered%>, %<master%>, %<masked%>, explicit "
			  "%<task%> or %<taskloop%> region");
		return false;
	      }
	    error_at (gimple_location (stmt),
		      "work-sharing region may not be closely nested inside "
		      "of work-sharing, %<loop%>, %<critical%>, %<ordered%>, "
		      "%<master%>, %<masked%>, explicit %<task%> or "
		      "%<taskloop%> region");
	    return false;
	  case GIMPLE_OMP_PARALLEL:
	  case GIMPLE_OMP_TEAMS:
	    return true;
	  case GIMPLE_OMP_TARGET:
	    if (gimple_omp_target_kind (ctx->stmt)
		== GF_OMP_TARGET_KIND_REGION)
	      return true;
	    break;
	  default:
	    break;
	  }
      break;
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_MASKED:
      for (; ctx != NULL; ctx = ctx->outer)
	switch (gimple_code (ctx->stmt))
	  {
	  case GIMPLE_OMP_FOR:
	    if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
		&& gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
	      break;
	    /* FALLTHRU */
	  case GIMPLE_OMP_SECTIONS:
	  case GIMPLE_OMP_SINGLE:
	  case GIMPLE_OMP_TASK:
	    error_at (gimple_location (stmt),
		      "%qs region may not be closely nested inside "
		      "of work-sharing, %<loop%>, explicit %<task%> or "
		      "%<taskloop%> region",
		      gimple_code (stmt) == GIMPLE_OMP_MASTER
		      ? "master" : "masked");
	    return false;
	  case GIMPLE_OMP_PARALLEL:
	  case GIMPLE_OMP_TEAMS:
	    return true;
	  case GIMPLE_OMP_TARGET:
	    if (gimple_omp_target_kind (ctx->stmt)
		== GF_OMP_TARGET_KIND_REGION)
	      return true;
	    break;
	  default:
	    break;
	  }
      break;
    case GIMPLE_OMP_SCOPE:
      for (; ctx != NULL; ctx = ctx->outer)
	switch (gimple_code (ctx->stmt))
	  {
	  case GIMPLE_OMP_FOR:
	    if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
		&& gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
	      break;
	    /* FALLTHRU */
	  case GIMPLE_OMP_SECTIONS:
	  case GIMPLE_OMP_SINGLE:
	  case GIMPLE_OMP_TASK:
	  case GIMPLE_OMP_CRITICAL:
	  case GIMPLE_OMP_ORDERED:
	  case GIMPLE_OMP_MASTER:
	  case GIMPLE_OMP_MASKED:
	    error_at (gimple_location (stmt),
		      "%<scope%> region may not be closely nested inside "
		      "of work-sharing, %<loop%>, explicit %<task%>, "
		      "%<taskloop%>, %<critical%>, %<ordered%>, %<master%>, "
		      "or %<masked%> region");
	    return false;
	  case GIMPLE_OMP_PARALLEL:
	  case GIMPLE_OMP_TEAMS:
	    return true;
	  case GIMPLE_OMP_TARGET:
	    if (gimple_omp_target_kind (ctx->stmt)
		== GF_OMP_TARGET_KIND_REGION)
	      return true;
	    break;
	  default:
	    break;
	  }
      break;
    case GIMPLE_OMP_TASK:
      /* depend(source)/depend(sink:...) are only valid on ordered.  */
      for (c = gimple_omp_task_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
	    && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
		|| OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
	  {
	    enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
	    error_at (OMP_CLAUSE_LOCATION (c),
		      "%<depend(%s)%> is only allowed in %<omp ordered%>",
		      kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
	    return false;
	  }
      break;
    case GIMPLE_OMP_ORDERED:
      for (c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
	   c; c = OMP_CLAUSE_CHAIN (c))
	{
	  if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
	    {
	      gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREADS
			  || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SIMD);
	      continue;
	    }
	  enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
	  if (kind == OMP_CLAUSE_DEPEND_SOURCE
	      || kind == OMP_CLAUSE_DEPEND_SINK)
	    {
	      tree oclause;
	      /* Look for containing ordered(N) loop.  */
	      if (ctx == NULL
		  || gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
		  || (oclause
			= omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
					   OMP_CLAUSE_ORDERED)) == NULL_TREE)
		{
		  error_at (OMP_CLAUSE_LOCATION (c),
			    "%<ordered%> construct with %<depend%> clause "
			    "must be closely nested inside an %<ordered%> "
			    "loop");
		  return false;
		}
	      else if (OMP_CLAUSE_ORDERED_EXPR (oclause) == NULL_TREE)
		{
		  error_at (OMP_CLAUSE_LOCATION (c),
			    "%<ordered%> construct with %<depend%> clause "
			    "must be closely nested inside a loop with "
			    "%<ordered%> clause with a parameter");
		  return false;
		}
	    }
	  else
	    {
	      error_at (OMP_CLAUSE_LOCATION (c),
			"invalid depend kind in omp %<ordered%> %<depend%>");
	      return false;
	    }
	}
      c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
      if (omp_find_clause (c, OMP_CLAUSE_SIMD))
	{
	  /* ordered simd must be closely nested inside of simd region,
	     and simd region must not encounter constructs other than
	     ordered simd, therefore ordered simd may be either orphaned,
	     or ctx->stmt must be simd.  The latter case is handled already
	     earlier.  */
	  if (ctx != NULL)
	    {
	      error_at (gimple_location (stmt),
			"%<ordered%> %<simd%> must be closely nested inside "
			"%<simd%> region");
	      return false;
	    }
	}
      for (; ctx != NULL; ctx = ctx->outer)
	switch (gimple_code (ctx->stmt))
	  {
	  case GIMPLE_OMP_CRITICAL:
	  case GIMPLE_OMP_TASK:
	  case GIMPLE_OMP_ORDERED:
	  ordered_in_taskloop:
	    error_at (gimple_location (stmt),
		      "%<ordered%> region may not be closely nested inside "
		      "of %<critical%>, %<ordered%>, explicit %<task%> or "
		      "%<taskloop%> region");
	    return false;
	  case GIMPLE_OMP_FOR:
	    if (gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP)
	      goto ordered_in_taskloop;
	    tree o;
	    o = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
				 OMP_CLAUSE_ORDERED);
	    if (o == NULL)
	      {
		error_at (gimple_location (stmt),
			  "%<ordered%> region must be closely nested inside "
			  "a loop region with an %<ordered%> clause");
		return false;
	      }
	    /* ordered(N) loops require depend clauses on nested ordered
	       constructs.  */
	    if (OMP_CLAUSE_ORDERED_EXPR (o) != NULL_TREE
		&& omp_find_clause (c, OMP_CLAUSE_DEPEND) == NULL_TREE)
	      {
		error_at (gimple_location (stmt),
			  "%<ordered%> region without %<depend%> clause may "
			  "not be closely nested inside a loop region with "
			  "an %<ordered%> clause with a parameter");
		return false;
	      }
	    return true;
	  case GIMPLE_OMP_TARGET:
	    if (gimple_omp_target_kind (ctx->stmt)
		!= GF_OMP_TARGET_KIND_REGION)
	      break;
	    /* FALLTHRU */
	  case GIMPLE_OMP_PARALLEL:
	  case GIMPLE_OMP_TEAMS:
	    error_at (gimple_location (stmt),
		      "%<ordered%> region must be closely nested inside "
		      "a loop region with an %<ordered%> clause");
	    return false;
	  default:
	    break;
	  }
      break;
    case GIMPLE_OMP_CRITICAL:
      {
	/* Reject nesting two critical regions with the same name.  */
	tree this_stmt_name
	  = gimple_omp_critical_name (as_a <gomp_critical *> (stmt));
	for (; ctx != NULL; ctx = ctx->outer)
	  if (gomp_critical *other_crit
	        = dyn_cast <gomp_critical *> (ctx->stmt))
	    if (this_stmt_name == gimple_omp_critical_name (other_crit))
	      {
		error_at (gimple_location (stmt),
			  "%<critical%> region may not be nested inside "
			  "a %<critical%> region with the same name");
		return false;
	      }
      }
      break;
    case GIMPLE_OMP_TEAMS:
      if (ctx == NULL)
	break;
      else if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET
	       || (gimple_omp_target_kind (ctx->stmt)
		   != GF_OMP_TARGET_KIND_REGION))
	{
	  /* Teams construct can appear either strictly nested inside of
	     target construct with no intervening stmts, or can be encountered
	     only by initial task (so must not appear inside any OpenMP
	     construct.  */
	  error_at (gimple_location (stmt),
		    "%<teams%> construct must be closely nested inside of "
		    "%<target%> construct or not nested in any OpenMP "
		    "construct");
	  return false;
	}
      break;
    case GIMPLE_OMP_TARGET:
      for (c = gimple_omp_target_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
	    && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
		|| OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
	  {
	    enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
	    error_at (OMP_CLAUSE_LOCATION (c),
		      "%<depend(%s)%> is only allowed in %<omp ordered%>",
		      kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
	    return false;
	  }
      if (is_gimple_omp_offloaded (stmt)
	  && oacc_get_fn_attrib (cfun->decl) != NULL)
	{
	  error_at (gimple_location (stmt),
		    "OpenACC region inside of OpenACC routine, nested "
		    "parallelism not supported yet");
	  return false;
	}
      for (; ctx != NULL; ctx = ctx->outer)
	{
	  if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET)
	    {
	      if (is_gimple_omp (stmt)
		  && is_gimple_omp_oacc (stmt)
		  && is_gimple_omp (ctx->stmt))
		{
		  error_at (gimple_location (stmt),
			    "OpenACC construct inside of non-OpenACC region");
		  return false;
		}
	      continue;
	    }

	  /* Translate both target kinds to user-visible construct
	     names for the diagnostics below.  */
	  const char *stmt_name, *ctx_stmt_name;
	  switch (gimple_omp_target_kind (stmt))
	    {
	    case GF_OMP_TARGET_KIND_REGION: stmt_name = "target"; break;
	    case GF_OMP_TARGET_KIND_DATA: stmt_name = "target data"; break;
	    case GF_OMP_TARGET_KIND_UPDATE: stmt_name = "target update"; break;
	    case GF_OMP_TARGET_KIND_ENTER_DATA:
	      stmt_name = "target enter data"; break;
	    case GF_OMP_TARGET_KIND_EXIT_DATA:
	      stmt_name = "target exit data"; break;
	    case GF_OMP_TARGET_KIND_OACC_PARALLEL: stmt_name = "parallel"; break;
	    case GF_OMP_TARGET_KIND_OACC_KERNELS: stmt_name = "kernels"; break;
	    case GF_OMP_TARGET_KIND_OACC_SERIAL: stmt_name = "serial"; break;
	    case GF_OMP_TARGET_KIND_OACC_DATA: stmt_name = "data"; break;
	    case GF_OMP_TARGET_KIND_OACC_UPDATE: stmt_name = "update"; break;
	    case GF_OMP_TARGET_KIND_OACC_ENTER_DATA:
	      stmt_name = "enter data"; break;
	    case GF_OMP_TARGET_KIND_OACC_EXIT_DATA:
	      stmt_name = "exit data"; break;
	    case GF_OMP_TARGET_KIND_OACC_DECLARE: stmt_name = "declare"; break;
	    case GF_OMP_TARGET_KIND_OACC_HOST_DATA: stmt_name = "host_data";
	      break;
	    case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED:
	    case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE:
	    case GF_OMP_TARGET_KIND_OACC_DATA_KERNELS:
	      /* OpenACC 'kernels' decomposed parts.  */
	      stmt_name = "kernels"; break;
	    default: gcc_unreachable ();
	    }
	  switch (gimple_omp_target_kind (ctx->stmt))
	    {
	    case GF_OMP_TARGET_KIND_REGION: ctx_stmt_name = "target"; break;
	    case GF_OMP_TARGET_KIND_DATA: ctx_stmt_name = "target data"; break;
	    case GF_OMP_TARGET_KIND_OACC_PARALLEL:
	      ctx_stmt_name = "parallel"; break;
	    case GF_OMP_TARGET_KIND_OACC_KERNELS:
	      ctx_stmt_name = "kernels"; break;
	    case GF_OMP_TARGET_KIND_OACC_SERIAL:
	      ctx_stmt_name = "serial"; break;
	    case GF_OMP_TARGET_KIND_OACC_DATA: ctx_stmt_name = "data"; break;
	    case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
	      ctx_stmt_name = "host_data"; break;
	    case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED:
	    case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE:
	    case GF_OMP_TARGET_KIND_OACC_DATA_KERNELS:
	      /* OpenACC 'kernels' decomposed parts.  */
	      ctx_stmt_name = "kernels"; break;
	    default: gcc_unreachable ();
	    }

	  /* OpenACC/OpenMP mismatch?  */
	  if (is_gimple_omp_oacc (stmt)
	      != is_gimple_omp_oacc (ctx->stmt))
	    {
	      error_at (gimple_location (stmt),
			"%s %qs construct inside of %s %qs region",
			(is_gimple_omp_oacc (stmt)
			 ? "OpenACC" : "OpenMP"), stmt_name,
			(is_gimple_omp_oacc (ctx->stmt)
			 ? "OpenACC" : "OpenMP"), ctx_stmt_name);
	      return false;
	    }
	  if (is_gimple_omp_offloaded (ctx->stmt))
	    {
	      /* No GIMPLE_OMP_TARGET inside offloaded OpenACC CTX.  */
	      if (is_gimple_omp_oacc (ctx->stmt))
		{
		  error_at (gimple_location (stmt),
			    "%qs construct inside of %qs region",
			    stmt_name, ctx_stmt_name);
		  return false;
		}
	      else
		{
		  /* OpenMP nested target: allowed, but diagnose.  */
		  warning_at (gimple_location (stmt), 0,
			      "%qs construct inside of %qs region",
			      stmt_name, ctx_stmt_name);
		}
	    }
	}
      break;
    default:
      break;
    }
  return true;
}
3888
3889
3890 /* Helper function scan_omp.
3891
3892 Callback for walk_tree or operators in walk_gimple_stmt used to
3893 scan for OMP directives in TP. */
3894
3895 static tree
scan_omp_1_op(tree * tp,int * walk_subtrees,void * data)3896 scan_omp_1_op (tree *tp, int *walk_subtrees, void *data)
3897 {
3898 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
3899 omp_context *ctx = (omp_context *) wi->info;
3900 tree t = *tp;
3901
3902 switch (TREE_CODE (t))
3903 {
3904 case VAR_DECL:
3905 case PARM_DECL:
3906 case LABEL_DECL:
3907 case RESULT_DECL:
3908 if (ctx)
3909 {
3910 tree repl = remap_decl (t, &ctx->cb);
3911 gcc_checking_assert (TREE_CODE (repl) != ERROR_MARK);
3912 *tp = repl;
3913 }
3914 break;
3915
3916 default:
3917 if (ctx && TYPE_P (t))
3918 *tp = remap_type (t, &ctx->cb);
3919 else if (!DECL_P (t))
3920 {
3921 *walk_subtrees = 1;
3922 if (ctx)
3923 {
3924 tree tem = remap_type (TREE_TYPE (t), &ctx->cb);
3925 if (tem != TREE_TYPE (t))
3926 {
3927 if (TREE_CODE (t) == INTEGER_CST)
3928 *tp = wide_int_to_tree (tem, wi::to_wide (t));
3929 else
3930 TREE_TYPE (t) = tem;
3931 }
3932 }
3933 }
3934 break;
3935 }
3936
3937 return NULL_TREE;
3938 }
3939
3940 /* Return true if FNDECL is a setjmp or a longjmp. */
3941
3942 static bool
setjmp_or_longjmp_p(const_tree fndecl)3943 setjmp_or_longjmp_p (const_tree fndecl)
3944 {
3945 if (fndecl_built_in_p (fndecl, BUILT_IN_SETJMP)
3946 || fndecl_built_in_p (fndecl, BUILT_IN_LONGJMP))
3947 return true;
3948
3949 tree declname = DECL_NAME (fndecl);
3950 if (!declname
3951 || (DECL_CONTEXT (fndecl) != NULL_TREE
3952 && TREE_CODE (DECL_CONTEXT (fndecl)) != TRANSLATION_UNIT_DECL)
3953 || !TREE_PUBLIC (fndecl))
3954 return false;
3955
3956 const char *name = IDENTIFIER_POINTER (declname);
3957 return !strcmp (name, "setjmp") || !strcmp (name, "longjmp");
3958 }
3959
/* Return true if FNDECL is an omp_* runtime API call.  */

static bool
omp_runtime_api_call (const_tree fndecl)
{
  tree declname = DECL_NAME (fndecl);
  /* Only a public function declared at file scope can be the OpenMP
     runtime API, no matter what it is called.  */
  if (!declname
      || (DECL_CONTEXT (fndecl) != NULL_TREE
	  && TREE_CODE (DECL_CONTEXT (fndecl)) != TRANSLATION_UNIT_DECL)
      || !TREE_PUBLIC (fndecl))
    return false;

  const char *name = IDENTIFIER_POINTER (declname);
  if (!startswith (name, "omp_"))
    return false;

  static const char *omp_runtime_apis[] =
    {
      /* This array has 3 sections, separated by NULL entries.  First
	 omp_* calls that don't have any suffixes.  */
      "aligned_alloc",
      "aligned_calloc",
      "alloc",
      "calloc",
      "free",
      "realloc",
      "target_alloc",
      "target_associate_ptr",
      "target_disassociate_ptr",
      "target_free",
      "target_is_present",
      "target_memcpy",
      "target_memcpy_rect",
      NULL,
      /* Now omp_* calls that are available as omp_* and omp_*_; however, the
	 DECL_NAME is always omp_* without tailing underscore.  */
      "capture_affinity",
      "destroy_allocator",
      "destroy_lock",
      "destroy_nest_lock",
      "display_affinity",
      "fulfill_event",
      "get_active_level",
      "get_affinity_format",
      "get_cancellation",
      "get_default_allocator",
      "get_default_device",
      "get_device_num",
      "get_dynamic",
      "get_initial_device",
      "get_level",
      "get_max_active_levels",
      "get_max_task_priority",
      "get_max_teams",
      "get_max_threads",
      "get_nested",
      "get_num_devices",
      "get_num_places",
      "get_num_procs",
      "get_num_teams",
      "get_num_threads",
      "get_partition_num_places",
      "get_place_num",
      "get_proc_bind",
      "get_supported_active_levels",
      "get_team_num",
      "get_teams_thread_limit",
      "get_thread_limit",
      "get_thread_num",
      "get_wtick",
      "get_wtime",
      "in_final",
      "in_parallel",
      "init_lock",
      "init_nest_lock",
      "is_initial_device",
      "pause_resource",
      "pause_resource_all",
      "set_affinity_format",
      "set_default_allocator",
      "set_lock",
      "set_nest_lock",
      "test_lock",
      "test_nest_lock",
      "unset_lock",
      "unset_nest_lock",
      NULL,
      /* And finally calls available as omp_*, omp_*_ and omp_*_8_; however,
	 as DECL_NAME only omp_* and omp_*_8 appear.  */
      "display_env",
      "get_ancestor_thread_num",
      "init_allocator",
      "get_partition_place_nums",
      "get_place_num_procs",
      "get_place_proc_ids",
      "get_schedule",
      "get_team_size",
      "set_default_device",
      "set_dynamic",
      "set_max_active_levels",
      "set_nested",
      "set_num_teams",
      "set_num_threads",
      "set_schedule",
      "set_teams_thread_limit"
    };

  /* MODE counts the NULL separators seen so far, i.e. which section of
     the table we are currently scanning; only the last section (mode > 1)
     accepts an "_8" suffix after the entry name.  */
  int mode = 0;
  for (unsigned i = 0; i < ARRAY_SIZE (omp_runtime_apis); i++)
    {
      if (omp_runtime_apis[i] == NULL)
	{
	  mode++;
	  continue;
	}
      size_t len = strlen (omp_runtime_apis[i]);
      /* Match "omp_<entry>" exactly, or "omp_<entry>_8" in the last
	 section; NAME is known to start with "omp_" (4 chars).  */
      if (strncmp (name + 4, omp_runtime_apis[i], len) == 0
	  && (name[4 + len] == '\0'
	      || (mode > 1 && strcmp (name + 4 + len, "_8") == 0)))
	return true;
    }
  return false;
}
4083
/* Helper function for scan_omp.

   Callback for walk_gimple_stmt used to scan for OMP directives in
   the current statement in GSI.  */

static tree
scan_omp_1_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
		 struct walk_stmt_info *wi)
{
  gimple *stmt = gsi_stmt (*gsi);
  omp_context *ctx = (omp_context *) wi->info;

  if (gimple_has_location (stmt))
    input_location = gimple_location (stmt);

  /* Check the nesting restrictions.  */
  bool remove = false;
  if (is_gimple_omp (stmt))
    remove = !check_omp_nesting_restrictions (stmt, ctx);
  else if (is_gimple_call (stmt))
    {
      tree fndecl = gimple_call_fndecl (stmt);
      if (fndecl)
	{
	  /* setjmp/longjmp are rejected inside simd constructs.
	     NOTE(review): the !ctx->loop_p exemption presumably covers
	     constructs derived from the OpenMP 'loop' directive —
	     confirm against where loop_p is set.  */
	  if (ctx
	      && gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
	      && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD
	      && setjmp_or_longjmp_p (fndecl)
	      && !ctx->loop_p)
	    {
	      remove = true;
	      error_at (gimple_location (stmt),
			"setjmp/longjmp inside %<simd%> construct");
	    }
	  else if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
	    switch (DECL_FUNCTION_CODE (fndecl))
	      {
	      /* These GOMP builtins stand for standalone directives and
		 must obey the same nesting restrictions.  */
	      case BUILT_IN_GOMP_BARRIER:
	      case BUILT_IN_GOMP_CANCEL:
	      case BUILT_IN_GOMP_CANCELLATION_POINT:
	      case BUILT_IN_GOMP_TASKYIELD:
	      case BUILT_IN_GOMP_TASKWAIT:
	      case BUILT_IN_GOMP_TASKGROUP_START:
	      case BUILT_IN_GOMP_TASKGROUP_END:
		remove = !check_omp_nesting_restrictions (stmt, ctx);
		break;
	      default:
		break;
	      }
	  else if (ctx)
	    {
	      /* Diagnose OpenMP runtime API calls made in contexts that
		 disallow them.  For a scan construct, the restrictions
		 of the enclosing context apply.  */
	      omp_context *octx = ctx;
	      if (gimple_code (ctx->stmt) == GIMPLE_OMP_SCAN && ctx->outer)
		octx = ctx->outer;
	      if (octx->order_concurrent && omp_runtime_api_call (fndecl))
		{
		  remove = true;
		  error_at (gimple_location (stmt),
			    "OpenMP runtime API call %qD in a region with "
			    "%<order(concurrent)%> clause", fndecl);
		}
	      /* Only omp_get_num_teams and omp_get_team_num may be
		 called strictly nested in a teams region.  */
	      if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
		  && omp_runtime_api_call (fndecl)
		  && ((IDENTIFIER_LENGTH (DECL_NAME (fndecl))
		       != strlen ("omp_get_num_teams"))
		      || strcmp (IDENTIFIER_POINTER (DECL_NAME (fndecl)),
				 "omp_get_num_teams") != 0)
		  && ((IDENTIFIER_LENGTH (DECL_NAME (fndecl))
		       != strlen ("omp_get_team_num"))
		      || strcmp (IDENTIFIER_POINTER (DECL_NAME (fndecl)),
				 "omp_get_team_num") != 0))
		{
		  remove = true;
		  error_at (gimple_location (stmt),
			    "OpenMP runtime API call %qD strictly nested in a "
			    "%<teams%> region", fndecl);
		}
	      /* Runtime API calls are diagnosed inside target regions
		 carrying a device(ancestor) clause (reverse offload).  */
	      if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET
		  && (gimple_omp_target_kind (ctx->stmt)
		      == GF_OMP_TARGET_KIND_REGION)
		  && omp_runtime_api_call (fndecl))
		{
		  tree tgt_clauses = gimple_omp_target_clauses (ctx->stmt);
		  tree c = omp_find_clause (tgt_clauses, OMP_CLAUSE_DEVICE);
		  if (c && OMP_CLAUSE_DEVICE_ANCESTOR (c))
		    error_at (gimple_location (stmt),
			      "OpenMP runtime API call %qD in a region with "
			      "%<device(ancestor)%> clause", fndecl);
		}
	    }
	}
    }
  /* Replace an invalid statement with a nop so the walk (and later
     lowering) can proceed after the error was emitted.  */
  if (remove)
    {
      stmt = gimple_build_nop ();
      gsi_replace (gsi, stmt, false);
    }

  *handled_ops_p = true;

  /* Dispatch to the construct-specific scanner, creating nested
     contexts where the construct introduces one.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_OMP_PARALLEL:
      taskreg_nesting_level++;
      scan_omp_parallel (gsi, ctx);
      taskreg_nesting_level--;
      break;

    case GIMPLE_OMP_TASK:
      taskreg_nesting_level++;
      scan_omp_task (gsi, ctx);
      taskreg_nesting_level--;
      break;

    case GIMPLE_OMP_FOR:
      /* A combined simd with an inscan reduction gets a dedicated
	 scan, unless errors were already reported.  */
      if ((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
	   == GF_OMP_FOR_KIND_SIMD)
	  && gimple_omp_for_combined_into_p (stmt)
	  && gimple_code (ctx->stmt) != GIMPLE_OMP_SCAN)
	{
	  tree clauses = gimple_omp_for_clauses (as_a <gomp_for *> (stmt));
	  tree c = omp_find_clause (clauses, OMP_CLAUSE_REDUCTION);
	  if (c && OMP_CLAUSE_REDUCTION_INSCAN (c) && !seen_error ())
	    {
	      scan_omp_simd_scan (gsi, as_a <gomp_for *> (stmt), ctx);
	      break;
	    }
	}
      /* A non-collapsed simd in a context that may be offloaded to a
	 SIMT device is scanned via scan_omp_simd.  */
      if ((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
	   == GF_OMP_FOR_KIND_SIMD)
	  && omp_maybe_offloaded_ctx (ctx)
	  && omp_max_simt_vf ()
	  && gimple_omp_for_collapse (stmt) == 1)
	scan_omp_simd (gsi, as_a <gomp_for *> (stmt), ctx);
      else
	scan_omp_for (as_a <gomp_for *> (stmt), ctx);
      break;

    case GIMPLE_OMP_SCOPE:
      ctx = new_omp_context (stmt, ctx);
      scan_sharing_clauses (gimple_omp_scope_clauses (stmt), ctx);
      scan_omp (gimple_omp_body_ptr (stmt), ctx);
      break;

    case GIMPLE_OMP_SECTIONS:
      scan_omp_sections (as_a <gomp_sections *> (stmt), ctx);
      break;

    case GIMPLE_OMP_SINGLE:
      scan_omp_single (as_a <gomp_single *> (stmt), ctx);
      break;

    case GIMPLE_OMP_SCAN:
      /* Record inclusive vs. exclusive on the *enclosing* context, then
	 fall through to the generic context creation below.  */
      if (tree clauses = gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt)))
	{
	  if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_INCLUSIVE)
	    ctx->scan_inclusive = true;
	  else if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_EXCLUSIVE)
	    ctx->scan_exclusive = true;
	}
      /* FALLTHRU */
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_CRITICAL:
      ctx = new_omp_context (stmt, ctx);
      scan_omp (gimple_omp_body_ptr (stmt), ctx);
      break;

    case GIMPLE_OMP_MASKED:
      ctx = new_omp_context (stmt, ctx);
      scan_sharing_clauses (gimple_omp_masked_clauses (stmt), ctx);
      scan_omp (gimple_omp_body_ptr (stmt), ctx);
      break;

    case GIMPLE_OMP_TASKGROUP:
      ctx = new_omp_context (stmt, ctx);
      scan_sharing_clauses (gimple_omp_taskgroup_clauses (stmt), ctx);
      scan_omp (gimple_omp_body_ptr (stmt), ctx);
      break;

    case GIMPLE_OMP_TARGET:
      /* Offloaded target regions take part in the taskreg nesting
	 accounting, like parallel/task above.  */
      if (is_gimple_omp_offloaded (stmt))
	{
	  taskreg_nesting_level++;
	  scan_omp_target (as_a <gomp_target *> (stmt), ctx);
	  taskreg_nesting_level--;
	}
      else
	scan_omp_target (as_a <gomp_target *> (stmt), ctx);
      break;

    case GIMPLE_OMP_TEAMS:
      /* Likewise for host teams regions.  */
      if (gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
	{
	  taskreg_nesting_level++;
	  scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
	  taskreg_nesting_level--;
	}
      else
	scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
      break;

    case GIMPLE_BIND:
      {
	tree var;

	*handled_ops_p = false;
	/* Map bind-local variables to themselves, so that remapping in
	   CTX leaves them untouched.  */
	if (ctx)
	  for (var = gimple_bind_vars (as_a <gbind *> (stmt));
	       var ;
	       var = DECL_CHAIN (var))
	    insert_decl_map (&ctx->cb, var, var);
      }
      break;
    default:
      *handled_ops_p = false;
      break;
    }

  return NULL_TREE;
}
4306
4307
4308 /* Scan all the statements starting at the current statement. CTX
4309 contains context information about the OMP directives and
4310 clauses found during the scan. */
4311
4312 static void
scan_omp(gimple_seq * body_p,omp_context * ctx)4313 scan_omp (gimple_seq *body_p, omp_context *ctx)
4314 {
4315 location_t saved_location;
4316 struct walk_stmt_info wi;
4317
4318 memset (&wi, 0, sizeof (wi));
4319 wi.info = ctx;
4320 wi.want_locations = true;
4321
4322 saved_location = input_location;
4323 walk_gimple_seq_mod (body_p, scan_omp_1_stmt, scan_omp_1_op, &wi);
4324 input_location = saved_location;
4325 }
4326
4327 /* Re-gimplification and code generation routines. */
4328
4329 /* Remove omp_member_access_dummy_var variables from gimple_bind_vars
4330 of BIND if in a method. */
4331
4332 static void
maybe_remove_omp_member_access_dummy_vars(gbind * bind)4333 maybe_remove_omp_member_access_dummy_vars (gbind *bind)
4334 {
4335 if (DECL_ARGUMENTS (current_function_decl)
4336 && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
4337 && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
4338 == POINTER_TYPE))
4339 {
4340 tree vars = gimple_bind_vars (bind);
4341 for (tree *pvar = &vars; *pvar; )
4342 if (omp_member_access_dummy_var (*pvar))
4343 *pvar = DECL_CHAIN (*pvar);
4344 else
4345 pvar = &DECL_CHAIN (*pvar);
4346 gimple_bind_set_vars (bind, vars);
4347 }
4348 }
4349
4350 /* Remove omp_member_access_dummy_var variables from BLOCK_VARS of
4351 block and its subblocks. */
4352
4353 static void
remove_member_access_dummy_vars(tree block)4354 remove_member_access_dummy_vars (tree block)
4355 {
4356 for (tree *pvar = &BLOCK_VARS (block); *pvar; )
4357 if (omp_member_access_dummy_var (*pvar))
4358 *pvar = DECL_CHAIN (*pvar);
4359 else
4360 pvar = &DECL_CHAIN (*pvar);
4361
4362 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
4363 remove_member_access_dummy_vars (block);
4364 }
4365
4366 /* If a context was created for STMT when it was scanned, return it. */
4367
4368 static omp_context *
maybe_lookup_ctx(gimple * stmt)4369 maybe_lookup_ctx (gimple *stmt)
4370 {
4371 splay_tree_node n;
4372 n = splay_tree_lookup (all_contexts, (splay_tree_key) stmt);
4373 return n ? (omp_context *) n->value : NULL;
4374 }
4375
4376
4377 /* Find the mapping for DECL in CTX or the immediately enclosing
4378 context that has a mapping for DECL.
4379
4380 If CTX is a nested parallel directive, we may have to use the decl
4381 mappings created in CTX's parent context. Suppose that we have the
4382 following parallel nesting (variable UIDs showed for clarity):
4383
4384 iD.1562 = 0;
4385 #omp parallel shared(iD.1562) -> outer parallel
4386 iD.1562 = iD.1562 + 1;
4387
4388 #omp parallel shared (iD.1562) -> inner parallel
4389 iD.1562 = iD.1562 - 1;
4390
4391 Each parallel structure will create a distinct .omp_data_s structure
4392 for copying iD.1562 in/out of the directive:
4393
4394 outer parallel .omp_data_s.1.i -> iD.1562
4395 inner parallel .omp_data_s.2.i -> iD.1562
4396
4397 A shared variable mapping will produce a copy-out operation before
4398 the parallel directive and a copy-in operation after it. So, in
4399 this case we would have:
4400
4401 iD.1562 = 0;
4402 .omp_data_o.1.i = iD.1562;
4403 #omp parallel shared(iD.1562) -> outer parallel
4404 .omp_data_i.1 = &.omp_data_o.1
4405 .omp_data_i.1->i = .omp_data_i.1->i + 1;
4406
4407 .omp_data_o.2.i = iD.1562; -> **
4408 #omp parallel shared(iD.1562) -> inner parallel
4409 .omp_data_i.2 = &.omp_data_o.2
4410 .omp_data_i.2->i = .omp_data_i.2->i - 1;
4411
4412
4413 ** This is a problem. The symbol iD.1562 cannot be referenced
4414 inside the body of the outer parallel region. But since we are
4415 emitting this copy operation while expanding the inner parallel
4416 directive, we need to access the CTX structure of the outer
4417 parallel directive to get the correct mapping:
4418
4419 .omp_data_o.2.i = .omp_data_i.1->i
4420
4421 Since there may be other workshare or parallel directives enclosing
4422 the parallel directive, it may be necessary to walk up the context
4423 parent chain. This is not a problem in general because nested
4424 parallelism happens only rarely. */
4425
4426 static tree
lookup_decl_in_outer_ctx(tree decl,omp_context * ctx)4427 lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
4428 {
4429 tree t;
4430 omp_context *up;
4431
4432 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
4433 t = maybe_lookup_decl (decl, up);
4434
4435 gcc_assert (!ctx->is_nested || t || is_global_var (decl));
4436
4437 return t ? t : decl;
4438 }
4439
4440
4441 /* Similar to lookup_decl_in_outer_ctx, but return DECL if not found
4442 in outer contexts. */
4443
4444 static tree
maybe_lookup_decl_in_outer_ctx(tree decl,omp_context * ctx)4445 maybe_lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
4446 {
4447 tree t = NULL;
4448 omp_context *up;
4449
4450 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
4451 t = maybe_lookup_decl (decl, up);
4452
4453 return t ? t : decl;
4454 }
4455
4456
/* Construct the initialization value for reduction operation OP.
   This is the identity element of OP for TYPE, i.e. the value X
   such that "X op Y == Y" for any Y.  */

tree
omp_reduction_init_op (location_t loc, enum tree_code op, tree type)
{
  switch (op)
    {
    /* Operations whose identity element is zero.  */
    case PLUS_EXPR:
    case MINUS_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_ORIF_EXPR:
    case TRUTH_XOR_EXPR:
    case NE_EXPR:
      return build_zero_cst (type);

    /* Operations whose identity element is one.  */
    case MULT_EXPR:
    case TRUTH_AND_EXPR:
    case TRUTH_ANDIF_EXPR:
    case EQ_EXPR:
      return fold_convert_loc (loc, type, integer_one_node);

    /* Bitwise AND: all bits set.  */
    case BIT_AND_EXPR:
      return fold_convert_loc (loc, type, integer_minus_one_node);

    case MAX_EXPR:
      /* The identity for MAX is the smallest representable value:
	 -inf for floats that honor infinities, else the most negative
	 finite value; the minimum value for integers and pointers.  */
      if (SCALAR_FLOAT_TYPE_P (type))
	{
	  REAL_VALUE_TYPE max, min;
	  if (HONOR_INFINITIES (type))
	    {
	      real_inf (&max);
	      real_arithmetic (&min, NEGATE_EXPR, &max, NULL);
	    }
	  else
	    real_maxval (&min, 1, TYPE_MODE (type));
	  return build_real (type, min);
	}
      else if (POINTER_TYPE_P (type))
	{
	  wide_int min
	    = wi::min_value (TYPE_PRECISION (type), TYPE_SIGN (type));
	  return wide_int_to_tree (type, min);
	}
      else
	{
	  gcc_assert (INTEGRAL_TYPE_P (type));
	  return TYPE_MIN_VALUE (type);
	}

    case MIN_EXPR:
      /* Likewise, the identity for MIN is the largest representable
	 value.  */
      if (SCALAR_FLOAT_TYPE_P (type))
	{
	  REAL_VALUE_TYPE max;
	  if (HONOR_INFINITIES (type))
	    real_inf (&max);
	  else
	    real_maxval (&max, 0, TYPE_MODE (type));
	  return build_real (type, max);
	}
      else if (POINTER_TYPE_P (type))
	{
	  wide_int max
	    = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
	  return wide_int_to_tree (type, max);
	}
      else
	{
	  gcc_assert (INTEGRAL_TYPE_P (type));
	  return TYPE_MAX_VALUE (type);
	}

    default:
      gcc_unreachable ();
    }
}
4534
4535 /* Construct the initialization value for reduction CLAUSE. */
4536
4537 tree
omp_reduction_init(tree clause,tree type)4538 omp_reduction_init (tree clause, tree type)
4539 {
4540 return omp_reduction_init_op (OMP_CLAUSE_LOCATION (clause),
4541 OMP_CLAUSE_REDUCTION_CODE (clause), type);
4542 }
4543
/* Return alignment to be assumed for var in CLAUSE, which should be
   OMP_CLAUSE_ALIGNED.  */

static tree
omp_clause_aligned_alignment (tree clause)
{
  /* An explicit alignment given in the clause wins.  */
  if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
    return OMP_CLAUSE_ALIGNED_ALIGNMENT (clause);

  /* Otherwise return implementation defined alignment: the largest
     unit alignment of any vector type the target would use when
     autovectorizing integer or float element modes.  */
  unsigned int al = 1;
  opt_scalar_mode mode_iter;
  auto_vector_modes modes;
  targetm.vectorize.autovectorize_vector_modes (&modes, true);
  /* Scalar classes sit at even indexes; the matching vector class
     immediately follows at i + 1.  */
  static enum mode_class classes[]
    = { MODE_INT, MODE_VECTOR_INT, MODE_FLOAT, MODE_VECTOR_FLOAT };
  for (int i = 0; i < 4; i += 2)
    /* The for loop above dictates that we only walk through scalar classes.  */
    FOR_EACH_MODE_IN_CLASS (mode_iter, classes[i])
      {
	scalar_mode mode = mode_iter.require ();
	machine_mode vmode = targetm.vectorize.preferred_simd_mode (mode);
	if (GET_MODE_CLASS (vmode) != classes[i + 1])
	  continue;
	/* Prefer the widest related vector mode among the target's
	   autovectorization candidates.  */
	machine_mode alt_vmode;
	for (unsigned int j = 0; j < modes.length (); ++j)
	  if (related_vector_mode (modes[j], mode).exists (&alt_vmode)
	      && known_ge (GET_MODE_SIZE (alt_vmode), GET_MODE_SIZE (vmode)))
	    vmode = alt_vmode;

	/* Skip modes the frontend cannot express as a type.  */
	tree type = lang_hooks.types.type_for_mode (mode, 1);
	if (type == NULL_TREE || TYPE_MODE (type) != mode)
	  continue;
	type = build_vector_type_for_mode (type, vmode);
	if (TYPE_MODE (type) != vmode)
	  continue;
	if (TYPE_ALIGN_UNIT (type) > al)
	  al = TYPE_ALIGN_UNIT (type);
      }
  return build_int_cst (integer_type_node, al);
}
4585
4586
/* This structure is part of the interface between lower_rec_simd_input_clauses
   and lower_rec_input_clauses.  */

class omplow_simd_context {
public:
  /* Zero-fill all members up front; all-zero bits is the intended
     initial state of every field here (NOTE(review): this includes the
     embedded vec and gimple_seq — confirm against vec.h if non-POD
     members are ever added).  */
  omplow_simd_context () { memset (this, 0, sizeof (*this)); }
  tree idx;			/* Index variable used for the per-lane
				   "omp simd array" accesses.  */
  tree lane;			/* Lane number variable.  */
  tree lastlane;		/* Lane holding the last (reduced) value,
				   for inscan reductions.  */
  vec<tree, va_heap> simt_eargs;	/* Collected SIMT entry arguments
				   (addresses of privatized vars; slot 0
				   is reserved for the simduid).  */
  gimple_seq simt_dlist;	/* Statements clobbering SIMT privatized
				   variables at the end.  */
  poly_uint64_pod max_vf;	/* Maximum vectorization factor; 0 means
				   not yet computed, 1 disables lane
				   privatization.  */
  bool is_simt;			/* Whether lowering for SIMT execution.  */
};
4601
/* Helper function of lower_rec_input_clauses, used for #pragma omp simd
   privatization.  Privatize NEW_VAR per SIMD lane: on success set IVAR
   to the per-iteration reference and LVAR to the per-lane reference,
   and return true.  Return false when lane privatization is disabled
   (max_vf == 1).  RVAR/RVAR2, when non-NULL, receive additional
   references used by inscan reductions.  */

static bool
lower_rec_simd_input_clauses (tree new_var, omp_context *ctx,
			      omplow_simd_context *sctx, tree &ivar,
			      tree &lvar, tree *rvar = NULL,
			      tree *rvar2 = NULL)
{
  /* Compute the maximum vectorization factor lazily, on the first call
     for this construct (max_vf == 0 means "not yet computed").  */
  if (known_eq (sctx->max_vf, 0U))
    {
      sctx->max_vf = sctx->is_simt ? omp_max_simt_vf () : omp_max_vf ();
      if (maybe_gt (sctx->max_vf, 1U))
	{
	  /* Clamp by the safelen clause if present; a non-constant or
	     sub-1 safelen disables lane privatization entirely.  */
	  tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
				    OMP_CLAUSE_SAFELEN);
	  if (c)
	    {
	      poly_uint64 safe_len;
	      if (!poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
		  || maybe_lt (safe_len, 1U))
		sctx->max_vf = 1;
	      else
		sctx->max_vf = lower_bound (sctx->max_vf, safe_len);
	    }
	}
      if (sctx->is_simt && !known_eq (sctx->max_vf, 1U))
	{
	  /* Scan reduction clauses for cases SIMT cannot handle.  */
	  for (tree c = gimple_omp_for_clauses (ctx->stmt); c;
	       c = OMP_CLAUSE_CHAIN (c))
	    {
	      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
		continue;

	      if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
		{
		  /* UDR reductions are not supported yet for SIMT, disable
		     SIMT.  */
		  sctx->max_vf = 1;
		  break;
		}

	      if (truth_value_p (OMP_CLAUSE_REDUCTION_CODE (c))
		  && !INTEGRAL_TYPE_P (TREE_TYPE (new_var)))
		{
		  /* Doing boolean operations on non-integral types is
		     for conformance only, it's not worth supporting this
		     for SIMT.  */
		  sctx->max_vf = 1;
		  break;
		}
	    }
	}
      if (maybe_gt (sctx->max_vf, 1U))
	{
	  sctx->idx = create_tmp_var (unsigned_type_node);
	  sctx->lane = create_tmp_var (unsigned_type_node);
	}
    }
  if (known_eq (sctx->max_vf, 1U))
    return false;

  if (sctx->is_simt)
    {
      /* For SIMT, gimple registers are already per-lane; only
	 addressable variables need an explicit privatized copy marked
	 with the "omp simt private" attribute.  */
      if (is_gimple_reg (new_var))
	{
	  ivar = lvar = new_var;
	  return true;
	}
      tree type = TREE_TYPE (new_var), ptype = build_pointer_type (type);
      ivar = lvar = create_tmp_var (type);
      TREE_ADDRESSABLE (ivar) = 1;
      DECL_ATTRIBUTES (ivar) = tree_cons (get_identifier ("omp simt private"),
					  NULL, DECL_ATTRIBUTES (ivar));
      sctx->simt_eargs.safe_push (build1 (ADDR_EXPR, ptype, ivar));
      /* Clobber the private copy at the end of its lifetime.  */
      tree clobber = build_clobber (type);
      gimple *g = gimple_build_assign (ivar, clobber);
      gimple_seq_add_stmt (&sctx->simt_dlist, g);
    }
  else
    {
      /* Otherwise privatize via a max_vf-element "omp simd array":
	 IVAR indexes it with sctx->idx, LVAR with sctx->lane.  */
      tree atype = build_array_type_nelts (TREE_TYPE (new_var), sctx->max_vf);
      tree avar = create_tmp_var_raw (atype);
      if (TREE_ADDRESSABLE (new_var))
	TREE_ADDRESSABLE (avar) = 1;
      DECL_ATTRIBUTES (avar)
	= tree_cons (get_identifier ("omp simd array"), NULL,
		     DECL_ATTRIBUTES (avar));
      gimple_add_tmp_var (avar);
      tree iavar = avar;
      if (rvar && !ctx->for_simd_scan_phase)
	{
	  /* For inscan reductions, create another array temporary,
	     which will hold the reduced value.  */
	  iavar = create_tmp_var_raw (atype);
	  if (TREE_ADDRESSABLE (new_var))
	    TREE_ADDRESSABLE (iavar) = 1;
	  DECL_ATTRIBUTES (iavar)
	    = tree_cons (get_identifier ("omp simd array"), NULL,
			 tree_cons (get_identifier ("omp simd inscan"), NULL,
				    DECL_ATTRIBUTES (iavar)));
	  gimple_add_tmp_var (iavar);
	  ctx->cb.decl_map->put (avar, iavar);
	  if (sctx->lastlane == NULL_TREE)
	    sctx->lastlane = create_tmp_var (unsigned_type_node);
	  *rvar = build4 (ARRAY_REF, TREE_TYPE (new_var), iavar,
			  sctx->lastlane, NULL_TREE, NULL_TREE);
	  TREE_THIS_NOTRAP (*rvar) = 1;

	  if (ctx->scan_exclusive)
	    {
	      /* And for exclusive scan yet another one, which will
		 hold the value during the scan phase.  */
	      tree savar = create_tmp_var_raw (atype);
	      if (TREE_ADDRESSABLE (new_var))
		TREE_ADDRESSABLE (savar) = 1;
	      DECL_ATTRIBUTES (savar)
		= tree_cons (get_identifier ("omp simd array"), NULL,
			     tree_cons (get_identifier ("omp simd inscan "
							"exclusive"), NULL,
					DECL_ATTRIBUTES (savar)));
	      gimple_add_tmp_var (savar);
	      ctx->cb.decl_map->put (iavar, savar);
	      *rvar2 = build4 (ARRAY_REF, TREE_TYPE (new_var), savar,
			       sctx->idx, NULL_TREE, NULL_TREE);
	      TREE_THIS_NOTRAP (*rvar2) = 1;
	    }
	}
      ivar = build4 (ARRAY_REF, TREE_TYPE (new_var), iavar, sctx->idx,
		     NULL_TREE, NULL_TREE);
      lvar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, sctx->lane,
		     NULL_TREE, NULL_TREE);
      TREE_THIS_NOTRAP (ivar) = 1;
      TREE_THIS_NOTRAP (lvar) = 1;
    }
  /* Redirect remaining uses of NEW_VAR to its per-lane slot.  */
  if (DECL_P (new_var))
    {
      SET_DECL_VALUE_EXPR (new_var, lvar);
      DECL_HAS_VALUE_EXPR_P (new_var) = 1;
    }
  return true;
}
4744
4745 /* Helper function of lower_rec_input_clauses. For a reference
4746 in simd reduction, add an underlying variable it will reference. */
4747
4748 static void
handle_simd_reference(location_t loc,tree new_vard,gimple_seq * ilist)4749 handle_simd_reference (location_t loc, tree new_vard, gimple_seq *ilist)
4750 {
4751 tree z = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_vard)));
4752 if (TREE_CONSTANT (z))
4753 {
4754 z = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_vard)),
4755 get_name (new_vard));
4756 gimple_add_tmp_var (z);
4757 TREE_ADDRESSABLE (z) = 1;
4758 z = build_fold_addr_expr_loc (loc, z);
4759 gimplify_assign (new_vard, z, ilist);
4760 }
4761 }
4762
4763 /* Helper function for lower_rec_input_clauses. Emit into ilist sequence
4764 code to emit (type) (tskred_temp[idx]). */
4765
4766 static tree
task_reduction_read(gimple_seq * ilist,tree tskred_temp,tree type,unsigned idx)4767 task_reduction_read (gimple_seq *ilist, tree tskred_temp, tree type,
4768 unsigned idx)
4769 {
4770 unsigned HOST_WIDE_INT sz
4771 = tree_to_uhwi (TYPE_SIZE_UNIT (pointer_sized_int_node));
4772 tree r = build2 (MEM_REF, pointer_sized_int_node,
4773 tskred_temp, build_int_cst (TREE_TYPE (tskred_temp),
4774 idx * sz));
4775 tree v = create_tmp_var (pointer_sized_int_node);
4776 gimple *g = gimple_build_assign (v, r);
4777 gimple_seq_add_stmt (ilist, g);
4778 if (!useless_type_conversion_p (type, pointer_sized_int_node))
4779 {
4780 v = create_tmp_var (type);
4781 g = gimple_build_assign (v, NOP_EXPR, gimple_assign_lhs (g));
4782 gimple_seq_add_stmt (ilist, g);
4783 }
4784 return v;
4785 }
4786
/* Lower early initialization of privatized variable NEW_VAR
   if it needs an allocator (has allocate clause).  On success set
   ALLOCATOR and ALLOCATE_PTR, emit the GOMP_alloc call into ILIST and
   return true; return false when no allocate clause applies.  */

static bool
lower_private_allocate (tree var, tree new_var, tree &allocator,
			tree &allocate_ptr, gimple_seq *ilist,
			omp_context *ctx, bool is_ref, tree size)
{
  /* Already handled.  */
  if (allocator)
    return false;
  gcc_assert (allocate_ptr == NULL_TREE);
  /* Look up the allocator an allocate clause recorded for VAR.  */
  if (ctx->allocate_map
      && (DECL_P (new_var) || (TYPE_P (new_var) && size)))
    if (tree *allocatorp = ctx->allocate_map->get (var))
      allocator = *allocatorp;
  if (allocator == NULL_TREE)
    return false;
  if (!is_ref && omp_privatize_by_reference (var))
    {
      /* NOTE(review): by-reference vars are presumably handled on a
	 later call with is_ref set — confirm against the callers.
	 Reset ALLOCATOR so this call remains a no-op.  */
      allocator = NULL_TREE;
      return false;
    }

  /* A TREE_LIST encodes an explicit alignment modifier:
     TREE_VALUE is the alignment, TREE_PURPOSE the allocator.  */
  unsigned HOST_WIDE_INT ialign = 0;
  if (TREE_CODE (allocator) == TREE_LIST)
    {
      ialign = tree_to_uhwi (TREE_VALUE (allocator));
      allocator = TREE_PURPOSE (allocator);
    }
  if (TREE_CODE (allocator) != INTEGER_CST)
    allocator = build_outer_var_ref (allocator, ctx);
  allocator = fold_convert (pointer_sized_int_node, allocator);
  if (TREE_CODE (allocator) != INTEGER_CST)
    {
      /* Evaluate a non-constant allocator expression just once.  */
      tree var = create_tmp_var (TREE_TYPE (allocator));
      gimplify_assign (var, allocator, ilist);
      allocator = var;
    }

  /* Derive the pointer type, alignment and allocation size from
     NEW_VAR depending on whether it is a type, a reference, or a
     plain decl.  */
  tree ptr_type, align, sz = size;
  if (TYPE_P (new_var))
    {
      ptr_type = build_pointer_type (new_var);
      ialign = MAX (ialign, TYPE_ALIGN_UNIT (new_var));
    }
  else if (is_ref)
    {
      ptr_type = build_pointer_type (TREE_TYPE (TREE_TYPE (new_var)));
      ialign = MAX (ialign, TYPE_ALIGN_UNIT (TREE_TYPE (ptr_type)));
    }
  else
    {
      ptr_type = build_pointer_type (TREE_TYPE (new_var));
      ialign = MAX (ialign, DECL_ALIGN_UNIT (new_var));
      if (sz == NULL_TREE)
	sz = fold_convert (size_type_node, DECL_SIZE_UNIT (new_var));
    }
  align = build_int_cst (size_type_node, ialign);
  if (TREE_CODE (sz) != INTEGER_CST)
    {
      /* Likewise, evaluate a non-constant size just once.  */
      tree szvar = create_tmp_var (size_type_node);
      gimplify_assign (szvar, sz, ilist);
      sz = szvar;
    }
  /* Emit: allocate_ptr = GOMP_alloc (align, sz, allocator).  */
  allocate_ptr = create_tmp_var (ptr_type);
  tree a = builtin_decl_explicit (BUILT_IN_GOMP_ALLOC);
  gimple *g = gimple_build_call (a, 3, align, sz, allocator);
  gimple_call_set_lhs (g, allocate_ptr);
  gimple_seq_add_stmt (ilist, g);
  if (!is_ref)
    {
      /* Make NEW_VAR stand for *allocate_ptr from now on.  */
      tree x = build_simple_mem_ref (allocate_ptr);
      TREE_THIS_NOTRAP (x) = 1;
      SET_DECL_VALUE_EXPR (new_var, x);
      DECL_HAS_VALUE_EXPR_P (new_var) = 1;
    }
  return true;
}
4865
4866 /* Generate code to implement the input clauses, FIRSTPRIVATE and COPYIN,
4867 from the receiver (aka child) side and initializers for REFERENCE_TYPE
4868 private variables. Initialization statements go in ILIST, while calls
4869 to destructors go in DLIST. */
4870
4871 static void
lower_rec_input_clauses(tree clauses,gimple_seq * ilist,gimple_seq * dlist,omp_context * ctx,struct omp_for_data * fd)4872 lower_rec_input_clauses (tree clauses, gimple_seq *ilist, gimple_seq *dlist,
4873 omp_context *ctx, struct omp_for_data *fd)
4874 {
4875 tree c, copyin_seq, x, ptr;
4876 bool copyin_by_ref = false;
4877 bool lastprivate_firstprivate = false;
4878 bool reduction_omp_orig_ref = false;
4879 int pass;
4880 bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
4881 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD);
4882 omplow_simd_context sctx = omplow_simd_context ();
4883 tree simt_lane = NULL_TREE, simtrec = NULL_TREE;
4884 tree ivar = NULL_TREE, lvar = NULL_TREE, uid = NULL_TREE;
4885 gimple_seq llist[4] = { };
4886 tree nonconst_simd_if = NULL_TREE;
4887
4888 copyin_seq = NULL;
4889 sctx.is_simt = is_simd && omp_find_clause (clauses, OMP_CLAUSE__SIMT_);
4890
4891 /* Set max_vf=1 (which will later enforce safelen=1) in simd loops
4892 with data sharing clauses referencing variable sized vars. That
4893 is unnecessarily hard to support and very unlikely to result in
4894 vectorized code anyway. */
4895 if (is_simd)
4896 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
4897 switch (OMP_CLAUSE_CODE (c))
4898 {
4899 case OMP_CLAUSE_LINEAR:
4900 if (OMP_CLAUSE_LINEAR_ARRAY (c))
4901 sctx.max_vf = 1;
4902 /* FALLTHRU */
4903 case OMP_CLAUSE_PRIVATE:
4904 case OMP_CLAUSE_FIRSTPRIVATE:
4905 case OMP_CLAUSE_LASTPRIVATE:
4906 if (is_variable_sized (OMP_CLAUSE_DECL (c)))
4907 sctx.max_vf = 1;
4908 else if (omp_privatize_by_reference (OMP_CLAUSE_DECL (c)))
4909 {
4910 tree rtype = TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c)));
4911 if (!TREE_CONSTANT (TYPE_SIZE_UNIT (rtype)))
4912 sctx.max_vf = 1;
4913 }
4914 break;
4915 case OMP_CLAUSE_REDUCTION:
4916 case OMP_CLAUSE_IN_REDUCTION:
4917 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
4918 || is_variable_sized (OMP_CLAUSE_DECL (c)))
4919 sctx.max_vf = 1;
4920 else if (omp_privatize_by_reference (OMP_CLAUSE_DECL (c)))
4921 {
4922 tree rtype = TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c)));
4923 if (!TREE_CONSTANT (TYPE_SIZE_UNIT (rtype)))
4924 sctx.max_vf = 1;
4925 }
4926 break;
4927 case OMP_CLAUSE_IF:
4928 if (integer_zerop (OMP_CLAUSE_IF_EXPR (c)))
4929 sctx.max_vf = 1;
4930 else if (TREE_CODE (OMP_CLAUSE_IF_EXPR (c)) != INTEGER_CST)
4931 nonconst_simd_if = OMP_CLAUSE_IF_EXPR (c);
4932 break;
4933 case OMP_CLAUSE_SIMDLEN:
4934 if (integer_onep (OMP_CLAUSE_SIMDLEN_EXPR (c)))
4935 sctx.max_vf = 1;
4936 break;
4937 case OMP_CLAUSE__CONDTEMP_:
4938 /* FIXME: lastprivate(conditional:) not handled for SIMT yet. */
4939 if (sctx.is_simt)
4940 sctx.max_vf = 1;
4941 break;
4942 default:
4943 continue;
4944 }
4945
4946 /* Add a placeholder for simduid. */
4947 if (sctx.is_simt && maybe_ne (sctx.max_vf, 1U))
4948 sctx.simt_eargs.safe_push (NULL_TREE);
4949
4950 unsigned task_reduction_cnt = 0;
4951 unsigned task_reduction_cntorig = 0;
4952 unsigned task_reduction_cnt_full = 0;
4953 unsigned task_reduction_cntorig_full = 0;
4954 unsigned task_reduction_other_cnt = 0;
4955 tree tskred_atype = NULL_TREE, tskred_avar = NULL_TREE;
4956 tree tskred_base = NULL_TREE, tskred_temp = NULL_TREE;
4957 /* Do all the fixed sized types in the first pass, and the variable sized
4958 types in the second pass. This makes sure that the scalar arguments to
4959 the variable sized types are processed before we use them in the
4960 variable sized operations. For task reductions we use 4 passes, in the
4961 first two we ignore them, in the third one gather arguments for
4962 GOMP_task_reduction_remap call and in the last pass actually handle
4963 the task reductions. */
4964 for (pass = 0; pass < ((task_reduction_cnt || task_reduction_other_cnt)
4965 ? 4 : 2); ++pass)
4966 {
4967 if (pass == 2 && task_reduction_cnt)
4968 {
4969 tskred_atype
4970 = build_array_type_nelts (ptr_type_node, task_reduction_cnt
4971 + task_reduction_cntorig);
4972 tskred_avar = create_tmp_var_raw (tskred_atype);
4973 gimple_add_tmp_var (tskred_avar);
4974 TREE_ADDRESSABLE (tskred_avar) = 1;
4975 task_reduction_cnt_full = task_reduction_cnt;
4976 task_reduction_cntorig_full = task_reduction_cntorig;
4977 }
4978 else if (pass == 3 && task_reduction_cnt)
4979 {
4980 x = builtin_decl_explicit (BUILT_IN_GOMP_TASK_REDUCTION_REMAP);
4981 gimple *g
4982 = gimple_build_call (x, 3, size_int (task_reduction_cnt),
4983 size_int (task_reduction_cntorig),
4984 build_fold_addr_expr (tskred_avar));
4985 gimple_seq_add_stmt (ilist, g);
4986 }
4987 if (pass == 3 && task_reduction_other_cnt)
4988 {
4989 /* For reduction clauses, build
4990 tskred_base = (void *) tskred_temp[2]
4991 + omp_get_thread_num () * tskred_temp[1]
4992 or if tskred_temp[1] is known to be constant, that constant
4993 directly. This is the start of the private reduction copy block
4994 for the current thread. */
4995 tree v = create_tmp_var (integer_type_node);
4996 x = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
4997 gimple *g = gimple_build_call (x, 0);
4998 gimple_call_set_lhs (g, v);
4999 gimple_seq_add_stmt (ilist, g);
5000 c = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
5001 tskred_temp = OMP_CLAUSE_DECL (c);
5002 if (is_taskreg_ctx (ctx))
5003 tskred_temp = lookup_decl (tskred_temp, ctx);
5004 tree v2 = create_tmp_var (sizetype);
5005 g = gimple_build_assign (v2, NOP_EXPR, v);
5006 gimple_seq_add_stmt (ilist, g);
5007 if (ctx->task_reductions[0])
5008 v = fold_convert (sizetype, ctx->task_reductions[0]);
5009 else
5010 v = task_reduction_read (ilist, tskred_temp, sizetype, 1);
5011 tree v3 = create_tmp_var (sizetype);
5012 g = gimple_build_assign (v3, MULT_EXPR, v2, v);
5013 gimple_seq_add_stmt (ilist, g);
5014 v = task_reduction_read (ilist, tskred_temp, ptr_type_node, 2);
5015 tskred_base = create_tmp_var (ptr_type_node);
5016 g = gimple_build_assign (tskred_base, POINTER_PLUS_EXPR, v, v3);
5017 gimple_seq_add_stmt (ilist, g);
5018 }
5019 task_reduction_cnt = 0;
5020 task_reduction_cntorig = 0;
5021 task_reduction_other_cnt = 0;
5022 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
5023 {
5024 enum omp_clause_code c_kind = OMP_CLAUSE_CODE (c);
5025 tree var, new_var;
5026 bool by_ref;
5027 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
5028 bool task_reduction_p = false;
5029 bool task_reduction_needs_orig_p = false;
5030 tree cond = NULL_TREE;
5031 tree allocator, allocate_ptr;
5032
5033 switch (c_kind)
5034 {
5035 case OMP_CLAUSE_PRIVATE:
5036 if (OMP_CLAUSE_PRIVATE_DEBUG (c))
5037 continue;
5038 break;
5039 case OMP_CLAUSE_SHARED:
5040 /* Ignore shared directives in teams construct inside
5041 of target construct. */
5042 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
5043 && !is_host_teams_ctx (ctx))
5044 continue;
5045 if (maybe_lookup_decl (OMP_CLAUSE_DECL (c), ctx) == NULL)
5046 {
5047 gcc_assert (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c)
5048 || is_global_var (OMP_CLAUSE_DECL (c)));
5049 continue;
5050 }
5051 case OMP_CLAUSE_FIRSTPRIVATE:
5052 case OMP_CLAUSE_COPYIN:
5053 break;
5054 case OMP_CLAUSE_LINEAR:
5055 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
5056 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
5057 lastprivate_firstprivate = true;
5058 break;
5059 case OMP_CLAUSE_REDUCTION:
5060 case OMP_CLAUSE_IN_REDUCTION:
5061 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
5062 || is_task_ctx (ctx)
5063 || OMP_CLAUSE_REDUCTION_TASK (c))
5064 {
5065 task_reduction_p = true;
5066 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
5067 {
5068 task_reduction_other_cnt++;
5069 if (pass == 2)
5070 continue;
5071 }
5072 else
5073 task_reduction_cnt++;
5074 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
5075 {
5076 var = OMP_CLAUSE_DECL (c);
5077 /* If var is a global variable that isn't privatized
5078 in outer contexts, we don't need to look up the
5079 original address, it is always the address of the
5080 global variable itself. */
5081 if (!DECL_P (var)
5082 || omp_privatize_by_reference (var)
5083 || !is_global_var
5084 (maybe_lookup_decl_in_outer_ctx (var, ctx)))
5085 {
5086 task_reduction_needs_orig_p = true;
5087 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5088 task_reduction_cntorig++;
5089 }
5090 }
5091 }
5092 else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
5093 reduction_omp_orig_ref = true;
5094 break;
5095 case OMP_CLAUSE__REDUCTEMP_:
5096 if (!is_taskreg_ctx (ctx))
5097 continue;
5098 /* FALLTHRU */
5099 case OMP_CLAUSE__LOOPTEMP_:
5100 /* Handle _looptemp_/_reductemp_ clauses only on
5101 parallel/task. */
5102 if (fd)
5103 continue;
5104 break;
5105 case OMP_CLAUSE_LASTPRIVATE:
5106 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
5107 {
5108 lastprivate_firstprivate = true;
5109 if (pass != 0 || is_taskloop_ctx (ctx))
5110 continue;
5111 }
5112 /* Even without corresponding firstprivate, if
5113 decl is Fortran allocatable, it needs outer var
5114 reference. */
5115 else if (pass == 0
5116 && lang_hooks.decls.omp_private_outer_ref
5117 (OMP_CLAUSE_DECL (c)))
5118 lastprivate_firstprivate = true;
5119 break;
5120 case OMP_CLAUSE_ALIGNED:
5121 if (pass != 1)
5122 continue;
5123 var = OMP_CLAUSE_DECL (c);
5124 if (TREE_CODE (TREE_TYPE (var)) == POINTER_TYPE
5125 && !is_global_var (var))
5126 {
5127 new_var = maybe_lookup_decl (var, ctx);
5128 if (new_var == NULL_TREE)
5129 new_var = maybe_lookup_decl_in_outer_ctx (var, ctx);
5130 x = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
5131 tree alarg = omp_clause_aligned_alignment (c);
5132 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
5133 x = build_call_expr_loc (clause_loc, x, 2, new_var, alarg);
5134 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
5135 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
5136 gimplify_and_add (x, ilist);
5137 }
5138 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
5139 && is_global_var (var))
5140 {
5141 tree ptype = build_pointer_type (TREE_TYPE (var)), t, t2;
5142 new_var = lookup_decl (var, ctx);
5143 t = maybe_lookup_decl_in_outer_ctx (var, ctx);
5144 t = build_fold_addr_expr_loc (clause_loc, t);
5145 t2 = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
5146 tree alarg = omp_clause_aligned_alignment (c);
5147 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
5148 t = build_call_expr_loc (clause_loc, t2, 2, t, alarg);
5149 t = fold_convert_loc (clause_loc, ptype, t);
5150 x = create_tmp_var (ptype);
5151 t = build2 (MODIFY_EXPR, ptype, x, t);
5152 gimplify_and_add (t, ilist);
5153 t = build_simple_mem_ref_loc (clause_loc, x);
5154 SET_DECL_VALUE_EXPR (new_var, t);
5155 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5156 }
5157 continue;
5158 case OMP_CLAUSE__CONDTEMP_:
5159 if (is_parallel_ctx (ctx)
5160 || (is_simd && !OMP_CLAUSE__CONDTEMP__ITER (c)))
5161 break;
5162 continue;
5163 default:
5164 continue;
5165 }
5166
5167 if (task_reduction_p != (pass >= 2))
5168 continue;
5169
5170 allocator = NULL_TREE;
5171 allocate_ptr = NULL_TREE;
5172 new_var = var = OMP_CLAUSE_DECL (c);
5173 if ((c_kind == OMP_CLAUSE_REDUCTION
5174 || c_kind == OMP_CLAUSE_IN_REDUCTION)
5175 && TREE_CODE (var) == MEM_REF)
5176 {
5177 var = TREE_OPERAND (var, 0);
5178 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
5179 var = TREE_OPERAND (var, 0);
5180 if (TREE_CODE (var) == INDIRECT_REF
5181 || TREE_CODE (var) == ADDR_EXPR)
5182 var = TREE_OPERAND (var, 0);
5183 if (is_variable_sized (var))
5184 {
5185 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
5186 var = DECL_VALUE_EXPR (var);
5187 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
5188 var = TREE_OPERAND (var, 0);
5189 gcc_assert (DECL_P (var));
5190 }
5191 new_var = var;
5192 }
5193 if (c_kind == OMP_CLAUSE_IN_REDUCTION && is_omp_target (ctx->stmt))
5194 {
5195 splay_tree_key key = (splay_tree_key) &DECL_CONTEXT (var);
5196 new_var = (tree) splay_tree_lookup (ctx->field_map, key)->value;
5197 }
5198 else if (c_kind != OMP_CLAUSE_COPYIN)
5199 new_var = lookup_decl (var, ctx);
5200
5201 if (c_kind == OMP_CLAUSE_SHARED || c_kind == OMP_CLAUSE_COPYIN)
5202 {
5203 if (pass != 0)
5204 continue;
5205 }
5206 /* C/C++ array section reductions. */
5207 else if ((c_kind == OMP_CLAUSE_REDUCTION
5208 || c_kind == OMP_CLAUSE_IN_REDUCTION)
5209 && var != OMP_CLAUSE_DECL (c))
5210 {
5211 if (pass == 0)
5212 continue;
5213
5214 tree bias = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
5215 tree orig_var = TREE_OPERAND (OMP_CLAUSE_DECL (c), 0);
5216
5217 if (TREE_CODE (orig_var) == POINTER_PLUS_EXPR)
5218 {
5219 tree b = TREE_OPERAND (orig_var, 1);
5220 if (is_omp_target (ctx->stmt))
5221 b = NULL_TREE;
5222 else
5223 b = maybe_lookup_decl (b, ctx);
5224 if (b == NULL)
5225 {
5226 b = TREE_OPERAND (orig_var, 1);
5227 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
5228 }
5229 if (integer_zerop (bias))
5230 bias = b;
5231 else
5232 {
5233 bias = fold_convert_loc (clause_loc,
5234 TREE_TYPE (b), bias);
5235 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
5236 TREE_TYPE (b), b, bias);
5237 }
5238 orig_var = TREE_OPERAND (orig_var, 0);
5239 }
5240 if (pass == 2)
5241 {
5242 tree out = maybe_lookup_decl_in_outer_ctx (var, ctx);
5243 if (is_global_var (out)
5244 && TREE_CODE (TREE_TYPE (out)) != POINTER_TYPE
5245 && (TREE_CODE (TREE_TYPE (out)) != REFERENCE_TYPE
5246 || (TREE_CODE (TREE_TYPE (TREE_TYPE (out)))
5247 != POINTER_TYPE)))
5248 x = var;
5249 else if (is_omp_target (ctx->stmt))
5250 x = out;
5251 else
5252 {
5253 bool by_ref = use_pointer_for_field (var, NULL);
5254 x = build_receiver_ref (var, by_ref, ctx);
5255 if (TREE_CODE (TREE_TYPE (var)) == REFERENCE_TYPE
5256 && (TREE_CODE (TREE_TYPE (TREE_TYPE (var)))
5257 == POINTER_TYPE))
5258 x = build_fold_addr_expr (x);
5259 }
5260 if (TREE_CODE (orig_var) == INDIRECT_REF)
5261 x = build_simple_mem_ref (x);
5262 else if (TREE_CODE (orig_var) == ADDR_EXPR)
5263 {
5264 if (var == TREE_OPERAND (orig_var, 0))
5265 x = build_fold_addr_expr (x);
5266 }
5267 bias = fold_convert (sizetype, bias);
5268 x = fold_convert (ptr_type_node, x);
5269 x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
5270 TREE_TYPE (x), x, bias);
5271 unsigned cnt = task_reduction_cnt - 1;
5272 if (!task_reduction_needs_orig_p)
5273 cnt += (task_reduction_cntorig_full
5274 - task_reduction_cntorig);
5275 else
5276 cnt = task_reduction_cntorig - 1;
5277 tree r = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5278 size_int (cnt), NULL_TREE, NULL_TREE);
5279 gimplify_assign (r, x, ilist);
5280 continue;
5281 }
5282
5283 if (TREE_CODE (orig_var) == INDIRECT_REF
5284 || TREE_CODE (orig_var) == ADDR_EXPR)
5285 orig_var = TREE_OPERAND (orig_var, 0);
5286 tree d = OMP_CLAUSE_DECL (c);
5287 tree type = TREE_TYPE (d);
5288 gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
5289 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
5290 tree sz = v;
5291 const char *name = get_name (orig_var);
5292 if (pass != 3 && !TREE_CONSTANT (v))
5293 {
5294 tree t;
5295 if (is_omp_target (ctx->stmt))
5296 t = NULL_TREE;
5297 else
5298 t = maybe_lookup_decl (v, ctx);
5299 if (t)
5300 v = t;
5301 else
5302 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
5303 gimplify_expr (&v, ilist, NULL, is_gimple_val, fb_rvalue);
5304 t = fold_build2_loc (clause_loc, PLUS_EXPR,
5305 TREE_TYPE (v), v,
5306 build_int_cst (TREE_TYPE (v), 1));
5307 sz = fold_build2_loc (clause_loc, MULT_EXPR,
5308 TREE_TYPE (v), t,
5309 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5310 }
5311 if (pass == 3)
5312 {
5313 tree xv = create_tmp_var (ptr_type_node);
5314 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5315 {
5316 unsigned cnt = task_reduction_cnt - 1;
5317 if (!task_reduction_needs_orig_p)
5318 cnt += (task_reduction_cntorig_full
5319 - task_reduction_cntorig);
5320 else
5321 cnt = task_reduction_cntorig - 1;
5322 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5323 size_int (cnt), NULL_TREE, NULL_TREE);
5324
5325 gimple *g = gimple_build_assign (xv, x);
5326 gimple_seq_add_stmt (ilist, g);
5327 }
5328 else
5329 {
5330 unsigned int idx = *ctx->task_reduction_map->get (c);
5331 tree off;
5332 if (ctx->task_reductions[1 + idx])
5333 off = fold_convert (sizetype,
5334 ctx->task_reductions[1 + idx]);
5335 else
5336 off = task_reduction_read (ilist, tskred_temp, sizetype,
5337 7 + 3 * idx + 1);
5338 gimple *g = gimple_build_assign (xv, POINTER_PLUS_EXPR,
5339 tskred_base, off);
5340 gimple_seq_add_stmt (ilist, g);
5341 }
5342 x = fold_convert (build_pointer_type (boolean_type_node),
5343 xv);
5344 if (TREE_CONSTANT (v))
5345 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (x), x,
5346 TYPE_SIZE_UNIT (type));
5347 else
5348 {
5349 tree t;
5350 if (is_omp_target (ctx->stmt))
5351 t = NULL_TREE;
5352 else
5353 t = maybe_lookup_decl (v, ctx);
5354 if (t)
5355 v = t;
5356 else
5357 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
5358 gimplify_expr (&v, ilist, NULL, is_gimple_val,
5359 fb_rvalue);
5360 t = fold_build2_loc (clause_loc, PLUS_EXPR,
5361 TREE_TYPE (v), v,
5362 build_int_cst (TREE_TYPE (v), 1));
5363 t = fold_build2_loc (clause_loc, MULT_EXPR,
5364 TREE_TYPE (v), t,
5365 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5366 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (x), x, t);
5367 }
5368 cond = create_tmp_var (TREE_TYPE (x));
5369 gimplify_assign (cond, x, ilist);
5370 x = xv;
5371 }
5372 else if (lower_private_allocate (var, type, allocator,
5373 allocate_ptr, ilist, ctx,
5374 true,
5375 TREE_CONSTANT (v)
5376 ? TYPE_SIZE_UNIT (type)
5377 : sz))
5378 x = allocate_ptr;
5379 else if (TREE_CONSTANT (v))
5380 {
5381 x = create_tmp_var_raw (type, name);
5382 gimple_add_tmp_var (x);
5383 TREE_ADDRESSABLE (x) = 1;
5384 x = build_fold_addr_expr_loc (clause_loc, x);
5385 }
5386 else
5387 {
5388 tree atmp
5389 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
5390 tree al = size_int (TYPE_ALIGN (TREE_TYPE (type)));
5391 x = build_call_expr_loc (clause_loc, atmp, 2, sz, al);
5392 }
5393
5394 tree ptype = build_pointer_type (TREE_TYPE (type));
5395 x = fold_convert_loc (clause_loc, ptype, x);
5396 tree y = create_tmp_var (ptype, name);
5397 gimplify_assign (y, x, ilist);
5398 x = y;
5399 tree yb = y;
5400
5401 if (!integer_zerop (bias))
5402 {
5403 bias = fold_convert_loc (clause_loc, pointer_sized_int_node,
5404 bias);
5405 yb = fold_convert_loc (clause_loc, pointer_sized_int_node,
5406 x);
5407 yb = fold_build2_loc (clause_loc, MINUS_EXPR,
5408 pointer_sized_int_node, yb, bias);
5409 x = fold_convert_loc (clause_loc, TREE_TYPE (x), yb);
5410 yb = create_tmp_var (ptype, name);
5411 gimplify_assign (yb, x, ilist);
5412 x = yb;
5413 }
5414
5415 d = TREE_OPERAND (d, 0);
5416 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
5417 d = TREE_OPERAND (d, 0);
5418 if (TREE_CODE (d) == ADDR_EXPR)
5419 {
5420 if (orig_var != var)
5421 {
5422 gcc_assert (is_variable_sized (orig_var));
5423 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var),
5424 x);
5425 gimplify_assign (new_var, x, ilist);
5426 tree new_orig_var = lookup_decl (orig_var, ctx);
5427 tree t = build_fold_indirect_ref (new_var);
5428 DECL_IGNORED_P (new_var) = 0;
5429 TREE_THIS_NOTRAP (t) = 1;
5430 SET_DECL_VALUE_EXPR (new_orig_var, t);
5431 DECL_HAS_VALUE_EXPR_P (new_orig_var) = 1;
5432 }
5433 else
5434 {
5435 x = build2 (MEM_REF, TREE_TYPE (new_var), x,
5436 build_int_cst (ptype, 0));
5437 SET_DECL_VALUE_EXPR (new_var, x);
5438 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5439 }
5440 }
5441 else
5442 {
5443 gcc_assert (orig_var == var);
5444 if (TREE_CODE (d) == INDIRECT_REF)
5445 {
5446 x = create_tmp_var (ptype, name);
5447 TREE_ADDRESSABLE (x) = 1;
5448 gimplify_assign (x, yb, ilist);
5449 x = build_fold_addr_expr_loc (clause_loc, x);
5450 }
5451 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
5452 gimplify_assign (new_var, x, ilist);
5453 }
5454 /* GOMP_taskgroup_reduction_register memsets the whole
5455 array to zero. If the initializer is zero, we don't
5456 need to initialize it again, just mark it as ever
5457 used unconditionally, i.e. cond = true. */
5458 if (cond
5459 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) == NULL_TREE
5460 && initializer_zerop (omp_reduction_init (c,
5461 TREE_TYPE (type))))
5462 {
5463 gimple *g = gimple_build_assign (build_simple_mem_ref (cond),
5464 boolean_true_node);
5465 gimple_seq_add_stmt (ilist, g);
5466 continue;
5467 }
5468 tree end = create_artificial_label (UNKNOWN_LOCATION);
5469 if (cond)
5470 {
5471 gimple *g;
5472 if (!is_parallel_ctx (ctx))
5473 {
5474 tree condv = create_tmp_var (boolean_type_node);
5475 g = gimple_build_assign (condv,
5476 build_simple_mem_ref (cond));
5477 gimple_seq_add_stmt (ilist, g);
5478 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
5479 g = gimple_build_cond (NE_EXPR, condv,
5480 boolean_false_node, end, lab1);
5481 gimple_seq_add_stmt (ilist, g);
5482 gimple_seq_add_stmt (ilist, gimple_build_label (lab1));
5483 }
5484 g = gimple_build_assign (build_simple_mem_ref (cond),
5485 boolean_true_node);
5486 gimple_seq_add_stmt (ilist, g);
5487 }
5488
5489 tree y1 = create_tmp_var (ptype);
5490 gimplify_assign (y1, y, ilist);
5491 tree i2 = NULL_TREE, y2 = NULL_TREE;
5492 tree body2 = NULL_TREE, end2 = NULL_TREE;
5493 tree y3 = NULL_TREE, y4 = NULL_TREE;
5494 if (task_reduction_needs_orig_p)
5495 {
5496 y3 = create_tmp_var (ptype);
5497 tree ref;
5498 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5499 ref = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5500 size_int (task_reduction_cnt_full
5501 + task_reduction_cntorig - 1),
5502 NULL_TREE, NULL_TREE);
5503 else
5504 {
5505 unsigned int idx = *ctx->task_reduction_map->get (c);
5506 ref = task_reduction_read (ilist, tskred_temp, ptype,
5507 7 + 3 * idx);
5508 }
5509 gimplify_assign (y3, ref, ilist);
5510 }
5511 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) || is_simd)
5512 {
5513 if (pass != 3)
5514 {
5515 y2 = create_tmp_var (ptype);
5516 gimplify_assign (y2, y, ilist);
5517 }
5518 if (is_simd || OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
5519 {
5520 tree ref = build_outer_var_ref (var, ctx);
5521 /* For ref build_outer_var_ref already performs this. */
5522 if (TREE_CODE (d) == INDIRECT_REF)
5523 gcc_assert (omp_privatize_by_reference (var));
5524 else if (TREE_CODE (d) == ADDR_EXPR)
5525 ref = build_fold_addr_expr (ref);
5526 else if (omp_privatize_by_reference (var))
5527 ref = build_fold_addr_expr (ref);
5528 ref = fold_convert_loc (clause_loc, ptype, ref);
5529 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
5530 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
5531 {
5532 y3 = create_tmp_var (ptype);
5533 gimplify_assign (y3, unshare_expr (ref), ilist);
5534 }
5535 if (is_simd)
5536 {
5537 y4 = create_tmp_var (ptype);
5538 gimplify_assign (y4, ref, dlist);
5539 }
5540 }
5541 }
5542 tree i = create_tmp_var (TREE_TYPE (v));
5543 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), ilist);
5544 tree body = create_artificial_label (UNKNOWN_LOCATION);
5545 gimple_seq_add_stmt (ilist, gimple_build_label (body));
5546 if (y2)
5547 {
5548 i2 = create_tmp_var (TREE_TYPE (v));
5549 gimplify_assign (i2, build_int_cst (TREE_TYPE (v), 0), dlist);
5550 body2 = create_artificial_label (UNKNOWN_LOCATION);
5551 end2 = create_artificial_label (UNKNOWN_LOCATION);
5552 gimple_seq_add_stmt (dlist, gimple_build_label (body2));
5553 }
5554 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5555 {
5556 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
5557 tree decl_placeholder
5558 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
5559 SET_DECL_VALUE_EXPR (decl_placeholder,
5560 build_simple_mem_ref (y1));
5561 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
5562 SET_DECL_VALUE_EXPR (placeholder,
5563 y3 ? build_simple_mem_ref (y3)
5564 : error_mark_node);
5565 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
5566 x = lang_hooks.decls.omp_clause_default_ctor
5567 (c, build_simple_mem_ref (y1),
5568 y3 ? build_simple_mem_ref (y3) : NULL_TREE);
5569 if (x)
5570 gimplify_and_add (x, ilist);
5571 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
5572 {
5573 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
5574 lower_omp (&tseq, ctx);
5575 gimple_seq_add_seq (ilist, tseq);
5576 }
5577 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
5578 if (is_simd)
5579 {
5580 SET_DECL_VALUE_EXPR (decl_placeholder,
5581 build_simple_mem_ref (y2));
5582 SET_DECL_VALUE_EXPR (placeholder,
5583 build_simple_mem_ref (y4));
5584 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
5585 lower_omp (&tseq, ctx);
5586 gimple_seq_add_seq (dlist, tseq);
5587 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5588 }
5589 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
5590 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 0;
5591 if (y2)
5592 {
5593 x = lang_hooks.decls.omp_clause_dtor
5594 (c, build_simple_mem_ref (y2));
5595 if (x)
5596 gimplify_and_add (x, dlist);
5597 }
5598 }
5599 else
5600 {
5601 x = omp_reduction_init (c, TREE_TYPE (type));
5602 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
5603
5604 /* reduction(-:var) sums up the partial results, so it
5605 acts identically to reduction(+:var). */
5606 if (code == MINUS_EXPR)
5607 code = PLUS_EXPR;
5608
5609 gimplify_assign (build_simple_mem_ref (y1), x, ilist);
5610 if (is_simd)
5611 {
5612 x = build2 (code, TREE_TYPE (type),
5613 build_simple_mem_ref (y4),
5614 build_simple_mem_ref (y2));
5615 gimplify_assign (build_simple_mem_ref (y4), x, dlist);
5616 }
5617 }
5618 gimple *g
5619 = gimple_build_assign (y1, POINTER_PLUS_EXPR, y1,
5620 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5621 gimple_seq_add_stmt (ilist, g);
5622 if (y3)
5623 {
5624 g = gimple_build_assign (y3, POINTER_PLUS_EXPR, y3,
5625 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5626 gimple_seq_add_stmt (ilist, g);
5627 }
5628 g = gimple_build_assign (i, PLUS_EXPR, i,
5629 build_int_cst (TREE_TYPE (i), 1));
5630 gimple_seq_add_stmt (ilist, g);
5631 g = gimple_build_cond (LE_EXPR, i, v, body, end);
5632 gimple_seq_add_stmt (ilist, g);
5633 gimple_seq_add_stmt (ilist, gimple_build_label (end));
5634 if (y2)
5635 {
5636 g = gimple_build_assign (y2, POINTER_PLUS_EXPR, y2,
5637 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5638 gimple_seq_add_stmt (dlist, g);
5639 if (y4)
5640 {
5641 g = gimple_build_assign
5642 (y4, POINTER_PLUS_EXPR, y4,
5643 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5644 gimple_seq_add_stmt (dlist, g);
5645 }
5646 g = gimple_build_assign (i2, PLUS_EXPR, i2,
5647 build_int_cst (TREE_TYPE (i2), 1));
5648 gimple_seq_add_stmt (dlist, g);
5649 g = gimple_build_cond (LE_EXPR, i2, v, body2, end2);
5650 gimple_seq_add_stmt (dlist, g);
5651 gimple_seq_add_stmt (dlist, gimple_build_label (end2));
5652 }
5653 if (allocator)
5654 {
5655 tree f = builtin_decl_explicit (BUILT_IN_GOMP_FREE);
5656 g = gimple_build_call (f, 2, allocate_ptr, allocator);
5657 gimple_seq_add_stmt (dlist, g);
5658 }
5659 continue;
5660 }
5661 else if (pass == 2)
5662 {
5663 tree out = maybe_lookup_decl_in_outer_ctx (var, ctx);
5664 if (is_global_var (out))
5665 x = var;
5666 else if (is_omp_target (ctx->stmt))
5667 x = out;
5668 else
5669 {
5670 bool by_ref = use_pointer_for_field (var, ctx);
5671 x = build_receiver_ref (var, by_ref, ctx);
5672 }
5673 if (!omp_privatize_by_reference (var))
5674 x = build_fold_addr_expr (x);
5675 x = fold_convert (ptr_type_node, x);
5676 unsigned cnt = task_reduction_cnt - 1;
5677 if (!task_reduction_needs_orig_p)
5678 cnt += task_reduction_cntorig_full - task_reduction_cntorig;
5679 else
5680 cnt = task_reduction_cntorig - 1;
5681 tree r = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5682 size_int (cnt), NULL_TREE, NULL_TREE);
5683 gimplify_assign (r, x, ilist);
5684 continue;
5685 }
5686 else if (pass == 3)
5687 {
5688 tree type = TREE_TYPE (new_var);
5689 if (!omp_privatize_by_reference (var))
5690 type = build_pointer_type (type);
5691 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5692 {
5693 unsigned cnt = task_reduction_cnt - 1;
5694 if (!task_reduction_needs_orig_p)
5695 cnt += (task_reduction_cntorig_full
5696 - task_reduction_cntorig);
5697 else
5698 cnt = task_reduction_cntorig - 1;
5699 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5700 size_int (cnt), NULL_TREE, NULL_TREE);
5701 }
5702 else
5703 {
5704 unsigned int idx = *ctx->task_reduction_map->get (c);
5705 tree off;
5706 if (ctx->task_reductions[1 + idx])
5707 off = fold_convert (sizetype,
5708 ctx->task_reductions[1 + idx]);
5709 else
5710 off = task_reduction_read (ilist, tskred_temp, sizetype,
5711 7 + 3 * idx + 1);
5712 x = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node,
5713 tskred_base, off);
5714 }
5715 x = fold_convert (type, x);
5716 tree t;
5717 if (omp_privatize_by_reference (var))
5718 {
5719 gimplify_assign (new_var, x, ilist);
5720 t = new_var;
5721 new_var = build_simple_mem_ref (new_var);
5722 }
5723 else
5724 {
5725 t = create_tmp_var (type);
5726 gimplify_assign (t, x, ilist);
5727 SET_DECL_VALUE_EXPR (new_var, build_simple_mem_ref (t));
5728 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5729 }
5730 t = fold_convert (build_pointer_type (boolean_type_node), t);
5731 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t,
5732 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5733 cond = create_tmp_var (TREE_TYPE (t));
5734 gimplify_assign (cond, t, ilist);
5735 }
5736 else if (is_variable_sized (var))
5737 {
5738 /* For variable sized types, we need to allocate the
5739 actual storage here. Call alloca and store the
5740 result in the pointer decl that we created elsewhere. */
5741 if (pass == 0)
5742 continue;
5743
5744 if (c_kind != OMP_CLAUSE_FIRSTPRIVATE || !is_task_ctx (ctx))
5745 {
5746 tree tmp;
5747
5748 ptr = DECL_VALUE_EXPR (new_var);
5749 gcc_assert (TREE_CODE (ptr) == INDIRECT_REF);
5750 ptr = TREE_OPERAND (ptr, 0);
5751 gcc_assert (DECL_P (ptr));
5752 x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
5753
5754 if (lower_private_allocate (var, new_var, allocator,
5755 allocate_ptr, ilist, ctx,
5756 false, x))
5757 tmp = allocate_ptr;
5758 else
5759 {
5760 /* void *tmp = __builtin_alloca */
5761 tree atmp
5762 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
5763 gcall *stmt
5764 = gimple_build_call (atmp, 2, x,
5765 size_int (DECL_ALIGN (var)));
5766 cfun->calls_alloca = 1;
5767 tmp = create_tmp_var_raw (ptr_type_node);
5768 gimple_add_tmp_var (tmp);
5769 gimple_call_set_lhs (stmt, tmp);
5770
5771 gimple_seq_add_stmt (ilist, stmt);
5772 }
5773
5774 x = fold_convert_loc (clause_loc, TREE_TYPE (ptr), tmp);
5775 gimplify_assign (ptr, x, ilist);
5776 }
5777 }
5778 else if (omp_privatize_by_reference (var)
5779 && (c_kind != OMP_CLAUSE_FIRSTPRIVATE
5780 || !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c)))
5781 {
5782 /* For references that are being privatized for Fortran,
5783 allocate new backing storage for the new pointer
5784 variable. This allows us to avoid changing all the
5785 code that expects a pointer to something that expects
5786 a direct variable. */
5787 if (pass == 0)
5788 continue;
5789
5790 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
5791 if (c_kind == OMP_CLAUSE_FIRSTPRIVATE && is_task_ctx (ctx))
5792 {
5793 x = build_receiver_ref (var, false, ctx);
5794 if (ctx->allocate_map)
5795 if (tree *allocatep = ctx->allocate_map->get (var))
5796 {
5797 allocator = *allocatep;
5798 if (TREE_CODE (allocator) == TREE_LIST)
5799 allocator = TREE_PURPOSE (allocator);
5800 if (TREE_CODE (allocator) != INTEGER_CST)
5801 allocator = build_outer_var_ref (allocator, ctx);
5802 allocator = fold_convert (pointer_sized_int_node,
5803 allocator);
5804 allocate_ptr = unshare_expr (x);
5805 }
5806 if (allocator == NULL_TREE)
5807 x = build_fold_addr_expr_loc (clause_loc, x);
5808 }
5809 else if (lower_private_allocate (var, new_var, allocator,
5810 allocate_ptr,
5811 ilist, ctx, true, x))
5812 x = allocate_ptr;
5813 else if (TREE_CONSTANT (x))
5814 {
5815 /* For reduction in SIMD loop, defer adding the
5816 initialization of the reference, because if we decide
5817 		     to use SIMD array for it, the initialization could cause
5818 expansion ICE. Ditto for other privatization clauses. */
5819 if (is_simd)
5820 x = NULL_TREE;
5821 else
5822 {
5823 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
5824 get_name (var));
5825 gimple_add_tmp_var (x);
5826 TREE_ADDRESSABLE (x) = 1;
5827 x = build_fold_addr_expr_loc (clause_loc, x);
5828 }
5829 }
5830 else
5831 {
5832 tree atmp
5833 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
5834 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
5835 tree al = size_int (TYPE_ALIGN (rtype));
5836 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
5837 }
5838
5839 if (x)
5840 {
5841 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
5842 gimplify_assign (new_var, x, ilist);
5843 }
5844
5845 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
5846 }
5847 else if ((c_kind == OMP_CLAUSE_REDUCTION
5848 || c_kind == OMP_CLAUSE_IN_REDUCTION)
5849 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5850 {
5851 if (pass == 0)
5852 continue;
5853 }
5854 else if (pass != 0)
5855 continue;
5856
5857 switch (OMP_CLAUSE_CODE (c))
5858 {
5859 case OMP_CLAUSE_SHARED:
5860 /* Ignore shared directives in teams construct inside
5861 target construct. */
5862 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
5863 && !is_host_teams_ctx (ctx))
5864 continue;
5865 /* Shared global vars are just accessed directly. */
5866 if (is_global_var (new_var))
5867 break;
5868 /* For taskloop firstprivate/lastprivate, represented
5869 as firstprivate and shared clause on the task, new_var
5870 is the firstprivate var. */
5871 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
5872 break;
5873 /* Set up the DECL_VALUE_EXPR for shared variables now. This
5874 needs to be delayed until after fixup_child_record_type so
5875 that we get the correct type during the dereference. */
5876 by_ref = use_pointer_for_field (var, ctx);
5877 x = build_receiver_ref (var, by_ref, ctx);
5878 SET_DECL_VALUE_EXPR (new_var, x);
5879 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5880
5881 /* ??? If VAR is not passed by reference, and the variable
5882 hasn't been initialized yet, then we'll get a warning for
5883 the store into the omp_data_s structure. Ideally, we'd be
5884 able to notice this and not store anything at all, but
5885 we're generating code too early. Suppress the warning. */
5886 if (!by_ref)
5887 suppress_warning (var, OPT_Wuninitialized);
5888 break;
5889
5890 case OMP_CLAUSE__CONDTEMP_:
5891 if (is_parallel_ctx (ctx))
5892 {
5893 x = build_receiver_ref (var, false, ctx);
5894 SET_DECL_VALUE_EXPR (new_var, x);
5895 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5896 }
5897 else if (is_simd && !OMP_CLAUSE__CONDTEMP__ITER (c))
5898 {
5899 x = build_zero_cst (TREE_TYPE (var));
5900 goto do_private;
5901 }
5902 break;
5903
5904 case OMP_CLAUSE_LASTPRIVATE:
5905 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
5906 break;
5907 /* FALLTHRU */
5908
5909 case OMP_CLAUSE_PRIVATE:
5910 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE)
5911 x = build_outer_var_ref (var, ctx);
5912 else if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
5913 {
5914 if (is_task_ctx (ctx))
5915 x = build_receiver_ref (var, false, ctx);
5916 else
5917 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_PRIVATE);
5918 }
5919 else
5920 x = NULL;
5921 do_private:
5922 tree nx;
5923 bool copy_ctor;
5924 copy_ctor = false;
5925 lower_private_allocate (var, new_var, allocator, allocate_ptr,
5926 ilist, ctx, false, NULL_TREE);
5927 nx = unshare_expr (new_var);
5928 if (is_simd
5929 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5930 && OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c))
5931 copy_ctor = true;
5932 if (copy_ctor)
5933 nx = lang_hooks.decls.omp_clause_copy_ctor (c, nx, x);
5934 else
5935 nx = lang_hooks.decls.omp_clause_default_ctor (c, nx, x);
5936 if (is_simd)
5937 {
5938 tree y = lang_hooks.decls.omp_clause_dtor (c, new_var);
5939 if ((TREE_ADDRESSABLE (new_var) || nx || y
5940 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5941 && (gimple_omp_for_collapse (ctx->stmt) != 1
5942 || (gimple_omp_for_index (ctx->stmt, 0)
5943 != new_var)))
5944 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE__CONDTEMP_
5945 || omp_privatize_by_reference (var))
5946 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
5947 ivar, lvar))
5948 {
5949 if (omp_privatize_by_reference (var))
5950 {
5951 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5952 tree new_vard = TREE_OPERAND (new_var, 0);
5953 gcc_assert (DECL_P (new_vard));
5954 SET_DECL_VALUE_EXPR (new_vard,
5955 build_fold_addr_expr (lvar));
5956 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
5957 }
5958
5959 if (nx)
5960 {
5961 tree iv = unshare_expr (ivar);
5962 if (copy_ctor)
5963 x = lang_hooks.decls.omp_clause_copy_ctor (c, iv,
5964 x);
5965 else
5966 x = lang_hooks.decls.omp_clause_default_ctor (c,
5967 iv,
5968 x);
5969 }
5970 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE__CONDTEMP_)
5971 {
5972 x = build2 (MODIFY_EXPR, TREE_TYPE (ivar),
5973 unshare_expr (ivar), x);
5974 nx = x;
5975 }
5976 if (nx && x)
5977 gimplify_and_add (x, &llist[0]);
5978 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5979 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
5980 {
5981 tree v = new_var;
5982 if (!DECL_P (v))
5983 {
5984 gcc_assert (TREE_CODE (v) == MEM_REF);
5985 v = TREE_OPERAND (v, 0);
5986 gcc_assert (DECL_P (v));
5987 }
5988 v = *ctx->lastprivate_conditional_map->get (v);
5989 tree t = create_tmp_var (TREE_TYPE (v));
5990 tree z = build_zero_cst (TREE_TYPE (v));
5991 tree orig_v
5992 = build_outer_var_ref (var, ctx,
5993 OMP_CLAUSE_LASTPRIVATE);
5994 gimple_seq_add_stmt (dlist,
5995 gimple_build_assign (t, z));
5996 gcc_assert (DECL_HAS_VALUE_EXPR_P (v));
5997 tree civar = DECL_VALUE_EXPR (v);
5998 gcc_assert (TREE_CODE (civar) == ARRAY_REF);
5999 civar = unshare_expr (civar);
6000 TREE_OPERAND (civar, 1) = sctx.idx;
6001 x = build2 (MODIFY_EXPR, TREE_TYPE (t), t,
6002 unshare_expr (civar));
6003 x = build2 (COMPOUND_EXPR, TREE_TYPE (orig_v), x,
6004 build2 (MODIFY_EXPR, TREE_TYPE (orig_v),
6005 orig_v, unshare_expr (ivar)));
6006 tree cond = build2 (LT_EXPR, boolean_type_node, t,
6007 civar);
6008 x = build3 (COND_EXPR, void_type_node, cond, x,
6009 void_node);
6010 gimple_seq tseq = NULL;
6011 gimplify_and_add (x, &tseq);
6012 if (ctx->outer)
6013 lower_omp (&tseq, ctx->outer);
6014 gimple_seq_add_seq (&llist[1], tseq);
6015 }
6016 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6017 && ctx->for_simd_scan_phase)
6018 {
6019 x = unshare_expr (ivar);
6020 tree orig_v
6021 = build_outer_var_ref (var, ctx,
6022 OMP_CLAUSE_LASTPRIVATE);
6023 x = lang_hooks.decls.omp_clause_assign_op (c, x,
6024 orig_v);
6025 gimplify_and_add (x, &llist[0]);
6026 }
6027 if (y)
6028 {
6029 y = lang_hooks.decls.omp_clause_dtor (c, ivar);
6030 if (y)
6031 gimplify_and_add (y, &llist[1]);
6032 }
6033 break;
6034 }
6035 if (omp_privatize_by_reference (var))
6036 {
6037 gcc_assert (TREE_CODE (new_var) == MEM_REF);
6038 tree new_vard = TREE_OPERAND (new_var, 0);
6039 gcc_assert (DECL_P (new_vard));
6040 tree type = TREE_TYPE (TREE_TYPE (new_vard));
6041 x = TYPE_SIZE_UNIT (type);
6042 if (TREE_CONSTANT (x))
6043 {
6044 x = create_tmp_var_raw (type, get_name (var));
6045 gimple_add_tmp_var (x);
6046 TREE_ADDRESSABLE (x) = 1;
6047 x = build_fold_addr_expr_loc (clause_loc, x);
6048 x = fold_convert_loc (clause_loc,
6049 TREE_TYPE (new_vard), x);
6050 gimplify_assign (new_vard, x, ilist);
6051 }
6052 }
6053 }
6054 if (nx)
6055 gimplify_and_add (nx, ilist);
6056 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6057 && is_simd
6058 && ctx->for_simd_scan_phase)
6059 {
6060 tree orig_v = build_outer_var_ref (var, ctx,
6061 OMP_CLAUSE_LASTPRIVATE);
6062 x = lang_hooks.decls.omp_clause_assign_op (c, new_var,
6063 orig_v);
6064 gimplify_and_add (x, ilist);
6065 }
6066 /* FALLTHRU */
6067
6068 do_dtor:
6069 x = lang_hooks.decls.omp_clause_dtor (c, new_var);
6070 if (x)
6071 gimplify_and_add (x, dlist);
6072 if (allocator)
6073 {
6074 if (!is_gimple_val (allocator))
6075 {
6076 tree avar = create_tmp_var (TREE_TYPE (allocator));
6077 gimplify_assign (avar, allocator, dlist);
6078 allocator = avar;
6079 }
6080 if (!is_gimple_val (allocate_ptr))
6081 {
6082 tree apvar = create_tmp_var (TREE_TYPE (allocate_ptr));
6083 gimplify_assign (apvar, allocate_ptr, dlist);
6084 allocate_ptr = apvar;
6085 }
6086 tree f = builtin_decl_explicit (BUILT_IN_GOMP_FREE);
6087 gimple *g
6088 = gimple_build_call (f, 2, allocate_ptr, allocator);
6089 gimple_seq_add_stmt (dlist, g);
6090 }
6091 break;
6092
6093 case OMP_CLAUSE_LINEAR:
6094 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
6095 goto do_firstprivate;
6096 if (OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
6097 x = NULL;
6098 else
6099 x = build_outer_var_ref (var, ctx);
6100 goto do_private;
6101
6102 case OMP_CLAUSE_FIRSTPRIVATE:
6103 if (is_task_ctx (ctx))
6104 {
6105 if ((omp_privatize_by_reference (var)
6106 && !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c))
6107 || is_variable_sized (var))
6108 goto do_dtor;
6109 else if (is_global_var (maybe_lookup_decl_in_outer_ctx (var,
6110 ctx))
6111 || use_pointer_for_field (var, NULL))
6112 {
6113 x = build_receiver_ref (var, false, ctx);
6114 if (ctx->allocate_map)
6115 if (tree *allocatep = ctx->allocate_map->get (var))
6116 {
6117 allocator = *allocatep;
6118 if (TREE_CODE (allocator) == TREE_LIST)
6119 allocator = TREE_PURPOSE (allocator);
6120 if (TREE_CODE (allocator) != INTEGER_CST)
6121 allocator = build_outer_var_ref (allocator, ctx);
6122 allocator = fold_convert (pointer_sized_int_node,
6123 allocator);
6124 allocate_ptr = unshare_expr (x);
6125 x = build_simple_mem_ref (x);
6126 TREE_THIS_NOTRAP (x) = 1;
6127 }
6128 SET_DECL_VALUE_EXPR (new_var, x);
6129 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
6130 goto do_dtor;
6131 }
6132 }
6133 if (OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c)
6134 && omp_privatize_by_reference (var))
6135 {
6136 x = build_outer_var_ref (var, ctx);
6137 gcc_assert (TREE_CODE (x) == MEM_REF
6138 && integer_zerop (TREE_OPERAND (x, 1)));
6139 x = TREE_OPERAND (x, 0);
6140 x = lang_hooks.decls.omp_clause_copy_ctor
6141 (c, unshare_expr (new_var), x);
6142 gimplify_and_add (x, ilist);
6143 goto do_dtor;
6144 }
6145 do_firstprivate:
6146 lower_private_allocate (var, new_var, allocator, allocate_ptr,
6147 ilist, ctx, false, NULL_TREE);
6148 x = build_outer_var_ref (var, ctx);
6149 if (is_simd)
6150 {
6151 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
6152 && gimple_omp_for_combined_into_p (ctx->stmt))
6153 {
6154 tree t = OMP_CLAUSE_LINEAR_STEP (c);
6155 if (DECL_P (t))
6156 t = build_outer_var_ref (t, ctx);
6157 tree stept = TREE_TYPE (t);
6158 tree ct = omp_find_clause (clauses,
6159 OMP_CLAUSE__LOOPTEMP_);
6160 gcc_assert (ct);
6161 tree l = OMP_CLAUSE_DECL (ct);
6162 tree n1 = fd->loop.n1;
6163 tree step = fd->loop.step;
6164 tree itype = TREE_TYPE (l);
6165 if (POINTER_TYPE_P (itype))
6166 itype = signed_type_for (itype);
6167 l = fold_build2 (MINUS_EXPR, itype, l, n1);
6168 if (TYPE_UNSIGNED (itype)
6169 && fd->loop.cond_code == GT_EXPR)
6170 l = fold_build2 (TRUNC_DIV_EXPR, itype,
6171 fold_build1 (NEGATE_EXPR, itype, l),
6172 fold_build1 (NEGATE_EXPR,
6173 itype, step));
6174 else
6175 l = fold_build2 (TRUNC_DIV_EXPR, itype, l, step);
6176 t = fold_build2 (MULT_EXPR, stept,
6177 fold_convert (stept, l), t);
6178
6179 if (OMP_CLAUSE_LINEAR_ARRAY (c))
6180 {
6181 if (omp_privatize_by_reference (var))
6182 {
6183 gcc_assert (TREE_CODE (new_var) == MEM_REF);
6184 tree new_vard = TREE_OPERAND (new_var, 0);
6185 gcc_assert (DECL_P (new_vard));
6186 tree type = TREE_TYPE (TREE_TYPE (new_vard));
6187 nx = TYPE_SIZE_UNIT (type);
6188 if (TREE_CONSTANT (nx))
6189 {
6190 nx = create_tmp_var_raw (type,
6191 get_name (var));
6192 gimple_add_tmp_var (nx);
6193 TREE_ADDRESSABLE (nx) = 1;
6194 nx = build_fold_addr_expr_loc (clause_loc,
6195 nx);
6196 nx = fold_convert_loc (clause_loc,
6197 TREE_TYPE (new_vard),
6198 nx);
6199 gimplify_assign (new_vard, nx, ilist);
6200 }
6201 }
6202
6203 x = lang_hooks.decls.omp_clause_linear_ctor
6204 (c, new_var, x, t);
6205 gimplify_and_add (x, ilist);
6206 goto do_dtor;
6207 }
6208
6209 if (POINTER_TYPE_P (TREE_TYPE (x)))
6210 x = fold_build_pointer_plus (x, t);
6211 else
6212 x = fold_build2 (PLUS_EXPR, TREE_TYPE (x), x,
6213 fold_convert (TREE_TYPE (x), t));
6214 }
6215
6216 if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_LINEAR
6217 || TREE_ADDRESSABLE (new_var)
6218 || omp_privatize_by_reference (var))
6219 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
6220 ivar, lvar))
6221 {
6222 if (omp_privatize_by_reference (var))
6223 {
6224 gcc_assert (TREE_CODE (new_var) == MEM_REF);
6225 tree new_vard = TREE_OPERAND (new_var, 0);
6226 gcc_assert (DECL_P (new_vard));
6227 SET_DECL_VALUE_EXPR (new_vard,
6228 build_fold_addr_expr (lvar));
6229 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
6230 }
6231 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR)
6232 {
6233 tree iv = create_tmp_var (TREE_TYPE (new_var));
6234 x = lang_hooks.decls.omp_clause_copy_ctor (c, iv, x);
6235 gimplify_and_add (x, ilist);
6236 gimple_stmt_iterator gsi
6237 = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
6238 gassign *g
6239 = gimple_build_assign (unshare_expr (lvar), iv);
6240 gsi_insert_before_without_update (&gsi, g,
6241 GSI_SAME_STMT);
6242 tree t = OMP_CLAUSE_LINEAR_STEP (c);
6243 enum tree_code code = PLUS_EXPR;
6244 if (POINTER_TYPE_P (TREE_TYPE (new_var)))
6245 code = POINTER_PLUS_EXPR;
6246 g = gimple_build_assign (iv, code, iv, t);
6247 gsi_insert_before_without_update (&gsi, g,
6248 GSI_SAME_STMT);
6249 break;
6250 }
6251 x = lang_hooks.decls.omp_clause_copy_ctor
6252 (c, unshare_expr (ivar), x);
6253 gimplify_and_add (x, &llist[0]);
6254 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
6255 if (x)
6256 gimplify_and_add (x, &llist[1]);
6257 break;
6258 }
6259 if (omp_privatize_by_reference (var))
6260 {
6261 gcc_assert (TREE_CODE (new_var) == MEM_REF);
6262 tree new_vard = TREE_OPERAND (new_var, 0);
6263 gcc_assert (DECL_P (new_vard));
6264 tree type = TREE_TYPE (TREE_TYPE (new_vard));
6265 nx = TYPE_SIZE_UNIT (type);
6266 if (TREE_CONSTANT (nx))
6267 {
6268 nx = create_tmp_var_raw (type, get_name (var));
6269 gimple_add_tmp_var (nx);
6270 TREE_ADDRESSABLE (nx) = 1;
6271 nx = build_fold_addr_expr_loc (clause_loc, nx);
6272 nx = fold_convert_loc (clause_loc,
6273 TREE_TYPE (new_vard), nx);
6274 gimplify_assign (new_vard, nx, ilist);
6275 }
6276 }
6277 }
6278 x = lang_hooks.decls.omp_clause_copy_ctor
6279 (c, unshare_expr (new_var), x);
6280 gimplify_and_add (x, ilist);
6281 goto do_dtor;
6282
6283 case OMP_CLAUSE__LOOPTEMP_:
6284 case OMP_CLAUSE__REDUCTEMP_:
6285 gcc_assert (is_taskreg_ctx (ctx));
6286 x = build_outer_var_ref (var, ctx);
6287 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
6288 gimplify_and_add (x, ilist);
6289 break;
6290
6291 case OMP_CLAUSE_COPYIN:
6292 by_ref = use_pointer_for_field (var, NULL);
6293 x = build_receiver_ref (var, by_ref, ctx);
6294 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, x);
6295 append_to_statement_list (x, ©in_seq);
6296 copyin_by_ref |= by_ref;
6297 break;
6298
6299 case OMP_CLAUSE_REDUCTION:
6300 case OMP_CLAUSE_IN_REDUCTION:
6301 /* OpenACC reductions are initialized using the
6302 GOACC_REDUCTION internal function. */
6303 if (is_gimple_omp_oacc (ctx->stmt))
6304 break;
6305 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
6306 {
6307 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
6308 gimple *tseq;
6309 tree ptype = TREE_TYPE (placeholder);
6310 if (cond)
6311 {
6312 x = error_mark_node;
6313 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c)
6314 && !task_reduction_needs_orig_p)
6315 x = var;
6316 else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
6317 {
6318 tree pptype = build_pointer_type (ptype);
6319 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
6320 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
6321 size_int (task_reduction_cnt_full
6322 + task_reduction_cntorig - 1),
6323 NULL_TREE, NULL_TREE);
6324 else
6325 {
6326 unsigned int idx
6327 = *ctx->task_reduction_map->get (c);
6328 x = task_reduction_read (ilist, tskred_temp,
6329 pptype, 7 + 3 * idx);
6330 }
6331 x = fold_convert (pptype, x);
6332 x = build_simple_mem_ref (x);
6333 }
6334 }
6335 else
6336 {
6337 lower_private_allocate (var, new_var, allocator,
6338 allocate_ptr, ilist, ctx, false,
6339 NULL_TREE);
6340 x = build_outer_var_ref (var, ctx);
6341
6342 if (omp_privatize_by_reference (var)
6343 && !useless_type_conversion_p (ptype, TREE_TYPE (x)))
6344 x = build_fold_addr_expr_loc (clause_loc, x);
6345 }
6346 SET_DECL_VALUE_EXPR (placeholder, x);
6347 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
6348 tree new_vard = new_var;
6349 if (omp_privatize_by_reference (var))
6350 {
6351 gcc_assert (TREE_CODE (new_var) == MEM_REF);
6352 new_vard = TREE_OPERAND (new_var, 0);
6353 gcc_assert (DECL_P (new_vard));
6354 }
6355 tree rvar = NULL_TREE, *rvarp = NULL, rvar2 = NULL_TREE;
6356 if (is_simd
6357 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6358 && OMP_CLAUSE_REDUCTION_INSCAN (c))
6359 rvarp = &rvar;
6360 if (is_simd
6361 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
6362 ivar, lvar, rvarp,
6363 &rvar2))
6364 {
6365 if (new_vard == new_var)
6366 {
6367 gcc_assert (DECL_VALUE_EXPR (new_var) == lvar);
6368 SET_DECL_VALUE_EXPR (new_var, ivar);
6369 }
6370 else
6371 {
6372 SET_DECL_VALUE_EXPR (new_vard,
6373 build_fold_addr_expr (ivar));
6374 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
6375 }
6376 x = lang_hooks.decls.omp_clause_default_ctor
6377 (c, unshare_expr (ivar),
6378 build_outer_var_ref (var, ctx));
6379 if (rvarp && ctx->for_simd_scan_phase)
6380 {
6381 if (x)
6382 gimplify_and_add (x, &llist[0]);
6383 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
6384 if (x)
6385 gimplify_and_add (x, &llist[1]);
6386 break;
6387 }
6388 else if (rvarp)
6389 {
6390 if (x)
6391 {
6392 gimplify_and_add (x, &llist[0]);
6393
6394 tree ivar2 = unshare_expr (lvar);
6395 TREE_OPERAND (ivar2, 1) = sctx.idx;
6396 x = lang_hooks.decls.omp_clause_default_ctor
6397 (c, ivar2, build_outer_var_ref (var, ctx));
6398 gimplify_and_add (x, &llist[0]);
6399
6400 if (rvar2)
6401 {
6402 x = lang_hooks.decls.omp_clause_default_ctor
6403 (c, unshare_expr (rvar2),
6404 build_outer_var_ref (var, ctx));
6405 gimplify_and_add (x, &llist[0]);
6406 }
6407
6408 /* For types that need construction, add another
6409 private var which will be default constructed
6410 and optionally initialized with
6411 OMP_CLAUSE_REDUCTION_GIMPLE_INIT, as in the
6412 loop we want to assign this value instead of
6413 constructing and destructing it in each
6414 iteration. */
6415 tree nv = create_tmp_var_raw (TREE_TYPE (ivar));
6416 gimple_add_tmp_var (nv);
6417 ctx->cb.decl_map->put (TREE_OPERAND (rvar2
6418 ? rvar2
6419 : ivar, 0),
6420 nv);
6421 x = lang_hooks.decls.omp_clause_default_ctor
6422 (c, nv, build_outer_var_ref (var, ctx));
6423 gimplify_and_add (x, ilist);
6424
6425 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
6426 {
6427 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
6428 x = DECL_VALUE_EXPR (new_vard);
6429 tree vexpr = nv;
6430 if (new_vard != new_var)
6431 vexpr = build_fold_addr_expr (nv);
6432 SET_DECL_VALUE_EXPR (new_vard, vexpr);
6433 lower_omp (&tseq, ctx);
6434 SET_DECL_VALUE_EXPR (new_vard, x);
6435 gimple_seq_add_seq (ilist, tseq);
6436 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
6437 }
6438
6439 x = lang_hooks.decls.omp_clause_dtor (c, nv);
6440 if (x)
6441 gimplify_and_add (x, dlist);
6442 }
6443
6444 tree ref = build_outer_var_ref (var, ctx);
6445 x = unshare_expr (ivar);
6446 x = lang_hooks.decls.omp_clause_assign_op (c, x,
6447 ref);
6448 gimplify_and_add (x, &llist[0]);
6449
6450 ref = build_outer_var_ref (var, ctx);
6451 x = lang_hooks.decls.omp_clause_assign_op (c, ref,
6452 rvar);
6453 gimplify_and_add (x, &llist[3]);
6454
6455 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
6456 if (new_vard == new_var)
6457 SET_DECL_VALUE_EXPR (new_var, lvar);
6458 else
6459 SET_DECL_VALUE_EXPR (new_vard,
6460 build_fold_addr_expr (lvar));
6461
6462 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
6463 if (x)
6464 gimplify_and_add (x, &llist[1]);
6465
6466 tree ivar2 = unshare_expr (lvar);
6467 TREE_OPERAND (ivar2, 1) = sctx.idx;
6468 x = lang_hooks.decls.omp_clause_dtor (c, ivar2);
6469 if (x)
6470 gimplify_and_add (x, &llist[1]);
6471
6472 if (rvar2)
6473 {
6474 x = lang_hooks.decls.omp_clause_dtor (c, rvar2);
6475 if (x)
6476 gimplify_and_add (x, &llist[1]);
6477 }
6478 break;
6479 }
6480 if (x)
6481 gimplify_and_add (x, &llist[0]);
6482 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
6483 {
6484 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
6485 lower_omp (&tseq, ctx);
6486 gimple_seq_add_seq (&llist[0], tseq);
6487 }
6488 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
6489 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
6490 lower_omp (&tseq, ctx);
6491 gimple_seq_add_seq (&llist[1], tseq);
6492 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
6493 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
6494 if (new_vard == new_var)
6495 SET_DECL_VALUE_EXPR (new_var, lvar);
6496 else
6497 SET_DECL_VALUE_EXPR (new_vard,
6498 build_fold_addr_expr (lvar));
6499 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
6500 if (x)
6501 gimplify_and_add (x, &llist[1]);
6502 break;
6503 }
6504 /* If this is a reference to constant size reduction var
6505 with placeholder, we haven't emitted the initializer
6506 for it because it is undesirable if SIMD arrays are used.
6507 But if they aren't used, we need to emit the deferred
6508 initialization now. */
6509 else if (omp_privatize_by_reference (var) && is_simd)
6510 handle_simd_reference (clause_loc, new_vard, ilist);
6511
6512 tree lab2 = NULL_TREE;
6513 if (cond)
6514 {
6515 gimple *g;
6516 if (!is_parallel_ctx (ctx))
6517 {
6518 tree condv = create_tmp_var (boolean_type_node);
6519 tree m = build_simple_mem_ref (cond);
6520 g = gimple_build_assign (condv, m);
6521 gimple_seq_add_stmt (ilist, g);
6522 tree lab1
6523 = create_artificial_label (UNKNOWN_LOCATION);
6524 lab2 = create_artificial_label (UNKNOWN_LOCATION);
6525 g = gimple_build_cond (NE_EXPR, condv,
6526 boolean_false_node,
6527 lab2, lab1);
6528 gimple_seq_add_stmt (ilist, g);
6529 gimple_seq_add_stmt (ilist,
6530 gimple_build_label (lab1));
6531 }
6532 g = gimple_build_assign (build_simple_mem_ref (cond),
6533 boolean_true_node);
6534 gimple_seq_add_stmt (ilist, g);
6535 }
6536 x = lang_hooks.decls.omp_clause_default_ctor
6537 (c, unshare_expr (new_var),
6538 cond ? NULL_TREE
6539 : build_outer_var_ref (var, ctx));
6540 if (x)
6541 gimplify_and_add (x, ilist);
6542
6543 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6544 && OMP_CLAUSE_REDUCTION_INSCAN (c))
6545 {
6546 if (ctx->for_simd_scan_phase)
6547 goto do_dtor;
6548 if (x || (!is_simd
6549 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c)))
6550 {
6551 tree nv = create_tmp_var_raw (TREE_TYPE (new_var));
6552 gimple_add_tmp_var (nv);
6553 ctx->cb.decl_map->put (new_vard, nv);
6554 x = lang_hooks.decls.omp_clause_default_ctor
6555 (c, nv, build_outer_var_ref (var, ctx));
6556 if (x)
6557 gimplify_and_add (x, ilist);
6558 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
6559 {
6560 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
6561 tree vexpr = nv;
6562 if (new_vard != new_var)
6563 vexpr = build_fold_addr_expr (nv);
6564 SET_DECL_VALUE_EXPR (new_vard, vexpr);
6565 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
6566 lower_omp (&tseq, ctx);
6567 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
6568 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
6569 gimple_seq_add_seq (ilist, tseq);
6570 }
6571 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
6572 if (is_simd && ctx->scan_exclusive)
6573 {
6574 tree nv2
6575 = create_tmp_var_raw (TREE_TYPE (new_var));
6576 gimple_add_tmp_var (nv2);
6577 ctx->cb.decl_map->put (nv, nv2);
6578 x = lang_hooks.decls.omp_clause_default_ctor
6579 (c, nv2, build_outer_var_ref (var, ctx));
6580 gimplify_and_add (x, ilist);
6581 x = lang_hooks.decls.omp_clause_dtor (c, nv2);
6582 if (x)
6583 gimplify_and_add (x, dlist);
6584 }
6585 x = lang_hooks.decls.omp_clause_dtor (c, nv);
6586 if (x)
6587 gimplify_and_add (x, dlist);
6588 }
6589 else if (is_simd
6590 && ctx->scan_exclusive
6591 && TREE_ADDRESSABLE (TREE_TYPE (new_var)))
6592 {
6593 tree nv2 = create_tmp_var_raw (TREE_TYPE (new_var));
6594 gimple_add_tmp_var (nv2);
6595 ctx->cb.decl_map->put (new_vard, nv2);
6596 x = lang_hooks.decls.omp_clause_dtor (c, nv2);
6597 if (x)
6598 gimplify_and_add (x, dlist);
6599 }
6600 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
6601 goto do_dtor;
6602 }
6603
6604 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
6605 {
6606 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
6607 if (c_kind == OMP_CLAUSE_IN_REDUCTION
6608 && is_omp_target (ctx->stmt))
6609 {
6610 tree d = maybe_lookup_decl_in_outer_ctx (var, ctx);
6611 tree oldv = NULL_TREE;
6612 gcc_assert (d);
6613 if (DECL_HAS_VALUE_EXPR_P (d))
6614 oldv = DECL_VALUE_EXPR (d);
6615 SET_DECL_VALUE_EXPR (d, new_vard);
6616 DECL_HAS_VALUE_EXPR_P (d) = 1;
6617 lower_omp (&tseq, ctx);
6618 if (oldv)
6619 SET_DECL_VALUE_EXPR (d, oldv);
6620 else
6621 {
6622 SET_DECL_VALUE_EXPR (d, NULL_TREE);
6623 DECL_HAS_VALUE_EXPR_P (d) = 0;
6624 }
6625 }
6626 else
6627 lower_omp (&tseq, ctx);
6628 gimple_seq_add_seq (ilist, tseq);
6629 }
6630 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
6631 if (is_simd)
6632 {
6633 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
6634 lower_omp (&tseq, ctx);
6635 gimple_seq_add_seq (dlist, tseq);
6636 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
6637 }
6638 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
6639 if (cond)
6640 {
6641 if (lab2)
6642 gimple_seq_add_stmt (ilist, gimple_build_label (lab2));
6643 break;
6644 }
6645 goto do_dtor;
6646 }
6647 else
6648 {
6649 x = omp_reduction_init (c, TREE_TYPE (new_var));
6650 gcc_assert (TREE_CODE (TREE_TYPE (new_var)) != ARRAY_TYPE);
6651 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
6652
6653 if (cond)
6654 {
6655 gimple *g;
6656 tree lab2 = NULL_TREE;
6657 /* GOMP_taskgroup_reduction_register memsets the whole
6658 array to zero. If the initializer is zero, we don't
6659 need to initialize it again, just mark it as ever
6660 used unconditionally, i.e. cond = true. */
6661 if (initializer_zerop (x))
6662 {
6663 g = gimple_build_assign (build_simple_mem_ref (cond),
6664 boolean_true_node);
6665 gimple_seq_add_stmt (ilist, g);
6666 break;
6667 }
6668
6669 /* Otherwise, emit
6670 if (!cond) { cond = true; new_var = x; } */
6671 if (!is_parallel_ctx (ctx))
6672 {
6673 tree condv = create_tmp_var (boolean_type_node);
6674 tree m = build_simple_mem_ref (cond);
6675 g = gimple_build_assign (condv, m);
6676 gimple_seq_add_stmt (ilist, g);
6677 tree lab1
6678 = create_artificial_label (UNKNOWN_LOCATION);
6679 lab2 = create_artificial_label (UNKNOWN_LOCATION);
6680 g = gimple_build_cond (NE_EXPR, condv,
6681 boolean_false_node,
6682 lab2, lab1);
6683 gimple_seq_add_stmt (ilist, g);
6684 gimple_seq_add_stmt (ilist,
6685 gimple_build_label (lab1));
6686 }
6687 g = gimple_build_assign (build_simple_mem_ref (cond),
6688 boolean_true_node);
6689 gimple_seq_add_stmt (ilist, g);
6690 gimplify_assign (new_var, x, ilist);
6691 if (lab2)
6692 gimple_seq_add_stmt (ilist, gimple_build_label (lab2));
6693 break;
6694 }
6695
6696 /* reduction(-:var) sums up the partial results, so it
6697 acts identically to reduction(+:var). */
6698 if (code == MINUS_EXPR)
6699 code = PLUS_EXPR;
6700
6701 bool is_truth_op
6702 = (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR);
6703 tree new_vard = new_var;
6704 if (is_simd && omp_privatize_by_reference (var))
6705 {
6706 gcc_assert (TREE_CODE (new_var) == MEM_REF);
6707 new_vard = TREE_OPERAND (new_var, 0);
6708 gcc_assert (DECL_P (new_vard));
6709 }
6710 tree rvar = NULL_TREE, *rvarp = NULL, rvar2 = NULL_TREE;
6711 if (is_simd
6712 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6713 && OMP_CLAUSE_REDUCTION_INSCAN (c))
6714 rvarp = &rvar;
6715 if (is_simd
6716 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
6717 ivar, lvar, rvarp,
6718 &rvar2))
6719 {
6720 if (new_vard != new_var)
6721 {
6722 SET_DECL_VALUE_EXPR (new_vard,
6723 build_fold_addr_expr (lvar));
6724 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
6725 }
6726
6727 tree ref = build_outer_var_ref (var, ctx);
6728
6729 if (rvarp)
6730 {
6731 if (ctx->for_simd_scan_phase)
6732 break;
6733 gimplify_assign (ivar, ref, &llist[0]);
6734 ref = build_outer_var_ref (var, ctx);
6735 gimplify_assign (ref, rvar, &llist[3]);
6736 break;
6737 }
6738
6739 gimplify_assign (unshare_expr (ivar), x, &llist[0]);
6740
6741 if (sctx.is_simt)
6742 {
6743 if (!simt_lane)
6744 simt_lane = create_tmp_var (unsigned_type_node);
6745 x = build_call_expr_internal_loc
6746 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_BFLY,
6747 TREE_TYPE (ivar), 2, ivar, simt_lane);
6748 /* Make sure x is evaluated unconditionally. */
6749 tree bfly_var = create_tmp_var (TREE_TYPE (ivar));
6750 gimplify_assign (bfly_var, x, &llist[2]);
6751 x = build2 (code, TREE_TYPE (ivar), ivar, bfly_var);
6752 gimplify_assign (ivar, x, &llist[2]);
6753 }
6754 tree ivar2 = ivar;
6755 tree ref2 = ref;
6756 if (is_truth_op)
6757 {
6758 tree zero = build_zero_cst (TREE_TYPE (ivar));
6759 ivar2 = fold_build2_loc (clause_loc, NE_EXPR,
6760 boolean_type_node, ivar,
6761 zero);
6762 ref2 = fold_build2_loc (clause_loc, NE_EXPR,
6763 boolean_type_node, ref,
6764 zero);
6765 }
6766 x = build2 (code, TREE_TYPE (ref), ref2, ivar2);
6767 if (is_truth_op)
6768 x = fold_convert (TREE_TYPE (ref), x);
6769 ref = build_outer_var_ref (var, ctx);
6770 gimplify_assign (ref, x, &llist[1]);
6771
6772 }
6773 else
6774 {
6775 lower_private_allocate (var, new_var, allocator,
6776 allocate_ptr, ilist, ctx,
6777 false, NULL_TREE);
6778 if (omp_privatize_by_reference (var) && is_simd)
6779 handle_simd_reference (clause_loc, new_vard, ilist);
6780 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6781 && OMP_CLAUSE_REDUCTION_INSCAN (c))
6782 break;
6783 gimplify_assign (new_var, x, ilist);
6784 if (is_simd)
6785 {
6786 tree ref = build_outer_var_ref (var, ctx);
6787 tree new_var2 = new_var;
6788 tree ref2 = ref;
6789 if (is_truth_op)
6790 {
6791 tree zero = build_zero_cst (TREE_TYPE (new_var));
6792 new_var2
6793 = fold_build2_loc (clause_loc, NE_EXPR,
6794 boolean_type_node, new_var,
6795 zero);
6796 ref2 = fold_build2_loc (clause_loc, NE_EXPR,
6797 boolean_type_node, ref,
6798 zero);
6799 }
6800 x = build2 (code, TREE_TYPE (ref2), ref2, new_var2);
6801 if (is_truth_op)
6802 x = fold_convert (TREE_TYPE (new_var), x);
6803 ref = build_outer_var_ref (var, ctx);
6804 gimplify_assign (ref, x, dlist);
6805 }
6806 if (allocator)
6807 goto do_dtor;
6808 }
6809 }
6810 break;
6811
6812 default:
6813 gcc_unreachable ();
6814 }
6815 }
6816 }
6817 if (tskred_avar)
6818 {
6819 tree clobber = build_clobber (TREE_TYPE (tskred_avar));
6820 gimple_seq_add_stmt (ilist, gimple_build_assign (tskred_avar, clobber));
6821 }
6822
6823 if (known_eq (sctx.max_vf, 1U))
6824 {
6825 sctx.is_simt = false;
6826 if (ctx->lastprivate_conditional_map)
6827 {
6828 if (gimple_omp_for_combined_into_p (ctx->stmt))
6829 {
6830 /* Signal to lower_omp_1 that it should use parent context. */
6831 ctx->combined_into_simd_safelen1 = true;
6832 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
6833 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6834 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
6835 {
6836 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
6837 omp_context *outer = ctx->outer;
6838 if (gimple_code (outer->stmt) == GIMPLE_OMP_SCAN)
6839 outer = outer->outer;
6840 tree *v = ctx->lastprivate_conditional_map->get (o);
6841 tree po = lookup_decl (OMP_CLAUSE_DECL (c), outer);
6842 tree *pv = outer->lastprivate_conditional_map->get (po);
6843 *v = *pv;
6844 }
6845 }
6846 else
6847 {
6848 /* When not vectorized, treat lastprivate(conditional:) like
6849 normal lastprivate, as there will be just one simd lane
6850 writing the privatized variable. */
6851 delete ctx->lastprivate_conditional_map;
6852 ctx->lastprivate_conditional_map = NULL;
6853 }
6854 }
6855 }
6856
6857 if (nonconst_simd_if)
6858 {
6859 if (sctx.lane == NULL_TREE)
6860 {
6861 sctx.idx = create_tmp_var (unsigned_type_node);
6862 sctx.lane = create_tmp_var (unsigned_type_node);
6863 }
6864 /* FIXME: For now. */
6865 sctx.is_simt = false;
6866 }
6867
6868 if (sctx.lane || sctx.is_simt)
6869 {
6870 uid = create_tmp_var (ptr_type_node, "simduid");
6871 /* Don't want uninit warnings on simduid, it is always uninitialized,
6872 but we use it not for the value, but for the DECL_UID only. */
6873 suppress_warning (uid, OPT_Wuninitialized);
6874 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SIMDUID_);
6875 OMP_CLAUSE__SIMDUID__DECL (c) = uid;
6876 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
6877 gimple_omp_for_set_clauses (ctx->stmt, c);
6878 }
6879 /* Emit calls denoting privatized variables and initializing a pointer to
6880 structure that holds private variables as fields after ompdevlow pass. */
6881 if (sctx.is_simt)
6882 {
6883 sctx.simt_eargs[0] = uid;
6884 gimple *g
6885 = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, sctx.simt_eargs);
6886 gimple_call_set_lhs (g, uid);
6887 gimple_seq_add_stmt (ilist, g);
6888 sctx.simt_eargs.release ();
6889
6890 simtrec = create_tmp_var (ptr_type_node, ".omp_simt");
6891 g = gimple_build_call_internal (IFN_GOMP_SIMT_ENTER_ALLOC, 1, uid);
6892 gimple_call_set_lhs (g, simtrec);
6893 gimple_seq_add_stmt (ilist, g);
6894 }
6895 if (sctx.lane)
6896 {
6897 gimple *g = gimple_build_call_internal (IFN_GOMP_SIMD_LANE,
6898 2 + (nonconst_simd_if != NULL),
6899 uid, integer_zero_node,
6900 nonconst_simd_if);
6901 gimple_call_set_lhs (g, sctx.lane);
6902 gimple_stmt_iterator gsi = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
6903 gsi_insert_before_without_update (&gsi, g, GSI_SAME_STMT);
6904 g = gimple_build_assign (sctx.lane, INTEGER_CST,
6905 build_int_cst (unsigned_type_node, 0));
6906 gimple_seq_add_stmt (ilist, g);
6907 if (sctx.lastlane)
6908 {
6909 g = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
6910 2, uid, sctx.lane);
6911 gimple_call_set_lhs (g, sctx.lastlane);
6912 gimple_seq_add_stmt (dlist, g);
6913 gimple_seq_add_seq (dlist, llist[3]);
6914 }
6915 /* Emit reductions across SIMT lanes in log_2(simt_vf) steps. */
6916 if (llist[2])
6917 {
6918 tree simt_vf = create_tmp_var (unsigned_type_node);
6919 g = gimple_build_call_internal (IFN_GOMP_SIMT_VF, 0);
6920 gimple_call_set_lhs (g, simt_vf);
6921 gimple_seq_add_stmt (dlist, g);
6922
6923 tree t = build_int_cst (unsigned_type_node, 1);
6924 g = gimple_build_assign (simt_lane, INTEGER_CST, t);
6925 gimple_seq_add_stmt (dlist, g);
6926
6927 t = build_int_cst (unsigned_type_node, 0);
6928 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
6929 gimple_seq_add_stmt (dlist, g);
6930
6931 tree body = create_artificial_label (UNKNOWN_LOCATION);
6932 tree header = create_artificial_label (UNKNOWN_LOCATION);
6933 tree end = create_artificial_label (UNKNOWN_LOCATION);
6934 gimple_seq_add_stmt (dlist, gimple_build_goto (header));
6935 gimple_seq_add_stmt (dlist, gimple_build_label (body));
6936
6937 gimple_seq_add_seq (dlist, llist[2]);
6938
6939 g = gimple_build_assign (simt_lane, LSHIFT_EXPR, simt_lane, integer_one_node);
6940 gimple_seq_add_stmt (dlist, g);
6941
6942 gimple_seq_add_stmt (dlist, gimple_build_label (header));
6943 g = gimple_build_cond (LT_EXPR, simt_lane, simt_vf, body, end);
6944 gimple_seq_add_stmt (dlist, g);
6945
6946 gimple_seq_add_stmt (dlist, gimple_build_label (end));
6947 }
6948 for (int i = 0; i < 2; i++)
6949 if (llist[i])
6950 {
6951 tree vf = create_tmp_var (unsigned_type_node);
6952 g = gimple_build_call_internal (IFN_GOMP_SIMD_VF, 1, uid);
6953 gimple_call_set_lhs (g, vf);
6954 gimple_seq *seq = i == 0 ? ilist : dlist;
6955 gimple_seq_add_stmt (seq, g);
6956 tree t = build_int_cst (unsigned_type_node, 0);
6957 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
6958 gimple_seq_add_stmt (seq, g);
6959 tree body = create_artificial_label (UNKNOWN_LOCATION);
6960 tree header = create_artificial_label (UNKNOWN_LOCATION);
6961 tree end = create_artificial_label (UNKNOWN_LOCATION);
6962 gimple_seq_add_stmt (seq, gimple_build_goto (header));
6963 gimple_seq_add_stmt (seq, gimple_build_label (body));
6964 gimple_seq_add_seq (seq, llist[i]);
6965 t = build_int_cst (unsigned_type_node, 1);
6966 g = gimple_build_assign (sctx.idx, PLUS_EXPR, sctx.idx, t);
6967 gimple_seq_add_stmt (seq, g);
6968 gimple_seq_add_stmt (seq, gimple_build_label (header));
6969 g = gimple_build_cond (LT_EXPR, sctx.idx, vf, body, end);
6970 gimple_seq_add_stmt (seq, g);
6971 gimple_seq_add_stmt (seq, gimple_build_label (end));
6972 }
6973 }
6974 if (sctx.is_simt)
6975 {
6976 gimple_seq_add_seq (dlist, sctx.simt_dlist);
6977 gimple *g
6978 = gimple_build_call_internal (IFN_GOMP_SIMT_EXIT, 1, simtrec);
6979 gimple_seq_add_stmt (dlist, g);
6980 }
6981
6982 /* The copyin sequence is not to be executed by the main thread, since
6983 that would result in self-copies. Perhaps not visible to scalars,
6984 but it certainly is to C++ operator=. */
6985 if (copyin_seq)
6986 {
6987 x = build_call_expr (builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM),
6988 0);
6989 x = build2 (NE_EXPR, boolean_type_node, x,
6990 build_int_cst (TREE_TYPE (x), 0));
6991 x = build3 (COND_EXPR, void_type_node, x, copyin_seq, NULL);
6992 gimplify_and_add (x, ilist);
6993 }
6994
6995 /* If any copyin variable is passed by reference, we must ensure the
6996 master thread doesn't modify it before it is copied over in all
6997 threads. Similarly for variables in both firstprivate and
6998 lastprivate clauses we need to ensure the lastprivate copying
6999 happens after firstprivate copying in all threads. And similarly
7000 for UDRs if initializer expression refers to omp_orig. */
7001 if (copyin_by_ref || lastprivate_firstprivate
7002 || (reduction_omp_orig_ref
7003 && !ctx->scan_inclusive
7004 && !ctx->scan_exclusive))
7005 {
7006 /* Don't add any barrier for #pragma omp simd or
7007 #pragma omp distribute. */
7008 if (!is_task_ctx (ctx)
7009 && (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
7010 || gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_FOR))
7011 gimple_seq_add_stmt (ilist, omp_build_barrier (NULL_TREE));
7012 }
7013
7014 /* If max_vf is non-zero, then we can use only a vectorization factor
7015 up to the max_vf we chose. So stick it into the safelen clause. */
7016 if (maybe_ne (sctx.max_vf, 0U))
7017 {
7018 tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
7019 OMP_CLAUSE_SAFELEN);
7020 poly_uint64 safe_len;
7021 if (c == NULL_TREE
7022 || (poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
7023 && maybe_gt (safe_len, sctx.max_vf)))
7024 {
7025 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_SAFELEN);
7026 OMP_CLAUSE_SAFELEN_EXPR (c) = build_int_cst (integer_type_node,
7027 sctx.max_vf);
7028 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
7029 gimple_omp_for_set_clauses (ctx->stmt, c);
7030 }
7031 }
7032 }
7033
7034 /* Create temporary variables for lastprivate(conditional:) implementation
7035 in context CTX with CLAUSES. */
7036
static void
lower_lastprivate_conditional_clauses (tree *clauses, omp_context *ctx)
{
  /* Type of the per-thread "highest iteration that stored" counter.  */
  tree iter_type = NULL_TREE;
  /* Pointer (or array) holding one counter slot per conditional
     lastprivate variable; shared across the construct's threads.  */
  tree cond_ptr = NULL_TREE;
  /* Artificial iteration counter variable added as an _condtemp_ clause.  */
  tree iter_var = NULL_TREE;
  /* For simd, gimplification has already added paired _condtemp_ clauses;
     here we only need to record the mapping and add the iterator temp.  */
  bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
		  && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD);
  tree next = *clauses;
  for (tree c = *clauses; c; c = OMP_CLAUSE_CHAIN (c))
    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
	&& OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
      {
	if (is_simd)
	  {
	    /* Each conditional lastprivate has a matching _CONDTEMP_
	       clause further down the chain; NEXT tracks where to resume
	       the search so pairs match up in order.  */
	    tree cc = omp_find_clause (next, OMP_CLAUSE__CONDTEMP_);
	    gcc_assert (cc);
	    if (iter_type == NULL_TREE)
	      {
		/* First conditional clause seen: create the iterator temp
		   and prepend it as an iterator _CONDTEMP_ clause.  */
		iter_type = TREE_TYPE (OMP_CLAUSE_DECL (cc));
		iter_var = create_tmp_var_raw (iter_type);
		DECL_CONTEXT (iter_var) = current_function_decl;
		DECL_SEEN_IN_BIND_EXPR_P (iter_var) = 1;
		DECL_CHAIN (iter_var) = ctx->block_vars;
		ctx->block_vars = iter_var;
		tree c3
		  = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_);
		OMP_CLAUSE__CONDTEMP__ITER (c3) = 1;
		OMP_CLAUSE_DECL (c3) = iter_var;
		OMP_CLAUSE_CHAIN (c3) = *clauses;
		*clauses = c3;
		ctx->lastprivate_conditional_map = new hash_map<tree, tree>;
	      }
	    next = OMP_CLAUSE_CHAIN (cc);
	    /* Map the privatized variable to its condition temporary.  */
	    tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
	    tree v = lookup_decl (OMP_CLAUSE_DECL (cc), ctx);
	    ctx->lastprivate_conditional_map->put (o, v);
	    continue;
	  }
	if (iter_type == NULL)
	  {
	    /* Non-simd path, first conditional clause: derive the counter
	       type from the loop/sections iteration space.  */
	    if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR)
	      {
		struct omp_for_data fd;
		omp_extract_for_data (as_a <gomp_for *> (ctx->stmt), &fd,
				      NULL);
		iter_type = unsigned_type_for (fd.iter_type);
	      }
	    else if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
	      iter_type = unsigned_type_node;
	    tree c2 = omp_find_clause (*clauses, OMP_CLAUSE__CONDTEMP_);
	    if (c2)
	      {
		/* Reuse an existing _CONDTEMP_ clause (e.g. added for a
		   combined construct) but redirect it at the outer decl.  */
		cond_ptr
		  = lookup_decl_in_outer_ctx (OMP_CLAUSE_DECL (c2), ctx);
		OMP_CLAUSE_DECL (c2) = cond_ptr;
	      }
	    else
	      {
		/* Otherwise create the pointer temp and prepend a fresh
		   _CONDTEMP_ clause for it.  */
		cond_ptr = create_tmp_var_raw (build_pointer_type (iter_type));
		DECL_CONTEXT (cond_ptr) = current_function_decl;
		DECL_SEEN_IN_BIND_EXPR_P (cond_ptr) = 1;
		DECL_CHAIN (cond_ptr) = ctx->block_vars;
		ctx->block_vars = cond_ptr;
		c2 = build_omp_clause (UNKNOWN_LOCATION,
				       OMP_CLAUSE__CONDTEMP_);
		OMP_CLAUSE_DECL (c2) = cond_ptr;
		OMP_CLAUSE_CHAIN (c2) = *clauses;
		*clauses = c2;
	      }
	    /* The iterator temp is chained right after the pointer clause,
	       marked with OMP_CLAUSE__CONDTEMP__ITER.  */
	    iter_var = create_tmp_var_raw (iter_type);
	    DECL_CONTEXT (iter_var) = current_function_decl;
	    DECL_SEEN_IN_BIND_EXPR_P (iter_var) = 1;
	    DECL_CHAIN (iter_var) = ctx->block_vars;
	    ctx->block_vars = iter_var;
	    tree c3
	      = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_);
	    OMP_CLAUSE__CONDTEMP__ITER (c3) = 1;
	    OMP_CLAUSE_DECL (c3) = iter_var;
	    OMP_CLAUSE_CHAIN (c3) = OMP_CLAUSE_CHAIN (c2);
	    OMP_CLAUSE_CHAIN (c2) = c3;
	    ctx->lastprivate_conditional_map = new hash_map<tree, tree>;
	  }
	/* One per-variable counter temp, recorded in the map so that
	   lower_lastprivate_clauses can find it later.  */
	tree v = create_tmp_var_raw (iter_type);
	DECL_CONTEXT (v) = current_function_decl;
	DECL_SEEN_IN_BIND_EXPR_P (v) = 1;
	DECL_CHAIN (v) = ctx->block_vars;
	ctx->block_vars = v;
	tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
	ctx->lastprivate_conditional_map->put (o, v);
      }
}
7129
7130
7131 /* Generate code to implement the LASTPRIVATE clauses. This is used for
7132 both parallel and workshare constructs. PREDICATE may be NULL if it's
7133 always true. BODY_P is the sequence to insert early initialization
7134 if needed, STMT_LIST is where the non-conditional lastprivate handling
7135 goes into and CSTMT_LIST is a sequence that needs to be run in a critical
7136 section. */
7137
static void
lower_lastprivate_clauses (tree clauses, tree predicate, gimple_seq *body_p,
			   gimple_seq *stmt_list, gimple_seq *cstmt_list,
			   omp_context *ctx)
{
  tree x, c, label = NULL, orig_clauses = clauses;
  bool par_clauses = false;
  tree simduid = NULL, lastlane = NULL, simtcond = NULL, simtlast = NULL;
  /* Running slot index into the conditional-lastprivate buffer; byte
     offset when COND_PTR is a pointer, element index when an array.  */
  unsigned HOST_WIDE_INT conditional_off = 0;
  /* Statements to be emitted after LABEL (used for combined-into-simd
     safelen(1) conditional lastprivates).  */
  gimple_seq post_stmt_list = NULL;

  /* Early exit if there are no lastprivate or linear clauses.  */
  for (; clauses ; clauses = OMP_CLAUSE_CHAIN (clauses))
    if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LASTPRIVATE
	|| (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LINEAR
	    && !OMP_CLAUSE_LINEAR_NO_COPYOUT (clauses)))
      break;
  if (clauses == NULL)
    {
      /* If this was a workshare clause, see if it had been combined
	 with its parallel.  In that case, look for the clauses on the
	 parallel statement itself.  */
      if (is_parallel_ctx (ctx))
	return;

      ctx = ctx->outer;
      if (ctx == NULL || !is_parallel_ctx (ctx))
	return;

      clauses = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
				 OMP_CLAUSE_LASTPRIVATE);
      if (clauses == NULL)
	return;
      par_clauses = true;
    }

  /* For simd loops, pick up the _SIMT_ / _SIMDUID_ markers so that the
     "last" value can be fetched from the right lane of the privatized
     "omp simd array" copies.  */
  bool maybe_simt = false;
  if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
      && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
    {
      maybe_simt = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMT_);
      simduid = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMDUID_);
      if (simduid)
	simduid = OMP_CLAUSE__SIMDUID__DECL (simduid);
    }

  if (predicate)
    {
      /* Emit: if (PREDICATE) goto label_true; else goto label; so that
	 the copy-out runs only in the thread that executed the last
	 iteration/section.  */
      gcond *stmt;
      tree label_true, arm1, arm2;
      enum tree_code pred_code = TREE_CODE (predicate);

      label = create_artificial_label (UNKNOWN_LOCATION);
      label_true = create_artificial_label (UNKNOWN_LOCATION);
      if (TREE_CODE_CLASS (pred_code) == tcc_comparison)
	{
	  arm1 = TREE_OPERAND (predicate, 0);
	  arm2 = TREE_OPERAND (predicate, 1);
	  gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
	  gimplify_expr (&arm2, stmt_list, NULL, is_gimple_val, fb_rvalue);
	}
      else
	{
	  /* Not a comparison: test PREDICATE != false.  */
	  arm1 = predicate;
	  gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
	  arm2 = boolean_false_node;
	  pred_code = NE_EXPR;
	}
      if (maybe_simt)
	{
	  /* Under SIMT, vote across lanes: take the branch if any lane's
	     predicate is true, remembering the condition in SIMTCOND.  */
	  c = build2 (pred_code, boolean_type_node, arm1, arm2);
	  c = fold_convert (integer_type_node, c);
	  simtcond = create_tmp_var (integer_type_node);
	  gimplify_assign (simtcond, c, stmt_list);
	  gcall *g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY,
						 1, simtcond);
	  c = create_tmp_var (integer_type_node);
	  gimple_call_set_lhs (g, c);
	  gimple_seq_add_stmt (stmt_list, g);
	  stmt = gimple_build_cond (NE_EXPR, c, integer_zero_node,
				    label_true, label);
	}
      else
	stmt = gimple_build_cond (pred_code, arm1, arm2, label_true, label);
      gimple_seq_add_stmt (stmt_list, stmt);
      gimple_seq_add_stmt (stmt_list, gimple_build_label (label_true));
    }

  tree cond_ptr = NULL_TREE;
  for (c = clauses; c ;)
    {
      tree var, new_var;
      location_t clause_loc = OMP_CLAUSE_LOCATION (c);
      /* Sequence the copy-out for this clause goes into; defaults to
	 STMT_LIST but conditional lastprivates go into CSTMT_LIST
	 (critical section) or POST_STMT_LIST.  */
      gimple_seq *this_stmt_list = stmt_list;
      tree lab2 = NULL_TREE;

      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
	  && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
	  && ctx->lastprivate_conditional_map
	  && !ctx->combined_into_simd_safelen1)
	{
	  gcc_assert (body_p);
	  /* Simd conditional lastprivates are handled by the vectorizer
	     machinery; nothing to do here.  */
	  if (simduid)
	    goto next;
	  if (cond_ptr == NULL_TREE)
	    {
	      cond_ptr = omp_find_clause (orig_clauses, OMP_CLAUSE__CONDTEMP_);
	      cond_ptr = OMP_CLAUSE_DECL (cond_ptr);
	    }
	  tree type = TREE_TYPE (TREE_TYPE (cond_ptr));
	  tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
	  tree v = *ctx->lastprivate_conditional_map->get (o);
	  /* Zero-initialize the per-thread counter early on.  */
	  gimplify_assign (v, build_zero_cst (type), body_p);
	  this_stmt_list = cstmt_list;
	  /* MEM is this variable's slot in the shared buffer; it holds
	     the highest iteration counter stored so far.  */
	  tree mem;
	  if (POINTER_TYPE_P (TREE_TYPE (cond_ptr)))
	    {
	      mem = build2 (MEM_REF, type, cond_ptr,
			    build_int_cst (TREE_TYPE (cond_ptr),
					   conditional_off));
	      conditional_off += tree_to_uhwi (TYPE_SIZE_UNIT (type));
	    }
	  else
	    mem = build4 (ARRAY_REF, type, cond_ptr,
			  size_int (conditional_off++), NULL_TREE, NULL_TREE);
	  tree mem2 = copy_node (mem);
	  gimple_seq seq = NULL;
	  mem = force_gimple_operand (mem, &seq, true, NULL_TREE);
	  gimple_seq_add_seq (this_stmt_list, seq);
	  /* if (v > mem) mem = v; i.e. only the thread with the highest
	     storing iteration updates the slot (and below the var).  */
	  tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
	  lab2 = create_artificial_label (UNKNOWN_LOCATION);
	  gimple *g = gimple_build_cond (GT_EXPR, v, mem, lab1, lab2);
	  gimple_seq_add_stmt (this_stmt_list, g);
	  gimple_seq_add_stmt (this_stmt_list, gimple_build_label (lab1));
	  gimplify_assign (mem2, v, this_stmt_list);
	}
      else if (predicate
	       && ctx->combined_into_simd_safelen1
	       && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
	       && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
	       && ctx->lastprivate_conditional_map)
	this_stmt_list = &post_stmt_list;

      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
	  || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
	      && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
	{
	  var = OMP_CLAUSE_DECL (c);
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
	      && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
	      && is_taskloop_ctx (ctx))
	    {
	      /* For taskloop the privatized copy lives in the enclosing
		 task context.  */
	      gcc_checking_assert (ctx->outer && is_task_ctx (ctx->outer));
	      new_var = lookup_decl (var, ctx->outer);
	    }
	  else
	    {
	      new_var = lookup_decl (var, ctx);
	      /* Avoid uninitialized warnings for lastprivate and
		 for linear iterators.  */
	      if (predicate
		  && (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
		      || OMP_CLAUSE_LINEAR_NO_COPYIN (c)))
		suppress_warning (new_var, OPT_Wuninitialized);
	    }

	  if (!maybe_simt && simduid && DECL_HAS_VALUE_EXPR_P (new_var))
	    {
	      /* The variable was privatized into an "omp simd array";
		 read the element written by the last storing lane,
		 computed once by IFN_GOMP_SIMD_LAST_LANE.  */
	      tree val = DECL_VALUE_EXPR (new_var);
	      if (TREE_CODE (val) == ARRAY_REF
		  && VAR_P (TREE_OPERAND (val, 0))
		  && lookup_attribute ("omp simd array",
				       DECL_ATTRIBUTES (TREE_OPERAND (val,
								      0))))
		{
		  if (lastlane == NULL)
		    {
		      lastlane = create_tmp_var (unsigned_type_node);
		      gcall *g
			= gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
						      2, simduid,
						      TREE_OPERAND (val, 1));
		      gimple_call_set_lhs (g, lastlane);
		      gimple_seq_add_stmt (this_stmt_list, g);
		    }
		  new_var = build4 (ARRAY_REF, TREE_TYPE (val),
				    TREE_OPERAND (val, 0), lastlane,
				    NULL_TREE, NULL_TREE);
		  TREE_THIS_NOTRAP (new_var) = 1;
		}
	    }
	  else if (maybe_simt)
	    {
	      /* Under SIMT, fetch the value from the last lane whose
		 predicate was true via a cross-lane exchange.  */
	      tree val = (DECL_HAS_VALUE_EXPR_P (new_var)
			  ? DECL_VALUE_EXPR (new_var)
			  : new_var);
	      if (simtlast == NULL)
		{
		  simtlast = create_tmp_var (unsigned_type_node);
		  gcall *g = gimple_build_call_internal
		    (IFN_GOMP_SIMT_LAST_LANE, 1, simtcond);
		  gimple_call_set_lhs (g, simtlast);
		  gimple_seq_add_stmt (this_stmt_list, g);
		}
	      x = build_call_expr_internal_loc
		(UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_IDX,
		 TREE_TYPE (val), 2, val, simtlast);
	      new_var = unshare_expr (new_var);
	      gimplify_assign (new_var, x, this_stmt_list);
	      new_var = unshare_expr (new_var);
	    }

	  /* Emit any deferred clause sequence (e.g. C++ assignment
	     operator calls or linear step updates) first.  */
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
	      && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
	    {
	      lower_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
	      gimple_seq_add_seq (this_stmt_list,
				  OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
	      OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) = NULL;
	    }
	  else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
		   && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
	    {
	      lower_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
	      gimple_seq_add_seq (this_stmt_list,
				  OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
	      OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) = NULL;
	    }

	  /* X is the destination: the original (outer) variable.  */
	  x = NULL_TREE;
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
	      && OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c)
	      && is_taskloop_ctx (ctx))
	    {
	      tree ovar = maybe_lookup_decl_in_outer_ctx (var,
							  ctx->outer->outer);
	      if (is_global_var (ovar))
		x = ovar;
	    }
	  if (!x)
	    x = build_outer_var_ref (var, ctx, OMP_CLAUSE_LASTPRIVATE);
	  if (omp_privatize_by_reference (var))
	    new_var = build_simple_mem_ref_loc (clause_loc, new_var);
	  /* Copy the private value back (possibly via a user-defined
	     assignment operator chosen by the langhook).  */
	  x = lang_hooks.decls.omp_clause_assign_op (c, x, new_var);
	  gimplify_and_add (x, this_stmt_list);

	  if (lab2)
	    gimple_seq_add_stmt (this_stmt_list, gimple_build_label (lab2));
	}

     next:
      c = OMP_CLAUSE_CHAIN (c);
      if (c == NULL && !par_clauses)
	{
	  /* If this was a workshare clause, see if it had been combined
	     with its parallel.  In that case, continue looking for the
	     clauses also on the parallel statement itself.  */
	  if (is_parallel_ctx (ctx))
	    break;

	  ctx = ctx->outer;
	  if (ctx == NULL || !is_parallel_ctx (ctx))
	    break;

	  c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
			       OMP_CLAUSE_LASTPRIVATE);
	  par_clauses = true;
	}
    }

  if (label)
    gimple_seq_add_stmt (stmt_list, gimple_build_label (label));
  gimple_seq_add_seq (stmt_list, post_stmt_list);
}
7412
7413 /* Lower the OpenACC reductions of CLAUSES for compute axis LEVEL
7414 (which might be a placeholder). INNER is true if this is an inner
7415 axis of a multi-axis loop. FORK and JOIN are (optional) fork and
7416 join markers. Generate the before-loop forking sequence in
7417 FORK_SEQ and the after-loop joining sequence to JOIN_SEQ. The
7418 general form of these sequences is
7419
7420 GOACC_REDUCTION_SETUP
7421 GOACC_FORK
7422 GOACC_REDUCTION_INIT
7423 ...
7424 GOACC_REDUCTION_FINI
7425 GOACC_JOIN
7426 GOACC_REDUCTION_TEARDOWN. */
7427
static void
lower_oacc_reductions (location_t loc, tree clauses, tree level, bool inner,
		       gcall *fork, gcall *private_marker, gcall *join,
		       gimple_seq *fork_seq, gimple_seq *join_seq,
		       omp_context *ctx)
{
  /* The four reduction phases are collected separately so the fork and
     join markers can be stitched in between them at the end.  */
  gimple_seq before_fork = NULL;
  gimple_seq after_fork = NULL;
  gimple_seq before_join = NULL;
  gimple_seq after_join = NULL;
  tree init_code = NULL_TREE, fini_code = NULL_TREE,
    setup_code = NULL_TREE, teardown_code = NULL_TREE;
  /* Byte offset of each variable within the reduction buffer.  */
  unsigned offset = 0;

  for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
      {
	/* No 'reduction' clauses on OpenACC 'kernels'.  */
	gcc_checking_assert (!is_oacc_kernels (ctx));
	/* Likewise, on OpenACC 'kernels' decomposed parts.  */
	gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx));

	tree orig = OMP_CLAUSE_DECL (c);
	tree var = maybe_lookup_decl (orig, ctx);
	/* Reference to the result location in an enclosing offload
	   region's receiver struct, or integer_zero_node if none.  */
	tree ref_to_res = NULL_TREE;
	tree incoming, outgoing, v1, v2, v3;
	bool is_private = false;

	/* Canonicalize the reduction operator: '-' reduces like '+',
	   and the short-circuit logicals like their bitwise forms.  */
	enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
	if (rcode == MINUS_EXPR)
	  rcode = PLUS_EXPR;
	else if (rcode == TRUTH_ANDIF_EXPR)
	  rcode = BIT_AND_EXPR;
	else if (rcode == TRUTH_ORIF_EXPR)
	  rcode = BIT_IOR_EXPR;
	tree op = build_int_cst (unsigned_type_node, rcode);

	if (!var)
	  var = orig;

	incoming = outgoing = var;

	if (!inner)
	  {
	    /* See if an outer construct also reduces this variable.  */
	    omp_context *outer = ctx;

	    while (omp_context *probe = outer->outer)
	      {
		enum gimple_code type = gimple_code (probe->stmt);
		tree cls;

		switch (type)
		  {
		  case GIMPLE_OMP_FOR:
		    cls = gimple_omp_for_clauses (probe->stmt);
		    break;

		  case GIMPLE_OMP_TARGET:
		    /* No 'reduction' clauses inside OpenACC 'kernels'
		       regions.  */
		    gcc_checking_assert (!is_oacc_kernels (probe));

		    if (!is_gimple_omp_offloaded (probe->stmt))
		      goto do_lookup;

		    cls = gimple_omp_target_clauses (probe->stmt);
		    break;

		  default:
		    goto do_lookup;
		  }

		outer = probe;
		for (; cls;  cls = OMP_CLAUSE_CHAIN (cls))
		  if (OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_REDUCTION
		      && orig == OMP_CLAUSE_DECL (cls))
		    {
		      /* The outer construct reduces it too: use its
			 private copy as both incoming and outgoing.  */
		      incoming = outgoing = lookup_decl (orig, probe);
		      goto has_outer_reduction;
		    }
		  else if ((OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_FIRSTPRIVATE
			    || OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_PRIVATE)
			   && orig == OMP_CLAUSE_DECL (cls))
		    {
		      /* Privatized outside: no mapping back to the host
			 result even inside an offloaded region.  */
		      is_private = true;
		      goto do_lookup;
		    }
	      }

	  do_lookup:
	    /* This is the outermost construct with this reduction,
	       see if there's a mapping for it.  */
	    if (gimple_code (outer->stmt) == GIMPLE_OMP_TARGET
		&& maybe_lookup_field (orig, outer) && !is_private)
	      {
		ref_to_res = build_receiver_ref (orig, false, outer);
		if (omp_privatize_by_reference (orig))
		  ref_to_res = build_simple_mem_ref (ref_to_res);

		tree type = TREE_TYPE (var);
		if (POINTER_TYPE_P (type))
		  type = TREE_TYPE (type);

		/* Start from the operator's neutral element; the final
		   value is combined into REF_TO_RES at teardown.  */
		outgoing = var;
		incoming = omp_reduction_init_op (loc, rcode, type);
	      }
	    else
	      {
		/* Try to look at enclosing contexts for reduction var,
		   use original if no mapping found.  */
		tree t = NULL_TREE;
		omp_context *c = ctx->outer;
		while (c && !t)
		  {
		    t = maybe_lookup_decl (orig, c);
		    c = c->outer;
		  }
		incoming = outgoing = (t ? t : orig);
	      }

	  has_outer_reduction:;
	  }

	if (!ref_to_res)
	  ref_to_res = integer_zero_node;

	if (omp_privatize_by_reference (orig))
	  {
	    /* Reference-typed reduction: give each phase its own copy
	       of the pointer, then operate on the pointed-to objects.  */
	    tree type = TREE_TYPE (var);
	    const char *id = IDENTIFIER_POINTER (DECL_NAME (var));

	    if (!inner)
	      {
		tree x = create_tmp_var (TREE_TYPE (type), id);
		gimplify_assign (var, build_fold_addr_expr (x), fork_seq);
	      }

	    v1 = create_tmp_var (type, id);
	    v2 = create_tmp_var (type, id);
	    v3 = create_tmp_var (type, id);

	    gimplify_assign (v1, var, fork_seq);
	    gimplify_assign (v2, var, fork_seq);
	    gimplify_assign (v3, var, fork_seq);

	    var = build_simple_mem_ref (var);
	    v1 = build_simple_mem_ref (v1);
	    v2 = build_simple_mem_ref (v2);
	    v3 = build_simple_mem_ref (v3);
	    outgoing = build_simple_mem_ref (outgoing);

	    if (!TREE_CONSTANT (incoming))
	      incoming = build_simple_mem_ref (incoming);
	  }
	else
	  /* Note that 'var' might be a mem ref.  */
	  v1 = v2 = v3 = var;

	/* Determine position in reduction buffer, which may be used
	   by target.  The parser has ensured that this is not a
	   variable-sized type.  */
	fixed_size_mode mode
	  = as_a <fixed_size_mode> (TYPE_MODE (TREE_TYPE (var)));
	unsigned align = GET_MODE_ALIGNMENT (mode) /  BITS_PER_UNIT;
	offset = (offset + align - 1) & ~(align - 1);
	tree off = build_int_cst (sizetype, offset);
	offset += GET_MODE_SIZE (mode);

	if (!init_code)
	  {
	    /* Lazily build the phase selector constants shared by all
	       reduction clauses on this construct.  */
	    init_code = build_int_cst (integer_type_node,
				       IFN_GOACC_REDUCTION_INIT);
	    fini_code = build_int_cst (integer_type_node,
				       IFN_GOACC_REDUCTION_FINI);
	    setup_code = build_int_cst (integer_type_node,
					IFN_GOACC_REDUCTION_SETUP);
	    teardown_code = build_int_cst (integer_type_node,
					   IFN_GOACC_REDUCTION_TEARDOWN);
	  }

	/* Four IFN_GOACC_REDUCTION calls, distinguished by their first
	   argument; the oacc_transform pass expands them per-target.  */
	tree setup_call
	  = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
					  TREE_TYPE (var), 6, setup_code,
					  unshare_expr (ref_to_res),
					  unshare_expr (incoming),
					  level, op, off);
	tree init_call
	  = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
					  TREE_TYPE (var), 6, init_code,
					  unshare_expr (ref_to_res),
					  unshare_expr (v1), level, op, off);
	tree fini_call
	  = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
					  TREE_TYPE (var), 6, fini_code,
					  unshare_expr (ref_to_res),
					  unshare_expr (v2), level, op, off);
	tree teardown_call
	  = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
					  TREE_TYPE (var), 6, teardown_code,
					  ref_to_res, unshare_expr (v3),
					  level, op, off);

	gimplify_assign (unshare_expr (v1), setup_call, &before_fork);
	gimplify_assign (unshare_expr (v2), init_call, &after_fork);
	gimplify_assign (unshare_expr (v3), fini_call, &before_join);
	gimplify_assign (unshare_expr (outgoing), teardown_call, &after_join);
      }

  /* Now stitch things together.  */
  gimple_seq_add_seq (fork_seq, before_fork);
  if (private_marker)
    gimple_seq_add_stmt (fork_seq, private_marker);
  if (fork)
    gimple_seq_add_stmt (fork_seq, fork);
  gimple_seq_add_seq (fork_seq, after_fork);

  gimple_seq_add_seq (join_seq, before_join);
  if (join)
    gimple_seq_add_stmt (join_seq, join);
  gimple_seq_add_seq (join_seq, after_join);
}
7650
7651 /* Generate code to implement the REDUCTION clauses, append it
7652 to STMT_SEQP. CLIST if non-NULL is a pointer to a sequence
7653 that should be emitted also inside of the critical section,
7654 in that case clear *CLIST afterwards, otherwise leave it as is
7655 and let the caller emit it itself. */
7656
7657 static void
lower_reduction_clauses(tree clauses,gimple_seq * stmt_seqp,gimple_seq * clist,omp_context * ctx)7658 lower_reduction_clauses (tree clauses, gimple_seq *stmt_seqp,
7659 gimple_seq *clist, omp_context *ctx)
7660 {
7661 gimple_seq sub_seq = NULL;
7662 gimple *stmt;
7663 tree x, c;
7664 int count = 0;
7665
7666 /* OpenACC loop reductions are handled elsewhere. */
7667 if (is_gimple_omp_oacc (ctx->stmt))
7668 return;
7669
7670 /* SIMD reductions are handled in lower_rec_input_clauses. */
7671 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
7672 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
7673 return;
7674
7675 /* inscan reductions are handled elsewhere. */
7676 if (ctx->scan_inclusive || ctx->scan_exclusive)
7677 return;
7678
7679 /* First see if there is exactly one reduction clause. Use OMP_ATOMIC
7680 update in that case, otherwise use a lock. */
7681 for (c = clauses; c && count < 2; c = OMP_CLAUSE_CHAIN (c))
7682 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
7683 && !OMP_CLAUSE_REDUCTION_TASK (c))
7684 {
7685 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
7686 || TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
7687 {
7688 /* Never use OMP_ATOMIC for array reductions or UDRs. */
7689 count = -1;
7690 break;
7691 }
7692 count++;
7693 }
7694
7695 if (count == 0)
7696 return;
7697
7698 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7699 {
7700 tree var, ref, new_var, orig_var;
7701 enum tree_code code;
7702 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
7703
7704 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
7705 || OMP_CLAUSE_REDUCTION_TASK (c))
7706 continue;
7707
7708 enum omp_clause_code ccode = OMP_CLAUSE_REDUCTION;
7709 orig_var = var = OMP_CLAUSE_DECL (c);
7710 if (TREE_CODE (var) == MEM_REF)
7711 {
7712 var = TREE_OPERAND (var, 0);
7713 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
7714 var = TREE_OPERAND (var, 0);
7715 if (TREE_CODE (var) == ADDR_EXPR)
7716 var = TREE_OPERAND (var, 0);
7717 else
7718 {
7719 /* If this is a pointer or referenced based array
7720 section, the var could be private in the outer
7721 context e.g. on orphaned loop construct. Pretend this
7722 is private variable's outer reference. */
7723 ccode = OMP_CLAUSE_PRIVATE;
7724 if (TREE_CODE (var) == INDIRECT_REF)
7725 var = TREE_OPERAND (var, 0);
7726 }
7727 orig_var = var;
7728 if (is_variable_sized (var))
7729 {
7730 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
7731 var = DECL_VALUE_EXPR (var);
7732 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
7733 var = TREE_OPERAND (var, 0);
7734 gcc_assert (DECL_P (var));
7735 }
7736 }
7737 new_var = lookup_decl (var, ctx);
7738 if (var == OMP_CLAUSE_DECL (c)
7739 && omp_privatize_by_reference (var))
7740 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
7741 ref = build_outer_var_ref (var, ctx, ccode);
7742 code = OMP_CLAUSE_REDUCTION_CODE (c);
7743
7744 /* reduction(-:var) sums up the partial results, so it acts
7745 identically to reduction(+:var). */
7746 if (code == MINUS_EXPR)
7747 code = PLUS_EXPR;
7748
7749 bool is_truth_op = (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR);
7750 if (count == 1)
7751 {
7752 tree addr = build_fold_addr_expr_loc (clause_loc, ref);
7753
7754 addr = save_expr (addr);
7755 ref = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (addr)), addr);
7756 tree new_var2 = new_var;
7757 tree ref2 = ref;
7758 if (is_truth_op)
7759 {
7760 tree zero = build_zero_cst (TREE_TYPE (new_var));
7761 new_var2 = fold_build2_loc (clause_loc, NE_EXPR,
7762 boolean_type_node, new_var, zero);
7763 ref2 = fold_build2_loc (clause_loc, NE_EXPR, boolean_type_node,
7764 ref, zero);
7765 }
7766 x = fold_build2_loc (clause_loc, code, TREE_TYPE (new_var2), ref2,
7767 new_var2);
7768 if (is_truth_op)
7769 x = fold_convert (TREE_TYPE (new_var), x);
7770 x = build2 (OMP_ATOMIC, void_type_node, addr, x);
7771 OMP_ATOMIC_MEMORY_ORDER (x) = OMP_MEMORY_ORDER_RELAXED;
7772 gimplify_and_add (x, stmt_seqp);
7773 return;
7774 }
7775 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
7776 {
7777 tree d = OMP_CLAUSE_DECL (c);
7778 tree type = TREE_TYPE (d);
7779 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
7780 tree i = create_tmp_var (TREE_TYPE (v));
7781 tree ptype = build_pointer_type (TREE_TYPE (type));
7782 tree bias = TREE_OPERAND (d, 1);
7783 d = TREE_OPERAND (d, 0);
7784 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
7785 {
7786 tree b = TREE_OPERAND (d, 1);
7787 b = maybe_lookup_decl (b, ctx);
7788 if (b == NULL)
7789 {
7790 b = TREE_OPERAND (d, 1);
7791 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
7792 }
7793 if (integer_zerop (bias))
7794 bias = b;
7795 else
7796 {
7797 bias = fold_convert_loc (clause_loc, TREE_TYPE (b), bias);
7798 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
7799 TREE_TYPE (b), b, bias);
7800 }
7801 d = TREE_OPERAND (d, 0);
7802 }
7803 /* For ref build_outer_var_ref already performs this, so
7804 only new_var needs a dereference. */
7805 if (TREE_CODE (d) == INDIRECT_REF)
7806 {
7807 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
7808 gcc_assert (omp_privatize_by_reference (var)
7809 && var == orig_var);
7810 }
7811 else if (TREE_CODE (d) == ADDR_EXPR)
7812 {
7813 if (orig_var == var)
7814 {
7815 new_var = build_fold_addr_expr (new_var);
7816 ref = build_fold_addr_expr (ref);
7817 }
7818 }
7819 else
7820 {
7821 gcc_assert (orig_var == var);
7822 if (omp_privatize_by_reference (var))
7823 ref = build_fold_addr_expr (ref);
7824 }
7825 if (DECL_P (v))
7826 {
7827 tree t = maybe_lookup_decl (v, ctx);
7828 if (t)
7829 v = t;
7830 else
7831 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
7832 gimplify_expr (&v, stmt_seqp, NULL, is_gimple_val, fb_rvalue);
7833 }
7834 if (!integer_zerop (bias))
7835 {
7836 bias = fold_convert_loc (clause_loc, sizetype, bias);
7837 new_var = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
7838 TREE_TYPE (new_var), new_var,
7839 unshare_expr (bias));
7840 ref = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
7841 TREE_TYPE (ref), ref, bias);
7842 }
7843 new_var = fold_convert_loc (clause_loc, ptype, new_var);
7844 ref = fold_convert_loc (clause_loc, ptype, ref);
7845 tree m = create_tmp_var (ptype);
7846 gimplify_assign (m, new_var, stmt_seqp);
7847 new_var = m;
7848 m = create_tmp_var (ptype);
7849 gimplify_assign (m, ref, stmt_seqp);
7850 ref = m;
7851 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), stmt_seqp);
7852 tree body = create_artificial_label (UNKNOWN_LOCATION);
7853 tree end = create_artificial_label (UNKNOWN_LOCATION);
7854 gimple_seq_add_stmt (&sub_seq, gimple_build_label (body));
7855 tree priv = build_simple_mem_ref_loc (clause_loc, new_var);
7856 tree out = build_simple_mem_ref_loc (clause_loc, ref);
7857 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
7858 {
7859 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
7860 tree decl_placeholder
7861 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
7862 SET_DECL_VALUE_EXPR (placeholder, out);
7863 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
7864 SET_DECL_VALUE_EXPR (decl_placeholder, priv);
7865 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
7866 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
7867 gimple_seq_add_seq (&sub_seq,
7868 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
7869 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
7870 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
7871 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
7872 }
7873 else
7874 {
7875 tree out2 = out;
7876 tree priv2 = priv;
7877 if (is_truth_op)
7878 {
7879 tree zero = build_zero_cst (TREE_TYPE (out));
7880 out2 = fold_build2_loc (clause_loc, NE_EXPR,
7881 boolean_type_node, out, zero);
7882 priv2 = fold_build2_loc (clause_loc, NE_EXPR,
7883 boolean_type_node, priv, zero);
7884 }
7885 x = build2 (code, TREE_TYPE (out2), out2, priv2);
7886 if (is_truth_op)
7887 x = fold_convert (TREE_TYPE (out), x);
7888 out = unshare_expr (out);
7889 gimplify_assign (out, x, &sub_seq);
7890 }
7891 gimple *g = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
7892 TYPE_SIZE_UNIT (TREE_TYPE (type)));
7893 gimple_seq_add_stmt (&sub_seq, g);
7894 g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
7895 TYPE_SIZE_UNIT (TREE_TYPE (type)));
7896 gimple_seq_add_stmt (&sub_seq, g);
7897 g = gimple_build_assign (i, PLUS_EXPR, i,
7898 build_int_cst (TREE_TYPE (i), 1));
7899 gimple_seq_add_stmt (&sub_seq, g);
7900 g = gimple_build_cond (LE_EXPR, i, v, body, end);
7901 gimple_seq_add_stmt (&sub_seq, g);
7902 gimple_seq_add_stmt (&sub_seq, gimple_build_label (end));
7903 }
7904 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
7905 {
7906 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
7907
7908 if (omp_privatize_by_reference (var)
7909 && !useless_type_conversion_p (TREE_TYPE (placeholder),
7910 TREE_TYPE (ref)))
7911 ref = build_fold_addr_expr_loc (clause_loc, ref);
7912 SET_DECL_VALUE_EXPR (placeholder, ref);
7913 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
7914 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
7915 gimple_seq_add_seq (&sub_seq, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
7916 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
7917 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
7918 }
7919 else
7920 {
7921 tree new_var2 = new_var;
7922 tree ref2 = ref;
7923 if (is_truth_op)
7924 {
7925 tree zero = build_zero_cst (TREE_TYPE (new_var));
7926 new_var2 = fold_build2_loc (clause_loc, NE_EXPR,
7927 boolean_type_node, new_var, zero);
7928 ref2 = fold_build2_loc (clause_loc, NE_EXPR, boolean_type_node,
7929 ref, zero);
7930 }
7931 x = build2 (code, TREE_TYPE (ref), ref2, new_var2);
7932 if (is_truth_op)
7933 x = fold_convert (TREE_TYPE (new_var), x);
7934 ref = build_outer_var_ref (var, ctx);
7935 gimplify_assign (ref, x, &sub_seq);
7936 }
7937 }
7938
7939 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START),
7940 0);
7941 gimple_seq_add_stmt (stmt_seqp, stmt);
7942
7943 gimple_seq_add_seq (stmt_seqp, sub_seq);
7944
7945 if (clist)
7946 {
7947 gimple_seq_add_seq (stmt_seqp, *clist);
7948 *clist = NULL;
7949 }
7950
7951 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END),
7952 0);
7953 gimple_seq_add_stmt (stmt_seqp, stmt);
7954 }
7955
7956
/* Generate code to implement the COPYPRIVATE clauses.  For each
   COPYPRIVATE clause in CLAUSES, append to *SLIST (run by the thread
   that executed the single region) stores of the variable's value --
   or its address, when it cannot be passed by value -- into the
   sender record, and append to *RLIST (run by every other thread)
   the assignments that copy the broadcast value back into that
   thread's own variable.  CTX is the OMP context of the construct.  */

static void
lower_copyprivate_clauses (tree clauses, gimple_seq *slist, gimple_seq *rlist,
			   omp_context *ctx)
{
  tree c;

  for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
    {
      tree var, new_var, ref, x;
      bool by_ref;
      location_t clause_loc = OMP_CLAUSE_LOCATION (c);

      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYPRIVATE)
	continue;

      var = OMP_CLAUSE_DECL (c);
      /* BY_REF means the field in the communication record holds the
	 variable's address rather than its value.  */
      by_ref = use_pointer_for_field (var, NULL);

      /* Sending side: store the value (or address) into the sender
	 record field.  */
      ref = build_sender_ref (var, ctx);
      x = new_var = lookup_decl_in_outer_ctx (var, ctx);
      if (by_ref)
	{
	  x = build_fold_addr_expr_loc (clause_loc, new_var);
	  x = fold_convert_loc (clause_loc, TREE_TYPE (ref), x);
	}
      gimplify_assign (ref, x, slist);

      /* Receiving side: load the broadcast value back out of the
	 record, dereferencing as needed.  */
      ref = build_receiver_ref (var, false, ctx);
      if (by_ref)
	{
	  ref = fold_convert_loc (clause_loc,
				  build_pointer_type (TREE_TYPE (new_var)),
				  ref);
	  ref = build_fold_indirect_ref_loc (clause_loc, ref);
	}
      if (omp_privatize_by_reference (var))
	{
	  /* Reference-typed privatization: copy through one more level
	     of indirection on both sides.  */
	  ref = fold_convert_loc (clause_loc, TREE_TYPE (new_var), ref);
	  ref = build_simple_mem_ref_loc (clause_loc, ref);
	  new_var = build_simple_mem_ref_loc (clause_loc, new_var);
	}
      /* Let the frontend build the (possibly non-trivial) assignment.  */
      x = lang_hooks.decls.omp_clause_assign_op (c, new_var, ref);
      gimplify_and_add (x, rlist);
    }
}
8004
8005
/* Generate code to implement the clauses, FIRSTPRIVATE, COPYIN, LASTPRIVATE,
   and REDUCTION from the sender (aka parent) side.  Stores into the sender
   record are appended to *ILIST (executed before the region starts) and
   copy-back loads to *OLIST (executed after the region completes).  CTX is
   the OMP context of the construct owning CLAUSES.  */

static void
lower_send_clauses (tree clauses, gimple_seq *ilist, gimple_seq *olist,
		    omp_context *ctx)
{
  tree c, t;
  int ignored_looptemp = 0;
  bool is_taskloop = false;

  /* For taskloop, ignore first two _looptemp_ clauses, those are initialized
     by GOMP_taskloop.  */
  if (is_task_ctx (ctx) && gimple_omp_task_taskloop_p (ctx->stmt))
    {
      ignored_looptemp = 2;
      is_taskloop = true;
    }

  for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
    {
      tree val, ref, x, var;
      bool by_ref, do_in = false, do_out = false;
      location_t clause_loc = OMP_CLAUSE_LOCATION (c);

      /* Filter out clauses that need no sender-side code; `break'
	 means handle the clause, `continue' means skip it.  */
      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_PRIVATE:
	  if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
	    break;
	  continue;
	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_LASTPRIVATE:
	case OMP_CLAUSE_IN_REDUCTION:
	case OMP_CLAUSE__REDUCTEMP_:
	  break;
	case OMP_CLAUSE_REDUCTION:
	  /* Task reductions are handled elsewhere.  */
	  if (is_task_ctx (ctx) || OMP_CLAUSE_REDUCTION_TASK (c))
	    continue;
	  break;
	case OMP_CLAUSE_SHARED:
	  if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	    break;
	  continue;
	case OMP_CLAUSE__LOOPTEMP_:
	  if (ignored_looptemp)
	    {
	      ignored_looptemp--;
	      continue;
	    }
	  break;
	default:
	  continue;
	}

      val = OMP_CLAUSE_DECL (c);
      if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	   || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION)
	  && TREE_CODE (val) == MEM_REF)
	{
	  /* Array-section reduction: peel off the MEM_REF wrapping to
	     reach the underlying base decl.  */
	  val = TREE_OPERAND (val, 0);
	  if (TREE_CODE (val) == POINTER_PLUS_EXPR)
	    val = TREE_OPERAND (val, 0);
	  if (TREE_CODE (val) == INDIRECT_REF
	      || TREE_CODE (val) == ADDR_EXPR)
	    val = TREE_OPERAND (val, 0);
	  if (is_variable_sized (val))
	    continue;
	}

      /* For OMP_CLAUSE_SHARED_FIRSTPRIVATE, look beyond the
	 outer taskloop region.  */
      omp_context *ctx_for_o = ctx;
      if (is_taskloop
	  && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
	  && OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	ctx_for_o = ctx->outer;

      var = lookup_decl_in_outer_ctx (val, ctx_for_o);

      /* Globals generally need no marshalling; the exceptions are
	 COPYIN and certain pointer-typed task firstprivates.  */
      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYIN
	  && is_global_var (var)
	  && (val == OMP_CLAUSE_DECL (c)
	      || !is_task_ctx (ctx)
	      || (TREE_CODE (TREE_TYPE (val)) != POINTER_TYPE
		  && (TREE_CODE (TREE_TYPE (val)) != REFERENCE_TYPE
		      || (TREE_CODE (TREE_TYPE (TREE_TYPE (val)))
			  != POINTER_TYPE)))))
	continue;

      /* Member accesses go through the dummy var's DECL_VALUE_EXPR,
	 remapped into the outer context if needed.  */
      t = omp_member_access_dummy_var (var);
      if (t)
	{
	  var = DECL_VALUE_EXPR (var);
	  tree o = maybe_lookup_decl_in_outer_ctx (t, ctx_for_o);
	  if (o != t)
	    var = unshare_and_remap (var, t, o);
	  else
	    var = unshare_expr (var);
	}

      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
	{
	  /* Handle taskloop firstprivate/lastprivate, where the
	     lastprivate on GIMPLE_OMP_TASK is represented as
	     OMP_CLAUSE_SHARED_FIRSTPRIVATE.  */
	  tree f = lookup_sfield ((splay_tree_key) &DECL_UID (val), ctx);
	  x = omp_build_component_ref (ctx->sender_decl, f);
	  if (use_pointer_for_field (val, ctx))
	    var = build_fold_addr_expr (var);
	  gimplify_assign (x, var, ilist);
	  DECL_ABSTRACT_ORIGIN (f) = NULL;
	  continue;
	}

      if (((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
	    && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IN_REDUCTION)
	   || val == OMP_CLAUSE_DECL (c))
	  && is_variable_sized (val))
	continue;
      by_ref = use_pointer_for_field (val, NULL);

      /* Decide, per clause kind, whether data flows into the region
	 (DO_IN), back out of it (DO_OUT), or both.  */
      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_FIRSTPRIVATE:
	  if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c)
	      && !by_ref
	      && is_task_ctx (ctx))
	    suppress_warning (var);
	  do_in = true;
	  break;

	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE__LOOPTEMP_:
	case OMP_CLAUSE__REDUCTEMP_:
	  do_in = true;
	  break;

	case OMP_CLAUSE_LASTPRIVATE:
	  if (by_ref || omp_privatize_by_reference (val))
	    {
	      if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
		continue;
	      do_in = true;
	    }
	  else
	    {
	      do_out = true;
	      if (lang_hooks.decls.omp_private_outer_ref (val))
		do_in = true;
	    }
	  break;

	case OMP_CLAUSE_REDUCTION:
	case OMP_CLAUSE_IN_REDUCTION:
	  do_in = true;
	  if (val == OMP_CLAUSE_DECL (c))
	    {
	      if (is_task_ctx (ctx))
		by_ref = use_pointer_for_field (val, ctx);
	      else
		do_out = !(by_ref || omp_privatize_by_reference (val));
	    }
	  else
	    by_ref = TREE_CODE (TREE_TYPE (val)) == ARRAY_TYPE;
	  break;

	default:
	  gcc_unreachable ();
	}

      if (do_in)
	{
	  ref = build_sender_ref (val, ctx);
	  x = by_ref ? build_fold_addr_expr_loc (clause_loc, var) : var;
	  gimplify_assign (ref, x, ilist);
	  if (is_task_ctx (ctx))
	    DECL_ABSTRACT_ORIGIN (TREE_OPERAND (ref, 1)) = NULL;
	}

      if (do_out)
	{
	  ref = build_sender_ref (val, ctx);
	  gimplify_assign (var, ref, olist);
	}
    }
}
8195
/* Generate code to implement SHARED from the sender (aka parent)
   side.  This is trickier, since GIMPLE_OMP_PARALLEL_CLAUSES doesn't
   list things that got automatically shared.  Instead, walk the
   record type's fields: each field's DECL_ABSTRACT_ORIGIN points back
   at the shared variable it stands for.  Stores go to *ILIST (before
   the region), copy-backs to *OLIST (after it); CTX is the context
   whose record is being filled.  */

static void
lower_send_shared_vars (gimple_seq *ilist, gimple_seq *olist, omp_context *ctx)
{
  tree var, ovar, nvar, t, f, x, record_type;

  if (ctx->record_type == NULL)
    return;

  record_type = ctx->srecord_type ? ctx->srecord_type : ctx->record_type;
  for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
    {
      ovar = DECL_ABSTRACT_ORIGIN (f);
      /* Skip fields with no originating variable (or ones whose origin
	 is itself a field).  */
      if (!ovar || TREE_CODE (ovar) == FIELD_DECL)
	continue;

      nvar = maybe_lookup_decl (ovar, ctx);
      /* Only variables actually remapped in this context (and not
	 handled via the allocate map) need sending.  */
      if (!nvar
	  || !DECL_HAS_VALUE_EXPR_P (nvar)
	  || (ctx->allocate_map
	      && ctx->allocate_map->get (ovar)))
	continue;

      /* If CTX is a nested parallel directive.  Find the immediately
	 enclosing parallel or workshare construct that contains a
	 mapping for OVAR.  */
      var = lookup_decl_in_outer_ctx (ovar, ctx);

      t = omp_member_access_dummy_var (var);
      if (t)
	{
	  var = DECL_VALUE_EXPR (var);
	  tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
	  if (o != t)
	    var = unshare_and_remap (var, t, o);
	  else
	    var = unshare_expr (var);
	}

      if (use_pointer_for_field (ovar, ctx))
	{
	  x = build_sender_ref (ovar, ctx);
	  if (TREE_CODE (TREE_TYPE (f)) == ARRAY_TYPE
	      && TREE_TYPE (f) == TREE_TYPE (ovar))
	    {
	      gcc_assert (is_parallel_ctx (ctx)
			  && DECL_ARTIFICIAL (ovar));
	      /* _condtemp_ clause.  */
	      var = build_constructor (TREE_TYPE (x), NULL);
	    }
	  else
	    var = build_fold_addr_expr (var);
	  gimplify_assign (x, var, ilist);
	}
      else
	{
	  /* Sent by value: store before the region, and copy the
	     (possibly updated) value back afterwards.  */
	  x = build_sender_ref (ovar, ctx);
	  gimplify_assign (x, var, ilist);

	  if (!TREE_READONLY (var)
	      /* We don't need to receive a new reference to a result
	         or parm decl.  In fact we may not store to it as we will
		 invalidate any pending RSO and generate wrong gimple
		 during inlining.  */
	      && !((TREE_CODE (var) == RESULT_DECL
		    || TREE_CODE (var) == PARM_DECL)
		   && DECL_BY_REFERENCE (var)))
	    {
	      x = build_sender_ref (ovar, ctx);
	      gimplify_assign (var, x, olist);
	    }
	}
    }
}
8273
/* Emit an OpenACC head marker call, encapsulating the partitioning and
8275 other information that must be processed by the target compiler.
8276 Return the maximum number of dimensions the associated loop might
8277 be partitioned over. */
8278
static unsigned
lower_oacc_head_mark (location_t loc, tree ddvar, tree clauses,
		      gimple_seq *seq, omp_context *ctx)
{
  unsigned levels = 0;
  unsigned tag = 0;
  tree gang_static = NULL_TREE;
  auto_vec<tree, 5> args;

  /* Marker call: IFN_UNIQUE (HEAD_MARK, ddvar, levels, tag [, static]).  */
  args.quick_push (build_int_cst
		   (integer_type_node, IFN_UNIQUE_OACC_HEAD_MARK));
  args.quick_push (ddvar);
  /* Accumulate the partitioning flags (and level count) from the
     loop's clauses into TAG.  */
  for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_GANG:
	  tag |= OLF_DIM_GANG;
	  gang_static = OMP_CLAUSE_GANG_STATIC_EXPR (c);
	  /* static:* is represented by -1, and we can ignore it, as
	     scheduling is always static.  */
	  if (gang_static && integer_minus_onep (gang_static))
	    gang_static = NULL_TREE;
	  levels++;
	  break;

	case OMP_CLAUSE_WORKER:
	  tag |= OLF_DIM_WORKER;
	  levels++;
	  break;

	case OMP_CLAUSE_VECTOR:
	  tag |= OLF_DIM_VECTOR;
	  levels++;
	  break;

	case OMP_CLAUSE_SEQ:
	  tag |= OLF_SEQ;
	  break;

	case OMP_CLAUSE_AUTO:
	  tag |= OLF_AUTO;
	  break;

	case OMP_CLAUSE_INDEPENDENT:
	  tag |= OLF_INDEPENDENT;
	  break;

	case OMP_CLAUSE_TILE:
	  tag |= OLF_TILE;
	  break;

	case OMP_CLAUSE_REDUCTION:
	  tag |= OLF_REDUCTION;
	  break;

	default:
	  continue;
	}
    }

  if (gang_static)
    {
      if (DECL_P (gang_static))
	gang_static = build_outer_var_ref (gang_static, ctx);
      tag |= OLF_GANG_STATIC;
    }

  /* Sanity-check the enclosing offload region kind.  */
  omp_context *tgt = enclosing_target_ctx (ctx);
  if (!tgt || is_oacc_parallel_or_serial (tgt))
    ;
  else if (is_oacc_kernels (tgt))
    /* This loop handling is not used inside OpenACC 'kernels' regions.  */
    gcc_unreachable ();
  else if (is_oacc_kernels_decomposed_part (tgt))
    ;
  else
    gcc_unreachable ();

  /* In a parallel region, loops are implicitly INDEPENDENT.  */
  if (!tgt || is_oacc_parallel_or_serial (tgt))
    tag |= OLF_INDEPENDENT;

  /* Loops inside OpenACC 'kernels' decomposed parts' regions are expected to
     have an explicit 'seq' or 'independent' clause, and no 'auto' clause.  */
  if (tgt && is_oacc_kernels_decomposed_part (tgt))
    {
      gcc_assert (tag & (OLF_SEQ | OLF_INDEPENDENT));
      gcc_assert (!(tag & OLF_AUTO));
    }

  if (tag & OLF_TILE)
    /* Tiling could use all 3 levels.  */
    levels = 3;
  else
    {
      /* A loop lacking SEQ, GANG, WORKER and/or VECTOR could be AUTO.
	 Ensure at least one level, or 2 for possible auto
	 partitioning */
      bool maybe_auto = !(tag & (((GOMP_DIM_MASK (GOMP_DIM_MAX) - 1)
				  << OLF_DIM_BASE) | OLF_SEQ));

      if (levels < 1u + maybe_auto)
	levels = 1u + maybe_auto;
    }

  args.quick_push (build_int_cst (integer_type_node, levels));
  args.quick_push (build_int_cst (integer_type_node, tag));
  if (gang_static)
    args.quick_push (gang_static);

  gcall *call = gimple_build_call_internal_vec (IFN_UNIQUE, args);
  gimple_set_location (call, loc);
  gimple_set_lhs (call, ddvar);
  gimple_seq_add_stmt (seq, call);

  return levels;
}
8397
/* Emit an OpenACC loop head or tail marker to SEQ.  LEVEL is the
8399 partitioning level of the enclosed region. */
8400
8401 static void
lower_oacc_loop_marker(location_t loc,tree ddvar,bool head,tree tofollow,gimple_seq * seq)8402 lower_oacc_loop_marker (location_t loc, tree ddvar, bool head,
8403 tree tofollow, gimple_seq *seq)
8404 {
8405 int marker_kind = (head ? IFN_UNIQUE_OACC_HEAD_MARK
8406 : IFN_UNIQUE_OACC_TAIL_MARK);
8407 tree marker = build_int_cst (integer_type_node, marker_kind);
8408 int nargs = 2 + (tofollow != NULL_TREE);
8409 gcall *call = gimple_build_call_internal (IFN_UNIQUE, nargs,
8410 marker, ddvar, tofollow);
8411 gimple_set_location (call, loc);
8412 gimple_set_lhs (call, ddvar);
8413 gimple_seq_add_stmt (seq, call);
8414 }
8415
/* Generate the before and after OpenACC loop sequences.  CLAUSES are
   the loop clauses, from which we extract reductions.  Initialize
   HEAD and TAIL.  One fork/join pair is emitted per partitioning
   level, with HEAD built outside-in and TAIL inside-out so the two
   sequences nest correctly around the loop body.  PRIVATE_MARKER, if
   any, is attached to the innermost level.  */

static void
lower_oacc_head_tail (location_t loc, tree clauses, gcall *private_marker,
		      gimple_seq *head, gimple_seq *tail, omp_context *ctx)
{
  bool inner = false;
  /* DDVAR threads a fake data dependence through all the marker and
     fork/join calls so they stay ordered.  */
  tree ddvar = create_tmp_var (integer_type_node, ".data_dep");
  gimple_seq_add_stmt (head, gimple_build_assign (ddvar, integer_zero_node));

  unsigned count = lower_oacc_head_mark (loc, ddvar, clauses, head, ctx);

  if (private_marker)
    {
      gimple_set_location (private_marker, loc);
      gimple_call_set_lhs (private_marker, ddvar);
      gimple_call_set_arg (private_marker, 1, ddvar);
    }

  tree fork_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_FORK);
  tree join_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_JOIN);

  gcc_assert (count);
  /* COUNT counts down remaining levels; DONE counts up completed ones.  */
  for (unsigned done = 1; count; count--, done++)
    {
      gimple_seq fork_seq = NULL;
      gimple_seq join_seq = NULL;

      /* -1 placeholder: the actual partitioning axis is assigned later
	 by the oacc device lowering.  */
      tree place = build_int_cst (integer_type_node, -1);
      gcall *fork = gimple_build_call_internal (IFN_UNIQUE, 3,
						fork_kind, ddvar, place);
      gimple_set_location (fork, loc);
      gimple_set_lhs (fork, ddvar);

      gcall *join = gimple_build_call_internal (IFN_UNIQUE, 3,
						join_kind, ddvar, place);
      gimple_set_location (join, loc);
      gimple_set_lhs (join, ddvar);

      /* Mark the beginning of this level sequence.  */
      if (inner)
	lower_oacc_loop_marker (loc, ddvar, true,
				build_int_cst (integer_type_node, count),
				&fork_seq);
      lower_oacc_loop_marker (loc, ddvar, false,
			      build_int_cst (integer_type_node, done),
			      &join_seq);

      /* Reductions for this level go around the fork/join pair; the
	 private marker only accompanies the innermost level.  */
      lower_oacc_reductions (loc, clauses, place, inner,
			     fork, (count == 1) ? private_marker : NULL,
			     join, &fork_seq, &join_seq, ctx);

      /* Append this level to head.  */
      gimple_seq_add_seq (head, fork_seq);
      /* Prepend it to tail.  */
      gimple_seq_add_seq (&join_seq, *tail);
      *tail = join_seq;

      inner = true;
    }

  /* Mark the end of the sequence.  */
  lower_oacc_loop_marker (loc, ddvar, true, NULL_TREE, head);
  lower_oacc_loop_marker (loc, ddvar, false, NULL_TREE, tail);
}
8483
8484 /* If exceptions are enabled, wrap the statements in BODY in a MUST_NOT_THROW
8485 catch handler and return it. This prevents programs from violating the
8486 structured block semantics with throws. */
8487
8488 static gimple_seq
maybe_catch_exception(gimple_seq body)8489 maybe_catch_exception (gimple_seq body)
8490 {
8491 gimple *g;
8492 tree decl;
8493
8494 if (!flag_exceptions)
8495 return body;
8496
8497 if (lang_hooks.eh_protect_cleanup_actions != NULL)
8498 decl = lang_hooks.eh_protect_cleanup_actions ();
8499 else
8500 decl = builtin_decl_explicit (BUILT_IN_TRAP);
8501
8502 g = gimple_build_eh_must_not_throw (decl);
8503 g = gimple_build_try (body, gimple_seq_alloc_with_stmt (g),
8504 GIMPLE_TRY_CATCH);
8505
8506 return gimple_seq_alloc_with_stmt (g);
8507 }
8508
8509
8510 /* Routines to lower OMP directives into OMP-GIMPLE. */
8511
/* If ctx is a worksharing context inside of a cancellable parallel
   region and it isn't nowait, add lhs to its GIMPLE_OMP_RETURN
   and conditional branch to parallel's cancel_label to handle
   cancellation in the implicit barrier.  OMP_RETURN is the region's
   GIMPLE_OMP_RETURN; the test is appended to *BODY.  */

static void
maybe_add_implicit_barrier_cancel (omp_context *ctx, gimple *omp_return,
				   gimple_seq *body)
{
  gcc_assert (gimple_code (omp_return) == GIMPLE_OMP_RETURN);
  /* nowait regions have no implicit barrier, hence nothing to cancel.  */
  if (gimple_omp_return_nowait_p (omp_return))
    return;
  /* Walk outward looking for an enclosing cancellable parallel; only
     taskgroup and scope contexts may be skipped over.  */
  for (omp_context *outer = ctx->outer; outer; outer = outer->outer)
    if (gimple_code (outer->stmt) == GIMPLE_OMP_PARALLEL
	&& outer->cancellable)
      {
	/* The barrier's return value (a C bool) says whether the
	   parallel was cancelled; if so, jump to the cancel label.  */
	tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_CANCEL);
	tree c_bool_type = TREE_TYPE (TREE_TYPE (fndecl));
	tree lhs = create_tmp_var (c_bool_type);
	gimple_omp_return_set_lhs (omp_return, lhs);
	tree fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
	gimple *g = gimple_build_cond (NE_EXPR, lhs,
				       fold_convert (c_bool_type,
						     boolean_false_node),
				       outer->cancel_label, fallthru_label);
	gimple_seq_add_stmt (body, g);
	gimple_seq_add_stmt (body, gimple_build_label (fallthru_label));
      }
    else if (gimple_code (outer->stmt) != GIMPLE_OMP_TASKGROUP
	     && gimple_code (outer->stmt) != GIMPLE_OMP_SCOPE)
      return;
}
8544
8545 /* Find the first task_reduction or reduction clause or return NULL
8546 if there are none. */
8547
8548 static inline tree
omp_task_reductions_find_first(tree clauses,enum tree_code code,enum omp_clause_code ccode)8549 omp_task_reductions_find_first (tree clauses, enum tree_code code,
8550 enum omp_clause_code ccode)
8551 {
8552 while (1)
8553 {
8554 clauses = omp_find_clause (clauses, ccode);
8555 if (clauses == NULL_TREE)
8556 return NULL_TREE;
8557 if (ccode != OMP_CLAUSE_REDUCTION
8558 || code == OMP_TASKLOOP
8559 || OMP_CLAUSE_REDUCTION_TASK (clauses))
8560 return clauses;
8561 clauses = OMP_CLAUSE_CHAIN (clauses);
8562 }
8563 }
8564
8565 static void lower_omp_task_reductions (omp_context *, enum tree_code, tree,
8566 gimple_seq *, gimple_seq *);
8567
/* Lower the OpenMP sections directive in the current statement in GSI_P.
   CTX is the enclosing OMP context for the current statement.  The
   directive is replaced by a GIMPLE_BIND holding the privatization
   setup, the sections statement itself, the lowered section bodies,
   the continue/return markers and the cleanup sequences.  */

static void
lower_omp_sections (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree block, control;
  gimple_stmt_iterator tgsi;
  gomp_sections *stmt;
  gimple *t;
  gbind *new_stmt, *bind;
  gimple_seq ilist, dlist, olist, tred_dlist = NULL, clist = NULL, new_body;

  stmt = as_a <gomp_sections *> (gsi_stmt (*gsi_p));

  push_gimplify_context ();

  dlist = NULL;
  ilist = NULL;

  /* If there are task reductions, prepend a _reductemp_ clause whose
     temporary carries the reduction data to GOMP_sections*.  */
  tree rclauses
    = omp_task_reductions_find_first (gimple_omp_sections_clauses (stmt),
				      OMP_SECTIONS, OMP_CLAUSE_REDUCTION);
  tree rtmp = NULL_TREE;
  if (rclauses)
    {
      tree type = build_pointer_type (pointer_sized_int_node);
      tree temp = create_tmp_var (type);
      tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
      OMP_CLAUSE_DECL (c) = temp;
      OMP_CLAUSE_CHAIN (c) = gimple_omp_sections_clauses (stmt);
      gimple_omp_sections_set_clauses (stmt, c);
      lower_omp_task_reductions (ctx, OMP_SECTIONS,
				 gimple_omp_sections_clauses (stmt),
				 &ilist, &tred_dlist);
      rclauses = c;
      rtmp = make_ssa_name (type);
      gimple_seq_add_stmt (&ilist, gimple_build_assign (rtmp, temp));
    }

  tree *clauses_ptr = gimple_omp_sections_clauses_ptr (stmt);
  lower_lastprivate_conditional_clauses (clauses_ptr, ctx);

  lower_rec_input_clauses (gimple_omp_sections_clauses (stmt),
			   &ilist, &dlist, ctx, NULL);

  /* CONTROL is the switch variable driving the sections dispatch.  */
  control = create_tmp_var (unsigned_type_node, ".section");
  gimple_omp_sections_set_control (stmt, control);

  /* Lower each GIMPLE_OMP_SECTION body in place, splicing the lowered
     body after its section statement.  */
  new_body = gimple_omp_body (stmt);
  gimple_omp_set_body (stmt, NULL);
  tgsi = gsi_start (new_body);
  for (; !gsi_end_p (tgsi); gsi_next (&tgsi))
    {
      omp_context *sctx;
      gimple *sec_start;

      sec_start = gsi_stmt (tgsi);
      sctx = maybe_lookup_ctx (sec_start);
      gcc_assert (sctx);

      lower_omp (gimple_omp_body_ptr (sec_start), sctx);
      gsi_insert_seq_after (&tgsi, gimple_omp_body (sec_start),
			    GSI_CONTINUE_LINKING);
      gimple_omp_set_body (sec_start, NULL);

      if (gsi_one_before_end_p (tgsi))
	{
	  /* Lastprivate handling is attached to the final section.  */
	  gimple_seq l = NULL;
	  lower_lastprivate_clauses (gimple_omp_sections_clauses (stmt), NULL,
				     &ilist, &l, &clist, ctx);
	  gsi_insert_seq_after (&tgsi, l, GSI_CONTINUE_LINKING);
	  gimple_omp_section_set_last (sec_start);
	}

      gsi_insert_after (&tgsi, gimple_build_omp_return (false),
			GSI_CONTINUE_LINKING);
    }

  block = make_node (BLOCK);
  bind = gimple_build_bind (NULL, new_body, block);

  olist = NULL;
  lower_reduction_clauses (gimple_omp_sections_clauses (stmt), &olist,
			   &clist, ctx);
  if (clist)
    {
      /* Conditional-lastprivate updates must run under the same atomic
	 region used for reductions.  */
      tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START);
      gcall *g = gimple_build_call (fndecl, 0);
      gimple_seq_add_stmt (&olist, g);
      gimple_seq_add_seq (&olist, clist);
      fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END);
      g = gimple_build_call (fndecl, 0);
      gimple_seq_add_stmt (&olist, g);
    }

  /* Replace the directive by an enclosing bind that will receive the
     fully lowered body below.  */
  block = make_node (BLOCK);
  new_stmt = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, new_stmt, true);

  pop_gimplify_context (new_stmt);
  gimple_bind_append_vars (new_stmt, ctx->block_vars);
  BLOCK_VARS (block) = gimple_bind_vars (bind);
  if (BLOCK_VARS (block))
    TREE_USED (block) = 1;

  /* Assemble: setup, sections stmt, dispatch switch, body bind,
     continue, reductions, cancellation label, destructors, return.  */
  new_body = NULL;
  gimple_seq_add_seq (&new_body, ilist);
  gimple_seq_add_stmt (&new_body, stmt);
  gimple_seq_add_stmt (&new_body, gimple_build_omp_sections_switch ());
  gimple_seq_add_stmt (&new_body, bind);

  t = gimple_build_omp_continue (control, control);
  gimple_seq_add_stmt (&new_body, t);

  gimple_seq_add_seq (&new_body, olist);
  if (ctx->cancellable)
    gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
  gimple_seq_add_seq (&new_body, dlist);

  new_body = maybe_catch_exception (new_body);

  bool nowait = omp_find_clause (gimple_omp_sections_clauses (stmt),
				 OMP_CLAUSE_NOWAIT) != NULL_TREE;
  t = gimple_build_omp_return (nowait);
  gimple_seq_add_stmt (&new_body, t);
  gimple_seq_add_seq (&new_body, tred_dlist);
  maybe_add_implicit_barrier_cancel (ctx, t, &new_body);

  if (rclauses)
    OMP_CLAUSE_DECL (rclauses) = rtmp;

  gimple_bind_set_body (new_stmt, new_body);
}
8702
8703
8704 /* A subroutine of lower_omp_single. Expand the simple form of
8705 a GIMPLE_OMP_SINGLE, without a copyprivate clause:
8706
8707 if (GOMP_single_start ())
8708 BODY;
8709 [ GOMP_barrier (); ] -> unless 'nowait' is present.
8710
8711 FIXME. It may be better to delay expanding the logic of this until
8712 pass_expand_omp. The expanded logic may make the job more difficult
8713 to a synchronization analysis pass. */
8714
8715 static void
lower_omp_single_simple(gomp_single * single_stmt,gimple_seq * pre_p)8716 lower_omp_single_simple (gomp_single *single_stmt, gimple_seq *pre_p)
8717 {
8718 location_t loc = gimple_location (single_stmt);
8719 tree tlabel = create_artificial_label (loc);
8720 tree flabel = create_artificial_label (loc);
8721 gimple *call, *cond;
8722 tree lhs, decl;
8723
8724 decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_START);
8725 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (decl)));
8726 call = gimple_build_call (decl, 0);
8727 gimple_call_set_lhs (call, lhs);
8728 gimple_seq_add_stmt (pre_p, call);
8729
8730 cond = gimple_build_cond (EQ_EXPR, lhs,
8731 fold_convert_loc (loc, TREE_TYPE (lhs),
8732 boolean_true_node),
8733 tlabel, flabel);
8734 gimple_seq_add_stmt (pre_p, cond);
8735 gimple_seq_add_stmt (pre_p, gimple_build_label (tlabel));
8736 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
8737 gimple_seq_add_stmt (pre_p, gimple_build_label (flabel));
8738 }
8739
8740
8741 /* A subroutine of lower_omp_single. Expand the simple form of
8742 a GIMPLE_OMP_SINGLE, with a copyprivate clause:
8743
8744 #pragma omp single copyprivate (a, b, c)
8745
8746 Create a new structure to hold copies of 'a', 'b' and 'c' and emit:
8747
8748 {
8749 if ((copyout_p = GOMP_single_copy_start ()) == NULL)
8750 {
8751 BODY;
8752 copyout.a = a;
8753 copyout.b = b;
8754 copyout.c = c;
	GOMP_single_copy_end (&copyout);
8756 }
8757 else
8758 {
8759 a = copyout_p->a;
8760 b = copyout_p->b;
8761 c = copyout_p->c;
8762 }
8763 GOMP_barrier ();
8764 }
8765
8766 FIXME. It may be better to delay expanding the logic of this until
8767 pass_expand_omp. The expanded logic may make the job more difficult
8768 to a synchronization analysis pass. */
8769
8770 static void
lower_omp_single_copy(gomp_single * single_stmt,gimple_seq * pre_p,omp_context * ctx)8771 lower_omp_single_copy (gomp_single *single_stmt, gimple_seq *pre_p,
8772 omp_context *ctx)
8773 {
8774 tree ptr_type, t, l0, l1, l2, bfn_decl;
8775 gimple_seq copyin_seq;
8776 location_t loc = gimple_location (single_stmt);
8777
8778 ctx->sender_decl = create_tmp_var (ctx->record_type, ".omp_copy_o");
8779
8780 ptr_type = build_pointer_type (ctx->record_type);
8781 ctx->receiver_decl = create_tmp_var (ptr_type, ".omp_copy_i");
8782
8783 l0 = create_artificial_label (loc);
8784 l1 = create_artificial_label (loc);
8785 l2 = create_artificial_label (loc);
8786
8787 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_START);
8788 t = build_call_expr_loc (loc, bfn_decl, 0);
8789 t = fold_convert_loc (loc, ptr_type, t);
8790 gimplify_assign (ctx->receiver_decl, t, pre_p);
8791
8792 t = build2 (EQ_EXPR, boolean_type_node, ctx->receiver_decl,
8793 build_int_cst (ptr_type, 0));
8794 t = build3 (COND_EXPR, void_type_node, t,
8795 build_and_jump (&l0), build_and_jump (&l1));
8796 gimplify_and_add (t, pre_p);
8797
8798 gimple_seq_add_stmt (pre_p, gimple_build_label (l0));
8799
8800 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
8801
8802 copyin_seq = NULL;
8803 lower_copyprivate_clauses (gimple_omp_single_clauses (single_stmt), pre_p,
8804 ©in_seq, ctx);
8805
8806 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
8807 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_END);
8808 t = build_call_expr_loc (loc, bfn_decl, 1, t);
8809 gimplify_and_add (t, pre_p);
8810
8811 t = build_and_jump (&l2);
8812 gimplify_and_add (t, pre_p);
8813
8814 gimple_seq_add_stmt (pre_p, gimple_build_label (l1));
8815
8816 gimple_seq_add_seq (pre_p, copyin_seq);
8817
8818 gimple_seq_add_stmt (pre_p, gimple_build_label (l2));
8819 }
8820
8821
/* Expand code for an OpenMP single directive.  Replaces the
   GIMPLE_OMP_SINGLE at *GSI_P with a GIMPLE_BIND containing the lowered
   body, the clause setup/teardown code and the OMP return.  */

static void
lower_omp_single (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree block;
  gomp_single *single_stmt = as_a <gomp_single *> (gsi_stmt (*gsi_p));
  gbind *bind;
  gimple_seq bind_body, bind_body_tail = NULL, dlist;

  push_gimplify_context ();

  block = make_node (BLOCK);
  bind = gimple_build_bind (NULL, NULL, block);
  /* The bind replaces the single statement in the IL; the single statement
     itself is re-added inside the bind body below.  */
  gsi_replace (gsi_p, bind, true);
  bind_body = NULL;
  dlist = NULL;
  /* Emit privatization setup into BIND_BODY and the matching destructor
     code into DLIST, to be appended after the body.  */
  lower_rec_input_clauses (gimple_omp_single_clauses (single_stmt),
			   &bind_body, &dlist, ctx, NULL);
  lower_omp (gimple_omp_body_ptr (single_stmt), ctx);

  gimple_seq_add_stmt (&bind_body, single_stmt);

  /* CTX->record_type is only set when there is a copyprivate clause;
     that form needs the copy-out record, otherwise use the simple
     GOMP_single_start based expansion.  */
  if (ctx->record_type)
    lower_omp_single_copy (single_stmt, &bind_body, ctx);
  else
    lower_omp_single_simple (single_stmt, &bind_body);

  /* The body has been inlined into BIND_BODY; detach it from the stmt.  */
  gimple_omp_set_body (single_stmt, NULL);

  gimple_seq_add_seq (&bind_body, dlist);

  bind_body = maybe_catch_exception (bind_body);

  bool nowait = omp_find_clause (gimple_omp_single_clauses (single_stmt),
				 OMP_CLAUSE_NOWAIT) != NULL_TREE;
  gimple *g = gimple_build_omp_return (nowait);
  gimple_seq_add_stmt (&bind_body_tail, g);
  maybe_add_implicit_barrier_cancel (ctx, g, &bind_body_tail);
  if (ctx->record_type)
    {
      /* Clobber the copy-out record after the OMP return so its stack
	 storage can be reused.  */
      gimple_stmt_iterator gsi = gsi_start (bind_body_tail);
      tree clobber = build_clobber (ctx->record_type);
      gsi_insert_after (&gsi, gimple_build_assign (ctx->sender_decl,
						   clobber), GSI_SAME_STMT);
    }
  gimple_seq_add_seq (&bind_body, bind_body_tail);
  gimple_bind_set_body (bind, bind_body);

  pop_gimplify_context (bind);

  gimple_bind_append_vars (bind, ctx->block_vars);
  BLOCK_VARS (block) = ctx->block_vars;
  if (BLOCK_VARS (block))
    TREE_USED (block) = 1;
}
8878
8879
/* Lower code for an OMP scope directive.  Replaces the statement at *GSI_P
   with a GIMPLE_BIND containing the lowered body plus clause setup,
   reduction merging and the OMP return.  */

static void
lower_omp_scope (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree block;
  gimple *scope_stmt = gsi_stmt (*gsi_p);
  gbind *bind;
  gimple_seq bind_body, bind_body_tail = NULL, dlist;
  gimple_seq tred_dlist = NULL;

  push_gimplify_context ();

  block = make_node (BLOCK);
  bind = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, bind, true);
  bind_body = NULL;
  dlist = NULL;

  /* Handle reductions with the task modifier: they need a _REDUCTEMP_
     temporary and a GOMP_scope_start call instead of plain setup.  */
  tree rclauses
    = omp_task_reductions_find_first (gimple_omp_scope_clauses (scope_stmt),
				      OMP_SCOPE, OMP_CLAUSE_REDUCTION);
  if (rclauses)
    {
      tree type = build_pointer_type (pointer_sized_int_node);
      tree temp = create_tmp_var (type);
      tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
      OMP_CLAUSE_DECL (c) = temp;
      /* Prepend the _REDUCTEMP_ clause to the scope's clause chain so the
	 task-reduction lowering below can find and fill it.  */
      OMP_CLAUSE_CHAIN (c) = gimple_omp_scope_clauses (scope_stmt);
      gimple_omp_scope_set_clauses (scope_stmt, c);
      lower_omp_task_reductions (ctx, OMP_SCOPE,
				 gimple_omp_scope_clauses (scope_stmt),
				 &bind_body, &tred_dlist);
      rclauses = c;
      tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_SCOPE_START);
      gimple *stmt = gimple_build_call (fndecl, 1, temp);
      gimple_seq_add_stmt (&bind_body, stmt);
    }

  /* Privatization setup goes into BIND_BODY, destructor code into DLIST.  */
  lower_rec_input_clauses (gimple_omp_scope_clauses (scope_stmt),
			   &bind_body, &dlist, ctx, NULL);
  lower_omp (gimple_omp_body_ptr (scope_stmt), ctx);

  gimple_seq_add_stmt (&bind_body, scope_stmt);

  gimple_seq_add_seq (&bind_body, gimple_omp_body (scope_stmt));

  gimple_omp_set_body (scope_stmt, NULL);

  /* Non-task reductions; if any need atomic merging (CLIST non-empty),
     wrap them in GOMP_atomic_start / GOMP_atomic_end.  */
  gimple_seq clist = NULL;
  lower_reduction_clauses (gimple_omp_scope_clauses (scope_stmt),
			   &bind_body, &clist, ctx);
  if (clist)
    {
      tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START);
      gcall *g = gimple_build_call (fndecl, 0);
      gimple_seq_add_stmt (&bind_body, g);
      gimple_seq_add_seq (&bind_body, clist);
      fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END);
      g = gimple_build_call (fndecl, 0);
      gimple_seq_add_stmt (&bind_body, g);
    }

  gimple_seq_add_seq (&bind_body, dlist);

  bind_body = maybe_catch_exception (bind_body);

  bool nowait = omp_find_clause (gimple_omp_scope_clauses (scope_stmt),
				 OMP_CLAUSE_NOWAIT) != NULL_TREE;
  gimple *g = gimple_build_omp_return (nowait);
  gimple_seq_add_stmt (&bind_body_tail, g);
  /* Task-reduction teardown runs after the OMP return.  */
  gimple_seq_add_seq (&bind_body_tail, tred_dlist);
  maybe_add_implicit_barrier_cancel (ctx, g, &bind_body_tail);
  if (ctx->record_type)
    {
      /* Clobber the sender record after the return so its storage can be
	 reused.  */
      gimple_stmt_iterator gsi = gsi_start (bind_body_tail);
      tree clobber = build_clobber (ctx->record_type);
      gsi_insert_after (&gsi, gimple_build_assign (ctx->sender_decl,
						   clobber), GSI_SAME_STMT);
    }
  gimple_seq_add_seq (&bind_body, bind_body_tail);

  gimple_bind_set_body (bind, bind_body);

  pop_gimplify_context (bind);

  gimple_bind_append_vars (bind, ctx->block_vars);
  BLOCK_VARS (block) = ctx->block_vars;
  if (BLOCK_VARS (block))
    TREE_USED (block) = 1;
}
/* Expand code for an OpenMP master or masked directive.  Both lower to

     if (omp_get_thread_num () == FILTER) BODY;

   where FILTER is 0 for master and the filter clause expression (default 0)
   for masked.  */

static void
lower_omp_master (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree block, lab = NULL, x, bfn_decl;
  gimple *stmt = gsi_stmt (*gsi_p);
  gbind *bind;
  location_t loc = gimple_location (stmt);
  gimple_seq tseq;
  /* Thread number that may execute the region; 0 unless a masked
     directive provides a filter clause.  */
  tree filter = integer_zero_node;

  push_gimplify_context ();

  if (gimple_code (stmt) == GIMPLE_OMP_MASKED)
    {
      filter = omp_find_clause (gimple_omp_masked_clauses (stmt),
				OMP_CLAUSE_FILTER);
      if (filter)
	filter = fold_convert (integer_type_node,
			       OMP_CLAUSE_FILTER_EXPR (filter));
      else
	filter = integer_zero_node;
    }
  block = make_node (BLOCK);
  bind = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, bind, true);
  gimple_bind_add_stmt (bind, stmt);

  /* if (omp_get_thread_num () != FILTER) goto LAB, skipping the body.  */
  bfn_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
  x = build_call_expr_loc (loc, bfn_decl, 0);
  x = build2 (EQ_EXPR, boolean_type_node, x, filter);
  x = build3 (COND_EXPR, void_type_node, x, NULL, build_and_jump (&lab));
  tseq = NULL;
  gimplify_and_add (x, &tseq);
  gimple_bind_add_seq (bind, tseq);

  lower_omp (gimple_omp_body_ptr (stmt), ctx);
  gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
  gimple_bind_add_seq (bind, gimple_omp_body (stmt));
  gimple_omp_set_body (stmt, NULL);

  gimple_bind_add_stmt (bind, gimple_build_label (lab));

  /* Master/masked has no implicit barrier, hence the "nowait" return.  */
  gimple_bind_add_stmt (bind, gimple_build_omp_return (true));

  pop_gimplify_context (bind);

  gimple_bind_append_vars (bind, ctx->block_vars);
  BLOCK_VARS (block) = ctx->block_vars;
}
9022
9023 /* Helper function for lower_omp_task_reductions. For a specific PASS
9024 find out the current clause it should be processed, or return false
9025 if all have been processed already. */
9026
9027 static inline bool
omp_task_reduction_iterate(int pass,enum tree_code code,enum omp_clause_code ccode,tree * c,tree * decl,tree * type,tree * next)9028 omp_task_reduction_iterate (int pass, enum tree_code code,
9029 enum omp_clause_code ccode, tree *c, tree *decl,
9030 tree *type, tree *next)
9031 {
9032 for (; *c; *c = omp_find_clause (OMP_CLAUSE_CHAIN (*c), ccode))
9033 {
9034 if (ccode == OMP_CLAUSE_REDUCTION
9035 && code != OMP_TASKLOOP
9036 && !OMP_CLAUSE_REDUCTION_TASK (*c))
9037 continue;
9038 *decl = OMP_CLAUSE_DECL (*c);
9039 *type = TREE_TYPE (*decl);
9040 if (TREE_CODE (*decl) == MEM_REF)
9041 {
9042 if (pass != 1)
9043 continue;
9044 }
9045 else
9046 {
9047 if (omp_privatize_by_reference (*decl))
9048 *type = TREE_TYPE (*type);
9049 if (pass != (!TREE_CONSTANT (TYPE_SIZE_UNIT (*type))))
9050 continue;
9051 }
9052 *next = omp_find_clause (OMP_CLAUSE_CHAIN (*c), ccode);
9053 return true;
9054 }
9055 *decl = NULL_TREE;
9056 *type = NULL_TREE;
9057 *next = NULL_TREE;
9058 return false;
9059 }
9060
/* Lower task_reduction and reduction clauses (the latter unless CODE is
   OMP_TASKGROUP only with task modifier).  Register mapping of those in
   START sequence and reducing them and unregister them in the END sequence.
   START receives the code that builds and registers the descriptor array
   with the runtime; END receives the per-thread reduction/destruction loop
   and the unregistration call.  */

static void
lower_omp_task_reductions (omp_context *ctx, enum tree_code code, tree clauses,
			   gimple_seq *start, gimple_seq *end)
{
  /* Taskgroup reductions come in as OMP_CLAUSE_TASK_REDUCTION; all other
     constructs attach them to OMP_CLAUSE_REDUCTION clauses.  */
  enum omp_clause_code ccode
    = (code == OMP_TASKGROUP
       ? OMP_CLAUSE_TASK_REDUCTION : OMP_CLAUSE_REDUCTION);
  tree cancellable = NULL_TREE;
  clauses = omp_task_reductions_find_first (clauses, code, ccode);
  if (clauses == NULL_TREE)
    return;
  if (code == OMP_FOR || code == OMP_SECTIONS || code == OMP_SCOPE)
    {
      /* Detect nesting (possibly through taskgroups/scopes) inside a
	 cancellable parallel; the END code then has to handle cancelled
	 implicit barriers.  */
      for (omp_context *outer = ctx->outer; outer; outer = outer->outer)
	if (gimple_code (outer->stmt) == GIMPLE_OMP_PARALLEL
	    && outer->cancellable)
	  {
	    cancellable = error_mark_node;
	    break;
	  }
	else if (gimple_code (outer->stmt) != GIMPLE_OMP_TASKGROUP
		 && gimple_code (outer->stmt) != GIMPLE_OMP_SCOPE)
	  break;
    }
  /* Build a record holding, per reduction, the private copy followed by a
     bool flag (set when that copy has been initialized).  */
  tree record_type = lang_hooks.types.make_type (RECORD_TYPE);
  tree *last = &TYPE_FIELDS (record_type);
  unsigned cnt = 0;
  if (cancellable)
    {
      /* For cancellable constructs reserve a pointer and an int field at
	 the start of the record.  */
      tree field = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
			       ptr_type_node);
      tree ifield = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
				integer_type_node);
      *last = field;
      DECL_CHAIN (field) = ifield;
      last = &DECL_CHAIN (ifield);
      DECL_CONTEXT (field) = record_type;
      if (TYPE_ALIGN (record_type) < DECL_ALIGN (field))
	SET_TYPE_ALIGN (record_type, DECL_ALIGN (field));
      DECL_CONTEXT (ifield) = record_type;
      if (TYPE_ALIGN (record_type) < DECL_ALIGN (ifield))
	SET_TYPE_ALIGN (record_type, DECL_ALIGN (ifield));
    }
  /* Two passes: pass 0 lays out fixed-size reductions, pass 1 the
     MEM_REF/variable-sized ones (see omp_task_reduction_iterate).  */
  for (int pass = 0; pass < 2; pass++)
    {
      tree decl, type, next;
      for (tree c = clauses;
	   omp_task_reduction_iterate (pass, code, ccode,
				       &c, &decl, &type, &next); c = next)
	{
	  ++cnt;
	  tree new_type = type;
	  if (ctx->outer)
	    new_type = remap_type (type, &ctx->outer->cb);
	  tree field
	    = build_decl (OMP_CLAUSE_LOCATION (c), FIELD_DECL,
			  DECL_P (decl) ? DECL_NAME (decl) : NULL_TREE,
			  new_type);
	  if (DECL_P (decl) && type == TREE_TYPE (decl))
	    {
	      SET_DECL_ALIGN (field, DECL_ALIGN (decl));
	      DECL_USER_ALIGN (field) = DECL_USER_ALIGN (decl);
	      TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (decl);
	    }
	  else
	    SET_DECL_ALIGN (field, TYPE_ALIGN (type));
	  DECL_CONTEXT (field) = record_type;
	  if (TYPE_ALIGN (record_type) < DECL_ALIGN (field))
	    SET_TYPE_ALIGN (record_type, DECL_ALIGN (field));
	  *last = field;
	  last = &DECL_CHAIN (field);
	  /* The "initialized" flag that follows each private copy.  */
	  tree bfield
	    = build_decl (OMP_CLAUSE_LOCATION (c), FIELD_DECL, NULL_TREE,
			  boolean_type_node);
	  DECL_CONTEXT (bfield) = record_type;
	  if (TYPE_ALIGN (record_type) < DECL_ALIGN (bfield))
	    SET_TYPE_ALIGN (record_type, DECL_ALIGN (bfield));
	  *last = bfield;
	  last = &DECL_CHAIN (bfield);
	}
    }
  *last = NULL_TREE;
  layout_type (record_type);

  /* Build up an array which registers with the runtime all the reductions
     and deregisters them at the end.  Format documented in libgomp/task.c.  */
  tree atype = build_array_type_nelts (pointer_sized_int_node, 7 + cnt * 3);
  tree avar = create_tmp_var_raw (atype);
  gimple_add_tmp_var (avar);
  TREE_ADDRESSABLE (avar) = 1;
  /* avar[0] = number of reductions.  */
  tree r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_zero_node,
		   NULL_TREE, NULL_TREE);
  tree t = build_int_cst (pointer_sized_int_node, cnt);
  gimple_seq_add_stmt (start, gimple_build_assign (r, t));
  gimple_seq_add_seq seq = NULL;
  tree sz = fold_convert (pointer_sized_int_node,
			  TYPE_SIZE_UNIT (record_type));
  /* Round the per-thread record size up to a multiple of CACHESZ,
     presumably to keep the per-thread chunks on separate cache lines.
     NOTE(review): the cache line size is hard-coded as 64 here.  */
  int cachesz = 64;
  sz = fold_build2 (PLUS_EXPR, pointer_sized_int_node, sz,
		    build_int_cst (pointer_sized_int_node, cachesz - 1));
  sz = fold_build2 (BIT_AND_EXPR, pointer_sized_int_node, sz,
		    build_int_cst (pointer_sized_int_node, ~(cachesz - 1)));
  ctx->task_reductions.create (1 + cnt);
  ctx->task_reduction_map = new hash_map<tree, unsigned>;
  /* Remember compile-time-constant sizes/offsets; NULL_TREE means the
     value is only known at run time.  */
  ctx->task_reductions.quick_push (TREE_CODE (sz) == INTEGER_CST
				   ? sz : NULL_TREE);
  sz = force_gimple_operand (sz, &seq, true, NULL_TREE);
  gimple_seq_add_seq (start, seq);
  /* avar[1] = rounded per-thread chunk size.  */
  r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_one_node,
	      NULL_TREE, NULL_TREE);
  gimple_seq_add_stmt (start, gimple_build_assign (r, sz));
  /* avar[2] = required alignment.  */
  r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (2),
	      NULL_TREE, NULL_TREE);
  t = build_int_cst (pointer_sized_int_node,
		     MAX (TYPE_ALIGN_UNIT (record_type), (unsigned) cachesz));
  gimple_seq_add_stmt (start, gimple_build_assign (r, t));
  /* avar[3] = -1, avar[4] = 0: slots filled in by the runtime.  */
  r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (3),
	      NULL_TREE, NULL_TREE);
  t = build_int_cst (pointer_sized_int_node, -1);
  gimple_seq_add_stmt (start, gimple_build_assign (r, t));
  r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (4),
	      NULL_TREE, NULL_TREE);
  t = build_int_cst (pointer_sized_int_node, 0);
  gimple_seq_add_stmt (start, gimple_build_assign (r, t));

  /* In end, build a loop that iterates from 0 to < omp_get_num_threads ()
     and for each task reduction checks a bool right after the private variable
     within that thread's chunk; if the bool is clear, it hasn't been
     initialized and thus isn't going to be reduced nor destructed, otherwise
     reduce and destruct it.  */
  tree idx = create_tmp_var (size_type_node);
  gimple_seq_add_stmt (end, gimple_build_assign (idx, size_zero_node));
  tree num_thr_sz = create_tmp_var (size_type_node);
  tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
  tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
  tree lab3 = NULL_TREE, lab7 = NULL_TREE;
  gimple *g;
  if (code == OMP_FOR || code == OMP_SECTIONS || code == OMP_SCOPE)
    {
      /* For worksharing constructs or scope, only perform it in the master
	 thread, with the exception of cancelled implicit barriers - then only
	 handle the current thread.  */
      tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
      t = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
      tree thr_num = create_tmp_var (integer_type_node);
      g = gimple_build_call (t, 0);
      gimple_call_set_lhs (g, thr_num);
      gimple_seq_add_stmt (end, g);
      if (cancellable)
	{
	  tree c;
	  tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
	  tree lab6 = create_artificial_label (UNKNOWN_LOCATION);
	  lab3 = create_artificial_label (UNKNOWN_LOCATION);
	  if (code == OMP_FOR)
	    c = gimple_omp_for_clauses (ctx->stmt);
	  else if (code == OMP_SECTIONS)
	    c = gimple_omp_sections_clauses (ctx->stmt);
	  else /* if (code == OMP_SCOPE) */
	    c = gimple_omp_scope_clauses (ctx->stmt);
	  /* The _REDUCTEMP_ decl is non-zero when the construct has been
	     cancelled; from here on CANCELLABLE is that decl.  */
	  c = OMP_CLAUSE_DECL (omp_find_clause (c, OMP_CLAUSE__REDUCTEMP_));
	  cancellable = c;
	  g = gimple_build_cond (NE_EXPR, c, build_zero_cst (TREE_TYPE (c)),
				 lab5, lab6);
	  gimple_seq_add_stmt (end, g);
	  gimple_seq_add_stmt (end, gimple_build_label (lab5));
	  /* Cancelled: loop over just this thread's chunk
	     (idx = thr_num; idx < thr_num + 1).  */
	  g = gimple_build_assign (idx, NOP_EXPR, thr_num);
	  gimple_seq_add_stmt (end, g);
	  g = gimple_build_assign (num_thr_sz, PLUS_EXPR, idx,
				   build_one_cst (TREE_TYPE (idx)));
	  gimple_seq_add_stmt (end, g);
	  gimple_seq_add_stmt (end, gimple_build_goto (lab3));
	  gimple_seq_add_stmt (end, gimple_build_label (lab6));
	}
      /* Non-master threads skip the whole reduction loop.  */
      g = gimple_build_cond (NE_EXPR, thr_num, integer_zero_node, lab2, lab4);
      gimple_seq_add_stmt (end, g);
      gimple_seq_add_stmt (end, gimple_build_label (lab4));
    }
  if (code != OMP_PARALLEL)
    {
      t = builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS);
      tree num_thr = create_tmp_var (integer_type_node);
      g = gimple_build_call (t, 0);
      gimple_call_set_lhs (g, num_thr);
      gimple_seq_add_stmt (end, g);
      g = gimple_build_assign (num_thr_sz, NOP_EXPR, num_thr);
      gimple_seq_add_stmt (end, g);
      if (cancellable)
	gimple_seq_add_stmt (end, gimple_build_label (lab3));
    }
  else
    {
      /* For parallel, the thread count was stashed through the
	 _REDUCTEMP_ clause.  */
      tree c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
				OMP_CLAUSE__REDUCTEMP_);
      t = fold_convert (pointer_sized_int_node, OMP_CLAUSE_DECL (c));
      t = fold_convert (size_type_node, t);
      gimplify_assign (num_thr_sz, t, end);
    }
  /* DATA walks the runtime-allocated per-thread chunks, starting at the
     base address the runtime stored into avar[2].  */
  t = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (2),
	      NULL_TREE, NULL_TREE);
  tree data = create_tmp_var (pointer_sized_int_node);
  gimple_seq_add_stmt (end, gimple_build_assign (data, t));
  if (code == OMP_TASKLOOP)
    {
      /* For taskloop the runtime may not have allocated anything; skip
	 the loop entirely when DATA is NULL.  */
      lab7 = create_artificial_label (UNKNOWN_LOCATION);
      g = gimple_build_cond (NE_EXPR, data,
			     build_zero_cst (pointer_sized_int_node),
			     lab1, lab7);
      gimple_seq_add_stmt (end, g);
    }
  gimple_seq_add_stmt (end, gimple_build_label (lab1));
  tree ptr;
  if (TREE_CODE (TYPE_SIZE_UNIT (record_type)) == INTEGER_CST)
    ptr = create_tmp_var (build_pointer_type (record_type));
  else
    ptr = create_tmp_var (ptr_type_node);
  gimple_seq_add_stmt (end, gimple_build_assign (ptr, NOP_EXPR, data));

  tree field = TYPE_FIELDS (record_type);
  cnt = 0;
  if (cancellable)
    /* Skip the two bookkeeping fields added for cancellable constructs.  */
    field = DECL_CHAIN (DECL_CHAIN (field));
  /* Walk the clauses in the same two-pass order used for layout, so FIELD
     stays in sync with the clause being processed.  */
  for (int pass = 0; pass < 2; pass++)
    {
      tree decl, type, next;
      for (tree c = clauses;
	   omp_task_reduction_iterate (pass, code, ccode,
				       &c, &decl, &type, &next); c = next)
	{
	  tree var = decl, ref;
	  if (TREE_CODE (decl) == MEM_REF)
	    {
	      /* Array section: dig out the base variable behind the
		 MEM_REF / POINTER_PLUS_EXPR / ADDR_EXPR wrapping.  */
	      var = TREE_OPERAND (var, 0);
	      if (TREE_CODE (var) == POINTER_PLUS_EXPR)
		var = TREE_OPERAND (var, 0);
	      tree v = var;
	      if (TREE_CODE (var) == ADDR_EXPR)
		var = TREE_OPERAND (var, 0);
	      else if (TREE_CODE (var) == INDIRECT_REF)
		var = TREE_OPERAND (var, 0);
	      tree orig_var = var;
	      if (is_variable_sized (var))
		{
		  gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
		  var = DECL_VALUE_EXPR (var);
		  gcc_assert (TREE_CODE (var) == INDIRECT_REF);
		  var = TREE_OPERAND (var, 0);
		  gcc_assert (DECL_P (var));
		}
	      t = ref = maybe_lookup_decl_in_outer_ctx (var, ctx);
	      if (orig_var != var)
		gcc_assert (TREE_CODE (v) == ADDR_EXPR);
	      else if (TREE_CODE (v) == ADDR_EXPR)
		t = build_fold_addr_expr (t);
	      else if (TREE_CODE (v) == INDIRECT_REF)
		t = build_fold_indirect_ref (t);
	      if (TREE_CODE (TREE_OPERAND (decl, 0)) == POINTER_PLUS_EXPR)
		{
		  tree b = TREE_OPERAND (TREE_OPERAND (decl, 0), 1);
		  b = maybe_lookup_decl_in_outer_ctx (b, ctx);
		  t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, b);
		}
	      if (!integer_zerop (TREE_OPERAND (decl, 1)))
		t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t,
				 fold_convert (size_type_node,
					       TREE_OPERAND (decl, 1)));
	    }
	  else
	    {
	      t = ref = maybe_lookup_decl_in_outer_ctx (var, ctx);
	      if (!omp_privatize_by_reference (decl))
		t = build_fold_addr_expr (t);
	    }
	  /* avar[7 + cnt*3] = address of the original variable.  */
	  t = fold_convert (pointer_sized_int_node, t);
	  seq = NULL;
	  t = force_gimple_operand (t, &seq, true, NULL_TREE);
	  gimple_seq_add_seq (start, seq);
	  r = build4 (ARRAY_REF, pointer_sized_int_node, avar,
		      size_int (7 + cnt * 3), NULL_TREE, NULL_TREE);
	  gimple_seq_add_stmt (start, gimple_build_assign (r, t));
	  /* avar[7 + cnt*3 + 1] = offset of the private copy within the
	     record.  */
	  t = unshare_expr (byte_position (field));
	  t = fold_convert (pointer_sized_int_node, t);
	  ctx->task_reduction_map->put (c, cnt);
	  ctx->task_reductions.quick_push (TREE_CODE (t) == INTEGER_CST
					   ? t : NULL_TREE);
	  seq = NULL;
	  t = force_gimple_operand (t, &seq, true, NULL_TREE);
	  gimple_seq_add_seq (start, seq);
	  r = build4 (ARRAY_REF, pointer_sized_int_node, avar,
		      size_int (7 + cnt * 3 + 1), NULL_TREE, NULL_TREE);
	  gimple_seq_add_stmt (start, gimple_build_assign (r, t));

	  /* Build the "was this private copy initialized" test (COND)
	     from the bool field trailing the private copy.  */
	  tree bfield = DECL_CHAIN (field);
	  tree cond;
	  if (code == OMP_PARALLEL
	      || code == OMP_FOR
	      || code == OMP_SECTIONS
	      || code == OMP_SCOPE)
	    /* In parallel, worksharing or scope all threads unconditionally
	       initialize all their task reduction private variables.  */
	    cond = boolean_true_node;
	  else if (TREE_TYPE (ptr) == ptr_type_node)
	    {
	      cond = build2 (POINTER_PLUS_EXPR, ptr_type_node, ptr,
			     unshare_expr (byte_position (bfield)));
	      seq = NULL;
	      cond = force_gimple_operand (cond, &seq, true, NULL_TREE);
	      gimple_seq_add_seq (end, seq);
	      tree pbool = build_pointer_type (TREE_TYPE (bfield));
	      cond = build2 (MEM_REF, TREE_TYPE (bfield), cond,
			     build_int_cst (pbool, 0));
	    }
	  else
	    cond = build3 (COMPONENT_REF, TREE_TYPE (bfield),
			   build_simple_mem_ref (ptr), bfield, NULL_TREE);
	  tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
	  tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
	  tree condv = create_tmp_var (boolean_type_node);
	  gimple_seq_add_stmt (end, gimple_build_assign (condv, cond));
	  g = gimple_build_cond (NE_EXPR, condv, boolean_false_node,
				 lab3, lab4);
	  gimple_seq_add_stmt (end, g);
	  gimple_seq_add_stmt (end, gimple_build_label (lab3));
	  if (cancellable && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) == NULL_TREE)
	    {
	      /* If this reduction doesn't need destruction and parallel
		 has been cancelled, there is nothing to do for this
		 reduction, so jump around the merge operation.  */
	      tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
	      g = gimple_build_cond (NE_EXPR, cancellable,
				     build_zero_cst (TREE_TYPE (cancellable)),
				     lab4, lab5);
	      gimple_seq_add_stmt (end, g);
	      gimple_seq_add_stmt (end, gimple_build_label (lab5));
	    }

	  /* NEW_VAR is the private copy inside the current thread's
	     chunk.  */
	  tree new_var;
	  if (TREE_TYPE (ptr) == ptr_type_node)
	    {
	      new_var = build2 (POINTER_PLUS_EXPR, ptr_type_node, ptr,
				unshare_expr (byte_position (field)));
	      seq = NULL;
	      new_var = force_gimple_operand (new_var, &seq, true, NULL_TREE);
	      gimple_seq_add_seq (end, seq);
	      tree pbool = build_pointer_type (TREE_TYPE (field));
	      new_var = build2 (MEM_REF, TREE_TYPE (field), new_var,
				build_int_cst (pbool, 0));
	    }
	  else
	    new_var = build3 (COMPONENT_REF, TREE_TYPE (field),
			      build_simple_mem_ref (ptr), field, NULL_TREE);

	  enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
	  if (TREE_CODE (decl) != MEM_REF
	      && omp_privatize_by_reference (decl))
	    ref = build_simple_mem_ref (ref);
	  /* reduction(-:var) sums up the partial results, so it acts
	     identically to reduction(+:var).  */
	  if (rcode == MINUS_EXPR)
	    rcode = PLUS_EXPR;
	  if (TREE_CODE (decl) == MEM_REF)
	    {
	      /* Array section: emit an element-by-element merge loop.  */
	      tree type = TREE_TYPE (new_var);
	      tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
	      tree i = create_tmp_var (TREE_TYPE (v));
	      tree ptype = build_pointer_type (TREE_TYPE (type));
	      if (DECL_P (v))
		{
		  v = maybe_lookup_decl_in_outer_ctx (v, ctx);
		  tree vv = create_tmp_var (TREE_TYPE (v));
		  gimplify_assign (vv, v, start);
		  v = vv;
		}
	      ref = build4 (ARRAY_REF, pointer_sized_int_node, avar,
			    size_int (7 + cnt * 3), NULL_TREE, NULL_TREE);
	      new_var = build_fold_addr_expr (new_var);
	      new_var = fold_convert (ptype, new_var);
	      ref = fold_convert (ptype, ref);
	      tree m = create_tmp_var (ptype);
	      gimplify_assign (m, new_var, end);
	      new_var = m;
	      m = create_tmp_var (ptype);
	      gimplify_assign (m, ref, end);
	      ref = m;
	      gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), end);
	      tree body = create_artificial_label (UNKNOWN_LOCATION);
	      tree endl = create_artificial_label (UNKNOWN_LOCATION);
	      gimple_seq_add_stmt (end, gimple_build_label (body));
	      tree priv = build_simple_mem_ref (new_var);
	      tree out = build_simple_mem_ref (ref);
	      if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
		{
		  /* User-defined reduction: splice in the combiner with
		     the placeholders bound to OUT and PRIV.  */
		  tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
		  tree decl_placeholder
		    = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
		  tree lab6 = NULL_TREE;
		  if (cancellable)
		    {
		      /* If this reduction needs destruction and parallel
			 has been cancelled, jump around the merge operation
			 to the destruction.  */
		      tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
		      lab6 = create_artificial_label (UNKNOWN_LOCATION);
		      tree zero = build_zero_cst (TREE_TYPE (cancellable));
		      g = gimple_build_cond (NE_EXPR, cancellable, zero,
					     lab6, lab5);
		      gimple_seq_add_stmt (end, g);
		      gimple_seq_add_stmt (end, gimple_build_label (lab5));
		    }
		  SET_DECL_VALUE_EXPR (placeholder, out);
		  DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
		  SET_DECL_VALUE_EXPR (decl_placeholder, priv);
		  DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
		  lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
		  gimple_seq_add_seq (end,
				      OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
		  OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
		    {
		      OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
		      OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
		    }
		  if (cancellable)
		    gimple_seq_add_stmt (end, gimple_build_label (lab6));
		  tree x = lang_hooks.decls.omp_clause_dtor (c, priv);
		  if (x)
		    {
		      gimple_seq tseq = NULL;
		      gimplify_stmt (&x, &tseq);
		      gimple_seq_add_seq (end, tseq);
		    }
		}
	      else
		{
		  /* Built-in operator: out = out RCODE priv.  */
		  tree x = build2 (rcode, TREE_TYPE (out), out, priv);
		  out = unshare_expr (out);
		  gimplify_assign (out, x, end);
		}
	      /* Advance both element pointers and the index, loop while
		 i <= max index.  */
	      gimple *g
		= gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
				       TYPE_SIZE_UNIT (TREE_TYPE (type)));
	      gimple_seq_add_stmt (end, g);
	      g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
				       TYPE_SIZE_UNIT (TREE_TYPE (type)));
	      gimple_seq_add_stmt (end, g);
	      g = gimple_build_assign (i, PLUS_EXPR, i,
				       build_int_cst (TREE_TYPE (i), 1));
	      gimple_seq_add_stmt (end, g);
	      g = gimple_build_cond (LE_EXPR, i, v, body, endl);
	      gimple_seq_add_stmt (end, g);
	      gimple_seq_add_stmt (end, gimple_build_label (endl));
	    }
	  else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	    {
	      /* Scalar user-defined reduction.  */
	      tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
	      tree oldv = NULL_TREE;
	      tree lab6 = NULL_TREE;
	      if (cancellable)
		{
		  /* If this reduction needs destruction and parallel
		     has been cancelled, jump around the merge operation
		     to the destruction.  */
		  tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
		  lab6 = create_artificial_label (UNKNOWN_LOCATION);
		  tree zero = build_zero_cst (TREE_TYPE (cancellable));
		  g = gimple_build_cond (NE_EXPR, cancellable, zero,
					 lab6, lab5);
		  gimple_seq_add_stmt (end, g);
		  gimple_seq_add_stmt (end, gimple_build_label (lab5));
		}
	      if (omp_privatize_by_reference (decl)
		  && !useless_type_conversion_p (TREE_TYPE (placeholder),
						 TREE_TYPE (ref)))
		ref = build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c), ref);
	      ref = build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c), ref);
	      tree refv = create_tmp_var (TREE_TYPE (ref));
	      gimplify_assign (refv, ref, end);
	      ref = build_simple_mem_ref_loc (OMP_CLAUSE_LOCATION (c), refv);
	      SET_DECL_VALUE_EXPR (placeholder, ref);
	      DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
	      /* Temporarily redirect the privatized decl to the record
		 field; restore its previous value expr afterwards.  */
	      tree d = maybe_lookup_decl (decl, ctx);
	      gcc_assert (d);
	      if (DECL_HAS_VALUE_EXPR_P (d))
		oldv = DECL_VALUE_EXPR (d);
	      if (omp_privatize_by_reference (var))
		{
		  tree v = fold_convert (TREE_TYPE (d),
					 build_fold_addr_expr (new_var));
		  SET_DECL_VALUE_EXPR (d, v);
		}
	      else
		SET_DECL_VALUE_EXPR (d, new_var);
	      DECL_HAS_VALUE_EXPR_P (d) = 1;
	      lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
	      if (oldv)
		SET_DECL_VALUE_EXPR (d, oldv);
	      else
		{
		  SET_DECL_VALUE_EXPR (d, NULL_TREE);
		  DECL_HAS_VALUE_EXPR_P (d) = 0;
		}
	      gimple_seq_add_seq (end, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
	      OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
	      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
		OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
	      if (cancellable)
		gimple_seq_add_stmt (end, gimple_build_label (lab6));
	      tree x = lang_hooks.decls.omp_clause_dtor (c, new_var);
	      if (x)
		{
		  gimple_seq tseq = NULL;
		  gimplify_stmt (&x, &tseq);
		  gimple_seq_add_seq (end, tseq);
		}
	    }
	  else
	    {
	      /* Scalar built-in operator: ref = ref RCODE new_var.  */
	      tree x = build2 (rcode, TREE_TYPE (ref), ref, new_var);
	      ref = unshare_expr (ref);
	      gimplify_assign (ref, x, end);
	    }
	  gimple_seq_add_stmt (end, gimple_build_label (lab4));
	  ++cnt;
	  field = DECL_CHAIN (bfield);
	}
    }

  if (code == OMP_TASKGROUP)
    {
      t = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_REGISTER);
      g = gimple_build_call (t, 1, build_fold_addr_expr (avar));
      gimple_seq_add_stmt (start, g);
    }
  else
    {
      /* For non-taskgroup constructs, publish the descriptor array through
	 the _REDUCTEMP_ clause instead of an explicit register call.  */
      tree c;
      if (code == OMP_FOR)
	c = gimple_omp_for_clauses (ctx->stmt);
      else if (code == OMP_SECTIONS)
	c = gimple_omp_sections_clauses (ctx->stmt);
      else if (code == OMP_SCOPE)
	c = gimple_omp_scope_clauses (ctx->stmt);
      else
	c = gimple_omp_taskreg_clauses (ctx->stmt);
      c = omp_find_clause (c, OMP_CLAUSE__REDUCTEMP_);
      t = fold_convert (TREE_TYPE (OMP_CLAUSE_DECL (c)),
			build_fold_addr_expr (avar));
      gimplify_assign (OMP_CLAUSE_DECL (c), t, start);
    }

  /* Advance to the next thread's chunk and iterate.  */
  gimple_seq_add_stmt (end, gimple_build_assign (data, PLUS_EXPR, data, sz));
  gimple_seq_add_stmt (end, gimple_build_assign (idx, PLUS_EXPR, idx,
						 size_one_node));
  g = gimple_build_cond (NE_EXPR, idx, num_thr_sz, lab1, lab2);
  gimple_seq_add_stmt (end, g);
  gimple_seq_add_stmt (end, gimple_build_label (lab2));
  if (code == OMP_FOR || code == OMP_SECTIONS || code == OMP_SCOPE)
    {
      enum built_in_function bfn
	= BUILT_IN_GOMP_WORKSHARE_TASK_REDUCTION_UNREGISTER;
      t = builtin_decl_explicit (bfn);
      tree c_bool_type = TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (t)));
      tree arg;
      if (cancellable)
	{
	  arg = create_tmp_var (c_bool_type);
	  gimple_seq_add_stmt (end, gimple_build_assign (arg, NOP_EXPR,
							 cancellable));
	}
      else
	arg = build_int_cst (c_bool_type, 0);
      g = gimple_build_call (t, 1, arg);
    }
  else
    {
      t = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_UNREGISTER);
      g = gimple_build_call (t, 1, build_fold_addr_expr (avar));
    }
  gimple_seq_add_stmt (end, g);
  if (lab7)
    gimple_seq_add_stmt (end, gimple_build_label (lab7));
  /* Clobber the descriptor array; volatile so the store isn't removed.  */
  t = build_constructor (atype, NULL);
  TREE_THIS_VOLATILE (t) = 1;
  gimple_seq_add_stmt (end, gimple_build_assign (avar, t));
}
9650
/* Expand code for an OpenMP taskgroup directive.  Wraps the lowered body
   in a bind containing GOMP_taskgroup_start, optional task-reduction
   registration, and the taskgroup statement's own end handling.  */

static void
lower_omp_taskgroup (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  gimple *stmt = gsi_stmt (*gsi_p);
  gcall *x;
  gbind *bind;
  /* Receives the task-reduction teardown code, appended after the body.  */
  gimple_seq dseq = NULL;
  tree block = make_node (BLOCK);

  bind = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, bind, true);
  gimple_bind_add_stmt (bind, stmt);

  push_gimplify_context ();

  /* GOMP_taskgroup_start () must precede the reduction registration.  */
  x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_START),
			 0);
  gimple_bind_add_stmt (bind, x);

  lower_omp_task_reductions (ctx, OMP_TASKGROUP,
			     gimple_omp_taskgroup_clauses (stmt),
			     gimple_bind_body_ptr (bind), &dseq);

  lower_omp (gimple_omp_body_ptr (stmt), ctx);
  gimple_bind_add_seq (bind, gimple_omp_body (stmt));
  gimple_omp_set_body (stmt, NULL);

  gimple_bind_add_seq (bind, dseq);

  pop_gimplify_context (bind);

  gimple_bind_append_vars (bind, ctx->block_vars);
  BLOCK_VARS (block) = ctx->block_vars;
}
9687
9688
9689 /* Fold the OMP_ORDERED_CLAUSES for the OMP_ORDERED in STMT if possible. */
9690
9691 static void
lower_omp_ordered_clauses(gimple_stmt_iterator * gsi_p,gomp_ordered * ord_stmt,omp_context * ctx)9692 lower_omp_ordered_clauses (gimple_stmt_iterator *gsi_p, gomp_ordered *ord_stmt,
9693 omp_context *ctx)
9694 {
9695 struct omp_for_data fd;
9696 if (!ctx->outer || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR)
9697 return;
9698
9699 unsigned int len = gimple_omp_for_collapse (ctx->outer->stmt);
9700 struct omp_for_data_loop *loops = XALLOCAVEC (struct omp_for_data_loop, len);
9701 omp_extract_for_data (as_a <gomp_for *> (ctx->outer->stmt), &fd, loops);
9702 if (!fd.ordered)
9703 return;
9704
9705 tree *list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
9706 tree c = gimple_omp_ordered_clauses (ord_stmt);
9707 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
9708 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
9709 {
9710 /* Merge depend clauses from multiple adjacent
9711 #pragma omp ordered depend(sink:...) constructs
9712 into one #pragma omp ordered depend(sink:...), so that
9713 we can optimize them together. */
9714 gimple_stmt_iterator gsi = *gsi_p;
9715 gsi_next (&gsi);
9716 while (!gsi_end_p (gsi))
9717 {
9718 gimple *stmt = gsi_stmt (gsi);
9719 if (is_gimple_debug (stmt)
9720 || gimple_code (stmt) == GIMPLE_NOP)
9721 {
9722 gsi_next (&gsi);
9723 continue;
9724 }
9725 if (gimple_code (stmt) != GIMPLE_OMP_ORDERED)
9726 break;
9727 gomp_ordered *ord_stmt2 = as_a <gomp_ordered *> (stmt);
9728 c = gimple_omp_ordered_clauses (ord_stmt2);
9729 if (c == NULL_TREE
9730 || OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND
9731 || OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
9732 break;
9733 while (*list_p)
9734 list_p = &OMP_CLAUSE_CHAIN (*list_p);
9735 *list_p = c;
9736 gsi_remove (&gsi, true);
9737 }
9738 }
9739
9740 /* Canonicalize sink dependence clauses into one folded clause if
9741 possible.
9742
9743 The basic algorithm is to create a sink vector whose first
9744 element is the GCD of all the first elements, and whose remaining
9745 elements are the minimum of the subsequent columns.
9746
9747 We ignore dependence vectors whose first element is zero because
9748 such dependencies are known to be executed by the same thread.
9749
9750 We take into account the direction of the loop, so a minimum
9751 becomes a maximum if the loop is iterating forwards. We also
9752 ignore sink clauses where the loop direction is unknown, or where
9753 the offsets are clearly invalid because they are not a multiple
9754 of the loop increment.
9755
9756 For example:
9757
9758 #pragma omp for ordered(2)
9759 for (i=0; i < N; ++i)
9760 for (j=0; j < M; ++j)
9761 {
9762 #pragma omp ordered \
9763 depend(sink:i-8,j-2) \
9764 depend(sink:i,j-1) \ // Completely ignored because i+0.
9765 depend(sink:i-4,j-3) \
9766 depend(sink:i-6,j-4)
9767 #pragma omp ordered depend(source)
9768 }
9769
9770 Folded clause is:
9771
9772 depend(sink:-gcd(8,4,6),-min(2,3,4))
9773 -or-
9774 depend(sink:-2,-2)
9775 */
9776
9777 /* FIXME: Computing GCD's where the first element is zero is
9778 non-trivial in the presence of collapsed loops. Do this later. */
9779 if (fd.collapse > 1)
9780 return;
9781
9782 wide_int *folded_deps = XALLOCAVEC (wide_int, 2 * len - 1);
9783
9784 /* wide_int is not a POD so it must be default-constructed. */
9785 for (unsigned i = 0; i != 2 * len - 1; ++i)
9786 new (static_cast<void*>(folded_deps + i)) wide_int ();
9787
9788 tree folded_dep = NULL_TREE;
9789 /* TRUE if the first dimension's offset is negative. */
9790 bool neg_offset_p = false;
9791
9792 list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
9793 unsigned int i;
9794 while ((c = *list_p) != NULL)
9795 {
9796 bool remove = false;
9797
9798 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND);
9799 if (OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
9800 goto next_ordered_clause;
9801
9802 tree vec;
9803 for (vec = OMP_CLAUSE_DECL (c), i = 0;
9804 vec && TREE_CODE (vec) == TREE_LIST;
9805 vec = TREE_CHAIN (vec), ++i)
9806 {
9807 gcc_assert (i < len);
9808
9809 /* omp_extract_for_data has canonicalized the condition. */
9810 gcc_assert (fd.loops[i].cond_code == LT_EXPR
9811 || fd.loops[i].cond_code == GT_EXPR);
9812 bool forward = fd.loops[i].cond_code == LT_EXPR;
9813 bool maybe_lexically_later = true;
9814
9815 /* While the committee makes up its mind, bail if we have any
9816 non-constant steps. */
9817 if (TREE_CODE (fd.loops[i].step) != INTEGER_CST)
9818 goto lower_omp_ordered_ret;
9819
9820 tree itype = TREE_TYPE (TREE_VALUE (vec));
9821 if (POINTER_TYPE_P (itype))
9822 itype = sizetype;
9823 wide_int offset = wide_int::from (wi::to_wide (TREE_PURPOSE (vec)),
9824 TYPE_PRECISION (itype),
9825 TYPE_SIGN (itype));
9826
9827 /* Ignore invalid offsets that are not multiples of the step. */
9828 if (!wi::multiple_of_p (wi::abs (offset),
9829 wi::abs (wi::to_wide (fd.loops[i].step)),
9830 UNSIGNED))
9831 {
9832 warning_at (OMP_CLAUSE_LOCATION (c), 0,
9833 "ignoring sink clause with offset that is not "
9834 "a multiple of the loop step");
9835 remove = true;
9836 goto next_ordered_clause;
9837 }
9838
9839 /* Calculate the first dimension. The first dimension of
9840 the folded dependency vector is the GCD of the first
9841 elements, while ignoring any first elements whose offset
9842 is 0. */
9843 if (i == 0)
9844 {
9845 /* Ignore dependence vectors whose first dimension is 0. */
9846 if (offset == 0)
9847 {
9848 remove = true;
9849 goto next_ordered_clause;
9850 }
9851 else
9852 {
9853 if (!TYPE_UNSIGNED (itype) && (forward ^ wi::neg_p (offset)))
9854 {
9855 error_at (OMP_CLAUSE_LOCATION (c),
9856 "first offset must be in opposite direction "
9857 "of loop iterations");
9858 goto lower_omp_ordered_ret;
9859 }
9860 if (forward)
9861 offset = -offset;
9862 neg_offset_p = forward;
9863 /* Initialize the first time around. */
9864 if (folded_dep == NULL_TREE)
9865 {
9866 folded_dep = c;
9867 folded_deps[0] = offset;
9868 }
9869 else
9870 folded_deps[0] = wi::gcd (folded_deps[0],
9871 offset, UNSIGNED);
9872 }
9873 }
9874 /* Calculate minimum for the remaining dimensions. */
9875 else
9876 {
9877 folded_deps[len + i - 1] = offset;
9878 if (folded_dep == c)
9879 folded_deps[i] = offset;
9880 else if (maybe_lexically_later
9881 && !wi::eq_p (folded_deps[i], offset))
9882 {
9883 if (forward ^ wi::gts_p (folded_deps[i], offset))
9884 {
9885 unsigned int j;
9886 folded_dep = c;
9887 for (j = 1; j <= i; j++)
9888 folded_deps[j] = folded_deps[len + j - 1];
9889 }
9890 else
9891 maybe_lexically_later = false;
9892 }
9893 }
9894 }
9895 gcc_assert (i == len);
9896
9897 remove = true;
9898
9899 next_ordered_clause:
9900 if (remove)
9901 *list_p = OMP_CLAUSE_CHAIN (c);
9902 else
9903 list_p = &OMP_CLAUSE_CHAIN (c);
9904 }
9905
9906 if (folded_dep)
9907 {
9908 if (neg_offset_p)
9909 folded_deps[0] = -folded_deps[0];
9910
9911 tree itype = TREE_TYPE (TREE_VALUE (OMP_CLAUSE_DECL (folded_dep)));
9912 if (POINTER_TYPE_P (itype))
9913 itype = sizetype;
9914
9915 TREE_PURPOSE (OMP_CLAUSE_DECL (folded_dep))
9916 = wide_int_to_tree (itype, folded_deps[0]);
9917 OMP_CLAUSE_CHAIN (folded_dep) = gimple_omp_ordered_clauses (ord_stmt);
9918 *gimple_omp_ordered_clauses_ptr (ord_stmt) = folded_dep;
9919 }
9920
9921 lower_omp_ordered_ret:
9922
9923 /* Ordered without clauses is #pragma omp threads, while we want
9924 a nop instead if we remove all clauses. */
9925 if (gimple_omp_ordered_clauses (ord_stmt) == NULL_TREE)
9926 gsi_replace (gsi_p, gimple_build_nop (), true);
9927 }
9928
9929
9930 /* Expand code for an OpenMP ordered directive. */
9931
static void
lower_omp_ordered (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree block;
  gimple *stmt = gsi_stmt (*gsi_p), *g;
  gomp_ordered *ord_stmt = as_a <gomp_ordered *> (stmt);
  gcall *x;
  gbind *bind;
  /* 'ordered simd' is lowered to internal fns so the vectorizer can
     handle it; 'ordered threads' (or no clause) uses libgomp calls.  */
  bool simd = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
			       OMP_CLAUSE_SIMD);
  /* FIXME: this should check presence of OMP_CLAUSE__SIMT_ on the enclosing
     loop.  */
  bool maybe_simt
    = simd && omp_maybe_offloaded_ctx (ctx) && omp_max_simt_vf () > 1;
  bool threads = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
				  OMP_CLAUSE_THREADS);

  if (omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
		       OMP_CLAUSE_DEPEND))
    {
      /* FIXME: This needs to be moved to the expansion to verify various
	 conditions only testable on cfg with dominators computed, and also
	 all the depend clauses to be merged still might need to be available
	 for the runtime checks.  */
      if (0)
	lower_omp_ordered_clauses (gsi_p, ord_stmt, ctx);
      return;
    }

  push_gimplify_context ();

  /* Wrap the ordered statement in a new GIMPLE_BIND replacing it in the
     statement stream.  */
  block = make_node (BLOCK);
  bind = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, bind, true);
  gimple_bind_add_stmt (bind, stmt);

  if (simd)
    {
      /* THREADS is passed through as the internal fn's argument.  */
      x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_START, 1,
				      build_int_cst (NULL_TREE, threads));
      cfun->has_simduid_loops = true;
    }
  else
    x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_START),
			   0);
  gimple_bind_add_stmt (bind, x);

  /* For SIMT targets, emit a loop around the body that runs the lanes
     one at a time: COUNTER holds this lane's number, and each pass of
     the loop lets the lane selected by IFN_GOMP_SIMT_ORDERED_PRED
     execute the body while the rest branch to TEST.  */
  tree counter = NULL_TREE, test = NULL_TREE, body = NULL_TREE;
  if (maybe_simt)
    {
      counter = create_tmp_var (integer_type_node);
      g = gimple_build_call_internal (IFN_GOMP_SIMT_LANE, 0);
      gimple_call_set_lhs (g, counter);
      gimple_bind_add_stmt (bind, g);

      body = create_artificial_label (UNKNOWN_LOCATION);
      test = create_artificial_label (UNKNOWN_LOCATION);
      gimple_bind_add_stmt (bind, gimple_build_label (body));

      tree simt_pred = create_tmp_var (integer_type_node);
      g = gimple_build_call_internal (IFN_GOMP_SIMT_ORDERED_PRED, 1, counter);
      gimple_call_set_lhs (g, simt_pred);
      gimple_bind_add_stmt (bind, g);

      tree t = create_artificial_label (UNKNOWN_LOCATION);
      g = gimple_build_cond (EQ_EXPR, simt_pred, integer_zero_node, t, test);
      gimple_bind_add_stmt (bind, g);

      gimple_bind_add_stmt (bind, gimple_build_label (t));
    }
  lower_omp (gimple_omp_body_ptr (stmt), ctx);
  gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
  gimple_bind_add_seq (bind, gimple_omp_body (stmt));
  gimple_omp_set_body (stmt, NULL);

  if (maybe_simt)
    {
      /* Loop epilogue: decrement COUNTER and loop back to BODY while any
	 lane still has a non-negative counter (IFN_GOMP_SIMT_VOTE_ANY).  */
      gimple_bind_add_stmt (bind, gimple_build_label (test));
      g = gimple_build_assign (counter, MINUS_EXPR, counter, integer_one_node);
      gimple_bind_add_stmt (bind, g);

      tree c = build2 (GE_EXPR, boolean_type_node, counter, integer_zero_node);
      tree nonneg = create_tmp_var (integer_type_node);
      gimple_seq tseq = NULL;
      gimplify_assign (nonneg, fold_convert (integer_type_node, c), &tseq);
      gimple_bind_add_seq (bind, tseq);

      g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY, 1, nonneg);
      gimple_call_set_lhs (g, nonneg);
      gimple_bind_add_stmt (bind, g);

      tree end = create_artificial_label (UNKNOWN_LOCATION);
      g = gimple_build_cond (NE_EXPR, nonneg, integer_zero_node, body, end);
      gimple_bind_add_stmt (bind, g);

      gimple_bind_add_stmt (bind, gimple_build_label (end));
    }
  if (simd)
    x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_END, 1,
				    build_int_cst (NULL_TREE, threads));
  else
    x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_END),
			   0);
  gimple_bind_add_stmt (bind, x);

  gimple_bind_add_stmt (bind, gimple_build_omp_return (true));

  pop_gimplify_context (bind);

  gimple_bind_append_vars (bind, ctx->block_vars);
  BLOCK_VARS (block) = gimple_bind_vars (bind);
}
10044
10045
10046 /* Expand code for an OpenMP scan directive and the structured block
10047 before the scan directive. */
10048
10049 static void
lower_omp_scan(gimple_stmt_iterator * gsi_p,omp_context * ctx)10050 lower_omp_scan (gimple_stmt_iterator *gsi_p, omp_context *ctx)
10051 {
10052 gimple *stmt = gsi_stmt (*gsi_p);
10053 bool has_clauses
10054 = gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt)) != NULL;
10055 tree lane = NULL_TREE;
10056 gimple_seq before = NULL;
10057 omp_context *octx = ctx->outer;
10058 gcc_assert (octx);
10059 if (octx->scan_exclusive && !has_clauses)
10060 {
10061 gimple_stmt_iterator gsi2 = *gsi_p;
10062 gsi_next (&gsi2);
10063 gimple *stmt2 = gsi_stmt (gsi2);
10064 /* For exclusive scan, swap GIMPLE_OMP_SCAN without clauses
10065 with following GIMPLE_OMP_SCAN with clauses, so that input_phase,
10066 the one with exclusive clause(s), comes first. */
10067 if (stmt2
10068 && gimple_code (stmt2) == GIMPLE_OMP_SCAN
10069 && gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt2)) != NULL)
10070 {
10071 gsi_remove (gsi_p, false);
10072 gsi_insert_after (gsi_p, stmt, GSI_SAME_STMT);
10073 ctx = maybe_lookup_ctx (stmt2);
10074 gcc_assert (ctx);
10075 lower_omp_scan (gsi_p, ctx);
10076 return;
10077 }
10078 }
10079
10080 bool input_phase = has_clauses ^ octx->scan_inclusive;
10081 bool is_simd = (gimple_code (octx->stmt) == GIMPLE_OMP_FOR
10082 && gimple_omp_for_kind (octx->stmt) == GF_OMP_FOR_KIND_SIMD);
10083 bool is_for = (gimple_code (octx->stmt) == GIMPLE_OMP_FOR
10084 && gimple_omp_for_kind (octx->stmt) == GF_OMP_FOR_KIND_FOR
10085 && !gimple_omp_for_combined_p (octx->stmt));
10086 bool is_for_simd = is_simd && gimple_omp_for_combined_into_p (octx->stmt);
10087 if (is_for_simd && octx->for_simd_scan_phase)
10088 is_simd = false;
10089 if (is_simd)
10090 if (tree c = omp_find_clause (gimple_omp_for_clauses (octx->stmt),
10091 OMP_CLAUSE__SIMDUID_))
10092 {
10093 tree uid = OMP_CLAUSE__SIMDUID__DECL (c);
10094 lane = create_tmp_var (unsigned_type_node);
10095 tree t = build_int_cst (integer_type_node,
10096 input_phase ? 1
10097 : octx->scan_inclusive ? 2 : 3);
10098 gimple *g
10099 = gimple_build_call_internal (IFN_GOMP_SIMD_LANE, 2, uid, t);
10100 gimple_call_set_lhs (g, lane);
10101 gimple_seq_add_stmt (&before, g);
10102 }
10103
10104 if (is_simd || is_for)
10105 {
10106 for (tree c = gimple_omp_for_clauses (octx->stmt);
10107 c; c = OMP_CLAUSE_CHAIN (c))
10108 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
10109 && OMP_CLAUSE_REDUCTION_INSCAN (c))
10110 {
10111 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
10112 tree var = OMP_CLAUSE_DECL (c);
10113 tree new_var = lookup_decl (var, octx);
10114 tree val = new_var;
10115 tree var2 = NULL_TREE;
10116 tree var3 = NULL_TREE;
10117 tree var4 = NULL_TREE;
10118 tree lane0 = NULL_TREE;
10119 tree new_vard = new_var;
10120 if (omp_privatize_by_reference (var))
10121 {
10122 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
10123 val = new_var;
10124 }
10125 if (DECL_HAS_VALUE_EXPR_P (new_vard))
10126 {
10127 val = DECL_VALUE_EXPR (new_vard);
10128 if (new_vard != new_var)
10129 {
10130 gcc_assert (TREE_CODE (val) == ADDR_EXPR);
10131 val = TREE_OPERAND (val, 0);
10132 }
10133 if (TREE_CODE (val) == ARRAY_REF
10134 && VAR_P (TREE_OPERAND (val, 0)))
10135 {
10136 tree v = TREE_OPERAND (val, 0);
10137 if (lookup_attribute ("omp simd array",
10138 DECL_ATTRIBUTES (v)))
10139 {
10140 val = unshare_expr (val);
10141 lane0 = TREE_OPERAND (val, 1);
10142 TREE_OPERAND (val, 1) = lane;
10143 var2 = lookup_decl (v, octx);
10144 if (octx->scan_exclusive)
10145 var4 = lookup_decl (var2, octx);
10146 if (input_phase
10147 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
10148 var3 = maybe_lookup_decl (var4 ? var4 : var2, octx);
10149 if (!input_phase)
10150 {
10151 var2 = build4 (ARRAY_REF, TREE_TYPE (val),
10152 var2, lane, NULL_TREE, NULL_TREE);
10153 TREE_THIS_NOTRAP (var2) = 1;
10154 if (octx->scan_exclusive)
10155 {
10156 var4 = build4 (ARRAY_REF, TREE_TYPE (val),
10157 var4, lane, NULL_TREE,
10158 NULL_TREE);
10159 TREE_THIS_NOTRAP (var4) = 1;
10160 }
10161 }
10162 else
10163 var2 = val;
10164 }
10165 }
10166 gcc_assert (var2);
10167 }
10168 else
10169 {
10170 var2 = build_outer_var_ref (var, octx);
10171 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
10172 {
10173 var3 = maybe_lookup_decl (new_vard, octx);
10174 if (var3 == new_vard || var3 == NULL_TREE)
10175 var3 = NULL_TREE;
10176 else if (is_simd && octx->scan_exclusive && !input_phase)
10177 {
10178 var4 = maybe_lookup_decl (var3, octx);
10179 if (var4 == var3 || var4 == NULL_TREE)
10180 {
10181 if (TREE_ADDRESSABLE (TREE_TYPE (new_var)))
10182 {
10183 var4 = var3;
10184 var3 = NULL_TREE;
10185 }
10186 else
10187 var4 = NULL_TREE;
10188 }
10189 }
10190 }
10191 if (is_simd
10192 && octx->scan_exclusive
10193 && !input_phase
10194 && var4 == NULL_TREE)
10195 var4 = create_tmp_var (TREE_TYPE (val));
10196 }
10197 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
10198 {
10199 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
10200 if (input_phase)
10201 {
10202 if (var3)
10203 {
10204 /* If we've added a separate identity element
10205 variable, copy it over into val. */
10206 tree x = lang_hooks.decls.omp_clause_assign_op (c, val,
10207 var3);
10208 gimplify_and_add (x, &before);
10209 }
10210 else if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
10211 {
10212 /* Otherwise, assign to it the identity element. */
10213 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
10214 if (is_for)
10215 tseq = copy_gimple_seq_and_replace_locals (tseq);
10216 tree ref = build_outer_var_ref (var, octx);
10217 tree x = (DECL_HAS_VALUE_EXPR_P (new_vard)
10218 ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
10219 if (x)
10220 {
10221 if (new_vard != new_var)
10222 val = build_fold_addr_expr_loc (clause_loc, val);
10223 SET_DECL_VALUE_EXPR (new_vard, val);
10224 }
10225 SET_DECL_VALUE_EXPR (placeholder, ref);
10226 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
10227 lower_omp (&tseq, octx);
10228 if (x)
10229 SET_DECL_VALUE_EXPR (new_vard, x);
10230 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
10231 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
10232 gimple_seq_add_seq (&before, tseq);
10233 if (is_simd)
10234 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
10235 }
10236 }
10237 else if (is_simd)
10238 {
10239 tree x;
10240 if (octx->scan_exclusive)
10241 {
10242 tree v4 = unshare_expr (var4);
10243 tree v2 = unshare_expr (var2);
10244 x = lang_hooks.decls.omp_clause_assign_op (c, v4, v2);
10245 gimplify_and_add (x, &before);
10246 }
10247 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
10248 x = (DECL_HAS_VALUE_EXPR_P (new_vard)
10249 ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
10250 tree vexpr = val;
10251 if (x && new_vard != new_var)
10252 vexpr = build_fold_addr_expr_loc (clause_loc, val);
10253 if (x)
10254 SET_DECL_VALUE_EXPR (new_vard, vexpr);
10255 SET_DECL_VALUE_EXPR (placeholder, var2);
10256 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
10257 lower_omp (&tseq, octx);
10258 gimple_seq_add_seq (&before, tseq);
10259 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
10260 if (x)
10261 SET_DECL_VALUE_EXPR (new_vard, x);
10262 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
10263 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
10264 if (octx->scan_inclusive)
10265 {
10266 x = lang_hooks.decls.omp_clause_assign_op (c, val,
10267 var2);
10268 gimplify_and_add (x, &before);
10269 }
10270 else if (lane0 == NULL_TREE)
10271 {
10272 x = lang_hooks.decls.omp_clause_assign_op (c, val,
10273 var4);
10274 gimplify_and_add (x, &before);
10275 }
10276 }
10277 }
10278 else
10279 {
10280 if (input_phase)
10281 {
10282 /* input phase. Set val to initializer before
10283 the body. */
10284 tree x = omp_reduction_init (c, TREE_TYPE (new_var));
10285 gimplify_assign (val, x, &before);
10286 }
10287 else if (is_simd)
10288 {
10289 /* scan phase. */
10290 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
10291 if (code == MINUS_EXPR)
10292 code = PLUS_EXPR;
10293
10294 tree x = build2 (code, TREE_TYPE (var2),
10295 unshare_expr (var2), unshare_expr (val));
10296 if (octx->scan_inclusive)
10297 {
10298 gimplify_assign (unshare_expr (var2), x, &before);
10299 gimplify_assign (val, var2, &before);
10300 }
10301 else
10302 {
10303 gimplify_assign (unshare_expr (var4),
10304 unshare_expr (var2), &before);
10305 gimplify_assign (var2, x, &before);
10306 if (lane0 == NULL_TREE)
10307 gimplify_assign (val, var4, &before);
10308 }
10309 }
10310 }
10311 if (octx->scan_exclusive && !input_phase && lane0)
10312 {
10313 tree vexpr = unshare_expr (var4);
10314 TREE_OPERAND (vexpr, 1) = lane0;
10315 if (new_vard != new_var)
10316 vexpr = build_fold_addr_expr_loc (clause_loc, vexpr);
10317 SET_DECL_VALUE_EXPR (new_vard, vexpr);
10318 }
10319 }
10320 }
10321 if (is_simd && !is_for_simd)
10322 {
10323 gsi_insert_seq_after (gsi_p, gimple_omp_body (stmt), GSI_SAME_STMT);
10324 gsi_insert_seq_after (gsi_p, before, GSI_SAME_STMT);
10325 gsi_replace (gsi_p, gimple_build_nop (), true);
10326 return;
10327 }
10328 lower_omp (gimple_omp_body_ptr (stmt), octx);
10329 if (before)
10330 {
10331 gimple_stmt_iterator gsi = gsi_start_1 (gimple_omp_body_ptr (stmt));
10332 gsi_insert_seq_before (&gsi, before, GSI_SAME_STMT);
10333 }
10334 }
10335
10336
10337 /* Gimplify a GIMPLE_OMP_CRITICAL statement. This is a relatively simple
10338 substitution of a couple of function calls. But in the NAMED case,
10339 requires that languages coordinate a symbol name. It is therefore
10340 best put here in common code. */
10341
/* Lazily-built map from user-provided 'critical' names to the global
   mutex decls guarding them; GC-rooted via GTY.  */
static GTY(()) hash_map<tree, tree> *critical_name_mutexes;
10343
10344 static void
lower_omp_critical(gimple_stmt_iterator * gsi_p,omp_context * ctx)10345 lower_omp_critical (gimple_stmt_iterator *gsi_p, omp_context *ctx)
10346 {
10347 tree block;
10348 tree name, lock, unlock;
10349 gomp_critical *stmt = as_a <gomp_critical *> (gsi_stmt (*gsi_p));
10350 gbind *bind;
10351 location_t loc = gimple_location (stmt);
10352 gimple_seq tbody;
10353
10354 name = gimple_omp_critical_name (stmt);
10355 if (name)
10356 {
10357 tree decl;
10358
10359 if (!critical_name_mutexes)
10360 critical_name_mutexes = hash_map<tree, tree>::create_ggc (10);
10361
10362 tree *n = critical_name_mutexes->get (name);
10363 if (n == NULL)
10364 {
10365 char *new_str;
10366
10367 decl = create_tmp_var_raw (ptr_type_node);
10368
10369 new_str = ACONCAT ((".gomp_critical_user_",
10370 IDENTIFIER_POINTER (name), NULL));
10371 DECL_NAME (decl) = get_identifier (new_str);
10372 TREE_PUBLIC (decl) = 1;
10373 TREE_STATIC (decl) = 1;
10374 DECL_COMMON (decl) = 1;
10375 DECL_ARTIFICIAL (decl) = 1;
10376 DECL_IGNORED_P (decl) = 1;
10377
10378 varpool_node::finalize_decl (decl);
10379
10380 critical_name_mutexes->put (name, decl);
10381 }
10382 else
10383 decl = *n;
10384
10385 /* If '#pragma omp critical' is inside offloaded region or
10386 inside function marked as offloadable, the symbol must be
10387 marked as offloadable too. */
10388 omp_context *octx;
10389 if (cgraph_node::get (current_function_decl)->offloadable)
10390 varpool_node::get_create (decl)->offloadable = 1;
10391 else
10392 for (octx = ctx->outer; octx; octx = octx->outer)
10393 if (is_gimple_omp_offloaded (octx->stmt))
10394 {
10395 varpool_node::get_create (decl)->offloadable = 1;
10396 break;
10397 }
10398
10399 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_START);
10400 lock = build_call_expr_loc (loc, lock, 1,
10401 build_fold_addr_expr_loc (loc, decl));
10402
10403 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_END);
10404 unlock = build_call_expr_loc (loc, unlock, 1,
10405 build_fold_addr_expr_loc (loc, decl));
10406 }
10407 else
10408 {
10409 lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_START);
10410 lock = build_call_expr_loc (loc, lock, 0);
10411
10412 unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_END);
10413 unlock = build_call_expr_loc (loc, unlock, 0);
10414 }
10415
10416 push_gimplify_context ();
10417
10418 block = make_node (BLOCK);
10419 bind = gimple_build_bind (NULL, NULL, block);
10420 gsi_replace (gsi_p, bind, true);
10421 gimple_bind_add_stmt (bind, stmt);
10422
10423 tbody = gimple_bind_body (bind);
10424 gimplify_and_add (lock, &tbody);
10425 gimple_bind_set_body (bind, tbody);
10426
10427 lower_omp (gimple_omp_body_ptr (stmt), ctx);
10428 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
10429 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
10430 gimple_omp_set_body (stmt, NULL);
10431
10432 tbody = gimple_bind_body (bind);
10433 gimplify_and_add (unlock, &tbody);
10434 gimple_bind_set_body (bind, tbody);
10435
10436 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
10437
10438 pop_gimplify_context (bind);
10439 gimple_bind_append_vars (bind, ctx->block_vars);
10440 BLOCK_VARS (block) = gimple_bind_vars (bind);
10441 }
10442
10443 /* A subroutine of lower_omp_for. Generate code to emit the predicate
10444 for a lastprivate clause. Given a loop control predicate of (V
10445 cond N2), we gate the clause on (!(V cond N2)). The lowered form
10446 is appended to *DLIST, iterator initialization is appended to
10447 *BODY_P. *CLIST is for lastprivate(conditional:) code that needs
10448 to be emitted in a critical section. */
10449
10450 static void
lower_omp_for_lastprivate(struct omp_for_data * fd,gimple_seq * body_p,gimple_seq * dlist,gimple_seq * clist,struct omp_context * ctx)10451 lower_omp_for_lastprivate (struct omp_for_data *fd, gimple_seq *body_p,
10452 gimple_seq *dlist, gimple_seq *clist,
10453 struct omp_context *ctx)
10454 {
10455 tree clauses, cond, vinit;
10456 enum tree_code cond_code;
10457 gimple_seq stmts;
10458
10459 cond_code = fd->loop.cond_code;
10460 cond_code = cond_code == LT_EXPR ? GE_EXPR : LE_EXPR;
10461
10462 /* When possible, use a strict equality expression. This can let VRP
10463 type optimizations deduce the value and remove a copy. */
10464 if (tree_fits_shwi_p (fd->loop.step))
10465 {
10466 HOST_WIDE_INT step = tree_to_shwi (fd->loop.step);
10467 if (step == 1 || step == -1)
10468 cond_code = EQ_EXPR;
10469 }
10470
10471 tree n2 = fd->loop.n2;
10472 if (fd->collapse > 1
10473 && TREE_CODE (n2) != INTEGER_CST
10474 && gimple_omp_for_combined_into_p (fd->for_stmt))
10475 {
10476 struct omp_context *taskreg_ctx = NULL;
10477 if (gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
10478 {
10479 gomp_for *gfor = as_a <gomp_for *> (ctx->outer->stmt);
10480 if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_FOR
10481 || gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_DISTRIBUTE)
10482 {
10483 if (gimple_omp_for_combined_into_p (gfor))
10484 {
10485 gcc_assert (ctx->outer->outer
10486 && is_parallel_ctx (ctx->outer->outer));
10487 taskreg_ctx = ctx->outer->outer;
10488 }
10489 else
10490 {
10491 struct omp_for_data outer_fd;
10492 omp_extract_for_data (gfor, &outer_fd, NULL);
10493 n2 = fold_convert (TREE_TYPE (n2), outer_fd.loop.n2);
10494 }
10495 }
10496 else if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_TASKLOOP)
10497 taskreg_ctx = ctx->outer->outer;
10498 }
10499 else if (is_taskreg_ctx (ctx->outer))
10500 taskreg_ctx = ctx->outer;
10501 if (taskreg_ctx)
10502 {
10503 int i;
10504 tree taskreg_clauses
10505 = gimple_omp_taskreg_clauses (taskreg_ctx->stmt);
10506 tree innerc = omp_find_clause (taskreg_clauses,
10507 OMP_CLAUSE__LOOPTEMP_);
10508 gcc_assert (innerc);
10509 int count = fd->collapse;
10510 if (fd->non_rect
10511 && fd->last_nonrect == fd->first_nonrect + 1)
10512 if (tree v = gimple_omp_for_index (fd->for_stmt, fd->last_nonrect))
10513 if (!TYPE_UNSIGNED (TREE_TYPE (v)))
10514 count += 4;
10515 for (i = 0; i < count; i++)
10516 {
10517 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
10518 OMP_CLAUSE__LOOPTEMP_);
10519 gcc_assert (innerc);
10520 }
10521 innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
10522 OMP_CLAUSE__LOOPTEMP_);
10523 if (innerc)
10524 n2 = fold_convert (TREE_TYPE (n2),
10525 lookup_decl (OMP_CLAUSE_DECL (innerc),
10526 taskreg_ctx));
10527 }
10528 }
10529 cond = build2 (cond_code, boolean_type_node, fd->loop.v, n2);
10530
10531 clauses = gimple_omp_for_clauses (fd->for_stmt);
10532 stmts = NULL;
10533 lower_lastprivate_clauses (clauses, cond, body_p, &stmts, clist, ctx);
10534 if (!gimple_seq_empty_p (stmts))
10535 {
10536 gimple_seq_add_seq (&stmts, *dlist);
10537 *dlist = stmts;
10538
10539 /* Optimize: v = 0; is usually cheaper than v = some_other_constant. */
10540 vinit = fd->loop.n1;
10541 if (cond_code == EQ_EXPR
10542 && tree_fits_shwi_p (fd->loop.n2)
10543 && ! integer_zerop (fd->loop.n2))
10544 vinit = build_int_cst (TREE_TYPE (fd->loop.v), 0);
10545 else
10546 vinit = unshare_expr (vinit);
10547
10548 /* Initialize the iterator variable, so that threads that don't execute
10549 any iterations don't execute the lastprivate clauses by accident. */
10550 gimplify_assign (fd->loop.v, vinit, body_p);
10551 }
10552 }
10553
10554 /* OpenACC privatization.
10555
10556 Or, in other words, *sharing* at the respective OpenACC level of
10557 parallelism.
10558
10559 From a correctness perspective, a non-addressable variable can't be accessed
10560 outside the current thread, so it can go in a (faster than shared memory)
10561 register -- though that register may need to be broadcast in some
10562 circumstances. A variable can only meaningfully be "shared" across workers
10563 or vector lanes if its address is taken, e.g. by a call to an atomic
10564 builtin.
10565
10566 From an optimisation perspective, the answer might be fuzzier: maybe
10567 sometimes, using shared memory directly would be faster than
10568 broadcasting. */
10569
10570 static void
oacc_privatization_begin_diagnose_var(const dump_flags_t l_dump_flags,const location_t loc,const tree c,const tree decl)10571 oacc_privatization_begin_diagnose_var (const dump_flags_t l_dump_flags,
10572 const location_t loc, const tree c,
10573 const tree decl)
10574 {
10575 const dump_user_location_t d_u_loc
10576 = dump_user_location_t::from_location_t (loc);
10577 /* PR100695 "Format decoder, quoting in 'dump_printf' etc." */
10578 #if __GNUC__ >= 10
10579 # pragma GCC diagnostic push
10580 # pragma GCC diagnostic ignored "-Wformat"
10581 #endif
10582 dump_printf_loc (l_dump_flags, d_u_loc,
10583 "variable %<%T%> ", decl);
10584 #if __GNUC__ >= 10
10585 # pragma GCC diagnostic pop
10586 #endif
10587 if (c)
10588 dump_printf (l_dump_flags,
10589 "in %qs clause ",
10590 omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
10591 else
10592 dump_printf (l_dump_flags,
10593 "declared in block ");
10594 }
10595
10596 static bool
oacc_privatization_candidate_p(const location_t loc,const tree c,const tree decl)10597 oacc_privatization_candidate_p (const location_t loc, const tree c,
10598 const tree decl)
10599 {
10600 dump_flags_t l_dump_flags = get_openacc_privatization_dump_flags ();
10601
10602 /* There is some differentiation depending on block vs. clause. */
10603 bool block = !c;
10604
10605 bool res = true;
10606
10607 if (res && !VAR_P (decl))
10608 {
10609 /* A PARM_DECL (appearing in a 'private' clause) is expected to have been
10610 privatized into a new VAR_DECL. */
10611 gcc_checking_assert (TREE_CODE (decl) != PARM_DECL);
10612
10613 res = false;
10614
10615 if (dump_enabled_p ())
10616 {
10617 oacc_privatization_begin_diagnose_var (l_dump_flags, loc, c, decl);
10618 dump_printf (l_dump_flags,
10619 "potentially has improper OpenACC privatization level: %qs\n",
10620 get_tree_code_name (TREE_CODE (decl)));
10621 }
10622 }
10623
10624 if (res && block && TREE_STATIC (decl))
10625 {
10626 res = false;
10627
10628 if (dump_enabled_p ())
10629 {
10630 oacc_privatization_begin_diagnose_var (l_dump_flags, loc, c, decl);
10631 dump_printf (l_dump_flags,
10632 "isn%'t candidate for adjusting OpenACC privatization level: %s\n",
10633 "static");
10634 }
10635 }
10636
10637 if (res && block && DECL_EXTERNAL (decl))
10638 {
10639 res = false;
10640
10641 if (dump_enabled_p ())
10642 {
10643 oacc_privatization_begin_diagnose_var (l_dump_flags, loc, c, decl);
10644 dump_printf (l_dump_flags,
10645 "isn%'t candidate for adjusting OpenACC privatization level: %s\n",
10646 "external");
10647 }
10648 }
10649
10650 if (res && !TREE_ADDRESSABLE (decl))
10651 {
10652 res = false;
10653
10654 if (dump_enabled_p ())
10655 {
10656 oacc_privatization_begin_diagnose_var (l_dump_flags, loc, c, decl);
10657 dump_printf (l_dump_flags,
10658 "isn%'t candidate for adjusting OpenACC privatization level: %s\n",
10659 "not addressable");
10660 }
10661 }
10662
10663 /* If an artificial variable has been added to a bind, e.g.
10664 a compiler-generated temporary structure used by the Fortran front-end, do
10665 not consider it as a privatization candidate. Note that variables on
10666 the stack are private per-thread by default: making them "gang-private"
10667 for OpenACC actually means to share a single instance of a variable
10668 amongst all workers and threads spawned within each gang.
10669 At present, no compiler-generated artificial variables require such
10670 sharing semantics, so this is safe. */
10671
10672 if (res && block && DECL_ARTIFICIAL (decl))
10673 {
10674 res = false;
10675
10676 if (dump_enabled_p ())
10677 {
10678 oacc_privatization_begin_diagnose_var (l_dump_flags, loc, c, decl);
10679 dump_printf (l_dump_flags,
10680 "isn%'t candidate for adjusting OpenACC privatization "
10681 "level: %s\n", "artificial");
10682 }
10683 }
10684
10685 if (res)
10686 {
10687 if (dump_enabled_p ())
10688 {
10689 oacc_privatization_begin_diagnose_var (l_dump_flags, loc, c, decl);
10690 dump_printf (l_dump_flags,
10691 "is candidate for adjusting OpenACC privatization level\n");
10692 }
10693 }
10694
10695 if (dump_file && (dump_flags & TDF_DETAILS))
10696 {
10697 print_generic_decl (dump_file, decl, dump_flags);
10698 fprintf (dump_file, "\n");
10699 }
10700
10701 return res;
10702 }
10703
10704 /* Scan CLAUSES for candidates for adjusting OpenACC privatization level in
10705 CTX. */
10706
10707 static void
oacc_privatization_scan_clause_chain(omp_context * ctx,tree clauses)10708 oacc_privatization_scan_clause_chain (omp_context *ctx, tree clauses)
10709 {
10710 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
10711 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE)
10712 {
10713 tree decl = OMP_CLAUSE_DECL (c);
10714
10715 tree new_decl = lookup_decl (decl, ctx);
10716
10717 if (!oacc_privatization_candidate_p (OMP_CLAUSE_LOCATION (c), c,
10718 new_decl))
10719 continue;
10720
10721 gcc_checking_assert
10722 (!ctx->oacc_privatization_candidates.contains (new_decl));
10723 ctx->oacc_privatization_candidates.safe_push (new_decl);
10724 }
10725 }
10726
10727 /* Scan DECLS for candidates for adjusting OpenACC privatization level in
10728 CTX. */
10729
10730 static void
oacc_privatization_scan_decl_chain(omp_context * ctx,tree decls)10731 oacc_privatization_scan_decl_chain (omp_context *ctx, tree decls)
10732 {
10733 for (tree decl = decls; decl; decl = DECL_CHAIN (decl))
10734 {
10735 tree new_decl = lookup_decl (decl, ctx);
10736 gcc_checking_assert (new_decl == decl);
10737
10738 if (!oacc_privatization_candidate_p (gimple_location (ctx->stmt), NULL,
10739 new_decl))
10740 continue;
10741
10742 gcc_checking_assert
10743 (!ctx->oacc_privatization_candidates.contains (new_decl));
10744 ctx->oacc_privatization_candidates.safe_push (new_decl);
10745 }
10746 }
10747
10748 /* Callback for walk_gimple_seq. Find #pragma omp scan statement. */
10749
10750 static tree
omp_find_scan(gimple_stmt_iterator * gsi_p,bool * handled_ops_p,struct walk_stmt_info * wi)10751 omp_find_scan (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
10752 struct walk_stmt_info *wi)
10753 {
10754 gimple *stmt = gsi_stmt (*gsi_p);
10755
10756 *handled_ops_p = true;
10757 switch (gimple_code (stmt))
10758 {
10759 WALK_SUBSTMTS;
10760
10761 case GIMPLE_OMP_FOR:
10762 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_SIMD
10763 && gimple_omp_for_combined_into_p (stmt))
10764 *handled_ops_p = false;
10765 break;
10766
10767 case GIMPLE_OMP_SCAN:
10768 *(gimple_stmt_iterator *) (wi->info) = *gsi_p;
10769 return integer_zero_node;
10770 default:
10771 break;
10772 }
10773 return NULL;
10774 }
10775
10776 /* Helper function for lower_omp_for, add transformations for a worksharing
10777 loop with scan directives inside of it.
10778 For worksharing loop not combined with simd, transform:
10779 #pragma omp for reduction(inscan,+:r) private(i)
10780 for (i = 0; i < n; i = i + 1)
10781 {
10782 {
10783 update (r);
10784 }
10785 #pragma omp scan inclusive(r)
10786 {
10787 use (r);
10788 }
10789 }
10790
10791 into two worksharing loops + code to merge results:
10792
10793 num_threads = omp_get_num_threads ();
10794 thread_num = omp_get_thread_num ();
10795 if (thread_num == 0) goto <D.2099>; else goto <D.2100>;
10796 <D.2099>:
10797 var2 = r;
10798 goto <D.2101>;
10799 <D.2100>:
10800 // For UDRs this is UDR init, or if ctors are needed, copy from
10801 // var3 that has been constructed to contain the neutral element.
10802 var2 = 0;
10803 <D.2101>:
10804 ivar = 0;
10805 // The _scantemp_ clauses will arrange for rpriva to be initialized to
10806 // a shared array with num_threads elements and rprivb to a local array
10807 // number of elements equal to the number of (contiguous) iterations the
10808 // current thread will perform. controlb and controlp variables are
10809 // temporaries to handle deallocation of rprivb at the end of second
10810 // GOMP_FOR.
10811 #pragma omp for _scantemp_(rpriva) _scantemp_(rprivb) _scantemp_(controlb) \
10812 _scantemp_(controlp) reduction(inscan,+:r) private(i) nowait
10813 for (i = 0; i < n; i = i + 1)
10814 {
10815 {
10816 // For UDRs this is UDR init or copy from var3.
10817 r = 0;
10818 // This is the input phase from user code.
10819 update (r);
10820 }
10821 {
10822 // For UDRs this is UDR merge.
10823 var2 = var2 + r;
10824 // Rather than handing it over to the user, save to local thread's
10825 // array.
10826 rprivb[ivar] = var2;
10827 // For exclusive scan, the above two statements are swapped.
10828 ivar = ivar + 1;
10829 }
10830 }
10831 // And remember the final value from this thread's into the shared
10832 // rpriva array.
10833 rpriva[(sizetype) thread_num] = var2;
10834 // If more than one thread, compute using Work-Efficient prefix sum
10835 // the inclusive parallel scan of the rpriva array.
10836 if (num_threads > 1) goto <D.2102>; else goto <D.2103>;
10837 <D.2102>:
10838 GOMP_barrier ();
10839 down = 0;
10840 k = 1;
10841 num_threadsu = (unsigned int) num_threads;
10842    thread_nump1 = (unsigned int) thread_num + 1;
10843 <D.2108>:
10844 twok = k << 1;
10845 if (twok > num_threadsu) goto <D.2110>; else goto <D.2111>;
10846 <D.2110>:
10847 down = 4294967295;
10848 k = k >> 1;
10849 if (k == num_threadsu) goto <D.2112>; else goto <D.2111>;
10850 <D.2112>:
10851 k = k >> 1;
10852 <D.2111>:
10853 twok = k << 1;
10854 cplx = .MUL_OVERFLOW (thread_nump1, twok);
10855 mul = REALPART_EXPR <cplx>;
10856 ovf = IMAGPART_EXPR <cplx>;
10857 if (ovf == 0) goto <D.2116>; else goto <D.2117>;
10858 <D.2116>:
10859 andv = k & down;
10860 andvm1 = andv + 4294967295;
10861 l = mul + andvm1;
10862 if (l < num_threadsu) goto <D.2120>; else goto <D.2117>;
10863 <D.2120>:
10864 // For UDRs this is UDR merge, performed using var2 variable as temporary,
10865 // i.e. var2 = rpriva[l - k]; UDR merge (var2, rpriva[l]); rpriva[l] = var2;
10866 rpriva[l] = rpriva[l - k] + rpriva[l];
10867 <D.2117>:
10868 if (down == 0) goto <D.2121>; else goto <D.2122>;
10869 <D.2121>:
10870 k = k << 1;
10871 goto <D.2123>;
10872 <D.2122>:
10873 k = k >> 1;
10874 <D.2123>:
10875 GOMP_barrier ();
10876 if (k != 0) goto <D.2108>; else goto <D.2103>;
10877 <D.2103>:
10878 if (thread_num == 0) goto <D.2124>; else goto <D.2125>;
10879 <D.2124>:
10880 // For UDRs this is UDR init or copy from var3.
10881 var2 = 0;
10882 goto <D.2126>;
10883 <D.2125>:
10884 var2 = rpriva[thread_num - 1];
10885 <D.2126>:
10886 ivar = 0;
10887 #pragma omp for _scantemp_(controlb) _scantemp_(controlp) \
10888 reduction(inscan,+:r) private(i)
10889 for (i = 0; i < n; i = i + 1)
10890 {
10891 {
10892 // For UDRs, this is r = var2; UDR merge (r, rprivb[ivar]);
10893 r = var2 + rprivb[ivar];
10894 }
10895 {
10896 // This is the scan phase from user code.
10897 use (r);
10898 // Plus a bump of the iterator.
10899 ivar = ivar + 1;
10900 }
10901 } */
10902
10903 static void
lower_omp_for_scan(gimple_seq * body_p,gimple_seq * dlist,gomp_for * stmt,struct omp_for_data * fd,omp_context * ctx)10904 lower_omp_for_scan (gimple_seq *body_p, gimple_seq *dlist, gomp_for *stmt,
10905 struct omp_for_data *fd, omp_context *ctx)
10906 {
10907 bool is_for_simd = gimple_omp_for_combined_p (stmt);
10908 gcc_assert (ctx->scan_inclusive || ctx->scan_exclusive);
10909
10910 gimple_seq body = gimple_omp_body (stmt);
10911 gimple_stmt_iterator input1_gsi = gsi_none ();
10912 struct walk_stmt_info wi;
10913 memset (&wi, 0, sizeof (wi));
10914 wi.val_only = true;
10915 wi.info = (void *) &input1_gsi;
10916 walk_gimple_seq_mod (&body, omp_find_scan, NULL, &wi);
10917 gcc_assert (!gsi_end_p (input1_gsi));
10918
10919 gimple *input_stmt1 = gsi_stmt (input1_gsi);
10920 gimple_stmt_iterator gsi = input1_gsi;
10921 gsi_next (&gsi);
10922 gimple_stmt_iterator scan1_gsi = gsi;
10923 gimple *scan_stmt1 = gsi_stmt (gsi);
10924 gcc_assert (scan_stmt1 && gimple_code (scan_stmt1) == GIMPLE_OMP_SCAN);
10925
10926 gimple_seq input_body = gimple_omp_body (input_stmt1);
10927 gimple_seq scan_body = gimple_omp_body (scan_stmt1);
10928 gimple_omp_set_body (input_stmt1, NULL);
10929 gimple_omp_set_body (scan_stmt1, NULL);
10930 gimple_omp_set_body (stmt, NULL);
10931
10932 gomp_for *new_stmt = as_a <gomp_for *> (gimple_copy (stmt));
10933 gimple_seq new_body = copy_gimple_seq_and_replace_locals (body);
10934 gimple_omp_set_body (stmt, body);
10935 gimple_omp_set_body (input_stmt1, input_body);
10936
10937 gimple_stmt_iterator input2_gsi = gsi_none ();
10938 memset (&wi, 0, sizeof (wi));
10939 wi.val_only = true;
10940 wi.info = (void *) &input2_gsi;
10941 walk_gimple_seq_mod (&new_body, omp_find_scan, NULL, &wi);
10942 gcc_assert (!gsi_end_p (input2_gsi));
10943
10944 gimple *input_stmt2 = gsi_stmt (input2_gsi);
10945 gsi = input2_gsi;
10946 gsi_next (&gsi);
10947 gimple_stmt_iterator scan2_gsi = gsi;
10948 gimple *scan_stmt2 = gsi_stmt (gsi);
10949 gcc_assert (scan_stmt2 && gimple_code (scan_stmt2) == GIMPLE_OMP_SCAN);
10950 gimple_omp_set_body (scan_stmt2, scan_body);
10951
10952 gimple_stmt_iterator input3_gsi = gsi_none ();
10953 gimple_stmt_iterator scan3_gsi = gsi_none ();
10954 gimple_stmt_iterator input4_gsi = gsi_none ();
10955 gimple_stmt_iterator scan4_gsi = gsi_none ();
10956 gimple *input_stmt3 = NULL, *scan_stmt3 = NULL;
10957 gimple *input_stmt4 = NULL, *scan_stmt4 = NULL;
10958 omp_context *input_simd_ctx = NULL, *scan_simd_ctx = NULL;
10959 if (is_for_simd)
10960 {
10961 memset (&wi, 0, sizeof (wi));
10962 wi.val_only = true;
10963 wi.info = (void *) &input3_gsi;
10964 walk_gimple_seq_mod (&input_body, omp_find_scan, NULL, &wi);
10965 gcc_assert (!gsi_end_p (input3_gsi));
10966
10967 input_stmt3 = gsi_stmt (input3_gsi);
10968 gsi = input3_gsi;
10969 gsi_next (&gsi);
10970 scan3_gsi = gsi;
10971 scan_stmt3 = gsi_stmt (gsi);
10972 gcc_assert (scan_stmt3 && gimple_code (scan_stmt3) == GIMPLE_OMP_SCAN);
10973
10974 memset (&wi, 0, sizeof (wi));
10975 wi.val_only = true;
10976 wi.info = (void *) &input4_gsi;
10977 walk_gimple_seq_mod (&scan_body, omp_find_scan, NULL, &wi);
10978 gcc_assert (!gsi_end_p (input4_gsi));
10979
10980 input_stmt4 = gsi_stmt (input4_gsi);
10981 gsi = input4_gsi;
10982 gsi_next (&gsi);
10983 scan4_gsi = gsi;
10984 scan_stmt4 = gsi_stmt (gsi);
10985 gcc_assert (scan_stmt4 && gimple_code (scan_stmt4) == GIMPLE_OMP_SCAN);
10986
10987 input_simd_ctx = maybe_lookup_ctx (input_stmt3)->outer;
10988 scan_simd_ctx = maybe_lookup_ctx (input_stmt4)->outer;
10989 }
10990
10991 tree num_threads = create_tmp_var (integer_type_node);
10992 tree thread_num = create_tmp_var (integer_type_node);
10993 tree nthreads_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS);
10994 tree threadnum_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
10995 gimple *g = gimple_build_call (nthreads_decl, 0);
10996 gimple_call_set_lhs (g, num_threads);
10997 gimple_seq_add_stmt (body_p, g);
10998 g = gimple_build_call (threadnum_decl, 0);
10999 gimple_call_set_lhs (g, thread_num);
11000 gimple_seq_add_stmt (body_p, g);
11001
11002 tree ivar = create_tmp_var (sizetype);
11003 tree new_clauses1 = NULL_TREE, new_clauses2 = NULL_TREE;
11004 tree *cp1 = &new_clauses1, *cp2 = &new_clauses2;
11005 tree k = create_tmp_var (unsigned_type_node);
11006 tree l = create_tmp_var (unsigned_type_node);
11007
11008 gimple_seq clist = NULL, mdlist = NULL;
11009 gimple_seq thr01_list = NULL, thrn1_list = NULL;
11010 gimple_seq thr02_list = NULL, thrn2_list = NULL;
11011 gimple_seq scan1_list = NULL, input2_list = NULL;
11012 gimple_seq last_list = NULL, reduc_list = NULL;
11013 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
11014 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
11015 && OMP_CLAUSE_REDUCTION_INSCAN (c))
11016 {
11017 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
11018 tree var = OMP_CLAUSE_DECL (c);
11019 tree new_var = lookup_decl (var, ctx);
11020 tree var3 = NULL_TREE;
11021 tree new_vard = new_var;
11022 if (omp_privatize_by_reference (var))
11023 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
11024 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
11025 {
11026 var3 = maybe_lookup_decl (new_vard, ctx);
11027 if (var3 == new_vard)
11028 var3 = NULL_TREE;
11029 }
11030
11031 tree ptype = build_pointer_type (TREE_TYPE (new_var));
11032 tree rpriva = create_tmp_var (ptype);
11033 tree nc = build_omp_clause (clause_loc, OMP_CLAUSE__SCANTEMP_);
11034 OMP_CLAUSE_DECL (nc) = rpriva;
11035 *cp1 = nc;
11036 cp1 = &OMP_CLAUSE_CHAIN (nc);
11037
11038 tree rprivb = create_tmp_var (ptype);
11039 nc = build_omp_clause (clause_loc, OMP_CLAUSE__SCANTEMP_);
11040 OMP_CLAUSE_DECL (nc) = rprivb;
11041 OMP_CLAUSE__SCANTEMP__ALLOC (nc) = 1;
11042 *cp1 = nc;
11043 cp1 = &OMP_CLAUSE_CHAIN (nc);
11044
11045 tree var2 = create_tmp_var_raw (TREE_TYPE (new_var));
11046 if (new_vard != new_var)
11047 TREE_ADDRESSABLE (var2) = 1;
11048 gimple_add_tmp_var (var2);
11049
11050 tree x = fold_convert_loc (clause_loc, sizetype, thread_num);
11051 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
11052 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
11053 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
11054 tree rpriva_ref = build_simple_mem_ref_loc (clause_loc, x);
11055
11056 x = fold_build2_loc (clause_loc, PLUS_EXPR, integer_type_node,
11057 thread_num, integer_minus_one_node);
11058 x = fold_convert_loc (clause_loc, sizetype, x);
11059 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
11060 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
11061 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
11062 tree rprivam1_ref = build_simple_mem_ref_loc (clause_loc, x);
11063
11064 x = fold_convert_loc (clause_loc, sizetype, l);
11065 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
11066 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
11067 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
11068 tree rprival_ref = build_simple_mem_ref_loc (clause_loc, x);
11069
11070 x = fold_build2_loc (clause_loc, MINUS_EXPR, unsigned_type_node, l, k);
11071 x = fold_convert_loc (clause_loc, sizetype, x);
11072 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
11073 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
11074 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
11075 tree rprivalmk_ref = build_simple_mem_ref_loc (clause_loc, x);
11076
11077 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, ivar,
11078 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
11079 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rprivb), rprivb, x);
11080 tree rprivb_ref = build_simple_mem_ref_loc (clause_loc, x);
11081
11082 tree var4 = is_for_simd ? new_var : var2;
11083 tree var5 = NULL_TREE, var6 = NULL_TREE;
11084 if (is_for_simd)
11085 {
11086 var5 = lookup_decl (var, input_simd_ctx);
11087 var6 = lookup_decl (var, scan_simd_ctx);
11088 if (new_vard != new_var)
11089 {
11090 var5 = build_simple_mem_ref_loc (clause_loc, var5);
11091 var6 = build_simple_mem_ref_loc (clause_loc, var6);
11092 }
11093 }
11094 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
11095 {
11096 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
11097 tree val = var2;
11098
11099 x = lang_hooks.decls.omp_clause_default_ctor
11100 (c, var2, build_outer_var_ref (var, ctx));
11101 if (x)
11102 gimplify_and_add (x, &clist);
11103
11104 x = build_outer_var_ref (var, ctx);
11105 x = lang_hooks.decls.omp_clause_assign_op (c, unshare_expr (var4),
11106 x);
11107 gimplify_and_add (x, &thr01_list);
11108
11109 tree y = (DECL_HAS_VALUE_EXPR_P (new_vard)
11110 ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
11111 if (var3)
11112 {
11113 x = unshare_expr (var4);
11114 x = lang_hooks.decls.omp_clause_assign_op (c, x, var3);
11115 gimplify_and_add (x, &thrn1_list);
11116 x = unshare_expr (var4);
11117 x = lang_hooks.decls.omp_clause_assign_op (c, x, var3);
11118 gimplify_and_add (x, &thr02_list);
11119 }
11120 else if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
11121 {
11122 /* Otherwise, assign to it the identity element. */
11123 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
11124 tseq = copy_gimple_seq_and_replace_locals (tseq);
11125 if (!is_for_simd)
11126 {
11127 if (new_vard != new_var)
11128 val = build_fold_addr_expr_loc (clause_loc, val);
11129 SET_DECL_VALUE_EXPR (new_vard, val);
11130 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
11131 }
11132 SET_DECL_VALUE_EXPR (placeholder, error_mark_node);
11133 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
11134 lower_omp (&tseq, ctx);
11135 gimple_seq_add_seq (&thrn1_list, tseq);
11136 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
11137 lower_omp (&tseq, ctx);
11138 gimple_seq_add_seq (&thr02_list, tseq);
11139 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
11140 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
11141 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
11142 if (y)
11143 SET_DECL_VALUE_EXPR (new_vard, y);
11144 else
11145 {
11146 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
11147 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
11148 }
11149 }
11150
11151 x = unshare_expr (var4);
11152 x = lang_hooks.decls.omp_clause_assign_op (c, x, rprivam1_ref);
11153 gimplify_and_add (x, &thrn2_list);
11154
11155 if (is_for_simd)
11156 {
11157 x = unshare_expr (rprivb_ref);
11158 x = lang_hooks.decls.omp_clause_assign_op (c, x, var5);
11159 gimplify_and_add (x, &scan1_list);
11160 }
11161 else
11162 {
11163 if (ctx->scan_exclusive)
11164 {
11165 x = unshare_expr (rprivb_ref);
11166 x = lang_hooks.decls.omp_clause_assign_op (c, x, var2);
11167 gimplify_and_add (x, &scan1_list);
11168 }
11169
11170 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
11171 tseq = copy_gimple_seq_and_replace_locals (tseq);
11172 SET_DECL_VALUE_EXPR (placeholder, var2);
11173 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
11174 lower_omp (&tseq, ctx);
11175 gimple_seq_add_seq (&scan1_list, tseq);
11176
11177 if (ctx->scan_inclusive)
11178 {
11179 x = unshare_expr (rprivb_ref);
11180 x = lang_hooks.decls.omp_clause_assign_op (c, x, var2);
11181 gimplify_and_add (x, &scan1_list);
11182 }
11183 }
11184
11185 x = unshare_expr (rpriva_ref);
11186 x = lang_hooks.decls.omp_clause_assign_op (c, x,
11187 unshare_expr (var4));
11188 gimplify_and_add (x, &mdlist);
11189
11190 x = unshare_expr (is_for_simd ? var6 : new_var);
11191 x = lang_hooks.decls.omp_clause_assign_op (c, x, var4);
11192 gimplify_and_add (x, &input2_list);
11193
11194 val = rprivb_ref;
11195 if (new_vard != new_var)
11196 val = build_fold_addr_expr_loc (clause_loc, val);
11197
11198 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
11199 tseq = copy_gimple_seq_and_replace_locals (tseq);
11200 SET_DECL_VALUE_EXPR (new_vard, val);
11201 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
11202 if (is_for_simd)
11203 {
11204 SET_DECL_VALUE_EXPR (placeholder, var6);
11205 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
11206 }
11207 else
11208 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
11209 lower_omp (&tseq, ctx);
11210 if (y)
11211 SET_DECL_VALUE_EXPR (new_vard, y);
11212 else
11213 {
11214 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
11215 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
11216 }
11217 if (!is_for_simd)
11218 {
11219 SET_DECL_VALUE_EXPR (placeholder, new_var);
11220 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
11221 lower_omp (&tseq, ctx);
11222 }
11223 gimple_seq_add_seq (&input2_list, tseq);
11224
11225 x = build_outer_var_ref (var, ctx);
11226 x = lang_hooks.decls.omp_clause_assign_op (c, x, rpriva_ref);
11227 gimplify_and_add (x, &last_list);
11228
11229 x = lang_hooks.decls.omp_clause_assign_op (c, var2, rprivalmk_ref);
11230 gimplify_and_add (x, &reduc_list);
11231 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
11232 tseq = copy_gimple_seq_and_replace_locals (tseq);
11233 val = rprival_ref;
11234 if (new_vard != new_var)
11235 val = build_fold_addr_expr_loc (clause_loc, val);
11236 SET_DECL_VALUE_EXPR (new_vard, val);
11237 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
11238 SET_DECL_VALUE_EXPR (placeholder, var2);
11239 lower_omp (&tseq, ctx);
11240 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
11241 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
11242 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
11243 if (y)
11244 SET_DECL_VALUE_EXPR (new_vard, y);
11245 else
11246 {
11247 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
11248 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
11249 }
11250 gimple_seq_add_seq (&reduc_list, tseq);
11251 x = lang_hooks.decls.omp_clause_assign_op (c, rprival_ref, var2);
11252 gimplify_and_add (x, &reduc_list);
11253
11254 x = lang_hooks.decls.omp_clause_dtor (c, var2);
11255 if (x)
11256 gimplify_and_add (x, dlist);
11257 }
11258 else
11259 {
11260 x = build_outer_var_ref (var, ctx);
11261 gimplify_assign (unshare_expr (var4), x, &thr01_list);
11262
11263 x = omp_reduction_init (c, TREE_TYPE (new_var));
11264 gimplify_assign (unshare_expr (var4), unshare_expr (x),
11265 &thrn1_list);
11266 gimplify_assign (unshare_expr (var4), x, &thr02_list);
11267
11268 gimplify_assign (unshare_expr (var4), rprivam1_ref, &thrn2_list);
11269
11270 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
11271 if (code == MINUS_EXPR)
11272 code = PLUS_EXPR;
11273
11274 if (is_for_simd)
11275 gimplify_assign (unshare_expr (rprivb_ref), var5, &scan1_list);
11276 else
11277 {
11278 if (ctx->scan_exclusive)
11279 gimplify_assign (unshare_expr (rprivb_ref), var2,
11280 &scan1_list);
11281 x = build2 (code, TREE_TYPE (new_var), var2, new_var);
11282 gimplify_assign (var2, x, &scan1_list);
11283 if (ctx->scan_inclusive)
11284 gimplify_assign (unshare_expr (rprivb_ref), var2,
11285 &scan1_list);
11286 }
11287
11288 gimplify_assign (unshare_expr (rpriva_ref), unshare_expr (var4),
11289 &mdlist);
11290
11291 x = build2 (code, TREE_TYPE (new_var), var4, rprivb_ref);
11292 gimplify_assign (is_for_simd ? var6 : new_var, x, &input2_list);
11293
11294 gimplify_assign (build_outer_var_ref (var, ctx), rpriva_ref,
11295 &last_list);
11296
11297 x = build2 (code, TREE_TYPE (new_var), rprivalmk_ref,
11298 unshare_expr (rprival_ref));
11299 gimplify_assign (rprival_ref, x, &reduc_list);
11300 }
11301 }
11302
11303 g = gimple_build_assign (ivar, PLUS_EXPR, ivar, size_one_node);
11304 gimple_seq_add_stmt (&scan1_list, g);
11305 g = gimple_build_assign (ivar, PLUS_EXPR, ivar, size_one_node);
11306 gimple_seq_add_stmt (gimple_omp_body_ptr (is_for_simd
11307 ? scan_stmt4 : scan_stmt2), g);
11308
11309 tree controlb = create_tmp_var (boolean_type_node);
11310 tree controlp = create_tmp_var (ptr_type_node);
11311 tree nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
11312 OMP_CLAUSE_DECL (nc) = controlb;
11313 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
11314 *cp1 = nc;
11315 cp1 = &OMP_CLAUSE_CHAIN (nc);
11316 nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
11317 OMP_CLAUSE_DECL (nc) = controlp;
11318 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
11319 *cp1 = nc;
11320 cp1 = &OMP_CLAUSE_CHAIN (nc);
11321 nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
11322 OMP_CLAUSE_DECL (nc) = controlb;
11323 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
11324 *cp2 = nc;
11325 cp2 = &OMP_CLAUSE_CHAIN (nc);
11326 nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
11327 OMP_CLAUSE_DECL (nc) = controlp;
11328 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
11329 *cp2 = nc;
11330 cp2 = &OMP_CLAUSE_CHAIN (nc);
11331
11332 *cp1 = gimple_omp_for_clauses (stmt);
11333 gimple_omp_for_set_clauses (stmt, new_clauses1);
11334 *cp2 = gimple_omp_for_clauses (new_stmt);
11335 gimple_omp_for_set_clauses (new_stmt, new_clauses2);
11336
11337 if (is_for_simd)
11338 {
11339 gimple_seq_add_seq (gimple_omp_body_ptr (scan_stmt3), scan1_list);
11340 gimple_seq_add_seq (gimple_omp_body_ptr (input_stmt4), input2_list);
11341
11342 gsi_insert_seq_after (&input3_gsi, gimple_omp_body (input_stmt3),
11343 GSI_SAME_STMT);
11344 gsi_remove (&input3_gsi, true);
11345 gsi_insert_seq_after (&scan3_gsi, gimple_omp_body (scan_stmt3),
11346 GSI_SAME_STMT);
11347 gsi_remove (&scan3_gsi, true);
11348 gsi_insert_seq_after (&input4_gsi, gimple_omp_body (input_stmt4),
11349 GSI_SAME_STMT);
11350 gsi_remove (&input4_gsi, true);
11351 gsi_insert_seq_after (&scan4_gsi, gimple_omp_body (scan_stmt4),
11352 GSI_SAME_STMT);
11353 gsi_remove (&scan4_gsi, true);
11354 }
11355 else
11356 {
11357 gimple_omp_set_body (scan_stmt1, scan1_list);
11358 gimple_omp_set_body (input_stmt2, input2_list);
11359 }
11360
11361 gsi_insert_seq_after (&input1_gsi, gimple_omp_body (input_stmt1),
11362 GSI_SAME_STMT);
11363 gsi_remove (&input1_gsi, true);
11364 gsi_insert_seq_after (&scan1_gsi, gimple_omp_body (scan_stmt1),
11365 GSI_SAME_STMT);
11366 gsi_remove (&scan1_gsi, true);
11367 gsi_insert_seq_after (&input2_gsi, gimple_omp_body (input_stmt2),
11368 GSI_SAME_STMT);
11369 gsi_remove (&input2_gsi, true);
11370 gsi_insert_seq_after (&scan2_gsi, gimple_omp_body (scan_stmt2),
11371 GSI_SAME_STMT);
11372 gsi_remove (&scan2_gsi, true);
11373
11374 gimple_seq_add_seq (body_p, clist);
11375
11376 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
11377 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
11378 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
11379 g = gimple_build_cond (EQ_EXPR, thread_num, integer_zero_node, lab1, lab2);
11380 gimple_seq_add_stmt (body_p, g);
11381 g = gimple_build_label (lab1);
11382 gimple_seq_add_stmt (body_p, g);
11383 gimple_seq_add_seq (body_p, thr01_list);
11384 g = gimple_build_goto (lab3);
11385 gimple_seq_add_stmt (body_p, g);
11386 g = gimple_build_label (lab2);
11387 gimple_seq_add_stmt (body_p, g);
11388 gimple_seq_add_seq (body_p, thrn1_list);
11389 g = gimple_build_label (lab3);
11390 gimple_seq_add_stmt (body_p, g);
11391
11392 g = gimple_build_assign (ivar, size_zero_node);
11393 gimple_seq_add_stmt (body_p, g);
11394
11395 gimple_seq_add_stmt (body_p, stmt);
11396 gimple_seq_add_seq (body_p, body);
11397 gimple_seq_add_stmt (body_p, gimple_build_omp_continue (fd->loop.v,
11398 fd->loop.v));
11399
11400 g = gimple_build_omp_return (true);
11401 gimple_seq_add_stmt (body_p, g);
11402 gimple_seq_add_seq (body_p, mdlist);
11403
11404 lab1 = create_artificial_label (UNKNOWN_LOCATION);
11405 lab2 = create_artificial_label (UNKNOWN_LOCATION);
11406 g = gimple_build_cond (GT_EXPR, num_threads, integer_one_node, lab1, lab2);
11407 gimple_seq_add_stmt (body_p, g);
11408 g = gimple_build_label (lab1);
11409 gimple_seq_add_stmt (body_p, g);
11410
11411 g = omp_build_barrier (NULL);
11412 gimple_seq_add_stmt (body_p, g);
11413
11414 tree down = create_tmp_var (unsigned_type_node);
11415 g = gimple_build_assign (down, build_zero_cst (unsigned_type_node));
11416 gimple_seq_add_stmt (body_p, g);
11417
11418 g = gimple_build_assign (k, build_one_cst (unsigned_type_node));
11419 gimple_seq_add_stmt (body_p, g);
11420
11421 tree num_threadsu = create_tmp_var (unsigned_type_node);
11422 g = gimple_build_assign (num_threadsu, NOP_EXPR, num_threads);
11423 gimple_seq_add_stmt (body_p, g);
11424
11425 tree thread_numu = create_tmp_var (unsigned_type_node);
11426 g = gimple_build_assign (thread_numu, NOP_EXPR, thread_num);
11427 gimple_seq_add_stmt (body_p, g);
11428
11429 tree thread_nump1 = create_tmp_var (unsigned_type_node);
11430 g = gimple_build_assign (thread_nump1, PLUS_EXPR, thread_numu,
11431 build_int_cst (unsigned_type_node, 1));
11432 gimple_seq_add_stmt (body_p, g);
11433
11434 lab3 = create_artificial_label (UNKNOWN_LOCATION);
11435 g = gimple_build_label (lab3);
11436 gimple_seq_add_stmt (body_p, g);
11437
11438 tree twok = create_tmp_var (unsigned_type_node);
11439 g = gimple_build_assign (twok, LSHIFT_EXPR, k, integer_one_node);
11440 gimple_seq_add_stmt (body_p, g);
11441
11442 tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
11443 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
11444 tree lab6 = create_artificial_label (UNKNOWN_LOCATION);
11445 g = gimple_build_cond (GT_EXPR, twok, num_threadsu, lab4, lab5);
11446 gimple_seq_add_stmt (body_p, g);
11447 g = gimple_build_label (lab4);
11448 gimple_seq_add_stmt (body_p, g);
11449 g = gimple_build_assign (down, build_all_ones_cst (unsigned_type_node));
11450 gimple_seq_add_stmt (body_p, g);
11451 g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
11452 gimple_seq_add_stmt (body_p, g);
11453
11454 g = gimple_build_cond (EQ_EXPR, k, num_threadsu, lab6, lab5);
11455 gimple_seq_add_stmt (body_p, g);
11456 g = gimple_build_label (lab6);
11457 gimple_seq_add_stmt (body_p, g);
11458
11459 g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
11460 gimple_seq_add_stmt (body_p, g);
11461
11462 g = gimple_build_label (lab5);
11463 gimple_seq_add_stmt (body_p, g);
11464
11465 g = gimple_build_assign (twok, LSHIFT_EXPR, k, integer_one_node);
11466 gimple_seq_add_stmt (body_p, g);
11467
11468 tree cplx = create_tmp_var (build_complex_type (unsigned_type_node, false));
11469 g = gimple_build_call_internal (IFN_MUL_OVERFLOW, 2, thread_nump1, twok);
11470 gimple_call_set_lhs (g, cplx);
11471 gimple_seq_add_stmt (body_p, g);
11472 tree mul = create_tmp_var (unsigned_type_node);
11473 g = gimple_build_assign (mul, REALPART_EXPR,
11474 build1 (REALPART_EXPR, unsigned_type_node, cplx));
11475 gimple_seq_add_stmt (body_p, g);
11476 tree ovf = create_tmp_var (unsigned_type_node);
11477 g = gimple_build_assign (ovf, IMAGPART_EXPR,
11478 build1 (IMAGPART_EXPR, unsigned_type_node, cplx));
11479 gimple_seq_add_stmt (body_p, g);
11480
11481 tree lab7 = create_artificial_label (UNKNOWN_LOCATION);
11482 tree lab8 = create_artificial_label (UNKNOWN_LOCATION);
11483 g = gimple_build_cond (EQ_EXPR, ovf, build_zero_cst (unsigned_type_node),
11484 lab7, lab8);
11485 gimple_seq_add_stmt (body_p, g);
11486 g = gimple_build_label (lab7);
11487 gimple_seq_add_stmt (body_p, g);
11488
11489 tree andv = create_tmp_var (unsigned_type_node);
11490 g = gimple_build_assign (andv, BIT_AND_EXPR, k, down);
11491 gimple_seq_add_stmt (body_p, g);
11492 tree andvm1 = create_tmp_var (unsigned_type_node);
11493 g = gimple_build_assign (andvm1, PLUS_EXPR, andv,
11494 build_minus_one_cst (unsigned_type_node));
11495 gimple_seq_add_stmt (body_p, g);
11496
11497 g = gimple_build_assign (l, PLUS_EXPR, mul, andvm1);
11498 gimple_seq_add_stmt (body_p, g);
11499
11500 tree lab9 = create_artificial_label (UNKNOWN_LOCATION);
11501 g = gimple_build_cond (LT_EXPR, l, num_threadsu, lab9, lab8);
11502 gimple_seq_add_stmt (body_p, g);
11503 g = gimple_build_label (lab9);
11504 gimple_seq_add_stmt (body_p, g);
11505 gimple_seq_add_seq (body_p, reduc_list);
11506 g = gimple_build_label (lab8);
11507 gimple_seq_add_stmt (body_p, g);
11508
11509 tree lab10 = create_artificial_label (UNKNOWN_LOCATION);
11510 tree lab11 = create_artificial_label (UNKNOWN_LOCATION);
11511 tree lab12 = create_artificial_label (UNKNOWN_LOCATION);
11512 g = gimple_build_cond (EQ_EXPR, down, build_zero_cst (unsigned_type_node),
11513 lab10, lab11);
11514 gimple_seq_add_stmt (body_p, g);
11515 g = gimple_build_label (lab10);
11516 gimple_seq_add_stmt (body_p, g);
11517 g = gimple_build_assign (k, LSHIFT_EXPR, k, integer_one_node);
11518 gimple_seq_add_stmt (body_p, g);
11519 g = gimple_build_goto (lab12);
11520 gimple_seq_add_stmt (body_p, g);
11521 g = gimple_build_label (lab11);
11522 gimple_seq_add_stmt (body_p, g);
11523 g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
11524 gimple_seq_add_stmt (body_p, g);
11525 g = gimple_build_label (lab12);
11526 gimple_seq_add_stmt (body_p, g);
11527
11528 g = omp_build_barrier (NULL);
11529 gimple_seq_add_stmt (body_p, g);
11530
11531 g = gimple_build_cond (NE_EXPR, k, build_zero_cst (unsigned_type_node),
11532 lab3, lab2);
11533 gimple_seq_add_stmt (body_p, g);
11534
11535 g = gimple_build_label (lab2);
11536 gimple_seq_add_stmt (body_p, g);
11537
11538 lab1 = create_artificial_label (UNKNOWN_LOCATION);
11539 lab2 = create_artificial_label (UNKNOWN_LOCATION);
11540 lab3 = create_artificial_label (UNKNOWN_LOCATION);
11541 g = gimple_build_cond (EQ_EXPR, thread_num, integer_zero_node, lab1, lab2);
11542 gimple_seq_add_stmt (body_p, g);
11543 g = gimple_build_label (lab1);
11544 gimple_seq_add_stmt (body_p, g);
11545 gimple_seq_add_seq (body_p, thr02_list);
11546 g = gimple_build_goto (lab3);
11547 gimple_seq_add_stmt (body_p, g);
11548 g = gimple_build_label (lab2);
11549 gimple_seq_add_stmt (body_p, g);
11550 gimple_seq_add_seq (body_p, thrn2_list);
11551 g = gimple_build_label (lab3);
11552 gimple_seq_add_stmt (body_p, g);
11553
11554 g = gimple_build_assign (ivar, size_zero_node);
11555 gimple_seq_add_stmt (body_p, g);
11556 gimple_seq_add_stmt (body_p, new_stmt);
11557 gimple_seq_add_seq (body_p, new_body);
11558
11559 gimple_seq new_dlist = NULL;
11560 lab1 = create_artificial_label (UNKNOWN_LOCATION);
11561 lab2 = create_artificial_label (UNKNOWN_LOCATION);
11562 tree num_threadsm1 = create_tmp_var (integer_type_node);
11563 g = gimple_build_assign (num_threadsm1, PLUS_EXPR, num_threads,
11564 integer_minus_one_node);
11565 gimple_seq_add_stmt (&new_dlist, g);
11566 g = gimple_build_cond (EQ_EXPR, thread_num, num_threadsm1, lab1, lab2);
11567 gimple_seq_add_stmt (&new_dlist, g);
11568 g = gimple_build_label (lab1);
11569 gimple_seq_add_stmt (&new_dlist, g);
11570 gimple_seq_add_seq (&new_dlist, last_list);
11571 g = gimple_build_label (lab2);
11572 gimple_seq_add_stmt (&new_dlist, g);
11573 gimple_seq_add_seq (&new_dlist, *dlist);
11574 *dlist = new_dlist;
11575 }
11576
11577 /* Build an internal UNIQUE function with type IFN_UNIQUE_OACC_PRIVATE listing
11578 the addresses of variables to be made private at the surrounding
11579 parallelism level. Such functions appear in the gimple code stream in two
11580 forms, e.g. for a partitioned loop:
11581
11582 .data_dep.6 = .UNIQUE (OACC_HEAD_MARK, .data_dep.6, 1, 68);
11583 .data_dep.6 = .UNIQUE (OACC_PRIVATE, .data_dep.6, -1, &w);
11584 .data_dep.6 = .UNIQUE (OACC_FORK, .data_dep.6, -1);
11585 .data_dep.6 = .UNIQUE (OACC_HEAD_MARK, .data_dep.6);
11586
11587 or alternatively, OACC_PRIVATE can appear at the top level of a parallel,
11588 not as part of a HEAD_MARK sequence:
11589
11590 .UNIQUE (OACC_PRIVATE, 0, 0, &w);
11591
11592 For such stand-alone appearances, the 3rd argument is always 0, denoting
11593 gang partitioning. */
11594
11595 static gcall *
lower_oacc_private_marker(omp_context * ctx)11596 lower_oacc_private_marker (omp_context *ctx)
11597 {
11598 if (ctx->oacc_privatization_candidates.length () == 0)
11599 return NULL;
11600
11601 auto_vec<tree, 5> args;
11602
11603 args.quick_push (build_int_cst (integer_type_node, IFN_UNIQUE_OACC_PRIVATE));
11604 args.quick_push (integer_zero_node);
11605 args.quick_push (integer_minus_one_node);
11606
11607 int i;
11608 tree decl;
11609 FOR_EACH_VEC_ELT (ctx->oacc_privatization_candidates, i, decl)
11610 {
11611 gcc_checking_assert (TREE_ADDRESSABLE (decl));
11612 tree addr = build_fold_addr_expr (decl);
11613 args.safe_push (addr);
11614 }
11615
11616 return gimple_build_call_internal_vec (IFN_UNIQUE, args);
11617 }
11618
/* Lower code for an OMP loop directive.  Replaces the GIMPLE_OMP_FOR at
   GSI_P with a GIMPLE_BIND that contains the fully lowered region:
   data-sharing clause setup, the loop statement itself, reduction and
   lastprivate handling, and the region exit marker.  CTX is the
   omp_context for the directive.  */

static void
lower_omp_for (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree *rhs_p, block;
  struct omp_for_data fd, *fdp = NULL;
  gomp_for *stmt = as_a <gomp_for *> (gsi_stmt (*gsi_p));
  gbind *new_stmt;
  gimple_seq omp_for_body, body, dlist, tred_ilist = NULL, tred_dlist = NULL;
  gimple_seq cnt_list = NULL, clist = NULL;
  gimple_seq oacc_head = NULL, oacc_tail = NULL;
  size_t i;

  push_gimplify_context ();

  if (is_gimple_omp_oacc (ctx->stmt))
    oacc_privatization_scan_clause_chain (ctx, gimple_omp_for_clauses (stmt));

  lower_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);

  block = make_node (BLOCK);
  new_stmt = gimple_build_bind (NULL, NULL, block);
  /* Replace at gsi right away, so that 'stmt' is no member
     of a sequence anymore as we're going to add to a different
     one below.  */
  gsi_replace (gsi_p, new_stmt, true);

  /* Move declaration of temporaries in the loop body before we make
     it go away.  */
  omp_for_body = gimple_omp_body (stmt);
  if (!gimple_seq_empty_p (omp_for_body)
      && gimple_code (gimple_seq_first_stmt (omp_for_body)) == GIMPLE_BIND)
    {
      gbind *inner_bind
	= as_a <gbind *> (gimple_seq_first_stmt (omp_for_body));
      tree vars = gimple_bind_vars (inner_bind);
      if (is_gimple_omp_oacc (ctx->stmt))
	oacc_privatization_scan_decl_chain (ctx, vars);
      gimple_bind_append_vars (new_stmt, vars);
      /* bind_vars/BLOCK_VARS are being moved to new_stmt/block, don't
	 keep them on the inner_bind and it's block.  */
      gimple_bind_set_vars (inner_bind, NULL_TREE);
      if (gimple_bind_block (inner_bind))
	BLOCK_VARS (gimple_bind_block (inner_bind)) = NULL_TREE;
    }

  /* For a loop combined into an outer construct, add _LOOPTEMP_ clauses
     so istart/iend (and, for non-constant collapsed counts, the extra
     count temporaries) can be communicated with the outer construct.  */
  if (gimple_omp_for_combined_into_p (stmt))
    {
      omp_extract_for_data (stmt, &fd, NULL);
      fdp = &fd;

      /* We need two temporaries with fd.loop.v type (istart/iend)
	 and then (fd.collapse - 1) temporaries with the same
	 type for count2 ... countN-1 vars if not constant.  */
      size_t count = 2;
      tree type = fd.iter_type;
      if (fd.collapse > 1
	  && TREE_CODE (fd.loop.n2) != INTEGER_CST)
	count += fd.collapse - 1;
      size_t count2 = 0;
      tree type2 = NULL_TREE;
      bool taskreg_for
	= (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR
	   || gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP);
      tree outerc = NULL, *pc = gimple_omp_for_clauses_ptr (stmt);
      tree simtc = NULL;
      tree clauses = *pc;
      /* For a pair of adjacent non-rectangular loops with a signed
	 iterator, three extra temporaries of the iterator's type are
	 needed.  */
      if (fd.collapse > 1
	  && fd.non_rect
	  && fd.last_nonrect == fd.first_nonrect + 1
	  && TREE_CODE (fd.loop.n2) != INTEGER_CST)
	if (tree v = gimple_omp_for_index (stmt, fd.last_nonrect))
	  if (!TYPE_UNSIGNED (TREE_TYPE (v)))
	    {
	      v = gimple_omp_for_index (stmt, fd.first_nonrect);
	      type2 = TREE_TYPE (v);
	      count++;
	      count2 = 3;
	    }
      if (taskreg_for)
	outerc
	  = omp_find_clause (gimple_omp_taskreg_clauses (ctx->outer->stmt),
			     OMP_CLAUSE__LOOPTEMP_);
      if (ctx->simt_stmt)
	simtc = omp_find_clause (gimple_omp_for_clauses (ctx->simt_stmt),
				 OMP_CLAUSE__LOOPTEMP_);
      for (i = 0; i < count + count2; i++)
	{
	  tree temp;
	  if (taskreg_for)
	    {
	      gcc_assert (outerc);
	      temp = lookup_decl (OMP_CLAUSE_DECL (outerc), ctx->outer);
	      outerc = omp_find_clause (OMP_CLAUSE_CHAIN (outerc),
					OMP_CLAUSE__LOOPTEMP_);
	    }
	  else
	    {
	      /* If there are 2 adjacent SIMD stmts, one with _simt_
		 clause, another without, make sure they have the same
		 decls in _looptemp_ clauses, because the outer stmt
		 they are combined into will look up just one inner_stmt.  */
	      if (ctx->simt_stmt)
		temp = OMP_CLAUSE_DECL (simtc);
	      else
		temp = create_tmp_var (i >= count ? type2 : type);
	      insert_decl_map (&ctx->outer->cb, temp, temp);
	    }
	  *pc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
	  OMP_CLAUSE_DECL (*pc) = temp;
	  pc = &OMP_CLAUSE_CHAIN (*pc);
	  if (ctx->simt_stmt)
	    simtc = omp_find_clause (OMP_CLAUSE_CHAIN (simtc),
				     OMP_CLAUSE__LOOPTEMP_);
	}
      *pc = clauses;
    }

  /* The pre-body and input clauses go before the lowered GIMPLE_OMP_FOR.  */
  dlist = NULL;
  body = NULL;
  /* If there are task reductions, prepend a _REDUCTEMP_ clause and build
     the initialization (tred_ilist) and teardown (tred_dlist) sequences.  */
  tree rclauses
    = omp_task_reductions_find_first (gimple_omp_for_clauses (stmt), OMP_FOR,
				      OMP_CLAUSE_REDUCTION);
  tree rtmp = NULL_TREE;
  if (rclauses)
    {
      tree type = build_pointer_type (pointer_sized_int_node);
      tree temp = create_tmp_var (type);
      tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
      OMP_CLAUSE_DECL (c) = temp;
      OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (stmt);
      gimple_omp_for_set_clauses (stmt, c);
      lower_omp_task_reductions (ctx, OMP_FOR,
				 gimple_omp_for_clauses (stmt),
				 &tred_ilist, &tred_dlist);
      rclauses = c;
      rtmp = make_ssa_name (type);
      gimple_seq_add_stmt (&body, gimple_build_assign (rtmp, temp));
    }

  lower_lastprivate_conditional_clauses (gimple_omp_for_clauses_ptr (stmt),
					 ctx);

  lower_rec_input_clauses (gimple_omp_for_clauses (stmt), &body, &dlist, ctx,
			   fdp);
  gimple_seq_add_seq (rclauses ? &tred_ilist : &body,
		      gimple_omp_for_pre_body (stmt));

  lower_omp (gimple_omp_body_ptr (stmt), ctx);

  /* For OpenACC, emit the OACC_PRIVATE marker for variables privatized at
     this parallelism level (see lower_oacc_private_marker).  */
  gcall *private_marker = NULL;
  if (is_gimple_omp_oacc (ctx->stmt)
      && !gimple_seq_empty_p (omp_for_body))
    private_marker = lower_oacc_private_marker (ctx);

  /* Lower the header expressions.  At this point, we can assume that
     the header is of the form:

	#pragma omp for (V = VAL1; V {<|>|<=|>=} VAL2; V = V [+-] VAL3)

     We just need to make sure that VAL1, VAL2 and VAL3 are lowered
     using the .omp_data_s mapping, if needed.  */
  for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
    {
      rhs_p = gimple_omp_for_initial_ptr (stmt, i);
      /* A TREE_VEC here represents a non-rectangular bound; lower its
	 factor (elt 1) and offset (elt 2) separately.  */
      if (TREE_CODE (*rhs_p) == TREE_VEC)
	{
	  if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 1)))
	    TREE_VEC_ELT (*rhs_p, 1)
	      = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 1), &cnt_list);
	  if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 2)))
	    TREE_VEC_ELT (*rhs_p, 2)
	      = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 2), &cnt_list);
	}
      else if (!is_gimple_min_invariant (*rhs_p))
	*rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
      else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
	recompute_tree_invariant_for_addr_expr (*rhs_p);

      rhs_p = gimple_omp_for_final_ptr (stmt, i);
      if (TREE_CODE (*rhs_p) == TREE_VEC)
	{
	  if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 1)))
	    TREE_VEC_ELT (*rhs_p, 1)
	      = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 1), &cnt_list);
	  if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 2)))
	    TREE_VEC_ELT (*rhs_p, 2)
	      = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 2), &cnt_list);
	}
      else if (!is_gimple_min_invariant (*rhs_p))
	*rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
      else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
	recompute_tree_invariant_for_addr_expr (*rhs_p);

      rhs_p = &TREE_OPERAND (gimple_omp_for_incr (stmt, i), 1);
      if (!is_gimple_min_invariant (*rhs_p))
	*rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
    }
  /* With task reductions the header-lowering statements must run after the
     reduction initialization, hence go onto tred_ilist.  */
  if (rclauses)
    gimple_seq_add_seq (&tred_ilist, cnt_list);
  else
    gimple_seq_add_seq (&body, cnt_list);

  /* Once lowered, extract the bounds and clauses.  */
  omp_extract_for_data (stmt, &fd, NULL);

  if (is_gimple_omp_oacc (ctx->stmt)
      && !ctx_in_oacc_kernels_region (ctx))
    lower_oacc_head_tail (gimple_location (stmt),
			  gimple_omp_for_clauses (stmt), private_marker,
			  &oacc_head, &oacc_tail, ctx);

  /* Add OpenACC partitioning and reduction markers just before the loop.  */
  if (oacc_head)
    gimple_seq_add_seq (&body, oacc_head);

  lower_omp_for_lastprivate (&fd, &body, &dlist, &clist, ctx);

  /* Remap linear clause decls (and their steps, if they are decls) into
     this context for worksharing loops with copy-in.  */
  if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
    for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
	  && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
	{
	  OMP_CLAUSE_DECL (c) = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
	  if (DECL_P (OMP_CLAUSE_LINEAR_STEP (c)))
	    OMP_CLAUSE_LINEAR_STEP (c)
	      = maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_LINEAR_STEP (c),
						ctx);
	}

  /* Worksharing loops with scan directives inside get specialized
     lowering; otherwise just emit the loop stmt followed by its body.  */
  if ((ctx->scan_inclusive || ctx->scan_exclusive)
      && gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
    lower_omp_for_scan (&body, &dlist, stmt, &fd, ctx);
  else
    {
      gimple_seq_add_stmt (&body, stmt);
      gimple_seq_add_seq (&body, gimple_omp_body (stmt));
    }

  gimple_seq_add_stmt (&body, gimple_build_omp_continue (fd.loop.v,
							 fd.loop.v));

  /* After the loop, add exit clauses.  */
  lower_reduction_clauses (gimple_omp_for_clauses (stmt), &body, &clist, ctx);

  /* Reductions accumulated into clist must run inside an atomic region.  */
  if (clist)
    {
      tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START);
      gcall *g = gimple_build_call (fndecl, 0);
      gimple_seq_add_stmt (&body, g);
      gimple_seq_add_seq (&body, clist);
      fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END);
      g = gimple_build_call (fndecl, 0);
      gimple_seq_add_stmt (&body, g);
    }

  if (ctx->cancellable)
    gimple_seq_add_stmt (&body, gimple_build_label (ctx->cancel_label));

  gimple_seq_add_seq (&body, dlist);

  if (rclauses)
    {
      gimple_seq_add_seq (&tred_ilist, body);
      body = tred_ilist;
    }

  body = maybe_catch_exception (body);

  /* Region exit marker goes at the end of the loop body.  */
  gimple *g = gimple_build_omp_return (fd.have_nowait);
  gimple_seq_add_stmt (&body, g);

  gimple_seq_add_seq (&body, tred_dlist);

  maybe_add_implicit_barrier_cancel (ctx, g, &body);

  if (rclauses)
    OMP_CLAUSE_DECL (rclauses) = rtmp;

  /* Add OpenACC joining and reduction markers just after the loop.  */
  if (oacc_tail)
    gimple_seq_add_seq (&body, oacc_tail);

  pop_gimplify_context (new_stmt);

  gimple_bind_append_vars (new_stmt, ctx->block_vars);
  maybe_remove_omp_member_access_dummy_vars (new_stmt);
  BLOCK_VARS (block) = gimple_bind_vars (new_stmt);
  if (BLOCK_VARS (block))
    TREE_USED (block) = 1;

  /* Install the lowered body into the replacement bind and detach the
     original body/pre-body from the GIMPLE_OMP_FOR.  */
  gimple_bind_set_body (new_stmt, body);
  gimple_omp_set_body (stmt, NULL);
  gimple_omp_for_set_pre_body (stmt, NULL);
}
11917
11918 /* Callback for walk_stmts. Check if the current statement only contains
11919 GIMPLE_OMP_FOR or GIMPLE_OMP_SECTIONS. */
11920
11921 static tree
check_combined_parallel(gimple_stmt_iterator * gsi_p,bool * handled_ops_p,struct walk_stmt_info * wi)11922 check_combined_parallel (gimple_stmt_iterator *gsi_p,
11923 bool *handled_ops_p,
11924 struct walk_stmt_info *wi)
11925 {
11926 int *info = (int *) wi->info;
11927 gimple *stmt = gsi_stmt (*gsi_p);
11928
11929 *handled_ops_p = true;
11930 switch (gimple_code (stmt))
11931 {
11932 WALK_SUBSTMTS;
11933
11934 case GIMPLE_DEBUG:
11935 break;
11936 case GIMPLE_OMP_FOR:
11937 case GIMPLE_OMP_SECTIONS:
11938 *info = *info == 0 ? 1 : -1;
11939 break;
11940 default:
11941 *info = -1;
11942 break;
11943 }
11944 return NULL;
11945 }
11946
/* Context used while building a task copy function (see
   create_task_copyfn).  */

struct omp_taskcopy_context
{
  /* This field must be at the beginning, as we do "inheritance": Some
     callback functions for tree-inline.cc (e.g., omp_copy_decl)
     receive a copy_body_data pointer that is up-casted to an
     omp_context pointer.  */
  copy_body_data cb;
  /* The omp_context of the task whose copy function is being built.  */
  omp_context *ctx;
};
11956
11957 static tree
task_copyfn_copy_decl(tree var,copy_body_data * cb)11958 task_copyfn_copy_decl (tree var, copy_body_data *cb)
11959 {
11960 struct omp_taskcopy_context *tcctx = (struct omp_taskcopy_context *) cb;
11961
11962 if (splay_tree_lookup (tcctx->ctx->sfield_map, (splay_tree_key) var))
11963 return create_tmp_var (TREE_TYPE (var));
11964
11965 return var;
11966 }
11967
11968 static tree
task_copyfn_remap_type(struct omp_taskcopy_context * tcctx,tree orig_type)11969 task_copyfn_remap_type (struct omp_taskcopy_context *tcctx, tree orig_type)
11970 {
11971 tree name, new_fields = NULL, type, f;
11972
11973 type = lang_hooks.types.make_type (RECORD_TYPE);
11974 name = DECL_NAME (TYPE_NAME (orig_type));
11975 name = build_decl (gimple_location (tcctx->ctx->stmt),
11976 TYPE_DECL, name, type);
11977 TYPE_NAME (type) = name;
11978
11979 for (f = TYPE_FIELDS (orig_type); f ; f = TREE_CHAIN (f))
11980 {
11981 tree new_f = copy_node (f);
11982 DECL_CONTEXT (new_f) = type;
11983 TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &tcctx->cb);
11984 TREE_CHAIN (new_f) = new_fields;
11985 walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &tcctx->cb, NULL);
11986 walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r, &tcctx->cb, NULL);
11987 walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
11988 &tcctx->cb, NULL);
11989 new_fields = new_f;
11990 tcctx->cb.decl_map->put (f, new_f);
11991 }
11992 TYPE_FIELDS (type) = nreverse (new_fields);
11993 layout_type (type);
11994 return type;
11995 }
11996
/* Create task copyfn.  Populates the body of the copy function declared
   for TASK_STMT: the function takes a destination record pointer and a
   source ("sender") record pointer and copies/constructs the fields
   according to the task's data-sharing clauses.  CTX is the task's
   omp_context.  */

static void
create_task_copyfn (gomp_task *task_stmt, omp_context *ctx)
{
  struct function *child_cfun;
  tree child_fn, t, c, src, dst, f, sf, arg, sarg, decl;
  tree record_type, srecord_type, bind, list;
  bool record_needs_remap = false, srecord_needs_remap = false;
  splay_tree_node n;
  struct omp_taskcopy_context tcctx;
  location_t loc = gimple_location (task_stmt);
  size_t looptempno = 0;

  child_fn = gimple_omp_task_copy_fn (task_stmt);
  task_cpyfns.safe_push (task_stmt);
  child_cfun = DECL_STRUCT_FUNCTION (child_fn);
  gcc_assert (child_cfun->cfg == NULL);
  DECL_SAVED_TREE (child_fn) = alloc_stmt_list ();

  /* Reset DECL_CONTEXT on function arguments.  */
  for (t = DECL_ARGUMENTS (child_fn); t; t = DECL_CHAIN (t))
    DECL_CONTEXT (t) = child_fn;

  /* Populate the function.  */
  push_gimplify_context ();
  push_cfun (child_cfun);

  bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
  TREE_SIDE_EFFECTS (bind) = 1;
  list = NULL;
  DECL_SAVED_TREE (child_fn) = bind;
  DECL_SOURCE_LOCATION (child_fn) = gimple_location (task_stmt);

  /* Remap src and dst argument types if needed.  A remap is needed when
     any field has a variably modified type (e.g. VLAs).  */
  record_type = ctx->record_type;
  srecord_type = ctx->srecord_type;
  for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
    if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
      {
	record_needs_remap = true;
	break;
      }
  for (f = TYPE_FIELDS (srecord_type); f ; f = DECL_CHAIN (f))
    if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
      {
	srecord_needs_remap = true;
	break;
      }

  if (record_needs_remap || srecord_needs_remap)
    {
      /* Set up a copy_body_data for task_copyfn_remap_type and the
	 walk_tree calls it performs.  */
      memset (&tcctx, '\0', sizeof (tcctx));
      tcctx.cb.src_fn = ctx->cb.src_fn;
      tcctx.cb.dst_fn = child_fn;
      tcctx.cb.src_node = cgraph_node::get (tcctx.cb.src_fn);
      gcc_checking_assert (tcctx.cb.src_node);
      tcctx.cb.dst_node = tcctx.cb.src_node;
      tcctx.cb.src_cfun = ctx->cb.src_cfun;
      tcctx.cb.copy_decl = task_copyfn_copy_decl;
      tcctx.cb.eh_lp_nr = 0;
      tcctx.cb.transform_call_graph_edges = CB_CGE_MOVE;
      tcctx.cb.decl_map = new hash_map<tree, tree>;
      tcctx.ctx = ctx;

      if (record_needs_remap)
	record_type = task_copyfn_remap_type (&tcctx, record_type);
      if (srecord_needs_remap)
	srecord_type = task_copyfn_remap_type (&tcctx, srecord_type);
    }
  else
    /* decl_map doubles as the "was anything remapped" flag below.  */
    tcctx.cb.decl_map = NULL;

  /* Retype the two pointer arguments to the (possibly remapped)
     destination and sender records.  */
  arg = DECL_ARGUMENTS (child_fn);
  TREE_TYPE (arg) = build_pointer_type (record_type);
  sarg = DECL_CHAIN (arg);
  TREE_TYPE (sarg) = build_pointer_type (srecord_type);

  /* First pass: initialize temporaries used in record_type and srecord_type
     sizes and field offsets.  */
  if (tcctx.cb.decl_map)
    for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
	{
	  tree *p;

	  decl = OMP_CLAUSE_DECL (c);
	  p = tcctx.cb.decl_map->get (decl);
	  if (p == NULL)
	    continue;
	  n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
	  sf = (tree) n->value;
	  sf = *tcctx.cb.decl_map->get (sf);
	  src = build_simple_mem_ref_loc (loc, sarg);
	  src = omp_build_component_ref (src, sf);
	  t = build2 (MODIFY_EXPR, TREE_TYPE (*p), *p, src);
	  append_to_statement_list (t, &list);
	}

  /* Second pass: copy shared var pointers and copy construct non-VLA
     firstprivate vars.  */
  for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
    switch (OMP_CLAUSE_CODE (c))
      {
	splay_tree_key key;
      case OMP_CLAUSE_SHARED:
	decl = OMP_CLAUSE_DECL (c);
	key = (splay_tree_key) decl;
	/* SHARED_FIRSTPRIVATE decls are keyed by &DECL_UID instead of the
	   decl itself.  */
	if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	  key = (splay_tree_key) &DECL_UID (decl);
	n = splay_tree_lookup (ctx->field_map, key);
	if (n == NULL)
	  break;
	f = (tree) n->value;
	if (tcctx.cb.decl_map)
	  f = *tcctx.cb.decl_map->get (f);
	n = splay_tree_lookup (ctx->sfield_map, key);
	sf = (tree) n->value;
	if (tcctx.cb.decl_map)
	  sf = *tcctx.cb.decl_map->get (sf);
	/* Plain pointer copy: *arg.f = *sarg.sf.  */
	src = build_simple_mem_ref_loc (loc, sarg);
	src = omp_build_component_ref (src, sf);
	dst = build_simple_mem_ref_loc (loc, arg);
	dst = omp_build_component_ref (dst, f);
	t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
	append_to_statement_list (t, &list);
	break;
      case OMP_CLAUSE_REDUCTION:
      case OMP_CLAUSE_IN_REDUCTION:
	decl = OMP_CLAUSE_DECL (c);
	/* Strip the MEM_REF/offset wrapping to get at the base decl used
	   as the field-map key.  */
	if (TREE_CODE (decl) == MEM_REF)
	  {
	    decl = TREE_OPERAND (decl, 0);
	    if (TREE_CODE (decl) == POINTER_PLUS_EXPR)
	      decl = TREE_OPERAND (decl, 0);
	    if (TREE_CODE (decl) == INDIRECT_REF
		|| TREE_CODE (decl) == ADDR_EXPR)
	      decl = TREE_OPERAND (decl, 0);
	  }
	key = (splay_tree_key) decl;
	n = splay_tree_lookup (ctx->field_map, key);
	if (n == NULL)
	  break;
	f = (tree) n->value;
	if (tcctx.cb.decl_map)
	  f = *tcctx.cb.decl_map->get (f);
	n = splay_tree_lookup (ctx->sfield_map, key);
	sf = (tree) n->value;
	if (tcctx.cb.decl_map)
	  sf = *tcctx.cb.decl_map->get (sf);
	src = build_simple_mem_ref_loc (loc, sarg);
	src = omp_build_component_ref (src, sf);
	if (decl != OMP_CLAUSE_DECL (c)
	    && TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
	    && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE)
	  src = build_simple_mem_ref_loc (loc, src);
	dst = build_simple_mem_ref_loc (loc, arg);
	dst = omp_build_component_ref (dst, f);
	t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
	append_to_statement_list (t, &list);
	break;
      case OMP_CLAUSE__LOOPTEMP_:
	/* Fields for first two _looptemp_ clauses are initialized by
	   GOMP_taskloop*, the rest are handled like firstprivate.  */
	if (looptempno < 2)
	  {
	    looptempno++;
	    break;
	  }
	/* FALLTHRU */
      case OMP_CLAUSE__REDUCTEMP_:
      case OMP_CLAUSE_FIRSTPRIVATE:
	decl = OMP_CLAUSE_DECL (c);
	/* VLA firstprivates are handled in the last pass below.  */
	if (is_variable_sized (decl))
	  break;
	n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
	if (n == NULL)
	  break;
	f = (tree) n->value;
	if (tcctx.cb.decl_map)
	  f = *tcctx.cb.decl_map->get (f);
	n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
	if (n != NULL)
	  {
	    sf = (tree) n->value;
	    if (tcctx.cb.decl_map)
	      sf = *tcctx.cb.decl_map->get (sf);
	    src = build_simple_mem_ref_loc (loc, sarg);
	    src = omp_build_component_ref (src, sf);
	    if (use_pointer_for_field (decl, NULL)
		|| omp_privatize_by_reference (decl))
	      src = build_simple_mem_ref_loc (loc, src);
	  }
	else
	  src = decl;
	dst = build_simple_mem_ref_loc (loc, arg);
	dst = omp_build_component_ref (dst, f);
	if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
	  t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
	else
	  {
	    /* If the decl has an allocate clause, obtain the storage from
	       GOMP_alloc with the requested allocator/alignment and copy
	       construct into that.  */
	    if (ctx->allocate_map)
	      if (tree *allocatorp = ctx->allocate_map->get (decl))
		{
		  tree allocator = *allocatorp;
		  HOST_WIDE_INT ialign = 0;
		  if (TREE_CODE (allocator) == TREE_LIST)
		    {
		      ialign = tree_to_uhwi (TREE_VALUE (allocator));
		      allocator = TREE_PURPOSE (allocator);
		    }
		  if (TREE_CODE (allocator) != INTEGER_CST)
		    {
		      n = splay_tree_lookup (ctx->sfield_map,
					     (splay_tree_key) allocator);
		      allocator = (tree) n->value;
		      if (tcctx.cb.decl_map)
			allocator = *tcctx.cb.decl_map->get (allocator);
		      tree a = build_simple_mem_ref_loc (loc, sarg);
		      allocator = omp_build_component_ref (a, allocator);
		    }
		  allocator = fold_convert (pointer_sized_int_node, allocator);
		  tree a = builtin_decl_explicit (BUILT_IN_GOMP_ALLOC);
		  tree align = build_int_cst (size_type_node,
					      MAX (ialign,
						   DECL_ALIGN_UNIT (decl)));
		  tree sz = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (dst)));
		  tree ptr = build_call_expr_loc (loc, a, 3, align, sz,
						  allocator);
		  ptr = fold_convert (TREE_TYPE (dst), ptr);
		  t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, ptr);
		  append_to_statement_list (t, &list);
		  dst = build_simple_mem_ref_loc (loc, dst);
		}
	    /* Language-specific copy construction for firstprivate.  */
	    t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
	  }
	append_to_statement_list (t, &list);
	break;
      case OMP_CLAUSE_PRIVATE:
	if (! OMP_CLAUSE_PRIVATE_OUTER_REF (c))
	  break;
	decl = OMP_CLAUSE_DECL (c);
	n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
	f = (tree) n->value;
	if (tcctx.cb.decl_map)
	  f = *tcctx.cb.decl_map->get (f);
	n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
	if (n != NULL)
	  {
	    sf = (tree) n->value;
	    if (tcctx.cb.decl_map)
	      sf = *tcctx.cb.decl_map->get (sf);
	    src = build_simple_mem_ref_loc (loc, sarg);
	    src = omp_build_component_ref (src, sf);
	    if (use_pointer_for_field (decl, NULL))
	      src = build_simple_mem_ref_loc (loc, src);
	  }
	else
	  src = decl;
	dst = build_simple_mem_ref_loc (loc, arg);
	dst = omp_build_component_ref (dst, f);
	t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
	append_to_statement_list (t, &list);
	break;
      default:
	break;
      }

  /* Last pass: handle VLA firstprivates.  */
  if (tcctx.cb.decl_map)
    for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
	{
	  tree ind, ptr, df;

	  decl = OMP_CLAUSE_DECL (c);
	  if (!is_variable_sized (decl))
	    continue;
	  n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
	  if (n == NULL)
	    continue;
	  f = (tree) n->value;
	  f = *tcctx.cb.decl_map->get (f);
	  /* A VLA decl has a DECL_VALUE_EXPR of the form *ptr_decl; the
	     pointer decl is the key into the field maps.  */
	  gcc_assert (DECL_HAS_VALUE_EXPR_P (decl));
	  ind = DECL_VALUE_EXPR (decl);
	  gcc_assert (TREE_CODE (ind) == INDIRECT_REF);
	  gcc_assert (DECL_P (TREE_OPERAND (ind, 0)));
	  n = splay_tree_lookup (ctx->sfield_map,
				 (splay_tree_key) TREE_OPERAND (ind, 0));
	  sf = (tree) n->value;
	  sf = *tcctx.cb.decl_map->get (sf);
	  /* Copy construct the VLA data ...  */
	  src = build_simple_mem_ref_loc (loc, sarg);
	  src = omp_build_component_ref (src, sf);
	  src = build_simple_mem_ref_loc (loc, src);
	  dst = build_simple_mem_ref_loc (loc, arg);
	  dst = omp_build_component_ref (dst, f);
	  t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
	  append_to_statement_list (t, &list);
	  /* ... and point the destination's pointer field at it.  */
	  n = splay_tree_lookup (ctx->field_map,
				 (splay_tree_key) TREE_OPERAND (ind, 0));
	  df = (tree) n->value;
	  df = *tcctx.cb.decl_map->get (df);
	  ptr = build_simple_mem_ref_loc (loc, arg);
	  ptr = omp_build_component_ref (ptr, df);
	  t = build2 (MODIFY_EXPR, TREE_TYPE (ptr), ptr,
		      build_fold_addr_expr_loc (loc, dst));
	  append_to_statement_list (t, &list);
	}

  t = build1 (RETURN_EXPR, void_type_node, NULL);
  append_to_statement_list (t, &list);

  if (tcctx.cb.decl_map)
    delete tcctx.cb.decl_map;
  pop_gimplify_context (NULL);
  BIND_EXPR_BODY (bind) = list;
  pop_cfun ();
}
12315
/* Lower OMP_CLAUSE_DEPEND clauses found in *PCLAUSES into the flat
   array-of-pointers form the libgomp runtime expects.  Statements that
   initialize the array are appended to *ISEQ; a clobber that ends the
   array's lifetime is appended to *OSEQ.  A new OMP_CLAUSE_DEPEND clause
   with kind OMP_CLAUSE_DEPEND_LAST whose decl is the address of the array
   is prepended to *PCLAUSES so later lowering can hand it to the runtime.

   Array layout (all elements have pointer type):
     - legacy layout (only in/out/inout dependences), 2 header slots:
	 [0] total number of dependences, [1] number of out/inout ones,
	 followed by the addresses of all dependences;
     - when mutexinoutset or depobj dependences are present, 5 header
       slots: [0] 0 (marks the new layout), [1] total, [2] out/inout
       count, [3] mutexinoutset count, [4] in count, followed by the
       addresses grouped in that order, with depobj entries last.  */

static void
lower_depend_clauses (tree *pclauses, gimple_seq *iseq, gimple_seq *oseq)
{
  tree c, clauses;
  gimple *g;
  /* cnt[0] = out/inout, cnt[1] = mutexinoutset, cnt[2] = in,
     cnt[3] = depobj.  IDX is the number of header slots (2 or 5),
     reused afterwards as the running store index into the array.  */
  size_t cnt[4] = { 0, 0, 0, 0 }, idx = 2, i;

  clauses = omp_find_clause (*pclauses, OMP_CLAUSE_DEPEND);
  gcc_assert (clauses);
  /* First pass: count the dependences of each kind.  */
  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
      switch (OMP_CLAUSE_DEPEND_KIND (c))
	{
	case OMP_CLAUSE_DEPEND_LAST:
	  /* Lowering already done at gimplification.  */
	  return;
	case OMP_CLAUSE_DEPEND_IN:
	  cnt[2]++;
	  break;
	case OMP_CLAUSE_DEPEND_OUT:
	case OMP_CLAUSE_DEPEND_INOUT:
	  cnt[0]++;
	  break;
	case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
	  cnt[1]++;
	  break;
	case OMP_CLAUSE_DEPEND_DEPOBJ:
	  cnt[3]++;
	  break;
	case OMP_CLAUSE_DEPEND_SOURCE:
	case OMP_CLAUSE_DEPEND_SINK:
	  /* FALLTHRU */
	default:
	  /* source/sink (doacross) dependences are handled elsewhere and
	     must not reach this point.  */
	  gcc_unreachable ();
	}
  /* mutexinoutset or depobj entries force the 5-slot header layout.  */
  if (cnt[1] || cnt[3])
    idx = 5;
  size_t total = cnt[0] + cnt[1] + cnt[2] + cnt[3];
  tree type = build_array_type_nelts (ptr_type_node, total + idx);
  tree array = create_tmp_var (type);
  TREE_ADDRESSABLE (array) = 1;
  tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
		   NULL_TREE);
  if (idx == 5)
    {
      /* New layout: element 0 is 0, which tells the runtime the counts
	 follow in elements 2..4; the total then goes into element 1.  */
      g = gimple_build_assign (r, build_int_cst (ptr_type_node, 0));
      gimple_seq_add_stmt (iseq, g);
      r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
		  NULL_TREE);
    }
  g = gimple_build_assign (r, build_int_cst (ptr_type_node, total));
  gimple_seq_add_stmt (iseq, g);
  /* Store the per-kind counts: just cnt[0] for the legacy layout,
     cnt[0..2] for the new one (the depobj count is implied).  */
  for (i = 0; i < (idx == 5 ? 3 : 1); i++)
    {
      r = build4 (ARRAY_REF, ptr_type_node, array,
		  size_int (i + 1 + (idx == 5)), NULL_TREE, NULL_TREE);
      g = gimple_build_assign (r, build_int_cst (ptr_type_node, cnt[i]));
      gimple_seq_add_stmt (iseq, g);
    }
  /* Second pass: store the address of each dependence, grouped by kind
     in the order out/inout, mutexinoutset, in, depobj.  */
  for (i = 0; i < 4; i++)
    {
      if (cnt[i] == 0)
	continue;
      for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
	  continue;
	else
	  {
	    /* Only handle the kind belonging to group I on this sweep.  */
	    switch (OMP_CLAUSE_DEPEND_KIND (c))
	      {
	      case OMP_CLAUSE_DEPEND_IN:
		if (i != 2)
		  continue;
		break;
	      case OMP_CLAUSE_DEPEND_OUT:
	      case OMP_CLAUSE_DEPEND_INOUT:
		if (i != 0)
		  continue;
		break;
	      case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
		if (i != 1)
		  continue;
		break;
	      case OMP_CLAUSE_DEPEND_DEPOBJ:
		if (i != 3)
		  continue;
		break;
	      default:
		gcc_unreachable ();
	      }
	    tree t = OMP_CLAUSE_DECL (c);
	    t = fold_convert (ptr_type_node, t);
	    gimplify_expr (&t, iseq, NULL, is_gimple_val, fb_rvalue);
	    r = build4 (ARRAY_REF, ptr_type_node, array, size_int (idx++),
			NULL_TREE, NULL_TREE);
	    g = gimple_build_assign (r, t);
	    gimple_seq_add_stmt (iseq, g);
	  }
    }
  /* Record the lowered array on the clause chain so that the later task
     lowering can pass its address to the runtime; DEPEND_LAST marks it
     as already lowered.  */
  c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
  OMP_CLAUSE_DEPEND_KIND (c) = OMP_CLAUSE_DEPEND_LAST;
  OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
  OMP_CLAUSE_CHAIN (c) = *pclauses;
  *pclauses = c;
  /* End the array's lifetime after the construct.  */
  tree clobber = build_clobber (type);
  g = gimple_build_assign (array, clobber);
  gimple_seq_add_stmt (oseq, g);
}
12424
12425 /* Lower the OpenMP parallel or task directive in the current statement
12426 in GSI_P. CTX holds context information for the directive. */
12427
static void
lower_omp_taskreg (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree clauses;
  tree child_fn, t;
  gimple *stmt = gsi_stmt (*gsi_p);
  gbind *par_bind, *bind, *dep_bind = NULL;
  gimple_seq par_body;
  location_t loc = gimple_location (stmt);

  clauses = gimple_omp_taskreg_clauses (stmt);
  /* A "#pragma omp taskwait depend(...)" task has no body at all.  */
  if (gimple_code (stmt) == GIMPLE_OMP_TASK
      && gimple_omp_task_taskwait_p (stmt))
    {
      par_bind = NULL;
      par_body = NULL;
    }
  else
    {
      par_bind
	= as_a <gbind *> (gimple_seq_first_stmt (gimple_omp_body (stmt)));
      par_body = gimple_bind_body (par_bind);
    }
  child_fn = ctx->cb.dst_fn;
  if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL
      && !gimple_omp_parallel_combined_p (stmt))
    {
      /* If the parallel body consists of exactly one worksharing
	 construct (per check_combined_parallel's count in ws_num),
	 mark the parallel as combined so expansion can use the
	 combined runtime entry points.  */
      struct walk_stmt_info wi;
      int ws_num = 0;

      memset (&wi, 0, sizeof (wi));
      wi.info = &ws_num;
      wi.val_only = true;
      walk_gimple_seq (par_body, check_combined_parallel, NULL, &wi);
      if (ws_num == 1)
	gimple_omp_parallel_set_combined_p (stmt, true);
    }
  /* Lower any depend clauses of a task into the runtime array form;
     the generated setup/teardown ends up in dep_ilist/dep_olist and is
     later wrapped around the construct inside dep_bind.  */
  gimple_seq dep_ilist = NULL;
  gimple_seq dep_olist = NULL;
  if (gimple_code (stmt) == GIMPLE_OMP_TASK
      && omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
    {
      push_gimplify_context ();
      dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
      lower_depend_clauses (gimple_omp_task_clauses_ptr (stmt),
			    &dep_ilist, &dep_olist);
    }

  /* For taskwait-with-depend there is nothing further to lower: just
     wrap the statement with its depend setup/teardown and return.  */
  if (gimple_code (stmt) == GIMPLE_OMP_TASK
      && gimple_omp_task_taskwait_p (stmt))
    {
      if (dep_bind)
	{
	  gsi_replace (gsi_p, dep_bind, true);
	  gimple_bind_add_seq (dep_bind, dep_ilist);
	  gimple_bind_add_stmt (dep_bind, stmt);
	  gimple_bind_add_seq (dep_bind, dep_olist);
	  pop_gimplify_context (dep_bind);
	}
      return;
    }

  /* A task needing a sender record copy constructor gets one built.  */
  if (ctx->srecord_type)
    create_task_copyfn (as_a <gomp_task *> (stmt), ctx);

  /* Task reductions (taskloop reduction / parallel _reductemp_) need
     their own setup/teardown sequences, also hosted in dep_bind.  */
  gimple_seq tskred_ilist = NULL;
  gimple_seq tskred_olist = NULL;
  if ((is_task_ctx (ctx)
       && gimple_omp_task_taskloop_p (ctx->stmt)
       && omp_find_clause (gimple_omp_task_clauses (ctx->stmt),
			   OMP_CLAUSE_REDUCTION))
      || (is_parallel_ctx (ctx)
	  && omp_find_clause (gimple_omp_parallel_clauses (stmt),
			      OMP_CLAUSE__REDUCTEMP_)))
    {
      if (dep_bind == NULL)
	{
	  push_gimplify_context ();
	  dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
	}
      lower_omp_task_reductions (ctx, is_task_ctx (ctx) ? OMP_TASKLOOP
							: OMP_PARALLEL,
				 gimple_omp_taskreg_clauses (ctx->stmt),
				 &tskred_ilist, &tskred_olist);
    }

  push_gimplify_context ();

  /* Lower the data-sharing clauses and the body itself.  */
  gimple_seq par_olist = NULL;
  gimple_seq par_ilist = NULL;
  gimple_seq par_rlist = NULL;
  lower_rec_input_clauses (clauses, &par_ilist, &par_olist, ctx, NULL);
  lower_omp (&par_body, ctx);
  /* Reduction clauses on a task are handled via task reductions above,
     not here.  */
  if (gimple_code (stmt) != GIMPLE_OMP_TASK)
    lower_reduction_clauses (clauses, &par_rlist, NULL, ctx);

  /* Declare all the variables created by mapping and the variables
     declared in the scope of the parallel body.  */
  record_vars_into (ctx->block_vars, child_fn);
  maybe_remove_omp_member_access_dummy_vars (par_bind);
  record_vars_into (gimple_bind_vars (par_bind), child_fn);

  if (ctx->record_type)
    {
      /* .omp_data_o is the sender-side block of shared/firstprivate
	 data whose address is passed to the child function.  */
      ctx->sender_decl
	= create_tmp_var (ctx->srecord_type ? ctx->srecord_type
			  : ctx->record_type, ".omp_data_o");
      DECL_NAMELESS (ctx->sender_decl) = 1;
      TREE_ADDRESSABLE (ctx->sender_decl) = 1;
      gimple_omp_taskreg_set_data_arg (stmt, ctx->sender_decl);
    }

  /* Copy-in before the construct (ilist) and copy-out after it
     (olist).  */
  gimple_seq olist = NULL;
  gimple_seq ilist = NULL;
  lower_send_clauses (clauses, &ilist, &olist, ctx);
  lower_send_shared_vars (&ilist, &olist, ctx);

  if (ctx->record_type)
    {
      /* End the sender block's lifetime once the construct is done.  */
      tree clobber = build_clobber (TREE_TYPE (ctx->sender_decl));
      gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
							clobber));
    }

  /* Once all the expansions are done, sequence all the different
     fragments inside gimple_omp_body.  */

  gimple_seq new_body = NULL;

  if (ctx->record_type)
    {
      t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
      /* fixup_child_record_type might have changed receiver_decl's type.  */
      t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
      gimple_seq_add_stmt (&new_body,
			   gimple_build_assign (ctx->receiver_decl, t));
    }

  gimple_seq_add_seq (&new_body, par_ilist);
  gimple_seq_add_seq (&new_body, par_body);
  gimple_seq_add_seq (&new_body, par_rlist);
  /* Cancellation jumps here, past the body but before copy-back.  */
  if (ctx->cancellable)
    gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
  gimple_seq_add_seq (&new_body, par_olist);
  new_body = maybe_catch_exception (new_body);
  if (gimple_code (stmt) == GIMPLE_OMP_TASK)
    gimple_seq_add_stmt (&new_body,
			 gimple_build_omp_continue (integer_zero_node,
						    integer_zero_node));
  gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
  gimple_omp_set_body (stmt, new_body);

  /* Replace the construct with a bind holding copy-in, the construct
     itself, and copy-out; wrap that in dep_bind when present.  */
  if (dep_bind && gimple_bind_block (par_bind) == NULL_TREE)
    bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
  else
    bind = gimple_build_bind (NULL, NULL, gimple_bind_block (par_bind));
  gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
  gimple_bind_add_seq (bind, ilist);
  gimple_bind_add_stmt (bind, stmt);
  gimple_bind_add_seq (bind, olist);

  pop_gimplify_context (NULL);

  if (dep_bind)
    {
      /* Order: depend setup, task-reduction setup, the construct,
	 task-reduction teardown, depend teardown.  */
      gimple_bind_add_seq (dep_bind, dep_ilist);
      gimple_bind_add_seq (dep_bind, tskred_ilist);
      gimple_bind_add_stmt (dep_bind, bind);
      gimple_bind_add_seq (dep_bind, tskred_olist);
      gimple_bind_add_seq (dep_bind, dep_olist);
      pop_gimplify_context (dep_bind);
    }
}
12601
12602 /* Lower the GIMPLE_OMP_TARGET in the current statement
12603 in GSI_P. CTX holds context information for the directive. */
12604
12605 static void
lower_omp_target(gimple_stmt_iterator * gsi_p,omp_context * ctx)12606 lower_omp_target (gimple_stmt_iterator *gsi_p, omp_context *ctx)
12607 {
12608 tree clauses;
12609 tree child_fn, t, c;
12610 gomp_target *stmt = as_a <gomp_target *> (gsi_stmt (*gsi_p));
12611 gbind *tgt_bind, *bind, *dep_bind = NULL;
12612 gimple_seq tgt_body, olist, ilist, fplist, new_body;
12613 location_t loc = gimple_location (stmt);
12614 bool offloaded, data_region;
12615 unsigned int map_cnt = 0;
12616 tree in_reduction_clauses = NULL_TREE;
12617
12618 offloaded = is_gimple_omp_offloaded (stmt);
12619 switch (gimple_omp_target_kind (stmt))
12620 {
12621 case GF_OMP_TARGET_KIND_REGION:
12622 tree *p, *q;
12623 q = &in_reduction_clauses;
12624 for (p = gimple_omp_target_clauses_ptr (stmt); *p; )
12625 if (OMP_CLAUSE_CODE (*p) == OMP_CLAUSE_IN_REDUCTION)
12626 {
12627 *q = *p;
12628 q = &OMP_CLAUSE_CHAIN (*q);
12629 *p = OMP_CLAUSE_CHAIN (*p);
12630 }
12631 else
12632 p = &OMP_CLAUSE_CHAIN (*p);
12633 *q = NULL_TREE;
12634 *p = in_reduction_clauses;
12635 /* FALLTHRU */
12636 case GF_OMP_TARGET_KIND_UPDATE:
12637 case GF_OMP_TARGET_KIND_ENTER_DATA:
12638 case GF_OMP_TARGET_KIND_EXIT_DATA:
12639 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
12640 case GF_OMP_TARGET_KIND_OACC_KERNELS:
12641 case GF_OMP_TARGET_KIND_OACC_SERIAL:
12642 case GF_OMP_TARGET_KIND_OACC_UPDATE:
12643 case GF_OMP_TARGET_KIND_OACC_ENTER_DATA:
12644 case GF_OMP_TARGET_KIND_OACC_EXIT_DATA:
12645 case GF_OMP_TARGET_KIND_OACC_DECLARE:
12646 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED:
12647 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE:
12648 data_region = false;
12649 break;
12650 case GF_OMP_TARGET_KIND_DATA:
12651 case GF_OMP_TARGET_KIND_OACC_DATA:
12652 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
12653 case GF_OMP_TARGET_KIND_OACC_DATA_KERNELS:
12654 data_region = true;
12655 break;
12656 default:
12657 gcc_unreachable ();
12658 }
12659
12660 clauses = gimple_omp_target_clauses (stmt);
12661
12662 gimple_seq dep_ilist = NULL;
12663 gimple_seq dep_olist = NULL;
12664 bool has_depend = omp_find_clause (clauses, OMP_CLAUSE_DEPEND) != NULL_TREE;
12665 if (has_depend || in_reduction_clauses)
12666 {
12667 push_gimplify_context ();
12668 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
12669 if (has_depend)
12670 lower_depend_clauses (gimple_omp_target_clauses_ptr (stmt),
12671 &dep_ilist, &dep_olist);
12672 if (in_reduction_clauses)
12673 lower_rec_input_clauses (in_reduction_clauses, &dep_ilist, &dep_olist,
12674 ctx, NULL);
12675 }
12676
12677 tgt_bind = NULL;
12678 tgt_body = NULL;
12679 if (offloaded)
12680 {
12681 tgt_bind = gimple_seq_first_stmt_as_a_bind (gimple_omp_body (stmt));
12682 tgt_body = gimple_bind_body (tgt_bind);
12683 }
12684 else if (data_region)
12685 tgt_body = gimple_omp_body (stmt);
12686 child_fn = ctx->cb.dst_fn;
12687
12688 push_gimplify_context ();
12689 fplist = NULL;
12690
12691 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
12692 switch (OMP_CLAUSE_CODE (c))
12693 {
12694 tree var, x;
12695
12696 default:
12697 break;
12698 case OMP_CLAUSE_MAP:
12699 #if CHECKING_P
12700 /* First check what we're prepared to handle in the following. */
12701 switch (OMP_CLAUSE_MAP_KIND (c))
12702 {
12703 case GOMP_MAP_ALLOC:
12704 case GOMP_MAP_TO:
12705 case GOMP_MAP_FROM:
12706 case GOMP_MAP_TOFROM:
12707 case GOMP_MAP_POINTER:
12708 case GOMP_MAP_TO_PSET:
12709 case GOMP_MAP_DELETE:
12710 case GOMP_MAP_RELEASE:
12711 case GOMP_MAP_ALWAYS_TO:
12712 case GOMP_MAP_ALWAYS_FROM:
12713 case GOMP_MAP_ALWAYS_TOFROM:
12714 case GOMP_MAP_FIRSTPRIVATE_POINTER:
12715 case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
12716 case GOMP_MAP_STRUCT:
12717 case GOMP_MAP_ALWAYS_POINTER:
12718 case GOMP_MAP_ATTACH:
12719 case GOMP_MAP_DETACH:
12720 case GOMP_MAP_ATTACH_ZERO_LENGTH_ARRAY_SECTION:
12721 case GOMP_MAP_POINTER_TO_ZERO_LENGTH_ARRAY_SECTION:
12722 break;
12723 case GOMP_MAP_IF_PRESENT:
12724 case GOMP_MAP_FORCE_ALLOC:
12725 case GOMP_MAP_FORCE_TO:
12726 case GOMP_MAP_FORCE_FROM:
12727 case GOMP_MAP_FORCE_TOFROM:
12728 case GOMP_MAP_FORCE_PRESENT:
12729 case GOMP_MAP_FORCE_DEVICEPTR:
12730 case GOMP_MAP_DEVICE_RESIDENT:
12731 case GOMP_MAP_LINK:
12732 case GOMP_MAP_FORCE_DETACH:
12733 gcc_assert (is_gimple_omp_oacc (stmt));
12734 break;
12735 default:
12736 gcc_unreachable ();
12737 }
12738 #endif
12739 /* FALLTHRU */
12740 case OMP_CLAUSE_TO:
12741 case OMP_CLAUSE_FROM:
12742 oacc_firstprivate:
12743 var = OMP_CLAUSE_DECL (c);
12744 if (!DECL_P (var))
12745 {
12746 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP
12747 || (!OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
12748 && (OMP_CLAUSE_MAP_KIND (c)
12749 != GOMP_MAP_FIRSTPRIVATE_POINTER)))
12750 map_cnt++;
12751 continue;
12752 }
12753
12754 if (DECL_SIZE (var)
12755 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
12756 {
12757 tree var2 = DECL_VALUE_EXPR (var);
12758 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
12759 var2 = TREE_OPERAND (var2, 0);
12760 gcc_assert (DECL_P (var2));
12761 var = var2;
12762 }
12763
12764 if (offloaded
12765 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12766 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
12767 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
12768 {
12769 if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
12770 {
12771 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx))
12772 && varpool_node::get_create (var)->offloadable)
12773 continue;
12774
12775 tree type = build_pointer_type (TREE_TYPE (var));
12776 tree new_var = lookup_decl (var, ctx);
12777 x = create_tmp_var_raw (type, get_name (new_var));
12778 gimple_add_tmp_var (x);
12779 x = build_simple_mem_ref (x);
12780 SET_DECL_VALUE_EXPR (new_var, x);
12781 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12782 }
12783 continue;
12784 }
12785
12786 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12787 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
12788 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)
12789 && is_omp_target (stmt))
12790 {
12791 gcc_assert (maybe_lookup_field (c, ctx));
12792 map_cnt++;
12793 continue;
12794 }
12795
12796 if (!maybe_lookup_field (var, ctx))
12797 continue;
12798
12799 /* Don't remap compute constructs' reduction variables, because the
12800 intermediate result must be local to each gang. */
12801 if (offloaded && !(OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12802 && is_gimple_omp_oacc (ctx->stmt)
12803 && OMP_CLAUSE_MAP_IN_REDUCTION (c)))
12804 {
12805 x = build_receiver_ref (var, true, ctx);
12806 tree new_var = lookup_decl (var, ctx);
12807
12808 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12809 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
12810 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
12811 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
12812 x = build_simple_mem_ref (x);
12813 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
12814 {
12815 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
12816 if (omp_privatize_by_reference (new_var)
12817 && (TREE_CODE (TREE_TYPE (new_var)) != POINTER_TYPE
12818 || DECL_BY_REFERENCE (var)))
12819 {
12820 /* Create a local object to hold the instance
12821 value. */
12822 tree type = TREE_TYPE (TREE_TYPE (new_var));
12823 const char *id = IDENTIFIER_POINTER (DECL_NAME (new_var));
12824 tree inst = create_tmp_var (type, id);
12825 gimplify_assign (inst, fold_indirect_ref (x), &fplist);
12826 x = build_fold_addr_expr (inst);
12827 }
12828 gimplify_assign (new_var, x, &fplist);
12829 }
12830 else if (DECL_P (new_var))
12831 {
12832 SET_DECL_VALUE_EXPR (new_var, x);
12833 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12834 }
12835 else
12836 gcc_unreachable ();
12837 }
12838 map_cnt++;
12839 break;
12840
12841 case OMP_CLAUSE_FIRSTPRIVATE:
12842 gcc_checking_assert (offloaded);
12843 if (is_gimple_omp_oacc (ctx->stmt))
12844 {
12845 /* No 'firstprivate' clauses on OpenACC 'kernels'. */
12846 gcc_checking_assert (!is_oacc_kernels (ctx));
12847 /* Likewise, on OpenACC 'kernels' decomposed parts. */
12848 gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx));
12849
12850 goto oacc_firstprivate;
12851 }
12852 map_cnt++;
12853 var = OMP_CLAUSE_DECL (c);
12854 if (!omp_privatize_by_reference (var)
12855 && !is_gimple_reg_type (TREE_TYPE (var)))
12856 {
12857 tree new_var = lookup_decl (var, ctx);
12858 if (is_variable_sized (var))
12859 {
12860 tree pvar = DECL_VALUE_EXPR (var);
12861 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
12862 pvar = TREE_OPERAND (pvar, 0);
12863 gcc_assert (DECL_P (pvar));
12864 tree new_pvar = lookup_decl (pvar, ctx);
12865 x = build_fold_indirect_ref (new_pvar);
12866 TREE_THIS_NOTRAP (x) = 1;
12867 }
12868 else
12869 x = build_receiver_ref (var, true, ctx);
12870 SET_DECL_VALUE_EXPR (new_var, x);
12871 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12872 }
12873 break;
12874
12875 case OMP_CLAUSE_PRIVATE:
12876 gcc_checking_assert (offloaded);
12877 if (is_gimple_omp_oacc (ctx->stmt))
12878 {
12879 /* No 'private' clauses on OpenACC 'kernels'. */
12880 gcc_checking_assert (!is_oacc_kernels (ctx));
12881 /* Likewise, on OpenACC 'kernels' decomposed parts. */
12882 gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx));
12883
12884 break;
12885 }
12886 var = OMP_CLAUSE_DECL (c);
12887 if (is_variable_sized (var))
12888 {
12889 tree new_var = lookup_decl (var, ctx);
12890 tree pvar = DECL_VALUE_EXPR (var);
12891 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
12892 pvar = TREE_OPERAND (pvar, 0);
12893 gcc_assert (DECL_P (pvar));
12894 tree new_pvar = lookup_decl (pvar, ctx);
12895 x = build_fold_indirect_ref (new_pvar);
12896 TREE_THIS_NOTRAP (x) = 1;
12897 SET_DECL_VALUE_EXPR (new_var, x);
12898 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12899 }
12900 break;
12901
12902 case OMP_CLAUSE_USE_DEVICE_PTR:
12903 case OMP_CLAUSE_USE_DEVICE_ADDR:
12904 case OMP_CLAUSE_HAS_DEVICE_ADDR:
12905 case OMP_CLAUSE_IS_DEVICE_PTR:
12906 var = OMP_CLAUSE_DECL (c);
12907 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR)
12908 {
12909 while (TREE_CODE (var) == INDIRECT_REF
12910 || TREE_CODE (var) == ARRAY_REF)
12911 var = TREE_OPERAND (var, 0);
12912 }
12913 map_cnt++;
12914 if (is_variable_sized (var))
12915 {
12916 tree new_var = lookup_decl (var, ctx);
12917 tree pvar = DECL_VALUE_EXPR (var);
12918 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
12919 pvar = TREE_OPERAND (pvar, 0);
12920 gcc_assert (DECL_P (pvar));
12921 tree new_pvar = lookup_decl (pvar, ctx);
12922 x = build_fold_indirect_ref (new_pvar);
12923 TREE_THIS_NOTRAP (x) = 1;
12924 SET_DECL_VALUE_EXPR (new_var, x);
12925 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12926 }
12927 else if (((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
12928 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR)
12929 && !omp_privatize_by_reference (var)
12930 && !omp_is_allocatable_or_ptr (var)
12931 && !lang_hooks.decls.omp_array_data (var, true))
12932 || TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
12933 {
12934 tree new_var = lookup_decl (var, ctx);
12935 tree type = build_pointer_type (TREE_TYPE (var));
12936 x = create_tmp_var_raw (type, get_name (new_var));
12937 gimple_add_tmp_var (x);
12938 x = build_simple_mem_ref (x);
12939 SET_DECL_VALUE_EXPR (new_var, x);
12940 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12941 }
12942 else
12943 {
12944 tree new_var = lookup_decl (var, ctx);
12945 x = create_tmp_var_raw (TREE_TYPE (new_var), get_name (new_var));
12946 gimple_add_tmp_var (x);
12947 SET_DECL_VALUE_EXPR (new_var, x);
12948 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12949 }
12950 break;
12951 }
12952
12953 if (offloaded)
12954 {
12955 target_nesting_level++;
12956 lower_omp (&tgt_body, ctx);
12957 target_nesting_level--;
12958 }
12959 else if (data_region)
12960 lower_omp (&tgt_body, ctx);
12961
12962 if (offloaded)
12963 {
12964 /* Declare all the variables created by mapping and the variables
12965 declared in the scope of the target body. */
12966 record_vars_into (ctx->block_vars, child_fn);
12967 maybe_remove_omp_member_access_dummy_vars (tgt_bind);
12968 record_vars_into (gimple_bind_vars (tgt_bind), child_fn);
12969 }
12970
12971 olist = NULL;
12972 ilist = NULL;
12973 if (ctx->record_type)
12974 {
12975 ctx->sender_decl
12976 = create_tmp_var (ctx->record_type, ".omp_data_arr");
12977 DECL_NAMELESS (ctx->sender_decl) = 1;
12978 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
12979 t = make_tree_vec (3);
12980 TREE_VEC_ELT (t, 0) = ctx->sender_decl;
12981 TREE_VEC_ELT (t, 1)
12982 = create_tmp_var (build_array_type_nelts (size_type_node, map_cnt),
12983 ".omp_data_sizes");
12984 DECL_NAMELESS (TREE_VEC_ELT (t, 1)) = 1;
12985 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 1)) = 1;
12986 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 1;
12987 tree tkind_type = short_unsigned_type_node;
12988 int talign_shift = 8;
12989 TREE_VEC_ELT (t, 2)
12990 = create_tmp_var (build_array_type_nelts (tkind_type, map_cnt),
12991 ".omp_data_kinds");
12992 DECL_NAMELESS (TREE_VEC_ELT (t, 2)) = 1;
12993 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 2)) = 1;
12994 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 1;
12995 gimple_omp_target_set_data_arg (stmt, t);
12996
12997 vec<constructor_elt, va_gc> *vsize;
12998 vec<constructor_elt, va_gc> *vkind;
12999 vec_alloc (vsize, map_cnt);
13000 vec_alloc (vkind, map_cnt);
13001 unsigned int map_idx = 0;
13002
13003 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
13004 switch (OMP_CLAUSE_CODE (c))
13005 {
13006 tree ovar, nc, s, purpose, var, x, type;
13007 unsigned int talign;
13008
13009 default:
13010 break;
13011
13012 case OMP_CLAUSE_MAP:
13013 case OMP_CLAUSE_TO:
13014 case OMP_CLAUSE_FROM:
13015 oacc_firstprivate_map:
13016 nc = c;
13017 ovar = OMP_CLAUSE_DECL (c);
13018 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
13019 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
13020 || (OMP_CLAUSE_MAP_KIND (c)
13021 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
13022 break;
13023 if (!DECL_P (ovar))
13024 {
13025 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
13026 && OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
13027 {
13028 nc = OMP_CLAUSE_CHAIN (c);
13029 gcc_checking_assert (OMP_CLAUSE_DECL (nc)
13030 == get_base_address (ovar));
13031 ovar = OMP_CLAUSE_DECL (nc);
13032 }
13033 else
13034 {
13035 tree x = build_sender_ref (ovar, ctx);
13036 tree v = ovar;
13037 if (in_reduction_clauses
13038 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
13039 && OMP_CLAUSE_MAP_IN_REDUCTION (c))
13040 {
13041 v = unshare_expr (v);
13042 tree *p = &v;
13043 while (handled_component_p (*p)
13044 || TREE_CODE (*p) == INDIRECT_REF
13045 || TREE_CODE (*p) == ADDR_EXPR
13046 || TREE_CODE (*p) == MEM_REF
13047 || TREE_CODE (*p) == NON_LVALUE_EXPR)
13048 p = &TREE_OPERAND (*p, 0);
13049 tree d = *p;
13050 if (is_variable_sized (d))
13051 {
13052 gcc_assert (DECL_HAS_VALUE_EXPR_P (d));
13053 d = DECL_VALUE_EXPR (d);
13054 gcc_assert (TREE_CODE (d) == INDIRECT_REF);
13055 d = TREE_OPERAND (d, 0);
13056 gcc_assert (DECL_P (d));
13057 }
13058 splay_tree_key key
13059 = (splay_tree_key) &DECL_CONTEXT (d);
13060 tree nd = (tree) splay_tree_lookup (ctx->field_map,
13061 key)->value;
13062 if (d == *p)
13063 *p = nd;
13064 else
13065 *p = build_fold_indirect_ref (nd);
13066 }
13067 v = build_fold_addr_expr_with_type (v, ptr_type_node);
13068 gimplify_assign (x, v, &ilist);
13069 nc = NULL_TREE;
13070 }
13071 }
13072 else
13073 {
13074 if (DECL_SIZE (ovar)
13075 && TREE_CODE (DECL_SIZE (ovar)) != INTEGER_CST)
13076 {
13077 tree ovar2 = DECL_VALUE_EXPR (ovar);
13078 gcc_assert (TREE_CODE (ovar2) == INDIRECT_REF);
13079 ovar2 = TREE_OPERAND (ovar2, 0);
13080 gcc_assert (DECL_P (ovar2));
13081 ovar = ovar2;
13082 }
13083 if (!maybe_lookup_field (ovar, ctx)
13084 && !(OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
13085 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
13086 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)))
13087 continue;
13088 }
13089
13090 talign = TYPE_ALIGN_UNIT (TREE_TYPE (ovar));
13091 if (DECL_P (ovar) && DECL_ALIGN_UNIT (ovar) > talign)
13092 talign = DECL_ALIGN_UNIT (ovar);
13093
13094 var = NULL_TREE;
13095 if (nc)
13096 {
13097 if (in_reduction_clauses
13098 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
13099 && OMP_CLAUSE_MAP_IN_REDUCTION (c))
13100 {
13101 tree d = ovar;
13102 if (is_variable_sized (d))
13103 {
13104 gcc_assert (DECL_HAS_VALUE_EXPR_P (d));
13105 d = DECL_VALUE_EXPR (d);
13106 gcc_assert (TREE_CODE (d) == INDIRECT_REF);
13107 d = TREE_OPERAND (d, 0);
13108 gcc_assert (DECL_P (d));
13109 }
13110 splay_tree_key key
13111 = (splay_tree_key) &DECL_CONTEXT (d);
13112 tree nd = (tree) splay_tree_lookup (ctx->field_map,
13113 key)->value;
13114 if (d == ovar)
13115 var = nd;
13116 else
13117 var = build_fold_indirect_ref (nd);
13118 }
13119 else
13120 var = lookup_decl_in_outer_ctx (ovar, ctx);
13121 }
13122 if (nc
13123 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
13124 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
13125 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)
13126 && is_omp_target (stmt))
13127 {
13128 x = build_sender_ref (c, ctx);
13129 gimplify_assign (x, build_fold_addr_expr (var), &ilist);
13130 }
13131 else if (nc)
13132 {
13133 x = build_sender_ref (ovar, ctx);
13134
13135 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
13136 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
13137 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
13138 && TREE_CODE (TREE_TYPE (ovar)) == ARRAY_TYPE)
13139 {
13140 gcc_assert (offloaded);
13141 tree avar
13142 = create_tmp_var (TREE_TYPE (TREE_TYPE (x)));
13143 mark_addressable (avar);
13144 gimplify_assign (avar, build_fold_addr_expr (var), &ilist);
13145 talign = DECL_ALIGN_UNIT (avar);
13146 avar = build_fold_addr_expr (avar);
13147 gimplify_assign (x, avar, &ilist);
13148 }
13149 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
13150 {
13151 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
13152 if (!omp_privatize_by_reference (var))
13153 {
13154 if (is_gimple_reg (var)
13155 && OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
13156 suppress_warning (var);
13157 var = build_fold_addr_expr (var);
13158 }
13159 else
13160 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
13161 gimplify_assign (x, var, &ilist);
13162 }
13163 else if (is_gimple_reg (var))
13164 {
13165 gcc_assert (offloaded);
13166 tree avar = create_tmp_var (TREE_TYPE (var));
13167 mark_addressable (avar);
13168 enum gomp_map_kind map_kind = OMP_CLAUSE_MAP_KIND (c);
13169 if (GOMP_MAP_COPY_TO_P (map_kind)
13170 || map_kind == GOMP_MAP_POINTER
13171 || map_kind == GOMP_MAP_TO_PSET
13172 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
13173 {
13174 /* If we need to initialize a temporary
13175 with VAR because it is not addressable, and
13176 the variable hasn't been initialized yet, then
13177 we'll get a warning for the store to avar.
13178 Don't warn in that case, the mapping might
13179 be implicit. */
13180 suppress_warning (var, OPT_Wuninitialized);
13181 gimplify_assign (avar, var, &ilist);
13182 }
13183 avar = build_fold_addr_expr (avar);
13184 gimplify_assign (x, avar, &ilist);
13185 if ((GOMP_MAP_COPY_FROM_P (map_kind)
13186 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
13187 && !TYPE_READONLY (TREE_TYPE (var)))
13188 {
13189 x = unshare_expr (x);
13190 x = build_simple_mem_ref (x);
13191 gimplify_assign (var, x, &olist);
13192 }
13193 }
13194 else
13195 {
13196 /* While MAP is handled explicitly by the FE,
13197 for 'target update', only the identified is passed. */
13198 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FROM
13199 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TO)
13200 && (omp_is_allocatable_or_ptr (var)
13201 && omp_check_optional_argument (var, false)))
13202 var = build_fold_indirect_ref (var);
13203 else if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FROM
13204 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_TO)
13205 || (!omp_is_allocatable_or_ptr (var)
13206 && !omp_check_optional_argument (var, false)))
13207 var = build_fold_addr_expr (var);
13208 gimplify_assign (x, var, &ilist);
13209 }
13210 }
13211 s = NULL_TREE;
13212 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
13213 {
13214 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
13215 s = TREE_TYPE (ovar);
13216 if (TREE_CODE (s) == REFERENCE_TYPE
13217 || omp_check_optional_argument (ovar, false))
13218 s = TREE_TYPE (s);
13219 s = TYPE_SIZE_UNIT (s);
13220 }
13221 else
13222 s = OMP_CLAUSE_SIZE (c);
13223 if (s == NULL_TREE)
13224 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
13225 s = fold_convert (size_type_node, s);
13226 purpose = size_int (map_idx++);
13227 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
13228 if (TREE_CODE (s) != INTEGER_CST)
13229 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
13230
13231 unsigned HOST_WIDE_INT tkind, tkind_zero;
13232 switch (OMP_CLAUSE_CODE (c))
13233 {
13234 case OMP_CLAUSE_MAP:
13235 tkind = OMP_CLAUSE_MAP_KIND (c);
13236 tkind_zero = tkind;
13237 if (OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c))
13238 switch (tkind)
13239 {
13240 case GOMP_MAP_ALLOC:
13241 case GOMP_MAP_IF_PRESENT:
13242 case GOMP_MAP_TO:
13243 case GOMP_MAP_FROM:
13244 case GOMP_MAP_TOFROM:
13245 case GOMP_MAP_ALWAYS_TO:
13246 case GOMP_MAP_ALWAYS_FROM:
13247 case GOMP_MAP_ALWAYS_TOFROM:
13248 case GOMP_MAP_RELEASE:
13249 case GOMP_MAP_FORCE_TO:
13250 case GOMP_MAP_FORCE_FROM:
13251 case GOMP_MAP_FORCE_TOFROM:
13252 case GOMP_MAP_FORCE_PRESENT:
13253 tkind_zero = GOMP_MAP_ZERO_LEN_ARRAY_SECTION;
13254 break;
13255 case GOMP_MAP_DELETE:
13256 tkind_zero = GOMP_MAP_DELETE_ZERO_LEN_ARRAY_SECTION;
13257 default:
13258 break;
13259 }
13260 if (tkind_zero != tkind)
13261 {
13262 if (integer_zerop (s))
13263 tkind = tkind_zero;
13264 else if (integer_nonzerop (s))
13265 tkind_zero = tkind;
13266 }
13267 if (tkind_zero == tkind
13268 && OMP_CLAUSE_MAP_RUNTIME_IMPLICIT_P (c)
13269 && (((tkind & GOMP_MAP_FLAG_SPECIAL_BITS)
13270 & ~GOMP_MAP_IMPLICIT)
13271 == 0))
13272 {
13273 /* If this is an implicit map, and the GOMP_MAP_IMPLICIT
13274 bits are not interfered by other special bit encodings,
13275 then turn the GOMP_IMPLICIT_BIT flag on for the runtime
13276 to see. */
13277 tkind |= GOMP_MAP_IMPLICIT;
13278 tkind_zero = tkind;
13279 }
13280 break;
13281 case OMP_CLAUSE_FIRSTPRIVATE:
13282 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
13283 tkind = GOMP_MAP_TO;
13284 tkind_zero = tkind;
13285 break;
13286 case OMP_CLAUSE_TO:
13287 tkind = GOMP_MAP_TO;
13288 tkind_zero = tkind;
13289 break;
13290 case OMP_CLAUSE_FROM:
13291 tkind = GOMP_MAP_FROM;
13292 tkind_zero = tkind;
13293 break;
13294 default:
13295 gcc_unreachable ();
13296 }
13297 gcc_checking_assert (tkind
13298 < (HOST_WIDE_INT_C (1U) << talign_shift));
13299 gcc_checking_assert (tkind_zero
13300 < (HOST_WIDE_INT_C (1U) << talign_shift));
13301 talign = ceil_log2 (talign);
13302 tkind |= talign << talign_shift;
13303 tkind_zero |= talign << talign_shift;
13304 gcc_checking_assert (tkind
13305 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
13306 gcc_checking_assert (tkind_zero
13307 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
13308 if (tkind == tkind_zero)
13309 x = build_int_cstu (tkind_type, tkind);
13310 else
13311 {
13312 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 0;
13313 x = build3 (COND_EXPR, tkind_type,
13314 fold_build2 (EQ_EXPR, boolean_type_node,
13315 unshare_expr (s), size_zero_node),
13316 build_int_cstu (tkind_type, tkind_zero),
13317 build_int_cstu (tkind_type, tkind));
13318 }
13319 CONSTRUCTOR_APPEND_ELT (vkind, purpose, x);
13320 if (nc && nc != c)
13321 c = nc;
13322 break;
13323
13324 case OMP_CLAUSE_FIRSTPRIVATE:
13325 if (is_gimple_omp_oacc (ctx->stmt))
13326 goto oacc_firstprivate_map;
13327 ovar = OMP_CLAUSE_DECL (c);
13328 if (omp_privatize_by_reference (ovar))
13329 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
13330 else
13331 talign = DECL_ALIGN_UNIT (ovar);
13332 var = lookup_decl_in_outer_ctx (ovar, ctx);
13333 x = build_sender_ref (ovar, ctx);
13334 tkind = GOMP_MAP_FIRSTPRIVATE;
13335 type = TREE_TYPE (ovar);
13336 if (omp_privatize_by_reference (ovar))
13337 type = TREE_TYPE (type);
13338 if ((INTEGRAL_TYPE_P (type)
13339 && TYPE_PRECISION (type) <= POINTER_SIZE)
13340 || TREE_CODE (type) == POINTER_TYPE)
13341 {
13342 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
13343 tree t = var;
13344 if (omp_privatize_by_reference (var))
13345 t = build_simple_mem_ref (var);
13346 else if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
13347 suppress_warning (var);
13348 if (TREE_CODE (type) != POINTER_TYPE)
13349 t = fold_convert (pointer_sized_int_node, t);
13350 t = fold_convert (TREE_TYPE (x), t);
13351 gimplify_assign (x, t, &ilist);
13352 }
13353 else if (omp_privatize_by_reference (var))
13354 gimplify_assign (x, var, &ilist);
13355 else if (is_gimple_reg (var))
13356 {
13357 tree avar = create_tmp_var (TREE_TYPE (var));
13358 mark_addressable (avar);
13359 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
13360 suppress_warning (var);
13361 gimplify_assign (avar, var, &ilist);
13362 avar = build_fold_addr_expr (avar);
13363 gimplify_assign (x, avar, &ilist);
13364 }
13365 else
13366 {
13367 var = build_fold_addr_expr (var);
13368 gimplify_assign (x, var, &ilist);
13369 }
13370 if (tkind == GOMP_MAP_FIRSTPRIVATE_INT)
13371 s = size_int (0);
13372 else if (omp_privatize_by_reference (ovar))
13373 s = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
13374 else
13375 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
13376 s = fold_convert (size_type_node, s);
13377 purpose = size_int (map_idx++);
13378 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
13379 if (TREE_CODE (s) != INTEGER_CST)
13380 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
13381
13382 gcc_checking_assert (tkind
13383 < (HOST_WIDE_INT_C (1U) << talign_shift));
13384 talign = ceil_log2 (talign);
13385 tkind |= talign << talign_shift;
13386 gcc_checking_assert (tkind
13387 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
13388 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
13389 build_int_cstu (tkind_type, tkind));
13390 break;
13391
13392 case OMP_CLAUSE_USE_DEVICE_PTR:
13393 case OMP_CLAUSE_USE_DEVICE_ADDR:
13394 case OMP_CLAUSE_HAS_DEVICE_ADDR:
13395 case OMP_CLAUSE_IS_DEVICE_PTR:
13396 ovar = OMP_CLAUSE_DECL (c);
13397 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR)
13398 {
13399 while (TREE_CODE (ovar) == INDIRECT_REF
13400 || TREE_CODE (ovar) == ARRAY_REF)
13401 ovar = TREE_OPERAND (ovar, 0);
13402 }
13403 var = lookup_decl_in_outer_ctx (ovar, ctx);
13404
13405 if (lang_hooks.decls.omp_array_data (ovar, true))
13406 {
13407 tkind = ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IS_DEVICE_PTR
13408 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_HAS_DEVICE_ADDR)
13409 ? GOMP_MAP_USE_DEVICE_PTR : GOMP_MAP_FIRSTPRIVATE_INT);
13410 x = build_sender_ref ((splay_tree_key) &DECL_NAME (ovar), ctx);
13411 }
13412 else if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IS_DEVICE_PTR
13413 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_HAS_DEVICE_ADDR)
13414 {
13415 tkind = GOMP_MAP_USE_DEVICE_PTR;
13416 x = build_sender_ref ((splay_tree_key) &DECL_UID (ovar), ctx);
13417 }
13418 else
13419 {
13420 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
13421 x = build_sender_ref (ovar, ctx);
13422 }
13423
13424 if (is_gimple_omp_oacc (ctx->stmt))
13425 {
13426 gcc_assert (tkind == GOMP_MAP_USE_DEVICE_PTR);
13427
13428 if (OMP_CLAUSE_USE_DEVICE_PTR_IF_PRESENT (c))
13429 tkind = GOMP_MAP_USE_DEVICE_PTR_IF_PRESENT;
13430 }
13431
13432 type = TREE_TYPE (ovar);
13433 if (lang_hooks.decls.omp_array_data (ovar, true))
13434 var = lang_hooks.decls.omp_array_data (var, false);
13435 else if (((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
13436 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR)
13437 && !omp_privatize_by_reference (ovar)
13438 && !omp_is_allocatable_or_ptr (ovar))
13439 || TREE_CODE (type) == ARRAY_TYPE)
13440 var = build_fold_addr_expr (var);
13441 else
13442 {
13443 if (omp_privatize_by_reference (ovar)
13444 || omp_check_optional_argument (ovar, false)
13445 || omp_is_allocatable_or_ptr (ovar))
13446 {
13447 type = TREE_TYPE (type);
13448 if (POINTER_TYPE_P (type)
13449 && TREE_CODE (type) != ARRAY_TYPE
13450 && ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_USE_DEVICE_ADDR
13451 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_HAS_DEVICE_ADDR
13452 && !omp_is_allocatable_or_ptr (ovar))
13453 || (omp_privatize_by_reference (ovar)
13454 && omp_is_allocatable_or_ptr (ovar))))
13455 var = build_simple_mem_ref (var);
13456 var = fold_convert (TREE_TYPE (x), var);
13457 }
13458 }
13459 tree present;
13460 present = omp_check_optional_argument (ovar, true);
13461 if (present)
13462 {
13463 tree null_label = create_artificial_label (UNKNOWN_LOCATION);
13464 tree notnull_label = create_artificial_label (UNKNOWN_LOCATION);
13465 tree opt_arg_label = create_artificial_label (UNKNOWN_LOCATION);
13466 tree new_x = unshare_expr (x);
13467 gimplify_expr (&present, &ilist, NULL, is_gimple_val,
13468 fb_rvalue);
13469 gcond *cond = gimple_build_cond_from_tree (present,
13470 notnull_label,
13471 null_label);
13472 gimple_seq_add_stmt (&ilist, cond);
13473 gimple_seq_add_stmt (&ilist, gimple_build_label (null_label));
13474 gimplify_assign (new_x, null_pointer_node, &ilist);
13475 gimple_seq_add_stmt (&ilist, gimple_build_goto (opt_arg_label));
13476 gimple_seq_add_stmt (&ilist,
13477 gimple_build_label (notnull_label));
13478 gimplify_assign (x, var, &ilist);
13479 gimple_seq_add_stmt (&ilist,
13480 gimple_build_label (opt_arg_label));
13481 }
13482 else
13483 gimplify_assign (x, var, &ilist);
13484 s = size_int (0);
13485 purpose = size_int (map_idx++);
13486 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
13487 gcc_checking_assert (tkind
13488 < (HOST_WIDE_INT_C (1U) << talign_shift));
13489 gcc_checking_assert (tkind
13490 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
13491 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
13492 build_int_cstu (tkind_type, tkind));
13493 break;
13494 }
13495
13496 gcc_assert (map_idx == map_cnt);
13497
13498 DECL_INITIAL (TREE_VEC_ELT (t, 1))
13499 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 1)), vsize);
13500 DECL_INITIAL (TREE_VEC_ELT (t, 2))
13501 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 2)), vkind);
13502 for (int i = 1; i <= 2; i++)
13503 if (!TREE_STATIC (TREE_VEC_ELT (t, i)))
13504 {
13505 gimple_seq initlist = NULL;
13506 force_gimple_operand (build1 (DECL_EXPR, void_type_node,
13507 TREE_VEC_ELT (t, i)),
13508 &initlist, true, NULL_TREE);
13509 gimple_seq_add_seq (&ilist, initlist);
13510
13511 tree clobber = build_clobber (TREE_TYPE (TREE_VEC_ELT (t, i)));
13512 gimple_seq_add_stmt (&olist,
13513 gimple_build_assign (TREE_VEC_ELT (t, i),
13514 clobber));
13515 }
13516 else if (omp_maybe_offloaded_ctx (ctx->outer))
13517 {
13518 tree id = get_identifier ("omp declare target");
13519 tree decl = TREE_VEC_ELT (t, i);
13520 DECL_ATTRIBUTES (decl)
13521 = tree_cons (id, NULL_TREE, DECL_ATTRIBUTES (decl));
13522 varpool_node *node = varpool_node::get (decl);
13523 if (node)
13524 {
13525 node->offloadable = 1;
13526 if (ENABLE_OFFLOADING)
13527 {
13528 g->have_offload = true;
13529 vec_safe_push (offload_vars, t);
13530 }
13531 }
13532 }
13533
13534 tree clobber = build_clobber (ctx->record_type);
13535 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
13536 clobber));
13537 }
13538
13539 /* Once all the expansions are done, sequence all the different
13540 fragments inside gimple_omp_body. */
13541
13542 new_body = NULL;
13543
13544 if (offloaded
13545 && ctx->record_type)
13546 {
13547 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
13548 /* fixup_child_record_type might have changed receiver_decl's type. */
13549 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
13550 gimple_seq_add_stmt (&new_body,
13551 gimple_build_assign (ctx->receiver_decl, t));
13552 }
13553 gimple_seq_add_seq (&new_body, fplist);
13554
13555 if (offloaded || data_region)
13556 {
13557 tree prev = NULL_TREE;
13558 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
13559 switch (OMP_CLAUSE_CODE (c))
13560 {
13561 tree var, x;
13562 default:
13563 break;
13564 case OMP_CLAUSE_FIRSTPRIVATE:
13565 if (is_gimple_omp_oacc (ctx->stmt))
13566 break;
13567 var = OMP_CLAUSE_DECL (c);
13568 if (omp_privatize_by_reference (var)
13569 || is_gimple_reg_type (TREE_TYPE (var)))
13570 {
13571 tree new_var = lookup_decl (var, ctx);
13572 tree type;
13573 type = TREE_TYPE (var);
13574 if (omp_privatize_by_reference (var))
13575 type = TREE_TYPE (type);
13576 if ((INTEGRAL_TYPE_P (type)
13577 && TYPE_PRECISION (type) <= POINTER_SIZE)
13578 || TREE_CODE (type) == POINTER_TYPE)
13579 {
13580 x = build_receiver_ref (var, false, ctx);
13581 if (TREE_CODE (type) != POINTER_TYPE)
13582 x = fold_convert (pointer_sized_int_node, x);
13583 x = fold_convert (type, x);
13584 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
13585 fb_rvalue);
13586 if (omp_privatize_by_reference (var))
13587 {
13588 tree v = create_tmp_var_raw (type, get_name (var));
13589 gimple_add_tmp_var (v);
13590 TREE_ADDRESSABLE (v) = 1;
13591 gimple_seq_add_stmt (&new_body,
13592 gimple_build_assign (v, x));
13593 x = build_fold_addr_expr (v);
13594 }
13595 gimple_seq_add_stmt (&new_body,
13596 gimple_build_assign (new_var, x));
13597 }
13598 else
13599 {
13600 bool by_ref = !omp_privatize_by_reference (var);
13601 x = build_receiver_ref (var, by_ref, ctx);
13602 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
13603 fb_rvalue);
13604 gimple_seq_add_stmt (&new_body,
13605 gimple_build_assign (new_var, x));
13606 }
13607 }
13608 else if (is_variable_sized (var))
13609 {
13610 tree pvar = DECL_VALUE_EXPR (var);
13611 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
13612 pvar = TREE_OPERAND (pvar, 0);
13613 gcc_assert (DECL_P (pvar));
13614 tree new_var = lookup_decl (pvar, ctx);
13615 x = build_receiver_ref (var, false, ctx);
13616 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
13617 gimple_seq_add_stmt (&new_body,
13618 gimple_build_assign (new_var, x));
13619 }
13620 break;
13621 case OMP_CLAUSE_PRIVATE:
13622 if (is_gimple_omp_oacc (ctx->stmt))
13623 break;
13624 var = OMP_CLAUSE_DECL (c);
13625 if (omp_privatize_by_reference (var))
13626 {
13627 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
13628 tree new_var = lookup_decl (var, ctx);
13629 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
13630 if (TREE_CONSTANT (x))
13631 {
13632 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
13633 get_name (var));
13634 gimple_add_tmp_var (x);
13635 TREE_ADDRESSABLE (x) = 1;
13636 x = build_fold_addr_expr_loc (clause_loc, x);
13637 }
13638 else
13639 break;
13640
13641 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
13642 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
13643 gimple_seq_add_stmt (&new_body,
13644 gimple_build_assign (new_var, x));
13645 }
13646 break;
13647 case OMP_CLAUSE_USE_DEVICE_PTR:
13648 case OMP_CLAUSE_USE_DEVICE_ADDR:
13649 case OMP_CLAUSE_HAS_DEVICE_ADDR:
13650 case OMP_CLAUSE_IS_DEVICE_PTR:
13651 tree new_var;
13652 gimple_seq assign_body;
13653 bool is_array_data;
13654 bool do_optional_check;
13655 assign_body = NULL;
13656 do_optional_check = false;
13657 var = OMP_CLAUSE_DECL (c);
13658 is_array_data = lang_hooks.decls.omp_array_data (var, true) != NULL;
13659
13660 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IS_DEVICE_PTR
13661 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_HAS_DEVICE_ADDR)
13662 x = build_sender_ref (is_array_data
13663 ? (splay_tree_key) &DECL_NAME (var)
13664 : (splay_tree_key) &DECL_UID (var), ctx);
13665 else
13666 {
13667 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR)
13668 {
13669 while (TREE_CODE (var) == INDIRECT_REF
13670 || TREE_CODE (var) == ARRAY_REF)
13671 var = TREE_OPERAND (var, 0);
13672 }
13673 x = build_receiver_ref (var, false, ctx);
13674 }
13675
13676 if (is_array_data)
13677 {
13678 bool is_ref = omp_privatize_by_reference (var);
13679 do_optional_check = true;
13680 /* First, we copy the descriptor data from the host; then
13681 we update its data to point to the target address. */
13682 new_var = lookup_decl (var, ctx);
13683 new_var = DECL_VALUE_EXPR (new_var);
13684 tree v = new_var;
13685
13686 if (is_ref)
13687 {
13688 var = build_fold_indirect_ref (var);
13689 gimplify_expr (&var, &assign_body, NULL, is_gimple_val,
13690 fb_rvalue);
13691 v = create_tmp_var_raw (TREE_TYPE (var), get_name (var));
13692 gimple_add_tmp_var (v);
13693 TREE_ADDRESSABLE (v) = 1;
13694 gimple_seq_add_stmt (&assign_body,
13695 gimple_build_assign (v, var));
13696 tree rhs = build_fold_addr_expr (v);
13697 gimple_seq_add_stmt (&assign_body,
13698 gimple_build_assign (new_var, rhs));
13699 }
13700 else
13701 gimple_seq_add_stmt (&assign_body,
13702 gimple_build_assign (new_var, var));
13703
13704 tree v2 = lang_hooks.decls.omp_array_data (unshare_expr (v), false);
13705 gcc_assert (v2);
13706 gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
13707 gimple_seq_add_stmt (&assign_body,
13708 gimple_build_assign (v2, x));
13709 }
13710 else if (is_variable_sized (var))
13711 {
13712 tree pvar = DECL_VALUE_EXPR (var);
13713 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
13714 pvar = TREE_OPERAND (pvar, 0);
13715 gcc_assert (DECL_P (pvar));
13716 new_var = lookup_decl (pvar, ctx);
13717 gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
13718 gimple_seq_add_stmt (&assign_body,
13719 gimple_build_assign (new_var, x));
13720 }
13721 else if (((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
13722 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_HAS_DEVICE_ADDR)
13723 && !omp_privatize_by_reference (var)
13724 && !omp_is_allocatable_or_ptr (var))
13725 || TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
13726 {
13727 new_var = lookup_decl (var, ctx);
13728 new_var = DECL_VALUE_EXPR (new_var);
13729 gcc_assert (TREE_CODE (new_var) == MEM_REF);
13730 new_var = TREE_OPERAND (new_var, 0);
13731 gcc_assert (DECL_P (new_var));
13732 gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
13733 gimple_seq_add_stmt (&assign_body,
13734 gimple_build_assign (new_var, x));
13735 }
13736 else
13737 {
13738 tree type = TREE_TYPE (var);
13739 new_var = lookup_decl (var, ctx);
13740 if (omp_privatize_by_reference (var))
13741 {
13742 type = TREE_TYPE (type);
13743 if (POINTER_TYPE_P (type)
13744 && TREE_CODE (type) != ARRAY_TYPE
13745 && ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_USE_DEVICE_ADDR
13746 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_HAS_DEVICE_ADDR)
13747 || (omp_privatize_by_reference (var)
13748 && omp_is_allocatable_or_ptr (var))))
13749 {
13750 tree v = create_tmp_var_raw (type, get_name (var));
13751 gimple_add_tmp_var (v);
13752 TREE_ADDRESSABLE (v) = 1;
13753 x = fold_convert (type, x);
13754 gimplify_expr (&x, &assign_body, NULL, is_gimple_val,
13755 fb_rvalue);
13756 gimple_seq_add_stmt (&assign_body,
13757 gimple_build_assign (v, x));
13758 x = build_fold_addr_expr (v);
13759 do_optional_check = true;
13760 }
13761 }
13762 new_var = DECL_VALUE_EXPR (new_var);
13763 x = fold_convert (TREE_TYPE (new_var), x);
13764 gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
13765 gimple_seq_add_stmt (&assign_body,
13766 gimple_build_assign (new_var, x));
13767 }
13768 tree present;
13769 present = ((do_optional_check
13770 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_HAS_DEVICE_ADDR
13771 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IS_DEVICE_PTR)
13772 ? omp_check_optional_argument (OMP_CLAUSE_DECL (c), true)
13773 : NULL_TREE);
13774 if (present)
13775 {
13776 tree null_label = create_artificial_label (UNKNOWN_LOCATION);
13777 tree notnull_label = create_artificial_label (UNKNOWN_LOCATION);
13778 tree opt_arg_label = create_artificial_label (UNKNOWN_LOCATION);
13779 glabel *null_glabel = gimple_build_label (null_label);
13780 glabel *notnull_glabel = gimple_build_label (notnull_label);
13781 ggoto *opt_arg_ggoto = gimple_build_goto (opt_arg_label);
13782 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
13783 fb_rvalue);
13784 gimplify_expr (&present, &new_body, NULL, is_gimple_val,
13785 fb_rvalue);
13786 gcond *cond = gimple_build_cond_from_tree (present,
13787 notnull_label,
13788 null_label);
13789 gimple_seq_add_stmt (&new_body, cond);
13790 gimple_seq_add_stmt (&new_body, null_glabel);
13791 gimplify_assign (new_var, null_pointer_node, &new_body);
13792 gimple_seq_add_stmt (&new_body, opt_arg_ggoto);
13793 gimple_seq_add_stmt (&new_body, notnull_glabel);
13794 gimple_seq_add_seq (&new_body, assign_body);
13795 gimple_seq_add_stmt (&new_body,
13796 gimple_build_label (opt_arg_label));
13797 }
13798 else
13799 gimple_seq_add_seq (&new_body, assign_body);
13800 break;
13801 }
13802 /* Handle GOMP_MAP_FIRSTPRIVATE_{POINTER,REFERENCE} in second pass,
13803 so that firstprivate vars holding OMP_CLAUSE_SIZE if needed
13804 are already handled. Similarly OMP_CLAUSE_PRIVATE for VLAs
13805 or references to VLAs. */
13806 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
13807 switch (OMP_CLAUSE_CODE (c))
13808 {
13809 tree var;
13810 default:
13811 break;
13812 case OMP_CLAUSE_MAP:
13813 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
13814 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
13815 {
13816 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
13817 poly_int64 offset = 0;
13818 gcc_assert (prev);
13819 var = OMP_CLAUSE_DECL (c);
13820 if (DECL_P (var)
13821 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
13822 && is_global_var (maybe_lookup_decl_in_outer_ctx (var,
13823 ctx))
13824 && varpool_node::get_create (var)->offloadable)
13825 break;
13826 if (TREE_CODE (var) == INDIRECT_REF
13827 && TREE_CODE (TREE_OPERAND (var, 0)) == COMPONENT_REF)
13828 var = TREE_OPERAND (var, 0);
13829 if (TREE_CODE (var) == COMPONENT_REF)
13830 {
13831 var = get_addr_base_and_unit_offset (var, &offset);
13832 gcc_assert (var != NULL_TREE && DECL_P (var));
13833 }
13834 else if (DECL_SIZE (var)
13835 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
13836 {
13837 tree var2 = DECL_VALUE_EXPR (var);
13838 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
13839 var2 = TREE_OPERAND (var2, 0);
13840 gcc_assert (DECL_P (var2));
13841 var = var2;
13842 }
13843 tree new_var = lookup_decl (var, ctx), x;
13844 tree type = TREE_TYPE (new_var);
13845 bool is_ref;
13846 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == INDIRECT_REF
13847 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
13848 == COMPONENT_REF))
13849 {
13850 type = TREE_TYPE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0));
13851 is_ref = true;
13852 new_var = build2 (MEM_REF, type,
13853 build_fold_addr_expr (new_var),
13854 build_int_cst (build_pointer_type (type),
13855 offset));
13856 }
13857 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPONENT_REF)
13858 {
13859 type = TREE_TYPE (OMP_CLAUSE_DECL (c));
13860 is_ref = TREE_CODE (type) == REFERENCE_TYPE;
13861 new_var = build2 (MEM_REF, type,
13862 build_fold_addr_expr (new_var),
13863 build_int_cst (build_pointer_type (type),
13864 offset));
13865 }
13866 else
13867 is_ref = omp_privatize_by_reference (var);
13868 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
13869 is_ref = false;
13870 bool ref_to_array = false;
13871 if (is_ref)
13872 {
13873 type = TREE_TYPE (type);
13874 if (TREE_CODE (type) == ARRAY_TYPE)
13875 {
13876 type = build_pointer_type (type);
13877 ref_to_array = true;
13878 }
13879 }
13880 else if (TREE_CODE (type) == ARRAY_TYPE)
13881 {
13882 tree decl2 = DECL_VALUE_EXPR (new_var);
13883 gcc_assert (TREE_CODE (decl2) == MEM_REF);
13884 decl2 = TREE_OPERAND (decl2, 0);
13885 gcc_assert (DECL_P (decl2));
13886 new_var = decl2;
13887 type = TREE_TYPE (new_var);
13888 }
13889 x = build_receiver_ref (OMP_CLAUSE_DECL (prev), false, ctx);
13890 x = fold_convert_loc (clause_loc, type, x);
13891 if (!integer_zerop (OMP_CLAUSE_SIZE (c)))
13892 {
13893 tree bias = OMP_CLAUSE_SIZE (c);
13894 if (DECL_P (bias))
13895 bias = lookup_decl (bias, ctx);
13896 bias = fold_convert_loc (clause_loc, sizetype, bias);
13897 bias = fold_build1_loc (clause_loc, NEGATE_EXPR, sizetype,
13898 bias);
13899 x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
13900 TREE_TYPE (x), x, bias);
13901 }
13902 if (ref_to_array)
13903 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
13904 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
13905 if (is_ref && !ref_to_array)
13906 {
13907 tree t = create_tmp_var_raw (type, get_name (var));
13908 gimple_add_tmp_var (t);
13909 TREE_ADDRESSABLE (t) = 1;
13910 gimple_seq_add_stmt (&new_body,
13911 gimple_build_assign (t, x));
13912 x = build_fold_addr_expr_loc (clause_loc, t);
13913 }
13914 gimple_seq_add_stmt (&new_body,
13915 gimple_build_assign (new_var, x));
13916 prev = NULL_TREE;
13917 }
13918 else if (OMP_CLAUSE_CHAIN (c)
13919 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c))
13920 == OMP_CLAUSE_MAP
13921 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
13922 == GOMP_MAP_FIRSTPRIVATE_POINTER
13923 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
13924 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
13925 prev = c;
13926 break;
13927 case OMP_CLAUSE_PRIVATE:
13928 var = OMP_CLAUSE_DECL (c);
13929 if (is_variable_sized (var))
13930 {
13931 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
13932 tree new_var = lookup_decl (var, ctx);
13933 tree pvar = DECL_VALUE_EXPR (var);
13934 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
13935 pvar = TREE_OPERAND (pvar, 0);
13936 gcc_assert (DECL_P (pvar));
13937 tree new_pvar = lookup_decl (pvar, ctx);
13938 tree atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
13939 tree al = size_int (DECL_ALIGN (var));
13940 tree x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
13941 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
13942 x = fold_convert_loc (clause_loc, TREE_TYPE (new_pvar), x);
13943 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
13944 gimple_seq_add_stmt (&new_body,
13945 gimple_build_assign (new_pvar, x));
13946 }
13947 else if (omp_privatize_by_reference (var)
13948 && !is_gimple_omp_oacc (ctx->stmt))
13949 {
13950 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
13951 tree new_var = lookup_decl (var, ctx);
13952 tree x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
13953 if (TREE_CONSTANT (x))
13954 break;
13955 else
13956 {
13957 tree atmp
13958 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
13959 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
13960 tree al = size_int (TYPE_ALIGN (rtype));
13961 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
13962 }
13963
13964 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
13965 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
13966 gimple_seq_add_stmt (&new_body,
13967 gimple_build_assign (new_var, x));
13968 }
13969 break;
13970 }
13971
13972 gimple_seq fork_seq = NULL;
13973 gimple_seq join_seq = NULL;
13974
13975 if (offloaded && is_gimple_omp_oacc (ctx->stmt))
13976 {
13977 /* If there are reductions on the offloaded region itself, treat
13978 them as a dummy GANG loop. */
13979 tree level = build_int_cst (integer_type_node, GOMP_DIM_GANG);
13980
13981 gcall *private_marker = lower_oacc_private_marker (ctx);
13982
13983 if (private_marker)
13984 gimple_call_set_arg (private_marker, 2, level);
13985
13986 lower_oacc_reductions (gimple_location (ctx->stmt), clauses, level,
13987 false, NULL, private_marker, NULL, &fork_seq,
13988 &join_seq, ctx);
13989 }
13990
13991 gimple_seq_add_seq (&new_body, fork_seq);
13992 gimple_seq_add_seq (&new_body, tgt_body);
13993 gimple_seq_add_seq (&new_body, join_seq);
13994
13995 if (offloaded)
13996 {
13997 new_body = maybe_catch_exception (new_body);
13998 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
13999 }
14000 gimple_omp_set_body (stmt, new_body);
14001 }
14002
14003 bind = gimple_build_bind (NULL, NULL,
14004 tgt_bind ? gimple_bind_block (tgt_bind)
14005 : NULL_TREE);
14006 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
14007 gimple_bind_add_seq (bind, ilist);
14008 gimple_bind_add_stmt (bind, stmt);
14009 gimple_bind_add_seq (bind, olist);
14010
14011 pop_gimplify_context (NULL);
14012
14013 if (dep_bind)
14014 {
14015 gimple_bind_add_seq (dep_bind, dep_ilist);
14016 gimple_bind_add_stmt (dep_bind, bind);
14017 gimple_bind_add_seq (dep_bind, dep_olist);
14018 pop_gimplify_context (dep_bind);
14019 }
14020 }
14021
14022 /* Expand code for an OpenMP teams directive. */
14023
14024 static void
lower_omp_teams(gimple_stmt_iterator * gsi_p,omp_context * ctx)14025 lower_omp_teams (gimple_stmt_iterator *gsi_p, omp_context *ctx)
14026 {
14027 gomp_teams *teams_stmt = as_a <gomp_teams *> (gsi_stmt (*gsi_p));
14028 push_gimplify_context ();
14029
14030 tree block = make_node (BLOCK);
14031 gbind *bind = gimple_build_bind (NULL, NULL, block);
14032 gsi_replace (gsi_p, bind, true);
14033 gimple_seq bind_body = NULL;
14034 gimple_seq dlist = NULL;
14035 gimple_seq olist = NULL;
14036
14037 tree num_teams = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
14038 OMP_CLAUSE_NUM_TEAMS);
14039 tree num_teams_lower = NULL_TREE;
14040 if (num_teams == NULL_TREE)
14041 num_teams = build_int_cst (unsigned_type_node, 0);
14042 else
14043 {
14044 num_teams_lower = OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (num_teams);
14045 if (num_teams_lower)
14046 {
14047 num_teams_lower = fold_convert (unsigned_type_node, num_teams_lower);
14048 gimplify_expr (&num_teams_lower, &bind_body, NULL, is_gimple_val,
14049 fb_rvalue);
14050 }
14051 num_teams = OMP_CLAUSE_NUM_TEAMS_UPPER_EXPR (num_teams);
14052 num_teams = fold_convert (unsigned_type_node, num_teams);
14053 gimplify_expr (&num_teams, &bind_body, NULL, is_gimple_val, fb_rvalue);
14054 }
14055 if (num_teams_lower == NULL_TREE)
14056 num_teams_lower = num_teams;
14057 tree thread_limit = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
14058 OMP_CLAUSE_THREAD_LIMIT);
14059 if (thread_limit == NULL_TREE)
14060 thread_limit = build_int_cst (unsigned_type_node, 0);
14061 else
14062 {
14063 thread_limit = OMP_CLAUSE_THREAD_LIMIT_EXPR (thread_limit);
14064 thread_limit = fold_convert (unsigned_type_node, thread_limit);
14065 gimplify_expr (&thread_limit, &bind_body, NULL, is_gimple_val,
14066 fb_rvalue);
14067 }
14068 location_t loc = gimple_location (teams_stmt);
14069 tree decl = builtin_decl_explicit (BUILT_IN_GOMP_TEAMS4);
14070 tree rettype = TREE_TYPE (TREE_TYPE (decl));
14071 tree first = create_tmp_var (rettype);
14072 gimple_seq_add_stmt (&bind_body,
14073 gimple_build_assign (first, build_one_cst (rettype)));
14074 tree llabel = create_artificial_label (loc);
14075 gimple_seq_add_stmt (&bind_body, gimple_build_label (llabel));
14076 gimple *call
14077 = gimple_build_call (decl, 4, num_teams_lower, num_teams, thread_limit,
14078 first);
14079 gimple_set_location (call, loc);
14080 tree temp = create_tmp_var (rettype);
14081 gimple_call_set_lhs (call, temp);
14082 gimple_seq_add_stmt (&bind_body, call);
14083
14084 tree tlabel = create_artificial_label (loc);
14085 tree flabel = create_artificial_label (loc);
14086 gimple *cond = gimple_build_cond (NE_EXPR, temp, build_zero_cst (rettype),
14087 tlabel, flabel);
14088 gimple_seq_add_stmt (&bind_body, cond);
14089 gimple_seq_add_stmt (&bind_body, gimple_build_label (tlabel));
14090 gimple_seq_add_stmt (&bind_body,
14091 gimple_build_assign (first, build_zero_cst (rettype)));
14092
14093 lower_rec_input_clauses (gimple_omp_teams_clauses (teams_stmt),
14094 &bind_body, &dlist, ctx, NULL);
14095 lower_omp (gimple_omp_body_ptr (teams_stmt), ctx);
14096 lower_reduction_clauses (gimple_omp_teams_clauses (teams_stmt), &olist,
14097 NULL, ctx);
14098 gimple_seq_add_stmt (&bind_body, teams_stmt);
14099
14100 gimple_seq_add_seq (&bind_body, gimple_omp_body (teams_stmt));
14101 gimple_omp_set_body (teams_stmt, NULL);
14102 gimple_seq_add_seq (&bind_body, olist);
14103 gimple_seq_add_seq (&bind_body, dlist);
14104 gimple_seq_add_stmt (&bind_body, gimple_build_omp_return (true));
14105 gimple_seq_add_stmt (&bind_body, gimple_build_goto (llabel));
14106 gimple_seq_add_stmt (&bind_body, gimple_build_label (flabel));
14107 gimple_bind_set_body (bind, bind_body);
14108
14109 pop_gimplify_context (bind);
14110
14111 gimple_bind_append_vars (bind, ctx->block_vars);
14112 BLOCK_VARS (block) = ctx->block_vars;
14113 if (BLOCK_VARS (block))
14114 TREE_USED (block) = 1;
14115 }
14116
14117 /* Callback for lower_omp_1. Return non-NULL if *tp needs to be
14118 regimplified. If DATA is non-NULL, lower_omp_1 is outside
14119 of OMP context, but with make_addressable_vars set. */
14120
14121 static tree
lower_omp_regimplify_p(tree * tp,int * walk_subtrees,void * data)14122 lower_omp_regimplify_p (tree *tp, int *walk_subtrees,
14123 void *data)
14124 {
14125 tree t = *tp;
14126
14127 /* Any variable with DECL_VALUE_EXPR needs to be regimplified. */
14128 if ((VAR_P (t) || TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
14129 && data == NULL
14130 && DECL_HAS_VALUE_EXPR_P (t))
14131 return t;
14132
14133 if (make_addressable_vars
14134 && DECL_P (t)
14135 && bitmap_bit_p (make_addressable_vars, DECL_UID (t)))
14136 return t;
14137
14138 /* If a global variable has been privatized, TREE_CONSTANT on
14139 ADDR_EXPR might be wrong. */
14140 if (data == NULL && TREE_CODE (t) == ADDR_EXPR)
14141 recompute_tree_invariant_for_addr_expr (t);
14142
14143 *walk_subtrees = !IS_TYPE_OR_DECL_P (t);
14144 return NULL_TREE;
14145 }
14146
14147 /* Data to be communicated between lower_omp_regimplify_operands and
14148 lower_omp_regimplify_operands_p. */
14149
struct lower_omp_regimplify_operands_data
{
  /* OMP context in which remapped copies of dummy vars are looked up.  */
  omp_context *ctx;
  /* Flat list of <saved DECL_VALUE_EXPR, var> pairs recording every
     DECL_VALUE_EXPR temporarily replaced during the walk, so the caller
     can restore them afterwards.  */
  vec<tree> *decls;
};
14155
14156 /* Helper function for lower_omp_regimplify_operands. Find
14157 omp_member_access_dummy_var vars and adjust temporarily their
14158 DECL_VALUE_EXPRs if needed. */
14159
14160 static tree
lower_omp_regimplify_operands_p(tree * tp,int * walk_subtrees,void * data)14161 lower_omp_regimplify_operands_p (tree *tp, int *walk_subtrees,
14162 void *data)
14163 {
14164 tree t = omp_member_access_dummy_var (*tp);
14165 if (t)
14166 {
14167 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
14168 lower_omp_regimplify_operands_data *ldata
14169 = (lower_omp_regimplify_operands_data *) wi->info;
14170 tree o = maybe_lookup_decl (t, ldata->ctx);
14171 if (o != t)
14172 {
14173 ldata->decls->safe_push (DECL_VALUE_EXPR (*tp));
14174 ldata->decls->safe_push (*tp);
14175 tree v = unshare_and_remap (DECL_VALUE_EXPR (*tp), t, o);
14176 SET_DECL_VALUE_EXPR (*tp, v);
14177 }
14178 }
14179 *walk_subtrees = !IS_TYPE_OR_DECL_P (*tp);
14180 return NULL_TREE;
14181 }
14182
14183 /* Wrapper around gimple_regimplify_operands that adjusts DECL_VALUE_EXPRs
14184 of omp_member_access_dummy_var vars during regimplification. */
14185
14186 static void
lower_omp_regimplify_operands(omp_context * ctx,gimple * stmt,gimple_stmt_iterator * gsi_p)14187 lower_omp_regimplify_operands (omp_context *ctx, gimple *stmt,
14188 gimple_stmt_iterator *gsi_p)
14189 {
14190 auto_vec<tree, 10> decls;
14191 if (ctx)
14192 {
14193 struct walk_stmt_info wi;
14194 memset (&wi, '\0', sizeof (wi));
14195 struct lower_omp_regimplify_operands_data data;
14196 data.ctx = ctx;
14197 data.decls = &decls;
14198 wi.info = &data;
14199 walk_gimple_op (stmt, lower_omp_regimplify_operands_p, &wi);
14200 }
14201 gimple_regimplify_operands (stmt, gsi_p);
14202 while (!decls.is_empty ())
14203 {
14204 tree t = decls.pop ();
14205 tree v = decls.pop ();
14206 SET_DECL_VALUE_EXPR (t, v);
14207 }
14208 }
14209
/* Lower the statement pointed to by GSI_P within OMP context CTX (NULL
   when outside any OMP construct).  OMP constructs are dispatched to
   their construct-specific lowering routines, statements containing
   nested sequences are recursed into, and other statements are
   regimplified when their operands mention variables remapped by the
   lowering.  */

static void
lower_omp_1 (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  gimple *stmt = gsi_stmt (*gsi_p);
  struct walk_stmt_info wi;
  gcall *call_stmt;

  if (gimple_has_location (stmt))
    input_location = gimple_location (stmt);

  /* WI is only passed to lower_omp_regimplify_p below when CTX is NULL
     and make_addressable_vars is set.  */
  if (make_addressable_vars)
    memset (&wi, '\0', sizeof (wi));

  /* If we have issued syntax errors, avoid doing any heavy lifting.
     Just replace the OMP directives with a NOP to avoid
     confusing RTL expansion.  */
  if (seen_error () && is_gimple_omp (stmt))
    {
      gsi_replace (gsi_p, gimple_build_nop (), true);
      return;
    }

  switch (gimple_code (stmt))
    {
    case GIMPLE_COND:
      {
	gcond *cond_stmt = as_a <gcond *> (stmt);
	/* Regimplify the condition when either operand mentions a
	   variable that lower_omp_regimplify_p flags.  */
	if ((ctx || make_addressable_vars)
	    && (walk_tree (gimple_cond_lhs_ptr (cond_stmt),
			   lower_omp_regimplify_p,
			   ctx ? NULL : &wi, NULL)
		|| walk_tree (gimple_cond_rhs_ptr (cond_stmt),
			      lower_omp_regimplify_p,
			      ctx ? NULL : &wi, NULL)))
	  lower_omp_regimplify_operands (ctx, cond_stmt, gsi_p);
      }
      break;
    /* Container statements: recurse into their nested sequences.  */
    case GIMPLE_CATCH:
      lower_omp (gimple_catch_handler_ptr (as_a <gcatch *> (stmt)), ctx);
      break;
    case GIMPLE_EH_FILTER:
      lower_omp (gimple_eh_filter_failure_ptr (stmt), ctx);
      break;
    case GIMPLE_TRY:
      lower_omp (gimple_try_eval_ptr (stmt), ctx);
      lower_omp (gimple_try_cleanup_ptr (stmt), ctx);
      break;
    case GIMPLE_TRANSACTION:
      lower_omp (gimple_transaction_body_ptr (as_a <gtransaction *> (stmt)),
		 ctx);
      break;
    case GIMPLE_BIND:
      /* For OpenACC constructs, scan the bind's decls for privatization
	 candidates before lowering the body.  */
      if (ctx && is_gimple_omp_oacc (ctx->stmt))
	{
	  tree vars = gimple_bind_vars (as_a <gbind *> (stmt));
	  oacc_privatization_scan_decl_chain (ctx, vars);
	}
      lower_omp (gimple_bind_body_ptr (as_a <gbind *> (stmt)), ctx);
      maybe_remove_omp_member_access_dummy_vars (as_a <gbind *> (stmt));
      break;
    /* OMP constructs: look up the context recorded during scanning and
       hand off to the construct-specific lowering routine.  */
    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      /* Cancellable regions get a label for cancellation checks to
	 branch to (see the GOMP_CANCEL handling below).  */
      if (ctx->cancellable)
	ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
      lower_omp_taskreg (gsi_p, ctx);
      break;
    case GIMPLE_OMP_FOR:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      if (ctx->cancellable)
	ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
      lower_omp_for (gsi_p, ctx);
      break;
    case GIMPLE_OMP_SECTIONS:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      if (ctx->cancellable)
	ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
      lower_omp_sections (gsi_p, ctx);
      break;
    case GIMPLE_OMP_SCOPE:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_scope (gsi_p, ctx);
      break;
    case GIMPLE_OMP_SINGLE:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_single (gsi_p, ctx);
      break;
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_MASKED:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_master (gsi_p, ctx);
      break;
    case GIMPLE_OMP_TASKGROUP:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_taskgroup (gsi_p, ctx);
      break;
    case GIMPLE_OMP_ORDERED:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_ordered (gsi_p, ctx);
      break;
    case GIMPLE_OMP_SCAN:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_scan (gsi_p, ctx);
      break;
    case GIMPLE_OMP_CRITICAL:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_critical (gsi_p, ctx);
      break;
    case GIMPLE_OMP_ATOMIC_LOAD:
      /* Only the load's RHS address can need regimplification here.  */
      if ((ctx || make_addressable_vars)
	  && walk_tree (gimple_omp_atomic_load_rhs_ptr (
			  as_a <gomp_atomic_load *> (stmt)),
			lower_omp_regimplify_p, ctx ? NULL : &wi, NULL))
	lower_omp_regimplify_operands (ctx, stmt, gsi_p);
      break;
    case GIMPLE_OMP_TARGET:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_target (gsi_p, ctx);
      break;
    case GIMPLE_OMP_TEAMS:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      /* Host teams are lowered like a parallel/task region.  */
      if (gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
	lower_omp_taskreg (gsi_p, ctx);
      else
	lower_omp_teams (gsi_p, ctx);
      break;
    case GIMPLE_CALL:
      tree fndecl;
      call_stmt = as_a <gcall *> (stmt);
      fndecl = gimple_call_fndecl (call_stmt);
      if (fndecl
	  && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
	switch (DECL_FUNCTION_CODE (fndecl))
	  {
	  case BUILT_IN_GOMP_BARRIER:
	    if (ctx == NULL)
	      break;
	    /* FALLTHRU */
	  case BUILT_IN_GOMP_CANCEL:
	  case BUILT_IN_GOMP_CANCELLATION_POINT:
	    /* In sections, cancellation is handled in the enclosing
	       sections context, not the individual section.  */
	    omp_context *cctx;
	    cctx = ctx;
	    if (gimple_code (cctx->stmt) == GIMPLE_OMP_SECTION)
	      cctx = cctx->outer;
	    gcc_assert (gimple_call_lhs (call_stmt) == NULL_TREE);
	    if (!cctx->cancellable)
	      {
		/* In a non-cancellable region a cancellation point is
		   a no-op; a barrier/cancel call is left as-is.  */
		if (DECL_FUNCTION_CODE (fndecl)
		    == BUILT_IN_GOMP_CANCELLATION_POINT)
		  {
		    stmt = gimple_build_nop ();
		    gsi_replace (gsi_p, stmt, false);
		  }
		break;
	      }
	    /* In cancellable regions, use the cancellable barrier
	       entry point which reports pending cancellation.  */
	    if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_GOMP_BARRIER)
	      {
		fndecl = builtin_decl_explicit (BUILT_IN_GOMP_BARRIER_CANCEL);
		gimple_call_set_fndecl (call_stmt, fndecl);
		gimple_call_set_fntype (call_stmt, TREE_TYPE (fndecl));
	      }
	    /* Capture the call's result and branch to the region's
	       cancel label when it is true, otherwise fall through.  */
	    tree lhs;
	    lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (fndecl)));
	    gimple_call_set_lhs (call_stmt, lhs);
	    tree fallthru_label;
	    fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
	    gimple *g;
	    g = gimple_build_label (fallthru_label);
	    gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
	    g = gimple_build_cond (NE_EXPR, lhs,
				   fold_convert (TREE_TYPE (lhs),
						 boolean_false_node),
				   cctx->cancel_label, fallthru_label);
	    gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
	    break;
	  default:
	    break;
	  }
      goto regimplify;

    case GIMPLE_ASSIGN:
      /* For lastprivate (conditional:) handling, when assigning to a
	 tracked variable, record the current iteration into the
	 corresponding conditional temporary.  */
      for (omp_context *up = ctx; up; up = up->outer)
	{
	  /* Skip contexts that merely nest inside the worksharing
	     construct owning the lastprivate_conditional_map.  */
	  if (gimple_code (up->stmt) == GIMPLE_OMP_ORDERED
	      || gimple_code (up->stmt) == GIMPLE_OMP_CRITICAL
	      || gimple_code (up->stmt) == GIMPLE_OMP_TASKGROUP
	      || gimple_code (up->stmt) == GIMPLE_OMP_SCOPE
	      || gimple_code (up->stmt) == GIMPLE_OMP_SECTION
	      || gimple_code (up->stmt) == GIMPLE_OMP_SCAN
	      || (gimple_code (up->stmt) == GIMPLE_OMP_TARGET
		  && (gimple_omp_target_kind (up->stmt)
		      == GF_OMP_TARGET_KIND_DATA)))
	    continue;
	  else if (!up->lastprivate_conditional_map)
	    break;
	  tree lhs = get_base_address (gimple_assign_lhs (stmt));
	  /* Look through a dereference of a REFERENCE_TYPE decl.  */
	  if (TREE_CODE (lhs) == MEM_REF
	      && DECL_P (TREE_OPERAND (lhs, 0))
	      && TREE_CODE (TREE_TYPE (TREE_OPERAND (lhs,
						     0))) == REFERENCE_TYPE)
	    lhs = TREE_OPERAND (lhs, 0);
	  if (DECL_P (lhs))
	    if (tree *v = up->lastprivate_conditional_map->get (lhs))
	      {
		tree clauses;
		if (up->combined_into_simd_safelen1)
		  {
		    up = up->outer;
		    if (gimple_code (up->stmt) == GIMPLE_OMP_SCAN)
		      up = up->outer;
		  }
		if (gimple_code (up->stmt) == GIMPLE_OMP_FOR)
		  clauses = gimple_omp_for_clauses (up->stmt);
		else
		  clauses = gimple_omp_sections_clauses (up->stmt);
		/* Find the _condtemp_ clause with the iterator flag.  */
		tree c = omp_find_clause (clauses, OMP_CLAUSE__CONDTEMP_);
		if (!OMP_CLAUSE__CONDTEMP__ITER (c))
		  c = omp_find_clause (OMP_CLAUSE_CHAIN (c),
				       OMP_CLAUSE__CONDTEMP_);
		gcc_assert (OMP_CLAUSE__CONDTEMP__ITER (c));
		gimple *g = gimple_build_assign (*v, OMP_CLAUSE_DECL (c));
		gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
	      }
	}
      /* FALLTHRU */

    default:
    regimplify:
      if ((ctx || make_addressable_vars)
	  && walk_gimple_op (stmt, lower_omp_regimplify_p,
			     ctx ? NULL : &wi))
	{
	  /* Just remove clobbers, this should happen only if we have
	     "privatized" local addressable variables in SIMD regions,
	     the clobber isn't needed in that case and gimplifying address
	     of the ARRAY_REF into a pointer and creating MEM_REF based
	     clobber would create worse code than we get with the clobber
	     dropped.  */
	  if (gimple_clobber_p (stmt))
	    {
	      gsi_replace (gsi_p, gimple_build_nop (), true);
	      break;
	    }
	  lower_omp_regimplify_operands (ctx, stmt, gsi_p);
	}
      break;
    }
}
14470
14471 static void
lower_omp(gimple_seq * body,omp_context * ctx)14472 lower_omp (gimple_seq *body, omp_context *ctx)
14473 {
14474 location_t saved_location = input_location;
14475 gimple_stmt_iterator gsi;
14476 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
14477 lower_omp_1 (&gsi, ctx);
14478 /* During gimplification, we haven't folded statments inside offloading
14479 or taskreg regions (gimplify.cc:maybe_fold_stmt); do that now. */
14480 if (target_nesting_level || taskreg_nesting_level)
14481 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
14482 fold_stmt (&gsi);
14483 input_location = saved_location;
14484 }
14485
/* Main entry point.  Scans the function body for OMP constructs,
   building omp_contexts, then lowers them all.  Returns 0 (no
   additional TODO flags).  */

static unsigned int
execute_lower_omp (void)
{
  gimple_seq body;
  int i;
  omp_context *ctx;

  /* This pass always runs, to provide PROP_gimple_lomp.
     But often, there is nothing to do.  */
  if (flag_openacc == 0 && flag_openmp == 0
      && flag_openmp_simd == 0)
    return 0;

  all_contexts = splay_tree_new (splay_tree_compare_pointers, 0,
				 delete_omp_context);

  body = gimple_body (current_function_decl);

  /* Pass 1: build omp_contexts for all OMP constructs in the body.  */
  scan_omp (&body, NULL);
  gcc_assert (taskreg_nesting_level == 0);
  /* Finalize record types for parallel/task regions scanned above.  */
  FOR_EACH_VEC_ELT (taskreg_contexts, i, ctx)
    finish_taskreg_scan (ctx);
  taskreg_contexts.release ();

  /* Pass 2: lower, if any context was created.  */
  if (all_contexts->root)
    {
      if (make_addressable_vars)
	push_gimplify_context ();
      lower_omp (&body, NULL);
      if (make_addressable_vars)
	pop_gimplify_context (NULL);
    }

  if (all_contexts)
    {
      splay_tree_delete (all_contexts);
      all_contexts = NULL;
    }
  BITMAP_FREE (make_addressable_vars);
  BITMAP_FREE (global_nonaddressable_vars);

  /* If current function is a method, remove artificial dummy VAR_DECL created
     for non-static data member privatization, they aren't needed for
     debuginfo nor anything else, have been already replaced everywhere in the
     IL and cause problems with LTO.  */
  if (DECL_ARGUMENTS (current_function_decl)
      && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
      && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
	  == POINTER_TYPE))
    remove_member_access_dummy_vars (DECL_INITIAL (current_function_decl));

  /* Finalize the copy functions generated for task constructs.  */
  for (auto task_stmt : task_cpyfns)
    finalize_task_copyfn (task_stmt);
  task_cpyfns.release ();
  return 0;
}
14544
14545 namespace {
14546
/* Pass descriptor for the "omplower" pass.  */

const pass_data pass_data_lower_omp =
{
  GIMPLE_PASS, /* type */
  "omplower", /* name */
  OPTGROUP_OMP, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  PROP_gimple_lomp | PROP_gimple_lomp_dev, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
14559
/* Gimple pass wrapping execute_lower_omp; per pass_data_lower_omp it
   provides PROP_gimple_lomp and PROP_gimple_lomp_dev.  */

class pass_lower_omp : public gimple_opt_pass
{
public:
  pass_lower_omp (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_lower_omp, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return execute_lower_omp (); }

}; // class pass_lower_omp
14571
14572 } // anon namespace
14573
14574 gimple_opt_pass *
make_pass_lower_omp(gcc::context * ctxt)14575 make_pass_lower_omp (gcc::context *ctxt)
14576 {
14577 return new pass_lower_omp (ctxt);
14578 }
14579
14580 /* The following is a utility to diagnose structured block violations.
14581 It is not part of the "omplower" pass, as that's invoked too late. It
14582 should be invoked by the respective front ends after gimplification. */
14583
14584 static splay_tree all_labels;
14585
/* Check for mismatched contexts and generate an error if needed.  Return
   true if an error is detected.  BRANCH_CTX is the innermost OMP
   construct containing the branch, LABEL_CTX the one containing the
   destination label; either may be NULL.  On error, the offending
   statement at *GSI_P is replaced with a NOP.  */

static bool
diagnose_sb_0 (gimple_stmt_iterator *gsi_p,
	       gimple *branch_ctx, gimple *label_ctx)
{
  gcc_checking_assert (!branch_ctx || is_gimple_omp (branch_ctx));
  gcc_checking_assert (!label_ctx || is_gimple_omp (label_ctx));

  /* Same construct on both ends: the branch is fine.  */
  if (label_ctx == branch_ctx)
    return false;

  /* Pick "OpenACC" or "OpenMP" for the diagnostic wording.  */
  const char* kind = NULL;

  if (flag_openacc)
    {
      if ((branch_ctx && is_gimple_omp_oacc (branch_ctx))
	  || (label_ctx && is_gimple_omp_oacc (label_ctx)))
	{
	  gcc_checking_assert (kind == NULL);
	  kind = "OpenACC";
	}
    }
  if (kind == NULL)
    {
      gcc_checking_assert (flag_openmp || flag_openmp_simd);
      kind = "OpenMP";
    }

  /* Previously we kept track of the label's entire context in diagnose_sb_[12]
     so we could traverse it and issue a correct "exit" or "enter" error
     message upon a structured block violation.

     We built the context by building a list with tree_cons'ing, but there is
     no easy counterpart in gimple tuples.  It seems like far too much work
     for issuing exit/enter error messages.  If someone really misses the
     distinct error message... patches welcome.  */

#if 0
  /* Try to avoid confusing the user by producing an error message
     with correct "exit" or "enter" verbiage.  We prefer "exit"
     unless we can show that LABEL_CTX is nested within BRANCH_CTX.  */
  if (branch_ctx == NULL)
    exit_p = false;
  else
    {
      while (label_ctx)
	{
	  if (TREE_VALUE (label_ctx) == branch_ctx)
	    {
	      exit_p = false;
	      break;
	    }
	  label_ctx = TREE_CHAIN (label_ctx);
	}
    }

  if (exit_p)
    error ("invalid exit from %s structured block", kind);
  else
    error ("invalid entry to %s structured block", kind);
#endif

  /* If it's obvious we have an invalid entry, be specific about the error.  */
  if (branch_ctx == NULL)
    error ("invalid entry to %s structured block", kind);
  else
    {
      /* Otherwise, be vague and lazy, but efficient.  */
      error ("invalid branch to/from %s structured block", kind);
    }

  gsi_replace (gsi_p, gimple_build_nop (), false);
  return true;
}
14662
/* Pass 1: Create a minimal tree of structured blocks, and record
   where each label is found.  Walker callback for walk_gimple_seq;
   WI->info holds the innermost enclosing OMP construct (or NULL).  */

static tree
diagnose_sb_1 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
	       struct walk_stmt_info *wi)
{
  gimple *context = (gimple *) wi->info;
  gimple *inner_context;
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;

  switch (gimple_code (stmt))
    {
    WALK_SUBSTMTS;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_SCOPE:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_MASKED:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_SCAN:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_TARGET:
    case GIMPLE_OMP_TEAMS:
    case GIMPLE_OMP_TASKGROUP:
      /* The minimal context here is just the current OMP construct.  */
      inner_context = stmt;
      wi->info = inner_context;
      walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_OMP_FOR:
      inner_context = stmt;
      wi->info = inner_context;
      /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
	 walk them.  */
      walk_gimple_seq (gimple_omp_for_pre_body (stmt),
		       diagnose_sb_1, NULL, wi);
      walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_LABEL:
      /* Record which OMP construct (if any) contains this label.  */
      splay_tree_insert (all_labels,
			 (splay_tree_key) gimple_label_label (
					    as_a <glabel *> (stmt)),
			 (splay_tree_value) context);
      break;

    default:
      break;
    }

  return NULL_TREE;
}
14725
/* Pass 2: Check each branch and see if its context differs from that of
   the destination label's context.  Walker callback for
   walk_gimple_seq_mod; WI->info holds the innermost enclosing OMP
   construct (or NULL).  Uses the ALL_LABELS map built by
   diagnose_sb_1 and reports violations via diagnose_sb_0.  */

static tree
diagnose_sb_2 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
	       struct walk_stmt_info *wi)
{
  gimple *context = (gimple *) wi->info;
  splay_tree_node n;
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;

  switch (gimple_code (stmt))
    {
    WALK_SUBSTMTS;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_SCOPE:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_MASKED:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_SCAN:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_TARGET:
    case GIMPLE_OMP_TEAMS:
    case GIMPLE_OMP_TASKGROUP:
      /* Entering an OMP construct: walk its body with this construct
	 as the current context.  */
      wi->info = stmt;
      walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_OMP_FOR:
      wi->info = stmt;
      /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
	 walk them.  */
      walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt),
			   diagnose_sb_2, NULL, wi);
      walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_COND:
      {
	gcond *cond_stmt = as_a <gcond *> (stmt);
	/* Check both branch destinations of the conditional.  */
	tree lab = gimple_cond_true_label (cond_stmt);
	if (lab)
	  {
	    n = splay_tree_lookup (all_labels,
				   (splay_tree_key) lab);
	    diagnose_sb_0 (gsi_p, context,
			   n ? (gimple *) n->value : NULL);
	  }
	lab = gimple_cond_false_label (cond_stmt);
	if (lab)
	  {
	    n = splay_tree_lookup (all_labels,
				   (splay_tree_key) lab);
	    diagnose_sb_0 (gsi_p, context,
			   n ? (gimple *) n->value : NULL);
	  }
      }
      break;

    case GIMPLE_GOTO:
      {
	tree lab = gimple_goto_dest (stmt);
	/* Computed gotos are not checked here.  */
	if (TREE_CODE (lab) != LABEL_DECL)
	  break;

	n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
	diagnose_sb_0 (gsi_p, context, n ? (gimple *) n->value : NULL);
      }
      break;

    case GIMPLE_SWITCH:
      {
	gswitch *switch_stmt = as_a <gswitch *> (stmt);
	unsigned int i;
	/* Check every case label; stop after the first diagnosed
	   violation (the switch has been replaced by a NOP).  */
	for (i = 0; i < gimple_switch_num_labels (switch_stmt); ++i)
	  {
	    tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
	    n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
	    if (n && diagnose_sb_0 (gsi_p, context, (gimple *) n->value))
	      break;
	  }
      }
      break;

    case GIMPLE_RETURN:
      /* A return inside an OMP construct exits it; NULL label context
	 forces the mismatch check.  */
      diagnose_sb_0 (gsi_p, context, NULL);
      break;

    default:
      break;
    }

  return NULL_TREE;
}
14829
14830 static unsigned int
diagnose_omp_structured_block_errors(void)14831 diagnose_omp_structured_block_errors (void)
14832 {
14833 struct walk_stmt_info wi;
14834 gimple_seq body = gimple_body (current_function_decl);
14835
14836 all_labels = splay_tree_new (splay_tree_compare_pointers, 0, 0);
14837
14838 memset (&wi, 0, sizeof (wi));
14839 walk_gimple_seq (body, diagnose_sb_1, NULL, &wi);
14840
14841 memset (&wi, 0, sizeof (wi));
14842 wi.want_locations = true;
14843 walk_gimple_seq_mod (&body, diagnose_sb_2, NULL, &wi);
14844
14845 gimple_set_body (current_function_decl, body);
14846
14847 splay_tree_delete (all_labels);
14848 all_labels = NULL;
14849
14850 return 0;
14851 }
14852
14853 namespace {
14854
/* Pass descriptor for the structured-block diagnostic pass.  */

const pass_data pass_data_diagnose_omp_blocks =
{
  GIMPLE_PASS, /* type */
  "*diagnose_omp_blocks", /* name */
  OPTGROUP_OMP, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
14867
/* Gimple pass wrapping diagnose_omp_structured_block_errors; gated on
   any of -fopenacc, -fopenmp or -fopenmp-simd being enabled.  */

class pass_diagnose_omp_blocks : public gimple_opt_pass
{
public:
  pass_diagnose_omp_blocks (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_diagnose_omp_blocks, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
  {
    return flag_openacc || flag_openmp || flag_openmp_simd;
  }
  virtual unsigned int execute (function *)
  {
    return diagnose_omp_structured_block_errors ();
  }

}; // class pass_diagnose_omp_blocks
14886
14887 } // anon namespace
14888
14889 gimple_opt_pass *
make_pass_diagnose_omp_blocks(gcc::context * ctxt)14890 make_pass_diagnose_omp_blocks (gcc::context *ctxt)
14891 {
14892 return new pass_diagnose_omp_blocks (ctxt);
14893 }
14894
14895
14896 #include "gt-omp-low.h"
14897