1 /* Lowering pass for OMP directives. Converts OMP directives into explicit
2 calls to the runtime library (libgomp), data marshalling to implement data
3 sharing and copying clauses, offloading to accelerators, and more.
4
5 Contributed by Diego Novillo <dnovillo@redhat.com>
6
7 Copyright (C) 2005-2021 Free Software Foundation, Inc.
8
9 This file is part of GCC.
10
11 GCC is free software; you can redistribute it and/or modify it under
12 the terms of the GNU General Public License as published by the Free
13 Software Foundation; either version 3, or (at your option) any later
14 version.
15
16 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
17 WARRANTY; without even the implied warranty of MERCHANTABILITY or
18 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
19 for more details.
20
21 You should have received a copy of the GNU General Public License
22 along with GCC; see the file COPYING3. If not see
23 <http://www.gnu.org/licenses/>. */
24
25 #include "config.h"
26 #include "system.h"
27 #include "coretypes.h"
28 #include "backend.h"
29 #include "target.h"
30 #include "tree.h"
31 #include "gimple.h"
32 #include "tree-pass.h"
33 #include "ssa.h"
34 #include "cgraph.h"
35 #include "pretty-print.h"
36 #include "diagnostic-core.h"
37 #include "fold-const.h"
38 #include "stor-layout.h"
39 #include "internal-fn.h"
40 #include "gimple-fold.h"
41 #include "gimplify.h"
42 #include "gimple-iterator.h"
43 #include "gimplify-me.h"
44 #include "gimple-walk.h"
45 #include "tree-iterator.h"
46 #include "tree-inline.h"
47 #include "langhooks.h"
48 #include "tree-dfa.h"
49 #include "tree-ssa.h"
50 #include "splay-tree.h"
51 #include "omp-general.h"
52 #include "omp-low.h"
53 #include "gimple-low.h"
54 #include "alloc-pool.h"
55 #include "symbol-summary.h"
56 #include "tree-nested.h"
57 #include "context.h"
58 #include "gomp-constants.h"
59 #include "gimple-pretty-print.h"
60 #include "stringpool.h"
61 #include "attribs.h"
62
63 /* Lowering of OMP parallel and workshare constructs proceeds in two
64 phases. The first phase scans the function looking for OMP statements
65 and then for variables that must be replaced to satisfy data sharing
66 clauses. The second phase expands code for the constructs, as well as
67 re-gimplifying things when variables have been replaced with complex
68 expressions.
69
70 Final code generation is done by pass_expand_omp. The flowgraph is
71 scanned for regions which are then moved to a new
72 function, to be invoked by the thread library, or offloaded. */
73
74 /* Context structure. Used to store information about each parallel
75 directive in the code. */
76
struct omp_context
{
  /* This field must be at the beginning, as we do "inheritance": Some
     callback functions for tree-inline.c (e.g., omp_copy_decl)
     receive a copy_body_data pointer that is up-casted to an
     omp_context pointer.  */
  copy_body_data cb;

  /* The tree of contexts corresponding to the encountered constructs.  */
  struct omp_context *outer;
  /* The GIMPLE_OMP_* statement this context was created for.  */
  gimple *stmt;

  /* Map variables to fields in a structure that allows communication
     between sending and receiving threads.  */
  splay_tree field_map;
  tree record_type;
  tree sender_decl;
  tree receiver_decl;

  /* These are used just by task contexts, if task firstprivate fn is
     needed.  srecord_type is used to communicate from the thread
     that encountered the task construct to task firstprivate fn,
     record_type is allocated by GOMP_task, initialized by task firstprivate
     fn and passed to the task body fn.  */
  splay_tree sfield_map;
  tree srecord_type;

  /* A chain of variables to add to the top-level block surrounding the
     construct.  In the case of a parallel, this is in the child function.  */
  tree block_vars;

  /* Label to which GOMP_cancel{,llation_point} and explicit and implicit
     barriers should jump to during omplower pass.  */
  tree cancel_label;

  /* The sibling GIMPLE_OMP_FOR simd with _simt_ clause or NULL
     otherwise.  */
  gimple *simt_stmt;

  /* For task reductions registered in this context, a vector containing
     the length of the private copies block (if constant, otherwise NULL)
     and then offsets (if constant, otherwise NULL) for each entry.  */
  vec<tree> task_reductions;

  /* A hash map from the reduction clauses to the registered array
     elts.  */
  hash_map<tree, unsigned> *task_reduction_map;

  /* And a hash map from the lastprivate(conditional:) variables to their
     corresponding tracking loop iteration variables.  */
  hash_map<tree, tree> *lastprivate_conditional_map;

  /* And a hash map from the allocate variables to their corresponding
     allocators.  */
  hash_map<tree, tree> *allocate_map;

  /* A tree_list of the reduction clauses in this context.  This is
     only used for checking the consistency of OpenACC reduction
     clauses in scan_omp_for and is not guaranteed to contain a valid
     value outside of this function.  */
  tree local_reduction_clauses;

  /* A tree_list of the reduction clauses in outer contexts.  This is
     only used for checking the consistency of OpenACC reduction
     clauses in scan_omp_for and is not guaranteed to contain a valid
     value outside of this function.  */
  tree outer_reduction_clauses;

  /* Nesting depth of this context.  Used to beautify error messages re
     invalid gotos.  The outermost ctx is depth 1, with depth 0 being
     reserved for the main body of the function.  */
  int depth;

  /* True if this parallel directive is nested within another.  */
  bool is_nested;

  /* True if this construct can be cancelled.  */
  bool cancellable;

  /* True if lower_omp_1 should look up lastprivate conditional in parent
     context.  */
  bool combined_into_simd_safelen1;

  /* True if there is nested scan context with inclusive clause.  */
  bool scan_inclusive;

  /* True if there is nested scan context with exclusive clause.  */
  bool scan_exclusive;

  /* True in the second simd loop of for simd with inscan reductions.  */
  bool for_simd_scan_phase;

  /* True if there is order(concurrent) clause on the construct.  */
  bool order_concurrent;

  /* True if there is bind clause on the construct (i.e. a loop construct).  */
  bool loop_p;

  /* Only used for omp target contexts.  True if a teams construct is
     strictly nested in it.  */
  bool teams_nested_p;

  /* Only used for omp target contexts.  True if an OpenMP construct other
     than teams is strictly nested in it.  */
  bool nonteams_nested_p;
};
183
/* Map from each GIMPLE_OMP_* statement to the omp_context created for it
   (see new_omp_context).  */
static splay_tree all_contexts;

/* Nesting depth of parallel/task regions during scanning.  */
static int taskreg_nesting_level;

/* Nesting depth of target regions during scanning.  */
static int target_nesting_level;

/* Variables whose address is taken only because a task needs it; see
   use_pointer_for_field.  */
static bitmap task_shared_vars;

/* Global variables first seen as non-addressable during this pass; kept so
   the answer stays stable even if they are made addressable later.  See the
   PR91216 note in use_pointer_for_field.  */
static bitmap global_nonaddressable_vars;

/* Contexts of the task-region-like constructs (parallel/task/host teams)
   collected during scanning.  */
static vec<omp_context *> taskreg_contexts;

static void scan_omp (gimple_seq *, omp_context *);
static tree scan_omp_1_op (tree *, int *, void *);
193
/* Case labels for walk_gimple_stmt callbacks: for these statement codes,
   clear *HANDLED_OPS_P so the walker descends into their sub-statements.
   Expects a variable named handled_ops_p to be in scope.  */
#define WALK_SUBSTMTS \
    case GIMPLE_BIND: \
    case GIMPLE_TRY: \
    case GIMPLE_CATCH: \
    case GIMPLE_EH_FILTER: \
    case GIMPLE_TRANSACTION: \
      /* The sub-statements for these should be walked.  */ \
      *handled_ops_p = false; \
      break;
203
204 /* Return whether CTX represents an OpenACC 'parallel' or 'serial' construct.
205 (This doesn't include OpenACC 'kernels' decomposed parts.) */
206
207 static bool
is_oacc_parallel_or_serial(omp_context * ctx)208 is_oacc_parallel_or_serial (omp_context *ctx)
209 {
210 enum gimple_code outer_type = gimple_code (ctx->stmt);
211 return ((outer_type == GIMPLE_OMP_TARGET)
212 && ((gimple_omp_target_kind (ctx->stmt)
213 == GF_OMP_TARGET_KIND_OACC_PARALLEL)
214 || (gimple_omp_target_kind (ctx->stmt)
215 == GF_OMP_TARGET_KIND_OACC_SERIAL)));
216 }
217
218 /* Return whether CTX represents an OpenACC 'kernels' construct.
219 (This doesn't include OpenACC 'kernels' decomposed parts.) */
220
221 static bool
is_oacc_kernels(omp_context * ctx)222 is_oacc_kernels (omp_context *ctx)
223 {
224 enum gimple_code outer_type = gimple_code (ctx->stmt);
225 return ((outer_type == GIMPLE_OMP_TARGET)
226 && (gimple_omp_target_kind (ctx->stmt)
227 == GF_OMP_TARGET_KIND_OACC_KERNELS));
228 }
229
230 /* Return whether CTX represents an OpenACC 'kernels' decomposed part. */
231
232 static bool
is_oacc_kernels_decomposed_part(omp_context * ctx)233 is_oacc_kernels_decomposed_part (omp_context *ctx)
234 {
235 enum gimple_code outer_type = gimple_code (ctx->stmt);
236 return ((outer_type == GIMPLE_OMP_TARGET)
237 && ((gimple_omp_target_kind (ctx->stmt)
238 == GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED)
239 || (gimple_omp_target_kind (ctx->stmt)
240 == GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE)
241 || (gimple_omp_target_kind (ctx->stmt)
242 == GF_OMP_TARGET_KIND_OACC_DATA_KERNELS)));
243 }
244
245 /* Return true if STMT corresponds to an OpenMP target region. */
246 static bool
is_omp_target(gimple * stmt)247 is_omp_target (gimple *stmt)
248 {
249 if (gimple_code (stmt) == GIMPLE_OMP_TARGET)
250 {
251 int kind = gimple_omp_target_kind (stmt);
252 return (kind == GF_OMP_TARGET_KIND_REGION
253 || kind == GF_OMP_TARGET_KIND_DATA
254 || kind == GF_OMP_TARGET_KIND_ENTER_DATA
255 || kind == GF_OMP_TARGET_KIND_EXIT_DATA);
256 }
257 return false;
258 }
259
260 /* If DECL is the artificial dummy VAR_DECL created for non-static
261 data member privatization, return the underlying "this" parameter,
262 otherwise return NULL. */
263
264 tree
omp_member_access_dummy_var(tree decl)265 omp_member_access_dummy_var (tree decl)
266 {
267 if (!VAR_P (decl)
268 || !DECL_ARTIFICIAL (decl)
269 || !DECL_IGNORED_P (decl)
270 || !DECL_HAS_VALUE_EXPR_P (decl)
271 || !lang_hooks.decls.omp_disregard_value_expr (decl, false))
272 return NULL_TREE;
273
274 tree v = DECL_VALUE_EXPR (decl);
275 if (TREE_CODE (v) != COMPONENT_REF)
276 return NULL_TREE;
277
278 while (1)
279 switch (TREE_CODE (v))
280 {
281 case COMPONENT_REF:
282 case MEM_REF:
283 case INDIRECT_REF:
284 CASE_CONVERT:
285 case POINTER_PLUS_EXPR:
286 v = TREE_OPERAND (v, 0);
287 continue;
288 case PARM_DECL:
289 if (DECL_CONTEXT (v) == current_function_decl
290 && DECL_ARTIFICIAL (v)
291 && TREE_CODE (TREE_TYPE (v)) == POINTER_TYPE)
292 return v;
293 return NULL_TREE;
294 default:
295 return NULL_TREE;
296 }
297 }
298
299 /* Helper for unshare_and_remap, called through walk_tree. */
300
301 static tree
unshare_and_remap_1(tree * tp,int * walk_subtrees,void * data)302 unshare_and_remap_1 (tree *tp, int *walk_subtrees, void *data)
303 {
304 tree *pair = (tree *) data;
305 if (*tp == pair[0])
306 {
307 *tp = unshare_expr (pair[1]);
308 *walk_subtrees = 0;
309 }
310 else if (IS_TYPE_OR_DECL_P (*tp))
311 *walk_subtrees = 0;
312 return NULL_TREE;
313 }
314
315 /* Return unshare_expr (X) with all occurrences of FROM
316 replaced with TO. */
317
318 static tree
unshare_and_remap(tree x,tree from,tree to)319 unshare_and_remap (tree x, tree from, tree to)
320 {
321 tree pair[2] = { from, to };
322 x = unshare_expr (x);
323 walk_tree (&x, unshare_and_remap_1, pair, NULL);
324 return x;
325 }
326
327 /* Convenience function for calling scan_omp_1_op on tree operands. */
328
329 static inline tree
scan_omp_op(tree * tp,omp_context * ctx)330 scan_omp_op (tree *tp, omp_context *ctx)
331 {
332 struct walk_stmt_info wi;
333
334 memset (&wi, 0, sizeof (wi));
335 wi.info = ctx;
336 wi.want_locations = true;
337
338 return walk_tree (tp, scan_omp_1_op, &wi, NULL);
339 }
340
341 static void lower_omp (gimple_seq *, omp_context *);
342 static tree lookup_decl_in_outer_ctx (tree, omp_context *);
343 static tree maybe_lookup_decl_in_outer_ctx (tree, omp_context *);
344
345 /* Return true if CTX is for an omp parallel. */
346
347 static inline bool
is_parallel_ctx(omp_context * ctx)348 is_parallel_ctx (omp_context *ctx)
349 {
350 return gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL;
351 }
352
353
354 /* Return true if CTX is for an omp task. */
355
356 static inline bool
is_task_ctx(omp_context * ctx)357 is_task_ctx (omp_context *ctx)
358 {
359 return gimple_code (ctx->stmt) == GIMPLE_OMP_TASK;
360 }
361
362
363 /* Return true if CTX is for an omp taskloop. */
364
365 static inline bool
is_taskloop_ctx(omp_context * ctx)366 is_taskloop_ctx (omp_context *ctx)
367 {
368 return gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
369 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP;
370 }
371
372
373 /* Return true if CTX is for a host omp teams. */
374
375 static inline bool
is_host_teams_ctx(omp_context * ctx)376 is_host_teams_ctx (omp_context *ctx)
377 {
378 return gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
379 && gimple_omp_teams_host (as_a <gomp_teams *> (ctx->stmt));
380 }
381
382 /* Return true if CTX is for an omp parallel or omp task or host omp teams
383 (the last one is strictly not a task region in OpenMP speak, but we
384 need to treat it similarly). */
385
386 static inline bool
is_taskreg_ctx(omp_context * ctx)387 is_taskreg_ctx (omp_context *ctx)
388 {
389 return is_parallel_ctx (ctx) || is_task_ctx (ctx) || is_host_teams_ctx (ctx);
390 }
391
392 /* Return true if EXPR is variable sized. */
393
394 static inline bool
is_variable_sized(const_tree expr)395 is_variable_sized (const_tree expr)
396 {
397 return !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
398 }
399
/* Look up variables in the decl and field maps.  The "maybe" forms
   return NULL_TREE when the variable has not been entered; the plain
   forms assume the variable has already been entered.  */
403
404 static inline tree
lookup_decl(tree var,omp_context * ctx)405 lookup_decl (tree var, omp_context *ctx)
406 {
407 tree *n = ctx->cb.decl_map->get (var);
408 return *n;
409 }
410
411 static inline tree
maybe_lookup_decl(const_tree var,omp_context * ctx)412 maybe_lookup_decl (const_tree var, omp_context *ctx)
413 {
414 tree *n = ctx->cb.decl_map->get (const_cast<tree> (var));
415 return n ? *n : NULL_TREE;
416 }
417
418 static inline tree
lookup_field(tree var,omp_context * ctx)419 lookup_field (tree var, omp_context *ctx)
420 {
421 splay_tree_node n;
422 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) var);
423 return (tree) n->value;
424 }
425
426 static inline tree
lookup_sfield(splay_tree_key key,omp_context * ctx)427 lookup_sfield (splay_tree_key key, omp_context *ctx)
428 {
429 splay_tree_node n;
430 n = splay_tree_lookup (ctx->sfield_map
431 ? ctx->sfield_map : ctx->field_map, key);
432 return (tree) n->value;
433 }
434
435 static inline tree
lookup_sfield(tree var,omp_context * ctx)436 lookup_sfield (tree var, omp_context *ctx)
437 {
438 return lookup_sfield ((splay_tree_key) var, ctx);
439 }
440
441 static inline tree
maybe_lookup_field(splay_tree_key key,omp_context * ctx)442 maybe_lookup_field (splay_tree_key key, omp_context *ctx)
443 {
444 splay_tree_node n;
445 n = splay_tree_lookup (ctx->field_map, key);
446 return n ? (tree) n->value : NULL_TREE;
447 }
448
449 static inline tree
maybe_lookup_field(tree var,omp_context * ctx)450 maybe_lookup_field (tree var, omp_context *ctx)
451 {
452 return maybe_lookup_field ((splay_tree_key) var, ctx);
453 }
454
455 /* Return true if DECL should be copied by pointer. SHARED_CTX is
456 the parallel context if DECL is to be shared. */
457
static bool
use_pointer_for_field (tree decl, omp_context *shared_ctx)
{
  /* Aggregates and atomics always go by reference; copying them in/out
     field-wise would be wrong or expensive.  */
  if (AGGREGATE_TYPE_P (TREE_TYPE (decl))
      || TYPE_ATOMIC (TREE_TYPE (decl)))
    return true;

  /* We can only use copy-in/copy-out semantics for shared variables
     when we know the value is not accessible from an outer scope.  */
  if (shared_ctx)
    {
      gcc_assert (!is_gimple_omp_oacc (shared_ctx->stmt));

      /* ??? Trivially accessible from anywhere.  But why would we even
	 be passing an address in this case?  Should we simply assert
	 this to be false, or should we have a cleanup pass that removes
	 these from the list of mappings?  */
      if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, shared_ctx)))
	return true;

      /* For variables with DECL_HAS_VALUE_EXPR_P set, we cannot tell
	 without analyzing the expression whether or not its location
	 is accessible to anyone else.  In the case of nested parallel
	 regions it certainly may be.  */
      if (TREE_CODE (decl) != RESULT_DECL && DECL_HAS_VALUE_EXPR_P (decl))
	return true;

      /* Do not use copy-in/copy-out for variables that have their
	 address taken.  */
      if (is_global_var (decl))
	{
	  /* For file scope vars, track whether we've seen them as
	     non-addressable initially and in that case, keep the same
	     answer for the duration of the pass, even when they are made
	     addressable later on e.g. through reduction expansion.  Global
	     variables which weren't addressable before the pass will not
	     have their privatized copies address taken.  See PR91216.  */
	  if (!TREE_ADDRESSABLE (decl))
	    {
	      if (!global_nonaddressable_vars)
		global_nonaddressable_vars = BITMAP_ALLOC (NULL);
	      bitmap_set_bit (global_nonaddressable_vars, DECL_UID (decl));
	    }
	  else if (!global_nonaddressable_vars
		   || !bitmap_bit_p (global_nonaddressable_vars,
				     DECL_UID (decl)))
	    return true;
	}
      else if (TREE_ADDRESSABLE (decl))
	return true;

      /* lower_send_shared_vars only uses copy-in, but not copy-out
	 for these.  */
      if (TREE_READONLY (decl)
	  || ((TREE_CODE (decl) == RESULT_DECL
	       || TREE_CODE (decl) == PARM_DECL)
	      && DECL_BY_REFERENCE (decl)))
	return false;

      /* Disallow copy-in/out in nested parallel if
	 decl is shared in outer parallel, otherwise
	 each thread could store the shared variable
	 in its own copy-in location, making the
	 variable no longer really shared.  */
      if (shared_ctx->is_nested)
	{
	  omp_context *up;

	  /* Find the innermost enclosing task region or offloaded target
	     that already has a mapping for DECL.  */
	  for (up = shared_ctx->outer; up; up = up->outer)
	    if ((is_taskreg_ctx (up)
		 || (gimple_code (up->stmt) == GIMPLE_OMP_TARGET
		     && is_gimple_omp_offloaded (up->stmt)))
		&& maybe_lookup_decl (decl, up))
	      break;

	  if (up)
	    {
	      tree c;

	      /* Verify DECL is actually named in a map/shared clause on
		 that enclosing construct.  */
	      if (gimple_code (up->stmt) == GIMPLE_OMP_TARGET)
		{
		  for (c = gimple_omp_target_clauses (up->stmt);
		       c; c = OMP_CLAUSE_CHAIN (c))
		    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
			&& OMP_CLAUSE_DECL (c) == decl)
		      break;
		}
	      else
		for (c = gimple_omp_taskreg_clauses (up->stmt);
		     c; c = OMP_CLAUSE_CHAIN (c))
		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
		      && OMP_CLAUSE_DECL (c) == decl)
		    break;

	      if (c)
		goto maybe_mark_addressable_and_ret;
	    }
	}

      /* For tasks avoid using copy-in/out.  As tasks can be
	 deferred or executed in different thread, when GOMP_task
	 returns, the task hasn't necessarily terminated.  */
      if (is_task_ctx (shared_ctx))
	{
	  tree outer;
	maybe_mark_addressable_and_ret:
	  outer = maybe_lookup_decl_in_outer_ctx (decl, shared_ctx);
	  if (is_gimple_reg (outer) && !omp_member_access_dummy_var (outer))
	    {
	      /* Taking address of OUTER in lower_send_shared_vars
		 might need regimplification of everything that uses the
		 variable.  */
	      if (!task_shared_vars)
		task_shared_vars = BITMAP_ALLOC (NULL);
	      bitmap_set_bit (task_shared_vars, DECL_UID (outer));
	      TREE_ADDRESSABLE (outer) = 1;
	    }
	  return true;
	}
    }

  return false;
}
581
582 /* Construct a new automatic decl similar to VAR. */
583
584 static tree
omp_copy_decl_2(tree var,tree name,tree type,omp_context * ctx)585 omp_copy_decl_2 (tree var, tree name, tree type, omp_context *ctx)
586 {
587 tree copy = copy_var_decl (var, name, type);
588
589 DECL_CONTEXT (copy) = current_function_decl;
590 DECL_CHAIN (copy) = ctx->block_vars;
591 /* If VAR is listed in task_shared_vars, it means it wasn't
592 originally addressable and is just because task needs to take
593 it's address. But we don't need to take address of privatizations
594 from that var. */
595 if (TREE_ADDRESSABLE (var)
596 && ((task_shared_vars
597 && bitmap_bit_p (task_shared_vars, DECL_UID (var)))
598 || (global_nonaddressable_vars
599 && bitmap_bit_p (global_nonaddressable_vars, DECL_UID (var)))))
600 TREE_ADDRESSABLE (copy) = 0;
601 ctx->block_vars = copy;
602
603 return copy;
604 }
605
606 static tree
omp_copy_decl_1(tree var,omp_context * ctx)607 omp_copy_decl_1 (tree var, omp_context *ctx)
608 {
609 return omp_copy_decl_2 (var, DECL_NAME (var), TREE_TYPE (var), ctx);
610 }
611
612 /* Build COMPONENT_REF and set TREE_THIS_VOLATILE and TREE_READONLY on it
613 as appropriate. */
614 static tree
omp_build_component_ref(tree obj,tree field)615 omp_build_component_ref (tree obj, tree field)
616 {
617 tree ret = build3 (COMPONENT_REF, TREE_TYPE (field), obj, field, NULL);
618 if (TREE_THIS_VOLATILE (field))
619 TREE_THIS_VOLATILE (ret) |= 1;
620 if (TREE_READONLY (field))
621 TREE_READONLY (ret) |= 1;
622 return ret;
623 }
624
625 /* Build tree nodes to access the field for VAR on the receiver side. */
626
627 static tree
build_receiver_ref(tree var,bool by_ref,omp_context * ctx)628 build_receiver_ref (tree var, bool by_ref, omp_context *ctx)
629 {
630 tree x, field = lookup_field (var, ctx);
631
632 /* If the receiver record type was remapped in the child function,
633 remap the field into the new record type. */
634 x = maybe_lookup_field (field, ctx);
635 if (x != NULL)
636 field = x;
637
638 x = build_simple_mem_ref (ctx->receiver_decl);
639 TREE_THIS_NOTRAP (x) = 1;
640 x = omp_build_component_ref (x, field);
641 if (by_ref)
642 {
643 x = build_simple_mem_ref (x);
644 TREE_THIS_NOTRAP (x) = 1;
645 }
646
647 return x;
648 }
649
650 /* Build tree nodes to access VAR in the scope outer to CTX. In the case
651 of a parallel, this is a component reference; for workshare constructs
652 this is some variable. */
653
static tree
build_outer_var_ref (tree var, omp_context *ctx,
		     enum omp_clause_code code = OMP_CLAUSE_ERROR)
{
  tree x;
  omp_context *outer = ctx->outer;
  /* Taskgroup contexts are transparent for data sharing; skip them.  */
  while (outer && gimple_code (outer->stmt) == GIMPLE_OMP_TASKGROUP)
    outer = outer->outer;

  if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
    x = var;
  else if (is_variable_sized (var))
    {
      /* For a VLA the value expr dereferences a pointer; build the outer
	 reference for that pointer and dereference it.  */
      x = TREE_OPERAND (DECL_VALUE_EXPR (var), 0);
      x = build_outer_var_ref (x, ctx, code);
      x = build_simple_mem_ref (x);
    }
  else if (is_taskreg_ctx (ctx))
    {
      bool by_ref = use_pointer_for_field (var, NULL);
      x = build_receiver_ref (var, by_ref, ctx);
    }
  else if ((gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
	    && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
	   || ctx->loop_p
	   || (code == OMP_CLAUSE_PRIVATE
	       && (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
		   || gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS
		   || gimple_code (ctx->stmt) == GIMPLE_OMP_SINGLE)))
    {
      /* #pragma omp simd isn't a worksharing construct, and can reference
	 even private vars in its linear etc. clauses.
	 Similarly for OMP_CLAUSE_PRIVATE with outer ref, that can refer
	 to private vars in all worksharing constructs.  */
      x = NULL_TREE;
      if (outer && is_taskreg_ctx (outer))
	x = lookup_decl (var, outer);
      else if (outer)
	x = maybe_lookup_decl_in_outer_ctx (var, ctx);
      if (x == NULL_TREE)
	x = var;
    }
  else if (code == OMP_CLAUSE_LASTPRIVATE && is_taskloop_ctx (ctx))
    {
      gcc_assert (outer);
      /* Taskloop fields are keyed by &DECL_UID rather than the decl itself
	 (see install_var_field's mask bit 8).  */
      splay_tree_node n
	= splay_tree_lookup (outer->field_map,
			     (splay_tree_key) &DECL_UID (var));
      if (n == NULL)
	{
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, outer)))
	    x = var;
	  else
	    x = lookup_decl (var, outer);
	}
      else
	{
	  tree field = (tree) n->value;
	  /* If the receiver record type was remapped in the child function,
	     remap the field into the new record type.  */
	  x = maybe_lookup_field (field, outer);
	  if (x != NULL)
	    field = x;

	  x = build_simple_mem_ref (outer->receiver_decl);
	  x = omp_build_component_ref (x, field);
	  if (use_pointer_for_field (var, outer))
	    x = build_simple_mem_ref (x);
	}
    }
  else if (outer)
    x = lookup_decl (var, outer);
  else if (omp_is_reference (var))
    /* This can happen with orphaned constructs.  If var is reference, it is
       possible it is shared and as such valid.  */
    x = var;
  else if (omp_member_access_dummy_var (var))
    x = var;
  else
    gcc_unreachable ();

  if (x == var)
    {
      /* For member-access dummy vars, substitute the outer mapping of the
	 underlying "this" parameter into the value expression.  */
      tree t = omp_member_access_dummy_var (var);
      if (t)
	{
	  x = DECL_VALUE_EXPR (var);
	  tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
	  if (o != t)
	    x = unshare_and_remap (x, t, o);
	  else
	    x = unshare_expr (x);
	}
    }

  if (omp_is_reference (var))
    x = build_simple_mem_ref (x);

  return x;
}
754
755 /* Build tree nodes to access the field for VAR on the sender side. */
756
757 static tree
build_sender_ref(splay_tree_key key,omp_context * ctx)758 build_sender_ref (splay_tree_key key, omp_context *ctx)
759 {
760 tree field = lookup_sfield (key, ctx);
761 return omp_build_component_ref (ctx->sender_decl, field);
762 }
763
764 static tree
build_sender_ref(tree var,omp_context * ctx)765 build_sender_ref (tree var, omp_context *ctx)
766 {
767 return build_sender_ref ((splay_tree_key) var, ctx);
768 }
769
/* Add a new field for VAR inside the structure CTX->SENDER_DECL.  The
   bits of MASK control where the field is installed and how it is keyed;
   see the comments in install_var_field.  */
772
static void
install_var_field (tree var, bool by_ref, int mask, omp_context *ctx)
{
  tree field, type, sfield = NULL_TREE;
  splay_tree_key key = (splay_tree_key) var;

  /* Bits in MASK control how the field is built and recorded:
       1  - record the field in field_map / record_type
       2  - record the field in sfield_map / srecord_type
       4  - VAR is an array communicated via pointer-to-pointer
       8  - key the maps by &DECL_UID (var) instead of VAR itself
       16 - key the maps by &DECL_NAME (var) and use the language's
	    array-data type for the field
       32 - with (mask & 3) == 1, suppress stripping the reference type
     NOTE(review): bit meanings inferred from the uses below; confirm
     against the callers before relying on them.  */
  if ((mask & 16) != 0)
    {
      key = (splay_tree_key) &DECL_NAME (var);
      gcc_checking_assert (key != (splay_tree_key) var);
    }
  if ((mask & 8) != 0)
    {
      key = (splay_tree_key) &DECL_UID (var);
      gcc_checking_assert (key != (splay_tree_key) var);
    }
  gcc_assert ((mask & 1) == 0
	      || !splay_tree_lookup (ctx->field_map, key));
  gcc_assert ((mask & 2) == 0 || !ctx->sfield_map
	      || !splay_tree_lookup (ctx->sfield_map, key));
  gcc_assert ((mask & 3) == 3
	      || !is_gimple_omp_oacc (ctx->stmt));

  type = TREE_TYPE (var);
  if ((mask & 16) != 0)
    type = lang_hooks.decls.omp_array_data (var, true);

  /* Prevent redeclaring the var in the split-off function with a restrict
     pointer type.  Note that we only clear type itself, restrict qualifiers in
     the pointed-to type will be ignored by points-to analysis.  */
  if (POINTER_TYPE_P (type)
      && TYPE_RESTRICT (type))
    type = build_qualified_type (type, TYPE_QUALS (type) & ~TYPE_QUAL_RESTRICT);

  if (mask & 4)
    {
      gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
      type = build_pointer_type (build_pointer_type (type));
    }
  else if (by_ref)
    type = build_pointer_type (type);
  else if ((mask & (32 | 3)) == 1 && omp_is_reference (var))
    type = TREE_TYPE (type);

  field = build_decl (DECL_SOURCE_LOCATION (var),
		      FIELD_DECL, DECL_NAME (var), type);

  /* Remember what variable this field was created for.  This does have a
     side effect of making dwarf2out ignore this member, so for helpful
     debugging we clear it later in delete_omp_context.  */
  DECL_ABSTRACT_ORIGIN (field) = var;
  if ((mask & 16) == 0 && type == TREE_TYPE (var))
    {
      SET_DECL_ALIGN (field, DECL_ALIGN (var));
      DECL_USER_ALIGN (field) = DECL_USER_ALIGN (var);
      TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (var);
    }
  else
    SET_DECL_ALIGN (field, TYPE_ALIGN (type));

  if ((mask & 3) == 3)
    {
      insert_field_into_struct (ctx->record_type, field);
      if (ctx->srecord_type)
	{
	  sfield = build_decl (DECL_SOURCE_LOCATION (var),
			       FIELD_DECL, DECL_NAME (var), type);
	  DECL_ABSTRACT_ORIGIN (sfield) = var;
	  SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
	  DECL_USER_ALIGN (sfield) = DECL_USER_ALIGN (field);
	  TREE_THIS_VOLATILE (sfield) = TREE_THIS_VOLATILE (field);
	  insert_field_into_struct (ctx->srecord_type, sfield);
	}
    }
  else
    {
      if (ctx->srecord_type == NULL_TREE)
	{
	  tree t;

	  /* First field that goes into only one of the two records:
	     build srecord_type lazily, mirroring every field created
	     in record_type so far.  */
	  ctx->srecord_type = lang_hooks.types.make_type (RECORD_TYPE);
	  ctx->sfield_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
	  for (t = TYPE_FIELDS (ctx->record_type); t ; t = TREE_CHAIN (t))
	    {
	      sfield = build_decl (DECL_SOURCE_LOCATION (t),
				   FIELD_DECL, DECL_NAME (t), TREE_TYPE (t));
	      DECL_ABSTRACT_ORIGIN (sfield) = DECL_ABSTRACT_ORIGIN (t);
	      insert_field_into_struct (ctx->srecord_type, sfield);
	      splay_tree_insert (ctx->sfield_map,
				 (splay_tree_key) DECL_ABSTRACT_ORIGIN (t),
				 (splay_tree_value) sfield);
	    }
	}
      sfield = field;
      insert_field_into_struct ((mask & 1) ? ctx->record_type
				: ctx->srecord_type, field);
    }

  if (mask & 1)
    splay_tree_insert (ctx->field_map, key, (splay_tree_value) field);
  if ((mask & 2) && ctx->sfield_map)
    splay_tree_insert (ctx->sfield_map, key, (splay_tree_value) sfield);
}
876
877 static tree
install_var_local(tree var,omp_context * ctx)878 install_var_local (tree var, omp_context *ctx)
879 {
880 tree new_var = omp_copy_decl_1 (var, ctx);
881 insert_decl_map (&ctx->cb, var, new_var);
882 return new_var;
883 }
884
885 /* Adjust the replacement for DECL in CTX for the new context. This means
886 copying the DECL_VALUE_EXPR, and fixing up the type. */
887
888 static void
fixup_remapped_decl(tree decl,omp_context * ctx,bool private_debug)889 fixup_remapped_decl (tree decl, omp_context *ctx, bool private_debug)
890 {
891 tree new_decl, size;
892
893 new_decl = lookup_decl (decl, ctx);
894
895 TREE_TYPE (new_decl) = remap_type (TREE_TYPE (decl), &ctx->cb);
896
897 if ((!TREE_CONSTANT (DECL_SIZE (new_decl)) || private_debug)
898 && DECL_HAS_VALUE_EXPR_P (decl))
899 {
900 tree ve = DECL_VALUE_EXPR (decl);
901 walk_tree (&ve, copy_tree_body_r, &ctx->cb, NULL);
902 SET_DECL_VALUE_EXPR (new_decl, ve);
903 DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
904 }
905
906 if (!TREE_CONSTANT (DECL_SIZE (new_decl)))
907 {
908 size = remap_decl (DECL_SIZE (decl), &ctx->cb);
909 if (size == error_mark_node)
910 size = TYPE_SIZE (TREE_TYPE (new_decl));
911 DECL_SIZE (new_decl) = size;
912
913 size = remap_decl (DECL_SIZE_UNIT (decl), &ctx->cb);
914 if (size == error_mark_node)
915 size = TYPE_SIZE_UNIT (TREE_TYPE (new_decl));
916 DECL_SIZE_UNIT (new_decl) = size;
917 }
918 }
919
920 /* The callback for remap_decl. Search all containing contexts for a
921 mapping of the variable; this avoids having to duplicate the splay
922 tree ahead of time. We know a mapping doesn't already exist in the
923 given context. Create new mappings to implement default semantics. */
924
static tree
omp_copy_decl (tree var, copy_body_data *cb)
{
  /* CB is really the omp_context; see the comment on omp_context::cb.  */
  omp_context *ctx = (omp_context *) cb;
  tree new_var;

  if (TREE_CODE (var) == LABEL_DECL)
    {
      /* Forced and non-local labels must not be duplicated.  */
      if (FORCED_LABEL (var) || DECL_NONLOCAL (var))
	return var;
      new_var = create_artificial_label (DECL_SOURCE_LOCATION (var));
      DECL_CONTEXT (new_var) = current_function_decl;
      insert_decl_map (&ctx->cb, var, new_var);
      return new_var;
    }

  /* Walk outward to the innermost enclosing parallel/task/host-teams
     context, returning any mapping recorded on the way.  */
  while (!is_taskreg_ctx (ctx))
    {
      ctx = ctx->outer;
      if (ctx == NULL)
	return var;
      new_var = maybe_lookup_decl (var, ctx);
      if (new_var)
	return new_var;
    }

  /* Globals and decls belonging to other functions are not remapped.  */
  if (is_global_var (var) || decl_function_context (var) != ctx->cb.src_fn)
    return var;

  /* A local of the source function with no mapping anywhere: the caller
     guarantees no mapping exists in the given context, so this is an
     error sentinel for the default-semantics machinery.  */
  return error_mark_node;
}
956
957 /* Create a new context, with OUTER_CTX being the surrounding context. */
958
959 static omp_context *
new_omp_context(gimple * stmt,omp_context * outer_ctx)960 new_omp_context (gimple *stmt, omp_context *outer_ctx)
961 {
962 omp_context *ctx = XCNEW (omp_context);
963
964 splay_tree_insert (all_contexts, (splay_tree_key) stmt,
965 (splay_tree_value) ctx);
966 ctx->stmt = stmt;
967
968 if (outer_ctx)
969 {
970 ctx->outer = outer_ctx;
971 ctx->cb = outer_ctx->cb;
972 ctx->cb.block = NULL;
973 ctx->depth = outer_ctx->depth + 1;
974 }
975 else
976 {
977 ctx->cb.src_fn = current_function_decl;
978 ctx->cb.dst_fn = current_function_decl;
979 ctx->cb.src_node = cgraph_node::get (current_function_decl);
980 gcc_checking_assert (ctx->cb.src_node);
981 ctx->cb.dst_node = ctx->cb.src_node;
982 ctx->cb.src_cfun = cfun;
983 ctx->cb.copy_decl = omp_copy_decl;
984 ctx->cb.eh_lp_nr = 0;
985 ctx->cb.transform_call_graph_edges = CB_CGE_MOVE;
986 ctx->cb.adjust_array_error_bounds = true;
987 ctx->cb.dont_remap_vla_if_no_change = true;
988 ctx->depth = 1;
989 }
990
991 ctx->cb.decl_map = new hash_map<tree, tree>;
992
993 return ctx;
994 }
995
996 static gimple_seq maybe_catch_exception (gimple_seq);
997
998 /* Finalize task copyfn. */
999
1000 static void
finalize_task_copyfn(gomp_task * task_stmt)1001 finalize_task_copyfn (gomp_task *task_stmt)
1002 {
1003 struct function *child_cfun;
1004 tree child_fn;
1005 gimple_seq seq = NULL, new_seq;
1006 gbind *bind;
1007
1008 child_fn = gimple_omp_task_copy_fn (task_stmt);
1009 if (child_fn == NULL_TREE)
1010 return;
1011
1012 child_cfun = DECL_STRUCT_FUNCTION (child_fn);
1013 DECL_STRUCT_FUNCTION (child_fn)->curr_properties = cfun->curr_properties;
1014
1015 push_cfun (child_cfun);
1016 bind = gimplify_body (child_fn, false);
1017 gimple_seq_add_stmt (&seq, bind);
1018 new_seq = maybe_catch_exception (seq);
1019 if (new_seq != seq)
1020 {
1021 bind = gimple_build_bind (NULL, new_seq, NULL);
1022 seq = NULL;
1023 gimple_seq_add_stmt (&seq, bind);
1024 }
1025 gimple_set_body (child_fn, seq);
1026 pop_cfun ();
1027
1028 /* Inform the callgraph about the new function. */
1029 cgraph_node *node = cgraph_node::get_create (child_fn);
1030 node->parallelized_function = 1;
1031 cgraph_node::add_new_function (child_fn, false);
1032 }
1033
1034 /* Destroy a omp_context data structures. Called through the splay tree
1035 value delete callback. */
1036
1037 static void
delete_omp_context(splay_tree_value value)1038 delete_omp_context (splay_tree_value value)
1039 {
1040 omp_context *ctx = (omp_context *) value;
1041
1042 delete ctx->cb.decl_map;
1043
1044 if (ctx->field_map)
1045 splay_tree_delete (ctx->field_map);
1046 if (ctx->sfield_map)
1047 splay_tree_delete (ctx->sfield_map);
1048
1049 /* We hijacked DECL_ABSTRACT_ORIGIN earlier. We need to clear it before
1050 it produces corrupt debug information. */
1051 if (ctx->record_type)
1052 {
1053 tree t;
1054 for (t = TYPE_FIELDS (ctx->record_type); t ; t = DECL_CHAIN (t))
1055 DECL_ABSTRACT_ORIGIN (t) = NULL;
1056 }
1057 if (ctx->srecord_type)
1058 {
1059 tree t;
1060 for (t = TYPE_FIELDS (ctx->srecord_type); t ; t = DECL_CHAIN (t))
1061 DECL_ABSTRACT_ORIGIN (t) = NULL;
1062 }
1063
1064 if (is_task_ctx (ctx))
1065 finalize_task_copyfn (as_a <gomp_task *> (ctx->stmt));
1066
1067 if (ctx->task_reduction_map)
1068 {
1069 ctx->task_reductions.release ();
1070 delete ctx->task_reduction_map;
1071 }
1072
1073 delete ctx->lastprivate_conditional_map;
1074 delete ctx->allocate_map;
1075
1076 XDELETE (ctx);
1077 }
1078
/* Fix up RECEIVER_DECL with a type that has been remapped to the child
   context.  */

static void
fixup_child_record_type (omp_context *ctx)
{
  tree f, type = ctx->record_type;

  if (!ctx->receiver_decl)
    return;
  /* ??? It isn't sufficient to just call remap_type here, because
     variably_modified_type_p doesn't work the way we expect for
     record types.  Testing each field for whether it needs remapping
     and creating a new record by hand works, however.  */
  for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
    if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
      break;
  if (f)
    {
      /* At least one field has a variably modified type: build a fresh
	 RECORD_TYPE with the same name, remapping every field.  */
      tree name, new_fields = NULL;

      type = lang_hooks.types.make_type (RECORD_TYPE);
      name = DECL_NAME (TYPE_NAME (ctx->record_type));
      name = build_decl (DECL_SOURCE_LOCATION (ctx->receiver_decl),
			 TYPE_DECL, name, type);
      TYPE_NAME (type) = name;

      for (f = TYPE_FIELDS (ctx->record_type); f ; f = DECL_CHAIN (f))
	{
	  /* Copy the field, remap its type, and remap any trees in its
	     size/offset expressions into the child function.  */
	  tree new_f = copy_node (f);
	  DECL_CONTEXT (new_f) = type;
	  TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &ctx->cb);
	  DECL_CHAIN (new_f) = new_fields;
	  walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &ctx->cb, NULL);
	  walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r,
		     &ctx->cb, NULL);
	  walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
		     &ctx->cb, NULL);
	  new_fields = new_f;

	  /* Arrange to be able to look up the receiver field
	     given the sender field.  */
	  splay_tree_insert (ctx->field_map, (splay_tree_key) f,
			     (splay_tree_value) new_f);
	}
      /* Fields were prepended above, so restore the original order.  */
      TYPE_FIELDS (type) = nreverse (new_fields);
      layout_type (type);
    }

  /* In a target region we never modify any of the pointers in *.omp_data_i,
     so attempt to help the optimizers.  */
  if (is_gimple_omp_offloaded (ctx->stmt))
    type = build_qualified_type (type, TYPE_QUAL_CONST);

  /* The receiver is a restrict-qualified reference to the (possibly
     rebuilt) record type.  */
  TREE_TYPE (ctx->receiver_decl)
    = build_qualified_type (build_reference_type (type), TYPE_QUAL_RESTRICT);
}
1136
/* Instantiate decls as necessary in CTX to satisfy the data sharing
   specified by CLAUSES.  */

static void
scan_sharing_clauses (tree clauses, omp_context *ctx)
{
  tree c, decl;
  bool scan_array_reductions = false;

  /* First pass: populate CTX->allocate_map with every decl that has an
     'allocate' clause whose allocator is not known to be
     omp_default_mem_alloc (in which case lowering needs to do nothing
     special).  Later clause handling may remove entries again.  */
  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_ALLOCATE
	&& (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c) == NULL_TREE
	    /* omp_default_mem_alloc is 1 */
	    || !integer_onep (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c))))
      {
	if (ctx->allocate_map == NULL)
	  ctx->allocate_map = new hash_map<tree, tree>;
	/* integer_zero_node stands for "allocate clause without an
	   explicit allocator".  */
	ctx->allocate_map->put (OMP_CLAUSE_DECL (c),
				OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)
				? OMP_CLAUSE_ALLOCATE_ALLOCATOR (c)
				: integer_zero_node);
      }

  /* Second pass: create record fields and local copies as each clause
     requires.  install_var_field's mask argument selects which of the
     sender/receiver record fields to create.  */
  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      bool by_ref;

      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_PRIVATE:
	  decl = OMP_CLAUSE_DECL (c);
	  if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
	    goto do_private;
	  else if (!is_variable_sized (decl))
	    install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_SHARED:
	  decl = OMP_CLAUSE_DECL (c);
	  /* A shared variable is not privatized, so any allocate clause
	     for it is irrelevant here.  */
	  if (ctx->allocate_map && ctx->allocate_map->get (decl))
	    ctx->allocate_map->remove (decl);
	  /* Ignore shared directives in teams construct inside of
	     target construct.  */
	  if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
	      && !is_host_teams_ctx (ctx))
	    {
	      /* Global variables don't need to be copied,
		 the receiver side will use them directly.  */
	      tree odecl = maybe_lookup_decl_in_outer_ctx (decl, ctx);
	      if (is_global_var (odecl))
		break;
	      insert_decl_map (&ctx->cb, decl, odecl);
	      break;
	    }
	  gcc_assert (is_taskreg_ctx (ctx));
	  gcc_assert (!COMPLETE_TYPE_P (TREE_TYPE (decl))
		      || !is_variable_sized (decl));
	  /* Global variables don't need to be copied,
	     the receiver side will use them directly.  */
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
	    break;
	  if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	    {
	      use_pointer_for_field (decl, ctx);
	      break;
	    }
	  by_ref = use_pointer_for_field (decl, NULL);
	  if ((! TREE_READONLY (decl) && !OMP_CLAUSE_SHARED_READONLY (c))
	      || TREE_ADDRESSABLE (decl)
	      || by_ref
	      || omp_is_reference (decl))
	    {
	      by_ref = use_pointer_for_field (decl, ctx);
	      install_var_field (decl, by_ref, 3, ctx);
	      install_var_local (decl, ctx);
	      break;
	    }
	  /* We don't need to copy const scalar vars back.  */
	  OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_FIRSTPRIVATE);
	  goto do_private;

	case OMP_CLAUSE_REDUCTION:
	  /* Collect 'reduction' clauses on OpenACC compute construct.  */
	  if (is_gimple_omp_oacc (ctx->stmt)
	      && is_gimple_omp_offloaded (ctx->stmt))
	    {
	      /* No 'reduction' clauses on OpenACC 'kernels'.  */
	      gcc_checking_assert (!is_oacc_kernels (ctx));
	      /* Likewise, on OpenACC 'kernels' decomposed parts.  */
	      gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx));

	      ctx->local_reduction_clauses
		= tree_cons (NULL, c, ctx->local_reduction_clauses);
	    }
	  /* FALLTHRU */

	case OMP_CLAUSE_IN_REDUCTION:
	  decl = OMP_CLAUSE_DECL (c);
	  if (ctx->allocate_map
	      && ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
		   && (OMP_CLAUSE_REDUCTION_INSCAN (c)
		       || OMP_CLAUSE_REDUCTION_TASK (c)))
		  || is_task_ctx (ctx)))
	    {
	      /* For now.  */
	      if (ctx->allocate_map->get (decl))
		ctx->allocate_map->remove (decl);
	    }
	  /* Array-section reductions come in as a MEM_REF; strip down
	     to the underlying base decl.  */
	  if (TREE_CODE (decl) == MEM_REF)
	    {
	      tree t = TREE_OPERAND (decl, 0);
	      if (TREE_CODE (t) == POINTER_PLUS_EXPR)
		t = TREE_OPERAND (t, 0);
	      if (TREE_CODE (t) == INDIRECT_REF
		  || TREE_CODE (t) == ADDR_EXPR)
		t = TREE_OPERAND (t, 0);
	      install_var_local (t, ctx);
	      if (is_taskreg_ctx (ctx)
		  && (!is_global_var (maybe_lookup_decl_in_outer_ctx (t, ctx))
		      || (is_task_ctx (ctx)
			  && (TREE_CODE (TREE_TYPE (t)) == POINTER_TYPE
			      || (TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE
				  && (TREE_CODE (TREE_TYPE (TREE_TYPE (t)))
				      == POINTER_TYPE)))))
		  && !is_variable_sized (t)
		  && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
		      || (!OMP_CLAUSE_REDUCTION_TASK (c)
			  && !is_task_ctx (ctx))))
		{
		  by_ref = use_pointer_for_field (t, NULL);
		  if (is_task_ctx (ctx)
		      && TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE
		      && TREE_CODE (TREE_TYPE (TREE_TYPE (t))) == POINTER_TYPE)
		    {
		      install_var_field (t, false, 1, ctx);
		      install_var_field (t, by_ref, 2, ctx);
		    }
		  else
		    install_var_field (t, by_ref, 3, ctx);
		}
	      break;
	    }
	  if (is_task_ctx (ctx)
	      || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
		  && OMP_CLAUSE_REDUCTION_TASK (c)
		  && is_parallel_ctx (ctx)))
	    {
	      /* Global variables don't need to be copied,
		 the receiver side will use them directly.  */
	      if (!is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
		{
		  by_ref = use_pointer_for_field (decl, ctx);
		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION)
		    install_var_field (decl, by_ref, 3, ctx);
		}
	      install_var_local (decl, ctx);
	      break;
	    }
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	      && OMP_CLAUSE_REDUCTION_TASK (c))
	    {
	      install_var_local (decl, ctx);
	      break;
	    }
	  goto do_private;

	case OMP_CLAUSE_LASTPRIVATE:
	  /* Let the corresponding firstprivate clause create
	     the variable.  */
	  if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
	    break;
	  /* FALLTHRU */

	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_LINEAR:
	  decl = OMP_CLAUSE_DECL (c);
	do_private:
	  if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
	       || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
	      && is_gimple_omp_offloaded (ctx->stmt))
	    {
	      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
		install_var_field (decl, !omp_is_reference (decl), 3, ctx);
	      else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
		install_var_field (decl, true, 3, ctx);
	      else
		install_var_field (decl, false, 3, ctx);
	    }
	  if (is_variable_sized (decl))
	    {
	      if (is_task_ctx (ctx))
		{
		  if (ctx->allocate_map
		      && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
		    {
		      /* For now.  */
		      if (ctx->allocate_map->get (decl))
			ctx->allocate_map->remove (decl);
		    }
		  install_var_field (decl, false, 1, ctx);
		}
	      break;
	    }
	  else if (is_taskreg_ctx (ctx))
	    {
	      bool global
		= is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx));
	      by_ref = use_pointer_for_field (decl, NULL);

	      if (is_task_ctx (ctx)
		  && (global || by_ref || omp_is_reference (decl)))
		{
		  if (ctx->allocate_map
		      && ctx->allocate_map->get (decl))
		    install_var_field (decl, by_ref, 32 | 1, ctx);
		  else
		    install_var_field (decl, false, 1, ctx);
		  if (!global)
		    install_var_field (decl, by_ref, 2, ctx);
		}
	      else if (!global)
		install_var_field (decl, by_ref, 3, ctx);
	    }
	  install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_USE_DEVICE_PTR:
	case OMP_CLAUSE_USE_DEVICE_ADDR:
	  decl = OMP_CLAUSE_DECL (c);

	  /* Fortran array descriptors.  */
	  if (lang_hooks.decls.omp_array_data (decl, true))
	    install_var_field (decl, false, 19, ctx);
	  else if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
		    && !omp_is_reference (decl)
		    && !omp_is_allocatable_or_ptr (decl))
		   || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	    install_var_field (decl, true, 11, ctx);
	  else
	    install_var_field (decl, false, 11, ctx);
	  if (DECL_SIZE (decl)
	      && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
	    {
	      /* Variable-sized decl: its DECL_VALUE_EXPR dereferences a
		 pointer decl that must be made local too.  */
	      tree decl2 = DECL_VALUE_EXPR (decl);
	      gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
	      decl2 = TREE_OPERAND (decl2, 0);
	      gcc_assert (DECL_P (decl2));
	      install_var_local (decl2, ctx);
	    }
	  install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_IS_DEVICE_PTR:
	  decl = OMP_CLAUSE_DECL (c);
	  goto do_private;

	case OMP_CLAUSE__LOOPTEMP_:
	case OMP_CLAUSE__REDUCTEMP_:
	  gcc_assert (is_taskreg_ctx (ctx));
	  decl = OMP_CLAUSE_DECL (c);
	  install_var_field (decl, false, 3, ctx);
	  install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_COPYIN:
	  decl = OMP_CLAUSE_DECL (c);
	  by_ref = use_pointer_for_field (decl, NULL);
	  install_var_field (decl, by_ref, 3, ctx);
	  break;

	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_DIST_SCHEDULE:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE_PRIORITY:
	case OMP_CLAUSE_GRAINSIZE:
	case OMP_CLAUSE_NUM_TASKS:
	case OMP_CLAUSE_NUM_GANGS:
	case OMP_CLAUSE_NUM_WORKERS:
	case OMP_CLAUSE_VECTOR_LENGTH:
	case OMP_CLAUSE_DETACH:
	  /* These carry an expression evaluated in the enclosing
	     context, so scan the operand there.  */
	  if (ctx->outer)
	    scan_omp_op (&OMP_CLAUSE_OPERAND (c, 0), ctx->outer);
	  break;

	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	case OMP_CLAUSE_MAP:
	  if (ctx->outer)
	    scan_omp_op (&OMP_CLAUSE_SIZE (c), ctx->outer);
	  decl = OMP_CLAUSE_DECL (c);
	  /* Global variables with "omp declare target" attribute
	     don't need to be copied, the receiver side will use them
	     directly.  However, global variables with "omp declare target link"
	     attribute need to be copied.  Or when ALWAYS modifier is used.  */
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && DECL_P (decl)
	      && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
		   && (OMP_CLAUSE_MAP_KIND (c)
		       != GOMP_MAP_FIRSTPRIVATE_REFERENCE)
		   && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ATTACH
		   && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_DETACH)
		  || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	      && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_TO
	      && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_FROM
	      && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_TOFROM
	      && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_TO_PSET
	      && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
	      && varpool_node::get_create (decl)->offloadable
	      && !lookup_attribute ("omp declare target link",
				    DECL_ATTRIBUTES (decl)))
	    break;
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER)
	    {
	      /* Ignore GOMP_MAP_POINTER kind for arrays in regions that are
		 not offloaded; there is nothing to map for those.  */
	      if (!is_gimple_omp_offloaded (ctx->stmt)
		  && !POINTER_TYPE_P (TREE_TYPE (decl))
		  && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
		break;
	    }
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && DECL_P (decl)
	      && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
		  || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)
	      && is_omp_target (ctx->stmt))
	    {
	      /* If this is an offloaded region, an attach operation should
		 only exist when the pointer variable is mapped in a prior
		 clause.  */
	      if (is_gimple_omp_offloaded (ctx->stmt))
		gcc_assert
		  (maybe_lookup_decl (decl, ctx)
		   || (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
		       && lookup_attribute ("omp declare target",
					    DECL_ATTRIBUTES (decl))));

	      /* By itself, attach/detach is generated as part of pointer
		 variable mapping and should not create new variables in the
		 offloaded region, however sender refs for it must be created
		 for its address to be passed to the runtime.  */
	      tree field
		= build_decl (OMP_CLAUSE_LOCATION (c),
			      FIELD_DECL, NULL_TREE, ptr_type_node);
	      SET_DECL_ALIGN (field, TYPE_ALIGN (ptr_type_node));
	      insert_field_into_struct (ctx->record_type, field);
	      /* To not clash with a map of the pointer variable itself,
		 attach/detach maps have their field looked up by the *clause*
		 tree expression, not the decl.  */
	      gcc_assert (!splay_tree_lookup (ctx->field_map,
					      (splay_tree_key) c));
	      splay_tree_insert (ctx->field_map, (splay_tree_key) c,
				 (splay_tree_value) field);
	      break;
	    }
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
		  || (OMP_CLAUSE_MAP_KIND (c)
		      == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
	    {
	      if (TREE_CODE (decl) == COMPONENT_REF
		  || (TREE_CODE (decl) == INDIRECT_REF
		      && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
		      && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
			  == REFERENCE_TYPE)))
		break;
	      if (DECL_SIZE (decl)
		  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  install_var_local (decl2, ctx);
		}
	      install_var_local (decl, ctx);
	      break;
	    }
	  if (DECL_P (decl))
	    {
	      if (DECL_SIZE (decl)
		  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  install_var_field (decl2, true, 3, ctx);
		  install_var_local (decl2, ctx);
		  install_var_local (decl, ctx);
		}
	      else
		{
		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
		      && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
		      && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
		      && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
		    install_var_field (decl, true, 7, ctx);
		  else
		    install_var_field (decl, true, 3, ctx);
		  if (is_gimple_omp_offloaded (ctx->stmt)
		      && !OMP_CLAUSE_MAP_IN_REDUCTION (c))
		    install_var_local (decl, ctx);
		}
	    }
	  else
	    {
	      /* Non-decl map operand (e.g. an array section).  If it is
		 immediately followed by a zero-sized GOMP_MAP_POINTER of
		 its base, mark the pair as a zero-bias array section;
		 otherwise create a bare sender field for the clause.  */
	      tree base = get_base_address (decl);
	      tree nc = OMP_CLAUSE_CHAIN (c);
	      if (DECL_P (base)
		  && nc != NULL_TREE
		  && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
		  && OMP_CLAUSE_DECL (nc) == base
		  && OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_POINTER
		  && integer_zerop (OMP_CLAUSE_SIZE (nc)))
		{
		  OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c) = 1;
		  OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (nc) = 1;
		}
	      else
		{
		  if (ctx->outer)
		    {
		      scan_omp_op (&OMP_CLAUSE_DECL (c), ctx->outer);
		      decl = OMP_CLAUSE_DECL (c);
		    }
		  gcc_assert (!splay_tree_lookup (ctx->field_map,
						  (splay_tree_key) decl));
		  tree field
		    = build_decl (OMP_CLAUSE_LOCATION (c),
				  FIELD_DECL, NULL_TREE, ptr_type_node);
		  SET_DECL_ALIGN (field, TYPE_ALIGN (ptr_type_node));
		  insert_field_into_struct (ctx->record_type, field);
		  splay_tree_insert (ctx->field_map, (splay_tree_key) decl,
				     (splay_tree_value) field);
		}
	    }
	  break;

	case OMP_CLAUSE_ORDER:
	  ctx->order_concurrent = true;
	  break;

	case OMP_CLAUSE_BIND:
	  ctx->loop_p = true;
	  break;

	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE_SIMDLEN:
	case OMP_CLAUSE_THREADS:
	case OMP_CLAUSE_SIMD:
	case OMP_CLAUSE_NOGROUP:
	case OMP_CLAUSE_DEFAULTMAP:
	case OMP_CLAUSE_ASYNC:
	case OMP_CLAUSE_WAIT:
	case OMP_CLAUSE_GANG:
	case OMP_CLAUSE_WORKER:
	case OMP_CLAUSE_VECTOR:
	case OMP_CLAUSE_INDEPENDENT:
	case OMP_CLAUSE_AUTO:
	case OMP_CLAUSE_SEQ:
	case OMP_CLAUSE_TILE:
	case OMP_CLAUSE__SIMT_:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_NONTEMPORAL:
	case OMP_CLAUSE_IF_PRESENT:
	case OMP_CLAUSE_FINALIZE:
	case OMP_CLAUSE_TASK_REDUCTION:
	case OMP_CLAUSE_ALLOCATE:
	  /* Nothing to instantiate for these in this pass.  */
	  break;

	case OMP_CLAUSE_ALIGNED:
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_global_var (decl)
	      && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	    install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE__CONDTEMP_:
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_parallel_ctx (ctx))
	    {
	      install_var_field (decl, false, 3, ctx);
	      install_var_local (decl, ctx);
	    }
	  else if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
		   && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD
		   && !OMP_CLAUSE__CONDTEMP__ITER (c))
	    install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE__CACHE_:
	default:
	  gcc_unreachable ();
	}
    }

  /* Third pass: now that all fields exist, fix up remapped decls and
     note which clauses carry nested GIMPLE sequences to scan.  */
  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_LASTPRIVATE:
	  /* Let the corresponding firstprivate clause create
	     the variable.  */
	  if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
	    scan_array_reductions = true;
	  if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
	    break;
	  /* FALLTHRU */

	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_LINEAR:
	case OMP_CLAUSE_IS_DEVICE_PTR:
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_variable_sized (decl))
	    {
	      if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
		   || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
		  && is_gimple_omp_offloaded (ctx->stmt))
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  install_var_local (decl2, ctx);
		  fixup_remapped_decl (decl2, ctx, false);
		}
	      install_var_local (decl, ctx);
	    }
	  fixup_remapped_decl (decl, ctx,
			       OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
			       && OMP_CLAUSE_PRIVATE_DEBUG (c));
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
	      && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
	    scan_array_reductions = true;
	  break;

	case OMP_CLAUSE_REDUCTION:
	case OMP_CLAUSE_IN_REDUCTION:
	  decl = OMP_CLAUSE_DECL (c);
	  if (TREE_CODE (decl) != MEM_REF)
	    {
	      if (is_variable_sized (decl))
		install_var_local (decl, ctx);
	      fixup_remapped_decl (decl, ctx, false);
	    }
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	    scan_array_reductions = true;
	  break;

	case OMP_CLAUSE_TASK_REDUCTION:
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	    scan_array_reductions = true;
	  break;

	case OMP_CLAUSE_SHARED:
	  /* Ignore shared directives in teams construct inside of
	     target construct.  */
	  if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
	      && !is_host_teams_ctx (ctx))
	    break;
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
	    break;
	  if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	    {
	      if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl,
								 ctx->outer)))
		break;
	      bool by_ref = use_pointer_for_field (decl, ctx);
	      install_var_field (decl, by_ref, 11, ctx);
	      break;
	    }
	  fixup_remapped_decl (decl, ctx, false);
	  break;

	case OMP_CLAUSE_MAP:
	  if (!is_gimple_omp_offloaded (ctx->stmt))
	    break;
	  decl = OMP_CLAUSE_DECL (c);
	  if (DECL_P (decl)
	      && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
		   && (OMP_CLAUSE_MAP_KIND (c)
		       != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
		  || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	      && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
	      && varpool_node::get_create (decl)->offloadable)
	    break;
	  if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
	       || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)
	      && is_omp_target (ctx->stmt)
	      && !is_gimple_omp_offloaded (ctx->stmt))
	    break;
	  if (DECL_P (decl))
	    {
	      if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
		   || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
		  && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
		  && !COMPLETE_TYPE_P (TREE_TYPE (decl)))
		{
		  tree new_decl = lookup_decl (decl, ctx);
		  TREE_TYPE (new_decl)
		    = remap_type (TREE_TYPE (decl), &ctx->cb);
		}
	      else if (DECL_SIZE (decl)
		       && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  fixup_remapped_decl (decl2, ctx, false);
		  fixup_remapped_decl (decl, ctx, true);
		}
	      else
		fixup_remapped_decl (decl, ctx, false);
	    }
	  break;

	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_DIST_SCHEDULE:
	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE_SIMDLEN:
	case OMP_CLAUSE_ALIGNED:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE_DETACH:
	case OMP_CLAUSE_ALLOCATE:
	case OMP_CLAUSE__LOOPTEMP_:
	case OMP_CLAUSE__REDUCTEMP_:
	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	case OMP_CLAUSE_PRIORITY:
	case OMP_CLAUSE_GRAINSIZE:
	case OMP_CLAUSE_NUM_TASKS:
	case OMP_CLAUSE_THREADS:
	case OMP_CLAUSE_SIMD:
	case OMP_CLAUSE_NOGROUP:
	case OMP_CLAUSE_DEFAULTMAP:
	case OMP_CLAUSE_ORDER:
	case OMP_CLAUSE_BIND:
	case OMP_CLAUSE_USE_DEVICE_PTR:
	case OMP_CLAUSE_USE_DEVICE_ADDR:
	case OMP_CLAUSE_NONTEMPORAL:
	case OMP_CLAUSE_ASYNC:
	case OMP_CLAUSE_WAIT:
	case OMP_CLAUSE_NUM_GANGS:
	case OMP_CLAUSE_NUM_WORKERS:
	case OMP_CLAUSE_VECTOR_LENGTH:
	case OMP_CLAUSE_GANG:
	case OMP_CLAUSE_WORKER:
	case OMP_CLAUSE_VECTOR:
	case OMP_CLAUSE_INDEPENDENT:
	case OMP_CLAUSE_AUTO:
	case OMP_CLAUSE_SEQ:
	case OMP_CLAUSE_TILE:
	case OMP_CLAUSE__SIMT_:
	case OMP_CLAUSE_IF_PRESENT:
	case OMP_CLAUSE_FINALIZE:
	case OMP_CLAUSE__CONDTEMP_:
	  break;

	case OMP_CLAUSE__CACHE_:
	default:
	  gcc_unreachable ();
	}
    }

  /* Finally scan any GIMPLE sequences hanging off reduction /
     lastprivate / linear clauses noted above.  */
  gcc_checking_assert (!scan_array_reductions
		       || !is_gimple_omp_oacc (ctx->stmt));
  if (scan_array_reductions)
    {
      for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
	if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	     || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
	     || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
	    && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	  {
	    scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c), ctx);
	    scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
	  }
	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
		 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
	  scan_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
		 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
	  scan_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
    }
}
1854
1855 /* Create a new name for omp child function. Returns an identifier. */
1856
1857 static tree
create_omp_child_function_name(bool task_copy)1858 create_omp_child_function_name (bool task_copy)
1859 {
1860 return clone_function_name_numbered (current_function_decl,
1861 task_copy ? "_omp_cpyfn" : "_omp_fn");
1862 }
1863
1864 /* Return true if CTX may belong to offloaded code: either if current function
1865 is offloaded, or any enclosing context corresponds to a target region. */
1866
1867 static bool
omp_maybe_offloaded_ctx(omp_context * ctx)1868 omp_maybe_offloaded_ctx (omp_context *ctx)
1869 {
1870 if (cgraph_node::get (current_function_decl)->offloadable)
1871 return true;
1872 for (; ctx; ctx = ctx->outer)
1873 if (is_gimple_omp_offloaded (ctx->stmt))
1874 return true;
1875 return false;
1876 }
1877
/* Build a decl for the omp child function.  It'll not contain a body
   yet, just the bare decl.  */

static void
create_omp_child_function (omp_context *ctx, bool task_copy)
{
  tree decl, type, name, t;

  name = create_omp_child_function_name (task_copy);
  /* A task copy function takes two pointers (destination and source
     data blocks); an ordinary child function takes just .omp_data_i.  */
  if (task_copy)
    type = build_function_type_list (void_type_node, ptr_type_node,
				    ptr_type_node, NULL_TREE);
  else
    type = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);

  decl = build_decl (gimple_location (ctx->stmt), FUNCTION_DECL, name, type);

  gcc_checking_assert (!is_gimple_omp_oacc (ctx->stmt)
		       || !task_copy);
  if (!task_copy)
    ctx->cb.dst_fn = decl;
  else
    gimple_omp_task_set_copy_fn (ctx->stmt, decl);

  TREE_STATIC (decl) = 1;
  TREE_USED (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  DECL_IGNORED_P (decl) = 0;
  TREE_PUBLIC (decl) = 0;
  DECL_UNINLINABLE (decl) = 1;
  DECL_EXTERNAL (decl) = 0;
  DECL_CONTEXT (decl) = NULL_TREE;
  DECL_INITIAL (decl) = make_node (BLOCK);
  BLOCK_SUPERCONTEXT (DECL_INITIAL (decl)) = decl;
  DECL_ATTRIBUTES (decl) = DECL_ATTRIBUTES (current_function_decl);
  /* Remove omp declare simd attribute from the new attributes.  */
  if (tree a = lookup_attribute ("omp declare simd", DECL_ATTRIBUTES (decl)))
    {
      /* A is left pointing at the chain position just past the last
	 "omp declare simd" attribute; everything before it is copied
	 with those attributes filtered out, so the parent's attribute
	 list itself is never modified.  */
      while (tree a2 = lookup_attribute ("omp declare simd", TREE_CHAIN (a)))
	a = a2;
      a = TREE_CHAIN (a);
      for (tree *p = &DECL_ATTRIBUTES (decl); *p != a;)
	if (is_attribute_p ("omp declare simd", get_attribute_name (*p)))
	  *p = TREE_CHAIN (*p);
	else
	  {
	    tree chain = TREE_CHAIN (*p);
	    *p = copy_node (*p);
	    p = &TREE_CHAIN (*p);
	    *p = chain;
	  }
    }
  /* Inherit optimization/target options and versioning state from the
     parent function.  */
  DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)
    = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (current_function_decl);
  DECL_FUNCTION_SPECIFIC_TARGET (decl)
    = DECL_FUNCTION_SPECIFIC_TARGET (current_function_decl);
  DECL_FUNCTION_VERSIONED (decl)
    = DECL_FUNCTION_VERSIONED (current_function_decl);

  if (omp_maybe_offloaded_ctx (ctx))
    {
      cgraph_node::get_create (decl)->offloadable = 1;
      if (ENABLE_OFFLOADING)
	g->have_offload = true;
    }

  /* Mark offloadable functions for the offload tables, unless the
     parent is already a declare-target function.  */
  if (cgraph_node::get_create (decl)->offloadable
      && !lookup_attribute ("omp declare target",
			    DECL_ATTRIBUTES (current_function_decl)))
    {
      const char *target_attr = (is_gimple_omp_offloaded (ctx->stmt)
				 ? "omp target entrypoint"
				 : "omp declare target");
      DECL_ATTRIBUTES (decl)
	= tree_cons (get_identifier (target_attr),
		     NULL_TREE, DECL_ATTRIBUTES (decl));
    }

  /* The child returns void.  */
  t = build_decl (DECL_SOURCE_LOCATION (decl),
		  RESULT_DECL, NULL_TREE, void_type_node);
  DECL_ARTIFICIAL (t) = 1;
  DECL_IGNORED_P (t) = 1;
  DECL_CONTEXT (t) = decl;
  DECL_RESULT (decl) = t;

  /* Build the .omp_data_i parameter; for !task_copy it doubles as the
     receiver decl for incoming shared data.  */
  tree data_name = get_identifier (".omp_data_i");
  t = build_decl (DECL_SOURCE_LOCATION (decl), PARM_DECL, data_name,
		  ptr_type_node);
  DECL_ARTIFICIAL (t) = 1;
  DECL_NAMELESS (t) = 1;
  DECL_ARG_TYPE (t) = ptr_type_node;
  DECL_CONTEXT (t) = current_function_decl;
  TREE_USED (t) = 1;
  TREE_READONLY (t) = 1;
  DECL_ARGUMENTS (decl) = t;
  if (!task_copy)
    ctx->receiver_decl = t;
  else
    {
      /* Task copy functions get a second parameter, .omp_data_o,
	 chained in front of .omp_data_i.  */
      t = build_decl (DECL_SOURCE_LOCATION (decl),
		      PARM_DECL, get_identifier (".omp_data_o"),
		      ptr_type_node);
      DECL_ARTIFICIAL (t) = 1;
      DECL_NAMELESS (t) = 1;
      DECL_ARG_TYPE (t) = ptr_type_node;
      DECL_CONTEXT (t) = current_function_decl;
      TREE_USED (t) = 1;
      TREE_ADDRESSABLE (t) = 1;
      DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
      DECL_ARGUMENTS (decl) = t;
    }

  /* Allocate memory for the function structure.  The call to
     allocate_struct_function clobbers CFUN, so we need to restore
     it afterward.  */
  push_struct_function (decl);
  cfun->function_end_locus = gimple_location (ctx->stmt);
  init_tree_ssa (cfun);
  pop_cfun ();
}
1998
/* Callback for walk_gimple_seq.  Check if combined parallel
   contains gimple_omp_for_combined_into_p OMP_FOR.

   WI->info initially points at an enum gf_mask value giving the loop
   kind to look for.  On a match, WI->info is repurposed to hold the
   found GIMPLE_OMP_FOR statement itself and a non-NULL tree is
   returned, which terminates the walk early.  The caller detects
   success by checking whether WI->info still points at its mask.  */

tree
omp_find_combined_for (gimple_stmt_iterator *gsi_p,
		       bool *handled_ops_p,
		       struct walk_stmt_info *wi)
{
  gimple *stmt = gsi_stmt (*gsi_p);

  /* We handle (or deliberately skip) every statement ourselves; don't
     let the generic walker descend into operands.  */
  *handled_ops_p = true;
  switch (gimple_code (stmt))
    {
    /* Macro expanding to case labels for container statements whose
       bodies should be walked recursively.  */
    WALK_SUBSTMTS;

    case GIMPLE_OMP_FOR:
      if (gimple_omp_for_combined_into_p (stmt)
	  && gimple_omp_for_kind (stmt)
	     == *(const enum gf_mask *) (wi->info))
	{
	  /* Found it: stash the statement and stop the walk.  */
	  wi->info = stmt;
	  return integer_zero_node;
	}
      break;
    default:
      break;
    }
  return NULL;
}
2028
/* Add _LOOPTEMP_/_REDUCTEMP_ clauses on OpenMP parallel or task.

   MSK selects the kind of combined inner loop to look for
   (GF_OMP_FOR_KIND_FOR for parallel, GF_OMP_FOR_KIND_TASKLOOP for
   task).  STMT is the enclosing parallel/task statement; OUTER_CTX is
   the context it is being scanned in.  The temporaries added here are
   later filled in by the runtime (istart/iend etc.), so they must be
   communicated between the construct and its combined inner loop via
   artificial clauses.  */

static void
add_taskreg_looptemp_clauses (enum gf_mask msk, gimple *stmt,
			      omp_context *outer_ctx)
{
  struct walk_stmt_info wi;

  /* Search STMT's body for a GIMPLE_OMP_FOR of kind MSK that is marked
     as combined into STMT.  omp_find_combined_for overwrites wi.info
     with the statement when found; if wi.info still points at MSK
     afterwards, there is no such loop.  */
  memset (&wi, 0, sizeof (wi));
  wi.val_only = true;
  wi.info = (void *) &msk;
  walk_gimple_seq (gimple_omp_body (stmt), omp_find_combined_for, NULL, &wi);
  if (wi.info != (void *) &msk)
    {
      gomp_for *for_stmt = as_a <gomp_for *> ((gimple *) wi.info);
      struct omp_for_data fd;
      omp_extract_for_data (for_stmt, &fd, NULL);
      /* We need two temporaries with fd.loop.v type (istart/iend)
	 and then (fd.collapse - 1) temporaries with the same
	 type for count2 ... countN-1 vars if not constant.  */
      size_t count = 2, i;
      tree type = fd.iter_type;
      if (fd.collapse > 1
	  && TREE_CODE (fd.loop.n2) != INTEGER_CST)
	{
	  count += fd.collapse - 1;
	  /* If there are lastprivate clauses on the inner
	     GIMPLE_OMP_FOR, add one more temporaries for the total number
	     of iterations (product of count1 ... countN-1).  */
	  if (omp_find_clause (gimple_omp_for_clauses (for_stmt),
			       OMP_CLAUSE_LASTPRIVATE)
	      || (msk == GF_OMP_FOR_KIND_FOR
		  && omp_find_clause (gimple_omp_parallel_clauses (stmt),
				      OMP_CLAUSE_LASTPRIVATE)))
	    {
	      tree temp = create_tmp_var (type);
	      tree c = build_omp_clause (UNKNOWN_LOCATION,
					 OMP_CLAUSE__LOOPTEMP_);
	      /* Map the temporary to itself so remapping into the child
		 function's callback data leaves it untouched.  */
	      insert_decl_map (&outer_ctx->cb, temp, temp);
	      OMP_CLAUSE_DECL (c) = temp;
	      OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
	      gimple_omp_taskreg_set_clauses (stmt, c);
	    }
	  /* For a non-rectangular nest where exactly the two adjacent
	     loops FIRST_NONRECT/LAST_NONRECT are involved and the inner
	     index is signed, one extra iter-type temp plus three temps
	     in the index type are added.  NOTE(review): the exact role
	     of the three index-type temps is defined by the loop
	     expansion code (omp-expand) — confirm there.  */
	  if (fd.non_rect
	      && fd.last_nonrect == fd.first_nonrect + 1)
	    if (tree v = gimple_omp_for_index (for_stmt, fd.last_nonrect))
	      if (!TYPE_UNSIGNED (TREE_TYPE (v)))
		{
		  v = gimple_omp_for_index (for_stmt, fd.first_nonrect);
		  tree type2 = TREE_TYPE (v);
		  count++;
		  for (i = 0; i < 3; i++)
		    {
		      tree temp = create_tmp_var (type2);
		      tree c = build_omp_clause (UNKNOWN_LOCATION,
						 OMP_CLAUSE__LOOPTEMP_);
		      insert_decl_map (&outer_ctx->cb, temp, temp);
		      OMP_CLAUSE_DECL (c) = temp;
		      OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
		      gimple_omp_taskreg_set_clauses (stmt, c);
		    }
		}
	}
      /* Add the COUNT basic _LOOPTEMP_ temporaries in the iteration
	 type (istart/iend and any count2..countN-1 vars).  */
      for (i = 0; i < count; i++)
	{
	  tree temp = create_tmp_var (type);
	  tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
	  insert_decl_map (&outer_ctx->cb, temp, temp);
	  OMP_CLAUSE_DECL (c) = temp;
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
	  gimple_omp_taskreg_set_clauses (stmt, c);
	}
    }
  /* A taskloop with task reductions additionally needs a _REDUCTEMP_
     pointer-sized slot for the reduction bookkeeping data.  */
  if (msk == GF_OMP_FOR_KIND_TASKLOOP
      && omp_find_clause (gimple_omp_task_clauses (stmt),
			  OMP_CLAUSE_REDUCTION))
    {
      tree type = build_pointer_type (pointer_sized_int_node);
      tree temp = create_tmp_var (type);
      tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
      insert_decl_map (&outer_ctx->cb, temp, temp);
      OMP_CLAUSE_DECL (c) = temp;
      OMP_CLAUSE_CHAIN (c) = gimple_omp_task_clauses (stmt);
      gimple_omp_task_set_clauses (stmt, c);
    }
}
2115
/* Scan an OpenMP parallel directive.

   Builds an omp_context for the region, creates the record type that
   will carry shared data into the child function, creates the child
   function itself, and recursively scans clauses and body.  May
   replace the statement with a nop when the region is provably
   useless.  */

static void
scan_omp_parallel (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
{
  omp_context *ctx;
  tree name;
  gomp_parallel *stmt = as_a <gomp_parallel *> (gsi_stmt (*gsi));

  /* Ignore parallel directives with empty bodies, unless there
     are copyin clauses.  */
  if (optimize > 0
      && empty_body_p (gimple_omp_body (stmt))
      && omp_find_clause (gimple_omp_parallel_clauses (stmt),
			  OMP_CLAUSE_COPYIN) == NULL)
    {
      gsi_replace (gsi, gimple_build_nop (), false);
      return;
    }

  /* A combined parallel-for needs _LOOPTEMP_ clauses for runtime
     communication with the inner loop.  */
  if (gimple_omp_parallel_combined_p (stmt))
    add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_FOR, stmt, outer_ctx);
  /* If any reduction clause is a task reduction, prepend one
     _REDUCTEMP_ clause for the reduction bookkeeping pointer.  Note
     the inner C intentionally shadows the loop variable.  */
  for (tree c = omp_find_clause (gimple_omp_parallel_clauses (stmt),
				 OMP_CLAUSE_REDUCTION);
       c; c = omp_find_clause (OMP_CLAUSE_CHAIN (c), OMP_CLAUSE_REDUCTION))
    if (OMP_CLAUSE_REDUCTION_TASK (c))
      {
	tree type = build_pointer_type (pointer_sized_int_node);
	tree temp = create_tmp_var (type);
	tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
	if (outer_ctx)
	  insert_decl_map (&outer_ctx->cb, temp, temp);
	OMP_CLAUSE_DECL (c) = temp;
	OMP_CLAUSE_CHAIN (c) = gimple_omp_parallel_clauses (stmt);
	gimple_omp_parallel_set_clauses (stmt, c);
	break;
      }
    else if (OMP_CLAUSE_CHAIN (c) == NULL_TREE)
      break;

  ctx = new_omp_context (stmt, outer_ctx);
  /* Remember the context: record types are laid out later, in
     finish_taskreg_scan, once all fields are known.  */
  taskreg_contexts.safe_push (ctx);
  if (taskreg_nesting_level > 1)
    ctx->is_nested = true;
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  /* Build the .omp_data_s record that carries shared data to the
     child function; give it an artificial TYPE_DECL name.  */
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  name = create_tmp_var_name (".omp_data_s");
  name = build_decl (gimple_location (stmt),
		     TYPE_DECL, name, ctx->record_type);
  DECL_ARTIFICIAL (name) = 1;
  DECL_NAMELESS (name) = 1;
  TYPE_NAME (ctx->record_type) = name;
  TYPE_ARTIFICIAL (ctx->record_type) = 1;
  create_omp_child_function (ctx, false);
  gimple_omp_parallel_set_child_fn (stmt, ctx->cb.dst_fn);

  scan_sharing_clauses (gimple_omp_parallel_clauses (stmt), ctx);
  scan_omp (gimple_omp_body_ptr (stmt), ctx);

  /* No shared data ended up in the record: drop it and the receiver.  */
  if (TYPE_FIELDS (ctx->record_type) == NULL)
    ctx->record_type = ctx->receiver_decl = NULL;
}
2178
/* Scan an OpenMP task directive.

   Like scan_omp_parallel, but tasks may additionally get a second
   "sender" record type (srecord_type) with its own copy function, and
   an empty record is communicated to the runtime via zero arg_size /
   unit arg_align.  */

static void
scan_omp_task (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
{
  omp_context *ctx;
  tree name, t;
  gomp_task *stmt = as_a <gomp_task *> (gsi_stmt (*gsi));

  /* Ignore task directives with empty bodies, unless they have depend
     clause.  */
  if (optimize > 0
      && gimple_omp_body (stmt)
      && empty_body_p (gimple_omp_body (stmt))
      && !omp_find_clause (gimple_omp_task_clauses (stmt), OMP_CLAUSE_DEPEND))
    {
      gsi_replace (gsi, gimple_build_nop (), false);
      return;
    }

  /* Taskloop needs _LOOPTEMP_/_REDUCTEMP_ clauses for runtime
     communication with the combined inner loop.  */
  if (gimple_omp_task_taskloop_p (stmt))
    add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_TASKLOOP, stmt, outer_ctx);

  ctx = new_omp_context (stmt, outer_ctx);

  /* A bare taskwait-with-depend has no body or child function; only
     its clauses need scanning.  */
  if (gimple_omp_task_taskwait_p (stmt))
    {
      scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);
      return;
    }

  /* Record layout is finished later in finish_taskreg_scan.  */
  taskreg_contexts.safe_push (ctx);
  if (taskreg_nesting_level > 1)
    ctx->is_nested = true;
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  name = create_tmp_var_name (".omp_data_s");
  name = build_decl (gimple_location (stmt),
		     TYPE_DECL, name, ctx->record_type);
  DECL_ARTIFICIAL (name) = 1;
  DECL_NAMELESS (name) = 1;
  TYPE_NAME (ctx->record_type) = name;
  TYPE_ARTIFICIAL (ctx->record_type) = 1;
  create_omp_child_function (ctx, false);
  gimple_omp_task_set_child_fn (stmt, ctx->cb.dst_fn);

  scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);

  /* If scanning the clauses created a sender record (.omp_data_a),
     it needs a name and a task copy function as well.  */
  if (ctx->srecord_type)
    {
      name = create_tmp_var_name (".omp_data_a");
      name = build_decl (gimple_location (stmt),
			 TYPE_DECL, name, ctx->srecord_type);
      DECL_ARTIFICIAL (name) = 1;
      DECL_NAMELESS (name) = 1;
      TYPE_NAME (ctx->srecord_type) = name;
      TYPE_ARTIFICIAL (ctx->srecord_type) = 1;
      create_omp_child_function (ctx, true);
    }

  scan_omp (gimple_omp_body_ptr (stmt), ctx);

  /* Empty record: tell the runtime no task data needs allocating by
     using size 0 and alignment 1.  */
  if (TYPE_FIELDS (ctx->record_type) == NULL)
    {
      ctx->record_type = ctx->receiver_decl = NULL;
      t = build_int_cst (long_integer_type_node, 0);
      gimple_omp_task_set_arg_size (stmt, t);
      t = build_int_cst (long_integer_type_node, 1);
      gimple_omp_task_set_arg_align (stmt, t);
    }
}
2250
2251 /* Helper function for finish_taskreg_scan, called through walk_tree.
2252 If maybe_lookup_decl_in_outer_context returns non-NULL for some
2253 tree, replace it in the expression. */
2254
2255 static tree
finish_taskreg_remap(tree * tp,int * walk_subtrees,void * data)2256 finish_taskreg_remap (tree *tp, int *walk_subtrees, void *data)
2257 {
2258 if (VAR_P (*tp))
2259 {
2260 omp_context *ctx = (omp_context *) data;
2261 tree t = maybe_lookup_decl_in_outer_ctx (*tp, ctx);
2262 if (t != *tp)
2263 {
2264 if (DECL_HAS_VALUE_EXPR_P (t))
2265 t = unshare_expr (DECL_VALUE_EXPR (t));
2266 *tp = t;
2267 }
2268 *walk_subtrees = 0;
2269 }
2270 else if (IS_TYPE_OR_DECL_P (*tp))
2271 *walk_subtrees = 0;
2272 return NULL_TREE;
2273 }
2274
/* If any decls have been made addressable during scan_omp,
   adjust their fields if needed, and layout record types
   of parallel/task constructs.

   Called once per queued taskreg context after the whole function has
   been scanned, since later scanning can change how fields must be
   represented (by value vs. by reference) and which fields exist.  */

static void
finish_taskreg_scan (omp_context *ctx)
{
  if (ctx->record_type == NULL_TREE)
    return;

  /* If any task_shared_vars were needed, verify all
     OMP_CLAUSE_SHARED clauses on GIMPLE_OMP_{PARALLEL,TASK,TEAMS}
     statements if use_pointer_for_field hasn't changed
     because of that.  If it did, update field types now.  */
  if (task_shared_vars)
    {
      tree c;

      for (c = gimple_omp_taskreg_clauses (ctx->stmt);
	   c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
	    && !OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	  {
	    tree decl = OMP_CLAUSE_DECL (c);

	    /* Global variables don't need to be copied,
	       the receiver side will use them directly.  */
	    if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
	      continue;
	    /* Only fields whose decl became shared and now must be
	       passed by pointer need fixing up.  */
	    if (!bitmap_bit_p (task_shared_vars, DECL_UID (decl))
		|| !use_pointer_for_field (decl, ctx))
	      continue;
	    tree field = lookup_field (decl, ctx);
	    /* Already a pointer to the right type: nothing to do.  */
	    if (TREE_CODE (TREE_TYPE (field)) == POINTER_TYPE
		&& TREE_TYPE (TREE_TYPE (field)) == TREE_TYPE (decl))
	      continue;
	    /* Retype the field as a pointer and reset alignment and
	       volatility accordingly, widening the record's alignment
	       if needed.  */
	    TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
	    TREE_THIS_VOLATILE (field) = 0;
	    DECL_USER_ALIGN (field) = 0;
	    SET_DECL_ALIGN (field, TYPE_ALIGN (TREE_TYPE (field)));
	    if (TYPE_ALIGN (ctx->record_type) < DECL_ALIGN (field))
	      SET_TYPE_ALIGN (ctx->record_type, DECL_ALIGN (field));
	    /* Mirror the change in the sender record, if any.  */
	    if (ctx->srecord_type)
	      {
		tree sfield = lookup_sfield (decl, ctx);
		TREE_TYPE (sfield) = TREE_TYPE (field);
		TREE_THIS_VOLATILE (sfield) = 0;
		DECL_USER_ALIGN (sfield) = 0;
		SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
		if (TYPE_ALIGN (ctx->srecord_type) < DECL_ALIGN (sfield))
		  SET_TYPE_ALIGN (ctx->srecord_type, DECL_ALIGN (sfield));
	      }
	  }
    }

  if (gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL)
    {
      tree clauses = gimple_omp_parallel_clauses (ctx->stmt);
      tree c = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
      if (c)
	{
	  /* Move the _reductemp_ clause first.  GOMP_parallel_reductions
	     expects to find it at the start of data.  */
	  tree f = lookup_field (OMP_CLAUSE_DECL (c), ctx);
	  tree *p = &TYPE_FIELDS (ctx->record_type);
	  /* Unlink F from the field chain ...  */
	  while (*p)
	    if (*p == f)
	      {
		*p = DECL_CHAIN (*p);
		break;
	      }
	    else
	      p = &DECL_CHAIN (*p);
	  /* ... and relink it at the head.  */
	  DECL_CHAIN (f) = TYPE_FIELDS (ctx->record_type);
	  TYPE_FIELDS (ctx->record_type) = f;
	}
      layout_type (ctx->record_type);
      fixup_child_record_type (ctx);
    }
  else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
    {
      layout_type (ctx->record_type);
      fixup_child_record_type (ctx);
    }
  else
    {
      /* GIMPLE_OMP_TASK.  */
      location_t loc = gimple_location (ctx->stmt);
      tree *p, vla_fields = NULL_TREE, *q = &vla_fields;
      tree detach_clause
	= omp_find_clause (gimple_omp_task_clauses (ctx->stmt),
			   OMP_CLAUSE_DETACH);
      /* Move VLA fields to the end.  */
      p = &TYPE_FIELDS (ctx->record_type);
      while (*p)
	if (!TYPE_SIZE_UNIT (TREE_TYPE (*p))
	    || ! TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (*p))))
	  {
	    /* Variable-sized field: splice it onto the VLA list.  */
	    *q = *p;
	    *p = TREE_CHAIN (*p);
	    TREE_CHAIN (*q) = NULL_TREE;
	    q = &TREE_CHAIN (*q);
	  }
	else
	  p = &DECL_CHAIN (*p);
      /* Append the collected VLA fields after all fixed-size ones.  */
      *p = vla_fields;
      if (gimple_omp_task_taskloop_p (ctx->stmt))
	{
	  /* Move fields corresponding to first and second _looptemp_
	     clause first.  There are filled by GOMP_taskloop
	     and thus need to be in specific positions.  */
	  tree clauses = gimple_omp_task_clauses (ctx->stmt);
	  tree c1 = omp_find_clause (clauses, OMP_CLAUSE__LOOPTEMP_);
	  tree c2 = omp_find_clause (OMP_CLAUSE_CHAIN (c1),
				     OMP_CLAUSE__LOOPTEMP_);
	  tree c3 = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
	  tree f1 = lookup_field (OMP_CLAUSE_DECL (c1), ctx);
	  tree f2 = lookup_field (OMP_CLAUSE_DECL (c2), ctx);
	  tree f3 = c3 ? lookup_field (OMP_CLAUSE_DECL (c3), ctx) : NULL_TREE;
	  /* Unlink f1/f2 (and f3 if present) from the field chain,
	     then relink them at the head in order f1, f2[, f3].  */
	  p = &TYPE_FIELDS (ctx->record_type);
	  while (*p)
	    if (*p == f1 || *p == f2 || *p == f3)
	      *p = DECL_CHAIN (*p);
	    else
	      p = &DECL_CHAIN (*p);
	  DECL_CHAIN (f1) = f2;
	  if (c3)
	    {
	      DECL_CHAIN (f2) = f3;
	      DECL_CHAIN (f3) = TYPE_FIELDS (ctx->record_type);
	    }
	  else
	    DECL_CHAIN (f2) = TYPE_FIELDS (ctx->record_type);
	  TYPE_FIELDS (ctx->record_type) = f1;
	  /* Same reordering for the sender record.  */
	  if (ctx->srecord_type)
	    {
	      f1 = lookup_sfield (OMP_CLAUSE_DECL (c1), ctx);
	      f2 = lookup_sfield (OMP_CLAUSE_DECL (c2), ctx);
	      if (c3)
		f3 = lookup_sfield (OMP_CLAUSE_DECL (c3), ctx);
	      p = &TYPE_FIELDS (ctx->srecord_type);
	      while (*p)
		if (*p == f1 || *p == f2 || *p == f3)
		  *p = DECL_CHAIN (*p);
		else
		  p = &DECL_CHAIN (*p);
	      DECL_CHAIN (f1) = f2;
	      DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
	      if (c3)
		{
		  DECL_CHAIN (f2) = f3;
		  DECL_CHAIN (f3) = TYPE_FIELDS (ctx->srecord_type);
		}
	      else
		DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
	      TYPE_FIELDS (ctx->srecord_type) = f1;
	    }
	}
      if (detach_clause)
	{
	  tree c, field;

	  /* Look for a firstprivate clause with the detach event handle.  */
	  for (c = gimple_omp_taskreg_clauses (ctx->stmt);
	       c; c = OMP_CLAUSE_CHAIN (c))
	    {
	      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
		continue;
	      if (maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_DECL (c), ctx)
		  == OMP_CLAUSE_DECL (detach_clause))
		break;
	    }

	  gcc_assert (c);
	  field = lookup_field (OMP_CLAUSE_DECL (c), ctx);

	  /* Move field corresponding to the detach clause first.
	     This is filled by GOMP_task and needs to be in a
	     specific position.  */
	  p = &TYPE_FIELDS (ctx->record_type);
	  while (*p)
	    if (*p == field)
	      *p = DECL_CHAIN (*p);
	    else
	      p = &DECL_CHAIN (*p);
	  DECL_CHAIN (field) = TYPE_FIELDS (ctx->record_type);
	  TYPE_FIELDS (ctx->record_type) = field;
	  if (ctx->srecord_type)
	    {
	      field = lookup_sfield (OMP_CLAUSE_DECL (c), ctx);
	      p = &TYPE_FIELDS (ctx->srecord_type);
	      while (*p)
		if (*p == field)
		  *p = DECL_CHAIN (*p);
		else
		  p = &DECL_CHAIN (*p);
	      DECL_CHAIN (field) = TYPE_FIELDS (ctx->srecord_type);
	      TYPE_FIELDS (ctx->srecord_type) = field;
	    }
	}
      layout_type (ctx->record_type);
      fixup_child_record_type (ctx);
      if (ctx->srecord_type)
	layout_type (ctx->srecord_type);
      /* Communicate the record's size and alignment to the runtime.
	 A VLA-containing record has a non-constant size expression;
	 remap any variables in it that have outer-context
	 replacements/value-exprs.  */
      tree t = fold_convert_loc (loc, long_integer_type_node,
				 TYPE_SIZE_UNIT (ctx->record_type));
      if (TREE_CODE (t) != INTEGER_CST)
	{
	  t = unshare_expr (t);
	  walk_tree (&t, finish_taskreg_remap, ctx, NULL);
	}
      gimple_omp_task_set_arg_size (ctx->stmt, t);
      t = build_int_cst (long_integer_type_node,
			 TYPE_ALIGN_UNIT (ctx->record_type));
      gimple_omp_task_set_arg_align (ctx->stmt, t);
    }
}
2491
2492 /* Find the enclosing offload context. */
2493
2494 static omp_context *
enclosing_target_ctx(omp_context * ctx)2495 enclosing_target_ctx (omp_context *ctx)
2496 {
2497 for (; ctx; ctx = ctx->outer)
2498 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET)
2499 break;
2500
2501 return ctx;
2502 }
2503
2504 /* Return whether CTX's parent compute construct is an OpenACC 'kernels'
2505 construct.
2506 (This doesn't include OpenACC 'kernels' decomposed parts.) */
2507
2508 static bool
ctx_in_oacc_kernels_region(omp_context * ctx)2509 ctx_in_oacc_kernels_region (omp_context *ctx)
2510 {
2511 for (;ctx != NULL; ctx = ctx->outer)
2512 {
2513 gimple *stmt = ctx->stmt;
2514 if (gimple_code (stmt) == GIMPLE_OMP_TARGET
2515 && gimple_omp_target_kind (stmt) == GF_OMP_TARGET_KIND_OACC_KERNELS)
2516 return true;
2517 }
2518
2519 return false;
2520 }
2521
/* Check the parallelism clauses inside a OpenACC 'kernels' region.
   (This doesn't include OpenACC 'kernels' decomposed parts.)
   Until kernels handling moves to use the same loop indirection
   scheme as parallel, we need to do this checking early.

   Returns the accumulated gang/worker/vector mask of this loop and
   all enclosing loops.  When STMT is NULL, we are being called
   recursively just to gather the outer mask (no diagnostics).  */

static unsigned
check_oacc_kernel_gwv (gomp_for *stmt, omp_context *ctx)
{
  bool checking = true;
  unsigned outer_mask = 0;
  unsigned this_mask = 0;
  bool has_seq = false, has_auto = false;

  /* First accumulate parallelism already claimed by enclosing loops.  */
  if (ctx->outer)
    outer_mask = check_oacc_kernel_gwv (NULL, ctx->outer);
  if (!stmt)
    {
      /* Recursive invocation: only collect the mask, skip diagnostics,
	 and ignore non-loop contexts.  */
      checking = false;
      if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR)
	return outer_mask;
      stmt = as_a <gomp_for *> (ctx->stmt);
    }

  /* Collect this loop's parallelism clauses into THIS_MASK, noting
     seq/auto separately as they interact with the others.  */
  for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
    {
      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_GANG:
	  this_mask |= GOMP_DIM_MASK (GOMP_DIM_GANG);
	  break;
	case OMP_CLAUSE_WORKER:
	  this_mask |= GOMP_DIM_MASK (GOMP_DIM_WORKER);
	  break;
	case OMP_CLAUSE_VECTOR:
	  this_mask |= GOMP_DIM_MASK (GOMP_DIM_VECTOR);
	  break;
	case OMP_CLAUSE_SEQ:
	  has_seq = true;
	  break;
	case OMP_CLAUSE_AUTO:
	  has_auto = true;
	  break;
	default:
	  break;
	}
    }

  if (checking)
    {
      /* seq excludes everything else; auto excludes explicit g/w/v.  */
      if (has_seq && (this_mask || has_auto))
	error_at (gimple_location (stmt), "%<seq%> overrides other"
		  " OpenACC loop specifiers");
      else if (has_auto && this_mask)
	error_at (gimple_location (stmt), "%<auto%> conflicts with other"
		  " OpenACC loop specifiers");

      /* A nesting level may not reuse a dimension claimed outside.  */
      if (this_mask & outer_mask)
	error_at (gimple_location (stmt), "inner loop uses same"
		  " OpenACC parallelism as containing loop");
    }

  return outer_mask | this_mask;
}
2585
/* Scan a GIMPLE_OMP_FOR.

   Creates the loop's omp_context, performs OpenACC-specific
   validation (clause arguments, gang/worker/vector nesting,
   reduction consistency), then scans clauses, pre-body, loop control
   operands and body.  Returns the new context.  */

static omp_context *
scan_omp_for (gomp_for *stmt, omp_context *outer_ctx)
{
  omp_context *ctx;
  size_t i;
  tree clauses = gimple_omp_for_clauses (stmt);

  ctx = new_omp_context (stmt, outer_ctx);

  if (is_gimple_omp_oacc (stmt))
    {
      omp_context *tgt = enclosing_target_ctx (outer_ctx);

      /* Outside of 'kernels' regions, gang/worker/vector clauses may
	 not carry an argument expression; diagnose if they do.  */
      if (!(tgt && is_oacc_kernels (tgt)))
	for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
	  {
	    tree c_op0;
	    switch (OMP_CLAUSE_CODE (c))
	      {
	      case OMP_CLAUSE_GANG:
		c_op0 = OMP_CLAUSE_GANG_EXPR (c);
		break;

	      case OMP_CLAUSE_WORKER:
		c_op0 = OMP_CLAUSE_WORKER_EXPR (c);
		break;

	      case OMP_CLAUSE_VECTOR:
		c_op0 = OMP_CLAUSE_VECTOR_EXPR (c);
		break;

	      default:
		continue;
	      }

	    if (c_op0)
	      {
		/* By construction, this is impossible for OpenACC 'kernels'
		   decomposed parts.  */
		gcc_assert (!(tgt && is_oacc_kernels_decomposed_part (tgt)));

		error_at (OMP_CLAUSE_LOCATION (c),
			  "argument not permitted on %qs clause",
			  omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
		/* Point at where the loop got its parallelism context
		   from: enclosing construct or 'routine' attribute.  */
		if (tgt)
		  inform (gimple_location (tgt->stmt),
			  "enclosing parent compute construct");
		else if (oacc_get_fn_attrib (current_function_decl))
		  inform (DECL_SOURCE_LOCATION (current_function_decl),
			  "enclosing routine");
		else
		  gcc_unreachable ();
	      }
	  }

      /* Inside 'kernels', validate gang/worker/vector nesting early.  */
      if (tgt && is_oacc_kernels (tgt))
	check_oacc_kernel_gwv (stmt, ctx);

      /* Collect all variables named in reductions on this loop.  Ensure
	 that, if this loop has a reduction on some variable v, and there is
	 a reduction on v somewhere in an outer context, then there is a
	 reduction on v on all intervening loops as well.  */
      tree local_reduction_clauses = NULL;
      for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
	{
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
	    local_reduction_clauses
	      = tree_cons (NULL, c, local_reduction_clauses);
	}
      /* Lazily inherit the accumulated reduction list from the outer
	 context the first time it is needed.  */
      if (ctx->outer_reduction_clauses == NULL && ctx->outer != NULL)
	ctx->outer_reduction_clauses
	  = chainon (unshare_expr (ctx->outer->local_reduction_clauses),
		     ctx->outer->outer_reduction_clauses);
      tree outer_reduction_clauses = ctx->outer_reduction_clauses;
      tree local_iter = local_reduction_clauses;
      for (; local_iter; local_iter = TREE_CHAIN (local_iter))
	{
	  tree local_clause = TREE_VALUE (local_iter);
	  tree local_var = OMP_CLAUSE_DECL (local_clause);
	  tree_code local_op = OMP_CLAUSE_REDUCTION_CODE (local_clause);
	  bool have_outer_reduction = false;
	  tree ctx_iter = outer_reduction_clauses;
	  for (; ctx_iter; ctx_iter = TREE_CHAIN (ctx_iter))
	    {
	      tree outer_clause = TREE_VALUE (ctx_iter);
	      tree outer_var = OMP_CLAUSE_DECL (outer_clause);
	      tree_code outer_op = OMP_CLAUSE_REDUCTION_CODE (outer_clause);
	      /* Same variable reduced with a different operation:
		 warn, but keep going.  */
	      if (outer_var == local_var && outer_op != local_op)
		{
		  warning_at (OMP_CLAUSE_LOCATION (local_clause), 0,
			      "conflicting reduction operations for %qE",
			      local_var);
		  inform (OMP_CLAUSE_LOCATION (outer_clause),
			  "location of the previous reduction for %qE",
			  outer_var);
		}
	      if (outer_var == local_var)
		{
		  have_outer_reduction = true;
		  break;
		}
	    }
	  if (have_outer_reduction)
	    {
	      /* There is a reduction on outer_var both on this loop and on
		 some enclosing loop.  Walk up the context tree until such a
		 loop with a reduction on outer_var is found, and complain
		 about all intervening loops that do not have such a
		 reduction.  */
	      struct omp_context *curr_loop = ctx->outer;
	      bool found = false;
	      while (curr_loop != NULL)
		{
		  tree curr_iter = curr_loop->local_reduction_clauses;
		  for (; curr_iter; curr_iter = TREE_CHAIN (curr_iter))
		    {
		      tree curr_clause = TREE_VALUE (curr_iter);
		      tree curr_var = OMP_CLAUSE_DECL (curr_clause);
		      if (curr_var == local_var)
			{
			  found = true;
			  break;
			}
		    }
		  if (!found)
		    warning_at (gimple_location (curr_loop->stmt), 0,
				"nested loop in reduction needs "
				"reduction clause for %qE",
				local_var);
		  else
		    break;
		  curr_loop = curr_loop->outer;
		}
	    }
	}
      /* Publish this loop's reductions and fold them into the
	 accumulated list for inner loops to inherit.  */
      ctx->local_reduction_clauses = local_reduction_clauses;
      ctx->outer_reduction_clauses
	= chainon (unshare_expr (ctx->local_reduction_clauses),
		   ctx->outer_reduction_clauses);

      if (tgt && is_oacc_kernels (tgt))
	{
	  /* Strip out reductions, as they are not handled yet.  */
	  tree *prev_ptr = &clauses;

	  while (tree probe = *prev_ptr)
	    {
	      tree *next_ptr = &OMP_CLAUSE_CHAIN (probe);

	      if (OMP_CLAUSE_CODE (probe) == OMP_CLAUSE_REDUCTION)
		*prev_ptr = *next_ptr;
	      else
		prev_ptr = next_ptr;
	    }

	  gimple_omp_for_set_clauses (stmt, clauses);
	}
    }

  scan_sharing_clauses (clauses, ctx);

  /* Scan the pre-body and each collapsed dimension's control
     operands, then the body proper.  */
  scan_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
  for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
    {
      scan_omp_op (gimple_omp_for_index_ptr (stmt, i), ctx);
      scan_omp_op (gimple_omp_for_initial_ptr (stmt, i), ctx);
      scan_omp_op (gimple_omp_for_final_ptr (stmt, i), ctx);
      scan_omp_op (gimple_omp_for_incr_ptr (stmt, i), ctx);
    }
  scan_omp (gimple_omp_body_ptr (stmt), ctx);
  return ctx;
}
2760
/* Duplicate #pragma omp simd, one for SIMT, another one for SIMD.

   Replaces the loop at GSI with a GIMPLE_BIND of the shape:

     cond = GOMP_USE_SIMT ();
     if (cond != 0) goto lab1; else goto lab2;
     lab1: <copy of loop, with _simt_ clause added>
	   goto lab3;
     lab2: <original loop>
     lab3:

   so the SIMT variant is selected at run time on SIMT targets.  */

static void
scan_omp_simd (gimple_stmt_iterator *gsi, gomp_for *stmt,
	       omp_context *outer_ctx)
{
  gbind *bind = gimple_build_bind (NULL, NULL, NULL);
  gsi_replace (gsi, bind, false);
  gimple_seq seq = NULL;
  /* cond = GOMP_USE_SIMT ();  */
  gimple *g = gimple_build_call_internal (IFN_GOMP_USE_SIMT, 0);
  tree cond = create_tmp_var_raw (integer_type_node);
  DECL_CONTEXT (cond) = current_function_decl;
  DECL_SEEN_IN_BIND_EXPR_P (cond) = 1;
  gimple_bind_set_vars (bind, cond);
  gimple_call_set_lhs (g, cond);
  gimple_seq_add_stmt (&seq, g);
  tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
  tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
  tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
  /* if (cond != 0) goto lab1; else goto lab2;  */
  g = gimple_build_cond (NE_EXPR, cond, integer_zero_node, lab1, lab2);
  gimple_seq_add_stmt (&seq, g);
  g = gimple_build_label (lab1);
  gimple_seq_add_stmt (&seq, g);
  /* Deep-copy the loop (including locals) for the SIMT variant and
     tag it with an artificial _simt_ clause.  */
  gimple_seq new_seq = copy_gimple_seq_and_replace_locals (stmt);
  gomp_for *new_stmt = as_a <gomp_for *> (new_seq);
  tree clause = build_omp_clause (gimple_location (stmt), OMP_CLAUSE__SIMT_);
  OMP_CLAUSE_CHAIN (clause) = gimple_omp_for_clauses (new_stmt);
  gimple_omp_for_set_clauses (new_stmt, clause);
  gimple_seq_add_stmt (&seq, new_stmt);
  g = gimple_build_goto (lab3);
  gimple_seq_add_stmt (&seq, g);
  g = gimple_build_label (lab2);
  gimple_seq_add_stmt (&seq, g);
  /* The original loop becomes the non-SIMT branch.  */
  gimple_seq_add_stmt (&seq, stmt);
  g = gimple_build_label (lab3);
  gimple_seq_add_stmt (&seq, g);
  gimple_bind_set_body (bind, seq);
  update_stmt (bind);
  /* Scan both variants; link the SIMT copy to the original's context.  */
  scan_omp_for (new_stmt, outer_ctx);
  scan_omp_for (stmt, outer_ctx)->simt_stmt = new_stmt;
}
2802
2803 static tree omp_find_scan (gimple_stmt_iterator *, bool *,
2804 struct walk_stmt_info *);
2805 static omp_context *maybe_lookup_ctx (gimple *);
2806
/* Duplicate #pragma omp simd, one for the scan input phase loop and one
   for scan phase loop.

   The original loop at GSI is wrapped in a GIMPLE_OMP_SCAN acting as
   the input phase, and a deep copy of it wrapped in a second
   GIMPLE_OMP_SCAN becomes the scan phase; the copy's inner scan body
   is emptied since only the original performs the input work.  */

static void
scan_omp_simd_scan (gimple_stmt_iterator *gsi, gomp_for *stmt,
		    omp_context *outer_ctx)
{
  /* The only change between inclusive and exclusive scan will be
     within the first simd loop, so just use inclusive in the
     worksharing loop.  */
  outer_ctx->scan_inclusive = true;
  tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_INCLUSIVE);
  OMP_CLAUSE_DECL (c) = integer_zero_node;

  /* Replace the loop with an input-phase GIMPLE_OMP_SCAN and insert a
     scan-phase GIMPLE_OMP_SCAN (carrying the inclusive clause) after it.  */
  gomp_scan *input_stmt = gimple_build_omp_scan (NULL, NULL_TREE);
  gomp_scan *scan_stmt = gimple_build_omp_scan (NULL, c);
  gsi_replace (gsi, input_stmt, false);
  gimple_seq input_body = NULL;
  gimple_seq_add_stmt (&input_body, stmt);
  gsi_insert_after (gsi, scan_stmt, GSI_NEW_STMT);

  /* Locate the first inner GIMPLE_OMP_SCAN within the original loop
     body; omp_find_scan stores its iterator through wi.info.  */
  gimple_stmt_iterator input1_gsi = gsi_none ();
  struct walk_stmt_info wi;
  memset (&wi, 0, sizeof (wi));
  wi.val_only = true;
  wi.info = (void *) &input1_gsi;
  walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), omp_find_scan, NULL, &wi);
  gcc_assert (!gsi_end_p (input1_gsi));

  /* The found scan and the statement right after it form the inner
     input/scan pair; for exclusive scans their roles are swapped.  */
  gimple *input_stmt1 = gsi_stmt (input1_gsi);
  gsi_next (&input1_gsi);
  gimple *scan_stmt1 = gsi_stmt (input1_gsi);
  gcc_assert (scan_stmt1 && gimple_code (scan_stmt1) == GIMPLE_OMP_SCAN);
  c = gimple_omp_scan_clauses (as_a <gomp_scan *> (scan_stmt1));
  if (c && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_EXCLUSIVE)
    std::swap (input_stmt1, scan_stmt1);

  /* Temporarily detach the inner input body so the deep copy below
     does not duplicate it, then restore it on the original.  */
  gimple_seq input_body1 = gimple_omp_body (input_stmt1);
  gimple_omp_set_body (input_stmt1, NULL);

  gimple_seq scan_body = copy_gimple_seq_and_replace_locals (stmt);
  gomp_for *new_stmt = as_a <gomp_for *> (scan_body);

  gimple_omp_set_body (input_stmt1, input_body1);
  gimple_omp_set_body (scan_stmt1, NULL);

  /* Find the corresponding inner scan pair in the copy.  */
  gimple_stmt_iterator input2_gsi = gsi_none ();
  memset (&wi, 0, sizeof (wi));
  wi.val_only = true;
  wi.info = (void *) &input2_gsi;
  walk_gimple_seq_mod (gimple_omp_body_ptr (new_stmt), omp_find_scan,
		       NULL, &wi);
  gcc_assert (!gsi_end_p (input2_gsi));

  gimple *input_stmt2 = gsi_stmt (input2_gsi);
  gsi_next (&input2_gsi);
  gimple *scan_stmt2 = gsi_stmt (input2_gsi);
  gcc_assert (scan_stmt2 && gimple_code (scan_stmt2) == GIMPLE_OMP_SCAN);
  if (c && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_EXCLUSIVE)
    std::swap (input_stmt2, scan_stmt2);

  /* The copy's input phase must be empty: input work runs only once,
     in the original loop.  */
  gimple_omp_set_body (input_stmt2, NULL);

  /* Attach the original loop to the input phase and the copy to the
     scan phase, then scan each in its own context.  */
  gimple_omp_set_body (input_stmt, input_body);
  gimple_omp_set_body (scan_stmt, scan_body);

  omp_context *ctx = new_omp_context (input_stmt, outer_ctx);
  scan_omp (gimple_omp_body_ptr (input_stmt), ctx);

  ctx = new_omp_context (scan_stmt, outer_ctx);
  scan_omp (gimple_omp_body_ptr (scan_stmt), ctx);

  maybe_lookup_ctx (new_stmt)->for_simd_scan_phase = true;
}
2881
2882 /* Scan an OpenMP sections directive. */
2883
2884 static void
scan_omp_sections(gomp_sections * stmt,omp_context * outer_ctx)2885 scan_omp_sections (gomp_sections *stmt, omp_context *outer_ctx)
2886 {
2887 omp_context *ctx;
2888
2889 ctx = new_omp_context (stmt, outer_ctx);
2890 scan_sharing_clauses (gimple_omp_sections_clauses (stmt), ctx);
2891 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2892 }
2893
2894 /* Scan an OpenMP single directive. */
2895
2896 static void
scan_omp_single(gomp_single * stmt,omp_context * outer_ctx)2897 scan_omp_single (gomp_single *stmt, omp_context *outer_ctx)
2898 {
2899 omp_context *ctx;
2900 tree name;
2901
2902 ctx = new_omp_context (stmt, outer_ctx);
2903 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2904 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2905 name = create_tmp_var_name (".omp_copy_s");
2906 name = build_decl (gimple_location (stmt),
2907 TYPE_DECL, name, ctx->record_type);
2908 TYPE_NAME (ctx->record_type) = name;
2909
2910 scan_sharing_clauses (gimple_omp_single_clauses (stmt), ctx);
2911 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2912
2913 if (TYPE_FIELDS (ctx->record_type) == NULL)
2914 ctx->record_type = NULL;
2915 else
2916 layout_type (ctx->record_type);
2917 }
2918
/* Scan a GIMPLE_OMP_TARGET.  Builds the context and the .omp_data_t
   record that carries the mapped data, creates the offload child
   function for offloaded target kinds, and diagnoses a target region
   that mixes a nested teams construct with other directives.  */

static void
scan_omp_target (gomp_target *stmt, omp_context *outer_ctx)
{
  omp_context *ctx;
  tree name;
  bool offloaded = is_gimple_omp_offloaded (stmt);
  tree clauses = gimple_omp_target_clauses (stmt);

  ctx = new_omp_context (stmt, outer_ctx);
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  /* Name the record .omp_data_t; artificial and nameless so it does not
     show up in user-visible debug info.  */
  name = create_tmp_var_name (".omp_data_t");
  name = build_decl (gimple_location (stmt),
		     TYPE_DECL, name, ctx->record_type);
  DECL_ARTIFICIAL (name) = 1;
  DECL_NAMELESS (name) = 1;
  TYPE_NAME (ctx->record_type) = name;
  TYPE_ARTIFICIAL (ctx->record_type) = 1;

  if (offloaded)
    {
      /* Only offloaded kinds (target regions, OpenACC compute regions)
	 get an outlined child function.  */
      create_omp_child_function (ctx, false);
      gimple_omp_target_set_child_fn (stmt, ctx->cb.dst_fn);
    }

  scan_sharing_clauses (clauses, ctx);
  scan_omp (gimple_omp_body_ptr (stmt), ctx);

  if (TYPE_FIELDS (ctx->record_type) == NULL)
    /* Nothing was mapped; drop the record and receiver entirely.  */
    ctx->record_type = ctx->receiver_decl = NULL;
  else
    {
      /* Fields were pushed in reverse; restore declaration order before
	 laying the record out.  */
      TYPE_FIELDS (ctx->record_type)
	= nreverse (TYPE_FIELDS (ctx->record_type));
      if (flag_checking)
	{
	  /* All fields are expected to share one alignment; verify
	     against the first field's DECL_ALIGN.  */
	  unsigned int align = DECL_ALIGN (TYPE_FIELDS (ctx->record_type));
	  for (tree field = TYPE_FIELDS (ctx->record_type);
	       field;
	       field = DECL_CHAIN (field))
	    gcc_assert (DECL_ALIGN (field) == align);
	}
      layout_type (ctx->record_type);
      if (offloaded)
	fixup_child_record_type (ctx);
    }

  /* teams_nested_p/nonteams_nested_p are set while checking nesting
     restrictions of the directives inside this region.  */
  if (ctx->teams_nested_p && ctx->nonteams_nested_p)
    {
      error_at (gimple_location (stmt),
		"%<target%> construct with nested %<teams%> construct "
		"contains directives outside of the %<teams%> construct");
      /* Replace the erroneous body with an empty bind to avoid ICEs in
	 later passes.  */
      gimple_omp_set_body (stmt, gimple_build_bind (NULL, NULL, NULL));
    }
}
2976
/* Scan an OpenMP teams directive.  Non-host teams (nested in a target
   region) only need clause and body scanning; host teams are outlined
   into a child function like parallel/task bodies and so get a
   .omp_data_s receiver record as well.  */

static void
scan_omp_teams (gomp_teams *stmt, omp_context *outer_ctx)
{
  omp_context *ctx = new_omp_context (stmt, outer_ctx);

  if (!gimple_omp_teams_host (stmt))
    {
      scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
      scan_omp (gimple_omp_body_ptr (stmt), ctx);
      return;
    }
  /* Host teams: queue the context for the later field-layout fixups done
     for all task/parallel-like regions.  */
  taskreg_contexts.safe_push (ctx);
  gcc_assert (taskreg_nesting_level == 1);
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  tree name = create_tmp_var_name (".omp_data_s");
  name = build_decl (gimple_location (stmt),
		     TYPE_DECL, name, ctx->record_type);
  DECL_ARTIFICIAL (name) = 1;
  DECL_NAMELESS (name) = 1;
  TYPE_NAME (ctx->record_type) = name;
  TYPE_ARTIFICIAL (ctx->record_type) = 1;
  create_omp_child_function (ctx, false);
  gimple_omp_teams_set_child_fn (stmt, ctx->cb.dst_fn);

  scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
  scan_omp (gimple_omp_body_ptr (stmt), ctx);

  /* No data needs to be marshalled; drop the record and receiver.  */
  if (TYPE_FIELDS (ctx->record_type) == NULL)
    ctx->record_type = ctx->receiver_decl = NULL;
}
3010
/* Check nesting restrictions.  Return true if STMT (an OMP directive or
   a GOMP builtin call) is allowed in its enclosing context CTX (NULL for
   an orphaned construct); otherwise emit a diagnostic and return false,
   in which case the caller replaces STMT with a nop.  */
static bool
check_omp_nesting_restrictions (gimple *stmt, omp_context *ctx)
{
  tree c;

  /* No nesting of non-OpenACC STMT (that is, an OpenMP one, or a GOMP builtin)
     inside an OpenACC CTX.  */
  if (!(is_gimple_omp (stmt)
	&& is_gimple_omp_oacc (stmt))
      /* Except for atomic codes that we share with OpenMP.  */
      && !(gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
	   || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE))
    {
      if (oacc_get_fn_attrib (cfun->decl) != NULL)
	{
	  error_at (gimple_location (stmt),
		    "non-OpenACC construct inside of OpenACC routine");
	  return false;
	}
      else
	for (omp_context *octx = ctx; octx != NULL; octx = octx->outer)
	  if (is_gimple_omp (octx->stmt)
	      && is_gimple_omp_oacc (octx->stmt))
	    {
	      error_at (gimple_location (stmt),
			"non-OpenACC construct inside of OpenACC region");
	      return false;
	    }
    }

  if (ctx != NULL)
    {
      /* Track whether a target region contains a nested teams construct
	 and/or any other directive; scan_omp_target diagnoses a mix.  */
      if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET
	  && gimple_omp_target_kind (ctx->stmt) == GF_OMP_TARGET_KIND_REGION)
	{
	  if (gimple_code (stmt) == GIMPLE_OMP_TEAMS && !ctx->teams_nested_p)
	    ctx->teams_nested_p = true;
	  else
	    ctx->nonteams_nested_p = true;
	}
      /* A directive inside a scan construct that is directly nested in a
	 loop is checked as if it appeared in that loop's context.  */
      if (gimple_code (ctx->stmt) == GIMPLE_OMP_SCAN
	  && ctx->outer
	  && gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
	ctx = ctx->outer;
      /* Only a restricted set of constructs may appear inside a simd
	 region.  */
      if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
	  && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD
	  && !ctx->loop_p)
	{
	  c = NULL_TREE;
	  if (ctx->order_concurrent
	      && (gimple_code (stmt) == GIMPLE_OMP_ORDERED
		  || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
		  || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE))
	    {
	      error_at (gimple_location (stmt),
			"OpenMP constructs other than %<parallel%>, %<loop%>"
			" or %<simd%> may not be nested inside a region with"
			" the %<order(concurrent)%> clause");
	      return false;
	    }
	  if (gimple_code (stmt) == GIMPLE_OMP_ORDERED)
	    {
	      c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
	      if (omp_find_clause (c, OMP_CLAUSE_SIMD))
		{
		  /* ordered simd threads is only valid when this simd is
		     the simd half of a combined for simd.  */
		  if (omp_find_clause (c, OMP_CLAUSE_THREADS)
		      && (ctx->outer == NULL
			  || !gimple_omp_for_combined_into_p (ctx->stmt)
			  || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR
			  || (gimple_omp_for_kind (ctx->outer->stmt)
			      != GF_OMP_FOR_KIND_FOR)
			  || !gimple_omp_for_combined_p (ctx->outer->stmt)))
		    {
		      error_at (gimple_location (stmt),
				"%<ordered simd threads%> must be closely "
				"nested inside of %<%s simd%> region",
				lang_GNU_Fortran () ? "do" : "for");
		      return false;
		    }
		  return true;
		}
	    }
	  else if (gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
		   || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE
		   || gimple_code (stmt) == GIMPLE_OMP_SCAN)
	    return true;
	  else if (gimple_code (stmt) == GIMPLE_OMP_FOR
		   && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
	    return true;
	  error_at (gimple_location (stmt),
		    "OpenMP constructs other than "
		    "%<ordered simd%>, %<simd%>, %<loop%> or %<atomic%> may "
		    "not be nested inside %<simd%> region");
	  return false;
	}
      else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
	{
	  /* Only distribute, parallel, or loop (which carries a BIND
	     clause) may be strictly nested in teams.  */
	  if ((gimple_code (stmt) != GIMPLE_OMP_FOR
	       || (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_DISTRIBUTE
		   && omp_find_clause (gimple_omp_for_clauses (stmt),
				       OMP_CLAUSE_BIND) == NULL_TREE))
	      && gimple_code (stmt) != GIMPLE_OMP_PARALLEL)
	    {
	      error_at (gimple_location (stmt),
			"only %<distribute%>, %<parallel%> or %<loop%> "
			"regions are allowed to be strictly nested inside "
			"%<teams%> region");
	      return false;
	    }
	}
      else if (ctx->order_concurrent
	       && gimple_code (stmt) != GIMPLE_OMP_PARALLEL
	       && (gimple_code (stmt) != GIMPLE_OMP_FOR
		   || gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_SIMD)
	       && gimple_code (stmt) != GIMPLE_OMP_SCAN)
	{
	  if (ctx->loop_p)
	    error_at (gimple_location (stmt),
		      "OpenMP constructs other than %<parallel%>, %<loop%> or "
		      "%<simd%> may not be nested inside a %<loop%> region");
	  else
	    error_at (gimple_location (stmt),
		      "OpenMP constructs other than %<parallel%>, %<loop%> or "
		      "%<simd%> may not be nested inside a region with "
		      "the %<order(concurrent)%> clause");
	  return false;
	}
    }
  /* Per-construct restrictions on STMT itself.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_OMP_FOR:
      if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_SIMD)
	return true;
      if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_DISTRIBUTE)
	{
	  if (ctx != NULL && gimple_code (ctx->stmt) != GIMPLE_OMP_TEAMS)
	    {
	      error_at (gimple_location (stmt),
			"%<distribute%> region must be strictly nested "
			"inside %<teams%> construct");
	      return false;
	    }
	  return true;
	}
      /* We split taskloop into task and nested taskloop in it.  */
      if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP)
	return true;
      /* For now, hope this will change and loop bind(parallel) will not
	 be allowed in lots of contexts.  */
      if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR
	  && omp_find_clause (gimple_omp_for_clauses (stmt), OMP_CLAUSE_BIND))
	return true;
      if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_OACC_LOOP)
	{
	  /* An OpenACC loop must sit in an OpenACC compute region, in
	     another OpenACC loop, or in an OpenACC routine.  */
	  bool ok = false;

	  if (ctx)
	    switch (gimple_code (ctx->stmt))
	      {
	      case GIMPLE_OMP_FOR:
		ok = (gimple_omp_for_kind (ctx->stmt)
		      == GF_OMP_FOR_KIND_OACC_LOOP);
		break;

	      case GIMPLE_OMP_TARGET:
		switch (gimple_omp_target_kind (ctx->stmt))
		  {
		  case GF_OMP_TARGET_KIND_OACC_PARALLEL:
		  case GF_OMP_TARGET_KIND_OACC_KERNELS:
		  case GF_OMP_TARGET_KIND_OACC_SERIAL:
		  case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED:
		  case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE:
		    ok = true;
		    break;

		  default:
		    break;
		  }
		/* Falls through to the outer default, which is a no-op.  */

	      default:
		break;
	      }
	  else if (oacc_get_fn_attrib (current_function_decl))
	    ok = true;
	  if (!ok)
	    {
	      error_at (gimple_location (stmt),
			"OpenACC loop directive must be associated with"
			" an OpenACC compute region");
	      return false;
	    }
	}
      /* FALLTHRU */
    case GIMPLE_CALL:
      /* Only the GOMP_cancel and GOMP_cancellation_point builtins reach
	 here among calls (see scan_omp_1_stmt).  */
      if (is_gimple_call (stmt)
	  && (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
	      == BUILT_IN_GOMP_CANCEL
	      || DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
		 == BUILT_IN_GOMP_CANCELLATION_POINT))
	{
	  const char *bad = NULL;
	  const char *kind = NULL;
	  const char *construct
	    = (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
	       == BUILT_IN_GOMP_CANCEL)
	      ? "cancel"
	      : "cancellation point";
	  if (ctx == NULL)
	    {
	      error_at (gimple_location (stmt), "orphaned %qs construct",
			construct);
	      return false;
	    }
	  /* The first argument encodes the construct being cancelled:
	     1 parallel, 2 for, 4 sections, 8 taskgroup.  */
	  switch (tree_fits_shwi_p (gimple_call_arg (stmt, 0))
		  ? tree_to_shwi (gimple_call_arg (stmt, 0))
		  : 0)
	    {
	    case 1:
	      if (gimple_code (ctx->stmt) != GIMPLE_OMP_PARALLEL)
		bad = "parallel";
	      else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
		       == BUILT_IN_GOMP_CANCEL
		       && !integer_zerop (gimple_call_arg (stmt, 1)))
		ctx->cancellable = true;
	      kind = "parallel";
	      break;
	    case 2:
	      if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
		  || gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR)
		bad = "for";
	      else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
		       == BUILT_IN_GOMP_CANCEL
		       && !integer_zerop (gimple_call_arg (stmt, 1)))
		{
		  ctx->cancellable = true;
		  if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
				       OMP_CLAUSE_NOWAIT))
		    warning_at (gimple_location (stmt), 0,
				"%<cancel for%> inside "
				"%<nowait%> for construct");
		  if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
				       OMP_CLAUSE_ORDERED))
		    warning_at (gimple_location (stmt), 0,
				"%<cancel for%> inside "
				"%<ordered%> for construct");
		}
	      kind = "for";
	      break;
	    case 4:
	      if (gimple_code (ctx->stmt) != GIMPLE_OMP_SECTIONS
		  && gimple_code (ctx->stmt) != GIMPLE_OMP_SECTION)
		bad = "sections";
	      else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
		       == BUILT_IN_GOMP_CANCEL
		       && !integer_zerop (gimple_call_arg (stmt, 1)))
		{
		  if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
		    {
		      ctx->cancellable = true;
		      if (omp_find_clause (gimple_omp_sections_clauses
								(ctx->stmt),
					   OMP_CLAUSE_NOWAIT))
			warning_at (gimple_location (stmt), 0,
				    "%<cancel sections%> inside "
				    "%<nowait%> sections construct");
		    }
		  else
		    {
		      /* Inside a section; the cancellable flag belongs on
			 the enclosing sections context.  */
		      gcc_assert (ctx->outer
				  && gimple_code (ctx->outer->stmt)
				     == GIMPLE_OMP_SECTIONS);
		      ctx->outer->cancellable = true;
		      if (omp_find_clause (gimple_omp_sections_clauses
							(ctx->outer->stmt),
					   OMP_CLAUSE_NOWAIT))
			warning_at (gimple_location (stmt), 0,
				    "%<cancel sections%> inside "
				    "%<nowait%> sections construct");
		    }
		}
	      kind = "sections";
	      break;
	    case 8:
	      if (!is_task_ctx (ctx)
		  && (!is_taskloop_ctx (ctx)
		      || ctx->outer == NULL
		      || !is_task_ctx (ctx->outer)))
		bad = "task";
	      else
		{
		  /* Walk outward looking for the taskgroup this cancel
		     binds to; hitting a parallel/teams/target boundary
		     first means there is none.  */
		  for (omp_context *octx = ctx->outer;
		       octx; octx = octx->outer)
		    {
		      switch (gimple_code (octx->stmt))
			{
			case GIMPLE_OMP_TASKGROUP:
			  break;
			case GIMPLE_OMP_TARGET:
			  if (gimple_omp_target_kind (octx->stmt)
			      != GF_OMP_TARGET_KIND_REGION)
			    continue;
			  /* FALLTHRU */
			case GIMPLE_OMP_PARALLEL:
			case GIMPLE_OMP_TEAMS:
			  error_at (gimple_location (stmt),
				    "%<%s taskgroup%> construct not closely "
				    "nested inside of %<taskgroup%> region",
				    construct);
			  return false;
			case GIMPLE_OMP_TASK:
			  if (gimple_omp_task_taskloop_p (octx->stmt)
			      && octx->outer
			      && is_taskloop_ctx (octx->outer))
			    {
			      tree clauses
				= gimple_omp_for_clauses (octx->outer->stmt);
			      if (!omp_find_clause (clauses, OMP_CLAUSE_NOGROUP))
				break;
			    }
			  continue;
			default:
			  continue;
			}
		      break;
		    }
		  ctx->cancellable = true;
		}
	      kind = "taskgroup";
	      break;
	    default:
	      error_at (gimple_location (stmt), "invalid arguments");
	      return false;
	    }
	  if (bad)
	    {
	      error_at (gimple_location (stmt),
			"%<%s %s%> construct not closely nested inside of %qs",
			construct, kind, bad);
	      return false;
	    }
	}
      /* FALLTHRU */
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
      /* Worksharing regions (and barriers, which also fall through to
	 here) may not be closely nested in other worksharing, ordered,
	 master, critical or explicit task regions.  */
      for (; ctx != NULL; ctx = ctx->outer)
	switch (gimple_code (ctx->stmt))
	  {
	  case GIMPLE_OMP_FOR:
	    if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
		&& gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
	      break;
	    /* FALLTHRU */
	  case GIMPLE_OMP_SECTIONS:
	  case GIMPLE_OMP_SINGLE:
	  case GIMPLE_OMP_ORDERED:
	  case GIMPLE_OMP_MASTER:
	  case GIMPLE_OMP_TASK:
	  case GIMPLE_OMP_CRITICAL:
	    if (is_gimple_call (stmt))
	      {
		if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
		    != BUILT_IN_GOMP_BARRIER)
		  return true;
		error_at (gimple_location (stmt),
			  "barrier region may not be closely nested inside "
			  "of work-sharing, %<loop%>, %<critical%>, "
			  "%<ordered%>, %<master%>, explicit %<task%> or "
			  "%<taskloop%> region");
		return false;
	      }
	    error_at (gimple_location (stmt),
		      "work-sharing region may not be closely nested inside "
		      "of work-sharing, %<loop%>, %<critical%>, %<ordered%>, "
		      "%<master%>, explicit %<task%> or %<taskloop%> region");
	    return false;
	  case GIMPLE_OMP_PARALLEL:
	  case GIMPLE_OMP_TEAMS:
	    return true;
	  case GIMPLE_OMP_TARGET:
	    if (gimple_omp_target_kind (ctx->stmt)
		== GF_OMP_TARGET_KIND_REGION)
	      return true;
	    break;
	  default:
	    break;
	  }
      break;
    case GIMPLE_OMP_MASTER:
      for (; ctx != NULL; ctx = ctx->outer)
	switch (gimple_code (ctx->stmt))
	  {
	  case GIMPLE_OMP_FOR:
	    if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
		&& gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
	      break;
	    /* FALLTHRU */
	  case GIMPLE_OMP_SECTIONS:
	  case GIMPLE_OMP_SINGLE:
	  case GIMPLE_OMP_TASK:
	    error_at (gimple_location (stmt),
		      "%<master%> region may not be closely nested inside "
		      "of work-sharing, %<loop%>, explicit %<task%> or "
		      "%<taskloop%> region");
	    return false;
	  case GIMPLE_OMP_PARALLEL:
	  case GIMPLE_OMP_TEAMS:
	    return true;
	  case GIMPLE_OMP_TARGET:
	    if (gimple_omp_target_kind (ctx->stmt)
		== GF_OMP_TARGET_KIND_REGION)
	      return true;
	    break;
	  default:
	    break;
	  }
      break;
    case GIMPLE_OMP_TASK:
      /* depend(source)/depend(sink:...) are only valid on ordered.  */
      for (c = gimple_omp_task_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
	    && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
		|| OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
	  {
	    enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
	    error_at (OMP_CLAUSE_LOCATION (c),
		      "%<depend(%s)%> is only allowed in %<omp ordered%>",
		      kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
	    return false;
	  }
      break;
    case GIMPLE_OMP_ORDERED:
      for (c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
	   c; c = OMP_CLAUSE_CHAIN (c))
	{
	  if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
	    {
	      gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREADS
			  || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SIMD);
	      continue;
	    }
	  enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
	  if (kind == OMP_CLAUSE_DEPEND_SOURCE
	      || kind == OMP_CLAUSE_DEPEND_SINK)
	    {
	      tree oclause;
	      /* Look for containing ordered(N) loop.  */
	      if (ctx == NULL
		  || gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
		  || (oclause
			= omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
					   OMP_CLAUSE_ORDERED)) == NULL_TREE)
		{
		  error_at (OMP_CLAUSE_LOCATION (c),
			    "%<ordered%> construct with %<depend%> clause "
			    "must be closely nested inside an %<ordered%> "
			    "loop");
		  return false;
		}
	      else if (OMP_CLAUSE_ORDERED_EXPR (oclause) == NULL_TREE)
		{
		  error_at (OMP_CLAUSE_LOCATION (c),
			    "%<ordered%> construct with %<depend%> clause "
			    "must be closely nested inside a loop with "
			    "%<ordered%> clause with a parameter");
		  return false;
		}
	    }
	  else
	    {
	      error_at (OMP_CLAUSE_LOCATION (c),
			"invalid depend kind in omp %<ordered%> %<depend%>");
	      return false;
	    }
	}
      c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
      if (omp_find_clause (c, OMP_CLAUSE_SIMD))
	{
	  /* ordered simd must be closely nested inside of simd region,
	     and simd region must not encounter constructs other than
	     ordered simd, therefore ordered simd may be either orphaned,
	     or ctx->stmt must be simd.  The latter case is handled already
	     earlier.  */
	  if (ctx != NULL)
	    {
	      error_at (gimple_location (stmt),
			"%<ordered%> %<simd%> must be closely nested inside "
			"%<simd%> region");
	      return false;
	    }
	}
      for (; ctx != NULL; ctx = ctx->outer)
	switch (gimple_code (ctx->stmt))
	  {
	  case GIMPLE_OMP_CRITICAL:
	  case GIMPLE_OMP_TASK:
	  case GIMPLE_OMP_ORDERED:
	  ordered_in_taskloop:
	    error_at (gimple_location (stmt),
		      "%<ordered%> region may not be closely nested inside "
		      "of %<critical%>, %<ordered%>, explicit %<task%> or "
		      "%<taskloop%> region");
	    return false;
	  case GIMPLE_OMP_FOR:
	    if (gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP)
	      goto ordered_in_taskloop;
	    tree o;
	    o = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
				 OMP_CLAUSE_ORDERED);
	    if (o == NULL)
	      {
		error_at (gimple_location (stmt),
			  "%<ordered%> region must be closely nested inside "
			  "a loop region with an %<ordered%> clause");
		return false;
	      }
	    /* An ordered(N) loop requires the nested ordered construct
	       to carry a depend clause.  */
	    if (OMP_CLAUSE_ORDERED_EXPR (o) != NULL_TREE
		&& omp_find_clause (c, OMP_CLAUSE_DEPEND) == NULL_TREE)
	      {
		error_at (gimple_location (stmt),
			  "%<ordered%> region without %<depend%> clause may "
			  "not be closely nested inside a loop region with "
			  "an %<ordered%> clause with a parameter");
		return false;
	      }
	    return true;
	  case GIMPLE_OMP_TARGET:
	    if (gimple_omp_target_kind (ctx->stmt)
		!= GF_OMP_TARGET_KIND_REGION)
	      break;
	    /* FALLTHRU */
	  case GIMPLE_OMP_PARALLEL:
	  case GIMPLE_OMP_TEAMS:
	    error_at (gimple_location (stmt),
		      "%<ordered%> region must be closely nested inside "
		      "a loop region with an %<ordered%> clause");
	    return false;
	  default:
	    break;
	  }
      break;
    case GIMPLE_OMP_CRITICAL:
      {
	/* A critical region may not be nested (at any depth) inside a
	   critical region with the same name.  */
	tree this_stmt_name
	  = gimple_omp_critical_name (as_a <gomp_critical *> (stmt));
	for (; ctx != NULL; ctx = ctx->outer)
	  if (gomp_critical *other_crit
	        = dyn_cast <gomp_critical *> (ctx->stmt))
	    if (this_stmt_name == gimple_omp_critical_name (other_crit))
	      {
		error_at (gimple_location (stmt),
			  "%<critical%> region may not be nested inside "
			  "a %<critical%> region with the same name");
		return false;
	      }
      }
      break;
    case GIMPLE_OMP_TEAMS:
      if (ctx == NULL)
	break;
      else if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET
	       || (gimple_omp_target_kind (ctx->stmt)
		   != GF_OMP_TARGET_KIND_REGION))
	{
	  /* Teams construct can appear either strictly nested inside of
	     target construct with no intervening stmts, or can be encountered
	     only by initial task (so must not appear inside any OpenMP
	     construct.  */
	  error_at (gimple_location (stmt),
		    "%<teams%> construct must be closely nested inside of "
		    "%<target%> construct or not nested in any OpenMP "
		    "construct");
	  return false;
	}
      break;
    case GIMPLE_OMP_TARGET:
      /* depend(source)/depend(sink:...) are only valid on ordered.  */
      for (c = gimple_omp_target_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
	    && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
		|| OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
	  {
	    enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
	    error_at (OMP_CLAUSE_LOCATION (c),
		      "%<depend(%s)%> is only allowed in %<omp ordered%>",
		      kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
	    return false;
	  }
      if (is_gimple_omp_offloaded (stmt)
	  && oacc_get_fn_attrib (cfun->decl) != NULL)
	{
	  error_at (gimple_location (stmt),
		    "OpenACC region inside of OpenACC routine, nested "
		    "parallelism not supported yet");
	  return false;
	}
      for (; ctx != NULL; ctx = ctx->outer)
	{
	  if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET)
	    {
	      if (is_gimple_omp (stmt)
		  && is_gimple_omp_oacc (stmt)
		  && is_gimple_omp (ctx->stmt))
		{
		  error_at (gimple_location (stmt),
			    "OpenACC construct inside of non-OpenACC region");
		  return false;
		}
	      continue;
	    }

	  /* Map both directives to user-facing names for diagnostics.  */
	  const char *stmt_name, *ctx_stmt_name;
	  switch (gimple_omp_target_kind (stmt))
	    {
	    case GF_OMP_TARGET_KIND_REGION: stmt_name = "target"; break;
	    case GF_OMP_TARGET_KIND_DATA: stmt_name = "target data"; break;
	    case GF_OMP_TARGET_KIND_UPDATE: stmt_name = "target update"; break;
	    case GF_OMP_TARGET_KIND_ENTER_DATA:
	      stmt_name = "target enter data"; break;
	    case GF_OMP_TARGET_KIND_EXIT_DATA:
	      stmt_name = "target exit data"; break;
	    case GF_OMP_TARGET_KIND_OACC_PARALLEL: stmt_name = "parallel"; break;
	    case GF_OMP_TARGET_KIND_OACC_KERNELS: stmt_name = "kernels"; break;
	    case GF_OMP_TARGET_KIND_OACC_SERIAL: stmt_name = "serial"; break;
	    case GF_OMP_TARGET_KIND_OACC_DATA: stmt_name = "data"; break;
	    case GF_OMP_TARGET_KIND_OACC_UPDATE: stmt_name = "update"; break;
	    case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA:
	      stmt_name = "enter/exit data"; break;
	    case GF_OMP_TARGET_KIND_OACC_DECLARE: stmt_name = "declare"; break;
	    case GF_OMP_TARGET_KIND_OACC_HOST_DATA: stmt_name = "host_data";
	      break;
	    case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED:
	    case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE:
	    case GF_OMP_TARGET_KIND_OACC_DATA_KERNELS:
	      /* OpenACC 'kernels' decomposed parts.  */
	      stmt_name = "kernels"; break;
	    default: gcc_unreachable ();
	    }
	  switch (gimple_omp_target_kind (ctx->stmt))
	    {
	    case GF_OMP_TARGET_KIND_REGION: ctx_stmt_name = "target"; break;
	    case GF_OMP_TARGET_KIND_DATA: ctx_stmt_name = "target data"; break;
	    case GF_OMP_TARGET_KIND_OACC_PARALLEL:
	      ctx_stmt_name = "parallel"; break;
	    case GF_OMP_TARGET_KIND_OACC_KERNELS:
	      ctx_stmt_name = "kernels"; break;
	    case GF_OMP_TARGET_KIND_OACC_SERIAL:
	      ctx_stmt_name = "serial"; break;
	    case GF_OMP_TARGET_KIND_OACC_DATA: ctx_stmt_name = "data"; break;
	    case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
	      ctx_stmt_name = "host_data"; break;
	    case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED:
	    case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE:
	    case GF_OMP_TARGET_KIND_OACC_DATA_KERNELS:
	      /* OpenACC 'kernels' decomposed parts.  */
	      ctx_stmt_name = "kernels"; break;
	    default: gcc_unreachable ();
	    }

	  /* OpenACC/OpenMP mismatch?  */
	  if (is_gimple_omp_oacc (stmt)
	      != is_gimple_omp_oacc (ctx->stmt))
	    {
	      error_at (gimple_location (stmt),
			"%s %qs construct inside of %s %qs region",
			(is_gimple_omp_oacc (stmt)
			 ? "OpenACC" : "OpenMP"), stmt_name,
			(is_gimple_omp_oacc (ctx->stmt)
			 ? "OpenACC" : "OpenMP"), ctx_stmt_name);
	      return false;
	    }
	  if (is_gimple_omp_offloaded (ctx->stmt))
	    {
	      /* No GIMPLE_OMP_TARGET inside offloaded OpenACC CTX.  */
	      if (is_gimple_omp_oacc (ctx->stmt))
		{
		  error_at (gimple_location (stmt),
			    "%qs construct inside of %qs region",
			    stmt_name, ctx_stmt_name);
		  return false;
		}
	      else
		{
		  warning_at (gimple_location (stmt), 0,
			      "%qs construct inside of %qs region",
			      stmt_name, ctx_stmt_name);
		}
	    }
	}
      break;
    default:
      break;
    }
  return true;
}
3704
3705
/* Helper function for scan_omp.

   Callback for walk_tree or operators in walk_gimple_stmt used to
   scan for OMP directives in TP.  Remaps decls and types of operands
   into the copy body of the context found in the walk info DATA.  */

static tree
scan_omp_1_op (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  omp_context *ctx = (omp_context *) wi->info;
  tree t = *tp;

  switch (TREE_CODE (t))
    {
    case VAR_DECL:
    case PARM_DECL:
    case LABEL_DECL:
    case RESULT_DECL:
      if (ctx)
	{
	  /* Replace the decl with its remapped counterpart from the
	     context's copy-body data.  */
	  tree repl = remap_decl (t, &ctx->cb);
	  gcc_checking_assert (TREE_CODE (repl) != ERROR_MARK);
	  *tp = repl;
	}
      break;

    default:
      if (ctx && TYPE_P (t))
	*tp = remap_type (t, &ctx->cb);
      else if (!DECL_P (t))
	{
	  *walk_subtrees = 1;
	  if (ctx)
	    {
	      /* If remapping changed the operand's type, propagate the
		 new type.  INTEGER_CSTs are shared trees, so build a
		 fresh constant instead of mutating TREE_TYPE in place.  */
	      tree tem = remap_type (TREE_TYPE (t), &ctx->cb);
	      if (tem != TREE_TYPE (t))
		{
		  if (TREE_CODE (t) == INTEGER_CST)
		    *tp = wide_int_to_tree (tem, wi::to_wide (t));
		  else
		    TREE_TYPE (t) = tem;
		}
	    }
	}
      break;
    }

  return NULL_TREE;
}
3755
3756 /* Return true if FNDECL is a setjmp or a longjmp. */
3757
3758 static bool
setjmp_or_longjmp_p(const_tree fndecl)3759 setjmp_or_longjmp_p (const_tree fndecl)
3760 {
3761 if (fndecl_built_in_p (fndecl, BUILT_IN_SETJMP)
3762 || fndecl_built_in_p (fndecl, BUILT_IN_LONGJMP))
3763 return true;
3764
3765 tree declname = DECL_NAME (fndecl);
3766 if (!declname
3767 || (DECL_CONTEXT (fndecl) != NULL_TREE
3768 && TREE_CODE (DECL_CONTEXT (fndecl)) != TRANSLATION_UNIT_DECL)
3769 || !TREE_PUBLIC (fndecl))
3770 return false;
3771
3772 const char *name = IDENTIFIER_POINTER (declname);
3773 return !strcmp (name, "setjmp") || !strcmp (name, "longjmp");
3774 }
3775
3776 /* Return true if FNDECL is an omp_* runtime API call. */
3777
3778 static bool
omp_runtime_api_call(const_tree fndecl)3779 omp_runtime_api_call (const_tree fndecl)
3780 {
3781 tree declname = DECL_NAME (fndecl);
3782 if (!declname
3783 || (DECL_CONTEXT (fndecl) != NULL_TREE
3784 && TREE_CODE (DECL_CONTEXT (fndecl)) != TRANSLATION_UNIT_DECL)
3785 || !TREE_PUBLIC (fndecl))
3786 return false;
3787
3788 const char *name = IDENTIFIER_POINTER (declname);
3789 if (strncmp (name, "omp_", 4) != 0)
3790 return false;
3791
3792 static const char *omp_runtime_apis[] =
3793 {
3794 /* This array has 3 sections. First omp_* calls that don't
3795 have any suffixes. */
3796 "target_alloc",
3797 "target_associate_ptr",
3798 "target_disassociate_ptr",
3799 "target_free",
3800 "target_is_present",
3801 "target_memcpy",
3802 "target_memcpy_rect",
3803 NULL,
3804 /* Now omp_* calls that are available as omp_* and omp_*_. */
3805 "capture_affinity",
3806 "destroy_lock",
3807 "destroy_nest_lock",
3808 "display_affinity",
3809 "get_active_level",
3810 "get_affinity_format",
3811 "get_cancellation",
3812 "get_default_device",
3813 "get_dynamic",
3814 "get_initial_device",
3815 "get_level",
3816 "get_max_active_levels",
3817 "get_max_task_priority",
3818 "get_max_threads",
3819 "get_nested",
3820 "get_num_devices",
3821 "get_num_places",
3822 "get_num_procs",
3823 "get_num_teams",
3824 "get_num_threads",
3825 "get_partition_num_places",
3826 "get_place_num",
3827 "get_proc_bind",
3828 "get_team_num",
3829 "get_thread_limit",
3830 "get_thread_num",
3831 "get_wtick",
3832 "get_wtime",
3833 "in_final",
3834 "in_parallel",
3835 "init_lock",
3836 "init_nest_lock",
3837 "is_initial_device",
3838 "pause_resource",
3839 "pause_resource_all",
3840 "set_affinity_format",
3841 "set_lock",
3842 "set_nest_lock",
3843 "test_lock",
3844 "test_nest_lock",
3845 "unset_lock",
3846 "unset_nest_lock",
3847 NULL,
3848 /* And finally calls available as omp_*, omp_*_ and omp_*_8_. */
3849 "get_ancestor_thread_num",
3850 "get_partition_place_nums",
3851 "get_place_num_procs",
3852 "get_place_proc_ids",
3853 "get_schedule",
3854 "get_team_size",
3855 "set_default_device",
3856 "set_dynamic",
3857 "set_max_active_levels",
3858 "set_nested",
3859 "set_num_threads",
3860 "set_schedule"
3861 };
3862
3863 int mode = 0;
3864 for (unsigned i = 0; i < ARRAY_SIZE (omp_runtime_apis); i++)
3865 {
3866 if (omp_runtime_apis[i] == NULL)
3867 {
3868 mode++;
3869 continue;
3870 }
3871 size_t len = strlen (omp_runtime_apis[i]);
3872 if (strncmp (name + 4, omp_runtime_apis[i], len) == 0
3873 && (name[4 + len] == '\0'
3874 || (mode > 0
3875 && name[4 + len] == '_'
3876 && (name[4 + len + 1] == '\0'
3877 || (mode > 1
3878 && strcmp (name + 4 + len + 1, "8_") == 0)))))
3879 return true;
3880 }
3881 return false;
3882 }
3883
/* Helper function for scan_omp.

   Callback for walk_gimple_stmt used to scan for OMP directives in
   the current statement in GSI.  CTX (carried in WI->info) is the
   innermost enclosing OMP context, or NULL at the outermost level.
   Diagnoses nesting violations, replacing offending statements with
   GIMPLE_NOP, and dispatches each OMP construct to its dedicated
   scan_omp_* routine (which creates a new omp_context for it).  */

static tree
scan_omp_1_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
		 struct walk_stmt_info *wi)
{
  gimple *stmt = gsi_stmt (*gsi);
  omp_context *ctx = (omp_context *) wi->info;

  /* Keep input_location current so diagnostics emitted below (and by
     the scan_omp_* helpers) point at the statement being scanned.  */
  if (gimple_has_location (stmt))
    input_location = gimple_location (stmt);

  /* Check the nesting restrictions.  */
  bool remove = false;
  if (is_gimple_omp (stmt))
    remove = !check_omp_nesting_restrictions (stmt, ctx);
  else if (is_gimple_call (stmt))
    {
      tree fndecl = gimple_call_fndecl (stmt);
      if (fndecl)
	{
	  /* setjmp/longjmp are not allowed directly inside a simd
	     region (ctx->loop_p means we are in a loop body where it
	     is tolerated).  */
	  if (ctx
	      && gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
	      && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD
	      && setjmp_or_longjmp_p (fndecl)
	      && !ctx->loop_p)
	    {
	      remove = true;
	      error_at (gimple_location (stmt),
			"setjmp/longjmp inside %<simd%> construct");
	    }
	  else if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
	    /* These GOMP builtins act like standalone OMP directives
	       and must obey the same nesting rules.  */
	    switch (DECL_FUNCTION_CODE (fndecl))
	      {
	      case BUILT_IN_GOMP_BARRIER:
	      case BUILT_IN_GOMP_CANCEL:
	      case BUILT_IN_GOMP_CANCELLATION_POINT:
	      case BUILT_IN_GOMP_TASKYIELD:
	      case BUILT_IN_GOMP_TASKWAIT:
	      case BUILT_IN_GOMP_TASKGROUP_START:
	      case BUILT_IN_GOMP_TASKGROUP_END:
		remove = !check_omp_nesting_restrictions (stmt, ctx);
		break;
	      default:
		break;
	      }
	  else if (ctx)
	    {
	      /* OpenMP runtime API calls are not permitted in regions
	         with an order(concurrent) clause.  For a scan construct
	         the relevant ordering clause sits on the enclosing
	         worksharing loop, hence the hop to ctx->outer.  */
	      omp_context *octx = ctx;
	      if (gimple_code (ctx->stmt) == GIMPLE_OMP_SCAN && ctx->outer)
		octx = ctx->outer;
	      if (octx->order_concurrent && omp_runtime_api_call (fndecl))
		{
		  remove = true;
		  error_at (gimple_location (stmt),
			    "OpenMP runtime API call %qD in a region with "
			    "%<order(concurrent)%> clause", fndecl);
		}
	    }
	}
    }
  if (remove)
    {
      /* Replace the erroneous statement with a no-op so lowering can
	 continue after the diagnostic.  */
      stmt = gimple_build_nop ();
      gsi_replace (gsi, stmt, false);
    }

  *handled_ops_p = true;

  switch (gimple_code (stmt))
    {
    case GIMPLE_OMP_PARALLEL:
      taskreg_nesting_level++;
      scan_omp_parallel (gsi, ctx);
      taskreg_nesting_level--;
      break;

    case GIMPLE_OMP_TASK:
      taskreg_nesting_level++;
      scan_omp_task (gsi, ctx);
      taskreg_nesting_level--;
      break;

    case GIMPLE_OMP_FOR:
      /* A simd loop combined into a worksharing construct with an
	 inscan reduction needs the special scan lowering path.  */
      if ((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
	   == GF_OMP_FOR_KIND_SIMD)
	  && gimple_omp_for_combined_into_p (stmt)
	  && gimple_code (ctx->stmt) != GIMPLE_OMP_SCAN)
	{
	  tree clauses = gimple_omp_for_clauses (as_a <gomp_for *> (stmt));
	  tree c = omp_find_clause (clauses, OMP_CLAUSE_REDUCTION);
	  if (c && OMP_CLAUSE_REDUCTION_INSCAN (c) && !seen_error ())
	    {
	      scan_omp_simd_scan (gsi, as_a <gomp_for *> (stmt), ctx);
	      break;
	    }
	}
      /* Non-collapsed simd loops that might be offloaded to a SIMT
	 target get the dual SIMT/SIMD scan.  */
      if ((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
	   == GF_OMP_FOR_KIND_SIMD)
	  && omp_maybe_offloaded_ctx (ctx)
	  && omp_max_simt_vf ()
	  && gimple_omp_for_collapse (stmt) == 1)
	scan_omp_simd (gsi, as_a <gomp_for *> (stmt), ctx);
      else
	scan_omp_for (as_a <gomp_for *> (stmt), ctx);
      break;

    case GIMPLE_OMP_SECTIONS:
      scan_omp_sections (as_a <gomp_sections *> (stmt), ctx);
      break;

    case GIMPLE_OMP_SINGLE:
      scan_omp_single (as_a <gomp_single *> (stmt), ctx);
      break;

    case GIMPLE_OMP_SCAN:
      /* Record on the context whether this is an inclusive or
	 exclusive scan, then fall through to generic handling.  */
      if (tree clauses = gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt)))
	{
	  if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_INCLUSIVE)
	    ctx->scan_inclusive = true;
	  else if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_EXCLUSIVE)
	    ctx->scan_exclusive = true;
	}
      /* FALLTHRU */
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_CRITICAL:
      ctx = new_omp_context (stmt, ctx);
      scan_omp (gimple_omp_body_ptr (stmt), ctx);
      break;

    case GIMPLE_OMP_TASKGROUP:
      ctx = new_omp_context (stmt, ctx);
      scan_sharing_clauses (gimple_omp_taskgroup_clauses (stmt), ctx);
      scan_omp (gimple_omp_body_ptr (stmt), ctx);
      break;

    case GIMPLE_OMP_TARGET:
      /* Offloaded target regions behave like task/parallel regions
	 for the purpose of nesting-level bookkeeping.  */
      if (is_gimple_omp_offloaded (stmt))
	{
	  taskreg_nesting_level++;
	  scan_omp_target (as_a <gomp_target *> (stmt), ctx);
	  taskreg_nesting_level--;
	}
      else
	scan_omp_target (as_a <gomp_target *> (stmt), ctx);
      break;

    case GIMPLE_OMP_TEAMS:
      /* Host teams are lowered like a taskreg construct.  */
      if (gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
	{
	  taskreg_nesting_level++;
	  scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
	  taskreg_nesting_level--;
	}
      else
	scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
      break;

    case GIMPLE_BIND:
      {
	tree var;

	/* Let the walker descend into the bind body; just seed the
	   context's decl map with identity mappings for the bind's
	   local variables.  */
	*handled_ops_p = false;
	if (ctx)
	  for (var = gimple_bind_vars (as_a <gbind *> (stmt));
	       var ;
	       var = DECL_CHAIN (var))
	    insert_decl_map (&ctx->cb, var, var);
      }
      break;
    default:
      *handled_ops_p = false;
      break;
    }

  return NULL_TREE;
}
4066
4067
4068 /* Scan all the statements starting at the current statement. CTX
4069 contains context information about the OMP directives and
4070 clauses found during the scan. */
4071
4072 static void
scan_omp(gimple_seq * body_p,omp_context * ctx)4073 scan_omp (gimple_seq *body_p, omp_context *ctx)
4074 {
4075 location_t saved_location;
4076 struct walk_stmt_info wi;
4077
4078 memset (&wi, 0, sizeof (wi));
4079 wi.info = ctx;
4080 wi.want_locations = true;
4081
4082 saved_location = input_location;
4083 walk_gimple_seq_mod (body_p, scan_omp_1_stmt, scan_omp_1_op, &wi);
4084 input_location = saved_location;
4085 }
4086
4087 /* Re-gimplification and code generation routines. */
4088
4089 /* Remove omp_member_access_dummy_var variables from gimple_bind_vars
4090 of BIND if in a method. */
4091
4092 static void
maybe_remove_omp_member_access_dummy_vars(gbind * bind)4093 maybe_remove_omp_member_access_dummy_vars (gbind *bind)
4094 {
4095 if (DECL_ARGUMENTS (current_function_decl)
4096 && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
4097 && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
4098 == POINTER_TYPE))
4099 {
4100 tree vars = gimple_bind_vars (bind);
4101 for (tree *pvar = &vars; *pvar; )
4102 if (omp_member_access_dummy_var (*pvar))
4103 *pvar = DECL_CHAIN (*pvar);
4104 else
4105 pvar = &DECL_CHAIN (*pvar);
4106 gimple_bind_set_vars (bind, vars);
4107 }
4108 }
4109
4110 /* Remove omp_member_access_dummy_var variables from BLOCK_VARS of
4111 block and its subblocks. */
4112
4113 static void
remove_member_access_dummy_vars(tree block)4114 remove_member_access_dummy_vars (tree block)
4115 {
4116 for (tree *pvar = &BLOCK_VARS (block); *pvar; )
4117 if (omp_member_access_dummy_var (*pvar))
4118 *pvar = DECL_CHAIN (*pvar);
4119 else
4120 pvar = &DECL_CHAIN (*pvar);
4121
4122 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
4123 remove_member_access_dummy_vars (block);
4124 }
4125
4126 /* If a context was created for STMT when it was scanned, return it. */
4127
4128 static omp_context *
maybe_lookup_ctx(gimple * stmt)4129 maybe_lookup_ctx (gimple *stmt)
4130 {
4131 splay_tree_node n;
4132 n = splay_tree_lookup (all_contexts, (splay_tree_key) stmt);
4133 return n ? (omp_context *) n->value : NULL;
4134 }
4135
4136
4137 /* Find the mapping for DECL in CTX or the immediately enclosing
4138 context that has a mapping for DECL.
4139
4140 If CTX is a nested parallel directive, we may have to use the decl
4141 mappings created in CTX's parent context. Suppose that we have the
4142 following parallel nesting (variable UIDs showed for clarity):
4143
4144 iD.1562 = 0;
4145 #omp parallel shared(iD.1562) -> outer parallel
4146 iD.1562 = iD.1562 + 1;
4147
4148 #omp parallel shared (iD.1562) -> inner parallel
4149 iD.1562 = iD.1562 - 1;
4150
4151 Each parallel structure will create a distinct .omp_data_s structure
4152 for copying iD.1562 in/out of the directive:
4153
4154 outer parallel .omp_data_s.1.i -> iD.1562
4155 inner parallel .omp_data_s.2.i -> iD.1562
4156
4157 A shared variable mapping will produce a copy-out operation before
4158 the parallel directive and a copy-in operation after it. So, in
4159 this case we would have:
4160
4161 iD.1562 = 0;
4162 .omp_data_o.1.i = iD.1562;
4163 #omp parallel shared(iD.1562) -> outer parallel
4164 .omp_data_i.1 = &.omp_data_o.1
4165 .omp_data_i.1->i = .omp_data_i.1->i + 1;
4166
4167 .omp_data_o.2.i = iD.1562; -> **
4168 #omp parallel shared(iD.1562) -> inner parallel
4169 .omp_data_i.2 = &.omp_data_o.2
4170 .omp_data_i.2->i = .omp_data_i.2->i - 1;
4171
4172
4173 ** This is a problem. The symbol iD.1562 cannot be referenced
4174 inside the body of the outer parallel region. But since we are
4175 emitting this copy operation while expanding the inner parallel
4176 directive, we need to access the CTX structure of the outer
4177 parallel directive to get the correct mapping:
4178
4179 .omp_data_o.2.i = .omp_data_i.1->i
4180
4181 Since there may be other workshare or parallel directives enclosing
4182 the parallel directive, it may be necessary to walk up the context
4183 parent chain. This is not a problem in general because nested
4184 parallelism happens only rarely. */
4185
4186 static tree
lookup_decl_in_outer_ctx(tree decl,omp_context * ctx)4187 lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
4188 {
4189 tree t;
4190 omp_context *up;
4191
4192 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
4193 t = maybe_lookup_decl (decl, up);
4194
4195 gcc_assert (!ctx->is_nested || t || is_global_var (decl));
4196
4197 return t ? t : decl;
4198 }
4199
4200
4201 /* Similar to lookup_decl_in_outer_ctx, but return DECL if not found
4202 in outer contexts. */
4203
4204 static tree
maybe_lookup_decl_in_outer_ctx(tree decl,omp_context * ctx)4205 maybe_lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
4206 {
4207 tree t = NULL;
4208 omp_context *up;
4209
4210 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
4211 t = maybe_lookup_decl (decl, up);
4212
4213 return t ? t : decl;
4214 }
4215
4216
/* Construct the initialization value for reduction operation OP.
   Returns the identity element of OP for TYPE, i.e. the value with
   which each thread's private reduction copy is initialized so that
   combining the copies yields the correct overall result.  */

tree
omp_reduction_init_op (location_t loc, enum tree_code op, tree type)
{
  switch (op)
    {
    /* Identity for addition, subtraction, inclusive/exclusive or and
       logical or/xor and inequality is zero (false).  */
    case PLUS_EXPR:
    case MINUS_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_ORIF_EXPR:
    case TRUTH_XOR_EXPR:
    case NE_EXPR:
      return build_zero_cst (type);

    /* Identity for multiplication, logical and and equality is one
       (true).  */
    case MULT_EXPR:
    case TRUTH_AND_EXPR:
    case TRUTH_ANDIF_EXPR:
    case EQ_EXPR:
      return fold_convert_loc (loc, type, integer_one_node);

    /* Identity for bitwise and is all-bits-set (-1).  */
    case BIT_AND_EXPR:
      return fold_convert_loc (loc, type, integer_minus_one_node);

    case MAX_EXPR:
      /* Identity for max is the smallest representable value of TYPE:
	 -inf for floats honoring infinities, otherwise the most
	 negative finite value; TYPE_MIN_VALUE for integers.  */
      if (SCALAR_FLOAT_TYPE_P (type))
	{
	  REAL_VALUE_TYPE max, min;
	  if (HONOR_INFINITIES (type))
	    {
	      real_inf (&max);
	      /* min = -inf.  */
	      real_arithmetic (&min, NEGATE_EXPR, &max, NULL);
	    }
	  else
	    /* Largest finite value with sign bit set, i.e. the most
	       negative finite value.  */
	    real_maxval (&min, 1, TYPE_MODE (type));
	  return build_real (type, min);
	}
      else if (POINTER_TYPE_P (type))
	{
	  wide_int min
	    = wi::min_value (TYPE_PRECISION (type), TYPE_SIGN (type));
	  return wide_int_to_tree (type, min);
	}
      else
	{
	  gcc_assert (INTEGRAL_TYPE_P (type));
	  return TYPE_MIN_VALUE (type);
	}

    case MIN_EXPR:
      /* Identity for min is the largest representable value of TYPE,
	 mirroring the MAX_EXPR case above.  */
      if (SCALAR_FLOAT_TYPE_P (type))
	{
	  REAL_VALUE_TYPE max;
	  if (HONOR_INFINITIES (type))
	    real_inf (&max);
	  else
	    real_maxval (&max, 0, TYPE_MODE (type));
	  return build_real (type, max);
	}
      else if (POINTER_TYPE_P (type))
	{
	  wide_int max
	    = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
	  return wide_int_to_tree (type, max);
	}
      else
	{
	  gcc_assert (INTEGRAL_TYPE_P (type));
	  return TYPE_MAX_VALUE (type);
	}

    default:
      gcc_unreachable ();
    }
}
4294
4295 /* Construct the initialization value for reduction CLAUSE. */
4296
4297 tree
omp_reduction_init(tree clause,tree type)4298 omp_reduction_init (tree clause, tree type)
4299 {
4300 return omp_reduction_init_op (OMP_CLAUSE_LOCATION (clause),
4301 OMP_CLAUSE_REDUCTION_CODE (clause), type);
4302 }
4303
/* Return alignment to be assumed for var in CLAUSE, which should be
   OMP_CLAUSE_ALIGNED.  If the clause carries no explicit alignment,
   compute the implementation-defined default: the largest alignment
   of any vector type the target's preferred SIMD modes would use for
   integer or floating-point elements.  */

static tree
omp_clause_aligned_alignment (tree clause)
{
  if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
    return OMP_CLAUSE_ALIGNED_ALIGNMENT (clause);

  /* Otherwise return implementation defined alignment.  */
  unsigned int al = 1;
  opt_scalar_mode mode_iter;
  auto_vector_modes modes;
  targetm.vectorize.autovectorize_vector_modes (&modes, true);
  /* Pairs of (scalar class, corresponding vector class); the loop
     below visits classes[0] and classes[2] (the scalar entries) and
     uses classes[i + 1] as the matching vector class.  */
  static enum mode_class classes[]
    = { MODE_INT, MODE_VECTOR_INT, MODE_FLOAT, MODE_VECTOR_FLOAT };
  for (int i = 0; i < 4; i += 2)
    /* The for loop above dictates that we only walk through scalar classes.  */
    FOR_EACH_MODE_IN_CLASS (mode_iter, classes[i])
      {
	scalar_mode mode = mode_iter.require ();
	machine_mode vmode = targetm.vectorize.preferred_simd_mode (mode);
	if (GET_MODE_CLASS (vmode) != classes[i + 1])
	  continue;
	/* Prefer the widest related autovectorization mode, if any.  */
	machine_mode alt_vmode;
	for (unsigned int j = 0; j < modes.length (); ++j)
	  if (related_vector_mode (modes[j], mode).exists (&alt_vmode)
	      && known_ge (GET_MODE_SIZE (alt_vmode), GET_MODE_SIZE (vmode)))
	    vmode = alt_vmode;

	/* Build the vector type for this element/vector mode pair and
	   skip it if the front end cannot represent it exactly.  */
	tree type = lang_hooks.types.type_for_mode (mode, 1);
	if (type == NULL_TREE || TYPE_MODE (type) != mode)
	  continue;
	type = build_vector_type_for_mode (type, vmode);
	if (TYPE_MODE (type) != vmode)
	  continue;
	if (TYPE_ALIGN_UNIT (type) > al)
	  al = TYPE_ALIGN_UNIT (type);
      }
  return build_int_cst (integer_type_node, al);
}
4345
4346
/* This structure is part of the interface between lower_rec_simd_input_clauses
   and lower_rec_input_clauses.  */

class omplow_simd_context {
public:
  omplow_simd_context () { memset (this, 0, sizeof (*this)); }
  /* Index variable used to subscript the per-lane "omp simd array"
     temporaries.  */
  tree idx;
  /* Lane number variable for the current SIMD lane.  */
  tree lane;
  /* Lane of the last iteration, for lastprivate handling; created
     lazily.  */
  tree lastlane;
  /* Extra arguments to pass to the GOMP_SIMT_ENTER_ALLOC sequence
     when lowering for SIMT.  */
  vec<tree, va_heap> simt_eargs;
  /* Statements clobbering SIMT privatized variables, emitted on
     region exit.  */
  gimple_seq simt_dlist;
  /* Maximum vectorization factor; 0 means not yet computed, 1 means
     SIMD privatization is disabled.  */
  poly_uint64_pod max_vf;
  /* True when lowering for SIMT execution rather than SIMD.  */
  bool is_simt;
};
4361
/* Helper function of lower_rec_input_clauses, used for #pragma omp simd
   privatization.  Privatize NEW_VAR for SIMD execution: for SIMT each
   lane gets its own register/stack copy, for SIMD the variable is
   replaced by a max_vf-element "omp simd array" indexed by the lane.
   On success sets IVAR to the per-iteration reference and LVAR to the
   reference used via DECL_VALUE_EXPR; for inscan reductions RVAR (and
   for exclusive scans RVAR2) receive additional array references.
   Returns false when SIMD privatization is not performed (max_vf is
   1).  */

static bool
lower_rec_simd_input_clauses (tree new_var, omp_context *ctx,
			      omplow_simd_context *sctx, tree &ivar,
			      tree &lvar, tree *rvar = NULL,
			      tree *rvar2 = NULL)
{
  /* First call: compute max_vf, clipped by any safelen clause, and
     create the shared idx/lane variables.  */
  if (known_eq (sctx->max_vf, 0U))
    {
      sctx->max_vf = sctx->is_simt ? omp_max_simt_vf () : omp_max_vf ();
      if (maybe_gt (sctx->max_vf, 1U))
	{
	  tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
				    OMP_CLAUSE_SAFELEN);
	  if (c)
	    {
	      poly_uint64 safe_len;
	      if (!poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
		  || maybe_lt (safe_len, 1U))
		sctx->max_vf = 1;
	      else
		sctx->max_vf = lower_bound (sctx->max_vf, safe_len);
	    }
	}
      if (maybe_gt (sctx->max_vf, 1U))
	{
	  sctx->idx = create_tmp_var (unsigned_type_node);
	  sctx->lane = create_tmp_var (unsigned_type_node);
	}
    }
  if (known_eq (sctx->max_vf, 1U))
    return false;

  if (sctx->is_simt)
    {
      /* For SIMT, registers are naturally per-lane; only addressable
	 variables need a private copy passed via simt_eargs and a
	 clobber on exit.  */
      if (is_gimple_reg (new_var))
	{
	  ivar = lvar = new_var;
	  return true;
	}
      tree type = TREE_TYPE (new_var), ptype = build_pointer_type (type);
      ivar = lvar = create_tmp_var (type);
      TREE_ADDRESSABLE (ivar) = 1;
      DECL_ATTRIBUTES (ivar) = tree_cons (get_identifier ("omp simt private"),
					  NULL, DECL_ATTRIBUTES (ivar));
      sctx->simt_eargs.safe_push (build1 (ADDR_EXPR, ptype, ivar));
      tree clobber = build_clobber (type);
      gimple *g = gimple_build_assign (ivar, clobber);
      gimple_seq_add_stmt (&sctx->simt_dlist, g);
    }
  else
    {
      /* For SIMD, replace the variable by an "omp simd array" of
	 max_vf elements; the vectorizer recognizes the attribute.  */
      tree atype = build_array_type_nelts (TREE_TYPE (new_var), sctx->max_vf);
      tree avar = create_tmp_var_raw (atype);
      if (TREE_ADDRESSABLE (new_var))
	TREE_ADDRESSABLE (avar) = 1;
      DECL_ATTRIBUTES (avar)
	= tree_cons (get_identifier ("omp simd array"), NULL,
		     DECL_ATTRIBUTES (avar));
      gimple_add_tmp_var (avar);
      tree iavar = avar;
      if (rvar && !ctx->for_simd_scan_phase)
	{
	  /* For inscan reductions, create another array temporary,
	     which will hold the reduced value.  */
	  iavar = create_tmp_var_raw (atype);
	  if (TREE_ADDRESSABLE (new_var))
	    TREE_ADDRESSABLE (iavar) = 1;
	  DECL_ATTRIBUTES (iavar)
	    = tree_cons (get_identifier ("omp simd array"), NULL,
			 tree_cons (get_identifier ("omp simd inscan"), NULL,
				    DECL_ATTRIBUTES (iavar)));
	  gimple_add_tmp_var (iavar);
	  ctx->cb.decl_map->put (avar, iavar);
	  if (sctx->lastlane == NULL_TREE)
	    sctx->lastlane = create_tmp_var (unsigned_type_node);
	  *rvar = build4 (ARRAY_REF, TREE_TYPE (new_var), iavar,
			  sctx->lastlane, NULL_TREE, NULL_TREE);
	  TREE_THIS_NOTRAP (*rvar) = 1;

	  if (ctx->scan_exclusive)
	    {
	      /* And for exclusive scan yet another one, which will
		 hold the value during the scan phase.  */
	      tree savar = create_tmp_var_raw (atype);
	      if (TREE_ADDRESSABLE (new_var))
		TREE_ADDRESSABLE (savar) = 1;
	      DECL_ATTRIBUTES (savar)
		= tree_cons (get_identifier ("omp simd array"), NULL,
			     tree_cons (get_identifier ("omp simd inscan "
							"exclusive"), NULL,
					DECL_ATTRIBUTES (savar)));
	      gimple_add_tmp_var (savar);
	      ctx->cb.decl_map->put (iavar, savar);
	      *rvar2 = build4 (ARRAY_REF, TREE_TYPE (new_var), savar,
			       sctx->idx, NULL_TREE, NULL_TREE);
	      TREE_THIS_NOTRAP (*rvar2) = 1;
	    }
	}
      /* Per-iteration reference indexes by idx, the value-expr
	 reference by lane.  */
      ivar = build4 (ARRAY_REF, TREE_TYPE (new_var), iavar, sctx->idx,
		     NULL_TREE, NULL_TREE);
      lvar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, sctx->lane,
		     NULL_TREE, NULL_TREE);
      TREE_THIS_NOTRAP (ivar) = 1;
      TREE_THIS_NOTRAP (lvar) = 1;
    }
  if (DECL_P (new_var))
    {
      /* Redirect all uses of NEW_VAR to the privatized reference.  */
      SET_DECL_VALUE_EXPR (new_var, lvar);
      DECL_HAS_VALUE_EXPR_P (new_var) = 1;
    }
  return true;
}
4477
4478 /* Helper function of lower_rec_input_clauses. For a reference
4479 in simd reduction, add an underlying variable it will reference. */
4480
4481 static void
handle_simd_reference(location_t loc,tree new_vard,gimple_seq * ilist)4482 handle_simd_reference (location_t loc, tree new_vard, gimple_seq *ilist)
4483 {
4484 tree z = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_vard)));
4485 if (TREE_CONSTANT (z))
4486 {
4487 z = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_vard)),
4488 get_name (new_vard));
4489 gimple_add_tmp_var (z);
4490 TREE_ADDRESSABLE (z) = 1;
4491 z = build_fold_addr_expr_loc (loc, z);
4492 gimplify_assign (new_vard, z, ilist);
4493 }
4494 }
4495
4496 /* Helper function for lower_rec_input_clauses. Emit into ilist sequence
4497 code to emit (type) (tskred_temp[idx]). */
4498
4499 static tree
task_reduction_read(gimple_seq * ilist,tree tskred_temp,tree type,unsigned idx)4500 task_reduction_read (gimple_seq *ilist, tree tskred_temp, tree type,
4501 unsigned idx)
4502 {
4503 unsigned HOST_WIDE_INT sz
4504 = tree_to_uhwi (TYPE_SIZE_UNIT (pointer_sized_int_node));
4505 tree r = build2 (MEM_REF, pointer_sized_int_node,
4506 tskred_temp, build_int_cst (TREE_TYPE (tskred_temp),
4507 idx * sz));
4508 tree v = create_tmp_var (pointer_sized_int_node);
4509 gimple *g = gimple_build_assign (v, r);
4510 gimple_seq_add_stmt (ilist, g);
4511 if (!useless_type_conversion_p (type, pointer_sized_int_node))
4512 {
4513 v = create_tmp_var (type);
4514 g = gimple_build_assign (v, NOP_EXPR, gimple_assign_lhs (g));
4515 gimple_seq_add_stmt (ilist, g);
4516 }
4517 return v;
4518 }
4519
/* Lower early initialization of privatized variable NEW_VAR
   if it needs an allocator (has allocate clause).  VAR is the
   original variable, ALLOCATOR/ALLOCATE_PTR are in-out: on entry
   ALLOCATOR may already be set (then nothing is done); on success
   they receive the allocator handle and the GOMP_alloc'd pointer.
   Emits the allocation call into ILIST.  IS_REF indicates NEW_VAR is
   a reference; SIZE optionally overrides the allocation size.
   Returns true when an allocation was emitted.  */

static bool
lower_private_allocate (tree var, tree new_var, tree &allocator,
			tree &allocate_ptr, gimple_seq *ilist,
			omp_context *ctx, bool is_ref, tree size)
{
  if (allocator)
    return false;
  gcc_assert (allocate_ptr == NULL_TREE);
  /* Look up VAR in the context's allocate clause map.  */
  if (ctx->allocate_map
      && (DECL_P (new_var) || (TYPE_P (new_var) && size)))
    if (tree *allocatorp = ctx->allocate_map->get (var))
      allocator = *allocatorp;
  if (allocator == NULL_TREE)
    return false;
  if (!is_ref && omp_is_reference (var))
    {
      /* The caller will handle the reference case itself later.  */
      allocator = NULL_TREE;
      return false;
    }

  /* A non-constant allocator is a variable from an outer context;
     materialize it into a temporary.  */
  if (TREE_CODE (allocator) != INTEGER_CST)
    allocator = build_outer_var_ref (allocator, ctx);
  allocator = fold_convert (pointer_sized_int_node, allocator);
  if (TREE_CODE (allocator) != INTEGER_CST)
    {
      tree var = create_tmp_var (TREE_TYPE (allocator));
      gimplify_assign (var, allocator, ilist);
      allocator = var;
    }

  /* Determine pointer type, alignment and size of the allocation,
     depending on whether NEW_VAR is a type, a reference or a plain
     decl.  */
  tree ptr_type, align, sz = size;
  if (TYPE_P (new_var))
    {
      ptr_type = build_pointer_type (new_var);
      align = build_int_cst (size_type_node, TYPE_ALIGN_UNIT (new_var));
    }
  else if (is_ref)
    {
      ptr_type = build_pointer_type (TREE_TYPE (TREE_TYPE (new_var)));
      align = build_int_cst (size_type_node,
			     TYPE_ALIGN_UNIT (TREE_TYPE (ptr_type)));
    }
  else
    {
      ptr_type = build_pointer_type (TREE_TYPE (new_var));
      align = build_int_cst (size_type_node, DECL_ALIGN_UNIT (new_var));
      if (sz == NULL_TREE)
	sz = fold_convert (size_type_node, DECL_SIZE_UNIT (new_var));
    }
  if (TREE_CODE (sz) != INTEGER_CST)
    {
      tree szvar = create_tmp_var (size_type_node);
      gimplify_assign (szvar, sz, ilist);
      sz = szvar;
    }
  /* Emit allocate_ptr = GOMP_alloc (align, sz, allocator).  */
  allocate_ptr = create_tmp_var (ptr_type);
  tree a = builtin_decl_explicit (BUILT_IN_GOMP_ALLOC);
  gimple *g = gimple_build_call (a, 3, align, sz, allocator);
  gimple_call_set_lhs (g, allocate_ptr);
  gimple_seq_add_stmt (ilist, g);
  if (!is_ref)
    {
      /* Redirect uses of NEW_VAR to *allocate_ptr.  */
      tree x = build_simple_mem_ref (allocate_ptr);
      TREE_THIS_NOTRAP (x) = 1;
      SET_DECL_VALUE_EXPR (new_var, x);
      DECL_HAS_VALUE_EXPR_P (new_var) = 1;
    }
  return true;
}
4592
4593 /* Generate code to implement the input clauses, FIRSTPRIVATE and COPYIN,
4594 from the receiver (aka child) side and initializers for REFERENCE_TYPE
4595 private variables. Initialization statements go in ILIST, while calls
4596 to destructors go in DLIST. */
4597
4598 static void
lower_rec_input_clauses(tree clauses,gimple_seq * ilist,gimple_seq * dlist,omp_context * ctx,struct omp_for_data * fd)4599 lower_rec_input_clauses (tree clauses, gimple_seq *ilist, gimple_seq *dlist,
4600 omp_context *ctx, struct omp_for_data *fd)
4601 {
4602 tree c, copyin_seq, x, ptr;
4603 bool copyin_by_ref = false;
4604 bool lastprivate_firstprivate = false;
4605 bool reduction_omp_orig_ref = false;
4606 int pass;
4607 bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
4608 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD);
4609 omplow_simd_context sctx = omplow_simd_context ();
4610 tree simt_lane = NULL_TREE, simtrec = NULL_TREE;
4611 tree ivar = NULL_TREE, lvar = NULL_TREE, uid = NULL_TREE;
4612 gimple_seq llist[4] = { };
4613 tree nonconst_simd_if = NULL_TREE;
4614
4615 copyin_seq = NULL;
4616 sctx.is_simt = is_simd && omp_find_clause (clauses, OMP_CLAUSE__SIMT_);
4617
4618 /* Set max_vf=1 (which will later enforce safelen=1) in simd loops
4619 with data sharing clauses referencing variable sized vars. That
4620 is unnecessarily hard to support and very unlikely to result in
4621 vectorized code anyway. */
4622 if (is_simd)
4623 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
4624 switch (OMP_CLAUSE_CODE (c))
4625 {
4626 case OMP_CLAUSE_LINEAR:
4627 if (OMP_CLAUSE_LINEAR_ARRAY (c))
4628 sctx.max_vf = 1;
4629 /* FALLTHRU */
4630 case OMP_CLAUSE_PRIVATE:
4631 case OMP_CLAUSE_FIRSTPRIVATE:
4632 case OMP_CLAUSE_LASTPRIVATE:
4633 if (is_variable_sized (OMP_CLAUSE_DECL (c)))
4634 sctx.max_vf = 1;
4635 else if (omp_is_reference (OMP_CLAUSE_DECL (c)))
4636 {
4637 tree rtype = TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c)));
4638 if (!TREE_CONSTANT (TYPE_SIZE_UNIT (rtype)))
4639 sctx.max_vf = 1;
4640 }
4641 break;
4642 case OMP_CLAUSE_REDUCTION:
4643 case OMP_CLAUSE_IN_REDUCTION:
4644 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
4645 || is_variable_sized (OMP_CLAUSE_DECL (c)))
4646 sctx.max_vf = 1;
4647 else if (omp_is_reference (OMP_CLAUSE_DECL (c)))
4648 {
4649 tree rtype = TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c)));
4650 if (!TREE_CONSTANT (TYPE_SIZE_UNIT (rtype)))
4651 sctx.max_vf = 1;
4652 }
4653 break;
4654 case OMP_CLAUSE_IF:
4655 if (integer_zerop (OMP_CLAUSE_IF_EXPR (c)))
4656 sctx.max_vf = 1;
4657 else if (TREE_CODE (OMP_CLAUSE_IF_EXPR (c)) != INTEGER_CST)
4658 nonconst_simd_if = OMP_CLAUSE_IF_EXPR (c);
4659 break;
4660 case OMP_CLAUSE_SIMDLEN:
4661 if (integer_onep (OMP_CLAUSE_SIMDLEN_EXPR (c)))
4662 sctx.max_vf = 1;
4663 break;
4664 case OMP_CLAUSE__CONDTEMP_:
4665 /* FIXME: lastprivate(conditional:) not handled for SIMT yet. */
4666 if (sctx.is_simt)
4667 sctx.max_vf = 1;
4668 break;
4669 default:
4670 continue;
4671 }
4672
4673 /* Add a placeholder for simduid. */
4674 if (sctx.is_simt && maybe_ne (sctx.max_vf, 1U))
4675 sctx.simt_eargs.safe_push (NULL_TREE);
4676
4677 unsigned task_reduction_cnt = 0;
4678 unsigned task_reduction_cntorig = 0;
4679 unsigned task_reduction_cnt_full = 0;
4680 unsigned task_reduction_cntorig_full = 0;
4681 unsigned task_reduction_other_cnt = 0;
4682 tree tskred_atype = NULL_TREE, tskred_avar = NULL_TREE;
4683 tree tskred_base = NULL_TREE, tskred_temp = NULL_TREE;
4684 /* Do all the fixed sized types in the first pass, and the variable sized
4685 types in the second pass. This makes sure that the scalar arguments to
4686 the variable sized types are processed before we use them in the
4687 variable sized operations. For task reductions we use 4 passes, in the
4688 first two we ignore them, in the third one gather arguments for
4689 GOMP_task_reduction_remap call and in the last pass actually handle
4690 the task reductions. */
4691 for (pass = 0; pass < ((task_reduction_cnt || task_reduction_other_cnt)
4692 ? 4 : 2); ++pass)
4693 {
4694 if (pass == 2 && task_reduction_cnt)
4695 {
4696 tskred_atype
4697 = build_array_type_nelts (ptr_type_node, task_reduction_cnt
4698 + task_reduction_cntorig);
4699 tskred_avar = create_tmp_var_raw (tskred_atype);
4700 gimple_add_tmp_var (tskred_avar);
4701 TREE_ADDRESSABLE (tskred_avar) = 1;
4702 task_reduction_cnt_full = task_reduction_cnt;
4703 task_reduction_cntorig_full = task_reduction_cntorig;
4704 }
4705 else if (pass == 3 && task_reduction_cnt)
4706 {
4707 x = builtin_decl_explicit (BUILT_IN_GOMP_TASK_REDUCTION_REMAP);
4708 gimple *g
4709 = gimple_build_call (x, 3, size_int (task_reduction_cnt),
4710 size_int (task_reduction_cntorig),
4711 build_fold_addr_expr (tskred_avar));
4712 gimple_seq_add_stmt (ilist, g);
4713 }
4714 if (pass == 3 && task_reduction_other_cnt)
4715 {
4716 /* For reduction clauses, build
4717 tskred_base = (void *) tskred_temp[2]
4718 + omp_get_thread_num () * tskred_temp[1]
4719 or if tskred_temp[1] is known to be constant, that constant
4720 directly. This is the start of the private reduction copy block
4721 for the current thread. */
4722 tree v = create_tmp_var (integer_type_node);
4723 x = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
4724 gimple *g = gimple_build_call (x, 0);
4725 gimple_call_set_lhs (g, v);
4726 gimple_seq_add_stmt (ilist, g);
4727 c = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
4728 tskred_temp = OMP_CLAUSE_DECL (c);
4729 if (is_taskreg_ctx (ctx))
4730 tskred_temp = lookup_decl (tskred_temp, ctx);
4731 tree v2 = create_tmp_var (sizetype);
4732 g = gimple_build_assign (v2, NOP_EXPR, v);
4733 gimple_seq_add_stmt (ilist, g);
4734 if (ctx->task_reductions[0])
4735 v = fold_convert (sizetype, ctx->task_reductions[0]);
4736 else
4737 v = task_reduction_read (ilist, tskred_temp, sizetype, 1);
4738 tree v3 = create_tmp_var (sizetype);
4739 g = gimple_build_assign (v3, MULT_EXPR, v2, v);
4740 gimple_seq_add_stmt (ilist, g);
4741 v = task_reduction_read (ilist, tskred_temp, ptr_type_node, 2);
4742 tskred_base = create_tmp_var (ptr_type_node);
4743 g = gimple_build_assign (tskred_base, POINTER_PLUS_EXPR, v, v3);
4744 gimple_seq_add_stmt (ilist, g);
4745 }
4746 task_reduction_cnt = 0;
4747 task_reduction_cntorig = 0;
4748 task_reduction_other_cnt = 0;
4749 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
4750 {
4751 enum omp_clause_code c_kind = OMP_CLAUSE_CODE (c);
4752 tree var, new_var;
4753 bool by_ref;
4754 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
4755 bool task_reduction_p = false;
4756 bool task_reduction_needs_orig_p = false;
4757 tree cond = NULL_TREE;
4758 tree allocator, allocate_ptr;
4759
4760 switch (c_kind)
4761 {
4762 case OMP_CLAUSE_PRIVATE:
4763 if (OMP_CLAUSE_PRIVATE_DEBUG (c))
4764 continue;
4765 break;
4766 case OMP_CLAUSE_SHARED:
4767 /* Ignore shared directives in teams construct inside
4768 of target construct. */
4769 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
4770 && !is_host_teams_ctx (ctx))
4771 continue;
4772 if (maybe_lookup_decl (OMP_CLAUSE_DECL (c), ctx) == NULL)
4773 {
4774 gcc_assert (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c)
4775 || is_global_var (OMP_CLAUSE_DECL (c)));
4776 continue;
4777 }
4778 case OMP_CLAUSE_FIRSTPRIVATE:
4779 case OMP_CLAUSE_COPYIN:
4780 break;
4781 case OMP_CLAUSE_LINEAR:
4782 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
4783 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
4784 lastprivate_firstprivate = true;
4785 break;
4786 case OMP_CLAUSE_REDUCTION:
4787 case OMP_CLAUSE_IN_REDUCTION:
4788 if (is_task_ctx (ctx) || OMP_CLAUSE_REDUCTION_TASK (c))
4789 {
4790 task_reduction_p = true;
4791 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
4792 {
4793 task_reduction_other_cnt++;
4794 if (pass == 2)
4795 continue;
4796 }
4797 else
4798 task_reduction_cnt++;
4799 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
4800 {
4801 var = OMP_CLAUSE_DECL (c);
4802 /* If var is a global variable that isn't privatized
4803 in outer contexts, we don't need to look up the
4804 original address, it is always the address of the
4805 global variable itself. */
4806 if (!DECL_P (var)
4807 || omp_is_reference (var)
4808 || !is_global_var
4809 (maybe_lookup_decl_in_outer_ctx (var, ctx)))
4810 {
4811 task_reduction_needs_orig_p = true;
4812 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
4813 task_reduction_cntorig++;
4814 }
4815 }
4816 }
4817 else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
4818 reduction_omp_orig_ref = true;
4819 break;
4820 case OMP_CLAUSE__REDUCTEMP_:
4821 if (!is_taskreg_ctx (ctx))
4822 continue;
4823 /* FALLTHRU */
4824 case OMP_CLAUSE__LOOPTEMP_:
4825 /* Handle _looptemp_/_reductemp_ clauses only on
4826 parallel/task. */
4827 if (fd)
4828 continue;
4829 break;
4830 case OMP_CLAUSE_LASTPRIVATE:
4831 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
4832 {
4833 lastprivate_firstprivate = true;
4834 if (pass != 0 || is_taskloop_ctx (ctx))
4835 continue;
4836 }
4837 /* Even without corresponding firstprivate, if
4838 decl is Fortran allocatable, it needs outer var
4839 reference. */
4840 else if (pass == 0
4841 && lang_hooks.decls.omp_private_outer_ref
4842 (OMP_CLAUSE_DECL (c)))
4843 lastprivate_firstprivate = true;
4844 break;
4845 case OMP_CLAUSE_ALIGNED:
4846 if (pass != 1)
4847 continue;
4848 var = OMP_CLAUSE_DECL (c);
4849 if (TREE_CODE (TREE_TYPE (var)) == POINTER_TYPE
4850 && !is_global_var (var))
4851 {
4852 new_var = maybe_lookup_decl (var, ctx);
4853 if (new_var == NULL_TREE)
4854 new_var = maybe_lookup_decl_in_outer_ctx (var, ctx);
4855 x = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
4856 tree alarg = omp_clause_aligned_alignment (c);
4857 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
4858 x = build_call_expr_loc (clause_loc, x, 2, new_var, alarg);
4859 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
4860 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
4861 gimplify_and_add (x, ilist);
4862 }
4863 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
4864 && is_global_var (var))
4865 {
4866 tree ptype = build_pointer_type (TREE_TYPE (var)), t, t2;
4867 new_var = lookup_decl (var, ctx);
4868 t = maybe_lookup_decl_in_outer_ctx (var, ctx);
4869 t = build_fold_addr_expr_loc (clause_loc, t);
4870 t2 = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
4871 tree alarg = omp_clause_aligned_alignment (c);
4872 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
4873 t = build_call_expr_loc (clause_loc, t2, 2, t, alarg);
4874 t = fold_convert_loc (clause_loc, ptype, t);
4875 x = create_tmp_var (ptype);
4876 t = build2 (MODIFY_EXPR, ptype, x, t);
4877 gimplify_and_add (t, ilist);
4878 t = build_simple_mem_ref_loc (clause_loc, x);
4879 SET_DECL_VALUE_EXPR (new_var, t);
4880 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4881 }
4882 continue;
4883 case OMP_CLAUSE__CONDTEMP_:
4884 if (is_parallel_ctx (ctx)
4885 || (is_simd && !OMP_CLAUSE__CONDTEMP__ITER (c)))
4886 break;
4887 continue;
4888 default:
4889 continue;
4890 }
4891
4892 if (task_reduction_p != (pass >= 2))
4893 continue;
4894
4895 allocator = NULL_TREE;
4896 allocate_ptr = NULL_TREE;
4897 new_var = var = OMP_CLAUSE_DECL (c);
4898 if ((c_kind == OMP_CLAUSE_REDUCTION
4899 || c_kind == OMP_CLAUSE_IN_REDUCTION)
4900 && TREE_CODE (var) == MEM_REF)
4901 {
4902 var = TREE_OPERAND (var, 0);
4903 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
4904 var = TREE_OPERAND (var, 0);
4905 if (TREE_CODE (var) == INDIRECT_REF
4906 || TREE_CODE (var) == ADDR_EXPR)
4907 var = TREE_OPERAND (var, 0);
4908 if (is_variable_sized (var))
4909 {
4910 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
4911 var = DECL_VALUE_EXPR (var);
4912 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
4913 var = TREE_OPERAND (var, 0);
4914 gcc_assert (DECL_P (var));
4915 }
4916 new_var = var;
4917 }
4918 if (c_kind != OMP_CLAUSE_COPYIN)
4919 new_var = lookup_decl (var, ctx);
4920
4921 if (c_kind == OMP_CLAUSE_SHARED || c_kind == OMP_CLAUSE_COPYIN)
4922 {
4923 if (pass != 0)
4924 continue;
4925 }
4926 /* C/C++ array section reductions. */
4927 else if ((c_kind == OMP_CLAUSE_REDUCTION
4928 || c_kind == OMP_CLAUSE_IN_REDUCTION)
4929 && var != OMP_CLAUSE_DECL (c))
4930 {
4931 if (pass == 0)
4932 continue;
4933
4934 tree bias = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
4935 tree orig_var = TREE_OPERAND (OMP_CLAUSE_DECL (c), 0);
4936
4937 if (TREE_CODE (orig_var) == POINTER_PLUS_EXPR)
4938 {
4939 tree b = TREE_OPERAND (orig_var, 1);
4940 b = maybe_lookup_decl (b, ctx);
4941 if (b == NULL)
4942 {
4943 b = TREE_OPERAND (orig_var, 1);
4944 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
4945 }
4946 if (integer_zerop (bias))
4947 bias = b;
4948 else
4949 {
4950 bias = fold_convert_loc (clause_loc,
4951 TREE_TYPE (b), bias);
4952 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
4953 TREE_TYPE (b), b, bias);
4954 }
4955 orig_var = TREE_OPERAND (orig_var, 0);
4956 }
4957 if (pass == 2)
4958 {
4959 tree out = maybe_lookup_decl_in_outer_ctx (var, ctx);
4960 if (is_global_var (out)
4961 && TREE_CODE (TREE_TYPE (out)) != POINTER_TYPE
4962 && (TREE_CODE (TREE_TYPE (out)) != REFERENCE_TYPE
4963 || (TREE_CODE (TREE_TYPE (TREE_TYPE (out)))
4964 != POINTER_TYPE)))
4965 x = var;
4966 else
4967 {
4968 bool by_ref = use_pointer_for_field (var, NULL);
4969 x = build_receiver_ref (var, by_ref, ctx);
4970 if (TREE_CODE (TREE_TYPE (var)) == REFERENCE_TYPE
4971 && (TREE_CODE (TREE_TYPE (TREE_TYPE (var)))
4972 == POINTER_TYPE))
4973 x = build_fold_addr_expr (x);
4974 }
4975 if (TREE_CODE (orig_var) == INDIRECT_REF)
4976 x = build_simple_mem_ref (x);
4977 else if (TREE_CODE (orig_var) == ADDR_EXPR)
4978 {
4979 if (var == TREE_OPERAND (orig_var, 0))
4980 x = build_fold_addr_expr (x);
4981 }
4982 bias = fold_convert (sizetype, bias);
4983 x = fold_convert (ptr_type_node, x);
4984 x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
4985 TREE_TYPE (x), x, bias);
4986 unsigned cnt = task_reduction_cnt - 1;
4987 if (!task_reduction_needs_orig_p)
4988 cnt += (task_reduction_cntorig_full
4989 - task_reduction_cntorig);
4990 else
4991 cnt = task_reduction_cntorig - 1;
4992 tree r = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
4993 size_int (cnt), NULL_TREE, NULL_TREE);
4994 gimplify_assign (r, x, ilist);
4995 continue;
4996 }
4997
4998 if (TREE_CODE (orig_var) == INDIRECT_REF
4999 || TREE_CODE (orig_var) == ADDR_EXPR)
5000 orig_var = TREE_OPERAND (orig_var, 0);
5001 tree d = OMP_CLAUSE_DECL (c);
5002 tree type = TREE_TYPE (d);
5003 gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
5004 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
5005 tree sz = v;
5006 const char *name = get_name (orig_var);
5007 if (pass != 3 && !TREE_CONSTANT (v))
5008 {
5009 tree t = maybe_lookup_decl (v, ctx);
5010 if (t)
5011 v = t;
5012 else
5013 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
5014 gimplify_expr (&v, ilist, NULL, is_gimple_val, fb_rvalue);
5015 t = fold_build2_loc (clause_loc, PLUS_EXPR,
5016 TREE_TYPE (v), v,
5017 build_int_cst (TREE_TYPE (v), 1));
5018 sz = fold_build2_loc (clause_loc, MULT_EXPR,
5019 TREE_TYPE (v), t,
5020 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5021 }
5022 if (pass == 3)
5023 {
5024 tree xv = create_tmp_var (ptr_type_node);
5025 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5026 {
5027 unsigned cnt = task_reduction_cnt - 1;
5028 if (!task_reduction_needs_orig_p)
5029 cnt += (task_reduction_cntorig_full
5030 - task_reduction_cntorig);
5031 else
5032 cnt = task_reduction_cntorig - 1;
5033 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5034 size_int (cnt), NULL_TREE, NULL_TREE);
5035
5036 gimple *g = gimple_build_assign (xv, x);
5037 gimple_seq_add_stmt (ilist, g);
5038 }
5039 else
5040 {
5041 unsigned int idx = *ctx->task_reduction_map->get (c);
5042 tree off;
5043 if (ctx->task_reductions[1 + idx])
5044 off = fold_convert (sizetype,
5045 ctx->task_reductions[1 + idx]);
5046 else
5047 off = task_reduction_read (ilist, tskred_temp, sizetype,
5048 7 + 3 * idx + 1);
5049 gimple *g = gimple_build_assign (xv, POINTER_PLUS_EXPR,
5050 tskred_base, off);
5051 gimple_seq_add_stmt (ilist, g);
5052 }
5053 x = fold_convert (build_pointer_type (boolean_type_node),
5054 xv);
5055 if (TREE_CONSTANT (v))
5056 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (x), x,
5057 TYPE_SIZE_UNIT (type));
5058 else
5059 {
5060 tree t = maybe_lookup_decl (v, ctx);
5061 if (t)
5062 v = t;
5063 else
5064 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
5065 gimplify_expr (&v, ilist, NULL, is_gimple_val,
5066 fb_rvalue);
5067 t = fold_build2_loc (clause_loc, PLUS_EXPR,
5068 TREE_TYPE (v), v,
5069 build_int_cst (TREE_TYPE (v), 1));
5070 t = fold_build2_loc (clause_loc, MULT_EXPR,
5071 TREE_TYPE (v), t,
5072 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5073 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (x), x, t);
5074 }
5075 cond = create_tmp_var (TREE_TYPE (x));
5076 gimplify_assign (cond, x, ilist);
5077 x = xv;
5078 }
5079 else if (lower_private_allocate (var, type, allocator,
5080 allocate_ptr, ilist, ctx,
5081 true,
5082 TREE_CONSTANT (v)
5083 ? TYPE_SIZE_UNIT (type)
5084 : sz))
5085 x = allocate_ptr;
5086 else if (TREE_CONSTANT (v))
5087 {
5088 x = create_tmp_var_raw (type, name);
5089 gimple_add_tmp_var (x);
5090 TREE_ADDRESSABLE (x) = 1;
5091 x = build_fold_addr_expr_loc (clause_loc, x);
5092 }
5093 else
5094 {
5095 tree atmp
5096 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
5097 tree al = size_int (TYPE_ALIGN (TREE_TYPE (type)));
5098 x = build_call_expr_loc (clause_loc, atmp, 2, sz, al);
5099 }
5100
5101 tree ptype = build_pointer_type (TREE_TYPE (type));
5102 x = fold_convert_loc (clause_loc, ptype, x);
5103 tree y = create_tmp_var (ptype, name);
5104 gimplify_assign (y, x, ilist);
5105 x = y;
5106 tree yb = y;
5107
5108 if (!integer_zerop (bias))
5109 {
5110 bias = fold_convert_loc (clause_loc, pointer_sized_int_node,
5111 bias);
5112 yb = fold_convert_loc (clause_loc, pointer_sized_int_node,
5113 x);
5114 yb = fold_build2_loc (clause_loc, MINUS_EXPR,
5115 pointer_sized_int_node, yb, bias);
5116 x = fold_convert_loc (clause_loc, TREE_TYPE (x), yb);
5117 yb = create_tmp_var (ptype, name);
5118 gimplify_assign (yb, x, ilist);
5119 x = yb;
5120 }
5121
5122 d = TREE_OPERAND (d, 0);
5123 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
5124 d = TREE_OPERAND (d, 0);
5125 if (TREE_CODE (d) == ADDR_EXPR)
5126 {
5127 if (orig_var != var)
5128 {
5129 gcc_assert (is_variable_sized (orig_var));
5130 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var),
5131 x);
5132 gimplify_assign (new_var, x, ilist);
5133 tree new_orig_var = lookup_decl (orig_var, ctx);
5134 tree t = build_fold_indirect_ref (new_var);
5135 DECL_IGNORED_P (new_var) = 0;
5136 TREE_THIS_NOTRAP (t) = 1;
5137 SET_DECL_VALUE_EXPR (new_orig_var, t);
5138 DECL_HAS_VALUE_EXPR_P (new_orig_var) = 1;
5139 }
5140 else
5141 {
5142 x = build2 (MEM_REF, TREE_TYPE (new_var), x,
5143 build_int_cst (ptype, 0));
5144 SET_DECL_VALUE_EXPR (new_var, x);
5145 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5146 }
5147 }
5148 else
5149 {
5150 gcc_assert (orig_var == var);
5151 if (TREE_CODE (d) == INDIRECT_REF)
5152 {
5153 x = create_tmp_var (ptype, name);
5154 TREE_ADDRESSABLE (x) = 1;
5155 gimplify_assign (x, yb, ilist);
5156 x = build_fold_addr_expr_loc (clause_loc, x);
5157 }
5158 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
5159 gimplify_assign (new_var, x, ilist);
5160 }
5161 /* GOMP_taskgroup_reduction_register memsets the whole
5162 array to zero. If the initializer is zero, we don't
5163 need to initialize it again, just mark it as ever
5164 used unconditionally, i.e. cond = true. */
5165 if (cond
5166 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) == NULL_TREE
5167 && initializer_zerop (omp_reduction_init (c,
5168 TREE_TYPE (type))))
5169 {
5170 gimple *g = gimple_build_assign (build_simple_mem_ref (cond),
5171 boolean_true_node);
5172 gimple_seq_add_stmt (ilist, g);
5173 continue;
5174 }
5175 tree end = create_artificial_label (UNKNOWN_LOCATION);
5176 if (cond)
5177 {
5178 gimple *g;
5179 if (!is_parallel_ctx (ctx))
5180 {
5181 tree condv = create_tmp_var (boolean_type_node);
5182 g = gimple_build_assign (condv,
5183 build_simple_mem_ref (cond));
5184 gimple_seq_add_stmt (ilist, g);
5185 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
5186 g = gimple_build_cond (NE_EXPR, condv,
5187 boolean_false_node, end, lab1);
5188 gimple_seq_add_stmt (ilist, g);
5189 gimple_seq_add_stmt (ilist, gimple_build_label (lab1));
5190 }
5191 g = gimple_build_assign (build_simple_mem_ref (cond),
5192 boolean_true_node);
5193 gimple_seq_add_stmt (ilist, g);
5194 }
5195
5196 tree y1 = create_tmp_var (ptype);
5197 gimplify_assign (y1, y, ilist);
5198 tree i2 = NULL_TREE, y2 = NULL_TREE;
5199 tree body2 = NULL_TREE, end2 = NULL_TREE;
5200 tree y3 = NULL_TREE, y4 = NULL_TREE;
5201 if (task_reduction_needs_orig_p)
5202 {
5203 y3 = create_tmp_var (ptype);
5204 tree ref;
5205 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5206 ref = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5207 size_int (task_reduction_cnt_full
5208 + task_reduction_cntorig - 1),
5209 NULL_TREE, NULL_TREE);
5210 else
5211 {
5212 unsigned int idx = *ctx->task_reduction_map->get (c);
5213 ref = task_reduction_read (ilist, tskred_temp, ptype,
5214 7 + 3 * idx);
5215 }
5216 gimplify_assign (y3, ref, ilist);
5217 }
5218 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) || is_simd)
5219 {
5220 if (pass != 3)
5221 {
5222 y2 = create_tmp_var (ptype);
5223 gimplify_assign (y2, y, ilist);
5224 }
5225 if (is_simd || OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
5226 {
5227 tree ref = build_outer_var_ref (var, ctx);
5228 /* For ref build_outer_var_ref already performs this. */
5229 if (TREE_CODE (d) == INDIRECT_REF)
5230 gcc_assert (omp_is_reference (var));
5231 else if (TREE_CODE (d) == ADDR_EXPR)
5232 ref = build_fold_addr_expr (ref);
5233 else if (omp_is_reference (var))
5234 ref = build_fold_addr_expr (ref);
5235 ref = fold_convert_loc (clause_loc, ptype, ref);
5236 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
5237 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
5238 {
5239 y3 = create_tmp_var (ptype);
5240 gimplify_assign (y3, unshare_expr (ref), ilist);
5241 }
5242 if (is_simd)
5243 {
5244 y4 = create_tmp_var (ptype);
5245 gimplify_assign (y4, ref, dlist);
5246 }
5247 }
5248 }
5249 tree i = create_tmp_var (TREE_TYPE (v));
5250 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), ilist);
5251 tree body = create_artificial_label (UNKNOWN_LOCATION);
5252 gimple_seq_add_stmt (ilist, gimple_build_label (body));
5253 if (y2)
5254 {
5255 i2 = create_tmp_var (TREE_TYPE (v));
5256 gimplify_assign (i2, build_int_cst (TREE_TYPE (v), 0), dlist);
5257 body2 = create_artificial_label (UNKNOWN_LOCATION);
5258 end2 = create_artificial_label (UNKNOWN_LOCATION);
5259 gimple_seq_add_stmt (dlist, gimple_build_label (body2));
5260 }
5261 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5262 {
5263 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
5264 tree decl_placeholder
5265 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
5266 SET_DECL_VALUE_EXPR (decl_placeholder,
5267 build_simple_mem_ref (y1));
5268 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
5269 SET_DECL_VALUE_EXPR (placeholder,
5270 y3 ? build_simple_mem_ref (y3)
5271 : error_mark_node);
5272 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
5273 x = lang_hooks.decls.omp_clause_default_ctor
5274 (c, build_simple_mem_ref (y1),
5275 y3 ? build_simple_mem_ref (y3) : NULL_TREE);
5276 if (x)
5277 gimplify_and_add (x, ilist);
5278 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
5279 {
5280 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
5281 lower_omp (&tseq, ctx);
5282 gimple_seq_add_seq (ilist, tseq);
5283 }
5284 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
5285 if (is_simd)
5286 {
5287 SET_DECL_VALUE_EXPR (decl_placeholder,
5288 build_simple_mem_ref (y2));
5289 SET_DECL_VALUE_EXPR (placeholder,
5290 build_simple_mem_ref (y4));
5291 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
5292 lower_omp (&tseq, ctx);
5293 gimple_seq_add_seq (dlist, tseq);
5294 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5295 }
5296 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
5297 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 0;
5298 if (y2)
5299 {
5300 x = lang_hooks.decls.omp_clause_dtor
5301 (c, build_simple_mem_ref (y2));
5302 if (x)
5303 gimplify_and_add (x, dlist);
5304 }
5305 }
5306 else
5307 {
5308 x = omp_reduction_init (c, TREE_TYPE (type));
5309 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
5310
5311 /* reduction(-:var) sums up the partial results, so it
5312 acts identically to reduction(+:var). */
5313 if (code == MINUS_EXPR)
5314 code = PLUS_EXPR;
5315
5316 gimplify_assign (build_simple_mem_ref (y1), x, ilist);
5317 if (is_simd)
5318 {
5319 x = build2 (code, TREE_TYPE (type),
5320 build_simple_mem_ref (y4),
5321 build_simple_mem_ref (y2));
5322 gimplify_assign (build_simple_mem_ref (y4), x, dlist);
5323 }
5324 }
5325 gimple *g
5326 = gimple_build_assign (y1, POINTER_PLUS_EXPR, y1,
5327 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5328 gimple_seq_add_stmt (ilist, g);
5329 if (y3)
5330 {
5331 g = gimple_build_assign (y3, POINTER_PLUS_EXPR, y3,
5332 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5333 gimple_seq_add_stmt (ilist, g);
5334 }
5335 g = gimple_build_assign (i, PLUS_EXPR, i,
5336 build_int_cst (TREE_TYPE (i), 1));
5337 gimple_seq_add_stmt (ilist, g);
5338 g = gimple_build_cond (LE_EXPR, i, v, body, end);
5339 gimple_seq_add_stmt (ilist, g);
5340 gimple_seq_add_stmt (ilist, gimple_build_label (end));
5341 if (y2)
5342 {
5343 g = gimple_build_assign (y2, POINTER_PLUS_EXPR, y2,
5344 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5345 gimple_seq_add_stmt (dlist, g);
5346 if (y4)
5347 {
5348 g = gimple_build_assign
5349 (y4, POINTER_PLUS_EXPR, y4,
5350 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5351 gimple_seq_add_stmt (dlist, g);
5352 }
5353 g = gimple_build_assign (i2, PLUS_EXPR, i2,
5354 build_int_cst (TREE_TYPE (i2), 1));
5355 gimple_seq_add_stmt (dlist, g);
5356 g = gimple_build_cond (LE_EXPR, i2, v, body2, end2);
5357 gimple_seq_add_stmt (dlist, g);
5358 gimple_seq_add_stmt (dlist, gimple_build_label (end2));
5359 }
5360 if (allocator)
5361 {
5362 tree f = builtin_decl_explicit (BUILT_IN_GOMP_FREE);
5363 g = gimple_build_call (f, 2, allocate_ptr, allocator);
5364 gimple_seq_add_stmt (dlist, g);
5365 }
5366 continue;
5367 }
5368 else if (pass == 2)
5369 {
5370 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
5371 x = var;
5372 else
5373 {
5374 bool by_ref = use_pointer_for_field (var, ctx);
5375 x = build_receiver_ref (var, by_ref, ctx);
5376 }
5377 if (!omp_is_reference (var))
5378 x = build_fold_addr_expr (x);
5379 x = fold_convert (ptr_type_node, x);
5380 unsigned cnt = task_reduction_cnt - 1;
5381 if (!task_reduction_needs_orig_p)
5382 cnt += task_reduction_cntorig_full - task_reduction_cntorig;
5383 else
5384 cnt = task_reduction_cntorig - 1;
5385 tree r = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5386 size_int (cnt), NULL_TREE, NULL_TREE);
5387 gimplify_assign (r, x, ilist);
5388 continue;
5389 }
5390 else if (pass == 3)
5391 {
5392 tree type = TREE_TYPE (new_var);
5393 if (!omp_is_reference (var))
5394 type = build_pointer_type (type);
5395 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5396 {
5397 unsigned cnt = task_reduction_cnt - 1;
5398 if (!task_reduction_needs_orig_p)
5399 cnt += (task_reduction_cntorig_full
5400 - task_reduction_cntorig);
5401 else
5402 cnt = task_reduction_cntorig - 1;
5403 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5404 size_int (cnt), NULL_TREE, NULL_TREE);
5405 }
5406 else
5407 {
5408 unsigned int idx = *ctx->task_reduction_map->get (c);
5409 tree off;
5410 if (ctx->task_reductions[1 + idx])
5411 off = fold_convert (sizetype,
5412 ctx->task_reductions[1 + idx]);
5413 else
5414 off = task_reduction_read (ilist, tskred_temp, sizetype,
5415 7 + 3 * idx + 1);
5416 x = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node,
5417 tskred_base, off);
5418 }
5419 x = fold_convert (type, x);
5420 tree t;
5421 if (omp_is_reference (var))
5422 {
5423 gimplify_assign (new_var, x, ilist);
5424 t = new_var;
5425 new_var = build_simple_mem_ref (new_var);
5426 }
5427 else
5428 {
5429 t = create_tmp_var (type);
5430 gimplify_assign (t, x, ilist);
5431 SET_DECL_VALUE_EXPR (new_var, build_simple_mem_ref (t));
5432 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5433 }
5434 t = fold_convert (build_pointer_type (boolean_type_node), t);
5435 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t,
5436 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5437 cond = create_tmp_var (TREE_TYPE (t));
5438 gimplify_assign (cond, t, ilist);
5439 }
5440 else if (is_variable_sized (var))
5441 {
5442 /* For variable sized types, we need to allocate the
5443 actual storage here. Call alloca and store the
5444 result in the pointer decl that we created elsewhere. */
5445 if (pass == 0)
5446 continue;
5447
5448 if (c_kind != OMP_CLAUSE_FIRSTPRIVATE || !is_task_ctx (ctx))
5449 {
5450 tree tmp;
5451
5452 ptr = DECL_VALUE_EXPR (new_var);
5453 gcc_assert (TREE_CODE (ptr) == INDIRECT_REF);
5454 ptr = TREE_OPERAND (ptr, 0);
5455 gcc_assert (DECL_P (ptr));
5456 x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
5457
5458 if (lower_private_allocate (var, new_var, allocator,
5459 allocate_ptr, ilist, ctx,
5460 false, x))
5461 tmp = allocate_ptr;
5462 else
5463 {
5464 /* void *tmp = __builtin_alloca */
5465 tree atmp
5466 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
5467 gcall *stmt
5468 = gimple_build_call (atmp, 2, x,
5469 size_int (DECL_ALIGN (var)));
5470 cfun->calls_alloca = 1;
5471 tmp = create_tmp_var_raw (ptr_type_node);
5472 gimple_add_tmp_var (tmp);
5473 gimple_call_set_lhs (stmt, tmp);
5474
5475 gimple_seq_add_stmt (ilist, stmt);
5476 }
5477
5478 x = fold_convert_loc (clause_loc, TREE_TYPE (ptr), tmp);
5479 gimplify_assign (ptr, x, ilist);
5480 }
5481 }
5482 else if (omp_is_reference (var)
5483 && (c_kind != OMP_CLAUSE_FIRSTPRIVATE
5484 || !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c)))
5485 {
5486 /* For references that are being privatized for Fortran,
5487 allocate new backing storage for the new pointer
5488 variable. This allows us to avoid changing all the
5489 code that expects a pointer to something that expects
5490 a direct variable. */
5491 if (pass == 0)
5492 continue;
5493
5494 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
5495 if (c_kind == OMP_CLAUSE_FIRSTPRIVATE && is_task_ctx (ctx))
5496 {
5497 x = build_receiver_ref (var, false, ctx);
5498 if (ctx->allocate_map)
5499 if (tree *allocatep = ctx->allocate_map->get (var))
5500 {
5501 allocator = *allocatep;
5502 if (TREE_CODE (allocator) != INTEGER_CST)
5503 allocator = build_outer_var_ref (allocator, ctx);
5504 allocator = fold_convert (pointer_sized_int_node,
5505 allocator);
5506 allocate_ptr = unshare_expr (x);
5507 }
5508 if (allocator == NULL_TREE)
5509 x = build_fold_addr_expr_loc (clause_loc, x);
5510 }
5511 else if (lower_private_allocate (var, new_var, allocator,
5512 allocate_ptr,
5513 ilist, ctx, true, x))
5514 x = allocate_ptr;
5515 else if (TREE_CONSTANT (x))
5516 {
5517 /* For reduction in SIMD loop, defer adding the
5518 initialization of the reference, because if we decide
5519 to use SIMD array for it, the initialization could cause
5520 expansion ICE. Ditto for other privatization clauses. */
5521 if (is_simd)
5522 x = NULL_TREE;
5523 else
5524 {
5525 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
5526 get_name (var));
5527 gimple_add_tmp_var (x);
5528 TREE_ADDRESSABLE (x) = 1;
5529 x = build_fold_addr_expr_loc (clause_loc, x);
5530 }
5531 }
5532 else
5533 {
5534 tree atmp
5535 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
5536 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
5537 tree al = size_int (TYPE_ALIGN (rtype));
5538 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
5539 }
5540
5541 if (x)
5542 {
5543 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
5544 gimplify_assign (new_var, x, ilist);
5545 }
5546
5547 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
5548 }
5549 else if ((c_kind == OMP_CLAUSE_REDUCTION
5550 || c_kind == OMP_CLAUSE_IN_REDUCTION)
5551 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5552 {
5553 if (pass == 0)
5554 continue;
5555 }
5556 else if (pass != 0)
5557 continue;
5558
5559 switch (OMP_CLAUSE_CODE (c))
5560 {
5561 case OMP_CLAUSE_SHARED:
5562 /* Ignore shared directives in teams construct inside
5563 target construct. */
5564 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
5565 && !is_host_teams_ctx (ctx))
5566 continue;
5567 /* Shared global vars are just accessed directly. */
5568 if (is_global_var (new_var))
5569 break;
5570 /* For taskloop firstprivate/lastprivate, represented
5571 as firstprivate and shared clause on the task, new_var
5572 is the firstprivate var. */
5573 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
5574 break;
5575 /* Set up the DECL_VALUE_EXPR for shared variables now. This
5576 needs to be delayed until after fixup_child_record_type so
5577 that we get the correct type during the dereference. */
5578 by_ref = use_pointer_for_field (var, ctx);
5579 x = build_receiver_ref (var, by_ref, ctx);
5580 SET_DECL_VALUE_EXPR (new_var, x);
5581 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5582
5583 /* ??? If VAR is not passed by reference, and the variable
5584 hasn't been initialized yet, then we'll get a warning for
5585 the store into the omp_data_s structure. Ideally, we'd be
5586 able to notice this and not store anything at all, but
5587 we're generating code too early. Suppress the warning. */
5588 if (!by_ref)
5589 TREE_NO_WARNING (var) = 1;
5590 break;
5591
5592 case OMP_CLAUSE__CONDTEMP_:
5593 if (is_parallel_ctx (ctx))
5594 {
5595 x = build_receiver_ref (var, false, ctx);
5596 SET_DECL_VALUE_EXPR (new_var, x);
5597 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5598 }
5599 else if (is_simd && !OMP_CLAUSE__CONDTEMP__ITER (c))
5600 {
5601 x = build_zero_cst (TREE_TYPE (var));
5602 goto do_private;
5603 }
5604 break;
5605
5606 case OMP_CLAUSE_LASTPRIVATE:
5607 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
5608 break;
5609 /* FALLTHRU */
5610
5611 case OMP_CLAUSE_PRIVATE:
5612 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE)
5613 x = build_outer_var_ref (var, ctx);
5614 else if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
5615 {
5616 if (is_task_ctx (ctx))
5617 x = build_receiver_ref (var, false, ctx);
5618 else
5619 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_PRIVATE);
5620 }
5621 else
5622 x = NULL;
5623 do_private:
5624 tree nx;
5625 bool copy_ctor;
5626 copy_ctor = false;
5627 lower_private_allocate (var, new_var, allocator, allocate_ptr,
5628 ilist, ctx, false, NULL_TREE);
5629 nx = unshare_expr (new_var);
5630 if (is_simd
5631 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5632 && OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c))
5633 copy_ctor = true;
5634 if (copy_ctor)
5635 nx = lang_hooks.decls.omp_clause_copy_ctor (c, nx, x);
5636 else
5637 nx = lang_hooks.decls.omp_clause_default_ctor (c, nx, x);
5638 if (is_simd)
5639 {
5640 tree y = lang_hooks.decls.omp_clause_dtor (c, new_var);
5641 if ((TREE_ADDRESSABLE (new_var) || nx || y
5642 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5643 && (gimple_omp_for_collapse (ctx->stmt) != 1
5644 || (gimple_omp_for_index (ctx->stmt, 0)
5645 != new_var)))
5646 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE__CONDTEMP_
5647 || omp_is_reference (var))
5648 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
5649 ivar, lvar))
5650 {
5651 if (omp_is_reference (var))
5652 {
5653 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5654 tree new_vard = TREE_OPERAND (new_var, 0);
5655 gcc_assert (DECL_P (new_vard));
5656 SET_DECL_VALUE_EXPR (new_vard,
5657 build_fold_addr_expr (lvar));
5658 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
5659 }
5660
5661 if (nx)
5662 {
5663 tree iv = unshare_expr (ivar);
5664 if (copy_ctor)
5665 x = lang_hooks.decls.omp_clause_copy_ctor (c, iv,
5666 x);
5667 else
5668 x = lang_hooks.decls.omp_clause_default_ctor (c,
5669 iv,
5670 x);
5671 }
5672 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE__CONDTEMP_)
5673 {
5674 x = build2 (MODIFY_EXPR, TREE_TYPE (ivar),
5675 unshare_expr (ivar), x);
5676 nx = x;
5677 }
5678 if (nx && x)
5679 gimplify_and_add (x, &llist[0]);
5680 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5681 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
5682 {
5683 tree v = new_var;
5684 if (!DECL_P (v))
5685 {
5686 gcc_assert (TREE_CODE (v) == MEM_REF);
5687 v = TREE_OPERAND (v, 0);
5688 gcc_assert (DECL_P (v));
5689 }
5690 v = *ctx->lastprivate_conditional_map->get (v);
5691 tree t = create_tmp_var (TREE_TYPE (v));
5692 tree z = build_zero_cst (TREE_TYPE (v));
5693 tree orig_v
5694 = build_outer_var_ref (var, ctx,
5695 OMP_CLAUSE_LASTPRIVATE);
5696 gimple_seq_add_stmt (dlist,
5697 gimple_build_assign (t, z));
5698 gcc_assert (DECL_HAS_VALUE_EXPR_P (v));
5699 tree civar = DECL_VALUE_EXPR (v);
5700 gcc_assert (TREE_CODE (civar) == ARRAY_REF);
5701 civar = unshare_expr (civar);
5702 TREE_OPERAND (civar, 1) = sctx.idx;
5703 x = build2 (MODIFY_EXPR, TREE_TYPE (t), t,
5704 unshare_expr (civar));
5705 x = build2 (COMPOUND_EXPR, TREE_TYPE (orig_v), x,
5706 build2 (MODIFY_EXPR, TREE_TYPE (orig_v),
5707 orig_v, unshare_expr (ivar)));
5708 tree cond = build2 (LT_EXPR, boolean_type_node, t,
5709 civar);
5710 x = build3 (COND_EXPR, void_type_node, cond, x,
5711 void_node);
5712 gimple_seq tseq = NULL;
5713 gimplify_and_add (x, &tseq);
5714 if (ctx->outer)
5715 lower_omp (&tseq, ctx->outer);
5716 gimple_seq_add_seq (&llist[1], tseq);
5717 }
5718 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5719 && ctx->for_simd_scan_phase)
5720 {
5721 x = unshare_expr (ivar);
5722 tree orig_v
5723 = build_outer_var_ref (var, ctx,
5724 OMP_CLAUSE_LASTPRIVATE);
5725 x = lang_hooks.decls.omp_clause_assign_op (c, x,
5726 orig_v);
5727 gimplify_and_add (x, &llist[0]);
5728 }
5729 if (y)
5730 {
5731 y = lang_hooks.decls.omp_clause_dtor (c, ivar);
5732 if (y)
5733 gimplify_and_add (y, &llist[1]);
5734 }
5735 break;
5736 }
5737 if (omp_is_reference (var))
5738 {
5739 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5740 tree new_vard = TREE_OPERAND (new_var, 0);
5741 gcc_assert (DECL_P (new_vard));
5742 tree type = TREE_TYPE (TREE_TYPE (new_vard));
5743 x = TYPE_SIZE_UNIT (type);
5744 if (TREE_CONSTANT (x))
5745 {
5746 x = create_tmp_var_raw (type, get_name (var));
5747 gimple_add_tmp_var (x);
5748 TREE_ADDRESSABLE (x) = 1;
5749 x = build_fold_addr_expr_loc (clause_loc, x);
5750 x = fold_convert_loc (clause_loc,
5751 TREE_TYPE (new_vard), x);
5752 gimplify_assign (new_vard, x, ilist);
5753 }
5754 }
5755 }
5756 if (nx)
5757 gimplify_and_add (nx, ilist);
5758 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5759 && is_simd
5760 && ctx->for_simd_scan_phase)
5761 {
5762 tree orig_v = build_outer_var_ref (var, ctx,
5763 OMP_CLAUSE_LASTPRIVATE);
5764 x = lang_hooks.decls.omp_clause_assign_op (c, new_var,
5765 orig_v);
5766 gimplify_and_add (x, ilist);
5767 }
5768 /* FALLTHRU */
5769
5770 do_dtor:
5771 x = lang_hooks.decls.omp_clause_dtor (c, new_var);
5772 if (x)
5773 gimplify_and_add (x, dlist);
5774 if (allocator)
5775 {
5776 if (!is_gimple_val (allocator))
5777 {
5778 tree avar = create_tmp_var (TREE_TYPE (allocator));
5779 gimplify_assign (avar, allocator, dlist);
5780 allocator = avar;
5781 }
5782 if (!is_gimple_val (allocate_ptr))
5783 {
5784 tree apvar = create_tmp_var (TREE_TYPE (allocate_ptr));
5785 gimplify_assign (apvar, allocate_ptr, dlist);
5786 allocate_ptr = apvar;
5787 }
5788 tree f = builtin_decl_explicit (BUILT_IN_GOMP_FREE);
5789 gimple *g
5790 = gimple_build_call (f, 2, allocate_ptr, allocator);
5791 gimple_seq_add_stmt (dlist, g);
5792 }
5793 break;
5794
5795 case OMP_CLAUSE_LINEAR:
5796 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
5797 goto do_firstprivate;
5798 if (OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
5799 x = NULL;
5800 else
5801 x = build_outer_var_ref (var, ctx);
5802 goto do_private;
5803
5804 case OMP_CLAUSE_FIRSTPRIVATE:
5805 if (is_task_ctx (ctx))
5806 {
5807 if ((omp_is_reference (var)
5808 && !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c))
5809 || is_variable_sized (var))
5810 goto do_dtor;
5811 else if (is_global_var (maybe_lookup_decl_in_outer_ctx (var,
5812 ctx))
5813 || use_pointer_for_field (var, NULL))
5814 {
5815 x = build_receiver_ref (var, false, ctx);
5816 if (ctx->allocate_map)
5817 if (tree *allocatep = ctx->allocate_map->get (var))
5818 {
5819 allocator = *allocatep;
5820 if (TREE_CODE (allocator) != INTEGER_CST)
5821 allocator = build_outer_var_ref (allocator, ctx);
5822 allocator = fold_convert (pointer_sized_int_node,
5823 allocator);
5824 allocate_ptr = unshare_expr (x);
5825 x = build_simple_mem_ref (x);
5826 TREE_THIS_NOTRAP (x) = 1;
5827 }
5828 SET_DECL_VALUE_EXPR (new_var, x);
5829 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5830 goto do_dtor;
5831 }
5832 }
5833 if (OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c)
5834 && omp_is_reference (var))
5835 {
5836 x = build_outer_var_ref (var, ctx);
5837 gcc_assert (TREE_CODE (x) == MEM_REF
5838 && integer_zerop (TREE_OPERAND (x, 1)));
5839 x = TREE_OPERAND (x, 0);
5840 x = lang_hooks.decls.omp_clause_copy_ctor
5841 (c, unshare_expr (new_var), x);
5842 gimplify_and_add (x, ilist);
5843 goto do_dtor;
5844 }
5845 do_firstprivate:
5846 lower_private_allocate (var, new_var, allocator, allocate_ptr,
5847 ilist, ctx, false, NULL_TREE);
5848 x = build_outer_var_ref (var, ctx);
5849 if (is_simd)
5850 {
5851 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
5852 && gimple_omp_for_combined_into_p (ctx->stmt))
5853 {
5854 tree t = OMP_CLAUSE_LINEAR_STEP (c);
5855 tree stept = TREE_TYPE (t);
5856 tree ct = omp_find_clause (clauses,
5857 OMP_CLAUSE__LOOPTEMP_);
5858 gcc_assert (ct);
5859 tree l = OMP_CLAUSE_DECL (ct);
5860 tree n1 = fd->loop.n1;
5861 tree step = fd->loop.step;
5862 tree itype = TREE_TYPE (l);
5863 if (POINTER_TYPE_P (itype))
5864 itype = signed_type_for (itype);
5865 l = fold_build2 (MINUS_EXPR, itype, l, n1);
5866 if (TYPE_UNSIGNED (itype)
5867 && fd->loop.cond_code == GT_EXPR)
5868 l = fold_build2 (TRUNC_DIV_EXPR, itype,
5869 fold_build1 (NEGATE_EXPR, itype, l),
5870 fold_build1 (NEGATE_EXPR,
5871 itype, step));
5872 else
5873 l = fold_build2 (TRUNC_DIV_EXPR, itype, l, step);
5874 t = fold_build2 (MULT_EXPR, stept,
5875 fold_convert (stept, l), t);
5876
5877 if (OMP_CLAUSE_LINEAR_ARRAY (c))
5878 {
5879 if (omp_is_reference (var))
5880 {
5881 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5882 tree new_vard = TREE_OPERAND (new_var, 0);
5883 gcc_assert (DECL_P (new_vard));
5884 tree type = TREE_TYPE (TREE_TYPE (new_vard));
5885 nx = TYPE_SIZE_UNIT (type);
5886 if (TREE_CONSTANT (nx))
5887 {
5888 nx = create_tmp_var_raw (type,
5889 get_name (var));
5890 gimple_add_tmp_var (nx);
5891 TREE_ADDRESSABLE (nx) = 1;
5892 nx = build_fold_addr_expr_loc (clause_loc,
5893 nx);
5894 nx = fold_convert_loc (clause_loc,
5895 TREE_TYPE (new_vard),
5896 nx);
5897 gimplify_assign (new_vard, nx, ilist);
5898 }
5899 }
5900
5901 x = lang_hooks.decls.omp_clause_linear_ctor
5902 (c, new_var, x, t);
5903 gimplify_and_add (x, ilist);
5904 goto do_dtor;
5905 }
5906
5907 if (POINTER_TYPE_P (TREE_TYPE (x)))
5908 x = fold_build2 (POINTER_PLUS_EXPR,
5909 TREE_TYPE (x), x, t);
5910 else
5911 x = fold_build2 (PLUS_EXPR, TREE_TYPE (x), x, t);
5912 }
5913
5914 if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_LINEAR
5915 || TREE_ADDRESSABLE (new_var)
5916 || omp_is_reference (var))
5917 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
5918 ivar, lvar))
5919 {
5920 if (omp_is_reference (var))
5921 {
5922 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5923 tree new_vard = TREE_OPERAND (new_var, 0);
5924 gcc_assert (DECL_P (new_vard));
5925 SET_DECL_VALUE_EXPR (new_vard,
5926 build_fold_addr_expr (lvar));
5927 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
5928 }
5929 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR)
5930 {
5931 tree iv = create_tmp_var (TREE_TYPE (new_var));
5932 x = lang_hooks.decls.omp_clause_copy_ctor (c, iv, x);
5933 gimplify_and_add (x, ilist);
5934 gimple_stmt_iterator gsi
5935 = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
5936 gassign *g
5937 = gimple_build_assign (unshare_expr (lvar), iv);
5938 gsi_insert_before_without_update (&gsi, g,
5939 GSI_SAME_STMT);
5940 tree t = OMP_CLAUSE_LINEAR_STEP (c);
5941 enum tree_code code = PLUS_EXPR;
5942 if (POINTER_TYPE_P (TREE_TYPE (new_var)))
5943 code = POINTER_PLUS_EXPR;
5944 g = gimple_build_assign (iv, code, iv, t);
5945 gsi_insert_before_without_update (&gsi, g,
5946 GSI_SAME_STMT);
5947 break;
5948 }
5949 x = lang_hooks.decls.omp_clause_copy_ctor
5950 (c, unshare_expr (ivar), x);
5951 gimplify_and_add (x, &llist[0]);
5952 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
5953 if (x)
5954 gimplify_and_add (x, &llist[1]);
5955 break;
5956 }
5957 if (omp_is_reference (var))
5958 {
5959 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5960 tree new_vard = TREE_OPERAND (new_var, 0);
5961 gcc_assert (DECL_P (new_vard));
5962 tree type = TREE_TYPE (TREE_TYPE (new_vard));
5963 nx = TYPE_SIZE_UNIT (type);
5964 if (TREE_CONSTANT (nx))
5965 {
5966 nx = create_tmp_var_raw (type, get_name (var));
5967 gimple_add_tmp_var (nx);
5968 TREE_ADDRESSABLE (nx) = 1;
5969 nx = build_fold_addr_expr_loc (clause_loc, nx);
5970 nx = fold_convert_loc (clause_loc,
5971 TREE_TYPE (new_vard), nx);
5972 gimplify_assign (new_vard, nx, ilist);
5973 }
5974 }
5975 }
5976 x = lang_hooks.decls.omp_clause_copy_ctor
5977 (c, unshare_expr (new_var), x);
5978 gimplify_and_add (x, ilist);
5979 goto do_dtor;
5980
5981 case OMP_CLAUSE__LOOPTEMP_:
5982 case OMP_CLAUSE__REDUCTEMP_:
5983 gcc_assert (is_taskreg_ctx (ctx));
5984 x = build_outer_var_ref (var, ctx);
5985 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
5986 gimplify_and_add (x, ilist);
5987 break;
5988
5989 case OMP_CLAUSE_COPYIN:
5990 by_ref = use_pointer_for_field (var, NULL);
5991 x = build_receiver_ref (var, by_ref, ctx);
5992 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, x);
5993 append_to_statement_list (x, ©in_seq);
5994 copyin_by_ref |= by_ref;
5995 break;
5996
5997 case OMP_CLAUSE_REDUCTION:
5998 case OMP_CLAUSE_IN_REDUCTION:
5999 /* OpenACC reductions are initialized using the
6000 GOACC_REDUCTION internal function. */
6001 if (is_gimple_omp_oacc (ctx->stmt))
6002 break;
6003 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
6004 {
6005 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
6006 gimple *tseq;
6007 tree ptype = TREE_TYPE (placeholder);
6008 if (cond)
6009 {
6010 x = error_mark_node;
6011 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c)
6012 && !task_reduction_needs_orig_p)
6013 x = var;
6014 else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
6015 {
6016 tree pptype = build_pointer_type (ptype);
6017 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
6018 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
6019 size_int (task_reduction_cnt_full
6020 + task_reduction_cntorig - 1),
6021 NULL_TREE, NULL_TREE);
6022 else
6023 {
6024 unsigned int idx
6025 = *ctx->task_reduction_map->get (c);
6026 x = task_reduction_read (ilist, tskred_temp,
6027 pptype, 7 + 3 * idx);
6028 }
6029 x = fold_convert (pptype, x);
6030 x = build_simple_mem_ref (x);
6031 }
6032 }
6033 else
6034 {
6035 lower_private_allocate (var, new_var, allocator,
6036 allocate_ptr, ilist, ctx, false,
6037 NULL_TREE);
6038 x = build_outer_var_ref (var, ctx);
6039
6040 if (omp_is_reference (var)
6041 && !useless_type_conversion_p (ptype, TREE_TYPE (x)))
6042 x = build_fold_addr_expr_loc (clause_loc, x);
6043 }
6044 SET_DECL_VALUE_EXPR (placeholder, x);
6045 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
6046 tree new_vard = new_var;
6047 if (omp_is_reference (var))
6048 {
6049 gcc_assert (TREE_CODE (new_var) == MEM_REF);
6050 new_vard = TREE_OPERAND (new_var, 0);
6051 gcc_assert (DECL_P (new_vard));
6052 }
6053 tree rvar = NULL_TREE, *rvarp = NULL, rvar2 = NULL_TREE;
6054 if (is_simd
6055 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6056 && OMP_CLAUSE_REDUCTION_INSCAN (c))
6057 rvarp = &rvar;
6058 if (is_simd
6059 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
6060 ivar, lvar, rvarp,
6061 &rvar2))
6062 {
6063 if (new_vard == new_var)
6064 {
6065 gcc_assert (DECL_VALUE_EXPR (new_var) == lvar);
6066 SET_DECL_VALUE_EXPR (new_var, ivar);
6067 }
6068 else
6069 {
6070 SET_DECL_VALUE_EXPR (new_vard,
6071 build_fold_addr_expr (ivar));
6072 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
6073 }
6074 x = lang_hooks.decls.omp_clause_default_ctor
6075 (c, unshare_expr (ivar),
6076 build_outer_var_ref (var, ctx));
6077 if (rvarp && ctx->for_simd_scan_phase)
6078 {
6079 if (x)
6080 gimplify_and_add (x, &llist[0]);
6081 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
6082 if (x)
6083 gimplify_and_add (x, &llist[1]);
6084 break;
6085 }
6086 else if (rvarp)
6087 {
6088 if (x)
6089 {
6090 gimplify_and_add (x, &llist[0]);
6091
6092 tree ivar2 = unshare_expr (lvar);
6093 TREE_OPERAND (ivar2, 1) = sctx.idx;
6094 x = lang_hooks.decls.omp_clause_default_ctor
6095 (c, ivar2, build_outer_var_ref (var, ctx));
6096 gimplify_and_add (x, &llist[0]);
6097
6098 if (rvar2)
6099 {
6100 x = lang_hooks.decls.omp_clause_default_ctor
6101 (c, unshare_expr (rvar2),
6102 build_outer_var_ref (var, ctx));
6103 gimplify_and_add (x, &llist[0]);
6104 }
6105
6106 /* For types that need construction, add another
6107 private var which will be default constructed
6108 and optionally initialized with
6109 OMP_CLAUSE_REDUCTION_GIMPLE_INIT, as in the
6110 loop we want to assign this value instead of
6111 constructing and destructing it in each
6112 iteration. */
6113 tree nv = create_tmp_var_raw (TREE_TYPE (ivar));
6114 gimple_add_tmp_var (nv);
6115 ctx->cb.decl_map->put (TREE_OPERAND (rvar2
6116 ? rvar2
6117 : ivar, 0),
6118 nv);
6119 x = lang_hooks.decls.omp_clause_default_ctor
6120 (c, nv, build_outer_var_ref (var, ctx));
6121 gimplify_and_add (x, ilist);
6122
6123 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
6124 {
6125 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
6126 x = DECL_VALUE_EXPR (new_vard);
6127 tree vexpr = nv;
6128 if (new_vard != new_var)
6129 vexpr = build_fold_addr_expr (nv);
6130 SET_DECL_VALUE_EXPR (new_vard, vexpr);
6131 lower_omp (&tseq, ctx);
6132 SET_DECL_VALUE_EXPR (new_vard, x);
6133 gimple_seq_add_seq (ilist, tseq);
6134 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
6135 }
6136
6137 x = lang_hooks.decls.omp_clause_dtor (c, nv);
6138 if (x)
6139 gimplify_and_add (x, dlist);
6140 }
6141
6142 tree ref = build_outer_var_ref (var, ctx);
6143 x = unshare_expr (ivar);
6144 x = lang_hooks.decls.omp_clause_assign_op (c, x,
6145 ref);
6146 gimplify_and_add (x, &llist[0]);
6147
6148 ref = build_outer_var_ref (var, ctx);
6149 x = lang_hooks.decls.omp_clause_assign_op (c, ref,
6150 rvar);
6151 gimplify_and_add (x, &llist[3]);
6152
6153 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
6154 if (new_vard == new_var)
6155 SET_DECL_VALUE_EXPR (new_var, lvar);
6156 else
6157 SET_DECL_VALUE_EXPR (new_vard,
6158 build_fold_addr_expr (lvar));
6159
6160 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
6161 if (x)
6162 gimplify_and_add (x, &llist[1]);
6163
6164 tree ivar2 = unshare_expr (lvar);
6165 TREE_OPERAND (ivar2, 1) = sctx.idx;
6166 x = lang_hooks.decls.omp_clause_dtor (c, ivar2);
6167 if (x)
6168 gimplify_and_add (x, &llist[1]);
6169
6170 if (rvar2)
6171 {
6172 x = lang_hooks.decls.omp_clause_dtor (c, rvar2);
6173 if (x)
6174 gimplify_and_add (x, &llist[1]);
6175 }
6176 break;
6177 }
6178 if (x)
6179 gimplify_and_add (x, &llist[0]);
6180 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
6181 {
6182 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
6183 lower_omp (&tseq, ctx);
6184 gimple_seq_add_seq (&llist[0], tseq);
6185 }
6186 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
6187 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
6188 lower_omp (&tseq, ctx);
6189 gimple_seq_add_seq (&llist[1], tseq);
6190 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
6191 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
6192 if (new_vard == new_var)
6193 SET_DECL_VALUE_EXPR (new_var, lvar);
6194 else
6195 SET_DECL_VALUE_EXPR (new_vard,
6196 build_fold_addr_expr (lvar));
6197 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
6198 if (x)
6199 gimplify_and_add (x, &llist[1]);
6200 break;
6201 }
6202 /* If this is a reference to constant size reduction var
6203 with placeholder, we haven't emitted the initializer
6204 for it because it is undesirable if SIMD arrays are used.
6205 But if they aren't used, we need to emit the deferred
6206 initialization now. */
6207 else if (omp_is_reference (var) && is_simd)
6208 handle_simd_reference (clause_loc, new_vard, ilist);
6209
6210 tree lab2 = NULL_TREE;
6211 if (cond)
6212 {
6213 gimple *g;
6214 if (!is_parallel_ctx (ctx))
6215 {
6216 tree condv = create_tmp_var (boolean_type_node);
6217 tree m = build_simple_mem_ref (cond);
6218 g = gimple_build_assign (condv, m);
6219 gimple_seq_add_stmt (ilist, g);
6220 tree lab1
6221 = create_artificial_label (UNKNOWN_LOCATION);
6222 lab2 = create_artificial_label (UNKNOWN_LOCATION);
6223 g = gimple_build_cond (NE_EXPR, condv,
6224 boolean_false_node,
6225 lab2, lab1);
6226 gimple_seq_add_stmt (ilist, g);
6227 gimple_seq_add_stmt (ilist,
6228 gimple_build_label (lab1));
6229 }
6230 g = gimple_build_assign (build_simple_mem_ref (cond),
6231 boolean_true_node);
6232 gimple_seq_add_stmt (ilist, g);
6233 }
6234 x = lang_hooks.decls.omp_clause_default_ctor
6235 (c, unshare_expr (new_var),
6236 cond ? NULL_TREE
6237 : build_outer_var_ref (var, ctx));
6238 if (x)
6239 gimplify_and_add (x, ilist);
6240
6241 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6242 && OMP_CLAUSE_REDUCTION_INSCAN (c))
6243 {
6244 if (ctx->for_simd_scan_phase)
6245 goto do_dtor;
6246 if (x || (!is_simd
6247 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c)))
6248 {
6249 tree nv = create_tmp_var_raw (TREE_TYPE (new_var));
6250 gimple_add_tmp_var (nv);
6251 ctx->cb.decl_map->put (new_vard, nv);
6252 x = lang_hooks.decls.omp_clause_default_ctor
6253 (c, nv, build_outer_var_ref (var, ctx));
6254 if (x)
6255 gimplify_and_add (x, ilist);
6256 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
6257 {
6258 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
6259 tree vexpr = nv;
6260 if (new_vard != new_var)
6261 vexpr = build_fold_addr_expr (nv);
6262 SET_DECL_VALUE_EXPR (new_vard, vexpr);
6263 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
6264 lower_omp (&tseq, ctx);
6265 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
6266 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
6267 gimple_seq_add_seq (ilist, tseq);
6268 }
6269 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
6270 if (is_simd && ctx->scan_exclusive)
6271 {
6272 tree nv2
6273 = create_tmp_var_raw (TREE_TYPE (new_var));
6274 gimple_add_tmp_var (nv2);
6275 ctx->cb.decl_map->put (nv, nv2);
6276 x = lang_hooks.decls.omp_clause_default_ctor
6277 (c, nv2, build_outer_var_ref (var, ctx));
6278 gimplify_and_add (x, ilist);
6279 x = lang_hooks.decls.omp_clause_dtor (c, nv2);
6280 if (x)
6281 gimplify_and_add (x, dlist);
6282 }
6283 x = lang_hooks.decls.omp_clause_dtor (c, nv);
6284 if (x)
6285 gimplify_and_add (x, dlist);
6286 }
6287 else if (is_simd
6288 && ctx->scan_exclusive
6289 && TREE_ADDRESSABLE (TREE_TYPE (new_var)))
6290 {
6291 tree nv2 = create_tmp_var_raw (TREE_TYPE (new_var));
6292 gimple_add_tmp_var (nv2);
6293 ctx->cb.decl_map->put (new_vard, nv2);
6294 x = lang_hooks.decls.omp_clause_dtor (c, nv2);
6295 if (x)
6296 gimplify_and_add (x, dlist);
6297 }
6298 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
6299 goto do_dtor;
6300 }
6301
6302 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
6303 {
6304 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
6305 lower_omp (&tseq, ctx);
6306 gimple_seq_add_seq (ilist, tseq);
6307 }
6308 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
6309 if (is_simd)
6310 {
6311 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
6312 lower_omp (&tseq, ctx);
6313 gimple_seq_add_seq (dlist, tseq);
6314 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
6315 }
6316 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
6317 if (cond)
6318 {
6319 if (lab2)
6320 gimple_seq_add_stmt (ilist, gimple_build_label (lab2));
6321 break;
6322 }
6323 goto do_dtor;
6324 }
6325 else
6326 {
6327 x = omp_reduction_init (c, TREE_TYPE (new_var));
6328 gcc_assert (TREE_CODE (TREE_TYPE (new_var)) != ARRAY_TYPE);
6329 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
6330
6331 if (cond)
6332 {
6333 gimple *g;
6334 tree lab2 = NULL_TREE;
6335 /* GOMP_taskgroup_reduction_register memsets the whole
6336 array to zero. If the initializer is zero, we don't
6337 need to initialize it again, just mark it as ever
6338 used unconditionally, i.e. cond = true. */
6339 if (initializer_zerop (x))
6340 {
6341 g = gimple_build_assign (build_simple_mem_ref (cond),
6342 boolean_true_node);
6343 gimple_seq_add_stmt (ilist, g);
6344 break;
6345 }
6346
6347 /* Otherwise, emit
6348 if (!cond) { cond = true; new_var = x; } */
6349 if (!is_parallel_ctx (ctx))
6350 {
6351 tree condv = create_tmp_var (boolean_type_node);
6352 tree m = build_simple_mem_ref (cond);
6353 g = gimple_build_assign (condv, m);
6354 gimple_seq_add_stmt (ilist, g);
6355 tree lab1
6356 = create_artificial_label (UNKNOWN_LOCATION);
6357 lab2 = create_artificial_label (UNKNOWN_LOCATION);
6358 g = gimple_build_cond (NE_EXPR, condv,
6359 boolean_false_node,
6360 lab2, lab1);
6361 gimple_seq_add_stmt (ilist, g);
6362 gimple_seq_add_stmt (ilist,
6363 gimple_build_label (lab1));
6364 }
6365 g = gimple_build_assign (build_simple_mem_ref (cond),
6366 boolean_true_node);
6367 gimple_seq_add_stmt (ilist, g);
6368 gimplify_assign (new_var, x, ilist);
6369 if (lab2)
6370 gimple_seq_add_stmt (ilist, gimple_build_label (lab2));
6371 break;
6372 }
6373
6374 /* reduction(-:var) sums up the partial results, so it
6375 acts identically to reduction(+:var). */
6376 if (code == MINUS_EXPR)
6377 code = PLUS_EXPR;
6378
6379 bool is_truth_op
6380 = (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR);
6381 tree new_vard = new_var;
6382 if (is_simd && omp_is_reference (var))
6383 {
6384 gcc_assert (TREE_CODE (new_var) == MEM_REF);
6385 new_vard = TREE_OPERAND (new_var, 0);
6386 gcc_assert (DECL_P (new_vard));
6387 }
6388 tree rvar = NULL_TREE, *rvarp = NULL, rvar2 = NULL_TREE;
6389 if (is_simd
6390 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6391 && OMP_CLAUSE_REDUCTION_INSCAN (c))
6392 rvarp = &rvar;
6393 if (is_simd
6394 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
6395 ivar, lvar, rvarp,
6396 &rvar2))
6397 {
6398 if (new_vard != new_var)
6399 {
6400 SET_DECL_VALUE_EXPR (new_vard,
6401 build_fold_addr_expr (lvar));
6402 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
6403 }
6404
6405 tree ref = build_outer_var_ref (var, ctx);
6406
6407 if (rvarp)
6408 {
6409 if (ctx->for_simd_scan_phase)
6410 break;
6411 gimplify_assign (ivar, ref, &llist[0]);
6412 ref = build_outer_var_ref (var, ctx);
6413 gimplify_assign (ref, rvar, &llist[3]);
6414 break;
6415 }
6416
6417 gimplify_assign (unshare_expr (ivar), x, &llist[0]);
6418
6419 if (sctx.is_simt)
6420 {
6421 if (!simt_lane)
6422 simt_lane = create_tmp_var (unsigned_type_node);
6423 x = build_call_expr_internal_loc
6424 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_BFLY,
6425 TREE_TYPE (ivar), 2, ivar, simt_lane);
6426 x = build2 (code, TREE_TYPE (ivar), ivar, x);
6427 gimplify_assign (ivar, x, &llist[2]);
6428 }
6429 tree ivar2 = ivar;
6430 tree ref2 = ref;
6431 if (is_truth_op)
6432 {
6433 tree zero = build_zero_cst (TREE_TYPE (ivar));
6434 ivar2 = fold_build2_loc (clause_loc, NE_EXPR,
6435 boolean_type_node, ivar,
6436 zero);
6437 ref2 = fold_build2_loc (clause_loc, NE_EXPR,
6438 boolean_type_node, ref,
6439 zero);
6440 }
6441 x = build2 (code, TREE_TYPE (ref), ref2, ivar2);
6442 if (is_truth_op)
6443 x = fold_convert (TREE_TYPE (ref), x);
6444 ref = build_outer_var_ref (var, ctx);
6445 gimplify_assign (ref, x, &llist[1]);
6446
6447 }
6448 else
6449 {
6450 lower_private_allocate (var, new_var, allocator,
6451 allocate_ptr, ilist, ctx,
6452 false, NULL_TREE);
6453 if (omp_is_reference (var) && is_simd)
6454 handle_simd_reference (clause_loc, new_vard, ilist);
6455 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6456 && OMP_CLAUSE_REDUCTION_INSCAN (c))
6457 break;
6458 gimplify_assign (new_var, x, ilist);
6459 if (is_simd)
6460 {
6461 tree ref = build_outer_var_ref (var, ctx);
6462 tree new_var2 = new_var;
6463 tree ref2 = ref;
6464 if (is_truth_op)
6465 {
6466 tree zero = build_zero_cst (TREE_TYPE (new_var));
6467 new_var2
6468 = fold_build2_loc (clause_loc, NE_EXPR,
6469 boolean_type_node, new_var,
6470 zero);
6471 ref2 = fold_build2_loc (clause_loc, NE_EXPR,
6472 boolean_type_node, ref,
6473 zero);
6474 }
6475 x = build2 (code, TREE_TYPE (ref2), ref2, new_var2);
6476 if (is_truth_op)
6477 x = fold_convert (TREE_TYPE (new_var), x);
6478 ref = build_outer_var_ref (var, ctx);
6479 gimplify_assign (ref, x, dlist);
6480 }
6481 if (allocator)
6482 goto do_dtor;
6483 }
6484 }
6485 break;
6486
6487 default:
6488 gcc_unreachable ();
6489 }
6490 }
6491 }
6492 if (tskred_avar)
6493 {
6494 tree clobber = build_clobber (TREE_TYPE (tskred_avar));
6495 gimple_seq_add_stmt (ilist, gimple_build_assign (tskred_avar, clobber));
6496 }
6497
6498 if (known_eq (sctx.max_vf, 1U))
6499 {
6500 sctx.is_simt = false;
6501 if (ctx->lastprivate_conditional_map)
6502 {
6503 if (gimple_omp_for_combined_into_p (ctx->stmt))
6504 {
6505 /* Signal to lower_omp_1 that it should use parent context. */
6506 ctx->combined_into_simd_safelen1 = true;
6507 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
6508 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6509 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
6510 {
6511 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
6512 omp_context *outer = ctx->outer;
6513 if (gimple_code (outer->stmt) == GIMPLE_OMP_SCAN)
6514 outer = outer->outer;
6515 tree *v = ctx->lastprivate_conditional_map->get (o);
6516 tree po = lookup_decl (OMP_CLAUSE_DECL (c), outer);
6517 tree *pv = outer->lastprivate_conditional_map->get (po);
6518 *v = *pv;
6519 }
6520 }
6521 else
6522 {
6523 /* When not vectorized, treat lastprivate(conditional:) like
6524 normal lastprivate, as there will be just one simd lane
6525 writing the privatized variable. */
6526 delete ctx->lastprivate_conditional_map;
6527 ctx->lastprivate_conditional_map = NULL;
6528 }
6529 }
6530 }
6531
6532 if (nonconst_simd_if)
6533 {
6534 if (sctx.lane == NULL_TREE)
6535 {
6536 sctx.idx = create_tmp_var (unsigned_type_node);
6537 sctx.lane = create_tmp_var (unsigned_type_node);
6538 }
6539 /* FIXME: For now. */
6540 sctx.is_simt = false;
6541 }
6542
6543 if (sctx.lane || sctx.is_simt)
6544 {
6545 uid = create_tmp_var (ptr_type_node, "simduid");
6546 /* Don't want uninit warnings on simduid, it is always uninitialized,
6547 but we use it not for the value, but for the DECL_UID only. */
6548 TREE_NO_WARNING (uid) = 1;
6549 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SIMDUID_);
6550 OMP_CLAUSE__SIMDUID__DECL (c) = uid;
6551 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
6552 gimple_omp_for_set_clauses (ctx->stmt, c);
6553 }
6554 /* Emit calls denoting privatized variables and initializing a pointer to
6555 structure that holds private variables as fields after ompdevlow pass. */
6556 if (sctx.is_simt)
6557 {
6558 sctx.simt_eargs[0] = uid;
6559 gimple *g
6560 = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, sctx.simt_eargs);
6561 gimple_call_set_lhs (g, uid);
6562 gimple_seq_add_stmt (ilist, g);
6563 sctx.simt_eargs.release ();
6564
6565 simtrec = create_tmp_var (ptr_type_node, ".omp_simt");
6566 g = gimple_build_call_internal (IFN_GOMP_SIMT_ENTER_ALLOC, 1, uid);
6567 gimple_call_set_lhs (g, simtrec);
6568 gimple_seq_add_stmt (ilist, g);
6569 }
6570 if (sctx.lane)
6571 {
6572 gimple *g = gimple_build_call_internal (IFN_GOMP_SIMD_LANE,
6573 2 + (nonconst_simd_if != NULL),
6574 uid, integer_zero_node,
6575 nonconst_simd_if);
6576 gimple_call_set_lhs (g, sctx.lane);
6577 gimple_stmt_iterator gsi = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
6578 gsi_insert_before_without_update (&gsi, g, GSI_SAME_STMT);
6579 g = gimple_build_assign (sctx.lane, INTEGER_CST,
6580 build_int_cst (unsigned_type_node, 0));
6581 gimple_seq_add_stmt (ilist, g);
6582 if (sctx.lastlane)
6583 {
6584 g = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
6585 2, uid, sctx.lane);
6586 gimple_call_set_lhs (g, sctx.lastlane);
6587 gimple_seq_add_stmt (dlist, g);
6588 gimple_seq_add_seq (dlist, llist[3]);
6589 }
6590 /* Emit reductions across SIMT lanes in log_2(simt_vf) steps. */
6591 if (llist[2])
6592 {
6593 tree simt_vf = create_tmp_var (unsigned_type_node);
6594 g = gimple_build_call_internal (IFN_GOMP_SIMT_VF, 0);
6595 gimple_call_set_lhs (g, simt_vf);
6596 gimple_seq_add_stmt (dlist, g);
6597
6598 tree t = build_int_cst (unsigned_type_node, 1);
6599 g = gimple_build_assign (simt_lane, INTEGER_CST, t);
6600 gimple_seq_add_stmt (dlist, g);
6601
6602 t = build_int_cst (unsigned_type_node, 0);
6603 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
6604 gimple_seq_add_stmt (dlist, g);
6605
6606 tree body = create_artificial_label (UNKNOWN_LOCATION);
6607 tree header = create_artificial_label (UNKNOWN_LOCATION);
6608 tree end = create_artificial_label (UNKNOWN_LOCATION);
6609 gimple_seq_add_stmt (dlist, gimple_build_goto (header));
6610 gimple_seq_add_stmt (dlist, gimple_build_label (body));
6611
6612 gimple_seq_add_seq (dlist, llist[2]);
6613
6614 g = gimple_build_assign (simt_lane, LSHIFT_EXPR, simt_lane, integer_one_node);
6615 gimple_seq_add_stmt (dlist, g);
6616
6617 gimple_seq_add_stmt (dlist, gimple_build_label (header));
6618 g = gimple_build_cond (LT_EXPR, simt_lane, simt_vf, body, end);
6619 gimple_seq_add_stmt (dlist, g);
6620
6621 gimple_seq_add_stmt (dlist, gimple_build_label (end));
6622 }
6623 for (int i = 0; i < 2; i++)
6624 if (llist[i])
6625 {
6626 tree vf = create_tmp_var (unsigned_type_node);
6627 g = gimple_build_call_internal (IFN_GOMP_SIMD_VF, 1, uid);
6628 gimple_call_set_lhs (g, vf);
6629 gimple_seq *seq = i == 0 ? ilist : dlist;
6630 gimple_seq_add_stmt (seq, g);
6631 tree t = build_int_cst (unsigned_type_node, 0);
6632 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
6633 gimple_seq_add_stmt (seq, g);
6634 tree body = create_artificial_label (UNKNOWN_LOCATION);
6635 tree header = create_artificial_label (UNKNOWN_LOCATION);
6636 tree end = create_artificial_label (UNKNOWN_LOCATION);
6637 gimple_seq_add_stmt (seq, gimple_build_goto (header));
6638 gimple_seq_add_stmt (seq, gimple_build_label (body));
6639 gimple_seq_add_seq (seq, llist[i]);
6640 t = build_int_cst (unsigned_type_node, 1);
6641 g = gimple_build_assign (sctx.idx, PLUS_EXPR, sctx.idx, t);
6642 gimple_seq_add_stmt (seq, g);
6643 gimple_seq_add_stmt (seq, gimple_build_label (header));
6644 g = gimple_build_cond (LT_EXPR, sctx.idx, vf, body, end);
6645 gimple_seq_add_stmt (seq, g);
6646 gimple_seq_add_stmt (seq, gimple_build_label (end));
6647 }
6648 }
6649 if (sctx.is_simt)
6650 {
6651 gimple_seq_add_seq (dlist, sctx.simt_dlist);
6652 gimple *g
6653 = gimple_build_call_internal (IFN_GOMP_SIMT_EXIT, 1, simtrec);
6654 gimple_seq_add_stmt (dlist, g);
6655 }
6656
6657 /* The copyin sequence is not to be executed by the main thread, since
6658 that would result in self-copies. Perhaps not visible to scalars,
6659 but it certainly is to C++ operator=. */
6660 if (copyin_seq)
6661 {
6662 x = build_call_expr (builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM),
6663 0);
6664 x = build2 (NE_EXPR, boolean_type_node, x,
6665 build_int_cst (TREE_TYPE (x), 0));
6666 x = build3 (COND_EXPR, void_type_node, x, copyin_seq, NULL);
6667 gimplify_and_add (x, ilist);
6668 }
6669
6670 /* If any copyin variable is passed by reference, we must ensure the
6671 master thread doesn't modify it before it is copied over in all
6672 threads. Similarly for variables in both firstprivate and
6673 lastprivate clauses we need to ensure the lastprivate copying
6674 happens after firstprivate copying in all threads. And similarly
6675 for UDRs if initializer expression refers to omp_orig. */
6676 if (copyin_by_ref || lastprivate_firstprivate
6677 || (reduction_omp_orig_ref
6678 && !ctx->scan_inclusive
6679 && !ctx->scan_exclusive))
6680 {
6681 /* Don't add any barrier for #pragma omp simd or
6682 #pragma omp distribute. */
6683 if (!is_task_ctx (ctx)
6684 && (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
6685 || gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_FOR))
6686 gimple_seq_add_stmt (ilist, omp_build_barrier (NULL_TREE));
6687 }
6688
6689 /* If max_vf is non-zero, then we can use only a vectorization factor
6690 up to the max_vf we chose. So stick it into the safelen clause. */
6691 if (maybe_ne (sctx.max_vf, 0U))
6692 {
6693 tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
6694 OMP_CLAUSE_SAFELEN);
6695 poly_uint64 safe_len;
6696 if (c == NULL_TREE
6697 || (poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
6698 && maybe_gt (safe_len, sctx.max_vf)))
6699 {
6700 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_SAFELEN);
6701 OMP_CLAUSE_SAFELEN_EXPR (c) = build_int_cst (integer_type_node,
6702 sctx.max_vf);
6703 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
6704 gimple_omp_for_set_clauses (ctx->stmt, c);
6705 }
6706 }
6707 }
6708
6709 /* Create temporary variables for lastprivate(conditional:) implementation
6710 in context CTX with CLAUSES. */
6711
6712 static void
lower_lastprivate_conditional_clauses(tree * clauses,omp_context * ctx)6713 lower_lastprivate_conditional_clauses (tree *clauses, omp_context *ctx)
6714 {
/* Type of the iteration counter used to remember in which iteration the
last conditional store happened; chosen lazily on the first conditional
lastprivate clause seen. */
6715 tree iter_type = NULL_TREE;
/* Non-simd case: pointer through which the per-variable condition temps
are accessed. */
6716 tree cond_ptr = NULL_TREE;
/* Artificial variable holding the current iteration number. */
6717 tree iter_var = NULL_TREE;
6718 bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
6719 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD);
/* For simd, each conditional lastprivate clause is expected to be paired
with a pre-created _CONDTEMP_ clause; NEXT tracks where to resume the
search so successive clauses pick up successive _CONDTEMP_s. */
6720 tree next = *clauses;
6721 for (tree c = *clauses; c; c = OMP_CLAUSE_CHAIN (c))
6722 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6723 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
6724 {
6725 if (is_simd)
6726 {
6727 tree cc = omp_find_clause (next, OMP_CLAUSE__CONDTEMP_);
/* The matching _CONDTEMP_ must have been added earlier.  */
6728 gcc_assert (cc);
6729 if (iter_type == NULL_TREE)
6730 {
/* First conditional clause: derive the counter type from the
_CONDTEMP_ decl and materialize the iteration variable.  */
6731 iter_type = TREE_TYPE (OMP_CLAUSE_DECL (cc));
6732 iter_var = create_tmp_var_raw (iter_type);
6733 DECL_CONTEXT (iter_var) = current_function_decl;
6734 DECL_SEEN_IN_BIND_EXPR_P (iter_var) = 1;
/* Thread the new temporary onto the context's block vars.  */
6735 DECL_CHAIN (iter_var) = ctx->block_vars;
6736 ctx->block_vars = iter_var;
/* Prepend a _CONDTEMP_ clause flagged as the iterator holder
so later lowering can find the iteration counter.  */
6737 tree c3
6738 = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_);
6739 OMP_CLAUSE__CONDTEMP__ITER (c3) = 1;
6740 OMP_CLAUSE_DECL (c3) = iter_var;
6741 OMP_CLAUSE_CHAIN (c3) = *clauses;
6742 *clauses = c3;
6743 ctx->lastprivate_conditional_map = new hash_map<tree, tree>;
6744 }
/* Resume the _CONDTEMP_ search after the one just consumed.  */
6745 next = OMP_CLAUSE_CHAIN (cc);
/* Map the privatized decl to its condition temporary.  */
6746 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
6747 tree v = lookup_decl (OMP_CLAUSE_DECL (cc), ctx);
6748 ctx->lastprivate_conditional_map->put (o, v);
6749 continue;
6750 }
6751 if (iter_type == NULL)
6752 {
/* Non-simd, first conditional clause: pick the counter type from
the loop's iterator type (for loops) or plain unsigned (for
sections), then find or create the _CONDTEMP_ machinery.  */
6753 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR)
6754 {
6755 struct omp_for_data fd;
6756 omp_extract_for_data (as_a <gomp_for *> (ctx->stmt), &fd,
6757 NULL);
6758 iter_type = unsigned_type_for (fd.iter_type);
6759 }
6760 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
6761 iter_type = unsigned_type_node;
6762 tree c2 = omp_find_clause (*clauses, OMP_CLAUSE__CONDTEMP_);
6763 if (c2)
6764 {
/* Reuse an existing _CONDTEMP_ clause, resolving its decl in
the outer context.  */
6765 cond_ptr
6766 = lookup_decl_in_outer_ctx (OMP_CLAUSE_DECL (c2), ctx);
6767 OMP_CLAUSE_DECL (c2) = cond_ptr;
6768 }
6769 else
6770 {
/* Otherwise create the pointer temp and a fresh _CONDTEMP_
clause carrying it, prepended to the clause chain.  */
6771 cond_ptr = create_tmp_var_raw (build_pointer_type (iter_type));
6772 DECL_CONTEXT (cond_ptr) = current_function_decl;
6773 DECL_SEEN_IN_BIND_EXPR_P (cond_ptr) = 1;
6774 DECL_CHAIN (cond_ptr) = ctx->block_vars;
6775 ctx->block_vars = cond_ptr;
6776 c2 = build_omp_clause (UNKNOWN_LOCATION,
6777 OMP_CLAUSE__CONDTEMP_);
6778 OMP_CLAUSE_DECL (c2) = cond_ptr;
6779 OMP_CLAUSE_CHAIN (c2) = *clauses;
6780 *clauses = c2;
6781 }
/* The iteration-counter variable, published via a second
_CONDTEMP_ clause (ITER flag set) chained right after C2.  */
6782 iter_var = create_tmp_var_raw (iter_type);
6783 DECL_CONTEXT (iter_var) = current_function_decl;
6784 DECL_SEEN_IN_BIND_EXPR_P (iter_var) = 1;
6785 DECL_CHAIN (iter_var) = ctx->block_vars;
6786 ctx->block_vars = iter_var;
6787 tree c3
6788 = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_);
6789 OMP_CLAUSE__CONDTEMP__ITER (c3) = 1;
6790 OMP_CLAUSE_DECL (c3) = iter_var;
6791 OMP_CLAUSE_CHAIN (c3) = OMP_CLAUSE_CHAIN (c2);
6792 OMP_CLAUSE_CHAIN (c2) = c3;
6793 ctx->lastprivate_conditional_map = new hash_map<tree, tree>;
6794 }
/* Per-variable condition temporary recording the iteration of the
last conditional store; map the privatized decl to it.  */
6795 tree v = create_tmp_var_raw (iter_type);
6796 DECL_CONTEXT (v) = current_function_decl;
6797 DECL_SEEN_IN_BIND_EXPR_P (v) = 1;
6798 DECL_CHAIN (v) = ctx->block_vars;
6799 ctx->block_vars = v;
6800 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
6801 ctx->lastprivate_conditional_map->put (o, v);
6802 }
6803 }
6804
6805
6806 /* Generate code to implement the LASTPRIVATE clauses. This is used for
6807 both parallel and workshare constructs. PREDICATE may be NULL if it's
6808 always true. BODY_P is the sequence to insert early initialization
6809 if needed, STMT_LIST is where the non-conditional lastprivate handling
6810 goes into and CSTMT_LIST is a sequence that needs to be run in a critical
6811 section. */
6812
6813 static void
lower_lastprivate_clauses(tree clauses,tree predicate,gimple_seq * body_p,gimple_seq * stmt_list,gimple_seq * cstmt_list,omp_context * ctx)6814 lower_lastprivate_clauses (tree clauses, tree predicate, gimple_seq *body_p,
6815 gimple_seq *stmt_list, gimple_seq *cstmt_list,
6816 omp_context *ctx)
6817 {
6818 tree x, c, label = NULL, orig_clauses = clauses;
6819 bool par_clauses = false;
6820 tree simduid = NULL, lastlane = NULL, simtcond = NULL, simtlast = NULL;
/* CONDITIONAL_OFF is the running slot offset into the _condtemp_ buffer
   used by lastprivate(conditional:) handling below; POST_STMT_LIST
   collects copy-outs deferred until after LABEL for the
   combined-into-simd-safelen1 case (appended at the very end).  */
6821 unsigned HOST_WIDE_INT conditional_off = 0;
6822 gimple_seq post_stmt_list = NULL;
6823 
6824 /* Early exit if there are no lastprivate or linear clauses. */
6825 for (; clauses ; clauses = OMP_CLAUSE_CHAIN (clauses))
6826 if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LASTPRIVATE
6827 || (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LINEAR
6828 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (clauses)))
6829 break;
6830 if (clauses == NULL)
6831 {
6832 /* If this was a workshare clause, see if it had been combined
6833 with its parallel. In that case, look for the clauses on the
6834 parallel statement itself. */
6835 if (is_parallel_ctx (ctx))
6836 return;
6837 
6838 ctx = ctx->outer;
6839 if (ctx == NULL || !is_parallel_ctx (ctx))
6840 return;
6841 
6842 clauses = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
6843 OMP_CLAUSE_LASTPRIVATE);
6844 if (clauses == NULL)
6845 return;
6846 par_clauses = true;
6847 }
6848 
/* For simd loops, note whether this might be lowered for SIMT
   (_simt_ clause present) and fetch the simduid decl used to index
   the per-lane "omp simd array" copies.  */
6849 bool maybe_simt = false;
6850 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
6851 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
6852 {
6853 maybe_simt = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMT_);
6854 simduid = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMDUID_);
6855 if (simduid)
6856 simduid = OMP_CLAUSE__SIMDUID__DECL (simduid);
6857 }
6858 
/* If PREDICATE was given, guard the copy-out code emitted below so it
   only runs when the predicate holds: branch to LABEL (past the
   copy-outs) otherwise.  Under SIMT the predicate is first combined
   across lanes with GOMP_SIMT_VOTE_ANY.  */
6859 if (predicate)
6860 {
6861 gcond *stmt;
6862 tree label_true, arm1, arm2;
6863 enum tree_code pred_code = TREE_CODE (predicate);
6864 
6865 label = create_artificial_label (UNKNOWN_LOCATION);
6866 label_true = create_artificial_label (UNKNOWN_LOCATION);
6867 if (TREE_CODE_CLASS (pred_code) == tcc_comparison)
6868 {
6869 arm1 = TREE_OPERAND (predicate, 0);
6870 arm2 = TREE_OPERAND (predicate, 1);
6871 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
6872 gimplify_expr (&arm2, stmt_list, NULL, is_gimple_val, fb_rvalue);
6873 }
6874 else
6875 {
6876 arm1 = predicate;
6877 gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
6878 arm2 = boolean_false_node;
6879 pred_code = NE_EXPR;
6880 }
6881 if (maybe_simt)
6882 {
6883 c = build2 (pred_code, boolean_type_node, arm1, arm2);
6884 c = fold_convert (integer_type_node, c);
6885 simtcond = create_tmp_var (integer_type_node);
6886 gimplify_assign (simtcond, c, stmt_list);
6887 gcall *g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY,
6888 1, simtcond);
6889 c = create_tmp_var (integer_type_node);
6890 gimple_call_set_lhs (g, c);
6891 gimple_seq_add_stmt (stmt_list, g);
6892 stmt = gimple_build_cond (NE_EXPR, c, integer_zero_node,
6893 label_true, label);
6894 }
6895 else
6896 stmt = gimple_build_cond (pred_code, arm1, arm2, label_true, label);
6897 gimple_seq_add_stmt (stmt_list, stmt);
6898 gimple_seq_add_stmt (stmt_list, gimple_build_label (label_true));
6899 }
6900 
6901 tree cond_ptr = NULL_TREE;
6902 for (c = clauses; c ;)
6903 {
6904 tree var, new_var;
6905 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
6906 gimple_seq *this_stmt_list = stmt_list;
6907 tree lab2 = NULL_TREE;
6908 
6909 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6910 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
6911 && ctx->lastprivate_conditional_map
6912 && !ctx->combined_into_simd_safelen1)
6913 {
/* Conditional lastprivate: V (the per-variable temp recorded in
   lastprivate_conditional_map) is zero-initialized early in BODY_P,
   then compared against this variable's slot in the _condtemp_
   buffer; when V is greater, V is stored into the slot.  These
   compare-and-store statements go into CSTMT_LIST, which the caller
   runs inside a critical section.  */
6914 gcc_assert (body_p);
6915 if (simduid)
6916 goto next;
6917 if (cond_ptr == NULL_TREE)
6918 {
6919 cond_ptr = omp_find_clause (orig_clauses, OMP_CLAUSE__CONDTEMP_);
6920 cond_ptr = OMP_CLAUSE_DECL (cond_ptr);
6921 }
6922 tree type = TREE_TYPE (TREE_TYPE (cond_ptr));
6923 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
6924 tree v = *ctx->lastprivate_conditional_map->get (o);
6925 gimplify_assign (v, build_zero_cst (type), body_p);
6926 this_stmt_list = cstmt_list;
6927 tree mem;
6928 if (POINTER_TYPE_P (TREE_TYPE (cond_ptr)))
6929 {
6930 mem = build2 (MEM_REF, type, cond_ptr,
6931 build_int_cst (TREE_TYPE (cond_ptr),
6932 conditional_off))
6933 conditional_off += tree_to_uhwi (TYPE_SIZE_UNIT (type));
6934 }
6935 else
6936 mem = build4 (ARRAY_REF, type, cond_ptr,
6937 size_int (conditional_off++), NULL_TREE, NULL_TREE);
6938 tree mem2 = copy_node (mem);
6939 gimple_seq seq = NULL;
6940 mem = force_gimple_operand (mem, &seq, true, NULL_TREE);
6941 gimple_seq_add_seq (this_stmt_list, seq);
6942 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
6943 lab2 = create_artificial_label (UNKNOWN_LOCATION);
6944 gimple *g = gimple_build_cond (GT_EXPR, v, mem, lab1, lab2);
6945 gimple_seq_add_stmt (this_stmt_list, g);
6946 gimple_seq_add_stmt (this_stmt_list, gimple_build_label (lab1));
6947 gimplify_assign (mem2, v, this_stmt_list);
6948 }
6949 else if (predicate
6950 && ctx->combined_into_simd_safelen1
6951 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6952 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
6953 && ctx->lastprivate_conditional_map)
6954 this_stmt_list = &post_stmt_list;
6955 
6956 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6957 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
6958 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
6959 {
6960 var = OMP_CLAUSE_DECL (c);
6961 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6962 && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
6963 && is_taskloop_ctx (ctx))
6964 {
6965 gcc_checking_assert (ctx->outer && is_task_ctx (ctx->outer));
6966 new_var = lookup_decl (var, ctx->outer);
6967 }
6968 else
6969 {
6970 new_var = lookup_decl (var, ctx);
6971 /* Avoid uninitialized warnings for lastprivate and
6972 for linear iterators. */
6973 if (predicate
6974 && (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6975 || OMP_CLAUSE_LINEAR_NO_COPYIN (c)))
6976 TREE_NO_WARNING (new_var) = 1;
6977 }
6978 
/* For a non-SIMT simd lowering where NEW_VAR is backed by an
   "omp simd array", re-index the array with the last lane number
   obtained from GOMP_SIMD_LAST_LANE (computed once per loop).  */
6979 if (!maybe_simt && simduid && DECL_HAS_VALUE_EXPR_P (new_var))
6980 {
6981 tree val = DECL_VALUE_EXPR (new_var);
6982 if (TREE_CODE (val) == ARRAY_REF
6983 && VAR_P (TREE_OPERAND (val, 0))
6984 && lookup_attribute ("omp simd array",
6985 DECL_ATTRIBUTES (TREE_OPERAND (val,
6986 0))))
6987 {
6988 if (lastlane == NULL)
6989 {
6990 lastlane = create_tmp_var (unsigned_type_node);
6991 gcall *g
6992 = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
6993 2, simduid,
6994 TREE_OPERAND (val, 1));
6995 gimple_call_set_lhs (g, lastlane);
6996 gimple_seq_add_stmt (this_stmt_list, g);
6997 }
6998 new_var = build4 (ARRAY_REF, TREE_TYPE (val),
6999 TREE_OPERAND (val, 0), lastlane,
7000 NULL_TREE, NULL_TREE);
7001 TREE_THIS_NOTRAP (new_var) = 1;
7002 }
7003 }
7004 else if (maybe_simt)
7005 {
7006 tree val = (DECL_HAS_VALUE_EXPR_P (new_var)
7007 ? DECL_VALUE_EXPR (new_var)
7008 : new_var);
7009 if (simtlast == NULL)
7010 {
7011 simtlast = create_tmp_var (unsigned_type_node);
7012 gcall *g = gimple_build_call_internal
7013 (IFN_GOMP_SIMT_LAST_LANE, 1, simtcond);
7014 gimple_call_set_lhs (g, simtlast);
7015 gimple_seq_add_stmt (this_stmt_list, g);
7016 }
7017 x = build_call_expr_internal_loc
7018 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_IDX,
7019 TREE_TYPE (val), 2, val, simtlast);
7020 new_var = unshare_expr (new_var);
7021 gimplify_assign (new_var, x, this_stmt_list);
7022 new_var = unshare_expr (new_var);
7023 }
7024 
7025 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7026 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
7027 {
7028 lower_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
7029 gimple_seq_add_seq (this_stmt_list,
7030 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
7031 OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) = NULL;
7032 }
7033 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
7034 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
7035 {
7036 lower_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
7037 gimple_seq_add_seq (this_stmt_list,
7038 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
7039 OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) = NULL;
7040 }
7041 
/* Emit the actual copy-out: assign the private copy NEW_VAR to the
   outer reference X via the langhook's clause assignment op.  */
7042 x = NULL_TREE;
7043 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7044 && OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c)
7045 && is_taskloop_ctx (ctx))
7046 {
7047 tree ovar = maybe_lookup_decl_in_outer_ctx (var,
7048 ctx->outer->outer);
7049 if (is_global_var (ovar))
7050 x = ovar;
7051 }
7052 if (!x)
7053 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_LASTPRIVATE);
7054 if (omp_is_reference (var))
7055 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
7056 x = lang_hooks.decls.omp_clause_assign_op (c, x, new_var);
7057 gimplify_and_add (x, this_stmt_list);
7058 
7059 if (lab2)
7060 gimple_seq_add_stmt (this_stmt_list, gimple_build_label (lab2));
7061 }
7062 
7063 next:
7064 c = OMP_CLAUSE_CHAIN (c);
7065 if (c == NULL && !par_clauses)
7066 {
7067 /* If this was a workshare clause, see if it had been combined
7068 with its parallel. In that case, continue looking for the
7069 clauses also on the parallel statement itself. */
7070 if (is_parallel_ctx (ctx))
7071 break;
7072 
7073 ctx = ctx->outer;
7074 if (ctx == NULL || !is_parallel_ctx (ctx))
7075 break;
7076 
7077 c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
7078 OMP_CLAUSE_LASTPRIVATE);
7079 par_clauses = true;
7080 }
7081 }
7082 
/* LABEL is the branch target that skips the guarded copy-outs; the
   deferred POST_STMT_LIST statements run unconditionally after it.  */
7083 if (label)
7084 gimple_seq_add_stmt (stmt_list, gimple_build_label (label));
7085 gimple_seq_add_seq (stmt_list, post_stmt_list);
7086 }
7087
7088 /* Lower the OpenACC reductions of CLAUSES for compute axis LEVEL
7089 (which might be a placeholder). INNER is true if this is an inner
7090 axis of a multi-axis loop. FORK and JOIN are (optional) fork and
7091 join markers. Generate the before-loop forking sequence in
7092 FORK_SEQ and the after-loop joining sequence to JOIN_SEQ. The
7093 general form of these sequences is
7094
7095 GOACC_REDUCTION_SETUP
7096 GOACC_FORK
7097 GOACC_REDUCTION_INIT
7098 ...
7099 GOACC_REDUCTION_FINI
7100 GOACC_JOIN
7101 GOACC_REDUCTION_TEARDOWN. */
7102
7103 static void
lower_oacc_reductions(location_t loc,tree clauses,tree level,bool inner,gcall * fork,gcall * join,gimple_seq * fork_seq,gimple_seq * join_seq,omp_context * ctx)7104 lower_oacc_reductions (location_t loc, tree clauses, tree level, bool inner,
7105 gcall *fork, gcall *join, gimple_seq *fork_seq,
7106 gimple_seq *join_seq, omp_context *ctx)
7107 {
/* Four fragments stitched around the fork/join markers at the end:
   SETUP calls go before the fork, INIT after it, FINI before the
   join and TEARDOWN after it (matching the scheme in the function
   comment above).  */
7108 gimple_seq before_fork = NULL;
7109 gimple_seq after_fork = NULL;
7110 gimple_seq before_join = NULL;
7111 gimple_seq after_join = NULL;
7112 tree init_code = NULL_TREE, fini_code = NULL_TREE,
7113 setup_code = NULL_TREE, teardown_code = NULL_TREE;
7114 unsigned offset = 0;
7115 
7116 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
7117 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
7118 {
7119 /* No 'reduction' clauses on OpenACC 'kernels'. */
7120 gcc_checking_assert (!is_oacc_kernels (ctx));
7121 /* Likewise, on OpenACC 'kernels' decomposed parts. */
7122 gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx));
7123 
7124 tree orig = OMP_CLAUSE_DECL (c);
7125 tree var = maybe_lookup_decl (orig, ctx);
7126 tree ref_to_res = NULL_TREE;
7127 tree incoming, outgoing, v1, v2, v3;
7128 bool is_private = false;
7129 
/* Canonicalize the reduction operator: '-' reductions combine
   partial results by addition, and the short-circuiting truth ops
   are encoded as their bitwise counterparts.  */
7130 enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
7131 if (rcode == MINUS_EXPR)
7132 rcode = PLUS_EXPR;
7133 else if (rcode == TRUTH_ANDIF_EXPR)
7134 rcode = BIT_AND_EXPR;
7135 else if (rcode == TRUTH_ORIF_EXPR)
7136 rcode = BIT_IOR_EXPR;
7137 tree op = build_int_cst (unsigned_type_node, rcode);
7138 
7139 if (!var)
7140 var = orig;
7141 
7142 incoming = outgoing = var;
7143 
7144 if (!inner)
7145 {
7146 /* See if an outer construct also reduces this variable. */
7147 omp_context *outer = ctx;
7148 
7149 while (omp_context *probe = outer->outer)
7150 {
7151 enum gimple_code type = gimple_code (probe->stmt);
7152 tree cls;
7153 
7154 switch (type)
7155 {
7156 case GIMPLE_OMP_FOR:
7157 cls = gimple_omp_for_clauses (probe->stmt);
7158 break;
7159 
7160 case GIMPLE_OMP_TARGET:
7161 /* No 'reduction' clauses inside OpenACC 'kernels'
7162 regions. */
7163 gcc_checking_assert (!is_oacc_kernels (probe));
7164 
7165 if (!is_gimple_omp_offloaded (probe->stmt))
7166 goto do_lookup;
7167 
7168 cls = gimple_omp_target_clauses (probe->stmt);
7169 break;
7170 
7171 default:
7172 goto do_lookup;
7173 }
7174 
7175 outer = probe;
7176 for (; cls; cls = OMP_CLAUSE_CHAIN (cls))
7177 if (OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_REDUCTION
7178 && orig == OMP_CLAUSE_DECL (cls))
7179 {
7180 incoming = outgoing = lookup_decl (orig, probe);
7181 goto has_outer_reduction;
7182 }
7183 else if ((OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_FIRSTPRIVATE
7184 || OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_PRIVATE)
7185 && orig == OMP_CLAUSE_DECL (cls))
7186 {
7187 is_private = true;
7188 goto do_lookup;
7189 }
7190 }
7191 
7192 do_lookup:
7193 /* This is the outermost construct with this reduction,
7194 see if there's a mapping for it. */
7195 if (gimple_code (outer->stmt) == GIMPLE_OMP_TARGET
7196 && maybe_lookup_field (orig, outer) && !is_private)
7197 {
7198 ref_to_res = build_receiver_ref (orig, false, outer);
7199 if (omp_is_reference (orig))
7200 ref_to_res = build_simple_mem_ref (ref_to_res);
7201 
7202 tree type = TREE_TYPE (var);
7203 if (POINTER_TYPE_P (type))
7204 type = TREE_TYPE (type);
7205 
7206 outgoing = var;
7207 incoming = omp_reduction_init_op (loc, rcode, type);
7208 }
7209 else
7210 {
7211 /* Try to look at enclosing contexts for reduction var,
7212 use original if no mapping found. */
7213 tree t = NULL_TREE;
7214 omp_context *c = ctx->outer;
7215 while (c && !t)
7216 {
7217 t = maybe_lookup_decl (orig, c);
7218 c = c->outer;
7219 }
7220 incoming = outgoing = (t ? t : orig);
7221 }
7222 
7223 has_outer_reduction:;
7224 }
7225 
7226 if (!ref_to_res)
7227 ref_to_res = integer_zero_node;
7228 
/* For by-reference reductions, make separate pointer copies V1..V3
   for the init, fini and teardown calls, then dereference all of
   them so the GOACC_REDUCTION calls operate on the pointed-to
   values.  */
7229 if (omp_is_reference (orig))
7230 {
7231 tree type = TREE_TYPE (var);
7232 const char *id = IDENTIFIER_POINTER (DECL_NAME (var));
7233 
7234 if (!inner)
7235 {
7236 tree x = create_tmp_var (TREE_TYPE (type), id);
7237 gimplify_assign (var, build_fold_addr_expr (x), fork_seq);
7238 }
7239 
7240 v1 = create_tmp_var (type, id);
7241 v2 = create_tmp_var (type, id);
7242 v3 = create_tmp_var (type, id);
7243 
7244 gimplify_assign (v1, var, fork_seq);
7245 gimplify_assign (v2, var, fork_seq);
7246 gimplify_assign (v3, var, fork_seq);
7247 
7248 var = build_simple_mem_ref (var);
7249 v1 = build_simple_mem_ref (v1);
7250 v2 = build_simple_mem_ref (v2);
7251 v3 = build_simple_mem_ref (v3);
7252 outgoing = build_simple_mem_ref (outgoing);
7253 
7254 if (!TREE_CONSTANT (incoming))
7255 incoming = build_simple_mem_ref (incoming);
7256 }
7257 else
7258 v1 = v2 = v3 = var;
7259 
7260 /* Determine position in reduction buffer, which may be used
7261 by target. The parser has ensured that this is not a
7262 variable-sized type. */
7263 fixed_size_mode mode
7264 = as_a <fixed_size_mode> (TYPE_MODE (TREE_TYPE (var)));
7265 unsigned align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
7266 offset = (offset + align - 1) & ~(align - 1);
7267 tree off = build_int_cst (sizetype, offset);
7268 offset += GET_MODE_SIZE (mode);
7269 
7270 if (!init_code)
7271 {
7272 init_code = build_int_cst (integer_type_node,
7273 IFN_GOACC_REDUCTION_INIT);
7274 fini_code = build_int_cst (integer_type_node,
7275 IFN_GOACC_REDUCTION_FINI);
7276 setup_code = build_int_cst (integer_type_node,
7277 IFN_GOACC_REDUCTION_SETUP);
7278 teardown_code = build_int_cst (integer_type_node,
7279 IFN_GOACC_REDUCTION_TEARDOWN);
7280 }
7281 
/* Each GOACC_REDUCTION call takes the sub-operation code, the
   mapped result reference, the working value, the compute LEVEL,
   the reduction operator and the buffer offset.  */
7282 tree setup_call
7283 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
7284 TREE_TYPE (var), 6, setup_code,
7285 unshare_expr (ref_to_res),
7286 incoming, level, op, off);
7287 tree init_call
7288 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
7289 TREE_TYPE (var), 6, init_code,
7290 unshare_expr (ref_to_res),
7291 v1, level, op, off);
7292 tree fini_call
7293 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
7294 TREE_TYPE (var), 6, fini_code,
7295 unshare_expr (ref_to_res),
7296 v2, level, op, off);
7297 tree teardown_call
7298 = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
7299 TREE_TYPE (var), 6, teardown_code,
7300 ref_to_res, v3, level, op, off);
7301 
7302 gimplify_assign (v1, setup_call, &before_fork);
7303 gimplify_assign (v2, init_call, &after_fork);
7304 gimplify_assign (v3, fini_call, &before_join);
7305 gimplify_assign (outgoing, teardown_call, &after_join);
7306 }
7307 
7308 /* Now stitch things together. */
7309 gimple_seq_add_seq (fork_seq, before_fork);
7310 if (fork)
7311 gimple_seq_add_stmt (fork_seq, fork);
7312 gimple_seq_add_seq (fork_seq, after_fork);
7313 
7314 gimple_seq_add_seq (join_seq, before_join);
7315 if (join)
7316 gimple_seq_add_stmt (join_seq, join);
7317 gimple_seq_add_seq (join_seq, after_join);
7318 }
7319
7320 /* Generate code to implement the REDUCTION clauses, append it
7321 to STMT_SEQP. CLIST if non-NULL is a pointer to a sequence
7322 that should be emitted also inside of the critical section,
7323 in that case clear *CLIST afterwards, otherwise leave it as is
7324 and let the caller emit it itself. */
7325
7326 static void
lower_reduction_clauses(tree clauses,gimple_seq * stmt_seqp,gimple_seq * clist,omp_context * ctx)7327 lower_reduction_clauses (tree clauses, gimple_seq *stmt_seqp,
7328 gimple_seq *clist, omp_context *ctx)
7329 {
7330 gimple_seq sub_seq = NULL;
7331 gimple *stmt;
7332 tree x, c;
7333 int count = 0;
7334 
7335 /* OpenACC loop reductions are handled elsewhere. */
7336 if (is_gimple_omp_oacc (ctx->stmt))
7337 return;
7338 
7339 /* SIMD reductions are handled in lower_rec_input_clauses. */
7340 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
7341 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
7342 return;
7343 
7344 /* inscan reductions are handled elsewhere. */
7345 if (ctx->scan_inclusive || ctx->scan_exclusive)
7346 return;
7347 
7348 /* First see if there is exactly one reduction clause. Use OMP_ATOMIC
7349 update in that case, otherwise use a lock. */
7350 for (c = clauses; c && count < 2; c = OMP_CLAUSE_CHAIN (c))
7351 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
7352 && !OMP_CLAUSE_REDUCTION_TASK (c))
7353 {
7354 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
7355 || TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
7356 {
7357 /* Never use OMP_ATOMIC for array reductions or UDRs. */
7358 count = -1;
7359 break;
7360 }
7361 count++;
7362 }
7363 
/* COUNT is now 0 (nothing to do), 1 (single scalar reduction:
   atomic update and early return below), or -1/2 (emit all merges
   into SUB_SEQ, serialized by GOMP_atomic_start/end at the end).  */
7364 if (count == 0)
7365 return;
7366 
7367 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7368 {
7369 tree var, ref, new_var, orig_var;
7370 enum tree_code code;
7371 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
7372 
7373 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
7374 || OMP_CLAUSE_REDUCTION_TASK (c))
7375 continue;
7376 
7377 enum omp_clause_code ccode = OMP_CLAUSE_REDUCTION;
7378 orig_var = var = OMP_CLAUSE_DECL (c);
7379 if (TREE_CODE (var) == MEM_REF)
7380 {
7381 var = TREE_OPERAND (var, 0);
7382 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
7383 var = TREE_OPERAND (var, 0);
7384 if (TREE_CODE (var) == ADDR_EXPR)
7385 var = TREE_OPERAND (var, 0);
7386 else
7387 {
7388 /* If this is a pointer or referenced based array
7389 section, the var could be private in the outer
7390 context e.g. on orphaned loop construct. Pretend this
7391 is private variable's outer reference. */
7392 ccode = OMP_CLAUSE_PRIVATE;
7393 if (TREE_CODE (var) == INDIRECT_REF)
7394 var = TREE_OPERAND (var, 0);
7395 }
7396 orig_var = var;
7397 if (is_variable_sized (var))
7398 {
7399 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
7400 var = DECL_VALUE_EXPR (var);
7401 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
7402 var = TREE_OPERAND (var, 0);
7403 gcc_assert (DECL_P (var));
7404 }
7405 }
7406 new_var = lookup_decl (var, ctx);
7407 if (var == OMP_CLAUSE_DECL (c) && omp_is_reference (var))
7408 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
7409 ref = build_outer_var_ref (var, ctx, ccode);
7410 code = OMP_CLAUSE_REDUCTION_CODE (c);
7411 
7412 /* reduction(-:var) sums up the partial results, so it acts
7413 identically to reduction(+:var). */
7414 if (code == MINUS_EXPR)
7415 code = PLUS_EXPR;
7416 
/* For && and || reductions, operands are normalized to booleans by
   comparing against zero before combining, then the result is
   converted back to the variable's type.  */
7417 bool is_truth_op = (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR);
7418 if (count == 1)
7419 {
7420 tree addr = build_fold_addr_expr_loc (clause_loc, ref);
7421 
7422 addr = save_expr (addr);
7423 ref = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (addr)), addr);
7424 tree new_var2 = new_var;
7425 tree ref2 = ref;
7426 if (is_truth_op)
7427 {
7428 tree zero = build_zero_cst (TREE_TYPE (new_var));
7429 new_var2 = fold_build2_loc (clause_loc, NE_EXPR,
7430 boolean_type_node, new_var, zero);
7431 ref2 = fold_build2_loc (clause_loc, NE_EXPR, boolean_type_node,
7432 ref, zero);
7433 }
7434 x = fold_build2_loc (clause_loc, code, TREE_TYPE (new_var2), ref2,
7435 new_var2);
7436 if (is_truth_op)
7437 x = fold_convert (TREE_TYPE (new_var), x);
7438 x = build2 (OMP_ATOMIC, void_type_node, addr, x);
7439 OMP_ATOMIC_MEMORY_ORDER (x) = OMP_MEMORY_ORDER_RELAXED;
7440 gimplify_and_add (x, stmt_seqp);
7441 return;
7442 }
7443 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
7444 {
/* Array-section reduction: emit a loop that walks the private
   copy (NEW_VAR) and the outer copy (REF) element-wise with
   pointer bumps, combining them with CODE or the UDR merge
   sequence; index I runs up to the domain maximum V.  */
7445 tree d = OMP_CLAUSE_DECL (c);
7446 tree type = TREE_TYPE (d);
7447 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
7448 tree i = create_tmp_var (TREE_TYPE (v));
7449 tree ptype = build_pointer_type (TREE_TYPE (type));
7450 tree bias = TREE_OPERAND (d, 1);
7451 d = TREE_OPERAND (d, 0);
7452 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
7453 {
7454 tree b = TREE_OPERAND (d, 1);
7455 b = maybe_lookup_decl (b, ctx);
7456 if (b == NULL)
7457 {
7458 b = TREE_OPERAND (d, 1);
7459 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
7460 }
7461 if (integer_zerop (bias))
7462 bias = b;
7463 else
7464 {
7465 bias = fold_convert_loc (clause_loc, TREE_TYPE (b), bias);
7466 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
7467 TREE_TYPE (b), b, bias);
7468 }
7469 d = TREE_OPERAND (d, 0);
7470 }
7471 /* For ref build_outer_var_ref already performs this, so
7472 only new_var needs a dereference. */
7473 if (TREE_CODE (d) == INDIRECT_REF)
7474 {
7475 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
7476 gcc_assert (omp_is_reference (var) && var == orig_var);
7477 }
7478 else if (TREE_CODE (d) == ADDR_EXPR)
7479 {
7480 if (orig_var == var)
7481 {
7482 new_var = build_fold_addr_expr (new_var);
7483 ref = build_fold_addr_expr (ref);
7484 }
7485 }
7486 else
7487 {
7488 gcc_assert (orig_var == var);
7489 if (omp_is_reference (var))
7490 ref = build_fold_addr_expr (ref);
7491 }
7492 if (DECL_P (v))
7493 {
7494 tree t = maybe_lookup_decl (v, ctx);
7495 if (t)
7496 v = t;
7497 else
7498 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
7499 gimplify_expr (&v, stmt_seqp, NULL, is_gimple_val, fb_rvalue);
7500 }
7501 if (!integer_zerop (bias))
7502 {
7503 bias = fold_convert_loc (clause_loc, sizetype, bias);
7504 new_var = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
7505 TREE_TYPE (new_var), new_var,
7506 unshare_expr (bias));
7507 ref = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
7508 TREE_TYPE (ref), ref, bias);
7509 }
7510 new_var = fold_convert_loc (clause_loc, ptype, new_var);
7511 ref = fold_convert_loc (clause_loc, ptype, ref);
7512 tree m = create_tmp_var (ptype);
7513 gimplify_assign (m, new_var, stmt_seqp);
7514 new_var = m;
7515 m = create_tmp_var (ptype);
7516 gimplify_assign (m, ref, stmt_seqp);
7517 ref = m;
7518 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), stmt_seqp);
7519 tree body = create_artificial_label (UNKNOWN_LOCATION);
7520 tree end = create_artificial_label (UNKNOWN_LOCATION);
7521 gimple_seq_add_stmt (&sub_seq, gimple_build_label (body));
7522 tree priv = build_simple_mem_ref_loc (clause_loc, new_var);
7523 tree out = build_simple_mem_ref_loc (clause_loc, ref);
7524 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
7525 {
/* UDR: temporarily alias the placeholders to the current
   elements via DECL_VALUE_EXPR while lowering the merge
   sequence, then clear the clause's merge/placeholder
   operands since they have been consumed.  */
7526 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
7527 tree decl_placeholder
7528 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
7529 SET_DECL_VALUE_EXPR (placeholder, out);
7530 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
7531 SET_DECL_VALUE_EXPR (decl_placeholder, priv);
7532 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
7533 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
7534 gimple_seq_add_seq (&sub_seq,
7535 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
7536 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
7537 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
7538 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
7539 }
7540 else
7541 {
7542 tree out2 = out;
7543 tree priv2 = priv;
7544 if (is_truth_op)
7545 {
7546 tree zero = build_zero_cst (TREE_TYPE (out));
7547 out2 = fold_build2_loc (clause_loc, NE_EXPR,
7548 boolean_type_node, out, zero);
7549 priv2 = fold_build2_loc (clause_loc, NE_EXPR,
7550 boolean_type_node, priv, zero);
7551 }
7552 x = build2 (code, TREE_TYPE (out2), out2, priv2);
7553 if (is_truth_op)
7554 x = fold_convert (TREE_TYPE (out), x);
7555 out = unshare_expr (out);
7556 gimplify_assign (out, x, &sub_seq);
7557 }
7558 gimple *g = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
7559 TYPE_SIZE_UNIT (TREE_TYPE (type)));
7560 gimple_seq_add_stmt (&sub_seq, g);
7561 g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
7562 TYPE_SIZE_UNIT (TREE_TYPE (type)));
7563 gimple_seq_add_stmt (&sub_seq, g);
7564 g = gimple_build_assign (i, PLUS_EXPR, i,
7565 build_int_cst (TREE_TYPE (i), 1));
7566 gimple_seq_add_stmt (&sub_seq, g);
7567 g = gimple_build_cond (LE_EXPR, i, v, body, end);
7568 gimple_seq_add_stmt (&sub_seq, g);
7569 gimple_seq_add_stmt (&sub_seq, gimple_build_label (end));
7570 }
7571 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
7572 {
7573 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
7574 
7575 if (omp_is_reference (var)
7576 && !useless_type_conversion_p (TREE_TYPE (placeholder),
7577 TREE_TYPE (ref)))
7578 ref = build_fold_addr_expr_loc (clause_loc, ref);
7579 SET_DECL_VALUE_EXPR (placeholder, ref);
7580 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
7581 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
7582 gimple_seq_add_seq (&sub_seq, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
7583 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
7584 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
7585 }
7586 else
7587 {
7588 tree new_var2 = new_var;
7589 tree ref2 = ref;
7590 if (is_truth_op)
7591 {
7592 tree zero = build_zero_cst (TREE_TYPE (new_var));
7593 new_var2 = fold_build2_loc (clause_loc, NE_EXPR,
7594 boolean_type_node, new_var, zero);
7595 ref2 = fold_build2_loc (clause_loc, NE_EXPR, boolean_type_node,
7596 ref, zero);
7597 }
7598 x = build2 (code, TREE_TYPE (ref), ref2, new_var2);
7599 if (is_truth_op)
7600 x = fold_convert (TREE_TYPE (new_var), x);
7601 ref = build_outer_var_ref (var, ctx);
7602 gimplify_assign (ref, x, &sub_seq);
7603 }
7604 }
7605 
/* Multiple reductions (or a UDR/array one): run all the merges in
   SUB_SEQ inside a single GOMP_atomic_start/GOMP_atomic_end region,
   optionally flushing the caller's critical-section list too.  */
7606 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START),
7607 0);
7608 gimple_seq_add_stmt (stmt_seqp, stmt);
7609 
7610 gimple_seq_add_seq (stmt_seqp, sub_seq);
7611 
7612 if (clist)
7613 {
7614 gimple_seq_add_seq (stmt_seqp, *clist);
7615 *clist = NULL;
7616 }
7617 
7618 stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END),
7619 0);
7620 gimple_seq_add_stmt (stmt_seqp, stmt);
7621 }
7622
7623
7624 /* Generate code to implement the COPYPRIVATE clauses. */
7625
7626 static void
lower_copyprivate_clauses(tree clauses,gimple_seq * slist,gimple_seq * rlist,omp_context * ctx)7627 lower_copyprivate_clauses (tree clauses, gimple_seq *slist, gimple_seq *rlist,
7628 omp_context *ctx)
7629 {
7630 tree c;
7631 
7632 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7633 {
7634 tree var, new_var, ref, x;
7635 bool by_ref;
7636 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
7637 
7638 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYPRIVATE)
7639 continue;
7640 
7641 var = OMP_CLAUSE_DECL (c);
7642 by_ref = use_pointer_for_field (var, NULL);
7643 
/* Sender side (SLIST): store the value -- or its address if
   BY_REF -- into this variable's field in the sender record.  */
7644 ref = build_sender_ref (var, ctx);
7645 x = new_var = lookup_decl_in_outer_ctx (var, ctx);
7646 if (by_ref)
7647 {
7648 x = build_fold_addr_expr_loc (clause_loc, new_var);
7649 x = fold_convert_loc (clause_loc, TREE_TYPE (ref), x);
7650 }
7651 gimplify_assign (ref, x, slist);
7652 
/* Receiver side (RLIST): read the broadcast value back out of the
   receiver record (dereferencing if it was sent by address) and
   assign it to this thread's copy via the langhook.  */
7653 ref = build_receiver_ref (var, false, ctx);
7654 if (by_ref)
7655 {
7656 ref = fold_convert_loc (clause_loc,
7657 build_pointer_type (TREE_TYPE (new_var)),
7658 ref);
7659 ref = build_fold_indirect_ref_loc (clause_loc, ref);
7660 }
7661 if (omp_is_reference (var))
7662 {
7663 ref = fold_convert_loc (clause_loc, TREE_TYPE (new_var), ref);
7664 ref = build_simple_mem_ref_loc (clause_loc, ref);
7665 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
7666 }
7667 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, ref);
7668 gimplify_and_add (x, rlist);
7669 }
7670 }
7671
7672
7673 /* Generate code to implement the clauses, FIRSTPRIVATE, COPYIN, LASTPRIVATE,
7674 and REDUCTION from the sender (aka parent) side. */
7675
7676 static void
lower_send_clauses(tree clauses,gimple_seq * ilist,gimple_seq * olist,omp_context * ctx)7677 lower_send_clauses (tree clauses, gimple_seq *ilist, gimple_seq *olist,
7678 omp_context *ctx)
7679 {
7680 tree c, t;
7681 int ignored_looptemp = 0;
7682 bool is_taskloop = false;
7683 
7684 /* For taskloop, ignore first two _looptemp_ clauses, those are initialized
7685 by GOMP_taskloop. */
7686 if (is_task_ctx (ctx) && gimple_omp_task_taskloop_p (ctx->stmt))
7687 {
7688 ignored_looptemp = 2;
7689 is_taskloop = true;
7690 }
7691 
7692 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7693 {
7694 tree val, ref, x, var;
7695 bool by_ref, do_in = false, do_out = false;
7696 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
7697 
/* First filter: decide whether this clause kind needs any
   send-side handling at all; 'continue' skips it entirely.  */
7698 switch (OMP_CLAUSE_CODE (c))
7699 {
7700 case OMP_CLAUSE_PRIVATE:
7701 if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
7702 break;
7703 continue;
7704 case OMP_CLAUSE_FIRSTPRIVATE:
7705 case OMP_CLAUSE_COPYIN:
7706 case OMP_CLAUSE_LASTPRIVATE:
7707 case OMP_CLAUSE_IN_REDUCTION:
7708 case OMP_CLAUSE__REDUCTEMP_:
7709 break;
7710 case OMP_CLAUSE_REDUCTION:
7711 if (is_task_ctx (ctx) || OMP_CLAUSE_REDUCTION_TASK (c))
7712 continue;
7713 break;
7714 case OMP_CLAUSE_SHARED:
7715 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
7716 break;
7717 continue;
7718 case OMP_CLAUSE__LOOPTEMP_:
7719 if (ignored_looptemp)
7720 {
7721 ignored_looptemp--;
7722 continue;
7723 }
7724 break;
7725 default:
7726 continue;
7727 }
7728 
/* For array-section reductions, peel the MEM_REF down to the
   underlying base decl; variable-sized bases are handled
   elsewhere.  */
7729 val = OMP_CLAUSE_DECL (c);
7730 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
7731 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION)
7732 && TREE_CODE (val) == MEM_REF)
7733 {
7734 val = TREE_OPERAND (val, 0);
7735 if (TREE_CODE (val) == POINTER_PLUS_EXPR)
7736 val = TREE_OPERAND (val, 0);
7737 if (TREE_CODE (val) == INDIRECT_REF
7738 || TREE_CODE (val) == ADDR_EXPR)
7739 val = TREE_OPERAND (val, 0);
7740 if (is_variable_sized (val))
7741 continue;
7742 }
7743 
7744 /* For OMP_CLAUSE_SHARED_FIRSTPRIVATE, look beyond the
7745 outer taskloop region. */
7746 omp_context *ctx_for_o = ctx;
7747 if (is_taskloop
7748 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
7749 && OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
7750 ctx_for_o = ctx->outer;
7751 
7752 var = lookup_decl_in_outer_ctx (val, ctx_for_o);
7753 
7754 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYIN
7755 && is_global_var (var)
7756 && (val == OMP_CLAUSE_DECL (c)
7757 || !is_task_ctx (ctx)
7758 || (TREE_CODE (TREE_TYPE (val)) != POINTER_TYPE
7759 && (TREE_CODE (TREE_TYPE (val)) != REFERENCE_TYPE
7760 || (TREE_CODE (TREE_TYPE (TREE_TYPE (val)))
7761 != POINTER_TYPE)))))
7762 continue;
7763 
7764 t = omp_member_access_dummy_var (var);
7765 if (t)
7766 {
7767 var = DECL_VALUE_EXPR (var);
7768 tree o = maybe_lookup_decl_in_outer_ctx (t, ctx_for_o);
7769 if (o != t)
7770 var = unshare_and_remap (var, t, o);
7771 else
7772 var = unshare_expr (var);
7773 }
7774 
7775 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
7776 {
7777 /* Handle taskloop firstprivate/lastprivate, where the
7778 lastprivate on GIMPLE_OMP_TASK is represented as
7779 OMP_CLAUSE_SHARED_FIRSTPRIVATE. */
7780 tree f = lookup_sfield ((splay_tree_key) &DECL_UID (val), ctx);
7781 x = omp_build_component_ref (ctx->sender_decl, f);
7782 if (use_pointer_for_field (val, ctx))
7783 var = build_fold_addr_expr (var);
7784 gimplify_assign (x, var, ilist);
7785 DECL_ABSTRACT_ORIGIN (f) = NULL;
7786 continue;
7787 }
7788 
7789 if (((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
7790 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IN_REDUCTION)
7791 || val == OMP_CLAUSE_DECL (c))
7792 && is_variable_sized (val))
7793 continue;
7794 by_ref = use_pointer_for_field (val, NULL);
7795 
/* Second switch: per clause kind, decide the copy direction --
   DO_IN copies into the sender record before the region, DO_OUT
   copies the result back out after it.  */
7796 switch (OMP_CLAUSE_CODE (c))
7797 {
7798 case OMP_CLAUSE_FIRSTPRIVATE:
7799 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c)
7800 && !by_ref
7801 && is_task_ctx (ctx))
7802 TREE_NO_WARNING (var) = 1;
7803 do_in = true;
7804 break;
7805 
7806 case OMP_CLAUSE_PRIVATE:
7807 case OMP_CLAUSE_COPYIN:
7808 case OMP_CLAUSE__LOOPTEMP_:
7809 case OMP_CLAUSE__REDUCTEMP_:
7810 do_in = true;
7811 break;
7812 
7813 case OMP_CLAUSE_LASTPRIVATE:
7814 if (by_ref || omp_is_reference (val))
7815 {
7816 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
7817 continue;
7818 do_in = true;
7819 }
7820 else
7821 {
7822 do_out = true;
7823 if (lang_hooks.decls.omp_private_outer_ref (val))
7824 do_in = true;
7825 }
7826 break;
7827 
7828 case OMP_CLAUSE_REDUCTION:
7829 case OMP_CLAUSE_IN_REDUCTION:
7830 do_in = true;
7831 if (val == OMP_CLAUSE_DECL (c))
7832 {
7833 if (is_task_ctx (ctx))
7834 by_ref = use_pointer_for_field (val, ctx);
7835 else
7836 do_out = !(by_ref || omp_is_reference (val));
7837 }
7838 else
7839 by_ref = TREE_CODE (TREE_TYPE (val)) == ARRAY_TYPE;
7840 break;
7841 
7842 default:
7843 gcc_unreachable ();
7844 }
7845 
/* Emit the actual copies: value (or address, if BY_REF) into the
   sender record on ILIST; value back out of it on OLIST.  */
7846 if (do_in)
7847 {
7848 ref = build_sender_ref (val, ctx);
7849 x = by_ref ? build_fold_addr_expr_loc (clause_loc, var) : var;
7850 gimplify_assign (ref, x, ilist);
7851 if (is_task_ctx (ctx))
7852 DECL_ABSTRACT_ORIGIN (TREE_OPERAND (ref, 1)) = NULL;
7853 }
7854 
7855 if (do_out)
7856 {
7857 ref = build_sender_ref (val, ctx);
7858 gimplify_assign (var, ref, olist);
7859 }
7860 }
7861 }
7862
/* Generate code to implement SHARED from the sender (aka parent)
   side.  This is trickier, since GIMPLE_OMP_PARALLEL_CLAUSES doesn't
   list things that got automatically shared.  */

static void
lower_send_shared_vars (gimple_seq *ilist, gimple_seq *olist, omp_context *ctx)
{
  tree var, ovar, nvar, t, f, x, record_type;

  /* Nothing to send if no data-sharing record was built for this context.  */
  if (ctx->record_type == NULL)
    return;

  /* Prefer the sender-side record type when one exists.  */
  record_type = ctx->srecord_type ? ctx->srecord_type : ctx->record_type;
  for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
    {
      ovar = DECL_ABSTRACT_ORIGIN (f);
      /* Skip fields with no originating decl, or whose origin is itself
	 a FIELD_DECL rather than a shared variable.  */
      if (!ovar || TREE_CODE (ovar) == FIELD_DECL)
	continue;

      nvar = maybe_lookup_decl (ovar, ctx);
      /* Only send variables remapped via DECL_VALUE_EXPR in this context;
	 variables listed in the allocate map are handled elsewhere.  */
      if (!nvar
	  || !DECL_HAS_VALUE_EXPR_P (nvar)
	  || (ctx->allocate_map
	      && ctx->allocate_map->get (ovar)))
	continue;

      /* If CTX is a nested parallel directive.  Find the immediately
	 enclosing parallel or workshare construct that contains a
	 mapping for OVAR.  */
      var = lookup_decl_in_outer_ctx (ovar, ctx);

      /* Member accesses through an artificial dummy variable must be
	 rewritten in terms of the outer context's copy of the base.  */
      t = omp_member_access_dummy_var (var);
      if (t)
	{
	  var = DECL_VALUE_EXPR (var);
	  tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
	  if (o != t)
	    var = unshare_and_remap (var, t, o);
	  else
	    var = unshare_expr (var);
	}

      if (use_pointer_for_field (ovar, ctx))
	{
	  /* Shared by reference: store the variable's address into the
	     sender record.  */
	  x = build_sender_ref (ovar, ctx);
	  if (TREE_CODE (TREE_TYPE (f)) == ARRAY_TYPE
	      && TREE_TYPE (f) == TREE_TYPE (ovar))
	    {
	      gcc_assert (is_parallel_ctx (ctx)
			  && DECL_ARTIFICIAL (ovar));
	      /* _condtemp_ clause.  */
	      var = build_constructor (TREE_TYPE (x), NULL);
	    }
	  else
	    var = build_fold_addr_expr (var);
	  gimplify_assign (x, var, ilist);
	}
      else
	{
	  /* Shared by value: copy the value in before the region ...  */
	  x = build_sender_ref (ovar, ctx);
	  gimplify_assign (x, var, ilist);

	  /* ... and copy it back out afterwards, unless that is provably
	     unnecessary or unsafe.  */
	  if (!TREE_READONLY (var)
	      /* We don't need to receive a new reference to a result
	         or parm decl.  In fact we may not store to it as we will
		 invalidate any pending RSO and generate wrong gimple
		 during inlining.  */
	      && !((TREE_CODE (var) == RESULT_DECL
		    || TREE_CODE (var) == PARM_DECL)
		   && DECL_BY_REFERENCE (var)))
	    {
	      x = build_sender_ref (ovar, ctx);
	      gimplify_assign (var, x, olist);
	    }
	}
    }
}
7940
/* Emit an OpenACC head marker call, encapsulating the partitioning and
   other information that must be processed by the target compiler.
   Return the maximum number of dimensions the associated loop might
   be partitioned over.  */

static unsigned
lower_oacc_head_mark (location_t loc, tree ddvar, tree clauses,
		      gimple_seq *seq, omp_context *ctx)
{
  unsigned levels = 0;
  unsigned tag = 0;
  tree gang_static = NULL_TREE;
  auto_vec<tree, 5> args;

  args.quick_push (build_int_cst
		   (integer_type_node, IFN_UNIQUE_OACC_HEAD_MARK));
  args.quick_push (ddvar);

  /* Scan the loop clauses, accumulating partitioning flags into TAG and
     counting the explicitly requested partitioning levels.  */
  for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_GANG:
	  tag |= OLF_DIM_GANG;
	  gang_static = OMP_CLAUSE_GANG_STATIC_EXPR (c);
	  /* static:* is represented by -1, and we can ignore it, as
	     scheduling is always static.  */
	  if (gang_static && integer_minus_onep (gang_static))
	    gang_static = NULL_TREE;
	  levels++;
	  break;

	case OMP_CLAUSE_WORKER:
	  tag |= OLF_DIM_WORKER;
	  levels++;
	  break;

	case OMP_CLAUSE_VECTOR:
	  tag |= OLF_DIM_VECTOR;
	  levels++;
	  break;

	case OMP_CLAUSE_SEQ:
	  tag |= OLF_SEQ;
	  break;

	case OMP_CLAUSE_AUTO:
	  tag |= OLF_AUTO;
	  break;

	case OMP_CLAUSE_INDEPENDENT:
	  tag |= OLF_INDEPENDENT;
	  break;

	case OMP_CLAUSE_TILE:
	  tag |= OLF_TILE;
	  break;

	default:
	  continue;
	}
    }

  if (gang_static)
    {
      /* A decl-valued static argument must be read from the enclosing
	 context's copy.  */
      if (DECL_P (gang_static))
	gang_static = build_outer_var_ref (gang_static, ctx);
      tag |= OLF_GANG_STATIC;
    }

  /* Sanity-check the kind of the enclosing compute construct.  */
  omp_context *tgt = enclosing_target_ctx (ctx);
  if (!tgt || is_oacc_parallel_or_serial (tgt))
    ;
  else if (is_oacc_kernels (tgt))
    /* Not using this loops handling inside OpenACC 'kernels' regions.  */
    gcc_unreachable ();
  else if (is_oacc_kernels_decomposed_part (tgt))
    ;
  else
    gcc_unreachable ();

  /* In a parallel region, loops are implicitly INDEPENDENT.  */
  if (!tgt || is_oacc_parallel_or_serial (tgt))
    tag |= OLF_INDEPENDENT;

  /* Loops inside OpenACC 'kernels' decomposed parts' regions are expected to
     have an explicit 'seq' or 'independent' clause, and no 'auto' clause.  */
  if (tgt && is_oacc_kernels_decomposed_part (tgt))
    {
      gcc_assert (tag & (OLF_SEQ | OLF_INDEPENDENT));
      gcc_assert (!(tag & OLF_AUTO));
    }

  if (tag & OLF_TILE)
    /* Tiling could use all 3 levels.  */
    levels = 3;
  else
    {
      /* A loop lacking SEQ, GANG, WORKER and/or VECTOR could be AUTO.
	 Ensure at least one level, or 2 for possible auto
	 partitioning */
      bool maybe_auto = !(tag & (((GOMP_DIM_MASK (GOMP_DIM_MAX) - 1)
				  << OLF_DIM_BASE) | OLF_SEQ));

      if (levels < 1u + maybe_auto)
	levels = 1u + maybe_auto;
    }

  /* Emit IFN_UNIQUE (OACC_HEAD_MARK, ddvar, levels, tag [, gang_static]).  */
  args.quick_push (build_int_cst (integer_type_node, levels));
  args.quick_push (build_int_cst (integer_type_node, tag));
  if (gang_static)
    args.quick_push (gang_static);

  gcall *call = gimple_build_call_internal_vec (IFN_UNIQUE, args);
  gimple_set_location (call, loc);
  gimple_set_lhs (call, ddvar);
  gimple_seq_add_stmt (seq, call);

  return levels;
}
8060
8061 /* Emit an OpenACC lopp head or tail marker to SEQ. LEVEL is the
8062 partitioning level of the enclosed region. */
8063
8064 static void
lower_oacc_loop_marker(location_t loc,tree ddvar,bool head,tree tofollow,gimple_seq * seq)8065 lower_oacc_loop_marker (location_t loc, tree ddvar, bool head,
8066 tree tofollow, gimple_seq *seq)
8067 {
8068 int marker_kind = (head ? IFN_UNIQUE_OACC_HEAD_MARK
8069 : IFN_UNIQUE_OACC_TAIL_MARK);
8070 tree marker = build_int_cst (integer_type_node, marker_kind);
8071 int nargs = 2 + (tofollow != NULL_TREE);
8072 gcall *call = gimple_build_call_internal (IFN_UNIQUE, nargs,
8073 marker, ddvar, tofollow);
8074 gimple_set_location (call, loc);
8075 gimple_set_lhs (call, ddvar);
8076 gimple_seq_add_stmt (seq, call);
8077 }
8078
/* Generate the before and after OpenACC loop sequences.  CLAUSES are
   the loop clauses, from which we extract reductions.  Initialize
   HEAD and TAIL.  */

static void
lower_oacc_head_tail (location_t loc, tree clauses,
		      gimple_seq *head, gimple_seq *tail, omp_context *ctx)
{
  bool inner = false;
  /* DDVAR threads a data dependence through the marker calls so they are
     not moved or removed before the oacc transforms run.  */
  tree ddvar = create_tmp_var (integer_type_node, ".data_dep");
  gimple_seq_add_stmt (head, gimple_build_assign (ddvar, integer_zero_node));

  unsigned count = lower_oacc_head_mark (loc, ddvar, clauses, head, ctx);
  tree fork_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_FORK);
  tree join_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_JOIN);

  gcc_assert (count);
  /* Emit one fork/join pair per partitioning level; each level is appended
     to HEAD and prepended to TAIL so the pairs nest correctly.  */
  for (unsigned done = 1; count; count--, done++)
    {
      gimple_seq fork_seq = NULL;
      gimple_seq join_seq = NULL;

      /* The partitioning axis (-1 here) is filled in later.  */
      tree place = build_int_cst (integer_type_node, -1);
      gcall *fork = gimple_build_call_internal (IFN_UNIQUE, 3,
						fork_kind, ddvar, place);
      gimple_set_location (fork, loc);
      gimple_set_lhs (fork, ddvar);

      gcall *join = gimple_build_call_internal (IFN_UNIQUE, 3,
						join_kind, ddvar, place);
      gimple_set_location (join, loc);
      gimple_set_lhs (join, ddvar);

      /* Mark the beginning of this level sequence.  */
      if (inner)
	lower_oacc_loop_marker (loc, ddvar, true,
				build_int_cst (integer_type_node, count),
				&fork_seq);
      lower_oacc_loop_marker (loc, ddvar, false,
			      build_int_cst (integer_type_node, done),
			      &join_seq);

      lower_oacc_reductions (loc, clauses, place, inner,
			     fork, join, &fork_seq, &join_seq, ctx);

      /* Append this level to head.  */
      gimple_seq_add_seq (head, fork_seq);
      /* Prepend it to tail.  */
      gimple_seq_add_seq (&join_seq, *tail);
      *tail = join_seq;

      inner = true;
    }

  /* Mark the end of the sequence.  */
  lower_oacc_loop_marker (loc, ddvar, true, NULL_TREE, head);
  lower_oacc_loop_marker (loc, ddvar, false, NULL_TREE, tail);
}
8137
8138 /* If exceptions are enabled, wrap the statements in BODY in a MUST_NOT_THROW
8139 catch handler and return it. This prevents programs from violating the
8140 structured block semantics with throws. */
8141
8142 static gimple_seq
maybe_catch_exception(gimple_seq body)8143 maybe_catch_exception (gimple_seq body)
8144 {
8145 gimple *g;
8146 tree decl;
8147
8148 if (!flag_exceptions)
8149 return body;
8150
8151 if (lang_hooks.eh_protect_cleanup_actions != NULL)
8152 decl = lang_hooks.eh_protect_cleanup_actions ();
8153 else
8154 decl = builtin_decl_explicit (BUILT_IN_TRAP);
8155
8156 g = gimple_build_eh_must_not_throw (decl);
8157 g = gimple_build_try (body, gimple_seq_alloc_with_stmt (g),
8158 GIMPLE_TRY_CATCH);
8159
8160 return gimple_seq_alloc_with_stmt (g);
8161 }
8162
8163
8164 /* Routines to lower OMP directives into OMP-GIMPLE. */
8165
8166 /* If ctx is a worksharing context inside of a cancellable parallel
8167 region and it isn't nowait, add lhs to its GIMPLE_OMP_RETURN
8168 and conditional branch to parallel's cancel_label to handle
8169 cancellation in the implicit barrier. */
8170
8171 static void
maybe_add_implicit_barrier_cancel(omp_context * ctx,gimple * omp_return,gimple_seq * body)8172 maybe_add_implicit_barrier_cancel (omp_context *ctx, gimple *omp_return,
8173 gimple_seq *body)
8174 {
8175 gcc_assert (gimple_code (omp_return) == GIMPLE_OMP_RETURN);
8176 if (gimple_omp_return_nowait_p (omp_return))
8177 return;
8178 for (omp_context *outer = ctx->outer; outer; outer = outer->outer)
8179 if (gimple_code (outer->stmt) == GIMPLE_OMP_PARALLEL
8180 && outer->cancellable)
8181 {
8182 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_CANCEL);
8183 tree c_bool_type = TREE_TYPE (TREE_TYPE (fndecl));
8184 tree lhs = create_tmp_var (c_bool_type);
8185 gimple_omp_return_set_lhs (omp_return, lhs);
8186 tree fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
8187 gimple *g = gimple_build_cond (NE_EXPR, lhs,
8188 fold_convert (c_bool_type,
8189 boolean_false_node),
8190 outer->cancel_label, fallthru_label);
8191 gimple_seq_add_stmt (body, g);
8192 gimple_seq_add_stmt (body, gimple_build_label (fallthru_label));
8193 }
8194 else if (gimple_code (outer->stmt) != GIMPLE_OMP_TASKGROUP)
8195 return;
8196 }
8197
8198 /* Find the first task_reduction or reduction clause or return NULL
8199 if there are none. */
8200
8201 static inline tree
omp_task_reductions_find_first(tree clauses,enum tree_code code,enum omp_clause_code ccode)8202 omp_task_reductions_find_first (tree clauses, enum tree_code code,
8203 enum omp_clause_code ccode)
8204 {
8205 while (1)
8206 {
8207 clauses = omp_find_clause (clauses, ccode);
8208 if (clauses == NULL_TREE)
8209 return NULL_TREE;
8210 if (ccode != OMP_CLAUSE_REDUCTION
8211 || code == OMP_TASKLOOP
8212 || OMP_CLAUSE_REDUCTION_TASK (clauses))
8213 return clauses;
8214 clauses = OMP_CLAUSE_CHAIN (clauses);
8215 }
8216 }
8217
8218 static void lower_omp_task_reductions (omp_context *, enum tree_code, tree,
8219 gimple_seq *, gimple_seq *);
8220
/* Lower the OpenMP sections directive in the current statement in GSI_P.
   CTX is the enclosing OMP context for the current statement.  */

static void
lower_omp_sections (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree block, control;
  gimple_stmt_iterator tgsi;
  gomp_sections *stmt;
  gimple *t;
  gbind *new_stmt, *bind;
  gimple_seq ilist, dlist, olist, tred_dlist = NULL, clist = NULL, new_body;

  stmt = as_a <gomp_sections *> (gsi_stmt (*gsi_p));

  push_gimplify_context ();

  dlist = NULL;
  ilist = NULL;

  /* If there are task reductions, prepend an artificial _REDUCTEMP_
     clause that carries the runtime registration data for them.  */
  tree rclauses
    = omp_task_reductions_find_first (gimple_omp_sections_clauses (stmt),
				      OMP_SECTIONS, OMP_CLAUSE_REDUCTION);
  tree rtmp = NULL_TREE;
  if (rclauses)
    {
      tree type = build_pointer_type (pointer_sized_int_node);
      tree temp = create_tmp_var (type);
      tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
      OMP_CLAUSE_DECL (c) = temp;
      OMP_CLAUSE_CHAIN (c) = gimple_omp_sections_clauses (stmt);
      gimple_omp_sections_set_clauses (stmt, c);
      lower_omp_task_reductions (ctx, OMP_SECTIONS,
				 gimple_omp_sections_clauses (stmt),
				 &ilist, &tred_dlist);
      rclauses = c;
      rtmp = make_ssa_name (type);
      gimple_seq_add_stmt (&ilist, gimple_build_assign (rtmp, temp));
    }

  tree *clauses_ptr = gimple_omp_sections_clauses_ptr (stmt);
  lower_lastprivate_conditional_clauses (clauses_ptr, ctx);

  lower_rec_input_clauses (gimple_omp_sections_clauses (stmt),
			   &ilist, &dlist, ctx, NULL);

  /* CONTROL is the switch index the expansion pass will use to dispatch
     among the sections.  */
  control = create_tmp_var (unsigned_type_node, ".section");
  gimple_omp_sections_set_control (stmt, control);

  /* Lower each GIMPLE_OMP_SECTION body in turn.  */
  new_body = gimple_omp_body (stmt);
  gimple_omp_set_body (stmt, NULL);
  tgsi = gsi_start (new_body);
  for (; !gsi_end_p (tgsi); gsi_next (&tgsi))
    {
      omp_context *sctx;
      gimple *sec_start;

      sec_start = gsi_stmt (tgsi);
      sctx = maybe_lookup_ctx (sec_start);
      gcc_assert (sctx);

      lower_omp (gimple_omp_body_ptr (sec_start), sctx);
      gsi_insert_seq_after (&tgsi, gimple_omp_body (sec_start),
			    GSI_CONTINUE_LINKING);
      gimple_omp_set_body (sec_start, NULL);

      if (gsi_one_before_end_p (tgsi))
	{
	  /* The lastprivate copy-out goes into the final section.  */
	  gimple_seq l = NULL;
	  lower_lastprivate_clauses (gimple_omp_sections_clauses (stmt), NULL,
				     &ilist, &l, &clist, ctx);
	  gsi_insert_seq_after (&tgsi, l, GSI_CONTINUE_LINKING);
	  gimple_omp_section_set_last (sec_start);
	}

      gsi_insert_after (&tgsi, gimple_build_omp_return (false),
			GSI_CONTINUE_LINKING);
    }

  block = make_node (BLOCK);
  bind = gimple_build_bind (NULL, new_body, block);

  olist = NULL;
  lower_reduction_clauses (gimple_omp_sections_clauses (stmt), &olist,
			   &clist, ctx);
  if (clist)
    {
      /* Wrap the lastprivate-conditional reduction copy-back in a
	 GOMP_atomic_start/GOMP_atomic_end pair.  */
      tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START);
      gcall *g = gimple_build_call (fndecl, 0);
      gimple_seq_add_stmt (&olist, g);
      gimple_seq_add_seq (&olist, clist);
      fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END);
      g = gimple_build_call (fndecl, 0);
      gimple_seq_add_stmt (&olist, g);
    }

  block = make_node (BLOCK);
  new_stmt = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, new_stmt, true);

  pop_gimplify_context (new_stmt);
  gimple_bind_append_vars (new_stmt, ctx->block_vars);
  BLOCK_VARS (block) = gimple_bind_vars (bind);
  if (BLOCK_VARS (block))
    TREE_USED (block) = 1;

  /* Assemble the lowered sequence: ilist; SECTIONS; SECTIONS_SWITCH;
     body; CONTINUE; olist; [cancel label;] dlist; RETURN; tred_dlist.  */
  new_body = NULL;
  gimple_seq_add_seq (&new_body, ilist);
  gimple_seq_add_stmt (&new_body, stmt);
  gimple_seq_add_stmt (&new_body, gimple_build_omp_sections_switch ());
  gimple_seq_add_stmt (&new_body, bind);

  t = gimple_build_omp_continue (control, control);
  gimple_seq_add_stmt (&new_body, t);

  gimple_seq_add_seq (&new_body, olist);
  if (ctx->cancellable)
    gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
  gimple_seq_add_seq (&new_body, dlist);

  new_body = maybe_catch_exception (new_body);

  bool nowait = omp_find_clause (gimple_omp_sections_clauses (stmt),
				 OMP_CLAUSE_NOWAIT) != NULL_TREE;
  t = gimple_build_omp_return (nowait);
  gimple_seq_add_stmt (&new_body, t);
  gimple_seq_add_seq (&new_body, tred_dlist);
  maybe_add_implicit_barrier_cancel (ctx, t, &new_body);

  /* Point the artificial _REDUCTEMP_ clause at the SSA name so the
     expansion pass can find the registration data.  */
  if (rclauses)
    OMP_CLAUSE_DECL (rclauses) = rtmp;

  gimple_bind_set_body (new_stmt, new_body);
}
8355
8356
8357 /* A subroutine of lower_omp_single. Expand the simple form of
8358 a GIMPLE_OMP_SINGLE, without a copyprivate clause:
8359
8360 if (GOMP_single_start ())
8361 BODY;
8362 [ GOMP_barrier (); ] -> unless 'nowait' is present.
8363
8364 FIXME. It may be better to delay expanding the logic of this until
8365 pass_expand_omp. The expanded logic may make the job more difficult
8366 to a synchronization analysis pass. */
8367
8368 static void
lower_omp_single_simple(gomp_single * single_stmt,gimple_seq * pre_p)8369 lower_omp_single_simple (gomp_single *single_stmt, gimple_seq *pre_p)
8370 {
8371 location_t loc = gimple_location (single_stmt);
8372 tree tlabel = create_artificial_label (loc);
8373 tree flabel = create_artificial_label (loc);
8374 gimple *call, *cond;
8375 tree lhs, decl;
8376
8377 decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_START);
8378 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (decl)));
8379 call = gimple_build_call (decl, 0);
8380 gimple_call_set_lhs (call, lhs);
8381 gimple_seq_add_stmt (pre_p, call);
8382
8383 cond = gimple_build_cond (EQ_EXPR, lhs,
8384 fold_convert_loc (loc, TREE_TYPE (lhs),
8385 boolean_true_node),
8386 tlabel, flabel);
8387 gimple_seq_add_stmt (pre_p, cond);
8388 gimple_seq_add_stmt (pre_p, gimple_build_label (tlabel));
8389 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
8390 gimple_seq_add_stmt (pre_p, gimple_build_label (flabel));
8391 }
8392
/* A subroutine of lower_omp_single.  Expand the simple form of
   a GIMPLE_OMP_SINGLE, with a copyprivate clause:

	#pragma omp single copyprivate (a, b, c)

   Create a new structure to hold copies of 'a', 'b' and 'c' and emit:

	{
	  if ((copyout_p = GOMP_single_copy_start ()) == NULL)
	    {
	      BODY;
	      copyout.a = a;
	      copyout.b = b;
	      copyout.c = c;
	      GOMP_single_copy_end (&copyout);
	    }
	  else
	    {
	      a = copyout_p->a;
	      b = copyout_p->b;
	      c = copyout_p->c;
	    }
	  GOMP_barrier ();
	}

  FIXME.  It may be better to delay expanding the logic of this until
  pass_expand_omp.  The expanded logic may make the job more difficult
  to a synchronization analysis pass.  */

static void
lower_omp_single_copy (gomp_single *single_stmt, gimple_seq *pre_p,
		       omp_context *ctx)
{
  tree ptr_type, t, l0, l1, l2, bfn_decl;
  gimple_seq copyin_seq;
  location_t loc = gimple_location (single_stmt);

  /* The record the executing thread fills in ("copyout" above).  */
  ctx->sender_decl = create_tmp_var (ctx->record_type, ".omp_copy_o");

  /* The pointer the other threads read from ("copyout_p" above).  */
  ptr_type = build_pointer_type (ctx->record_type);
  ctx->receiver_decl = create_tmp_var (ptr_type, ".omp_copy_i");

  l0 = create_artificial_label (loc);
  l1 = create_artificial_label (loc);
  l2 = create_artificial_label (loc);

  /* receiver = (ptr_type) GOMP_single_copy_start ();  */
  bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_START);
  t = build_call_expr_loc (loc, bfn_decl, 0);
  t = fold_convert_loc (loc, ptr_type, t);
  gimplify_assign (ctx->receiver_decl, t, pre_p);

  /* if (receiver == NULL) goto l0; else goto l1;  */
  t = build2 (EQ_EXPR, boolean_type_node, ctx->receiver_decl,
	      build_int_cst (ptr_type, 0));
  t = build3 (COND_EXPR, void_type_node, t,
	      build_and_jump (&l0), build_and_jump (&l1));
  gimplify_and_add (t, pre_p);

  /* l0: the executing thread runs BODY and publishes the values.  */
  gimple_seq_add_stmt (pre_p, gimple_build_label (l0));

  gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));

  /* Build both the copy-out stores (into PRE_P) and the copy-in loads
     (into COPYIN_SEQ, emitted on the other threads' path below).  */
  copyin_seq = NULL;
  lower_copyprivate_clauses (gimple_omp_single_clauses (single_stmt), pre_p,
			     &copyin_seq, ctx);

  /* GOMP_single_copy_end (&sender);  */
  t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
  bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_END);
  t = build_call_expr_loc (loc, bfn_decl, 1, t);
  gimplify_and_add (t, pre_p);

  t = build_and_jump (&l2);
  gimplify_and_add (t, pre_p);

  /* l1: the other threads copy the values in.  */
  gimple_seq_add_stmt (pre_p, gimple_build_label (l1));

  gimple_seq_add_seq (pre_p, copyin_seq);

  gimple_seq_add_stmt (pre_p, gimple_build_label (l2));
}
8473
/* Expand code for an OpenMP single directive.  */

static void
lower_omp_single (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree block;
  gomp_single *single_stmt = as_a <gomp_single *> (gsi_stmt (*gsi_p));
  gbind *bind;
  gimple_seq bind_body, bind_body_tail = NULL, dlist;

  push_gimplify_context ();

  /* Replace the directive with a GIMPLE_BIND holding the lowered body.  */
  block = make_node (BLOCK);
  bind = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, bind, true);
  bind_body = NULL;
  dlist = NULL;
  lower_rec_input_clauses (gimple_omp_single_clauses (single_stmt),
			   &bind_body, &dlist, ctx, NULL);
  lower_omp (gimple_omp_body_ptr (single_stmt), ctx);

  gimple_seq_add_stmt (&bind_body, single_stmt);

  /* A record type is only built when there is a copyprivate clause;
     choose the corresponding expansion.  */
  if (ctx->record_type)
    lower_omp_single_copy (single_stmt, &bind_body, ctx);
  else
    lower_omp_single_simple (single_stmt, &bind_body);

  gimple_omp_set_body (single_stmt, NULL);

  gimple_seq_add_seq (&bind_body, dlist);

  bind_body = maybe_catch_exception (bind_body);

  bool nowait = omp_find_clause (gimple_omp_single_clauses (single_stmt),
				 OMP_CLAUSE_NOWAIT) != NULL_TREE;
  gimple *g = gimple_build_omp_return (nowait);
  gimple_seq_add_stmt (&bind_body_tail, g);
  maybe_add_implicit_barrier_cancel (ctx, g, &bind_body_tail);
  if (ctx->record_type)
    {
      /* Clobber the copyout record once it is dead so later passes can
	 reuse its stack slot.  */
      gimple_stmt_iterator gsi = gsi_start (bind_body_tail);
      tree clobber = build_clobber (ctx->record_type);
      gsi_insert_after (&gsi, gimple_build_assign (ctx->sender_decl,
						   clobber), GSI_SAME_STMT);
    }
  gimple_seq_add_seq (&bind_body, bind_body_tail);
  gimple_bind_set_body (bind, bind_body);

  pop_gimplify_context (bind);

  gimple_bind_append_vars (bind, ctx->block_vars);
  BLOCK_VARS (block) = ctx->block_vars;
  if (BLOCK_VARS (block))
    TREE_USED (block) = 1;
}
8531
8532
8533 /* Expand code for an OpenMP master directive. */
8534
8535 static void
lower_omp_master(gimple_stmt_iterator * gsi_p,omp_context * ctx)8536 lower_omp_master (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8537 {
8538 tree block, lab = NULL, x, bfn_decl;
8539 gimple *stmt = gsi_stmt (*gsi_p);
8540 gbind *bind;
8541 location_t loc = gimple_location (stmt);
8542 gimple_seq tseq;
8543
8544 push_gimplify_context ();
8545
8546 block = make_node (BLOCK);
8547 bind = gimple_build_bind (NULL, NULL, block);
8548 gsi_replace (gsi_p, bind, true);
8549 gimple_bind_add_stmt (bind, stmt);
8550
8551 bfn_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
8552 x = build_call_expr_loc (loc, bfn_decl, 0);
8553 x = build2 (EQ_EXPR, boolean_type_node, x, integer_zero_node);
8554 x = build3 (COND_EXPR, void_type_node, x, NULL, build_and_jump (&lab));
8555 tseq = NULL;
8556 gimplify_and_add (x, &tseq);
8557 gimple_bind_add_seq (bind, tseq);
8558
8559 lower_omp (gimple_omp_body_ptr (stmt), ctx);
8560 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
8561 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
8562 gimple_omp_set_body (stmt, NULL);
8563
8564 gimple_bind_add_stmt (bind, gimple_build_label (lab));
8565
8566 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
8567
8568 pop_gimplify_context (bind);
8569
8570 gimple_bind_append_vars (bind, ctx->block_vars);
8571 BLOCK_VARS (block) = ctx->block_vars;
8572 }
8573
8574 /* Helper function for lower_omp_task_reductions. For a specific PASS
8575 find out the current clause it should be processed, or return false
8576 if all have been processed already. */
8577
8578 static inline bool
omp_task_reduction_iterate(int pass,enum tree_code code,enum omp_clause_code ccode,tree * c,tree * decl,tree * type,tree * next)8579 omp_task_reduction_iterate (int pass, enum tree_code code,
8580 enum omp_clause_code ccode, tree *c, tree *decl,
8581 tree *type, tree *next)
8582 {
8583 for (; *c; *c = omp_find_clause (OMP_CLAUSE_CHAIN (*c), ccode))
8584 {
8585 if (ccode == OMP_CLAUSE_REDUCTION
8586 && code != OMP_TASKLOOP
8587 && !OMP_CLAUSE_REDUCTION_TASK (*c))
8588 continue;
8589 *decl = OMP_CLAUSE_DECL (*c);
8590 *type = TREE_TYPE (*decl);
8591 if (TREE_CODE (*decl) == MEM_REF)
8592 {
8593 if (pass != 1)
8594 continue;
8595 }
8596 else
8597 {
8598 if (omp_is_reference (*decl))
8599 *type = TREE_TYPE (*type);
8600 if (pass != (!TREE_CONSTANT (TYPE_SIZE_UNIT (*type))))
8601 continue;
8602 }
8603 *next = omp_find_clause (OMP_CLAUSE_CHAIN (*c), ccode);
8604 return true;
8605 }
8606 *decl = NULL_TREE;
8607 *type = NULL_TREE;
8608 *next = NULL_TREE;
8609 return false;
8610 }
8611
8612 /* Lower task_reduction and reduction clauses (the latter unless CODE is
8613 OMP_TASKGROUP only with task modifier). Register mapping of those in
8614 START sequence and reducing them and unregister them in the END sequence. */
8615
8616 static void
lower_omp_task_reductions(omp_context * ctx,enum tree_code code,tree clauses,gimple_seq * start,gimple_seq * end)8617 lower_omp_task_reductions (omp_context *ctx, enum tree_code code, tree clauses,
8618 gimple_seq *start, gimple_seq *end)
8619 {
8620 enum omp_clause_code ccode
8621 = (code == OMP_TASKGROUP
8622 ? OMP_CLAUSE_TASK_REDUCTION : OMP_CLAUSE_REDUCTION);
8623 tree cancellable = NULL_TREE;
8624 clauses = omp_task_reductions_find_first (clauses, code, ccode);
8625 if (clauses == NULL_TREE)
8626 return;
8627 if (code == OMP_FOR || code == OMP_SECTIONS)
8628 {
8629 for (omp_context *outer = ctx->outer; outer; outer = outer->outer)
8630 if (gimple_code (outer->stmt) == GIMPLE_OMP_PARALLEL
8631 && outer->cancellable)
8632 {
8633 cancellable = error_mark_node;
8634 break;
8635 }
8636 else if (gimple_code (outer->stmt) != GIMPLE_OMP_TASKGROUP)
8637 break;
8638 }
8639 tree record_type = lang_hooks.types.make_type (RECORD_TYPE);
8640 tree *last = &TYPE_FIELDS (record_type);
8641 unsigned cnt = 0;
8642 if (cancellable)
8643 {
8644 tree field = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
8645 ptr_type_node);
8646 tree ifield = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
8647 integer_type_node);
8648 *last = field;
8649 DECL_CHAIN (field) = ifield;
8650 last = &DECL_CHAIN (ifield);
8651 DECL_CONTEXT (field) = record_type;
8652 if (TYPE_ALIGN (record_type) < DECL_ALIGN (field))
8653 SET_TYPE_ALIGN (record_type, DECL_ALIGN (field));
8654 DECL_CONTEXT (ifield) = record_type;
8655 if (TYPE_ALIGN (record_type) < DECL_ALIGN (ifield))
8656 SET_TYPE_ALIGN (record_type, DECL_ALIGN (ifield));
8657 }
8658 for (int pass = 0; pass < 2; pass++)
8659 {
8660 tree decl, type, next;
8661 for (tree c = clauses;
8662 omp_task_reduction_iterate (pass, code, ccode,
8663 &c, &decl, &type, &next); c = next)
8664 {
8665 ++cnt;
8666 tree new_type = type;
8667 if (ctx->outer)
8668 new_type = remap_type (type, &ctx->outer->cb);
8669 tree field
8670 = build_decl (OMP_CLAUSE_LOCATION (c), FIELD_DECL,
8671 DECL_P (decl) ? DECL_NAME (decl) : NULL_TREE,
8672 new_type);
8673 if (DECL_P (decl) && type == TREE_TYPE (decl))
8674 {
8675 SET_DECL_ALIGN (field, DECL_ALIGN (decl));
8676 DECL_USER_ALIGN (field) = DECL_USER_ALIGN (decl);
8677 TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (decl);
8678 }
8679 else
8680 SET_DECL_ALIGN (field, TYPE_ALIGN (type));
8681 DECL_CONTEXT (field) = record_type;
8682 if (TYPE_ALIGN (record_type) < DECL_ALIGN (field))
8683 SET_TYPE_ALIGN (record_type, DECL_ALIGN (field));
8684 *last = field;
8685 last = &DECL_CHAIN (field);
8686 tree bfield
8687 = build_decl (OMP_CLAUSE_LOCATION (c), FIELD_DECL, NULL_TREE,
8688 boolean_type_node);
8689 DECL_CONTEXT (bfield) = record_type;
8690 if (TYPE_ALIGN (record_type) < DECL_ALIGN (bfield))
8691 SET_TYPE_ALIGN (record_type, DECL_ALIGN (bfield));
8692 *last = bfield;
8693 last = &DECL_CHAIN (bfield);
8694 }
8695 }
8696 *last = NULL_TREE;
8697 layout_type (record_type);
8698
8699 /* Build up an array which registers with the runtime all the reductions
8700 and deregisters them at the end. Format documented in libgomp/task.c. */
8701 tree atype = build_array_type_nelts (pointer_sized_int_node, 7 + cnt * 3);
8702 tree avar = create_tmp_var_raw (atype);
8703 gimple_add_tmp_var (avar);
8704 TREE_ADDRESSABLE (avar) = 1;
8705 tree r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_zero_node,
8706 NULL_TREE, NULL_TREE);
8707 tree t = build_int_cst (pointer_sized_int_node, cnt);
8708 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
8709 gimple_seq seq = NULL;
8710 tree sz = fold_convert (pointer_sized_int_node,
8711 TYPE_SIZE_UNIT (record_type));
8712 int cachesz = 64;
8713 sz = fold_build2 (PLUS_EXPR, pointer_sized_int_node, sz,
8714 build_int_cst (pointer_sized_int_node, cachesz - 1));
8715 sz = fold_build2 (BIT_AND_EXPR, pointer_sized_int_node, sz,
8716 build_int_cst (pointer_sized_int_node, ~(cachesz - 1)));
8717 ctx->task_reductions.create (1 + cnt);
8718 ctx->task_reduction_map = new hash_map<tree, unsigned>;
8719 ctx->task_reductions.quick_push (TREE_CODE (sz) == INTEGER_CST
8720 ? sz : NULL_TREE);
8721 sz = force_gimple_operand (sz, &seq, true, NULL_TREE);
8722 gimple_seq_add_seq (start, seq);
8723 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_one_node,
8724 NULL_TREE, NULL_TREE);
8725 gimple_seq_add_stmt (start, gimple_build_assign (r, sz));
8726 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (2),
8727 NULL_TREE, NULL_TREE);
8728 t = build_int_cst (pointer_sized_int_node,
8729 MAX (TYPE_ALIGN_UNIT (record_type), (unsigned) cachesz));
8730 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
8731 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (3),
8732 NULL_TREE, NULL_TREE);
8733 t = build_int_cst (pointer_sized_int_node, -1);
8734 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
8735 r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (4),
8736 NULL_TREE, NULL_TREE);
8737 t = build_int_cst (pointer_sized_int_node, 0);
8738 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
8739
8740 /* In end, build a loop that iterates from 0 to < omp_get_num_threads ()
8741 and for each task reduction checks a bool right after the private variable
8742 within that thread's chunk; if the bool is clear, it hasn't been
8743 initialized and thus isn't going to be reduced nor destructed, otherwise
8744 reduce and destruct it. */
8745 tree idx = create_tmp_var (size_type_node);
8746 gimple_seq_add_stmt (end, gimple_build_assign (idx, size_zero_node));
8747 tree num_thr_sz = create_tmp_var (size_type_node);
8748 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
8749 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
8750 tree lab3 = NULL_TREE, lab7 = NULL_TREE;
8751 gimple *g;
8752 if (code == OMP_FOR || code == OMP_SECTIONS)
8753 {
8754 /* For worksharing constructs, only perform it in the master thread,
8755 with the exception of cancelled implicit barriers - then only handle
8756 the current thread. */
8757 tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
8758 t = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
8759 tree thr_num = create_tmp_var (integer_type_node);
8760 g = gimple_build_call (t, 0);
8761 gimple_call_set_lhs (g, thr_num);
8762 gimple_seq_add_stmt (end, g);
8763 if (cancellable)
8764 {
8765 tree c;
8766 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
8767 tree lab6 = create_artificial_label (UNKNOWN_LOCATION);
8768 lab3 = create_artificial_label (UNKNOWN_LOCATION);
8769 if (code == OMP_FOR)
8770 c = gimple_omp_for_clauses (ctx->stmt);
8771 else /* if (code == OMP_SECTIONS) */
8772 c = gimple_omp_sections_clauses (ctx->stmt);
8773 c = OMP_CLAUSE_DECL (omp_find_clause (c, OMP_CLAUSE__REDUCTEMP_));
8774 cancellable = c;
8775 g = gimple_build_cond (NE_EXPR, c, build_zero_cst (TREE_TYPE (c)),
8776 lab5, lab6);
8777 gimple_seq_add_stmt (end, g);
8778 gimple_seq_add_stmt (end, gimple_build_label (lab5));
8779 g = gimple_build_assign (idx, NOP_EXPR, thr_num);
8780 gimple_seq_add_stmt (end, g);
8781 g = gimple_build_assign (num_thr_sz, PLUS_EXPR, idx,
8782 build_one_cst (TREE_TYPE (idx)));
8783 gimple_seq_add_stmt (end, g);
8784 gimple_seq_add_stmt (end, gimple_build_goto (lab3));
8785 gimple_seq_add_stmt (end, gimple_build_label (lab6));
8786 }
8787 g = gimple_build_cond (NE_EXPR, thr_num, integer_zero_node, lab2, lab4);
8788 gimple_seq_add_stmt (end, g);
8789 gimple_seq_add_stmt (end, gimple_build_label (lab4));
8790 }
8791 if (code != OMP_PARALLEL)
8792 {
8793 t = builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS);
8794 tree num_thr = create_tmp_var (integer_type_node);
8795 g = gimple_build_call (t, 0);
8796 gimple_call_set_lhs (g, num_thr);
8797 gimple_seq_add_stmt (end, g);
8798 g = gimple_build_assign (num_thr_sz, NOP_EXPR, num_thr);
8799 gimple_seq_add_stmt (end, g);
8800 if (cancellable)
8801 gimple_seq_add_stmt (end, gimple_build_label (lab3));
8802 }
8803 else
8804 {
8805 tree c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
8806 OMP_CLAUSE__REDUCTEMP_);
8807 t = fold_convert (pointer_sized_int_node, OMP_CLAUSE_DECL (c));
8808 t = fold_convert (size_type_node, t);
8809 gimplify_assign (num_thr_sz, t, end);
8810 }
8811 t = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (2),
8812 NULL_TREE, NULL_TREE);
8813 tree data = create_tmp_var (pointer_sized_int_node);
8814 gimple_seq_add_stmt (end, gimple_build_assign (data, t));
8815 if (code == OMP_TASKLOOP)
8816 {
8817 lab7 = create_artificial_label (UNKNOWN_LOCATION);
8818 g = gimple_build_cond (NE_EXPR, data,
8819 build_zero_cst (pointer_sized_int_node),
8820 lab1, lab7);
8821 gimple_seq_add_stmt (end, g);
8822 }
8823 gimple_seq_add_stmt (end, gimple_build_label (lab1));
8824 tree ptr;
8825 if (TREE_CODE (TYPE_SIZE_UNIT (record_type)) == INTEGER_CST)
8826 ptr = create_tmp_var (build_pointer_type (record_type));
8827 else
8828 ptr = create_tmp_var (ptr_type_node);
8829 gimple_seq_add_stmt (end, gimple_build_assign (ptr, NOP_EXPR, data));
8830
8831 tree field = TYPE_FIELDS (record_type);
8832 cnt = 0;
8833 if (cancellable)
8834 field = DECL_CHAIN (DECL_CHAIN (field));
8835 for (int pass = 0; pass < 2; pass++)
8836 {
8837 tree decl, type, next;
8838 for (tree c = clauses;
8839 omp_task_reduction_iterate (pass, code, ccode,
8840 &c, &decl, &type, &next); c = next)
8841 {
8842 tree var = decl, ref;
8843 if (TREE_CODE (decl) == MEM_REF)
8844 {
8845 var = TREE_OPERAND (var, 0);
8846 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
8847 var = TREE_OPERAND (var, 0);
8848 tree v = var;
8849 if (TREE_CODE (var) == ADDR_EXPR)
8850 var = TREE_OPERAND (var, 0);
8851 else if (TREE_CODE (var) == INDIRECT_REF)
8852 var = TREE_OPERAND (var, 0);
8853 tree orig_var = var;
8854 if (is_variable_sized (var))
8855 {
8856 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
8857 var = DECL_VALUE_EXPR (var);
8858 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
8859 var = TREE_OPERAND (var, 0);
8860 gcc_assert (DECL_P (var));
8861 }
8862 t = ref = maybe_lookup_decl_in_outer_ctx (var, ctx);
8863 if (orig_var != var)
8864 gcc_assert (TREE_CODE (v) == ADDR_EXPR);
8865 else if (TREE_CODE (v) == ADDR_EXPR)
8866 t = build_fold_addr_expr (t);
8867 else if (TREE_CODE (v) == INDIRECT_REF)
8868 t = build_fold_indirect_ref (t);
8869 if (TREE_CODE (TREE_OPERAND (decl, 0)) == POINTER_PLUS_EXPR)
8870 {
8871 tree b = TREE_OPERAND (TREE_OPERAND (decl, 0), 1);
8872 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
8873 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, b);
8874 }
8875 if (!integer_zerop (TREE_OPERAND (decl, 1)))
8876 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t,
8877 fold_convert (size_type_node,
8878 TREE_OPERAND (decl, 1)));
8879 }
8880 else
8881 {
8882 t = ref = maybe_lookup_decl_in_outer_ctx (var, ctx);
8883 if (!omp_is_reference (decl))
8884 t = build_fold_addr_expr (t);
8885 }
8886 t = fold_convert (pointer_sized_int_node, t);
8887 seq = NULL;
8888 t = force_gimple_operand (t, &seq, true, NULL_TREE);
8889 gimple_seq_add_seq (start, seq);
8890 r = build4 (ARRAY_REF, pointer_sized_int_node, avar,
8891 size_int (7 + cnt * 3), NULL_TREE, NULL_TREE);
8892 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
8893 t = unshare_expr (byte_position (field));
8894 t = fold_convert (pointer_sized_int_node, t);
8895 ctx->task_reduction_map->put (c, cnt);
8896 ctx->task_reductions.quick_push (TREE_CODE (t) == INTEGER_CST
8897 ? t : NULL_TREE);
8898 seq = NULL;
8899 t = force_gimple_operand (t, &seq, true, NULL_TREE);
8900 gimple_seq_add_seq (start, seq);
8901 r = build4 (ARRAY_REF, pointer_sized_int_node, avar,
8902 size_int (7 + cnt * 3 + 1), NULL_TREE, NULL_TREE);
8903 gimple_seq_add_stmt (start, gimple_build_assign (r, t));
8904
8905 tree bfield = DECL_CHAIN (field);
8906 tree cond;
8907 if (code == OMP_PARALLEL || code == OMP_FOR || code == OMP_SECTIONS)
8908 /* In parallel or worksharing all threads unconditionally
8909 initialize all their task reduction private variables. */
8910 cond = boolean_true_node;
8911 else if (TREE_TYPE (ptr) == ptr_type_node)
8912 {
8913 cond = build2 (POINTER_PLUS_EXPR, ptr_type_node, ptr,
8914 unshare_expr (byte_position (bfield)));
8915 seq = NULL;
8916 cond = force_gimple_operand (cond, &seq, true, NULL_TREE);
8917 gimple_seq_add_seq (end, seq);
8918 tree pbool = build_pointer_type (TREE_TYPE (bfield));
8919 cond = build2 (MEM_REF, TREE_TYPE (bfield), cond,
8920 build_int_cst (pbool, 0));
8921 }
8922 else
8923 cond = build3 (COMPONENT_REF, TREE_TYPE (bfield),
8924 build_simple_mem_ref (ptr), bfield, NULL_TREE);
8925 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
8926 tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
8927 tree condv = create_tmp_var (boolean_type_node);
8928 gimple_seq_add_stmt (end, gimple_build_assign (condv, cond));
8929 g = gimple_build_cond (NE_EXPR, condv, boolean_false_node,
8930 lab3, lab4);
8931 gimple_seq_add_stmt (end, g);
8932 gimple_seq_add_stmt (end, gimple_build_label (lab3));
8933 if (cancellable && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) == NULL_TREE)
8934 {
8935 /* If this reduction doesn't need destruction and parallel
8936 has been cancelled, there is nothing to do for this
8937 reduction, so jump around the merge operation. */
8938 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
8939 g = gimple_build_cond (NE_EXPR, cancellable,
8940 build_zero_cst (TREE_TYPE (cancellable)),
8941 lab4, lab5);
8942 gimple_seq_add_stmt (end, g);
8943 gimple_seq_add_stmt (end, gimple_build_label (lab5));
8944 }
8945
8946 tree new_var;
8947 if (TREE_TYPE (ptr) == ptr_type_node)
8948 {
8949 new_var = build2 (POINTER_PLUS_EXPR, ptr_type_node, ptr,
8950 unshare_expr (byte_position (field)));
8951 seq = NULL;
8952 new_var = force_gimple_operand (new_var, &seq, true, NULL_TREE);
8953 gimple_seq_add_seq (end, seq);
8954 tree pbool = build_pointer_type (TREE_TYPE (field));
8955 new_var = build2 (MEM_REF, TREE_TYPE (field), new_var,
8956 build_int_cst (pbool, 0));
8957 }
8958 else
8959 new_var = build3 (COMPONENT_REF, TREE_TYPE (field),
8960 build_simple_mem_ref (ptr), field, NULL_TREE);
8961
8962 enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
8963 if (TREE_CODE (decl) != MEM_REF && omp_is_reference (decl))
8964 ref = build_simple_mem_ref (ref);
8965 /* reduction(-:var) sums up the partial results, so it acts
8966 identically to reduction(+:var). */
8967 if (rcode == MINUS_EXPR)
8968 rcode = PLUS_EXPR;
8969 if (TREE_CODE (decl) == MEM_REF)
8970 {
8971 tree type = TREE_TYPE (new_var);
8972 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
8973 tree i = create_tmp_var (TREE_TYPE (v));
8974 tree ptype = build_pointer_type (TREE_TYPE (type));
8975 if (DECL_P (v))
8976 {
8977 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
8978 tree vv = create_tmp_var (TREE_TYPE (v));
8979 gimplify_assign (vv, v, start);
8980 v = vv;
8981 }
8982 ref = build4 (ARRAY_REF, pointer_sized_int_node, avar,
8983 size_int (7 + cnt * 3), NULL_TREE, NULL_TREE);
8984 new_var = build_fold_addr_expr (new_var);
8985 new_var = fold_convert (ptype, new_var);
8986 ref = fold_convert (ptype, ref);
8987 tree m = create_tmp_var (ptype);
8988 gimplify_assign (m, new_var, end);
8989 new_var = m;
8990 m = create_tmp_var (ptype);
8991 gimplify_assign (m, ref, end);
8992 ref = m;
8993 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), end);
8994 tree body = create_artificial_label (UNKNOWN_LOCATION);
8995 tree endl = create_artificial_label (UNKNOWN_LOCATION);
8996 gimple_seq_add_stmt (end, gimple_build_label (body));
8997 tree priv = build_simple_mem_ref (new_var);
8998 tree out = build_simple_mem_ref (ref);
8999 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
9000 {
9001 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
9002 tree decl_placeholder
9003 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
9004 tree lab6 = NULL_TREE;
9005 if (cancellable)
9006 {
9007 /* If this reduction needs destruction and parallel
9008 has been cancelled, jump around the merge operation
9009 to the destruction. */
9010 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
9011 lab6 = create_artificial_label (UNKNOWN_LOCATION);
9012 tree zero = build_zero_cst (TREE_TYPE (cancellable));
9013 g = gimple_build_cond (NE_EXPR, cancellable, zero,
9014 lab6, lab5);
9015 gimple_seq_add_stmt (end, g);
9016 gimple_seq_add_stmt (end, gimple_build_label (lab5));
9017 }
9018 SET_DECL_VALUE_EXPR (placeholder, out);
9019 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
9020 SET_DECL_VALUE_EXPR (decl_placeholder, priv);
9021 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
9022 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
9023 gimple_seq_add_seq (end,
9024 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
9025 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
9026 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
9027 {
9028 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
9029 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
9030 }
9031 if (cancellable)
9032 gimple_seq_add_stmt (end, gimple_build_label (lab6));
9033 tree x = lang_hooks.decls.omp_clause_dtor (c, priv);
9034 if (x)
9035 {
9036 gimple_seq tseq = NULL;
9037 gimplify_stmt (&x, &tseq);
9038 gimple_seq_add_seq (end, tseq);
9039 }
9040 }
9041 else
9042 {
9043 tree x = build2 (rcode, TREE_TYPE (out), out, priv);
9044 out = unshare_expr (out);
9045 gimplify_assign (out, x, end);
9046 }
9047 gimple *g
9048 = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
9049 TYPE_SIZE_UNIT (TREE_TYPE (type)));
9050 gimple_seq_add_stmt (end, g);
9051 g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
9052 TYPE_SIZE_UNIT (TREE_TYPE (type)));
9053 gimple_seq_add_stmt (end, g);
9054 g = gimple_build_assign (i, PLUS_EXPR, i,
9055 build_int_cst (TREE_TYPE (i), 1));
9056 gimple_seq_add_stmt (end, g);
9057 g = gimple_build_cond (LE_EXPR, i, v, body, endl);
9058 gimple_seq_add_stmt (end, g);
9059 gimple_seq_add_stmt (end, gimple_build_label (endl));
9060 }
9061 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
9062 {
9063 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
9064 tree oldv = NULL_TREE;
9065 tree lab6 = NULL_TREE;
9066 if (cancellable)
9067 {
9068 /* If this reduction needs destruction and parallel
9069 has been cancelled, jump around the merge operation
9070 to the destruction. */
9071 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
9072 lab6 = create_artificial_label (UNKNOWN_LOCATION);
9073 tree zero = build_zero_cst (TREE_TYPE (cancellable));
9074 g = gimple_build_cond (NE_EXPR, cancellable, zero,
9075 lab6, lab5);
9076 gimple_seq_add_stmt (end, g);
9077 gimple_seq_add_stmt (end, gimple_build_label (lab5));
9078 }
9079 if (omp_is_reference (decl)
9080 && !useless_type_conversion_p (TREE_TYPE (placeholder),
9081 TREE_TYPE (ref)))
9082 ref = build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c), ref);
9083 ref = build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c), ref);
9084 tree refv = create_tmp_var (TREE_TYPE (ref));
9085 gimplify_assign (refv, ref, end);
9086 ref = build_simple_mem_ref_loc (OMP_CLAUSE_LOCATION (c), refv);
9087 SET_DECL_VALUE_EXPR (placeholder, ref);
9088 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
9089 tree d = maybe_lookup_decl (decl, ctx);
9090 gcc_assert (d);
9091 if (DECL_HAS_VALUE_EXPR_P (d))
9092 oldv = DECL_VALUE_EXPR (d);
9093 if (omp_is_reference (var))
9094 {
9095 tree v = fold_convert (TREE_TYPE (d),
9096 build_fold_addr_expr (new_var));
9097 SET_DECL_VALUE_EXPR (d, v);
9098 }
9099 else
9100 SET_DECL_VALUE_EXPR (d, new_var);
9101 DECL_HAS_VALUE_EXPR_P (d) = 1;
9102 lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
9103 if (oldv)
9104 SET_DECL_VALUE_EXPR (d, oldv);
9105 else
9106 {
9107 SET_DECL_VALUE_EXPR (d, NULL_TREE);
9108 DECL_HAS_VALUE_EXPR_P (d) = 0;
9109 }
9110 gimple_seq_add_seq (end, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
9111 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
9112 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
9113 OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
9114 if (cancellable)
9115 gimple_seq_add_stmt (end, gimple_build_label (lab6));
9116 tree x = lang_hooks.decls.omp_clause_dtor (c, new_var);
9117 if (x)
9118 {
9119 gimple_seq tseq = NULL;
9120 gimplify_stmt (&x, &tseq);
9121 gimple_seq_add_seq (end, tseq);
9122 }
9123 }
9124 else
9125 {
9126 tree x = build2 (rcode, TREE_TYPE (ref), ref, new_var);
9127 ref = unshare_expr (ref);
9128 gimplify_assign (ref, x, end);
9129 }
9130 gimple_seq_add_stmt (end, gimple_build_label (lab4));
9131 ++cnt;
9132 field = DECL_CHAIN (bfield);
9133 }
9134 }
9135
9136 if (code == OMP_TASKGROUP)
9137 {
9138 t = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_REGISTER);
9139 g = gimple_build_call (t, 1, build_fold_addr_expr (avar));
9140 gimple_seq_add_stmt (start, g);
9141 }
9142 else
9143 {
9144 tree c;
9145 if (code == OMP_FOR)
9146 c = gimple_omp_for_clauses (ctx->stmt);
9147 else if (code == OMP_SECTIONS)
9148 c = gimple_omp_sections_clauses (ctx->stmt);
9149 else
9150 c = gimple_omp_taskreg_clauses (ctx->stmt);
9151 c = omp_find_clause (c, OMP_CLAUSE__REDUCTEMP_);
9152 t = fold_convert (TREE_TYPE (OMP_CLAUSE_DECL (c)),
9153 build_fold_addr_expr (avar));
9154 gimplify_assign (OMP_CLAUSE_DECL (c), t, start);
9155 }
9156
9157 gimple_seq_add_stmt (end, gimple_build_assign (data, PLUS_EXPR, data, sz));
9158 gimple_seq_add_stmt (end, gimple_build_assign (idx, PLUS_EXPR, idx,
9159 size_one_node));
9160 g = gimple_build_cond (NE_EXPR, idx, num_thr_sz, lab1, lab2);
9161 gimple_seq_add_stmt (end, g);
9162 gimple_seq_add_stmt (end, gimple_build_label (lab2));
9163 if (code == OMP_FOR || code == OMP_SECTIONS)
9164 {
9165 enum built_in_function bfn
9166 = BUILT_IN_GOMP_WORKSHARE_TASK_REDUCTION_UNREGISTER;
9167 t = builtin_decl_explicit (bfn);
9168 tree c_bool_type = TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (t)));
9169 tree arg;
9170 if (cancellable)
9171 {
9172 arg = create_tmp_var (c_bool_type);
9173 gimple_seq_add_stmt (end, gimple_build_assign (arg, NOP_EXPR,
9174 cancellable));
9175 }
9176 else
9177 arg = build_int_cst (c_bool_type, 0);
9178 g = gimple_build_call (t, 1, arg);
9179 }
9180 else
9181 {
9182 t = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_UNREGISTER);
9183 g = gimple_build_call (t, 1, build_fold_addr_expr (avar));
9184 }
9185 gimple_seq_add_stmt (end, g);
9186 if (lab7)
9187 gimple_seq_add_stmt (end, gimple_build_label (lab7));
9188 t = build_constructor (atype, NULL);
9189 TREE_THIS_VOLATILE (t) = 1;
9190 gimple_seq_add_stmt (end, gimple_build_assign (avar, t));
9191 }
9192
9193 /* Expand code for an OpenMP taskgroup directive. */
9194
9195 static void
lower_omp_taskgroup(gimple_stmt_iterator * gsi_p,omp_context * ctx)9196 lower_omp_taskgroup (gimple_stmt_iterator *gsi_p, omp_context *ctx)
9197 {
9198 gimple *stmt = gsi_stmt (*gsi_p);
9199 gcall *x;
9200 gbind *bind;
9201 gimple_seq dseq = NULL;
9202 tree block = make_node (BLOCK);
9203
9204 bind = gimple_build_bind (NULL, NULL, block);
9205 gsi_replace (gsi_p, bind, true);
9206 gimple_bind_add_stmt (bind, stmt);
9207
9208 push_gimplify_context ();
9209
9210 x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_START),
9211 0);
9212 gimple_bind_add_stmt (bind, x);
9213
9214 lower_omp_task_reductions (ctx, OMP_TASKGROUP,
9215 gimple_omp_taskgroup_clauses (stmt),
9216 gimple_bind_body_ptr (bind), &dseq);
9217
9218 lower_omp (gimple_omp_body_ptr (stmt), ctx);
9219 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
9220 gimple_omp_set_body (stmt, NULL);
9221
9222 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
9223 gimple_bind_add_seq (bind, dseq);
9224
9225 pop_gimplify_context (bind);
9226
9227 gimple_bind_append_vars (bind, ctx->block_vars);
9228 BLOCK_VARS (block) = ctx->block_vars;
9229 }
9230
9231
/* Fold the OMP_CLAUSE_DEPEND sink clauses of the OMP_ORDERED in ORD_STMT
   if possible: adjacent depend(sink:...) ordered constructs are merged,
   and their dependence vectors are canonicalized into a single folded
   vector (see the algorithm comment below).  GSI_P points at ORD_STMT,
   CTX is its omp context; the enclosing loop is expected in
   CTX->outer.  */

static void
lower_omp_ordered_clauses (gimple_stmt_iterator *gsi_p, gomp_ordered *ord_stmt,
			   omp_context *ctx)
{
  struct omp_for_data fd;
  /* The sink vectors refer to the iterators of the directly enclosing
     GIMPLE_OMP_FOR; without one there is nothing to fold.  */
  if (!ctx->outer || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR)
    return;

  unsigned int len = gimple_omp_for_collapse (ctx->outer->stmt);
  struct omp_for_data_loop *loops = XALLOCAVEC (struct omp_for_data_loop, len);
  omp_extract_for_data (as_a <gomp_for *> (ctx->outer->stmt), &fd, loops);
  /* No ordered(N) clause on the loop means no cross-iteration
     dependencies to fold.  */
  if (!fd.ordered)
    return;

  tree *list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
  tree c = gimple_omp_ordered_clauses (ord_stmt);
  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
      && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
    {
      /* Merge depend clauses from multiple adjacent
	 #pragma omp ordered depend(sink:...) constructs
	 into one #pragma omp ordered depend(sink:...), so that
	 we can optimize them together.  */
      gimple_stmt_iterator gsi = *gsi_p;
      gsi_next (&gsi);
      while (!gsi_end_p (gsi))
	{
	  gimple *stmt = gsi_stmt (gsi);
	  /* Debug stmts and nops between the constructs do not prevent
	     merging; step over them.  */
	  if (is_gimple_debug (stmt)
	      || gimple_code (stmt) == GIMPLE_NOP)
	    {
	      gsi_next (&gsi);
	      continue;
	    }
	  if (gimple_code (stmt) != GIMPLE_OMP_ORDERED)
	    break;
	  gomp_ordered *ord_stmt2 = as_a <gomp_ordered *> (stmt);
	  c = gimple_omp_ordered_clauses (ord_stmt2);
	  /* Only a following ordered with solely depend(sink:...) clauses
	     can be merged into ORD_STMT.  */
	  if (c == NULL_TREE
	      || OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND
	      || OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
	    break;
	  /* Splice the merged construct's clauses onto the end of
	     ORD_STMT's clause chain and delete the construct.  */
	  while (*list_p)
	    list_p = &OMP_CLAUSE_CHAIN (*list_p);
	  *list_p = c;
	  gsi_remove (&gsi, true);
	}
    }

  /* Canonicalize sink dependence clauses into one folded clause if
     possible.

     The basic algorithm is to create a sink vector whose first
     element is the GCD of all the first elements, and whose remaining
     elements are the minimum of the subsequent columns.

     We ignore dependence vectors whose first element is zero because
     such dependencies are known to be executed by the same thread.

     We take into account the direction of the loop, so a minimum
     becomes a maximum if the loop is iterating forwards.  We also
     ignore sink clauses where the loop direction is unknown, or where
     the offsets are clearly invalid because they are not a multiple
     of the loop increment.

     For example:

	#pragma omp for ordered(2)
	for (i=0; i < N; ++i)
	  for (j=0; j < M; ++j)
	    {
	      #pragma omp ordered \
		depend(sink:i-8,j-2) \
		depend(sink:i,j-1) \	// Completely ignored because i+0.
		depend(sink:i-4,j-3) \
		depend(sink:i-6,j-4)
	      #pragma omp ordered depend(source)
	    }

     Folded clause is:

	depend(sink:-gcd(8,4,6),-min(2,3,4))
	  -or-
	depend(sink:-2,-2)
   */

  /* FIXME: Computing GCD's where the first element is zero is
     non-trivial in the presence of collapsed loops.  Do this later.  */
  if (fd.collapse > 1)
    return;

  /* folded_deps[0 .. len-1] is the folded vector accumulated so far;
     folded_deps[len .. 2*len-2] caches the current clause's offsets for
     dimensions 1 .. len-1 so that a lexically-later clause can replace
     the folded prefix wholesale (see the copy loop below).  */
  wide_int *folded_deps = XALLOCAVEC (wide_int, 2 * len - 1);

  /* wide_int is not a POD so it must be default-constructed.  */
  for (unsigned i = 0; i != 2 * len - 1; ++i)
    new (static_cast<void*>(folded_deps + i)) wide_int ();

  /* The representative clause that will carry the folded vector;
     initialized from the first usable sink clause.  */
  tree folded_dep = NULL_TREE;
  /* TRUE if the first dimension's offset is negative.  */
  bool neg_offset_p = false;

  list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
  /* Declared outside the TREE_LIST walk so the length assertion after
     the loop can check the final value.  */
  unsigned int i;
  while ((c = *list_p) != NULL)
    {
      bool remove = false;

      gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND);
      if (OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
	goto next_ordered_clause;

      /* Walk the clause's dependence vector, one TREE_LIST node per
	 loop dimension (TREE_VALUE is the iterator, TREE_PURPOSE the
	 offset).  */
      tree vec;
      for (vec = OMP_CLAUSE_DECL (c), i = 0;
	   vec && TREE_CODE (vec) == TREE_LIST;
	   vec = TREE_CHAIN (vec), ++i)
	{
	  gcc_assert (i < len);

	  /* omp_extract_for_data has canonicalized the condition.  */
	  gcc_assert (fd.loops[i].cond_code == LT_EXPR
		      || fd.loops[i].cond_code == GT_EXPR);
	  bool forward = fd.loops[i].cond_code == LT_EXPR;
	  bool maybe_lexically_later = true;

	  /* While the committee makes up its mind, bail if we have any
	     non-constant steps.  */
	  if (TREE_CODE (fd.loops[i].step) != INTEGER_CST)
	    goto lower_omp_ordered_ret;

	  tree itype = TREE_TYPE (TREE_VALUE (vec));
	  /* Pointer iterators measure their offsets in sizetype.  */
	  if (POINTER_TYPE_P (itype))
	    itype = sizetype;
	  wide_int offset = wide_int::from (wi::to_wide (TREE_PURPOSE (vec)),
					    TYPE_PRECISION (itype),
					    TYPE_SIGN (itype));

	  /* Ignore invalid offsets that are not multiples of the step.  */
	  if (!wi::multiple_of_p (wi::abs (offset),
				  wi::abs (wi::to_wide (fd.loops[i].step)),
				  UNSIGNED))
	    {
	      warning_at (OMP_CLAUSE_LOCATION (c), 0,
			  "ignoring sink clause with offset that is not "
			  "a multiple of the loop step");
	      remove = true;
	      goto next_ordered_clause;
	    }

	  /* Calculate the first dimension.  The first dimension of
	     the folded dependency vector is the GCD of the first
	     elements, while ignoring any first elements whose offset
	     is 0.  */
	  if (i == 0)
	    {
	      /* Ignore dependence vectors whose first dimension is 0.  */
	      if (offset == 0)
		{
		  remove = true;
		  goto next_ordered_clause;
		}
	      else
		{
		  /* The first offset must point backwards in iteration
		     space, i.e. against the loop direction.  */
		  if (!TYPE_UNSIGNED (itype) && (forward ^ wi::neg_p (offset)))
		    {
		      error_at (OMP_CLAUSE_LOCATION (c),
				"first offset must be in opposite direction "
				"of loop iterations");
		      goto lower_omp_ordered_ret;
		    }
		  /* Fold using magnitudes; NEG_OFFSET_P records whether
		     the sign has to be restored at the end.  */
		  if (forward)
		    offset = -offset;
		  neg_offset_p = forward;
		  /* Initialize the first time around.  */
		  if (folded_dep == NULL_TREE)
		    {
		      folded_dep = c;
		      folded_deps[0] = offset;
		    }
		  else
		    folded_deps[0] = wi::gcd (folded_deps[0],
					      offset, UNSIGNED);
		}
	    }
	  /* Calculate minimum for the remaining dimensions.  */
	  else
	    {
	      /* Cache this clause's offset in the upper half.  */
	      folded_deps[len + i - 1] = offset;
	      if (folded_dep == c)
		folded_deps[i] = offset;
	      else if (maybe_lexically_later
		       && !wi::eq_p (folded_deps[i], offset))
		{
		  if (forward ^ wi::gts_p (folded_deps[i], offset))
		    {
		      /* This clause is lexically later: adopt its cached
			 offsets for all dimensions processed so far.  */
		      unsigned int j;
		      folded_dep = c;
		      for (j = 1; j <= i; j++)
			folded_deps[j] = folded_deps[len + j - 1];
		    }
		  else
		    maybe_lexically_later = false;
		}
	    }
	}
      gcc_assert (i == len);

      /* The whole vector has been folded into FOLDED_DEPS; drop the
	 clause from the chain (FOLDED_DEP, if it is this clause, is
	 re-chained at the head below).  */
      remove = true;

    next_ordered_clause:
      if (remove)
	*list_p = OMP_CLAUSE_CHAIN (c);
      else
	list_p = &OMP_CLAUSE_CHAIN (c);
    }

  if (folded_dep)
    {
      /* Restore the direction of the first dimension.  */
      if (neg_offset_p)
	folded_deps[0] = -folded_deps[0];

      tree itype = TREE_TYPE (TREE_VALUE (OMP_CLAUSE_DECL (folded_dep)));
      if (POINTER_TYPE_P (itype))
	itype = sizetype;

      /* Store the folded first offset into the representative clause and
	 put that clause back at the head of the pruned clause list.  */
      TREE_PURPOSE (OMP_CLAUSE_DECL (folded_dep))
	= wide_int_to_tree (itype, folded_deps[0]);
      OMP_CLAUSE_CHAIN (folded_dep) = gimple_omp_ordered_clauses (ord_stmt);
      *gimple_omp_ordered_clauses_ptr (ord_stmt) = folded_dep;
    }

 lower_omp_ordered_ret:

  /* Ordered without clauses is #pragma omp threads, while we want
     a nop instead if we remove all clauses.  */
  if (gimple_omp_ordered_clauses (ord_stmt) == NULL_TREE)
    gsi_replace (gsi_p, gimple_build_nop (), true);
}
9471
9472
/* Expand code for an OpenMP ordered directive.  GSI_P points at the
   GIMPLE_OMP_ORDERED statement, CTX is its omp context.  The construct
   is wrapped in a GIMPLE_BIND that brackets the lowered body with the
   GOMP_ordered_{start,end} runtime calls, or with the
   IFN_GOMP_SIMD_ORDERED_{START,END} internal fns when a simd clause is
   present.  Constructs with depend clauses are left alone here.  */

static void
lower_omp_ordered (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree block;
  gimple *stmt = gsi_stmt (*gsi_p), *g;
  gomp_ordered *ord_stmt = as_a <gomp_ordered *> (stmt);
  gcall *x;
  gbind *bind;
  /* ordered simd is marked with internal fns the vectorizer handles
     instead of calls into libgomp.  */
  bool simd = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
			       OMP_CLAUSE_SIMD);
  /* FIXME: this should check presence of OMP_CLAUSE__SIMT_ on the enclosing
     loop.  */
  bool maybe_simt
    = simd && omp_maybe_offloaded_ctx (ctx) && omp_max_simt_vf () > 1;
  bool threads = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
				  OMP_CLAUSE_THREADS);

  if (omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
		       OMP_CLAUSE_DEPEND))
    {
      /* FIXME: This needs to be moved to the expansion to verify various
	 conditions only testable on cfg with dominators computed, and also
	 all the depend clauses to be merged still might need to be available
	 for the runtime checks.  */
      if (0)
	lower_omp_ordered_clauses (gsi_p, ord_stmt, ctx);
      return;
    }

  push_gimplify_context ();

  /* Replace the statement with a bind that will hold the runtime calls,
     the lowered body and the OMP_RETURN.  */
  block = make_node (BLOCK);
  bind = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, bind, true);
  gimple_bind_add_stmt (bind, stmt);

  if (simd)
    {
      /* THREADS is passed along so expansion can distinguish
	 ordered threads from ordered simd.  */
      x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_START, 1,
				      build_int_cst (NULL_TREE, threads));
      cfun->has_simduid_loops = true;
    }
  else
    x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_START),
			   0);
  gimple_bind_add_stmt (bind, x);

  tree counter = NULL_TREE, test = NULL_TREE, body = NULL_TREE;
  if (maybe_simt)
    {
      /* Under SIMT, serialize the lanes: each lane starts with its lane
	 number as COUNTER, runs the body only when
	 IFN_GOMP_SIMT_ORDERED_PRED says it is its turn, then decrements
	 COUNTER; the loop below repeats while any lane's counter is
	 still non-negative.  */
      counter = create_tmp_var (integer_type_node);
      g = gimple_build_call_internal (IFN_GOMP_SIMT_LANE, 0);
      gimple_call_set_lhs (g, counter);
      gimple_bind_add_stmt (bind, g);

      body = create_artificial_label (UNKNOWN_LOCATION);
      test = create_artificial_label (UNKNOWN_LOCATION);
      gimple_bind_add_stmt (bind, gimple_build_label (body));

      tree simt_pred = create_tmp_var (integer_type_node);
      g = gimple_build_call_internal (IFN_GOMP_SIMT_ORDERED_PRED, 1, counter);
      gimple_call_set_lhs (g, simt_pred);
      gimple_bind_add_stmt (bind, g);

      /* Lanes whose predicate is nonzero skip straight to TEST.  */
      tree t = create_artificial_label (UNKNOWN_LOCATION);
      g = gimple_build_cond (EQ_EXPR, simt_pred, integer_zero_node, t, test);
      gimple_bind_add_stmt (bind, g);

      gimple_bind_add_stmt (bind, gimple_build_label (t));
    }
  lower_omp (gimple_omp_body_ptr (stmt), ctx);
  gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
  gimple_bind_add_seq (bind, gimple_omp_body (stmt));
  gimple_omp_set_body (stmt, NULL);

  if (maybe_simt)
    {
      /* counter--; loop back to BODY while any lane still has
	 counter >= 0 (IFN_GOMP_SIMT_VOTE_ANY across the warp).  */
      gimple_bind_add_stmt (bind, gimple_build_label (test));
      g = gimple_build_assign (counter, MINUS_EXPR, counter, integer_one_node);
      gimple_bind_add_stmt (bind, g);

      tree c = build2 (GE_EXPR, boolean_type_node, counter, integer_zero_node);
      tree nonneg = create_tmp_var (integer_type_node);
      gimple_seq tseq = NULL;
      gimplify_assign (nonneg, fold_convert (integer_type_node, c), &tseq);
      gimple_bind_add_seq (bind, tseq);

      g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY, 1, nonneg);
      gimple_call_set_lhs (g, nonneg);
      gimple_bind_add_stmt (bind, g);

      tree end = create_artificial_label (UNKNOWN_LOCATION);
      g = gimple_build_cond (NE_EXPR, nonneg, integer_zero_node, body, end);
      gimple_bind_add_stmt (bind, g);

      gimple_bind_add_stmt (bind, gimple_build_label (end));
    }
  /* Close the ordered region with the matching end call.  */
  if (simd)
    x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_END, 1,
				    build_int_cst (NULL_TREE, threads));
  else
    x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_END),
			   0);
  gimple_bind_add_stmt (bind, x);

  gimple_bind_add_stmt (bind, gimple_build_omp_return (true));

  pop_gimplify_context (bind);

  gimple_bind_append_vars (bind, ctx->block_vars);
  BLOCK_VARS (block) = gimple_bind_vars (bind);
}
9587
9588
9589 /* Expand code for an OpenMP scan directive and the structured block
9590 before the scan directive. */
9591
9592 static void
lower_omp_scan (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  /* Lower a GIMPLE_OMP_SCAN directive.  CTX is the context of the scan
     construct itself; CTX->outer is the context of the enclosing loop
     (worksharing and/or simd) that carries the inscan reduction
     clause(s).  Statements generated for the current phase are
     accumulated in BEFORE and emitted ahead of the scan body.  */
  gimple *stmt = gsi_stmt (*gsi_p);
  bool has_clauses
    = gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt)) != NULL;
  tree lane = NULL_TREE;
  gimple_seq before = NULL;
  omp_context *octx = ctx->outer;
  gcc_assert (octx);
  if (octx->scan_exclusive && !has_clauses)
    {
      gimple_stmt_iterator gsi2 = *gsi_p;
      gsi_next (&gsi2);
      gimple *stmt2 = gsi_stmt (gsi2);
      /* For exclusive scan, swap GIMPLE_OMP_SCAN without clauses
	 with following GIMPLE_OMP_SCAN with clauses, so that input_phase,
	 the one with exclusive clause(s), comes first.  */
      if (stmt2
	  && gimple_code (stmt2) == GIMPLE_OMP_SCAN
	  && gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt2)) != NULL)
	{
	  gsi_remove (gsi_p, false);
	  gsi_insert_after (gsi_p, stmt, GSI_SAME_STMT);
	  ctx = maybe_lookup_ctx (stmt2);
	  gcc_assert (ctx);
	  /* Recurse to lower the directive that is now first.  */
	  lower_omp_scan (gsi_p, ctx);
	  return;
	}
    }

  /* Whether this GIMPLE_OMP_SCAN delimits the input phase, determined
     from which of the two directives carries the clauses and from the
     scan kind (after the swap above for exclusive scans).  */
  bool input_phase = has_clauses ^ octx->scan_inclusive;
  /* Classify the enclosing construct: a simd loop, a non-combined
     worksharing loop, or a worksharing loop combined with simd.  */
  bool is_simd = (gimple_code (octx->stmt) == GIMPLE_OMP_FOR
		  && gimple_omp_for_kind (octx->stmt) == GF_OMP_FOR_KIND_SIMD);
  bool is_for = (gimple_code (octx->stmt) == GIMPLE_OMP_FOR
		 && gimple_omp_for_kind (octx->stmt) == GF_OMP_FOR_KIND_FOR
		 && !gimple_omp_for_combined_p (octx->stmt));
  bool is_for_simd = is_simd && gimple_omp_for_combined_into_p (octx->stmt);
  if (is_for_simd && octx->for_simd_scan_phase)
    is_simd = false;
  if (is_simd)
    if (tree c = omp_find_clause (gimple_omp_for_clauses (octx->stmt),
				  OMP_CLAUSE__SIMDUID_))
      {
	tree uid = OMP_CLAUSE__SIMDUID__DECL (c);
	lane = create_tmp_var (unsigned_type_node);
	/* The constant argument of the IFN_GOMP_SIMD_LANE call encodes
	   the phase: 1 for the input phase, 2 for an inclusive and 3
	   for an exclusive scan phase.  */
	tree t = build_int_cst (integer_type_node,
				input_phase ? 1
				: octx->scan_inclusive ? 2 : 3);
	gimple *g
	  = gimple_build_call_internal (IFN_GOMP_SIMD_LANE, 2, uid, t);
	gimple_call_set_lhs (g, lane);
	gimple_seq_add_stmt (&before, g);
      }

  if (is_simd || is_for)
    {
      /* Emit the per-phase code for each inscan reduction clause on
	 the enclosing loop.  */
      for (tree c = gimple_omp_for_clauses (octx->stmt);
	   c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	    && OMP_CLAUSE_REDUCTION_INSCAN (c))
	  {
	    location_t clause_loc = OMP_CLAUSE_LOCATION (c);
	    tree var = OMP_CLAUSE_DECL (c);
	    tree new_var = lookup_decl (var, octx);
	    tree val = new_var;
	    tree var2 = NULL_TREE;
	    tree var3 = NULL_TREE;
	    tree var4 = NULL_TREE;
	    tree lane0 = NULL_TREE;
	    tree new_vard = new_var;
	    if (omp_is_reference (var))
	      {
		/* For a reduction on a reference, operate on what the
		   reference points to.  */
		new_var = build_simple_mem_ref_loc (clause_loc, new_var);
		val = new_var;
	      }
	    if (DECL_HAS_VALUE_EXPR_P (new_vard))
	      {
		val = DECL_VALUE_EXPR (new_vard);
		if (new_vard != new_var)
		  {
		    gcc_assert (TREE_CODE (val) == ADDR_EXPR);
		    val = TREE_OPERAND (val, 0);
		  }
		if (TREE_CODE (val) == ARRAY_REF
		    && VAR_P (TREE_OPERAND (val, 0)))
		  {
		    tree v = TREE_OPERAND (val, 0);
		    if (lookup_attribute ("omp simd array",
					  DECL_ATTRIBUTES (v)))
		      {
			/* The privatized variable has become an element
			   of an "omp simd array"; redirect the index to
			   the current simd LANE and remember the original
			   index in LANE0.  */
			val = unshare_expr (val);
			lane0 = TREE_OPERAND (val, 1);
			TREE_OPERAND (val, 1) = lane;
			var2 = lookup_decl (v, octx);
			if (octx->scan_exclusive)
			  var4 = lookup_decl (var2, octx);
			if (input_phase
			    && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
			  var3 = maybe_lookup_decl (var4 ? var4 : var2, octx);
			if (!input_phase)
			  {
			    /* Scan phase: address the LANE element of the
			       arrays directly.  */
			    var2 = build4 (ARRAY_REF, TREE_TYPE (val),
					   var2, lane, NULL_TREE, NULL_TREE);
			    TREE_THIS_NOTRAP (var2) = 1;
			    if (octx->scan_exclusive)
			      {
				var4 = build4 (ARRAY_REF, TREE_TYPE (val),
					       var4, lane, NULL_TREE,
					       NULL_TREE);
				TREE_THIS_NOTRAP (var4) = 1;
			      }
			  }
			else
			  var2 = val;
		      }
		  }
		gcc_assert (var2);
	      }
	    else
	      {
		var2 = build_outer_var_ref (var, octx);
		if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
		  {
		    /* Reduction with a placeholder (user-defined
		       reduction): look up the helper decls registered
		       for it in the outer context, if any.  */
		    var3 = maybe_lookup_decl (new_vard, octx);
		    if (var3 == new_vard || var3 == NULL_TREE)
		      var3 = NULL_TREE;
		    else if (is_simd && octx->scan_exclusive && !input_phase)
		      {
			var4 = maybe_lookup_decl (var3, octx);
			if (var4 == var3 || var4 == NULL_TREE)
			  {
			    if (TREE_ADDRESSABLE (TREE_TYPE (new_var)))
			      {
				var4 = var3;
				var3 = NULL_TREE;
			      }
			    else
			      var4 = NULL_TREE;
			  }
		      }
		  }
		if (is_simd
		    && octx->scan_exclusive
		    && !input_phase
		    && var4 == NULL_TREE)
		  /* Exclusive scan in the scan phase needs an extra
		     temporary (VAR4) to hold the previous value.  */
		  var4 = create_tmp_var (TREE_TYPE (val));
	      }
	    if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	      {
		/* User-defined reduction: use the recorded GIMPLE init
		   and merge sequences with the placeholder and the
		   privatized variable substituted in.  */
		tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
		if (input_phase)
		  {
		    if (var3)
		      {
			/* If we've added a separate identity element
			   variable, copy it over into val.  */
			tree x = lang_hooks.decls.omp_clause_assign_op (c, val,
									var3);
			gimplify_and_add (x, &before);
		      }
		    else if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
		      {
			/* Otherwise, assign to it the identity element.  */
			gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
			if (is_for)
			  tseq = copy_gimple_seq_and_replace_locals (tseq);
			tree ref = build_outer_var_ref (var, octx);
			tree x = (DECL_HAS_VALUE_EXPR_P (new_vard)
				  ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
			if (x)
			  {
			    /* Temporarily point NEW_VARD at VAL while
			       lowering the init sequence; restored after
			       the lower_omp call below.  */
			    if (new_vard != new_var)
			      val = build_fold_addr_expr_loc (clause_loc, val);
			    SET_DECL_VALUE_EXPR (new_vard, val);
			  }
			SET_DECL_VALUE_EXPR (placeholder, ref);
			DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
			lower_omp (&tseq, octx);
			if (x)
			  SET_DECL_VALUE_EXPR (new_vard, x);
			SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
			DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
			gimple_seq_add_seq (&before, tseq);
			if (is_simd)
			  OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
		      }
		  }
		else if (is_simd)
		  {
		    /* Scan phase: combine the per-lane value into the
		       running value VAR2 using the UDR merge sequence.  */
		    tree x;
		    if (octx->scan_exclusive)
		      {
			/* For exclusive scan, save the previous running
			   value into VAR4 before it is updated.  */
			tree v4 = unshare_expr (var4);
			tree v2 = unshare_expr (var2);
			x = lang_hooks.decls.omp_clause_assign_op (c, v4, v2);
			gimplify_and_add (x, &before);
		      }
		    gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
		    x = (DECL_HAS_VALUE_EXPR_P (new_vard)
			 ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
		    tree vexpr = val;
		    if (x && new_vard != new_var)
		      vexpr = build_fold_addr_expr_loc (clause_loc, val);
		    if (x)
		      SET_DECL_VALUE_EXPR (new_vard, vexpr);
		    SET_DECL_VALUE_EXPR (placeholder, var2);
		    DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
		    lower_omp (&tseq, octx);
		    gimple_seq_add_seq (&before, tseq);
		    OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
		    if (x)
		      SET_DECL_VALUE_EXPR (new_vard, x);
		    SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
		    DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
		    if (octx->scan_inclusive)
		      {
			/* Inclusive scan: the user code sees the updated
			   running value.  */
			x = lang_hooks.decls.omp_clause_assign_op (c, val,
								   var2);
			gimplify_and_add (x, &before);
		      }
		    else if (lane0 == NULL_TREE)
		      {
			/* Exclusive scan (no simd array): the user code
			   sees the value saved before the merge.  */
			x = lang_hooks.decls.omp_clause_assign_op (c, val,
								   var4);
			gimplify_and_add (x, &before);
		      }
		  }
	      }
	    else
	      {
		if (input_phase)
		  {
		    /* input phase.  Set val to initializer before
		       the body.  */
		    tree x = omp_reduction_init (c, TREE_TYPE (new_var));
		    gimplify_assign (val, x, &before);
		  }
		else if (is_simd)
		  {
		    /* scan phase.  Combine with the reduction operator;
		       MINUS reductions accumulate with PLUS.  */
		    enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
		    if (code == MINUS_EXPR)
		      code = PLUS_EXPR;

		    tree x = build2 (code, TREE_TYPE (var2),
				     unshare_expr (var2), unshare_expr (val));
		    if (octx->scan_inclusive)
		      {
			gimplify_assign (unshare_expr (var2), x, &before);
			gimplify_assign (val, var2, &before);
		      }
		    else
		      {
			/* Exclusive: save the old running value in VAR4
			   first, then update VAR2.  */
			gimplify_assign (unshare_expr (var4),
					 unshare_expr (var2), &before);
			gimplify_assign (var2, x, &before);
			if (lane0 == NULL_TREE)
			  gimplify_assign (val, var4, &before);
		      }
		  }
	      }
	    if (octx->scan_exclusive && !input_phase && lane0)
	      {
		/* Simd-array case of exclusive scan: make the privatized
		   variable refer to the element saved at the original
		   index LANE0.  */
		tree vexpr = unshare_expr (var4);
		TREE_OPERAND (vexpr, 1) = lane0;
		if (new_vard != new_var)
		  vexpr = build_fold_addr_expr_loc (clause_loc, vexpr);
		SET_DECL_VALUE_EXPR (new_vard, vexpr);
	      }
	  }
    }
  if (is_simd && !is_for_simd)
    {
      /* Dissolve the GIMPLE_OMP_SCAN: emit BEFORE and then the scan
	 body in its place (each sequence is inserted right after the
	 stmt, so BEFORE, inserted second, ends up first), and turn the
	 stmt itself into a nop.  */
      gsi_insert_seq_after (gsi_p, gimple_omp_body (stmt), GSI_SAME_STMT);
      gsi_insert_seq_after (gsi_p, before, GSI_SAME_STMT);
      gsi_replace (gsi_p, gimple_build_nop (), true);
      return;
    }
  lower_omp (gimple_omp_body_ptr (stmt), octx);
  if (before)
    {
      /* Prepend the generated phase code to the lowered scan body.  */
      gimple_stmt_iterator gsi = gsi_start_1 (gimple_omp_body_ptr (stmt));
      gsi_insert_seq_before (&gsi, before, GSI_SAME_STMT);
    }
}
9878
9879
/* Gimplify a GIMPLE_OMP_CRITICAL statement.  This is a relatively simple
   substitution of a couple of function calls.  But in the NAMED case,
   requires that languages coordinate a symbol name.  It is therefore
   best put here in common code.  */

/* Map from the IDENTIFIER_NODE naming a critical section to the
   artificial global mutex variable created for it below.  */
static GTY(()) hash_map<tree, tree> *critical_name_mutexes;

static void
lower_omp_critical (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree block;
  tree name, lock, unlock;
  gomp_critical *stmt = as_a <gomp_critical *> (gsi_stmt (*gsi_p));
  gbind *bind;
  location_t loc = gimple_location (stmt);
  gimple_seq tbody;

  name = gimple_omp_critical_name (stmt);
  if (name)
    {
      tree decl;

      if (!critical_name_mutexes)
	critical_name_mutexes = hash_map<tree, tree>::create_ggc (10);

      tree *n = critical_name_mutexes->get (name);
      if (n == NULL)
	{
	  char *new_str;

	  /* Lazily create one mutex variable per critical name.  It is
	     made TREE_PUBLIC/DECL_COMMON so all translation units using
	     the same name share a single lock.  */
	  decl = create_tmp_var_raw (ptr_type_node);

	  new_str = ACONCAT ((".gomp_critical_user_",
			      IDENTIFIER_POINTER (name), NULL));
	  DECL_NAME (decl) = get_identifier (new_str);
	  TREE_PUBLIC (decl) = 1;
	  TREE_STATIC (decl) = 1;
	  DECL_COMMON (decl) = 1;
	  DECL_ARTIFICIAL (decl) = 1;
	  DECL_IGNORED_P (decl) = 1;

	  varpool_node::finalize_decl (decl);

	  critical_name_mutexes->put (name, decl);
	}
      else
	decl = *n;

      /* If '#pragma omp critical' is inside offloaded region or
	 inside function marked as offloadable, the symbol must be
	 marked as offloadable too.  */
      omp_context *octx;
      if (cgraph_node::get (current_function_decl)->offloadable)
	varpool_node::get_create (decl)->offloadable = 1;
      else
	for (octx = ctx->outer; octx; octx = octx->outer)
	  if (is_gimple_omp_offloaded (octx->stmt))
	    {
	      varpool_node::get_create (decl)->offloadable = 1;
	      break;
	    }

      lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_START);
      lock = build_call_expr_loc (loc, lock, 1,
				  build_fold_addr_expr_loc (loc, decl));

      unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_END);
      unlock = build_call_expr_loc (loc, unlock, 1,
				    build_fold_addr_expr_loc (loc, decl));
    }
  else
    {
      /* Unnamed critical sections use the argument-less libgomp entry
	 points.  */
      lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_START);
      lock = build_call_expr_loc (loc, lock, 0);

      unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_END);
      unlock = build_call_expr_loc (loc, unlock, 0);
    }

  push_gimplify_context ();

  /* Replace the critical stmt with a new GIMPLE_BIND holding, in order:
     the stmt itself, the lock call, the lowered body (wrapped by
     maybe_catch_exception), the unlock call and an OMP return marker.  */
  block = make_node (BLOCK);
  bind = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, bind, true);
  gimple_bind_add_stmt (bind, stmt);

  tbody = gimple_bind_body (bind);
  gimplify_and_add (lock, &tbody);
  gimple_bind_set_body (bind, tbody);

  lower_omp (gimple_omp_body_ptr (stmt), ctx);
  gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
  gimple_bind_add_seq (bind, gimple_omp_body (stmt));
  gimple_omp_set_body (stmt, NULL);

  tbody = gimple_bind_body (bind);
  gimplify_and_add (unlock, &tbody);
  gimple_bind_set_body (bind, tbody);

  gimple_bind_add_stmt (bind, gimple_build_omp_return (true));

  pop_gimplify_context (bind);
  gimple_bind_append_vars (bind, ctx->block_vars);
  BLOCK_VARS (block) = gimple_bind_vars (bind);
}
9985
/* A subroutine of lower_omp_for.  Generate code to emit the predicate
   for a lastprivate clause.  Given a loop control predicate of (V
   cond N2), we gate the clause on (!(V cond N2)).  The lowered form
   is appended to *DLIST, iterator initialization is appended to
   *BODY_P.  *CLIST is for lastprivate(conditional:) code that needs
   to be emitted in a critical section.  */

static void
lower_omp_for_lastprivate (struct omp_for_data *fd, gimple_seq *body_p,
			   gimple_seq *dlist, gimple_seq *clist,
			   struct omp_context *ctx)
{
  tree clauses, cond, vinit;
  enum tree_code cond_code;
  gimple_seq stmts;

  /* Negate the loop continuation test: GE for an upward (LT) loop,
     LE otherwise.  */
  cond_code = fd->loop.cond_code;
  cond_code = cond_code == LT_EXPR ? GE_EXPR : LE_EXPR;

  /* When possible, use a strict equality expression.  This can let VRP
     type optimizations deduce the value and remove a copy.  */
  if (tree_fits_shwi_p (fd->loop.step))
    {
      HOST_WIDE_INT step = tree_to_shwi (fd->loop.step);
      if (step == 1 || step == -1)
	cond_code = EQ_EXPR;
    }

  tree n2 = fd->loop.n2;
  if (fd->collapse > 1
      && TREE_CODE (n2) != INTEGER_CST
      && gimple_omp_for_combined_into_p (fd->for_stmt))
    {
      /* For a collapsed loop combined into an outer construct whose end
	 value is not a constant, the comparison must use the original
	 (uncombined) end value.  Dig it out of the _LOOPTEMP_ clauses of
	 the enclosing taskreg construct, or recompute it from the outer
	 GIMPLE_OMP_FOR.  */
      struct omp_context *taskreg_ctx = NULL;
      if (gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
	{
	  gomp_for *gfor = as_a <gomp_for *> (ctx->outer->stmt);
	  if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_FOR
	      || gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_DISTRIBUTE)
	    {
	      if (gimple_omp_for_combined_into_p (gfor))
		{
		  /* The outer loop is itself combined into a parallel;
		     the _LOOPTEMP_ clauses live on that construct.  */
		  gcc_assert (ctx->outer->outer
			      && is_parallel_ctx (ctx->outer->outer));
		  taskreg_ctx = ctx->outer->outer;
		}
	      else
		{
		  /* Otherwise recompute the end value directly from the
		     outer loop's bounds.  */
		  struct omp_for_data outer_fd;
		  omp_extract_for_data (gfor, &outer_fd, NULL);
		  n2 = fold_convert (TREE_TYPE (n2), outer_fd.loop.n2);
		}
	    }
	  else if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_TASKLOOP)
	    taskreg_ctx = ctx->outer->outer;
	}
      else if (is_taskreg_ctx (ctx->outer))
	taskreg_ctx = ctx->outer;
      if (taskreg_ctx)
	{
	  int i;
	  tree taskreg_clauses
	    = gimple_omp_taskreg_clauses (taskreg_ctx->stmt);
	  tree innerc = omp_find_clause (taskreg_clauses,
					 OMP_CLAUSE__LOOPTEMP_);
	  gcc_assert (innerc);
	  /* Skip the first COUNT _LOOPTEMP_ clauses: one per collapsed
	     dimension, plus 4 extra for the non-rectangular case checked
	     below.  The clause after those, if present, holds the end
	     value.  */
	  int count = fd->collapse;
	  if (fd->non_rect
	      && fd->last_nonrect == fd->first_nonrect + 1)
	    if (tree v = gimple_omp_for_index (fd->for_stmt, fd->last_nonrect))
	      if (!TYPE_UNSIGNED (TREE_TYPE (v)))
		count += 4;
	  for (i = 0; i < count; i++)
	    {
	      innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
					OMP_CLAUSE__LOOPTEMP_);
	      gcc_assert (innerc);
	    }
	  innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
				    OMP_CLAUSE__LOOPTEMP_);
	  if (innerc)
	    n2 = fold_convert (TREE_TYPE (n2),
			       lookup_decl (OMP_CLAUSE_DECL (innerc),
					    taskreg_ctx));
	}
    }
  cond = build2 (cond_code, boolean_type_node, fd->loop.v, n2);

  clauses = gimple_omp_for_clauses (fd->for_stmt);
  stmts = NULL;
  lower_lastprivate_clauses (clauses, cond, body_p, &stmts, clist, ctx);
  if (!gimple_seq_empty_p (stmts))
    {
      /* Emit the lastprivate code ahead of whatever is already queued
	 in *DLIST.  */
      gimple_seq_add_seq (&stmts, *dlist);
      *dlist = stmts;

      /* Optimize: v = 0; is usually cheaper than v = some_other_constant.  */
      vinit = fd->loop.n1;
      if (cond_code == EQ_EXPR
	  && tree_fits_shwi_p (fd->loop.n2)
	  && ! integer_zerop (fd->loop.n2))
	vinit = build_int_cst (TREE_TYPE (fd->loop.v), 0);
      else
	vinit = unshare_expr (vinit);

      /* Initialize the iterator variable, so that threads that don't execute
	 any iterations don't execute the lastprivate clauses by accident.  */
      gimplify_assign (fd->loop.v, vinit, body_p);
    }
}
10096
10097 /* Callback for walk_gimple_seq. Find #pragma omp scan statement. */
10098
10099 static tree
omp_find_scan(gimple_stmt_iterator * gsi_p,bool * handled_ops_p,struct walk_stmt_info * wi)10100 omp_find_scan (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
10101 struct walk_stmt_info *wi)
10102 {
10103 gimple *stmt = gsi_stmt (*gsi_p);
10104
10105 *handled_ops_p = true;
10106 switch (gimple_code (stmt))
10107 {
10108 WALK_SUBSTMTS;
10109
10110 case GIMPLE_OMP_FOR:
10111 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_SIMD
10112 && gimple_omp_for_combined_into_p (stmt))
10113 *handled_ops_p = false;
10114 break;
10115
10116 case GIMPLE_OMP_SCAN:
10117 *(gimple_stmt_iterator *) (wi->info) = *gsi_p;
10118 return integer_zero_node;
10119 default:
10120 break;
10121 }
10122 return NULL;
10123 }
10124
10125 /* Helper function for lower_omp_for, add transformations for a worksharing
10126 loop with scan directives inside of it.
10127 For worksharing loop not combined with simd, transform:
10128 #pragma omp for reduction(inscan,+:r) private(i)
10129 for (i = 0; i < n; i = i + 1)
10130 {
10131 {
10132 update (r);
10133 }
10134 #pragma omp scan inclusive(r)
10135 {
10136 use (r);
10137 }
10138 }
10139
10140 into two worksharing loops + code to merge results:
10141
10142 num_threads = omp_get_num_threads ();
10143 thread_num = omp_get_thread_num ();
10144 if (thread_num == 0) goto <D.2099>; else goto <D.2100>;
10145 <D.2099>:
10146 var2 = r;
10147 goto <D.2101>;
10148 <D.2100>:
10149 // For UDRs this is UDR init, or if ctors are needed, copy from
10150 // var3 that has been constructed to contain the neutral element.
10151 var2 = 0;
10152 <D.2101>:
10153 ivar = 0;
10154 // The _scantemp_ clauses will arrange for rpriva to be initialized to
10155 // a shared array with num_threads elements and rprivb to a local array
10156 // number of elements equal to the number of (contiguous) iterations the
10157 // current thread will perform. controlb and controlp variables are
10158 // temporaries to handle deallocation of rprivb at the end of second
10159 // GOMP_FOR.
10160 #pragma omp for _scantemp_(rpriva) _scantemp_(rprivb) _scantemp_(controlb) \
10161 _scantemp_(controlp) reduction(inscan,+:r) private(i) nowait
10162 for (i = 0; i < n; i = i + 1)
10163 {
10164 {
10165 // For UDRs this is UDR init or copy from var3.
10166 r = 0;
10167 // This is the input phase from user code.
10168 update (r);
10169 }
10170 {
10171 // For UDRs this is UDR merge.
10172 var2 = var2 + r;
10173 // Rather than handing it over to the user, save to local thread's
10174 // array.
10175 rprivb[ivar] = var2;
10176 // For exclusive scan, the above two statements are swapped.
10177 ivar = ivar + 1;
10178 }
10179 }
10180 // And remember the final value from this thread's into the shared
10181 // rpriva array.
10182 rpriva[(sizetype) thread_num] = var2;
10183 // If more than one thread, compute using Work-Efficient prefix sum
10184 // the inclusive parallel scan of the rpriva array.
10185 if (num_threads > 1) goto <D.2102>; else goto <D.2103>;
10186 <D.2102>:
10187 GOMP_barrier ();
10188 down = 0;
10189 k = 1;
10190 num_threadsu = (unsigned int) num_threads;
10191 thread_numup1 = (unsigned int) thread_num + 1;
10192 <D.2108>:
10193 twok = k << 1;
10194 if (twok > num_threadsu) goto <D.2110>; else goto <D.2111>;
10195 <D.2110>:
10196 down = 4294967295;
10197 k = k >> 1;
10198 if (k == num_threadsu) goto <D.2112>; else goto <D.2111>;
10199 <D.2112>:
10200 k = k >> 1;
10201 <D.2111>:
10202 twok = k << 1;
 10203	 cplx = .MUL_OVERFLOW (thread_numup1, twok);
10204 mul = REALPART_EXPR <cplx>;
10205 ovf = IMAGPART_EXPR <cplx>;
10206 if (ovf == 0) goto <D.2116>; else goto <D.2117>;
10207 <D.2116>:
10208 andv = k & down;
10209 andvm1 = andv + 4294967295;
10210 l = mul + andvm1;
10211 if (l < num_threadsu) goto <D.2120>; else goto <D.2117>;
10212 <D.2120>:
10213 // For UDRs this is UDR merge, performed using var2 variable as temporary,
10214 // i.e. var2 = rpriva[l - k]; UDR merge (var2, rpriva[l]); rpriva[l] = var2;
10215 rpriva[l] = rpriva[l - k] + rpriva[l];
10216 <D.2117>:
10217 if (down == 0) goto <D.2121>; else goto <D.2122>;
10218 <D.2121>:
10219 k = k << 1;
10220 goto <D.2123>;
10221 <D.2122>:
10222 k = k >> 1;
10223 <D.2123>:
10224 GOMP_barrier ();
10225 if (k != 0) goto <D.2108>; else goto <D.2103>;
10226 <D.2103>:
10227 if (thread_num == 0) goto <D.2124>; else goto <D.2125>;
10228 <D.2124>:
10229 // For UDRs this is UDR init or copy from var3.
10230 var2 = 0;
10231 goto <D.2126>;
10232 <D.2125>:
10233 var2 = rpriva[thread_num - 1];
10234 <D.2126>:
10235 ivar = 0;
10236 #pragma omp for _scantemp_(controlb) _scantemp_(controlp) \
10237 reduction(inscan,+:r) private(i)
10238 for (i = 0; i < n; i = i + 1)
10239 {
10240 {
10241 // For UDRs, this is r = var2; UDR merge (r, rprivb[ivar]);
10242 r = var2 + rprivb[ivar];
10243 }
10244 {
10245 // This is the scan phase from user code.
10246 use (r);
10247 // Plus a bump of the iterator.
10248 ivar = ivar + 1;
10249 }
10250 } */
10251
10252 static void
lower_omp_for_scan(gimple_seq * body_p,gimple_seq * dlist,gomp_for * stmt,struct omp_for_data * fd,omp_context * ctx)10253 lower_omp_for_scan (gimple_seq *body_p, gimple_seq *dlist, gomp_for *stmt,
10254 struct omp_for_data *fd, omp_context *ctx)
10255 {
10256 bool is_for_simd = gimple_omp_for_combined_p (stmt);
10257 gcc_assert (ctx->scan_inclusive || ctx->scan_exclusive);
10258
10259 gimple_seq body = gimple_omp_body (stmt);
10260 gimple_stmt_iterator input1_gsi = gsi_none ();
10261 struct walk_stmt_info wi;
10262 memset (&wi, 0, sizeof (wi));
10263 wi.val_only = true;
10264 wi.info = (void *) &input1_gsi;
10265 walk_gimple_seq_mod (&body, omp_find_scan, NULL, &wi);
10266 gcc_assert (!gsi_end_p (input1_gsi));
10267
10268 gimple *input_stmt1 = gsi_stmt (input1_gsi);
10269 gimple_stmt_iterator gsi = input1_gsi;
10270 gsi_next (&gsi);
10271 gimple_stmt_iterator scan1_gsi = gsi;
10272 gimple *scan_stmt1 = gsi_stmt (gsi);
10273 gcc_assert (scan_stmt1 && gimple_code (scan_stmt1) == GIMPLE_OMP_SCAN);
10274
10275 gimple_seq input_body = gimple_omp_body (input_stmt1);
10276 gimple_seq scan_body = gimple_omp_body (scan_stmt1);
10277 gimple_omp_set_body (input_stmt1, NULL);
10278 gimple_omp_set_body (scan_stmt1, NULL);
10279 gimple_omp_set_body (stmt, NULL);
10280
10281 gomp_for *new_stmt = as_a <gomp_for *> (gimple_copy (stmt));
10282 gimple_seq new_body = copy_gimple_seq_and_replace_locals (body);
10283 gimple_omp_set_body (stmt, body);
10284 gimple_omp_set_body (input_stmt1, input_body);
10285
10286 gimple_stmt_iterator input2_gsi = gsi_none ();
10287 memset (&wi, 0, sizeof (wi));
10288 wi.val_only = true;
10289 wi.info = (void *) &input2_gsi;
10290 walk_gimple_seq_mod (&new_body, omp_find_scan, NULL, &wi);
10291 gcc_assert (!gsi_end_p (input2_gsi));
10292
10293 gimple *input_stmt2 = gsi_stmt (input2_gsi);
10294 gsi = input2_gsi;
10295 gsi_next (&gsi);
10296 gimple_stmt_iterator scan2_gsi = gsi;
10297 gimple *scan_stmt2 = gsi_stmt (gsi);
10298 gcc_assert (scan_stmt2 && gimple_code (scan_stmt2) == GIMPLE_OMP_SCAN);
10299 gimple_omp_set_body (scan_stmt2, scan_body);
10300
10301 gimple_stmt_iterator input3_gsi = gsi_none ();
10302 gimple_stmt_iterator scan3_gsi = gsi_none ();
10303 gimple_stmt_iterator input4_gsi = gsi_none ();
10304 gimple_stmt_iterator scan4_gsi = gsi_none ();
10305 gimple *input_stmt3 = NULL, *scan_stmt3 = NULL;
10306 gimple *input_stmt4 = NULL, *scan_stmt4 = NULL;
10307 omp_context *input_simd_ctx = NULL, *scan_simd_ctx = NULL;
10308 if (is_for_simd)
10309 {
10310 memset (&wi, 0, sizeof (wi));
10311 wi.val_only = true;
10312 wi.info = (void *) &input3_gsi;
10313 walk_gimple_seq_mod (&input_body, omp_find_scan, NULL, &wi);
10314 gcc_assert (!gsi_end_p (input3_gsi));
10315
10316 input_stmt3 = gsi_stmt (input3_gsi);
10317 gsi = input3_gsi;
10318 gsi_next (&gsi);
10319 scan3_gsi = gsi;
10320 scan_stmt3 = gsi_stmt (gsi);
10321 gcc_assert (scan_stmt3 && gimple_code (scan_stmt3) == GIMPLE_OMP_SCAN);
10322
10323 memset (&wi, 0, sizeof (wi));
10324 wi.val_only = true;
10325 wi.info = (void *) &input4_gsi;
10326 walk_gimple_seq_mod (&scan_body, omp_find_scan, NULL, &wi);
10327 gcc_assert (!gsi_end_p (input4_gsi));
10328
10329 input_stmt4 = gsi_stmt (input4_gsi);
10330 gsi = input4_gsi;
10331 gsi_next (&gsi);
10332 scan4_gsi = gsi;
10333 scan_stmt4 = gsi_stmt (gsi);
10334 gcc_assert (scan_stmt4 && gimple_code (scan_stmt4) == GIMPLE_OMP_SCAN);
10335
10336 input_simd_ctx = maybe_lookup_ctx (input_stmt3)->outer;
10337 scan_simd_ctx = maybe_lookup_ctx (input_stmt4)->outer;
10338 }
10339
10340 tree num_threads = create_tmp_var (integer_type_node);
10341 tree thread_num = create_tmp_var (integer_type_node);
10342 tree nthreads_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS);
10343 tree threadnum_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
10344 gimple *g = gimple_build_call (nthreads_decl, 0);
10345 gimple_call_set_lhs (g, num_threads);
10346 gimple_seq_add_stmt (body_p, g);
10347 g = gimple_build_call (threadnum_decl, 0);
10348 gimple_call_set_lhs (g, thread_num);
10349 gimple_seq_add_stmt (body_p, g);
10350
10351 tree ivar = create_tmp_var (sizetype);
10352 tree new_clauses1 = NULL_TREE, new_clauses2 = NULL_TREE;
10353 tree *cp1 = &new_clauses1, *cp2 = &new_clauses2;
10354 tree k = create_tmp_var (unsigned_type_node);
10355 tree l = create_tmp_var (unsigned_type_node);
10356
10357 gimple_seq clist = NULL, mdlist = NULL;
10358 gimple_seq thr01_list = NULL, thrn1_list = NULL;
10359 gimple_seq thr02_list = NULL, thrn2_list = NULL;
10360 gimple_seq scan1_list = NULL, input2_list = NULL;
10361 gimple_seq last_list = NULL, reduc_list = NULL;
10362 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
10363 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
10364 && OMP_CLAUSE_REDUCTION_INSCAN (c))
10365 {
10366 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
10367 tree var = OMP_CLAUSE_DECL (c);
10368 tree new_var = lookup_decl (var, ctx);
10369 tree var3 = NULL_TREE;
10370 tree new_vard = new_var;
10371 if (omp_is_reference (var))
10372 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
10373 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
10374 {
10375 var3 = maybe_lookup_decl (new_vard, ctx);
10376 if (var3 == new_vard)
10377 var3 = NULL_TREE;
10378 }
10379
10380 tree ptype = build_pointer_type (TREE_TYPE (new_var));
10381 tree rpriva = create_tmp_var (ptype);
10382 tree nc = build_omp_clause (clause_loc, OMP_CLAUSE__SCANTEMP_);
10383 OMP_CLAUSE_DECL (nc) = rpriva;
10384 *cp1 = nc;
10385 cp1 = &OMP_CLAUSE_CHAIN (nc);
10386
10387 tree rprivb = create_tmp_var (ptype);
10388 nc = build_omp_clause (clause_loc, OMP_CLAUSE__SCANTEMP_);
10389 OMP_CLAUSE_DECL (nc) = rprivb;
10390 OMP_CLAUSE__SCANTEMP__ALLOC (nc) = 1;
10391 *cp1 = nc;
10392 cp1 = &OMP_CLAUSE_CHAIN (nc);
10393
10394 tree var2 = create_tmp_var_raw (TREE_TYPE (new_var));
10395 if (new_vard != new_var)
10396 TREE_ADDRESSABLE (var2) = 1;
10397 gimple_add_tmp_var (var2);
10398
10399 tree x = fold_convert_loc (clause_loc, sizetype, thread_num);
10400 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
10401 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
10402 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
10403 tree rpriva_ref = build_simple_mem_ref_loc (clause_loc, x);
10404
10405 x = fold_build2_loc (clause_loc, PLUS_EXPR, integer_type_node,
10406 thread_num, integer_minus_one_node);
10407 x = fold_convert_loc (clause_loc, sizetype, x);
10408 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
10409 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
10410 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
10411 tree rprivam1_ref = build_simple_mem_ref_loc (clause_loc, x);
10412
10413 x = fold_convert_loc (clause_loc, sizetype, l);
10414 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
10415 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
10416 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
10417 tree rprival_ref = build_simple_mem_ref_loc (clause_loc, x);
10418
10419 x = fold_build2_loc (clause_loc, MINUS_EXPR, unsigned_type_node, l, k);
10420 x = fold_convert_loc (clause_loc, sizetype, x);
10421 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
10422 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
10423 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
10424 tree rprivalmk_ref = build_simple_mem_ref_loc (clause_loc, x);
10425
10426 x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, ivar,
10427 TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
10428 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rprivb), rprivb, x);
10429 tree rprivb_ref = build_simple_mem_ref_loc (clause_loc, x);
10430
10431 tree var4 = is_for_simd ? new_var : var2;
10432 tree var5 = NULL_TREE, var6 = NULL_TREE;
10433 if (is_for_simd)
10434 {
10435 var5 = lookup_decl (var, input_simd_ctx);
10436 var6 = lookup_decl (var, scan_simd_ctx);
10437 if (new_vard != new_var)
10438 {
10439 var5 = build_simple_mem_ref_loc (clause_loc, var5);
10440 var6 = build_simple_mem_ref_loc (clause_loc, var6);
10441 }
10442 }
10443 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
10444 {
10445 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
10446 tree val = var2;
10447
10448 x = lang_hooks.decls.omp_clause_default_ctor
10449 (c, var2, build_outer_var_ref (var, ctx));
10450 if (x)
10451 gimplify_and_add (x, &clist);
10452
10453 x = build_outer_var_ref (var, ctx);
10454 x = lang_hooks.decls.omp_clause_assign_op (c, unshare_expr (var4),
10455 x);
10456 gimplify_and_add (x, &thr01_list);
10457
10458 tree y = (DECL_HAS_VALUE_EXPR_P (new_vard)
10459 ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
10460 if (var3)
10461 {
10462 x = unshare_expr (var4);
10463 x = lang_hooks.decls.omp_clause_assign_op (c, x, var3);
10464 gimplify_and_add (x, &thrn1_list);
10465 x = unshare_expr (var4);
10466 x = lang_hooks.decls.omp_clause_assign_op (c, x, var3);
10467 gimplify_and_add (x, &thr02_list);
10468 }
10469 else if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
10470 {
10471 /* Otherwise, assign to it the identity element. */
10472 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
10473 tseq = copy_gimple_seq_and_replace_locals (tseq);
10474 if (!is_for_simd)
10475 {
10476 if (new_vard != new_var)
10477 val = build_fold_addr_expr_loc (clause_loc, val);
10478 SET_DECL_VALUE_EXPR (new_vard, val);
10479 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
10480 }
10481 SET_DECL_VALUE_EXPR (placeholder, error_mark_node);
10482 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
10483 lower_omp (&tseq, ctx);
10484 gimple_seq_add_seq (&thrn1_list, tseq);
10485 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
10486 lower_omp (&tseq, ctx);
10487 gimple_seq_add_seq (&thr02_list, tseq);
10488 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
10489 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
10490 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
10491 if (y)
10492 SET_DECL_VALUE_EXPR (new_vard, y);
10493 else
10494 {
10495 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
10496 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
10497 }
10498 }
10499
10500 x = unshare_expr (var4);
10501 x = lang_hooks.decls.omp_clause_assign_op (c, x, rprivam1_ref);
10502 gimplify_and_add (x, &thrn2_list);
10503
10504 if (is_for_simd)
10505 {
10506 x = unshare_expr (rprivb_ref);
10507 x = lang_hooks.decls.omp_clause_assign_op (c, x, var5);
10508 gimplify_and_add (x, &scan1_list);
10509 }
10510 else
10511 {
10512 if (ctx->scan_exclusive)
10513 {
10514 x = unshare_expr (rprivb_ref);
10515 x = lang_hooks.decls.omp_clause_assign_op (c, x, var2);
10516 gimplify_and_add (x, &scan1_list);
10517 }
10518
10519 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
10520 tseq = copy_gimple_seq_and_replace_locals (tseq);
10521 SET_DECL_VALUE_EXPR (placeholder, var2);
10522 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
10523 lower_omp (&tseq, ctx);
10524 gimple_seq_add_seq (&scan1_list, tseq);
10525
10526 if (ctx->scan_inclusive)
10527 {
10528 x = unshare_expr (rprivb_ref);
10529 x = lang_hooks.decls.omp_clause_assign_op (c, x, var2);
10530 gimplify_and_add (x, &scan1_list);
10531 }
10532 }
10533
10534 x = unshare_expr (rpriva_ref);
10535 x = lang_hooks.decls.omp_clause_assign_op (c, x,
10536 unshare_expr (var4));
10537 gimplify_and_add (x, &mdlist);
10538
10539 x = unshare_expr (is_for_simd ? var6 : new_var);
10540 x = lang_hooks.decls.omp_clause_assign_op (c, x, var4);
10541 gimplify_and_add (x, &input2_list);
10542
10543 val = rprivb_ref;
10544 if (new_vard != new_var)
10545 val = build_fold_addr_expr_loc (clause_loc, val);
10546
10547 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
10548 tseq = copy_gimple_seq_and_replace_locals (tseq);
10549 SET_DECL_VALUE_EXPR (new_vard, val);
10550 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
10551 if (is_for_simd)
10552 {
10553 SET_DECL_VALUE_EXPR (placeholder, var6);
10554 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
10555 }
10556 else
10557 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
10558 lower_omp (&tseq, ctx);
10559 if (y)
10560 SET_DECL_VALUE_EXPR (new_vard, y);
10561 else
10562 {
10563 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
10564 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
10565 }
10566 if (!is_for_simd)
10567 {
10568 SET_DECL_VALUE_EXPR (placeholder, new_var);
10569 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
10570 lower_omp (&tseq, ctx);
10571 }
10572 gimple_seq_add_seq (&input2_list, tseq);
10573
10574 x = build_outer_var_ref (var, ctx);
10575 x = lang_hooks.decls.omp_clause_assign_op (c, x, rpriva_ref);
10576 gimplify_and_add (x, &last_list);
10577
10578 x = lang_hooks.decls.omp_clause_assign_op (c, var2, rprivalmk_ref);
10579 gimplify_and_add (x, &reduc_list);
10580 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
10581 tseq = copy_gimple_seq_and_replace_locals (tseq);
10582 val = rprival_ref;
10583 if (new_vard != new_var)
10584 val = build_fold_addr_expr_loc (clause_loc, val);
10585 SET_DECL_VALUE_EXPR (new_vard, val);
10586 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
10587 SET_DECL_VALUE_EXPR (placeholder, var2);
10588 lower_omp (&tseq, ctx);
10589 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
10590 SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
10591 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
10592 if (y)
10593 SET_DECL_VALUE_EXPR (new_vard, y);
10594 else
10595 {
10596 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
10597 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
10598 }
10599 gimple_seq_add_seq (&reduc_list, tseq);
10600 x = lang_hooks.decls.omp_clause_assign_op (c, rprival_ref, var2);
10601 gimplify_and_add (x, &reduc_list);
10602
10603 x = lang_hooks.decls.omp_clause_dtor (c, var2);
10604 if (x)
10605 gimplify_and_add (x, dlist);
10606 }
10607 else
10608 {
10609 x = build_outer_var_ref (var, ctx);
10610 gimplify_assign (unshare_expr (var4), x, &thr01_list);
10611
10612 x = omp_reduction_init (c, TREE_TYPE (new_var));
10613 gimplify_assign (unshare_expr (var4), unshare_expr (x),
10614 &thrn1_list);
10615 gimplify_assign (unshare_expr (var4), x, &thr02_list);
10616
10617 gimplify_assign (unshare_expr (var4), rprivam1_ref, &thrn2_list);
10618
10619 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
10620 if (code == MINUS_EXPR)
10621 code = PLUS_EXPR;
10622
10623 if (is_for_simd)
10624 gimplify_assign (unshare_expr (rprivb_ref), var5, &scan1_list);
10625 else
10626 {
10627 if (ctx->scan_exclusive)
10628 gimplify_assign (unshare_expr (rprivb_ref), var2,
10629 &scan1_list);
10630 x = build2 (code, TREE_TYPE (new_var), var2, new_var);
10631 gimplify_assign (var2, x, &scan1_list);
10632 if (ctx->scan_inclusive)
10633 gimplify_assign (unshare_expr (rprivb_ref), var2,
10634 &scan1_list);
10635 }
10636
10637 gimplify_assign (unshare_expr (rpriva_ref), unshare_expr (var4),
10638 &mdlist);
10639
10640 x = build2 (code, TREE_TYPE (new_var), var4, rprivb_ref);
10641 gimplify_assign (is_for_simd ? var6 : new_var, x, &input2_list);
10642
10643 gimplify_assign (build_outer_var_ref (var, ctx), rpriva_ref,
10644 &last_list);
10645
10646 x = build2 (code, TREE_TYPE (new_var), rprivalmk_ref,
10647 unshare_expr (rprival_ref));
10648 gimplify_assign (rprival_ref, x, &reduc_list);
10649 }
10650 }
10651
10652 g = gimple_build_assign (ivar, PLUS_EXPR, ivar, size_one_node);
10653 gimple_seq_add_stmt (&scan1_list, g);
10654 g = gimple_build_assign (ivar, PLUS_EXPR, ivar, size_one_node);
10655 gimple_seq_add_stmt (gimple_omp_body_ptr (is_for_simd
10656 ? scan_stmt4 : scan_stmt2), g);
10657
10658 tree controlb = create_tmp_var (boolean_type_node);
10659 tree controlp = create_tmp_var (ptr_type_node);
10660 tree nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
10661 OMP_CLAUSE_DECL (nc) = controlb;
10662 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
10663 *cp1 = nc;
10664 cp1 = &OMP_CLAUSE_CHAIN (nc);
10665 nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
10666 OMP_CLAUSE_DECL (nc) = controlp;
10667 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
10668 *cp1 = nc;
10669 cp1 = &OMP_CLAUSE_CHAIN (nc);
10670 nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
10671 OMP_CLAUSE_DECL (nc) = controlb;
10672 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
10673 *cp2 = nc;
10674 cp2 = &OMP_CLAUSE_CHAIN (nc);
10675 nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
10676 OMP_CLAUSE_DECL (nc) = controlp;
10677 OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
10678 *cp2 = nc;
10679 cp2 = &OMP_CLAUSE_CHAIN (nc);
10680
10681 *cp1 = gimple_omp_for_clauses (stmt);
10682 gimple_omp_for_set_clauses (stmt, new_clauses1);
10683 *cp2 = gimple_omp_for_clauses (new_stmt);
10684 gimple_omp_for_set_clauses (new_stmt, new_clauses2);
10685
10686 if (is_for_simd)
10687 {
10688 gimple_seq_add_seq (gimple_omp_body_ptr (scan_stmt3), scan1_list);
10689 gimple_seq_add_seq (gimple_omp_body_ptr (input_stmt4), input2_list);
10690
10691 gsi_insert_seq_after (&input3_gsi, gimple_omp_body (input_stmt3),
10692 GSI_SAME_STMT);
10693 gsi_remove (&input3_gsi, true);
10694 gsi_insert_seq_after (&scan3_gsi, gimple_omp_body (scan_stmt3),
10695 GSI_SAME_STMT);
10696 gsi_remove (&scan3_gsi, true);
10697 gsi_insert_seq_after (&input4_gsi, gimple_omp_body (input_stmt4),
10698 GSI_SAME_STMT);
10699 gsi_remove (&input4_gsi, true);
10700 gsi_insert_seq_after (&scan4_gsi, gimple_omp_body (scan_stmt4),
10701 GSI_SAME_STMT);
10702 gsi_remove (&scan4_gsi, true);
10703 }
10704 else
10705 {
10706 gimple_omp_set_body (scan_stmt1, scan1_list);
10707 gimple_omp_set_body (input_stmt2, input2_list);
10708 }
10709
10710 gsi_insert_seq_after (&input1_gsi, gimple_omp_body (input_stmt1),
10711 GSI_SAME_STMT);
10712 gsi_remove (&input1_gsi, true);
10713 gsi_insert_seq_after (&scan1_gsi, gimple_omp_body (scan_stmt1),
10714 GSI_SAME_STMT);
10715 gsi_remove (&scan1_gsi, true);
10716 gsi_insert_seq_after (&input2_gsi, gimple_omp_body (input_stmt2),
10717 GSI_SAME_STMT);
10718 gsi_remove (&input2_gsi, true);
10719 gsi_insert_seq_after (&scan2_gsi, gimple_omp_body (scan_stmt2),
10720 GSI_SAME_STMT);
10721 gsi_remove (&scan2_gsi, true);
10722
10723 gimple_seq_add_seq (body_p, clist);
10724
10725 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
10726 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
10727 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
10728 g = gimple_build_cond (EQ_EXPR, thread_num, integer_zero_node, lab1, lab2);
10729 gimple_seq_add_stmt (body_p, g);
10730 g = gimple_build_label (lab1);
10731 gimple_seq_add_stmt (body_p, g);
10732 gimple_seq_add_seq (body_p, thr01_list);
10733 g = gimple_build_goto (lab3);
10734 gimple_seq_add_stmt (body_p, g);
10735 g = gimple_build_label (lab2);
10736 gimple_seq_add_stmt (body_p, g);
10737 gimple_seq_add_seq (body_p, thrn1_list);
10738 g = gimple_build_label (lab3);
10739 gimple_seq_add_stmt (body_p, g);
10740
10741 g = gimple_build_assign (ivar, size_zero_node);
10742 gimple_seq_add_stmt (body_p, g);
10743
10744 gimple_seq_add_stmt (body_p, stmt);
10745 gimple_seq_add_seq (body_p, body);
10746 gimple_seq_add_stmt (body_p, gimple_build_omp_continue (fd->loop.v,
10747 fd->loop.v));
10748
10749 g = gimple_build_omp_return (true);
10750 gimple_seq_add_stmt (body_p, g);
10751 gimple_seq_add_seq (body_p, mdlist);
10752
10753 lab1 = create_artificial_label (UNKNOWN_LOCATION);
10754 lab2 = create_artificial_label (UNKNOWN_LOCATION);
10755 g = gimple_build_cond (GT_EXPR, num_threads, integer_one_node, lab1, lab2);
10756 gimple_seq_add_stmt (body_p, g);
10757 g = gimple_build_label (lab1);
10758 gimple_seq_add_stmt (body_p, g);
10759
10760 g = omp_build_barrier (NULL);
10761 gimple_seq_add_stmt (body_p, g);
10762
10763 tree down = create_tmp_var (unsigned_type_node);
10764 g = gimple_build_assign (down, build_zero_cst (unsigned_type_node));
10765 gimple_seq_add_stmt (body_p, g);
10766
10767 g = gimple_build_assign (k, build_one_cst (unsigned_type_node));
10768 gimple_seq_add_stmt (body_p, g);
10769
10770 tree num_threadsu = create_tmp_var (unsigned_type_node);
10771 g = gimple_build_assign (num_threadsu, NOP_EXPR, num_threads);
10772 gimple_seq_add_stmt (body_p, g);
10773
10774 tree thread_numu = create_tmp_var (unsigned_type_node);
10775 g = gimple_build_assign (thread_numu, NOP_EXPR, thread_num);
10776 gimple_seq_add_stmt (body_p, g);
10777
10778 tree thread_nump1 = create_tmp_var (unsigned_type_node);
10779 g = gimple_build_assign (thread_nump1, PLUS_EXPR, thread_numu,
10780 build_int_cst (unsigned_type_node, 1));
10781 gimple_seq_add_stmt (body_p, g);
10782
10783 lab3 = create_artificial_label (UNKNOWN_LOCATION);
10784 g = gimple_build_label (lab3);
10785 gimple_seq_add_stmt (body_p, g);
10786
10787 tree twok = create_tmp_var (unsigned_type_node);
10788 g = gimple_build_assign (twok, LSHIFT_EXPR, k, integer_one_node);
10789 gimple_seq_add_stmt (body_p, g);
10790
10791 tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
10792 tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
10793 tree lab6 = create_artificial_label (UNKNOWN_LOCATION);
10794 g = gimple_build_cond (GT_EXPR, twok, num_threadsu, lab4, lab5);
10795 gimple_seq_add_stmt (body_p, g);
10796 g = gimple_build_label (lab4);
10797 gimple_seq_add_stmt (body_p, g);
10798 g = gimple_build_assign (down, build_all_ones_cst (unsigned_type_node));
10799 gimple_seq_add_stmt (body_p, g);
10800 g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
10801 gimple_seq_add_stmt (body_p, g);
10802
10803 g = gimple_build_cond (EQ_EXPR, k, num_threadsu, lab6, lab5);
10804 gimple_seq_add_stmt (body_p, g);
10805 g = gimple_build_label (lab6);
10806 gimple_seq_add_stmt (body_p, g);
10807
10808 g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
10809 gimple_seq_add_stmt (body_p, g);
10810
10811 g = gimple_build_label (lab5);
10812 gimple_seq_add_stmt (body_p, g);
10813
10814 g = gimple_build_assign (twok, LSHIFT_EXPR, k, integer_one_node);
10815 gimple_seq_add_stmt (body_p, g);
10816
10817 tree cplx = create_tmp_var (build_complex_type (unsigned_type_node, false));
10818 g = gimple_build_call_internal (IFN_MUL_OVERFLOW, 2, thread_nump1, twok);
10819 gimple_call_set_lhs (g, cplx);
10820 gimple_seq_add_stmt (body_p, g);
10821 tree mul = create_tmp_var (unsigned_type_node);
10822 g = gimple_build_assign (mul, REALPART_EXPR,
10823 build1 (REALPART_EXPR, unsigned_type_node, cplx));
10824 gimple_seq_add_stmt (body_p, g);
10825 tree ovf = create_tmp_var (unsigned_type_node);
10826 g = gimple_build_assign (ovf, IMAGPART_EXPR,
10827 build1 (IMAGPART_EXPR, unsigned_type_node, cplx));
10828 gimple_seq_add_stmt (body_p, g);
10829
10830 tree lab7 = create_artificial_label (UNKNOWN_LOCATION);
10831 tree lab8 = create_artificial_label (UNKNOWN_LOCATION);
10832 g = gimple_build_cond (EQ_EXPR, ovf, build_zero_cst (unsigned_type_node),
10833 lab7, lab8);
10834 gimple_seq_add_stmt (body_p, g);
10835 g = gimple_build_label (lab7);
10836 gimple_seq_add_stmt (body_p, g);
10837
10838 tree andv = create_tmp_var (unsigned_type_node);
10839 g = gimple_build_assign (andv, BIT_AND_EXPR, k, down);
10840 gimple_seq_add_stmt (body_p, g);
10841 tree andvm1 = create_tmp_var (unsigned_type_node);
10842 g = gimple_build_assign (andvm1, PLUS_EXPR, andv,
10843 build_minus_one_cst (unsigned_type_node));
10844 gimple_seq_add_stmt (body_p, g);
10845
10846 g = gimple_build_assign (l, PLUS_EXPR, mul, andvm1);
10847 gimple_seq_add_stmt (body_p, g);
10848
10849 tree lab9 = create_artificial_label (UNKNOWN_LOCATION);
10850 g = gimple_build_cond (LT_EXPR, l, num_threadsu, lab9, lab8);
10851 gimple_seq_add_stmt (body_p, g);
10852 g = gimple_build_label (lab9);
10853 gimple_seq_add_stmt (body_p, g);
10854 gimple_seq_add_seq (body_p, reduc_list);
10855 g = gimple_build_label (lab8);
10856 gimple_seq_add_stmt (body_p, g);
10857
10858 tree lab10 = create_artificial_label (UNKNOWN_LOCATION);
10859 tree lab11 = create_artificial_label (UNKNOWN_LOCATION);
10860 tree lab12 = create_artificial_label (UNKNOWN_LOCATION);
10861 g = gimple_build_cond (EQ_EXPR, down, build_zero_cst (unsigned_type_node),
10862 lab10, lab11);
10863 gimple_seq_add_stmt (body_p, g);
10864 g = gimple_build_label (lab10);
10865 gimple_seq_add_stmt (body_p, g);
10866 g = gimple_build_assign (k, LSHIFT_EXPR, k, integer_one_node);
10867 gimple_seq_add_stmt (body_p, g);
10868 g = gimple_build_goto (lab12);
10869 gimple_seq_add_stmt (body_p, g);
10870 g = gimple_build_label (lab11);
10871 gimple_seq_add_stmt (body_p, g);
10872 g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
10873 gimple_seq_add_stmt (body_p, g);
10874 g = gimple_build_label (lab12);
10875 gimple_seq_add_stmt (body_p, g);
10876
10877 g = omp_build_barrier (NULL);
10878 gimple_seq_add_stmt (body_p, g);
10879
10880 g = gimple_build_cond (NE_EXPR, k, build_zero_cst (unsigned_type_node),
10881 lab3, lab2);
10882 gimple_seq_add_stmt (body_p, g);
10883
10884 g = gimple_build_label (lab2);
10885 gimple_seq_add_stmt (body_p, g);
10886
10887 lab1 = create_artificial_label (UNKNOWN_LOCATION);
10888 lab2 = create_artificial_label (UNKNOWN_LOCATION);
10889 lab3 = create_artificial_label (UNKNOWN_LOCATION);
10890 g = gimple_build_cond (EQ_EXPR, thread_num, integer_zero_node, lab1, lab2);
10891 gimple_seq_add_stmt (body_p, g);
10892 g = gimple_build_label (lab1);
10893 gimple_seq_add_stmt (body_p, g);
10894 gimple_seq_add_seq (body_p, thr02_list);
10895 g = gimple_build_goto (lab3);
10896 gimple_seq_add_stmt (body_p, g);
10897 g = gimple_build_label (lab2);
10898 gimple_seq_add_stmt (body_p, g);
10899 gimple_seq_add_seq (body_p, thrn2_list);
10900 g = gimple_build_label (lab3);
10901 gimple_seq_add_stmt (body_p, g);
10902
10903 g = gimple_build_assign (ivar, size_zero_node);
10904 gimple_seq_add_stmt (body_p, g);
10905 gimple_seq_add_stmt (body_p, new_stmt);
10906 gimple_seq_add_seq (body_p, new_body);
10907
10908 gimple_seq new_dlist = NULL;
10909 lab1 = create_artificial_label (UNKNOWN_LOCATION);
10910 lab2 = create_artificial_label (UNKNOWN_LOCATION);
10911 tree num_threadsm1 = create_tmp_var (integer_type_node);
10912 g = gimple_build_assign (num_threadsm1, PLUS_EXPR, num_threads,
10913 integer_minus_one_node);
10914 gimple_seq_add_stmt (&new_dlist, g);
10915 g = gimple_build_cond (EQ_EXPR, thread_num, num_threadsm1, lab1, lab2);
10916 gimple_seq_add_stmt (&new_dlist, g);
10917 g = gimple_build_label (lab1);
10918 gimple_seq_add_stmt (&new_dlist, g);
10919 gimple_seq_add_seq (&new_dlist, last_list);
10920 g = gimple_build_label (lab2);
10921 gimple_seq_add_stmt (&new_dlist, g);
10922 gimple_seq_add_seq (&new_dlist, *dlist);
10923 *dlist = new_dlist;
10924 }
10925
/* Lower code for an OMP loop directive.  Replaces the GIMPLE_OMP_FOR at
   *GSI_P (in context CTX) with a GIMPLE_BIND that contains, in order:
   lowered data-sharing setup code, the loop statement itself followed by
   its lowered body, a GIMPLE_OMP_CONTINUE marker, clause teardown code,
   and a GIMPLE_OMP_RETURN region-exit marker.  */

static void
lower_omp_for (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree *rhs_p, block;
  struct omp_for_data fd, *fdp = NULL;
  gomp_for *stmt = as_a <gomp_for *> (gsi_stmt (*gsi_p));
  gbind *new_stmt;
  gimple_seq omp_for_body, body, dlist, tred_ilist = NULL, tred_dlist = NULL;
  gimple_seq cnt_list = NULL, clist = NULL;
  gimple_seq oacc_head = NULL, oacc_tail = NULL;
  size_t i;

  push_gimplify_context ();

  /* Lower the pre-body first; its statements are emitted ahead of the
     loop below.  */
  lower_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);

  block = make_node (BLOCK);
  new_stmt = gimple_build_bind (NULL, NULL, block);
  /* Replace at gsi right away, so that 'stmt' is no member
     of a sequence anymore as we're going to add to a different
     one below.  */
  gsi_replace (gsi_p, new_stmt, true);

  /* Move declaration of temporaries in the loop body before we make
     it go away.  */
  omp_for_body = gimple_omp_body (stmt);
  if (!gimple_seq_empty_p (omp_for_body)
      && gimple_code (gimple_seq_first_stmt (omp_for_body)) == GIMPLE_BIND)
    {
      gbind *inner_bind
	= as_a <gbind *> (gimple_seq_first_stmt (omp_for_body));
      tree vars = gimple_bind_vars (inner_bind);
      gimple_bind_append_vars (new_stmt, vars);
      /* bind_vars/BLOCK_VARS are being moved to new_stmt/block, don't
	 keep them on the inner_bind and it's block.  */
      gimple_bind_set_vars (inner_bind, NULL_TREE);
      if (gimple_bind_block (inner_bind))
	BLOCK_VARS (gimple_bind_block (inner_bind)) = NULL_TREE;
    }

  /* For a loop combined into an enclosing construct, prepend _looptemp_
     clauses that communicate bounds/counts to the outer statement.  */
  if (gimple_omp_for_combined_into_p (stmt))
    {
      omp_extract_for_data (stmt, &fd, NULL);
      fdp = &fd;

      /* We need two temporaries with fd.loop.v type (istart/iend)
	 and then (fd.collapse - 1) temporaries with the same
	 type for count2 ... countN-1 vars if not constant.  */
      size_t count = 2;
      tree type = fd.iter_type;
      if (fd.collapse > 1
	  && TREE_CODE (fd.loop.n2) != INTEGER_CST)
	count += fd.collapse - 1;
      size_t count2 = 0;
      tree type2 = NULL_TREE;
      bool taskreg_for
	= (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR
	   || gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP);
      tree outerc = NULL, *pc = gimple_omp_for_clauses_ptr (stmt);
      tree simtc = NULL;
      tree clauses = *pc;
      /* Non-rectangular adjacent-pair loops with a signed index need
	 three extra temporaries of the index type (count2 = 3).  */
      if (fd.collapse > 1
	  && fd.non_rect
	  && fd.last_nonrect == fd.first_nonrect + 1
	  && TREE_CODE (fd.loop.n2) != INTEGER_CST)
	if (tree v = gimple_omp_for_index (stmt, fd.last_nonrect))
	  if (!TYPE_UNSIGNED (TREE_TYPE (v)))
	    {
	      v = gimple_omp_for_index (stmt, fd.first_nonrect);
	      type2 = TREE_TYPE (v);
	      count++;
	      count2 = 3;
	    }
      if (taskreg_for)
	outerc
	  = omp_find_clause (gimple_omp_taskreg_clauses (ctx->outer->stmt),
			     OMP_CLAUSE__LOOPTEMP_);
      if (ctx->simt_stmt)
	simtc = omp_find_clause (gimple_omp_for_clauses (ctx->simt_stmt),
				 OMP_CLAUSE__LOOPTEMP_);
      for (i = 0; i < count + count2; i++)
	{
	  tree temp;
	  if (taskreg_for)
	    {
	      /* Reuse the decls already created on the enclosing
		 parallel/task via its _looptemp_ clauses.  */
	      gcc_assert (outerc);
	      temp = lookup_decl (OMP_CLAUSE_DECL (outerc), ctx->outer);
	      outerc = omp_find_clause (OMP_CLAUSE_CHAIN (outerc),
					OMP_CLAUSE__LOOPTEMP_);
	    }
	  else
	    {
	      /* If there are 2 adjacent SIMD stmts, one with _simt_
		 clause, another without, make sure they have the same
		 decls in _looptemp_ clauses, because the outer stmt
		 they are combined into will look up just one inner_stmt.  */
	      if (ctx->simt_stmt)
		temp = OMP_CLAUSE_DECL (simtc);
	      else
		temp = create_tmp_var (i >= count ? type2 : type);
	      insert_decl_map (&ctx->outer->cb, temp, temp);
	    }
	  *pc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
	  OMP_CLAUSE_DECL (*pc) = temp;
	  pc = &OMP_CLAUSE_CHAIN (*pc);
	  if (ctx->simt_stmt)
	    simtc = omp_find_clause (OMP_CLAUSE_CHAIN (simtc),
				     OMP_CLAUSE__LOOPTEMP_);
	}
      /* Re-attach the original clauses after the new _looptemp_ ones.  */
      *pc = clauses;
    }

  /* The pre-body and input clauses go before the lowered GIMPLE_OMP_FOR.  */
  dlist = NULL;
  body = NULL;
  /* Task reductions need a _reductemp_ clause plus init/fini sequences
     (tred_ilist / tred_dlist) bracketing everything else.  */
  tree rclauses
    = omp_task_reductions_find_first (gimple_omp_for_clauses (stmt), OMP_FOR,
				      OMP_CLAUSE_REDUCTION);
  tree rtmp = NULL_TREE;
  if (rclauses)
    {
      tree type = build_pointer_type (pointer_sized_int_node);
      tree temp = create_tmp_var (type);
      tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
      OMP_CLAUSE_DECL (c) = temp;
      OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (stmt);
      gimple_omp_for_set_clauses (stmt, c);
      lower_omp_task_reductions (ctx, OMP_FOR,
				 gimple_omp_for_clauses (stmt),
				 &tred_ilist, &tred_dlist);
      rclauses = c;
      rtmp = make_ssa_name (type);
      gimple_seq_add_stmt (&body, gimple_build_assign (rtmp, temp));
    }

  lower_lastprivate_conditional_clauses (gimple_omp_for_clauses_ptr (stmt),
					 ctx);

  lower_rec_input_clauses (gimple_omp_for_clauses (stmt), &body, &dlist, ctx,
			   fdp);
  gimple_seq_add_seq (rclauses ? &tred_ilist : &body,
		      gimple_omp_for_pre_body (stmt));

  lower_omp (gimple_omp_body_ptr (stmt), ctx);

  /* Lower the header expressions.  At this point, we can assume that
     the header is of the form:

	#pragma omp for (V = VAL1; V {<|>|<=|>=} VAL2; V = V [+-] VAL3)

     We just need to make sure that VAL1, VAL2 and VAL3 are lowered
     using the .omp_data_s mapping, if needed.  */
  for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
    {
      rhs_p = gimple_omp_for_initial_ptr (stmt, i);
      /* A TREE_VEC here holds the pieces of a non-rectangular bound;
	 elements 1 and 2 are the parts that may need a temporary.  */
      if (TREE_CODE (*rhs_p) == TREE_VEC)
	{
	  if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 1)))
	    TREE_VEC_ELT (*rhs_p, 1)
	      = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 1), &cnt_list);
	  if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 2)))
	    TREE_VEC_ELT (*rhs_p, 2)
	      = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 2), &cnt_list);
	}
      else if (!is_gimple_min_invariant (*rhs_p))
	*rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
      else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
	recompute_tree_invariant_for_addr_expr (*rhs_p);

      rhs_p = gimple_omp_for_final_ptr (stmt, i);
      if (TREE_CODE (*rhs_p) == TREE_VEC)
	{
	  if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 1)))
	    TREE_VEC_ELT (*rhs_p, 1)
	      = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 1), &cnt_list);
	  if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 2)))
	    TREE_VEC_ELT (*rhs_p, 2)
	      = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 2), &cnt_list);
	}
      else if (!is_gimple_min_invariant (*rhs_p))
	*rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
      else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
	recompute_tree_invariant_for_addr_expr (*rhs_p);

      rhs_p = &TREE_OPERAND (gimple_omp_for_incr (stmt, i), 1);
      if (!is_gimple_min_invariant (*rhs_p))
	*rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
    }
  if (rclauses)
    gimple_seq_add_seq (&tred_ilist, cnt_list);
  else
    gimple_seq_add_seq (&body, cnt_list);

  /* Once lowered, extract the bounds and clauses.  */
  omp_extract_for_data (stmt, &fd, NULL);

  if (is_gimple_omp_oacc (ctx->stmt)
      && !ctx_in_oacc_kernels_region (ctx))
    lower_oacc_head_tail (gimple_location (stmt),
			  gimple_omp_for_clauses (stmt),
			  &oacc_head, &oacc_tail, ctx);

  /* Add OpenACC partitioning and reduction markers just before the loop.  */
  if (oacc_head)
    gimple_seq_add_seq (&body, oacc_head);

  lower_omp_for_lastprivate (&fd, &body, &dlist, &clist, ctx);

  /* For worksharing loops, remap linear clause decls (and steps) into
     this context so the expansion pass sees the privatized copies.  */
  if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
    for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
	  && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
	{
	  OMP_CLAUSE_DECL (c) = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
	  if (DECL_P (OMP_CLAUSE_LINEAR_STEP (c)))
	    OMP_CLAUSE_LINEAR_STEP (c)
	      = maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_LINEAR_STEP (c),
						ctx);
	}

  /* Loops with inscan reductions get a dedicated two-pass lowering;
     otherwise the loop statement and its body are appended directly.  */
  if ((ctx->scan_inclusive || ctx->scan_exclusive)
      && gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
    lower_omp_for_scan (&body, &dlist, stmt, &fd, ctx);
  else
    {
      gimple_seq_add_stmt (&body, stmt);
      gimple_seq_add_seq (&body, gimple_omp_body (stmt));
    }

  gimple_seq_add_stmt (&body, gimple_build_omp_continue (fd.loop.v,
							 fd.loop.v));

  /* After the loop, add exit clauses.  */
  lower_reduction_clauses (gimple_omp_for_clauses (stmt), &body, &clist, ctx);

  /* Reduction stores collected in CLIST must run atomically; wrap them
     in GOMP_atomic_start/GOMP_atomic_end calls.  */
  if (clist)
    {
      tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START);
      gcall *g = gimple_build_call (fndecl, 0);
      gimple_seq_add_stmt (&body, g);
      gimple_seq_add_seq (&body, clist);
      fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END);
      g = gimple_build_call (fndecl, 0);
      gimple_seq_add_stmt (&body, g);
    }

  /* Cancellation jumps land here, after the reductions but before the
     destructor list.  */
  if (ctx->cancellable)
    gimple_seq_add_stmt (&body, gimple_build_label (ctx->cancel_label));

  gimple_seq_add_seq (&body, dlist);

  if (rclauses)
    {
      gimple_seq_add_seq (&tred_ilist, body);
      body = tred_ilist;
    }

  body = maybe_catch_exception (body);

  /* Region exit marker goes at the end of the loop body.  */
  gimple *g = gimple_build_omp_return (fd.have_nowait);
  gimple_seq_add_stmt (&body, g);

  gimple_seq_add_seq (&body, tred_dlist);

  maybe_add_implicit_barrier_cancel (ctx, g, &body);

  if (rclauses)
    OMP_CLAUSE_DECL (rclauses) = rtmp;

  /* Add OpenACC joining and reduction markers just after the loop.  */
  if (oacc_tail)
    gimple_seq_add_seq (&body, oacc_tail);

  pop_gimplify_context (new_stmt);

  gimple_bind_append_vars (new_stmt, ctx->block_vars);
  maybe_remove_omp_member_access_dummy_vars (new_stmt);
  BLOCK_VARS (block) = gimple_bind_vars (new_stmt);
  if (BLOCK_VARS (block))
    TREE_USED (block) = 1;

  /* The loop body and pre-body now live in the new bind; detach them
     from STMT so they aren't processed twice.  */
  gimple_bind_set_body (new_stmt, body);
  gimple_omp_set_body (stmt, NULL);
  gimple_omp_for_set_pre_body (stmt, NULL);
}
11214
11215 /* Callback for walk_stmts. Check if the current statement only contains
11216 GIMPLE_OMP_FOR or GIMPLE_OMP_SECTIONS. */
11217
11218 static tree
check_combined_parallel(gimple_stmt_iterator * gsi_p,bool * handled_ops_p,struct walk_stmt_info * wi)11219 check_combined_parallel (gimple_stmt_iterator *gsi_p,
11220 bool *handled_ops_p,
11221 struct walk_stmt_info *wi)
11222 {
11223 int *info = (int *) wi->info;
11224 gimple *stmt = gsi_stmt (*gsi_p);
11225
11226 *handled_ops_p = true;
11227 switch (gimple_code (stmt))
11228 {
11229 WALK_SUBSTMTS;
11230
11231 case GIMPLE_DEBUG:
11232 break;
11233 case GIMPLE_OMP_FOR:
11234 case GIMPLE_OMP_SECTIONS:
11235 *info = *info == 0 ? 1 : -1;
11236 break;
11237 default:
11238 *info = -1;
11239 break;
11240 }
11241 return NULL;
11242 }
11243
/* State threaded through the task-copyfn generation callbacks
   (task_copyfn_copy_decl, task_copyfn_remap_type).  */

struct omp_taskcopy_context
{
  /* This field must be at the beginning, as we do "inheritance": Some
     callback functions for tree-inline.c (e.g., omp_copy_decl)
     receive a copy_body_data pointer that is up-casted to an
     omp_context pointer.  */
  copy_body_data cb;
  /* OMP context of the task whose copy function is being built.  */
  omp_context *ctx;
};
11253
11254 static tree
task_copyfn_copy_decl(tree var,copy_body_data * cb)11255 task_copyfn_copy_decl (tree var, copy_body_data *cb)
11256 {
11257 struct omp_taskcopy_context *tcctx = (struct omp_taskcopy_context *) cb;
11258
11259 if (splay_tree_lookup (tcctx->ctx->sfield_map, (splay_tree_key) var))
11260 return create_tmp_var (TREE_TYPE (var));
11261
11262 return var;
11263 }
11264
11265 static tree
task_copyfn_remap_type(struct omp_taskcopy_context * tcctx,tree orig_type)11266 task_copyfn_remap_type (struct omp_taskcopy_context *tcctx, tree orig_type)
11267 {
11268 tree name, new_fields = NULL, type, f;
11269
11270 type = lang_hooks.types.make_type (RECORD_TYPE);
11271 name = DECL_NAME (TYPE_NAME (orig_type));
11272 name = build_decl (gimple_location (tcctx->ctx->stmt),
11273 TYPE_DECL, name, type);
11274 TYPE_NAME (type) = name;
11275
11276 for (f = TYPE_FIELDS (orig_type); f ; f = TREE_CHAIN (f))
11277 {
11278 tree new_f = copy_node (f);
11279 DECL_CONTEXT (new_f) = type;
11280 TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &tcctx->cb);
11281 TREE_CHAIN (new_f) = new_fields;
11282 walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &tcctx->cb, NULL);
11283 walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r, &tcctx->cb, NULL);
11284 walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
11285 &tcctx->cb, NULL);
11286 new_fields = new_f;
11287 tcctx->cb.decl_map->put (f, new_f);
11288 }
11289 TYPE_FIELDS (type) = nreverse (new_fields);
11290 layout_type (type);
11291 return type;
11292 }
11293
11294 /* Create task copyfn. */
11295
11296 static void
create_task_copyfn(gomp_task * task_stmt,omp_context * ctx)11297 create_task_copyfn (gomp_task *task_stmt, omp_context *ctx)
11298 {
11299 struct function *child_cfun;
11300 tree child_fn, t, c, src, dst, f, sf, arg, sarg, decl;
11301 tree record_type, srecord_type, bind, list;
11302 bool record_needs_remap = false, srecord_needs_remap = false;
11303 splay_tree_node n;
11304 struct omp_taskcopy_context tcctx;
11305 location_t loc = gimple_location (task_stmt);
11306 size_t looptempno = 0;
11307
11308 child_fn = gimple_omp_task_copy_fn (task_stmt);
11309 child_cfun = DECL_STRUCT_FUNCTION (child_fn);
11310 gcc_assert (child_cfun->cfg == NULL);
11311 DECL_SAVED_TREE (child_fn) = alloc_stmt_list ();
11312
11313 /* Reset DECL_CONTEXT on function arguments. */
11314 for (t = DECL_ARGUMENTS (child_fn); t; t = DECL_CHAIN (t))
11315 DECL_CONTEXT (t) = child_fn;
11316
11317 /* Populate the function. */
11318 push_gimplify_context ();
11319 push_cfun (child_cfun);
11320
11321 bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
11322 TREE_SIDE_EFFECTS (bind) = 1;
11323 list = NULL;
11324 DECL_SAVED_TREE (child_fn) = bind;
11325 DECL_SOURCE_LOCATION (child_fn) = gimple_location (task_stmt);
11326
11327 /* Remap src and dst argument types if needed. */
11328 record_type = ctx->record_type;
11329 srecord_type = ctx->srecord_type;
11330 for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
11331 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
11332 {
11333 record_needs_remap = true;
11334 break;
11335 }
11336 for (f = TYPE_FIELDS (srecord_type); f ; f = DECL_CHAIN (f))
11337 if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
11338 {
11339 srecord_needs_remap = true;
11340 break;
11341 }
11342
11343 if (record_needs_remap || srecord_needs_remap)
11344 {
11345 memset (&tcctx, '\0', sizeof (tcctx));
11346 tcctx.cb.src_fn = ctx->cb.src_fn;
11347 tcctx.cb.dst_fn = child_fn;
11348 tcctx.cb.src_node = cgraph_node::get (tcctx.cb.src_fn);
11349 gcc_checking_assert (tcctx.cb.src_node);
11350 tcctx.cb.dst_node = tcctx.cb.src_node;
11351 tcctx.cb.src_cfun = ctx->cb.src_cfun;
11352 tcctx.cb.copy_decl = task_copyfn_copy_decl;
11353 tcctx.cb.eh_lp_nr = 0;
11354 tcctx.cb.transform_call_graph_edges = CB_CGE_MOVE;
11355 tcctx.cb.decl_map = new hash_map<tree, tree>;
11356 tcctx.ctx = ctx;
11357
11358 if (record_needs_remap)
11359 record_type = task_copyfn_remap_type (&tcctx, record_type);
11360 if (srecord_needs_remap)
11361 srecord_type = task_copyfn_remap_type (&tcctx, srecord_type);
11362 }
11363 else
11364 tcctx.cb.decl_map = NULL;
11365
11366 arg = DECL_ARGUMENTS (child_fn);
11367 TREE_TYPE (arg) = build_pointer_type (record_type);
11368 sarg = DECL_CHAIN (arg);
11369 TREE_TYPE (sarg) = build_pointer_type (srecord_type);
11370
11371 /* First pass: initialize temporaries used in record_type and srecord_type
11372 sizes and field offsets. */
11373 if (tcctx.cb.decl_map)
11374 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
11375 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
11376 {
11377 tree *p;
11378
11379 decl = OMP_CLAUSE_DECL (c);
11380 p = tcctx.cb.decl_map->get (decl);
11381 if (p == NULL)
11382 continue;
11383 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
11384 sf = (tree) n->value;
11385 sf = *tcctx.cb.decl_map->get (sf);
11386 src = build_simple_mem_ref_loc (loc, sarg);
11387 src = omp_build_component_ref (src, sf);
11388 t = build2 (MODIFY_EXPR, TREE_TYPE (*p), *p, src);
11389 append_to_statement_list (t, &list);
11390 }
11391
11392 /* Second pass: copy shared var pointers and copy construct non-VLA
11393 firstprivate vars. */
11394 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
11395 switch (OMP_CLAUSE_CODE (c))
11396 {
11397 splay_tree_key key;
11398 case OMP_CLAUSE_SHARED:
11399 decl = OMP_CLAUSE_DECL (c);
11400 key = (splay_tree_key) decl;
11401 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
11402 key = (splay_tree_key) &DECL_UID (decl);
11403 n = splay_tree_lookup (ctx->field_map, key);
11404 if (n == NULL)
11405 break;
11406 f = (tree) n->value;
11407 if (tcctx.cb.decl_map)
11408 f = *tcctx.cb.decl_map->get (f);
11409 n = splay_tree_lookup (ctx->sfield_map, key);
11410 sf = (tree) n->value;
11411 if (tcctx.cb.decl_map)
11412 sf = *tcctx.cb.decl_map->get (sf);
11413 src = build_simple_mem_ref_loc (loc, sarg);
11414 src = omp_build_component_ref (src, sf);
11415 dst = build_simple_mem_ref_loc (loc, arg);
11416 dst = omp_build_component_ref (dst, f);
11417 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
11418 append_to_statement_list (t, &list);
11419 break;
11420 case OMP_CLAUSE_REDUCTION:
11421 case OMP_CLAUSE_IN_REDUCTION:
11422 decl = OMP_CLAUSE_DECL (c);
11423 if (TREE_CODE (decl) == MEM_REF)
11424 {
11425 decl = TREE_OPERAND (decl, 0);
11426 if (TREE_CODE (decl) == POINTER_PLUS_EXPR)
11427 decl = TREE_OPERAND (decl, 0);
11428 if (TREE_CODE (decl) == INDIRECT_REF
11429 || TREE_CODE (decl) == ADDR_EXPR)
11430 decl = TREE_OPERAND (decl, 0);
11431 }
11432 key = (splay_tree_key) decl;
11433 n = splay_tree_lookup (ctx->field_map, key);
11434 if (n == NULL)
11435 break;
11436 f = (tree) n->value;
11437 if (tcctx.cb.decl_map)
11438 f = *tcctx.cb.decl_map->get (f);
11439 n = splay_tree_lookup (ctx->sfield_map, key);
11440 sf = (tree) n->value;
11441 if (tcctx.cb.decl_map)
11442 sf = *tcctx.cb.decl_map->get (sf);
11443 src = build_simple_mem_ref_loc (loc, sarg);
11444 src = omp_build_component_ref (src, sf);
11445 if (decl != OMP_CLAUSE_DECL (c)
11446 && TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
11447 && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE)
11448 src = build_simple_mem_ref_loc (loc, src);
11449 dst = build_simple_mem_ref_loc (loc, arg);
11450 dst = omp_build_component_ref (dst, f);
11451 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
11452 append_to_statement_list (t, &list);
11453 break;
11454 case OMP_CLAUSE__LOOPTEMP_:
11455 /* Fields for first two _looptemp_ clauses are initialized by
11456 GOMP_taskloop*, the rest are handled like firstprivate. */
11457 if (looptempno < 2)
11458 {
11459 looptempno++;
11460 break;
11461 }
11462 /* FALLTHRU */
11463 case OMP_CLAUSE__REDUCTEMP_:
11464 case OMP_CLAUSE_FIRSTPRIVATE:
11465 decl = OMP_CLAUSE_DECL (c);
11466 if (is_variable_sized (decl))
11467 break;
11468 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
11469 if (n == NULL)
11470 break;
11471 f = (tree) n->value;
11472 if (tcctx.cb.decl_map)
11473 f = *tcctx.cb.decl_map->get (f);
11474 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
11475 if (n != NULL)
11476 {
11477 sf = (tree) n->value;
11478 if (tcctx.cb.decl_map)
11479 sf = *tcctx.cb.decl_map->get (sf);
11480 src = build_simple_mem_ref_loc (loc, sarg);
11481 src = omp_build_component_ref (src, sf);
11482 if (use_pointer_for_field (decl, NULL) || omp_is_reference (decl))
11483 src = build_simple_mem_ref_loc (loc, src);
11484 }
11485 else
11486 src = decl;
11487 dst = build_simple_mem_ref_loc (loc, arg);
11488 dst = omp_build_component_ref (dst, f);
11489 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
11490 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
11491 else
11492 {
11493 if (ctx->allocate_map)
11494 if (tree *allocatorp = ctx->allocate_map->get (decl))
11495 {
11496 tree allocator = *allocatorp;
11497 if (TREE_CODE (allocator) != INTEGER_CST)
11498 {
11499 n = splay_tree_lookup (ctx->sfield_map,
11500 (splay_tree_key) allocator);
11501 allocator = (tree) n->value;
11502 if (tcctx.cb.decl_map)
11503 allocator = *tcctx.cb.decl_map->get (allocator);
11504 tree a = build_simple_mem_ref_loc (loc, sarg);
11505 allocator = omp_build_component_ref (a, allocator);
11506 }
11507 allocator = fold_convert (pointer_sized_int_node, allocator);
11508 tree a = builtin_decl_explicit (BUILT_IN_GOMP_ALLOC);
11509 tree align = build_int_cst (size_type_node,
11510 DECL_ALIGN_UNIT (decl));
11511 tree sz = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (dst)));
11512 tree ptr = build_call_expr_loc (loc, a, 3, align, sz,
11513 allocator);
11514 ptr = fold_convert (TREE_TYPE (dst), ptr);
11515 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, ptr);
11516 append_to_statement_list (t, &list);
11517 dst = build_simple_mem_ref_loc (loc, dst);
11518 }
11519 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
11520 }
11521 append_to_statement_list (t, &list);
11522 break;
11523 case OMP_CLAUSE_PRIVATE:
11524 if (! OMP_CLAUSE_PRIVATE_OUTER_REF (c))
11525 break;
11526 decl = OMP_CLAUSE_DECL (c);
11527 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
11528 f = (tree) n->value;
11529 if (tcctx.cb.decl_map)
11530 f = *tcctx.cb.decl_map->get (f);
11531 n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
11532 if (n != NULL)
11533 {
11534 sf = (tree) n->value;
11535 if (tcctx.cb.decl_map)
11536 sf = *tcctx.cb.decl_map->get (sf);
11537 src = build_simple_mem_ref_loc (loc, sarg);
11538 src = omp_build_component_ref (src, sf);
11539 if (use_pointer_for_field (decl, NULL))
11540 src = build_simple_mem_ref_loc (loc, src);
11541 }
11542 else
11543 src = decl;
11544 dst = build_simple_mem_ref_loc (loc, arg);
11545 dst = omp_build_component_ref (dst, f);
11546 t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
11547 append_to_statement_list (t, &list);
11548 break;
11549 default:
11550 break;
11551 }
11552
11553 /* Last pass: handle VLA firstprivates. */
11554 if (tcctx.cb.decl_map)
11555 for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
11556 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
11557 {
11558 tree ind, ptr, df;
11559
11560 decl = OMP_CLAUSE_DECL (c);
11561 if (!is_variable_sized (decl))
11562 continue;
11563 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
11564 if (n == NULL)
11565 continue;
11566 f = (tree) n->value;
11567 f = *tcctx.cb.decl_map->get (f);
11568 gcc_assert (DECL_HAS_VALUE_EXPR_P (decl));
11569 ind = DECL_VALUE_EXPR (decl);
11570 gcc_assert (TREE_CODE (ind) == INDIRECT_REF);
11571 gcc_assert (DECL_P (TREE_OPERAND (ind, 0)));
11572 n = splay_tree_lookup (ctx->sfield_map,
11573 (splay_tree_key) TREE_OPERAND (ind, 0));
11574 sf = (tree) n->value;
11575 sf = *tcctx.cb.decl_map->get (sf);
11576 src = build_simple_mem_ref_loc (loc, sarg);
11577 src = omp_build_component_ref (src, sf);
11578 src = build_simple_mem_ref_loc (loc, src);
11579 dst = build_simple_mem_ref_loc (loc, arg);
11580 dst = omp_build_component_ref (dst, f);
11581 t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
11582 append_to_statement_list (t, &list);
11583 n = splay_tree_lookup (ctx->field_map,
11584 (splay_tree_key) TREE_OPERAND (ind, 0));
11585 df = (tree) n->value;
11586 df = *tcctx.cb.decl_map->get (df);
11587 ptr = build_simple_mem_ref_loc (loc, arg);
11588 ptr = omp_build_component_ref (ptr, df);
11589 t = build2 (MODIFY_EXPR, TREE_TYPE (ptr), ptr,
11590 build_fold_addr_expr_loc (loc, dst));
11591 append_to_statement_list (t, &list);
11592 }
11593
11594 t = build1 (RETURN_EXPR, void_type_node, NULL);
11595 append_to_statement_list (t, &list);
11596
11597 if (tcctx.cb.decl_map)
11598 delete tcctx.cb.decl_map;
11599 pop_gimplify_context (NULL);
11600 BIND_EXPR_BODY (bind) = list;
11601 pop_cfun ();
11602 }
11603
/* Lower OMP_CLAUSE_DEPEND clauses found in *PCLAUSES into a runtime
   dependence array.  Statements initializing the array are appended to
   *ISEQ, and a clobber of the array (it is dead after the construct) is
   appended to *OSEQ.  A new OMP_CLAUSE_DEPEND clause of kind
   OMP_CLAUSE_DEPEND_LAST holding the address of the array is prepended
   to *PCLAUSES so later lowering can pass it to the runtime.  */

static void
lower_depend_clauses (tree *pclauses, gimple_seq *iseq, gimple_seq *oseq)
{
  tree c, clauses;
  gimple *g;
  /* cnt[0] = # of out/inout, cnt[1] = # of mutexinoutset,
     cnt[2] = # of in, cnt[3] = # of depobj dependences.  IDX starts as
     the number of leading header slots in the array: 2 for the simple
     layout, bumped to 5 below when the extended layout is needed;
     afterwards it is reused as the next free array slot.  */
  size_t cnt[4] = { 0, 0, 0, 0 }, idx = 2, i;

  clauses = omp_find_clause (*pclauses, OMP_CLAUSE_DEPEND);
  gcc_assert (clauses);
  /* First pass: count the dependences of each kind.  */
  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
      switch (OMP_CLAUSE_DEPEND_KIND (c))
	{
	case OMP_CLAUSE_DEPEND_LAST:
	  /* Lowering already done at gimplification.  */
	  return;
	case OMP_CLAUSE_DEPEND_IN:
	  cnt[2]++;
	  break;
	case OMP_CLAUSE_DEPEND_OUT:
	case OMP_CLAUSE_DEPEND_INOUT:
	  cnt[0]++;
	  break;
	case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
	  cnt[1]++;
	  break;
	case OMP_CLAUSE_DEPEND_DEPOBJ:
	  cnt[3]++;
	  break;
	case OMP_CLAUSE_DEPEND_SOURCE:
	case OMP_CLAUSE_DEPEND_SINK:
	  /* FALLTHRU */
	default:
	  gcc_unreachable ();
	}
  /* mutexinoutset and depobj dependences require the extended 5-slot
     header layout.  */
  if (cnt[1] || cnt[3])
    idx = 5;
  size_t total = cnt[0] + cnt[1] + cnt[2] + cnt[3];
  /* The array holds IDX header slots followed by one pointer per
     dependence.  */
  tree type = build_array_type_nelts (ptr_type_node, total + idx);
  tree array = create_tmp_var (type);
  TREE_ADDRESSABLE (array) = 1;
  tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
		   NULL_TREE);
  if (idx == 5)
    {
      /* Extended layout: slot 0 is a 0 marker and slot 1 holds the
	 total number of dependences; in the simple layout slot 0 holds
	 the total directly.  */
      g = gimple_build_assign (r, build_int_cst (ptr_type_node, 0));
      gimple_seq_add_stmt (iseq, g);
      r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
		  NULL_TREE);
    }
  g = gimple_build_assign (r, build_int_cst (ptr_type_node, total));
  gimple_seq_add_stmt (iseq, g);
  /* Store the per-kind counts into the remaining header slots: just
     the out/inout count in the simple layout; out/inout, mutexinoutset
     and in counts in the extended one.  */
  for (i = 0; i < (idx == 5 ? 3 : 1); i++)
    {
      r = build4 (ARRAY_REF, ptr_type_node, array,
		  size_int (i + 1 + (idx == 5)), NULL_TREE, NULL_TREE);
      g = gimple_build_assign (r, build_int_cst (ptr_type_node, cnt[i]));
      gimple_seq_add_stmt (iseq, g);
    }
  /* Second pass: emit the dependence addresses grouped by kind, in the
     order out/inout, mutexinoutset, in, depobj, consuming IDX as the
     running array index.  */
  for (i = 0; i < 4; i++)
    {
      if (cnt[i] == 0)
	continue;
      for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
	  continue;
	else
	  {
	    /* Only handle clauses belonging to the current group I.  */
	    switch (OMP_CLAUSE_DEPEND_KIND (c))
	      {
	      case OMP_CLAUSE_DEPEND_IN:
		if (i != 2)
		  continue;
		break;
	      case OMP_CLAUSE_DEPEND_OUT:
	      case OMP_CLAUSE_DEPEND_INOUT:
		if (i != 0)
		  continue;
		break;
	      case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
		if (i != 1)
		  continue;
		break;
	      case OMP_CLAUSE_DEPEND_DEPOBJ:
		if (i != 3)
		  continue;
		break;
	      default:
		gcc_unreachable ();
	      }
	    tree t = OMP_CLAUSE_DECL (c);
	    t = fold_convert (ptr_type_node, t);
	    gimplify_expr (&t, iseq, NULL, is_gimple_val, fb_rvalue);
	    r = build4 (ARRAY_REF, ptr_type_node, array, size_int (idx++),
			NULL_TREE, NULL_TREE);
	    g = gimple_build_assign (r, t);
	    gimple_seq_add_stmt (iseq, g);
	  }
    }
  /* Prepend a DEPEND_LAST clause carrying the array's address; it marks
     the depend clauses as lowered and hands the array to later
     expansion.  */
  c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
  OMP_CLAUSE_DEPEND_KIND (c) = OMP_CLAUSE_DEPEND_LAST;
  OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
  OMP_CLAUSE_CHAIN (c) = *pclauses;
  *pclauses = c;
  /* The array's lifetime ends after the construct.  */
  tree clobber = build_clobber (type);
  g = gimple_build_assign (array, clobber);
  gimple_seq_add_stmt (oseq, g);
}
11712
11713 /* Lower the OpenMP parallel or task directive in the current statement
11714 in GSI_P. CTX holds context information for the directive. */
11715
static void
lower_omp_taskreg (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree clauses;
  tree child_fn, t;
  gimple *stmt = gsi_stmt (*gsi_p);
  /* PAR_BIND is the construct body's outermost bind; BIND is the new
     bind wrapping send clauses + the construct; DEP_BIND additionally
     wraps depend/task-reduction setup when needed.  */
  gbind *par_bind, *bind, *dep_bind = NULL;
  gimple_seq par_body;
  location_t loc = gimple_location (stmt);

  clauses = gimple_omp_taskreg_clauses (stmt);
  if (gimple_code (stmt) == GIMPLE_OMP_TASK
      && gimple_omp_task_taskwait_p (stmt))
    {
      /* A taskwait-with-depend pseudo-task has no body.  */
      par_bind = NULL;
      par_body = NULL;
    }
  else
    {
      par_bind
	= as_a <gbind *> (gimple_seq_first_stmt (gimple_omp_body (stmt)));
      par_body = gimple_bind_body (par_bind);
    }
  child_fn = ctx->cb.dst_fn;
  if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL
      && !gimple_omp_parallel_combined_p (stmt))
    {
      /* Detect a parallel whose body is a single workshare construct,
	 so expansion can use the combined GOMP entry points.  */
      struct walk_stmt_info wi;
      int ws_num = 0;

      memset (&wi, 0, sizeof (wi));
      wi.info = &ws_num;
      wi.val_only = true;
      walk_gimple_seq (par_body, check_combined_parallel, NULL, &wi);
      if (ws_num == 1)
	gimple_omp_parallel_set_combined_p (stmt, true);
    }
  /* Lower depend clauses on a task into a runtime dependence array;
     the setup/teardown goes into DEP_ILIST/DEP_OLIST around the
     construct inside DEP_BIND.  */
  gimple_seq dep_ilist = NULL;
  gimple_seq dep_olist = NULL;
  if (gimple_code (stmt) == GIMPLE_OMP_TASK
      && omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
    {
      push_gimplify_context ();
      dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
      lower_depend_clauses (gimple_omp_task_clauses_ptr (stmt),
			    &dep_ilist, &dep_olist);
    }

  if (gimple_code (stmt) == GIMPLE_OMP_TASK
      && gimple_omp_task_taskwait_p (stmt))
    {
      /* Taskwait-with-depend: nothing to lower beyond the depend
	 array; wrap the statement and return.  */
      if (dep_bind)
	{
	  gsi_replace (gsi_p, dep_bind, true);
	  gimple_bind_add_seq (dep_bind, dep_ilist);
	  gimple_bind_add_stmt (dep_bind, stmt);
	  gimple_bind_add_seq (dep_bind, dep_olist);
	  pop_gimplify_context (dep_bind);
	}
      return;
    }

  /* If the task needs a separate sender record, build the copy
     function that initializes it.  */
  if (ctx->srecord_type)
    create_task_copyfn (as_a <gomp_task *> (stmt), ctx);

  /* Task-reduction setup/teardown for taskloop reductions and parallel
     _reductemp_ clauses, emitted around the construct.  */
  gimple_seq tskred_ilist = NULL;
  gimple_seq tskred_olist = NULL;
  if ((is_task_ctx (ctx)
       && gimple_omp_task_taskloop_p (ctx->stmt)
       && omp_find_clause (gimple_omp_task_clauses (ctx->stmt),
			   OMP_CLAUSE_REDUCTION))
      || (is_parallel_ctx (ctx)
	  && omp_find_clause (gimple_omp_parallel_clauses (stmt),
			      OMP_CLAUSE__REDUCTEMP_)))
    {
      if (dep_bind == NULL)
	{
	  push_gimplify_context ();
	  dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
	}
      lower_omp_task_reductions (ctx, is_task_ctx (ctx) ? OMP_TASKLOOP
							: OMP_PARALLEL,
				 gimple_omp_taskreg_clauses (ctx->stmt),
				 &tskred_ilist, &tskred_olist);
    }

  push_gimplify_context ();

  /* Lower data-sharing clauses on the receiving (child) side and the
     body itself.  */
  gimple_seq par_olist = NULL;
  gimple_seq par_ilist = NULL;
  gimple_seq par_rlist = NULL;
  lower_rec_input_clauses (clauses, &par_ilist, &par_olist, ctx, NULL);
  lower_omp (&par_body, ctx);
  if (gimple_code (stmt) != GIMPLE_OMP_TASK)
    lower_reduction_clauses (clauses, &par_rlist, NULL, ctx);

  /* Declare all the variables created by mapping and the variables
     declared in the scope of the parallel body.  */
  record_vars_into (ctx->block_vars, child_fn);
  maybe_remove_omp_member_access_dummy_vars (par_bind);
  record_vars_into (gimple_bind_vars (par_bind), child_fn);

  if (ctx->record_type)
    {
      /* The sender record (.omp_data_o) marshals shared data to the
	 child function.  */
      ctx->sender_decl
	= create_tmp_var (ctx->srecord_type ? ctx->srecord_type
			  : ctx->record_type, ".omp_data_o");
      DECL_NAMELESS (ctx->sender_decl) = 1;
      TREE_ADDRESSABLE (ctx->sender_decl) = 1;
      gimple_omp_taskreg_set_data_arg (stmt, ctx->sender_decl);
    }

  /* Lower the sending (parent) side: fill the sender record before the
     construct (ILIST) and copy back after it (OLIST).  */
  gimple_seq olist = NULL;
  gimple_seq ilist = NULL;
  lower_send_clauses (clauses, &ilist, &olist, ctx);
  lower_send_shared_vars (&ilist, &olist, ctx);

  if (ctx->record_type)
    {
      /* The sender record is dead once the construct is over.  */
      tree clobber = build_clobber (TREE_TYPE (ctx->sender_decl));
      gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
							clobber));
    }

  /* Once all the expansions are done, sequence all the different
     fragments inside gimple_omp_body.  */

  gimple_seq new_body = NULL;

  if (ctx->record_type)
    {
      t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
      /* fixup_child_record_type might have changed receiver_decl's type.  */
      t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
      gimple_seq_add_stmt (&new_body,
			   gimple_build_assign (ctx->receiver_decl, t));
    }

  gimple_seq_add_seq (&new_body, par_ilist);
  gimple_seq_add_seq (&new_body, par_body);
  gimple_seq_add_seq (&new_body, par_rlist);
  if (ctx->cancellable)
    gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
  gimple_seq_add_seq (&new_body, par_olist);
  new_body = maybe_catch_exception (new_body);
  if (gimple_code (stmt) == GIMPLE_OMP_TASK)
    gimple_seq_add_stmt (&new_body,
			 gimple_build_omp_continue (integer_zero_node,
						    integer_zero_node));
  gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
  gimple_omp_set_body (stmt, new_body);

  /* Wrap the construct together with the send-side sequences in a new
     bind, reusing the body bind's BLOCK unless DEP_BIND already owns
     a fresh one.  */
  if (dep_bind && gimple_bind_block (par_bind) == NULL_TREE)
    bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
  else
    bind = gimple_build_bind (NULL, NULL, gimple_bind_block (par_bind));
  gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
  gimple_bind_add_seq (bind, ilist);
  gimple_bind_add_stmt (bind, stmt);
  gimple_bind_add_seq (bind, olist);

  pop_gimplify_context (NULL);

  if (dep_bind)
    {
      /* Nest BIND inside DEP_BIND between the depend/task-reduction
	 setup and teardown sequences.  */
      gimple_bind_add_seq (dep_bind, dep_ilist);
      gimple_bind_add_seq (dep_bind, tskred_ilist);
      gimple_bind_add_stmt (dep_bind, bind);
      gimple_bind_add_seq (dep_bind, tskred_olist);
      gimple_bind_add_seq (dep_bind, dep_olist);
      pop_gimplify_context (dep_bind);
    }
}
11889
11890 /* Lower the GIMPLE_OMP_TARGET in the current statement
11891 in GSI_P. CTX holds context information for the directive. */
11892
11893 static void
lower_omp_target(gimple_stmt_iterator * gsi_p,omp_context * ctx)11894 lower_omp_target (gimple_stmt_iterator *gsi_p, omp_context *ctx)
11895 {
11896 tree clauses;
11897 tree child_fn, t, c;
11898 gomp_target *stmt = as_a <gomp_target *> (gsi_stmt (*gsi_p));
11899 gbind *tgt_bind, *bind, *dep_bind = NULL;
11900 gimple_seq tgt_body, olist, ilist, fplist, new_body;
11901 location_t loc = gimple_location (stmt);
11902 bool offloaded, data_region;
11903 unsigned int map_cnt = 0;
11904
11905 offloaded = is_gimple_omp_offloaded (stmt);
11906 switch (gimple_omp_target_kind (stmt))
11907 {
11908 case GF_OMP_TARGET_KIND_REGION:
11909 case GF_OMP_TARGET_KIND_UPDATE:
11910 case GF_OMP_TARGET_KIND_ENTER_DATA:
11911 case GF_OMP_TARGET_KIND_EXIT_DATA:
11912 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
11913 case GF_OMP_TARGET_KIND_OACC_KERNELS:
11914 case GF_OMP_TARGET_KIND_OACC_SERIAL:
11915 case GF_OMP_TARGET_KIND_OACC_UPDATE:
11916 case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA:
11917 case GF_OMP_TARGET_KIND_OACC_DECLARE:
11918 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED:
11919 case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE:
11920 data_region = false;
11921 break;
11922 case GF_OMP_TARGET_KIND_DATA:
11923 case GF_OMP_TARGET_KIND_OACC_DATA:
11924 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
11925 case GF_OMP_TARGET_KIND_OACC_DATA_KERNELS:
11926 data_region = true;
11927 break;
11928 default:
11929 gcc_unreachable ();
11930 }
11931
11932 clauses = gimple_omp_target_clauses (stmt);
11933
11934 gimple_seq dep_ilist = NULL;
11935 gimple_seq dep_olist = NULL;
11936 if (omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
11937 {
11938 push_gimplify_context ();
11939 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
11940 lower_depend_clauses (gimple_omp_target_clauses_ptr (stmt),
11941 &dep_ilist, &dep_olist);
11942 }
11943
11944 tgt_bind = NULL;
11945 tgt_body = NULL;
11946 if (offloaded)
11947 {
11948 tgt_bind = gimple_seq_first_stmt_as_a_bind (gimple_omp_body (stmt));
11949 tgt_body = gimple_bind_body (tgt_bind);
11950 }
11951 else if (data_region)
11952 tgt_body = gimple_omp_body (stmt);
11953 child_fn = ctx->cb.dst_fn;
11954
11955 push_gimplify_context ();
11956 fplist = NULL;
11957
11958 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
11959 switch (OMP_CLAUSE_CODE (c))
11960 {
11961 tree var, x;
11962
11963 default:
11964 break;
11965 case OMP_CLAUSE_MAP:
11966 #if CHECKING_P
11967 /* First check what we're prepared to handle in the following. */
11968 switch (OMP_CLAUSE_MAP_KIND (c))
11969 {
11970 case GOMP_MAP_ALLOC:
11971 case GOMP_MAP_TO:
11972 case GOMP_MAP_FROM:
11973 case GOMP_MAP_TOFROM:
11974 case GOMP_MAP_POINTER:
11975 case GOMP_MAP_TO_PSET:
11976 case GOMP_MAP_DELETE:
11977 case GOMP_MAP_RELEASE:
11978 case GOMP_MAP_ALWAYS_TO:
11979 case GOMP_MAP_ALWAYS_FROM:
11980 case GOMP_MAP_ALWAYS_TOFROM:
11981 case GOMP_MAP_FIRSTPRIVATE_POINTER:
11982 case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
11983 case GOMP_MAP_STRUCT:
11984 case GOMP_MAP_ALWAYS_POINTER:
11985 case GOMP_MAP_ATTACH:
11986 case GOMP_MAP_DETACH:
11987 break;
11988 case GOMP_MAP_IF_PRESENT:
11989 case GOMP_MAP_FORCE_ALLOC:
11990 case GOMP_MAP_FORCE_TO:
11991 case GOMP_MAP_FORCE_FROM:
11992 case GOMP_MAP_FORCE_TOFROM:
11993 case GOMP_MAP_FORCE_PRESENT:
11994 case GOMP_MAP_FORCE_DEVICEPTR:
11995 case GOMP_MAP_DEVICE_RESIDENT:
11996 case GOMP_MAP_LINK:
11997 case GOMP_MAP_FORCE_DETACH:
11998 gcc_assert (is_gimple_omp_oacc (stmt));
11999 break;
12000 default:
12001 gcc_unreachable ();
12002 }
12003 #endif
12004 /* FALLTHRU */
12005 case OMP_CLAUSE_TO:
12006 case OMP_CLAUSE_FROM:
12007 oacc_firstprivate:
12008 var = OMP_CLAUSE_DECL (c);
12009 if (!DECL_P (var))
12010 {
12011 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP
12012 || (!OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
12013 && (OMP_CLAUSE_MAP_KIND (c)
12014 != GOMP_MAP_FIRSTPRIVATE_POINTER)))
12015 map_cnt++;
12016 continue;
12017 }
12018
12019 if (DECL_SIZE (var)
12020 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
12021 {
12022 tree var2 = DECL_VALUE_EXPR (var);
12023 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
12024 var2 = TREE_OPERAND (var2, 0);
12025 gcc_assert (DECL_P (var2));
12026 var = var2;
12027 }
12028
12029 if (offloaded
12030 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12031 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
12032 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
12033 {
12034 if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
12035 {
12036 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx))
12037 && varpool_node::get_create (var)->offloadable)
12038 continue;
12039
12040 tree type = build_pointer_type (TREE_TYPE (var));
12041 tree new_var = lookup_decl (var, ctx);
12042 x = create_tmp_var_raw (type, get_name (new_var));
12043 gimple_add_tmp_var (x);
12044 x = build_simple_mem_ref (x);
12045 SET_DECL_VALUE_EXPR (new_var, x);
12046 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12047 }
12048 continue;
12049 }
12050
12051 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12052 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
12053 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)
12054 && is_omp_target (stmt))
12055 {
12056 gcc_assert (maybe_lookup_field (c, ctx));
12057 map_cnt++;
12058 continue;
12059 }
12060
12061 if (!maybe_lookup_field (var, ctx))
12062 continue;
12063
12064 /* Don't remap compute constructs' reduction variables, because the
12065 intermediate result must be local to each gang. */
12066 if (offloaded && !(OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12067 && OMP_CLAUSE_MAP_IN_REDUCTION (c)))
12068 {
12069 x = build_receiver_ref (var, true, ctx);
12070 tree new_var = lookup_decl (var, ctx);
12071
12072 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12073 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
12074 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
12075 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
12076 x = build_simple_mem_ref (x);
12077 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
12078 {
12079 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
12080 if (omp_is_reference (new_var)
12081 && (TREE_CODE (TREE_TYPE (new_var)) != POINTER_TYPE
12082 || DECL_BY_REFERENCE (var)))
12083 {
12084 /* Create a local object to hold the instance
12085 value. */
12086 tree type = TREE_TYPE (TREE_TYPE (new_var));
12087 const char *id = IDENTIFIER_POINTER (DECL_NAME (new_var));
12088 tree inst = create_tmp_var (type, id);
12089 gimplify_assign (inst, fold_indirect_ref (x), &fplist);
12090 x = build_fold_addr_expr (inst);
12091 }
12092 gimplify_assign (new_var, x, &fplist);
12093 }
12094 else if (DECL_P (new_var))
12095 {
12096 SET_DECL_VALUE_EXPR (new_var, x);
12097 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12098 }
12099 else
12100 gcc_unreachable ();
12101 }
12102 map_cnt++;
12103 break;
12104
12105 case OMP_CLAUSE_FIRSTPRIVATE:
12106 gcc_checking_assert (offloaded);
12107 if (is_gimple_omp_oacc (ctx->stmt))
12108 {
12109 /* No 'firstprivate' clauses on OpenACC 'kernels'. */
12110 gcc_checking_assert (!is_oacc_kernels (ctx));
12111 /* Likewise, on OpenACC 'kernels' decomposed parts. */
12112 gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx));
12113
12114 goto oacc_firstprivate;
12115 }
12116 map_cnt++;
12117 var = OMP_CLAUSE_DECL (c);
12118 if (!omp_is_reference (var)
12119 && !is_gimple_reg_type (TREE_TYPE (var)))
12120 {
12121 tree new_var = lookup_decl (var, ctx);
12122 if (is_variable_sized (var))
12123 {
12124 tree pvar = DECL_VALUE_EXPR (var);
12125 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
12126 pvar = TREE_OPERAND (pvar, 0);
12127 gcc_assert (DECL_P (pvar));
12128 tree new_pvar = lookup_decl (pvar, ctx);
12129 x = build_fold_indirect_ref (new_pvar);
12130 TREE_THIS_NOTRAP (x) = 1;
12131 }
12132 else
12133 x = build_receiver_ref (var, true, ctx);
12134 SET_DECL_VALUE_EXPR (new_var, x);
12135 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12136 }
12137 break;
12138
12139 case OMP_CLAUSE_PRIVATE:
12140 gcc_checking_assert (offloaded);
12141 if (is_gimple_omp_oacc (ctx->stmt))
12142 {
12143 /* No 'private' clauses on OpenACC 'kernels'. */
12144 gcc_checking_assert (!is_oacc_kernels (ctx));
12145 /* Likewise, on OpenACC 'kernels' decomposed parts. */
12146 gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx));
12147
12148 break;
12149 }
12150 var = OMP_CLAUSE_DECL (c);
12151 if (is_variable_sized (var))
12152 {
12153 tree new_var = lookup_decl (var, ctx);
12154 tree pvar = DECL_VALUE_EXPR (var);
12155 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
12156 pvar = TREE_OPERAND (pvar, 0);
12157 gcc_assert (DECL_P (pvar));
12158 tree new_pvar = lookup_decl (pvar, ctx);
12159 x = build_fold_indirect_ref (new_pvar);
12160 TREE_THIS_NOTRAP (x) = 1;
12161 SET_DECL_VALUE_EXPR (new_var, x);
12162 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12163 }
12164 break;
12165
12166 case OMP_CLAUSE_USE_DEVICE_PTR:
12167 case OMP_CLAUSE_USE_DEVICE_ADDR:
12168 case OMP_CLAUSE_IS_DEVICE_PTR:
12169 var = OMP_CLAUSE_DECL (c);
12170 map_cnt++;
12171 if (is_variable_sized (var))
12172 {
12173 tree new_var = lookup_decl (var, ctx);
12174 tree pvar = DECL_VALUE_EXPR (var);
12175 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
12176 pvar = TREE_OPERAND (pvar, 0);
12177 gcc_assert (DECL_P (pvar));
12178 tree new_pvar = lookup_decl (pvar, ctx);
12179 x = build_fold_indirect_ref (new_pvar);
12180 TREE_THIS_NOTRAP (x) = 1;
12181 SET_DECL_VALUE_EXPR (new_var, x);
12182 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12183 }
12184 else if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
12185 && !omp_is_reference (var)
12186 && !omp_is_allocatable_or_ptr (var)
12187 && !lang_hooks.decls.omp_array_data (var, true))
12188 || TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
12189 {
12190 tree new_var = lookup_decl (var, ctx);
12191 tree type = build_pointer_type (TREE_TYPE (var));
12192 x = create_tmp_var_raw (type, get_name (new_var));
12193 gimple_add_tmp_var (x);
12194 x = build_simple_mem_ref (x);
12195 SET_DECL_VALUE_EXPR (new_var, x);
12196 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12197 }
12198 else
12199 {
12200 tree new_var = lookup_decl (var, ctx);
12201 x = create_tmp_var_raw (TREE_TYPE (new_var), get_name (new_var));
12202 gimple_add_tmp_var (x);
12203 SET_DECL_VALUE_EXPR (new_var, x);
12204 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12205 }
12206 break;
12207 }
12208
12209 if (offloaded)
12210 {
12211 target_nesting_level++;
12212 lower_omp (&tgt_body, ctx);
12213 target_nesting_level--;
12214 }
12215 else if (data_region)
12216 lower_omp (&tgt_body, ctx);
12217
12218 if (offloaded)
12219 {
12220 /* Declare all the variables created by mapping and the variables
12221 declared in the scope of the target body. */
12222 record_vars_into (ctx->block_vars, child_fn);
12223 maybe_remove_omp_member_access_dummy_vars (tgt_bind);
12224 record_vars_into (gimple_bind_vars (tgt_bind), child_fn);
12225 }
12226
12227 olist = NULL;
12228 ilist = NULL;
12229 if (ctx->record_type)
12230 {
12231 ctx->sender_decl
12232 = create_tmp_var (ctx->record_type, ".omp_data_arr");
12233 DECL_NAMELESS (ctx->sender_decl) = 1;
12234 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
12235 t = make_tree_vec (3);
12236 TREE_VEC_ELT (t, 0) = ctx->sender_decl;
12237 TREE_VEC_ELT (t, 1)
12238 = create_tmp_var (build_array_type_nelts (size_type_node, map_cnt),
12239 ".omp_data_sizes");
12240 DECL_NAMELESS (TREE_VEC_ELT (t, 1)) = 1;
12241 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 1)) = 1;
12242 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 1;
12243 tree tkind_type = short_unsigned_type_node;
12244 int talign_shift = 8;
12245 TREE_VEC_ELT (t, 2)
12246 = create_tmp_var (build_array_type_nelts (tkind_type, map_cnt),
12247 ".omp_data_kinds");
12248 DECL_NAMELESS (TREE_VEC_ELT (t, 2)) = 1;
12249 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 2)) = 1;
12250 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 1;
12251 gimple_omp_target_set_data_arg (stmt, t);
12252
12253 vec<constructor_elt, va_gc> *vsize;
12254 vec<constructor_elt, va_gc> *vkind;
12255 vec_alloc (vsize, map_cnt);
12256 vec_alloc (vkind, map_cnt);
12257 unsigned int map_idx = 0;
12258
12259 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
12260 switch (OMP_CLAUSE_CODE (c))
12261 {
12262 tree ovar, nc, s, purpose, var, x, type;
12263 unsigned int talign;
12264
12265 default:
12266 break;
12267
12268 case OMP_CLAUSE_MAP:
12269 case OMP_CLAUSE_TO:
12270 case OMP_CLAUSE_FROM:
12271 oacc_firstprivate_map:
12272 nc = c;
12273 ovar = OMP_CLAUSE_DECL (c);
12274 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12275 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
12276 || (OMP_CLAUSE_MAP_KIND (c)
12277 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
12278 break;
12279 if (!DECL_P (ovar))
12280 {
12281 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12282 && OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
12283 {
12284 gcc_checking_assert (OMP_CLAUSE_DECL (OMP_CLAUSE_CHAIN (c))
12285 == get_base_address (ovar));
12286 nc = OMP_CLAUSE_CHAIN (c);
12287 ovar = OMP_CLAUSE_DECL (nc);
12288 }
12289 else
12290 {
12291 tree x = build_sender_ref (ovar, ctx);
12292 tree v
12293 = build_fold_addr_expr_with_type (ovar, ptr_type_node);
12294 gimplify_assign (x, v, &ilist);
12295 nc = NULL_TREE;
12296 }
12297 }
12298 else
12299 {
12300 if (DECL_SIZE (ovar)
12301 && TREE_CODE (DECL_SIZE (ovar)) != INTEGER_CST)
12302 {
12303 tree ovar2 = DECL_VALUE_EXPR (ovar);
12304 gcc_assert (TREE_CODE (ovar2) == INDIRECT_REF);
12305 ovar2 = TREE_OPERAND (ovar2, 0);
12306 gcc_assert (DECL_P (ovar2));
12307 ovar = ovar2;
12308 }
12309 if (!maybe_lookup_field (ovar, ctx)
12310 && !(OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12311 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
12312 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)))
12313 continue;
12314 }
12315
12316 talign = TYPE_ALIGN_UNIT (TREE_TYPE (ovar));
12317 if (DECL_P (ovar) && DECL_ALIGN_UNIT (ovar) > talign)
12318 talign = DECL_ALIGN_UNIT (ovar);
12319
12320 if (nc
12321 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12322 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
12323 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)
12324 && is_omp_target (stmt))
12325 {
12326 var = lookup_decl_in_outer_ctx (ovar, ctx);
12327 x = build_sender_ref (c, ctx);
12328 gimplify_assign (x, build_fold_addr_expr (var), &ilist);
12329 }
12330 else if (nc)
12331 {
12332 var = lookup_decl_in_outer_ctx (ovar, ctx);
12333 x = build_sender_ref (ovar, ctx);
12334
12335 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12336 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
12337 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
12338 && TREE_CODE (TREE_TYPE (ovar)) == ARRAY_TYPE)
12339 {
12340 gcc_assert (offloaded);
12341 tree avar
12342 = create_tmp_var (TREE_TYPE (TREE_TYPE (x)));
12343 mark_addressable (avar);
12344 gimplify_assign (avar, build_fold_addr_expr (var), &ilist);
12345 talign = DECL_ALIGN_UNIT (avar);
12346 avar = build_fold_addr_expr (avar);
12347 gimplify_assign (x, avar, &ilist);
12348 }
12349 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
12350 {
12351 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
12352 if (!omp_is_reference (var))
12353 {
12354 if (is_gimple_reg (var)
12355 && OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
12356 TREE_NO_WARNING (var) = 1;
12357 var = build_fold_addr_expr (var);
12358 }
12359 else
12360 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
12361 gimplify_assign (x, var, &ilist);
12362 }
12363 else if (is_gimple_reg (var))
12364 {
12365 gcc_assert (offloaded);
12366 tree avar = create_tmp_var (TREE_TYPE (var));
12367 mark_addressable (avar);
12368 enum gomp_map_kind map_kind = OMP_CLAUSE_MAP_KIND (c);
12369 if (GOMP_MAP_COPY_TO_P (map_kind)
12370 || map_kind == GOMP_MAP_POINTER
12371 || map_kind == GOMP_MAP_TO_PSET
12372 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
12373 {
12374 /* If we need to initialize a temporary
12375 with VAR because it is not addressable, and
12376 the variable hasn't been initialized yet, then
12377 we'll get a warning for the store to avar.
12378 Don't warn in that case, the mapping might
12379 be implicit. */
12380 TREE_NO_WARNING (var) = 1;
12381 gimplify_assign (avar, var, &ilist);
12382 }
12383 avar = build_fold_addr_expr (avar);
12384 gimplify_assign (x, avar, &ilist);
12385 if ((GOMP_MAP_COPY_FROM_P (map_kind)
12386 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
12387 && !TYPE_READONLY (TREE_TYPE (var)))
12388 {
12389 x = unshare_expr (x);
12390 x = build_simple_mem_ref (x);
12391 gimplify_assign (var, x, &olist);
12392 }
12393 }
12394 else
12395 {
			  /* While MAP is handled explicitly by the FE,
			     for 'target update', only the identifier is passed.  */
12398 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FROM
12399 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TO)
12400 && (omp_is_allocatable_or_ptr (var)
12401 && omp_check_optional_argument (var, false)))
12402 var = build_fold_indirect_ref (var);
12403 else if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FROM
12404 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_TO)
12405 || (!omp_is_allocatable_or_ptr (var)
12406 && !omp_check_optional_argument (var, false)))
12407 var = build_fold_addr_expr (var);
12408 gimplify_assign (x, var, &ilist);
12409 }
12410 }
12411 s = NULL_TREE;
12412 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
12413 {
12414 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
12415 s = TREE_TYPE (ovar);
12416 if (TREE_CODE (s) == REFERENCE_TYPE
12417 || omp_check_optional_argument (ovar, false))
12418 s = TREE_TYPE (s);
12419 s = TYPE_SIZE_UNIT (s);
12420 }
12421 else
12422 s = OMP_CLAUSE_SIZE (c);
12423 if (s == NULL_TREE)
12424 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
12425 s = fold_convert (size_type_node, s);
12426 purpose = size_int (map_idx++);
12427 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
12428 if (TREE_CODE (s) != INTEGER_CST)
12429 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
12430
12431 unsigned HOST_WIDE_INT tkind, tkind_zero;
12432 switch (OMP_CLAUSE_CODE (c))
12433 {
12434 case OMP_CLAUSE_MAP:
12435 tkind = OMP_CLAUSE_MAP_KIND (c);
12436 tkind_zero = tkind;
12437 if (OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c))
12438 switch (tkind)
12439 {
12440 case GOMP_MAP_ALLOC:
12441 case GOMP_MAP_IF_PRESENT:
12442 case GOMP_MAP_TO:
12443 case GOMP_MAP_FROM:
12444 case GOMP_MAP_TOFROM:
12445 case GOMP_MAP_ALWAYS_TO:
12446 case GOMP_MAP_ALWAYS_FROM:
12447 case GOMP_MAP_ALWAYS_TOFROM:
12448 case GOMP_MAP_RELEASE:
12449 case GOMP_MAP_FORCE_TO:
12450 case GOMP_MAP_FORCE_FROM:
12451 case GOMP_MAP_FORCE_TOFROM:
12452 case GOMP_MAP_FORCE_PRESENT:
12453 tkind_zero = GOMP_MAP_ZERO_LEN_ARRAY_SECTION;
12454 break;
12455 case GOMP_MAP_DELETE:
12456 tkind_zero = GOMP_MAP_DELETE_ZERO_LEN_ARRAY_SECTION;
12457 default:
12458 break;
12459 }
12460 if (tkind_zero != tkind)
12461 {
12462 if (integer_zerop (s))
12463 tkind = tkind_zero;
12464 else if (integer_nonzerop (s))
12465 tkind_zero = tkind;
12466 }
12467 break;
12468 case OMP_CLAUSE_FIRSTPRIVATE:
12469 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
12470 tkind = GOMP_MAP_TO;
12471 tkind_zero = tkind;
12472 break;
12473 case OMP_CLAUSE_TO:
12474 tkind = GOMP_MAP_TO;
12475 tkind_zero = tkind;
12476 break;
12477 case OMP_CLAUSE_FROM:
12478 tkind = GOMP_MAP_FROM;
12479 tkind_zero = tkind;
12480 break;
12481 default:
12482 gcc_unreachable ();
12483 }
12484 gcc_checking_assert (tkind
12485 < (HOST_WIDE_INT_C (1U) << talign_shift));
12486 gcc_checking_assert (tkind_zero
12487 < (HOST_WIDE_INT_C (1U) << talign_shift));
12488 talign = ceil_log2 (talign);
12489 tkind |= talign << talign_shift;
12490 tkind_zero |= talign << talign_shift;
12491 gcc_checking_assert (tkind
12492 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
12493 gcc_checking_assert (tkind_zero
12494 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
12495 if (tkind == tkind_zero)
12496 x = build_int_cstu (tkind_type, tkind);
12497 else
12498 {
12499 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 0;
12500 x = build3 (COND_EXPR, tkind_type,
12501 fold_build2 (EQ_EXPR, boolean_type_node,
12502 unshare_expr (s), size_zero_node),
12503 build_int_cstu (tkind_type, tkind_zero),
12504 build_int_cstu (tkind_type, tkind));
12505 }
12506 CONSTRUCTOR_APPEND_ELT (vkind, purpose, x);
12507 if (nc && nc != c)
12508 c = nc;
12509 break;
12510
12511 case OMP_CLAUSE_FIRSTPRIVATE:
12512 if (is_gimple_omp_oacc (ctx->stmt))
12513 goto oacc_firstprivate_map;
12514 ovar = OMP_CLAUSE_DECL (c);
12515 if (omp_is_reference (ovar))
12516 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
12517 else
12518 talign = DECL_ALIGN_UNIT (ovar);
12519 var = lookup_decl_in_outer_ctx (ovar, ctx);
12520 x = build_sender_ref (ovar, ctx);
12521 tkind = GOMP_MAP_FIRSTPRIVATE;
12522 type = TREE_TYPE (ovar);
12523 if (omp_is_reference (ovar))
12524 type = TREE_TYPE (type);
12525 if ((INTEGRAL_TYPE_P (type)
12526 && TYPE_PRECISION (type) <= POINTER_SIZE)
12527 || TREE_CODE (type) == POINTER_TYPE)
12528 {
12529 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
12530 tree t = var;
12531 if (omp_is_reference (var))
12532 t = build_simple_mem_ref (var);
12533 else if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
12534 TREE_NO_WARNING (var) = 1;
12535 if (TREE_CODE (type) != POINTER_TYPE)
12536 t = fold_convert (pointer_sized_int_node, t);
12537 t = fold_convert (TREE_TYPE (x), t);
12538 gimplify_assign (x, t, &ilist);
12539 }
12540 else if (omp_is_reference (var))
12541 gimplify_assign (x, var, &ilist);
12542 else if (is_gimple_reg (var))
12543 {
12544 tree avar = create_tmp_var (TREE_TYPE (var));
12545 mark_addressable (avar);
12546 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
12547 TREE_NO_WARNING (var) = 1;
12548 gimplify_assign (avar, var, &ilist);
12549 avar = build_fold_addr_expr (avar);
12550 gimplify_assign (x, avar, &ilist);
12551 }
12552 else
12553 {
12554 var = build_fold_addr_expr (var);
12555 gimplify_assign (x, var, &ilist);
12556 }
12557 if (tkind == GOMP_MAP_FIRSTPRIVATE_INT)
12558 s = size_int (0);
12559 else if (omp_is_reference (ovar))
12560 s = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
12561 else
12562 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
12563 s = fold_convert (size_type_node, s);
12564 purpose = size_int (map_idx++);
12565 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
12566 if (TREE_CODE (s) != INTEGER_CST)
12567 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
12568
12569 gcc_checking_assert (tkind
12570 < (HOST_WIDE_INT_C (1U) << talign_shift));
12571 talign = ceil_log2 (talign);
12572 tkind |= talign << talign_shift;
12573 gcc_checking_assert (tkind
12574 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
12575 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
12576 build_int_cstu (tkind_type, tkind));
12577 break;
12578
12579 case OMP_CLAUSE_USE_DEVICE_PTR:
12580 case OMP_CLAUSE_USE_DEVICE_ADDR:
12581 case OMP_CLAUSE_IS_DEVICE_PTR:
12582 ovar = OMP_CLAUSE_DECL (c);
12583 var = lookup_decl_in_outer_ctx (ovar, ctx);
12584
12585 if (lang_hooks.decls.omp_array_data (ovar, true))
12586 {
12587 tkind = (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IS_DEVICE_PTR
12588 ? GOMP_MAP_USE_DEVICE_PTR : GOMP_MAP_FIRSTPRIVATE_INT);
12589 x = build_sender_ref ((splay_tree_key) &DECL_NAME (ovar), ctx);
12590 }
12591 else if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IS_DEVICE_PTR)
12592 {
12593 tkind = GOMP_MAP_USE_DEVICE_PTR;
12594 x = build_sender_ref ((splay_tree_key) &DECL_UID (ovar), ctx);
12595 }
12596 else
12597 {
12598 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
12599 x = build_sender_ref (ovar, ctx);
12600 }
12601
12602 if (is_gimple_omp_oacc (ctx->stmt))
12603 {
12604 gcc_assert (tkind == GOMP_MAP_USE_DEVICE_PTR);
12605
12606 if (OMP_CLAUSE_USE_DEVICE_PTR_IF_PRESENT (c))
12607 tkind = GOMP_MAP_USE_DEVICE_PTR_IF_PRESENT;
12608 }
12609
12610 type = TREE_TYPE (ovar);
12611 if (lang_hooks.decls.omp_array_data (ovar, true))
12612 var = lang_hooks.decls.omp_array_data (ovar, false);
12613 else if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
12614 && !omp_is_reference (ovar)
12615 && !omp_is_allocatable_or_ptr (ovar))
12616 || TREE_CODE (type) == ARRAY_TYPE)
12617 var = build_fold_addr_expr (var);
12618 else
12619 {
12620 if (omp_is_reference (ovar)
12621 || omp_check_optional_argument (ovar, false)
12622 || omp_is_allocatable_or_ptr (ovar))
12623 {
12624 type = TREE_TYPE (type);
12625 if (POINTER_TYPE_P (type)
12626 && TREE_CODE (type) != ARRAY_TYPE
12627 && ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_USE_DEVICE_ADDR
12628 && !omp_is_allocatable_or_ptr (ovar))
12629 || (omp_is_reference (ovar)
12630 && omp_is_allocatable_or_ptr (ovar))))
12631 var = build_simple_mem_ref (var);
12632 var = fold_convert (TREE_TYPE (x), var);
12633 }
12634 }
12635 tree present;
12636 present = omp_check_optional_argument (ovar, true);
12637 if (present)
12638 {
12639 tree null_label = create_artificial_label (UNKNOWN_LOCATION);
12640 tree notnull_label = create_artificial_label (UNKNOWN_LOCATION);
12641 tree opt_arg_label = create_artificial_label (UNKNOWN_LOCATION);
12642 tree new_x = unshare_expr (x);
12643 gimplify_expr (&present, &ilist, NULL, is_gimple_val,
12644 fb_rvalue);
12645 gcond *cond = gimple_build_cond_from_tree (present,
12646 notnull_label,
12647 null_label);
12648 gimple_seq_add_stmt (&ilist, cond);
12649 gimple_seq_add_stmt (&ilist, gimple_build_label (null_label));
12650 gimplify_assign (new_x, null_pointer_node, &ilist);
12651 gimple_seq_add_stmt (&ilist, gimple_build_goto (opt_arg_label));
12652 gimple_seq_add_stmt (&ilist,
12653 gimple_build_label (notnull_label));
12654 gimplify_assign (x, var, &ilist);
12655 gimple_seq_add_stmt (&ilist,
12656 gimple_build_label (opt_arg_label));
12657 }
12658 else
12659 gimplify_assign (x, var, &ilist);
12660 s = size_int (0);
12661 purpose = size_int (map_idx++);
12662 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
12663 gcc_checking_assert (tkind
12664 < (HOST_WIDE_INT_C (1U) << talign_shift));
12665 gcc_checking_assert (tkind
12666 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
12667 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
12668 build_int_cstu (tkind_type, tkind));
12669 break;
12670 }
12671
12672 gcc_assert (map_idx == map_cnt);
12673
12674 DECL_INITIAL (TREE_VEC_ELT (t, 1))
12675 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 1)), vsize);
12676 DECL_INITIAL (TREE_VEC_ELT (t, 2))
12677 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 2)), vkind);
12678 for (int i = 1; i <= 2; i++)
12679 if (!TREE_STATIC (TREE_VEC_ELT (t, i)))
12680 {
12681 gimple_seq initlist = NULL;
12682 force_gimple_operand (build1 (DECL_EXPR, void_type_node,
12683 TREE_VEC_ELT (t, i)),
12684 &initlist, true, NULL_TREE);
12685 gimple_seq_add_seq (&ilist, initlist);
12686
12687 tree clobber = build_clobber (TREE_TYPE (TREE_VEC_ELT (t, i)));
12688 gimple_seq_add_stmt (&olist,
12689 gimple_build_assign (TREE_VEC_ELT (t, i),
12690 clobber));
12691 }
12692
12693 tree clobber = build_clobber (ctx->record_type);
12694 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
12695 clobber));
12696 }
12697
12698 /* Once all the expansions are done, sequence all the different
12699 fragments inside gimple_omp_body. */
12700
12701 new_body = NULL;
12702
12703 if (offloaded
12704 && ctx->record_type)
12705 {
12706 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
12707 /* fixup_child_record_type might have changed receiver_decl's type. */
12708 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
12709 gimple_seq_add_stmt (&new_body,
12710 gimple_build_assign (ctx->receiver_decl, t));
12711 }
12712 gimple_seq_add_seq (&new_body, fplist);
12713
12714 if (offloaded || data_region)
12715 {
12716 tree prev = NULL_TREE;
12717 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
12718 switch (OMP_CLAUSE_CODE (c))
12719 {
12720 tree var, x;
12721 default:
12722 break;
12723 case OMP_CLAUSE_FIRSTPRIVATE:
12724 if (is_gimple_omp_oacc (ctx->stmt))
12725 break;
12726 var = OMP_CLAUSE_DECL (c);
12727 if (omp_is_reference (var)
12728 || is_gimple_reg_type (TREE_TYPE (var)))
12729 {
12730 tree new_var = lookup_decl (var, ctx);
12731 tree type;
12732 type = TREE_TYPE (var);
12733 if (omp_is_reference (var))
12734 type = TREE_TYPE (type);
12735 if ((INTEGRAL_TYPE_P (type)
12736 && TYPE_PRECISION (type) <= POINTER_SIZE)
12737 || TREE_CODE (type) == POINTER_TYPE)
12738 {
12739 x = build_receiver_ref (var, false, ctx);
12740 if (TREE_CODE (type) != POINTER_TYPE)
12741 x = fold_convert (pointer_sized_int_node, x);
12742 x = fold_convert (type, x);
12743 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
12744 fb_rvalue);
12745 if (omp_is_reference (var))
12746 {
12747 tree v = create_tmp_var_raw (type, get_name (var));
12748 gimple_add_tmp_var (v);
12749 TREE_ADDRESSABLE (v) = 1;
12750 gimple_seq_add_stmt (&new_body,
12751 gimple_build_assign (v, x));
12752 x = build_fold_addr_expr (v);
12753 }
12754 gimple_seq_add_stmt (&new_body,
12755 gimple_build_assign (new_var, x));
12756 }
12757 else
12758 {
12759 x = build_receiver_ref (var, !omp_is_reference (var), ctx);
12760 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
12761 fb_rvalue);
12762 gimple_seq_add_stmt (&new_body,
12763 gimple_build_assign (new_var, x));
12764 }
12765 }
12766 else if (is_variable_sized (var))
12767 {
12768 tree pvar = DECL_VALUE_EXPR (var);
12769 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
12770 pvar = TREE_OPERAND (pvar, 0);
12771 gcc_assert (DECL_P (pvar));
12772 tree new_var = lookup_decl (pvar, ctx);
12773 x = build_receiver_ref (var, false, ctx);
12774 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
12775 gimple_seq_add_stmt (&new_body,
12776 gimple_build_assign (new_var, x));
12777 }
12778 break;
12779 case OMP_CLAUSE_PRIVATE:
12780 if (is_gimple_omp_oacc (ctx->stmt))
12781 break;
12782 var = OMP_CLAUSE_DECL (c);
12783 if (omp_is_reference (var))
12784 {
12785 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
12786 tree new_var = lookup_decl (var, ctx);
12787 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
12788 if (TREE_CONSTANT (x))
12789 {
12790 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
12791 get_name (var));
12792 gimple_add_tmp_var (x);
12793 TREE_ADDRESSABLE (x) = 1;
12794 x = build_fold_addr_expr_loc (clause_loc, x);
12795 }
12796 else
12797 break;
12798
12799 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
12800 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
12801 gimple_seq_add_stmt (&new_body,
12802 gimple_build_assign (new_var, x));
12803 }
12804 break;
12805 case OMP_CLAUSE_USE_DEVICE_PTR:
12806 case OMP_CLAUSE_USE_DEVICE_ADDR:
12807 case OMP_CLAUSE_IS_DEVICE_PTR:
12808 tree new_var;
12809 gimple_seq assign_body;
12810 bool is_array_data;
12811 bool do_optional_check;
12812 assign_body = NULL;
12813 do_optional_check = false;
12814 var = OMP_CLAUSE_DECL (c);
12815 is_array_data = lang_hooks.decls.omp_array_data (var, true) != NULL;
12816
12817 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IS_DEVICE_PTR)
12818 x = build_sender_ref (is_array_data
12819 ? (splay_tree_key) &DECL_NAME (var)
12820 : (splay_tree_key) &DECL_UID (var), ctx);
12821 else
12822 x = build_receiver_ref (var, false, ctx);
12823
12824 if (is_array_data)
12825 {
12826 bool is_ref = omp_is_reference (var);
12827 do_optional_check = true;
12828 /* First, we copy the descriptor data from the host; then
12829 we update its data to point to the target address. */
12830 new_var = lookup_decl (var, ctx);
12831 new_var = DECL_VALUE_EXPR (new_var);
12832 tree v = new_var;
12833
12834 if (is_ref)
12835 {
12836 var = build_fold_indirect_ref (var);
12837 gimplify_expr (&var, &assign_body, NULL, is_gimple_val,
12838 fb_rvalue);
12839 v = create_tmp_var_raw (TREE_TYPE (var), get_name (var));
12840 gimple_add_tmp_var (v);
12841 TREE_ADDRESSABLE (v) = 1;
12842 gimple_seq_add_stmt (&assign_body,
12843 gimple_build_assign (v, var));
12844 tree rhs = build_fold_addr_expr (v);
12845 gimple_seq_add_stmt (&assign_body,
12846 gimple_build_assign (new_var, rhs));
12847 }
12848 else
12849 gimple_seq_add_stmt (&assign_body,
12850 gimple_build_assign (new_var, var));
12851
12852 tree v2 = lang_hooks.decls.omp_array_data (unshare_expr (v), false);
12853 gcc_assert (v2);
12854 gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
12855 gimple_seq_add_stmt (&assign_body,
12856 gimple_build_assign (v2, x));
12857 }
12858 else if (is_variable_sized (var))
12859 {
12860 tree pvar = DECL_VALUE_EXPR (var);
12861 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
12862 pvar = TREE_OPERAND (pvar, 0);
12863 gcc_assert (DECL_P (pvar));
12864 new_var = lookup_decl (pvar, ctx);
12865 gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
12866 gimple_seq_add_stmt (&assign_body,
12867 gimple_build_assign (new_var, x));
12868 }
12869 else if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
12870 && !omp_is_reference (var)
12871 && !omp_is_allocatable_or_ptr (var))
12872 || TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
12873 {
12874 new_var = lookup_decl (var, ctx);
12875 new_var = DECL_VALUE_EXPR (new_var);
12876 gcc_assert (TREE_CODE (new_var) == MEM_REF);
12877 new_var = TREE_OPERAND (new_var, 0);
12878 gcc_assert (DECL_P (new_var));
12879 gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
12880 gimple_seq_add_stmt (&assign_body,
12881 gimple_build_assign (new_var, x));
12882 }
12883 else
12884 {
12885 tree type = TREE_TYPE (var);
12886 new_var = lookup_decl (var, ctx);
12887 if (omp_is_reference (var))
12888 {
12889 type = TREE_TYPE (type);
12890 if (POINTER_TYPE_P (type)
12891 && TREE_CODE (type) != ARRAY_TYPE
12892 && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_USE_DEVICE_ADDR
12893 || (omp_is_reference (var)
12894 && omp_is_allocatable_or_ptr (var))))
12895 {
12896 tree v = create_tmp_var_raw (type, get_name (var));
12897 gimple_add_tmp_var (v);
12898 TREE_ADDRESSABLE (v) = 1;
12899 x = fold_convert (type, x);
12900 gimplify_expr (&x, &assign_body, NULL, is_gimple_val,
12901 fb_rvalue);
12902 gimple_seq_add_stmt (&assign_body,
12903 gimple_build_assign (v, x));
12904 x = build_fold_addr_expr (v);
12905 do_optional_check = true;
12906 }
12907 }
12908 new_var = DECL_VALUE_EXPR (new_var);
12909 x = fold_convert (TREE_TYPE (new_var), x);
12910 gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
12911 gimple_seq_add_stmt (&assign_body,
12912 gimple_build_assign (new_var, x));
12913 }
12914 tree present;
12915 present = (do_optional_check
12916 ? omp_check_optional_argument (OMP_CLAUSE_DECL (c), true)
12917 : NULL_TREE);
12918 if (present)
12919 {
12920 tree null_label = create_artificial_label (UNKNOWN_LOCATION);
12921 tree notnull_label = create_artificial_label (UNKNOWN_LOCATION);
12922 tree opt_arg_label = create_artificial_label (UNKNOWN_LOCATION);
12923 glabel *null_glabel = gimple_build_label (null_label);
12924 glabel *notnull_glabel = gimple_build_label (notnull_label);
12925 ggoto *opt_arg_ggoto = gimple_build_goto (opt_arg_label);
12926 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
12927 fb_rvalue);
12928 gimplify_expr (&present, &new_body, NULL, is_gimple_val,
12929 fb_rvalue);
12930 gcond *cond = gimple_build_cond_from_tree (present,
12931 notnull_label,
12932 null_label);
12933 gimple_seq_add_stmt (&new_body, cond);
12934 gimple_seq_add_stmt (&new_body, null_glabel);
12935 gimplify_assign (new_var, null_pointer_node, &new_body);
12936 gimple_seq_add_stmt (&new_body, opt_arg_ggoto);
12937 gimple_seq_add_stmt (&new_body, notnull_glabel);
12938 gimple_seq_add_seq (&new_body, assign_body);
12939 gimple_seq_add_stmt (&new_body,
12940 gimple_build_label (opt_arg_label));
12941 }
12942 else
12943 gimple_seq_add_seq (&new_body, assign_body);
12944 break;
12945 }
12946 /* Handle GOMP_MAP_FIRSTPRIVATE_{POINTER,REFERENCE} in second pass,
12947 so that firstprivate vars holding OMP_CLAUSE_SIZE if needed
12948 are already handled. Similarly OMP_CLAUSE_PRIVATE for VLAs
12949 or references to VLAs. */
12950 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
12951 switch (OMP_CLAUSE_CODE (c))
12952 {
12953 tree var;
12954 default:
12955 break;
12956 case OMP_CLAUSE_MAP:
12957 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
12958 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
12959 {
12960 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
12961 poly_int64 offset = 0;
12962 gcc_assert (prev);
12963 var = OMP_CLAUSE_DECL (c);
12964 if (DECL_P (var)
12965 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
12966 && is_global_var (maybe_lookup_decl_in_outer_ctx (var,
12967 ctx))
12968 && varpool_node::get_create (var)->offloadable)
12969 break;
12970 if (TREE_CODE (var) == INDIRECT_REF
12971 && TREE_CODE (TREE_OPERAND (var, 0)) == COMPONENT_REF)
12972 var = TREE_OPERAND (var, 0);
12973 if (TREE_CODE (var) == COMPONENT_REF)
12974 {
12975 var = get_addr_base_and_unit_offset (var, &offset);
12976 gcc_assert (var != NULL_TREE && DECL_P (var));
12977 }
12978 else if (DECL_SIZE (var)
12979 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
12980 {
12981 tree var2 = DECL_VALUE_EXPR (var);
12982 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
12983 var2 = TREE_OPERAND (var2, 0);
12984 gcc_assert (DECL_P (var2));
12985 var = var2;
12986 }
12987 tree new_var = lookup_decl (var, ctx), x;
12988 tree type = TREE_TYPE (new_var);
12989 bool is_ref;
12990 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == INDIRECT_REF
12991 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
12992 == COMPONENT_REF))
12993 {
12994 type = TREE_TYPE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0));
12995 is_ref = true;
12996 new_var = build2 (MEM_REF, type,
12997 build_fold_addr_expr (new_var),
12998 build_int_cst (build_pointer_type (type),
12999 offset));
13000 }
13001 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPONENT_REF)
13002 {
13003 type = TREE_TYPE (OMP_CLAUSE_DECL (c));
13004 is_ref = TREE_CODE (type) == REFERENCE_TYPE;
13005 new_var = build2 (MEM_REF, type,
13006 build_fold_addr_expr (new_var),
13007 build_int_cst (build_pointer_type (type),
13008 offset));
13009 }
13010 else
13011 is_ref = omp_is_reference (var);
13012 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
13013 is_ref = false;
13014 bool ref_to_array = false;
13015 if (is_ref)
13016 {
13017 type = TREE_TYPE (type);
13018 if (TREE_CODE (type) == ARRAY_TYPE)
13019 {
13020 type = build_pointer_type (type);
13021 ref_to_array = true;
13022 }
13023 }
13024 else if (TREE_CODE (type) == ARRAY_TYPE)
13025 {
13026 tree decl2 = DECL_VALUE_EXPR (new_var);
13027 gcc_assert (TREE_CODE (decl2) == MEM_REF);
13028 decl2 = TREE_OPERAND (decl2, 0);
13029 gcc_assert (DECL_P (decl2));
13030 new_var = decl2;
13031 type = TREE_TYPE (new_var);
13032 }
13033 x = build_receiver_ref (OMP_CLAUSE_DECL (prev), false, ctx);
13034 x = fold_convert_loc (clause_loc, type, x);
13035 if (!integer_zerop (OMP_CLAUSE_SIZE (c)))
13036 {
13037 tree bias = OMP_CLAUSE_SIZE (c);
13038 if (DECL_P (bias))
13039 bias = lookup_decl (bias, ctx);
13040 bias = fold_convert_loc (clause_loc, sizetype, bias);
13041 bias = fold_build1_loc (clause_loc, NEGATE_EXPR, sizetype,
13042 bias);
13043 x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
13044 TREE_TYPE (x), x, bias);
13045 }
13046 if (ref_to_array)
13047 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
13048 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
13049 if (is_ref && !ref_to_array)
13050 {
13051 tree t = create_tmp_var_raw (type, get_name (var));
13052 gimple_add_tmp_var (t);
13053 TREE_ADDRESSABLE (t) = 1;
13054 gimple_seq_add_stmt (&new_body,
13055 gimple_build_assign (t, x));
13056 x = build_fold_addr_expr_loc (clause_loc, t);
13057 }
13058 gimple_seq_add_stmt (&new_body,
13059 gimple_build_assign (new_var, x));
13060 prev = NULL_TREE;
13061 }
13062 else if (OMP_CLAUSE_CHAIN (c)
13063 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c))
13064 == OMP_CLAUSE_MAP
13065 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
13066 == GOMP_MAP_FIRSTPRIVATE_POINTER
13067 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
13068 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
13069 prev = c;
13070 break;
13071 case OMP_CLAUSE_PRIVATE:
13072 var = OMP_CLAUSE_DECL (c);
13073 if (is_variable_sized (var))
13074 {
13075 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
13076 tree new_var = lookup_decl (var, ctx);
13077 tree pvar = DECL_VALUE_EXPR (var);
13078 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
13079 pvar = TREE_OPERAND (pvar, 0);
13080 gcc_assert (DECL_P (pvar));
13081 tree new_pvar = lookup_decl (pvar, ctx);
13082 tree atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
13083 tree al = size_int (DECL_ALIGN (var));
13084 tree x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
13085 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
13086 x = fold_convert_loc (clause_loc, TREE_TYPE (new_pvar), x);
13087 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
13088 gimple_seq_add_stmt (&new_body,
13089 gimple_build_assign (new_pvar, x));
13090 }
13091 else if (omp_is_reference (var) && !is_gimple_omp_oacc (ctx->stmt))
13092 {
13093 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
13094 tree new_var = lookup_decl (var, ctx);
13095 tree x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
13096 if (TREE_CONSTANT (x))
13097 break;
13098 else
13099 {
13100 tree atmp
13101 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
13102 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
13103 tree al = size_int (TYPE_ALIGN (rtype));
13104 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
13105 }
13106
13107 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
13108 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
13109 gimple_seq_add_stmt (&new_body,
13110 gimple_build_assign (new_var, x));
13111 }
13112 break;
13113 }
13114
13115 gimple_seq fork_seq = NULL;
13116 gimple_seq join_seq = NULL;
13117
13118 if (offloaded && is_gimple_omp_oacc (ctx->stmt))
13119 {
13120 /* If there are reductions on the offloaded region itself, treat
13121 them as a dummy GANG loop. */
13122 tree level = build_int_cst (integer_type_node, GOMP_DIM_GANG);
13123
13124 lower_oacc_reductions (gimple_location (ctx->stmt), clauses, level,
13125 false, NULL, NULL, &fork_seq, &join_seq, ctx);
13126 }
13127
13128 gimple_seq_add_seq (&new_body, fork_seq);
13129 gimple_seq_add_seq (&new_body, tgt_body);
13130 gimple_seq_add_seq (&new_body, join_seq);
13131
13132 if (offloaded)
13133 {
13134 new_body = maybe_catch_exception (new_body);
13135 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
13136 }
13137 gimple_omp_set_body (stmt, new_body);
13138 }
13139
13140 bind = gimple_build_bind (NULL, NULL,
13141 tgt_bind ? gimple_bind_block (tgt_bind)
13142 : NULL_TREE);
13143 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
13144 gimple_bind_add_seq (bind, ilist);
13145 gimple_bind_add_stmt (bind, stmt);
13146 gimple_bind_add_seq (bind, olist);
13147
13148 pop_gimplify_context (NULL);
13149
13150 if (dep_bind)
13151 {
13152 gimple_bind_add_seq (dep_bind, dep_ilist);
13153 gimple_bind_add_stmt (dep_bind, bind);
13154 gimple_bind_add_seq (dep_bind, dep_olist);
13155 pop_gimplify_context (dep_bind);
13156 }
13157 }
13158
13159 /* Expand code for an OpenMP teams directive. */
13160
13161 static void
lower_omp_teams(gimple_stmt_iterator * gsi_p,omp_context * ctx)13162 lower_omp_teams (gimple_stmt_iterator *gsi_p, omp_context *ctx)
13163 {
13164 gomp_teams *teams_stmt = as_a <gomp_teams *> (gsi_stmt (*gsi_p));
13165 push_gimplify_context ();
13166
13167 tree block = make_node (BLOCK);
13168 gbind *bind = gimple_build_bind (NULL, NULL, block);
13169 gsi_replace (gsi_p, bind, true);
13170 gimple_seq bind_body = NULL;
13171 gimple_seq dlist = NULL;
13172 gimple_seq olist = NULL;
13173
13174 tree num_teams = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
13175 OMP_CLAUSE_NUM_TEAMS);
13176 if (num_teams == NULL_TREE)
13177 num_teams = build_int_cst (unsigned_type_node, 0);
13178 else
13179 {
13180 num_teams = OMP_CLAUSE_NUM_TEAMS_EXPR (num_teams);
13181 num_teams = fold_convert (unsigned_type_node, num_teams);
13182 gimplify_expr (&num_teams, &bind_body, NULL, is_gimple_val, fb_rvalue);
13183 }
13184 tree thread_limit = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
13185 OMP_CLAUSE_THREAD_LIMIT);
13186 if (thread_limit == NULL_TREE)
13187 thread_limit = build_int_cst (unsigned_type_node, 0);
13188 else
13189 {
13190 thread_limit = OMP_CLAUSE_THREAD_LIMIT_EXPR (thread_limit);
13191 thread_limit = fold_convert (unsigned_type_node, thread_limit);
13192 gimplify_expr (&thread_limit, &bind_body, NULL, is_gimple_val,
13193 fb_rvalue);
13194 }
13195
13196 lower_rec_input_clauses (gimple_omp_teams_clauses (teams_stmt),
13197 &bind_body, &dlist, ctx, NULL);
13198 lower_omp (gimple_omp_body_ptr (teams_stmt), ctx);
13199 lower_reduction_clauses (gimple_omp_teams_clauses (teams_stmt), &olist,
13200 NULL, ctx);
13201 gimple_seq_add_stmt (&bind_body, teams_stmt);
13202
13203 location_t loc = gimple_location (teams_stmt);
13204 tree decl = builtin_decl_explicit (BUILT_IN_GOMP_TEAMS);
13205 gimple *call = gimple_build_call (decl, 2, num_teams, thread_limit);
13206 gimple_set_location (call, loc);
13207 gimple_seq_add_stmt (&bind_body, call);
13208
13209 gimple_seq_add_seq (&bind_body, gimple_omp_body (teams_stmt));
13210 gimple_omp_set_body (teams_stmt, NULL);
13211 gimple_seq_add_seq (&bind_body, olist);
13212 gimple_seq_add_seq (&bind_body, dlist);
13213 gimple_seq_add_stmt (&bind_body, gimple_build_omp_return (true));
13214 gimple_bind_set_body (bind, bind_body);
13215
13216 pop_gimplify_context (bind);
13217
13218 gimple_bind_append_vars (bind, ctx->block_vars);
13219 BLOCK_VARS (block) = ctx->block_vars;
13220 if (BLOCK_VARS (block))
13221 TREE_USED (block) = 1;
13222 }
13223
13224 /* Callback for lower_omp_1. Return non-NULL if *tp needs to be
13225 regimplified. If DATA is non-NULL, lower_omp_1 is outside
13226 of OMP context, but with task_shared_vars set. */
13227
13228 static tree
lower_omp_regimplify_p(tree * tp,int * walk_subtrees,void * data)13229 lower_omp_regimplify_p (tree *tp, int *walk_subtrees,
13230 void *data)
13231 {
13232 tree t = *tp;
13233
13234 /* Any variable with DECL_VALUE_EXPR needs to be regimplified. */
13235 if ((VAR_P (t) || TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
13236 && data == NULL
13237 && DECL_HAS_VALUE_EXPR_P (t))
13238 return t;
13239
13240 if (task_shared_vars
13241 && DECL_P (t)
13242 && bitmap_bit_p (task_shared_vars, DECL_UID (t)))
13243 return t;
13244
13245 /* If a global variable has been privatized, TREE_CONSTANT on
13246 ADDR_EXPR might be wrong. */
13247 if (data == NULL && TREE_CODE (t) == ADDR_EXPR)
13248 recompute_tree_invariant_for_addr_expr (t);
13249
13250 *walk_subtrees = !IS_TYPE_OR_DECL_P (t);
13251 return NULL_TREE;
13252 }
13253
/* Data to be communicated between lower_omp_regimplify_operands and
   lower_omp_regimplify_operands_p.  */

struct lower_omp_regimplify_operands_data
{
  /* OMP context of the statement whose operands are being regimplified.  */
  omp_context *ctx;
  /* Scratch vector holding <saved DECL_VALUE_EXPR, decl> pairs pushed by
     lower_omp_regimplify_operands_p so lower_omp_regimplify_operands can
     restore the temporarily adjusted DECL_VALUE_EXPRs afterwards.  */
  vec<tree> *decls;
};
13262
13263 /* Helper function for lower_omp_regimplify_operands. Find
13264 omp_member_access_dummy_var vars and adjust temporarily their
13265 DECL_VALUE_EXPRs if needed. */
13266
13267 static tree
lower_omp_regimplify_operands_p(tree * tp,int * walk_subtrees,void * data)13268 lower_omp_regimplify_operands_p (tree *tp, int *walk_subtrees,
13269 void *data)
13270 {
13271 tree t = omp_member_access_dummy_var (*tp);
13272 if (t)
13273 {
13274 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
13275 lower_omp_regimplify_operands_data *ldata
13276 = (lower_omp_regimplify_operands_data *) wi->info;
13277 tree o = maybe_lookup_decl (t, ldata->ctx);
13278 if (o != t)
13279 {
13280 ldata->decls->safe_push (DECL_VALUE_EXPR (*tp));
13281 ldata->decls->safe_push (*tp);
13282 tree v = unshare_and_remap (DECL_VALUE_EXPR (*tp), t, o);
13283 SET_DECL_VALUE_EXPR (*tp, v);
13284 }
13285 }
13286 *walk_subtrees = !IS_TYPE_OR_DECL_P (*tp);
13287 return NULL_TREE;
13288 }
13289
13290 /* Wrapper around gimple_regimplify_operands that adjusts DECL_VALUE_EXPRs
13291 of omp_member_access_dummy_var vars during regimplification. */
13292
13293 static void
lower_omp_regimplify_operands(omp_context * ctx,gimple * stmt,gimple_stmt_iterator * gsi_p)13294 lower_omp_regimplify_operands (omp_context *ctx, gimple *stmt,
13295 gimple_stmt_iterator *gsi_p)
13296 {
13297 auto_vec<tree, 10> decls;
13298 if (ctx)
13299 {
13300 struct walk_stmt_info wi;
13301 memset (&wi, '\0', sizeof (wi));
13302 struct lower_omp_regimplify_operands_data data;
13303 data.ctx = ctx;
13304 data.decls = &decls;
13305 wi.info = &data;
13306 walk_gimple_op (stmt, lower_omp_regimplify_operands_p, &wi);
13307 }
13308 gimple_regimplify_operands (stmt, gsi_p);
13309 while (!decls.is_empty ())
13310 {
13311 tree t = decls.pop ();
13312 tree v = decls.pop ();
13313 SET_DECL_VALUE_EXPR (t, v);
13314 }
13315 }
13316
/* Lower the statement at *GSI_P within OMP context CTX (NULL when the
   statement is not inside any OMP construct).  OMP directives are
   dispatched to their specific lower_omp_* routines, statements that
   contain sub-sequences recurse through lower_omp, and other statements
   are regimplified when they reference variables remapped by data
   sharing/privatization clauses.  */

static void
lower_omp_1 (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  gimple *stmt = gsi_stmt (*gsi_p);
  struct walk_stmt_info wi;
  gcall *call_stmt;

  if (gimple_has_location (stmt))
    input_location = gimple_location (stmt);

  /* WI is only passed to lower_omp_regimplify_p when CTX is NULL and
     task_shared_vars is set; clear it up front for that case.  */
  if (task_shared_vars)
    memset (&wi, '\0', sizeof (wi));

  /* If we have issued syntax errors, avoid doing any heavy lifting.
     Just replace the OMP directives with a NOP to avoid
     confusing RTL expansion.  */
  if (seen_error () && is_gimple_omp (stmt))
    {
      gsi_replace (gsi_p, gimple_build_nop (), true);
      return;
    }

  switch (gimple_code (stmt))
    {
    case GIMPLE_COND:
      {
	/* Regimplify the condition operands if they mention privatized
	   or task-shared variables.  */
	gcond *cond_stmt = as_a <gcond *> (stmt);
	if ((ctx || task_shared_vars)
	    && (walk_tree (gimple_cond_lhs_ptr (cond_stmt),
			   lower_omp_regimplify_p,
			   ctx ? NULL : &wi, NULL)
		|| walk_tree (gimple_cond_rhs_ptr (cond_stmt),
			      lower_omp_regimplify_p,
			      ctx ? NULL : &wi, NULL)))
	  lower_omp_regimplify_operands (ctx, cond_stmt, gsi_p);
      }
      break;
    case GIMPLE_CATCH:
      lower_omp (gimple_catch_handler_ptr (as_a <gcatch *> (stmt)), ctx);
      break;
    case GIMPLE_EH_FILTER:
      lower_omp (gimple_eh_filter_failure_ptr (stmt), ctx);
      break;
    case GIMPLE_TRY:
      lower_omp (gimple_try_eval_ptr (stmt), ctx);
      lower_omp (gimple_try_cleanup_ptr (stmt), ctx);
      break;
    case GIMPLE_TRANSACTION:
      lower_omp (gimple_transaction_body_ptr (as_a <gtransaction *> (stmt)),
		 ctx);
      break;
    case GIMPLE_BIND:
      lower_omp (gimple_bind_body_ptr (as_a <gbind *> (stmt)), ctx);
      maybe_remove_omp_member_access_dummy_vars (as_a <gbind *> (stmt));
      break;
    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      if (ctx->cancellable)
	ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
      lower_omp_taskreg (gsi_p, ctx);
      break;
    case GIMPLE_OMP_FOR:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      if (ctx->cancellable)
	ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
      lower_omp_for (gsi_p, ctx);
      break;
    case GIMPLE_OMP_SECTIONS:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      if (ctx->cancellable)
	ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
      lower_omp_sections (gsi_p, ctx);
      break;
    case GIMPLE_OMP_SINGLE:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_single (gsi_p, ctx);
      break;
    case GIMPLE_OMP_MASTER:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_master (gsi_p, ctx);
      break;
    case GIMPLE_OMP_TASKGROUP:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_taskgroup (gsi_p, ctx);
      break;
    case GIMPLE_OMP_ORDERED:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_ordered (gsi_p, ctx);
      break;
    case GIMPLE_OMP_SCAN:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_scan (gsi_p, ctx);
      break;
    case GIMPLE_OMP_CRITICAL:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_critical (gsi_p, ctx);
      break;
    case GIMPLE_OMP_ATOMIC_LOAD:
      /* Only the load RHS can mention remapped variables here.  */
      if ((ctx || task_shared_vars)
	  && walk_tree (gimple_omp_atomic_load_rhs_ptr (
			  as_a <gomp_atomic_load *> (stmt)),
			lower_omp_regimplify_p, ctx ? NULL : &wi, NULL))
	lower_omp_regimplify_operands (ctx, stmt, gsi_p);
      break;
    case GIMPLE_OMP_TARGET:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_target (gsi_p, ctx);
      break;
    case GIMPLE_OMP_TEAMS:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      /* Host teams constructs are lowered like parallel/task regions.  */
      if (gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
	lower_omp_taskreg (gsi_p, ctx);
      else
	lower_omp_teams (gsi_p, ctx);
      break;
    case GIMPLE_CALL:
      tree fndecl;
      call_stmt = as_a <gcall *> (stmt);
      fndecl = gimple_call_fndecl (call_stmt);
      if (fndecl
	  && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
	switch (DECL_FUNCTION_CODE (fndecl))
	  {
	  case BUILT_IN_GOMP_BARRIER:
	    if (ctx == NULL)
	      break;
	    /* FALLTHRU */
	  case BUILT_IN_GOMP_CANCEL:
	  case BUILT_IN_GOMP_CANCELLATION_POINT:
	    /* Inside cancellable regions, barriers become cancellable
	       barriers and cancel/cancellation point calls get their
	       boolean result tested, jumping to the region's cancel
	       label when cancellation was observed.  */
	    omp_context *cctx;
	    cctx = ctx;
	    /* Sections have the cancel label on the enclosing
	       GIMPLE_OMP_SECTIONS context.  */
	    if (gimple_code (cctx->stmt) == GIMPLE_OMP_SECTION)
	      cctx = cctx->outer;
	    gcc_assert (gimple_call_lhs (call_stmt) == NULL_TREE);
	    if (!cctx->cancellable)
	      {
		/* A cancellation point in a non-cancellable region is
		   a no-op and can be dropped entirely.  */
		if (DECL_FUNCTION_CODE (fndecl)
		    == BUILT_IN_GOMP_CANCELLATION_POINT)
		  {
		    stmt = gimple_build_nop ();
		    gsi_replace (gsi_p, stmt, false);
		  }
		break;
	      }
	    if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_GOMP_BARRIER)
	      {
		fndecl = builtin_decl_explicit (BUILT_IN_GOMP_BARRIER_CANCEL);
		gimple_call_set_fndecl (call_stmt, fndecl);
		gimple_call_set_fntype (call_stmt, TREE_TYPE (fndecl));
	      }
	    tree lhs;
	    lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (fndecl)));
	    gimple_call_set_lhs (call_stmt, lhs);
	    tree fallthru_label;
	    fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
	    gimple *g;
	    g = gimple_build_label (fallthru_label);
	    gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
	    g = gimple_build_cond (NE_EXPR, lhs,
				   fold_convert (TREE_TYPE (lhs),
						 boolean_false_node),
				   cctx->cancel_label, fallthru_label);
	    gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
	    break;
	  default:
	    break;
	  }
      goto regimplify;

    case GIMPLE_ASSIGN:
      /* For stores to variables subject to lastprivate(conditional),
	 record in the associated _condtemp_ variable which iteration
	 performed the store.  */
      for (omp_context *up = ctx; up; up = up->outer)
	{
	  if (gimple_code (up->stmt) == GIMPLE_OMP_ORDERED
	      || gimple_code (up->stmt) == GIMPLE_OMP_CRITICAL
	      || gimple_code (up->stmt) == GIMPLE_OMP_TASKGROUP
	      || gimple_code (up->stmt) == GIMPLE_OMP_SECTION
	      || gimple_code (up->stmt) == GIMPLE_OMP_SCAN
	      || (gimple_code (up->stmt) == GIMPLE_OMP_TARGET
		  && (gimple_omp_target_kind (up->stmt)
		      == GF_OMP_TARGET_KIND_DATA)))
	    continue;
	  else if (!up->lastprivate_conditional_map)
	    break;
	  tree lhs = get_base_address (gimple_assign_lhs (stmt));
	  /* Look through a MEM_REF around a REFERENCE_TYPE decl.  */
	  if (TREE_CODE (lhs) == MEM_REF
	      && DECL_P (TREE_OPERAND (lhs, 0))
	      && TREE_CODE (TREE_TYPE (TREE_OPERAND (lhs,
						     0))) == REFERENCE_TYPE)
	    lhs = TREE_OPERAND (lhs, 0);
	  if (DECL_P (lhs))
	    if (tree *v = up->lastprivate_conditional_map->get (lhs))
	      {
		tree clauses;
		if (up->combined_into_simd_safelen1)
		  {
		    up = up->outer;
		    if (gimple_code (up->stmt) == GIMPLE_OMP_SCAN)
		      up = up->outer;
		  }
		if (gimple_code (up->stmt) == GIMPLE_OMP_FOR)
		  clauses = gimple_omp_for_clauses (up->stmt);
		else
		  clauses = gimple_omp_sections_clauses (up->stmt);
		/* Find the _condtemp_ clause carrying the iteration
		   counter (OMP_CLAUSE__CONDTEMP__ITER set).  */
		tree c = omp_find_clause (clauses, OMP_CLAUSE__CONDTEMP_);
		if (!OMP_CLAUSE__CONDTEMP__ITER (c))
		  c = omp_find_clause (OMP_CLAUSE_CHAIN (c),
				       OMP_CLAUSE__CONDTEMP_);
		gcc_assert (OMP_CLAUSE__CONDTEMP__ITER (c));
		gimple *g = gimple_build_assign (*v, OMP_CLAUSE_DECL (c));
		gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
	      }
	}
      /* FALLTHRU */

    default:
    regimplify:
      if ((ctx || task_shared_vars)
	  && walk_gimple_op (stmt, lower_omp_regimplify_p,
			     ctx ? NULL : &wi))
	{
	  /* Just remove clobbers, this should happen only if we have
	     "privatized" local addressable variables in SIMD regions,
	     the clobber isn't needed in that case and gimplifying address
	     of the ARRAY_REF into a pointer and creating MEM_REF based
	     clobber would create worse code than we get with the clobber
	     dropped.  */
	  if (gimple_clobber_p (stmt))
	    {
	      gsi_replace (gsi_p, gimple_build_nop (), true);
	      break;
	    }
	  lower_omp_regimplify_operands (ctx, stmt, gsi_p);
	}
      break;
    }
}
13565
13566 static void
lower_omp(gimple_seq * body,omp_context * ctx)13567 lower_omp (gimple_seq *body, omp_context *ctx)
13568 {
13569 location_t saved_location = input_location;
13570 gimple_stmt_iterator gsi;
13571 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
13572 lower_omp_1 (&gsi, ctx);
13573 /* During gimplification, we haven't folded statments inside offloading
13574 or taskreg regions (gimplify.c:maybe_fold_stmt); do that now. */
13575 if (target_nesting_level || taskreg_nesting_level)
13576 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
13577 fold_stmt (&gsi);
13578 input_location = saved_location;
13579 }
13580
/* Main entry point.  Lowers all OMP constructs in the current function
   to runtime calls and explicit data handling.  Always returns 0.  */

static unsigned int
execute_lower_omp (void)
{
  gimple_seq body;
  int i;
  omp_context *ctx;

  /* This pass always runs, to provide PROP_gimple_lomp.
     But often, there is nothing to do.  */
  if (flag_openacc == 0 && flag_openmp == 0
      && flag_openmp_simd == 0)
    return 0;

  all_contexts = splay_tree_new (splay_tree_compare_pointers, 0,
				 delete_omp_context);

  body = gimple_body (current_function_decl);

  /* Build an omp_context for every OMP construct in the body; taskreg
     (parallel/task/host teams) contexts need a second fixup pass before
     lowering can use their record types.  */
  scan_omp (&body, NULL);
  gcc_assert (taskreg_nesting_level == 0);
  FOR_EACH_VEC_ELT (taskreg_contexts, i, ctx)
    finish_taskreg_scan (ctx);
  taskreg_contexts.release ();

  if (all_contexts->root)
    {
      /* Regimplification of task-shared variables may create new
	 temporaries; give it a gimplify context to work in.  */
      if (task_shared_vars)
	push_gimplify_context ();
      lower_omp (&body, NULL);
      if (task_shared_vars)
	pop_gimplify_context (NULL);
    }

  if (all_contexts)
    {
      splay_tree_delete (all_contexts);
      all_contexts = NULL;
    }
  BITMAP_FREE (task_shared_vars);
  BITMAP_FREE (global_nonaddressable_vars);

  /* If current function is a method, remove artificial dummy VAR_DECL created
     for non-static data member privatization, they aren't needed for
     debuginfo nor anything else, have been already replaced everywhere in the
     IL and cause problems with LTO.  */
  if (DECL_ARGUMENTS (current_function_decl)
      && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
      && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
	  == POINTER_TYPE))
    remove_member_access_dummy_vars (DECL_INITIAL (current_function_decl));
  return 0;
}
13635
namespace {

/* Pass descriptor for the "omplower" pass; requires any GIMPLE and
   provides PROP_gimple_lomp / PROP_gimple_lomp_dev.  */

const pass_data pass_data_lower_omp =
{
  GIMPLE_PASS, /* type */
  "omplower", /* name */
  OPTGROUP_OMP, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  PROP_gimple_lomp | PROP_gimple_lomp_dev, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* The OMP lowering pass.  It has no gate: execute_lower_omp itself
   returns early when no OpenMP/OpenACC flags are enabled, so the pass
   still provides its properties.  */

class pass_lower_omp : public gimple_opt_pass
{
public:
  pass_lower_omp (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_lower_omp, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return execute_lower_omp (); }

}; // class pass_lower_omp

} // anon namespace
13664
13665 gimple_opt_pass *
make_pass_lower_omp(gcc::context * ctxt)13666 make_pass_lower_omp (gcc::context *ctxt)
13667 {
13668 return new pass_lower_omp (ctxt);
13669 }
13670
/* The following is a utility to diagnose structured block violations.
   It is not part of the "omplower" pass, as that's invoked too late.  It
   should be invoked by the respective front ends after gimplification.  */

/* Map from each LABEL_DECL to the innermost OMP construct (gimple stmt)
   containing it, or NULL when outside any construct.  Built by
   diagnose_sb_1 (pass 1) and queried by diagnose_sb_2 (pass 2).  */
static splay_tree all_labels;
13676
/* Check for mismatched contexts and generate an error if needed.  Return
   true if an error is detected.  BRANCH_CTX and LABEL_CTX are the
   innermost OMP constructs enclosing the branch at *GSI_P and its target
   label respectively (NULL when outside any construct).  On error the
   offending branch is replaced with a NOP.  */

static bool
diagnose_sb_0 (gimple_stmt_iterator *gsi_p,
	       gimple *branch_ctx, gimple *label_ctx)
{
  gcc_checking_assert (!branch_ctx || is_gimple_omp (branch_ctx));
  gcc_checking_assert (!label_ctx || is_gimple_omp (label_ctx));

  /* Branch and label live in the same construct (or both outside all
     constructs): nothing to diagnose.  */
  if (label_ctx == branch_ctx)
    return false;

  const char* kind = NULL;

  /* Attribute the violation to OpenACC when either end of the branch is
     inside an OpenACC construct, otherwise to OpenMP.  */
  if (flag_openacc)
    {
      if ((branch_ctx && is_gimple_omp_oacc (branch_ctx))
	  || (label_ctx && is_gimple_omp_oacc (label_ctx)))
	{
	  gcc_checking_assert (kind == NULL);
	  kind = "OpenACC";
	}
    }
  if (kind == NULL)
    {
      gcc_checking_assert (flag_openmp || flag_openmp_simd);
      kind = "OpenMP";
    }

  /* Previously we kept track of the label's entire context in diagnose_sb_[12]
     so we could traverse it and issue a correct "exit" or "enter" error
     message upon a structured block violation.

     We built the context by building a list with tree_cons'ing, but there is
     no easy counterpart in gimple tuples.  It seems like far too much work
     for issuing exit/enter error messages.  If someone really misses the
     distinct error message... patches welcome.  */

#if 0
  /* Try to avoid confusing the user by producing an error message
     with correct "exit" or "enter" verbiage.  We prefer "exit"
     unless we can show that LABEL_CTX is nested within BRANCH_CTX.  */
  if (branch_ctx == NULL)
    exit_p = false;
  else
    {
      while (label_ctx)
	{
	  if (TREE_VALUE (label_ctx) == branch_ctx)
	    {
	      exit_p = false;
	      break;
	    }
	  label_ctx = TREE_CHAIN (label_ctx);
	}
    }

  if (exit_p)
    error ("invalid exit from %s structured block", kind);
  else
    error ("invalid entry to %s structured block", kind);
#endif

  /* If it's obvious we have an invalid entry, be specific about the error.  */
  if (branch_ctx == NULL)
    error ("invalid entry to %s structured block", kind);
  else
    {
      /* Otherwise, be vague and lazy, but efficient.  */
      error ("invalid branch to/from %s structured block", kind);
    }

  gsi_replace (gsi_p, gimple_build_nop (), false);
  return true;
}
13753
/* Pass 1: Create a minimal tree of structured blocks, and record
   where each label is found.  Callback for walk_gimple_seq; WI->info
   carries the innermost enclosing OMP construct (NULL at top level).  */

static tree
diagnose_sb_1 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
	       struct walk_stmt_info *wi)
{
  gimple *context = (gimple *) wi->info;
  gimple *inner_context;
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;

  switch (gimple_code (stmt))
    {
    WALK_SUBSTMTS;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_SCAN:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_TARGET:
    case GIMPLE_OMP_TEAMS:
    case GIMPLE_OMP_TASKGROUP:
      /* The minimal context here is just the current OMP construct.  */
      inner_context = stmt;
      wi->info = inner_context;
      walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_OMP_FOR:
      inner_context = stmt;
      wi->info = inner_context;
      /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
	 walk them.  */
      walk_gimple_seq (gimple_omp_for_pre_body (stmt),
		       diagnose_sb_1, NULL, wi);
      walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_LABEL:
      /* Record which construct this label lives in, for pass 2.  */
      splay_tree_insert (all_labels,
			 (splay_tree_key) gimple_label_label (
					    as_a <glabel *> (stmt)),
			 (splay_tree_value) context);
      break;

    default:
      break;
    }

  return NULL_TREE;
}
13814
/* Pass 2: Check each branch and see if its context differs from that of
   the destination label's context.  Callback for walk_gimple_seq_mod;
   WI->info carries the innermost enclosing OMP construct.  Offending
   branches are diagnosed and nullified by diagnose_sb_0.  */

static tree
diagnose_sb_2 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
	       struct walk_stmt_info *wi)
{
  gimple *context = (gimple *) wi->info;
  splay_tree_node n;
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;

  switch (gimple_code (stmt))
    {
    WALK_SUBSTMTS;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_SCAN:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_TARGET:
    case GIMPLE_OMP_TEAMS:
    case GIMPLE_OMP_TASKGROUP:
      /* Recurse into the construct body with STMT as the new context.  */
      wi->info = stmt;
      walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_OMP_FOR:
      wi->info = stmt;
      /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
	 walk them.  */
      walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt),
			   diagnose_sb_2, NULL, wi);
      walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_COND:
      {
	/* Check both edges of the conditional branch.  */
	gcond *cond_stmt = as_a <gcond *> (stmt);
	tree lab = gimple_cond_true_label (cond_stmt);
	if (lab)
	  {
	    n = splay_tree_lookup (all_labels,
				   (splay_tree_key) lab);
	    diagnose_sb_0 (gsi_p, context,
			   n ? (gimple *) n->value : NULL);
	  }
	lab = gimple_cond_false_label (cond_stmt);
	if (lab)
	  {
	    n = splay_tree_lookup (all_labels,
				   (splay_tree_key) lab);
	    diagnose_sb_0 (gsi_p, context,
			   n ? (gimple *) n->value : NULL);
	  }
      }
      break;

    case GIMPLE_GOTO:
      {
	tree lab = gimple_goto_dest (stmt);
	/* Computed gotos cannot be checked against a label context.  */
	if (TREE_CODE (lab) != LABEL_DECL)
	  break;

	n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
	diagnose_sb_0 (gsi_p, context, n ? (gimple *) n->value : NULL);
      }
      break;

    case GIMPLE_SWITCH:
      {
	gswitch *switch_stmt = as_a <gswitch *> (stmt);
	unsigned int i;
	for (i = 0; i < gimple_switch_num_labels (switch_stmt); ++i)
	  {
	    tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
	    n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
	    /* One diagnostic per switch is enough; the statement has
	       already been replaced with a NOP.  */
	    if (n && diagnose_sb_0 (gsi_p, context, (gimple *) n->value))
	      break;
	  }
      }
      break;

    case GIMPLE_RETURN:
      /* A return inside a construct exits it; label context is NULL.  */
      diagnose_sb_0 (gsi_p, context, NULL);
      break;

    default:
      break;
    }

  return NULL_TREE;
}
13916
13917 static unsigned int
diagnose_omp_structured_block_errors(void)13918 diagnose_omp_structured_block_errors (void)
13919 {
13920 struct walk_stmt_info wi;
13921 gimple_seq body = gimple_body (current_function_decl);
13922
13923 all_labels = splay_tree_new (splay_tree_compare_pointers, 0, 0);
13924
13925 memset (&wi, 0, sizeof (wi));
13926 walk_gimple_seq (body, diagnose_sb_1, NULL, &wi);
13927
13928 memset (&wi, 0, sizeof (wi));
13929 wi.want_locations = true;
13930 walk_gimple_seq_mod (&body, diagnose_sb_2, NULL, &wi);
13931
13932 gimple_set_body (current_function_decl, body);
13933
13934 splay_tree_delete (all_labels);
13935 all_labels = NULL;
13936
13937 return 0;
13938 }
13939
namespace {

/* Pass descriptor for the structured block diagnostics pass.  Named with
   a leading '*' so it never appears in -fdump-tree listings.  */

const pass_data pass_data_diagnose_omp_blocks =
{
  GIMPLE_PASS, /* type */
  "*diagnose_omp_blocks", /* name */
  OPTGROUP_OMP, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* Pass wrapper around diagnose_omp_structured_block_errors; only gated
   in when OpenACC or OpenMP (or OpenMP simd) is enabled.  */

class pass_diagnose_omp_blocks : public gimple_opt_pass
{
public:
  pass_diagnose_omp_blocks (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_diagnose_omp_blocks, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
  {
    return flag_openacc || flag_openmp || flag_openmp_simd;
  }
  virtual unsigned int execute (function *)
  {
    return diagnose_omp_structured_block_errors ();
  }

}; // class pass_diagnose_omp_blocks

} // anon namespace
13975
13976 gimple_opt_pass *
make_pass_diagnose_omp_blocks(gcc::context * ctxt)13977 make_pass_diagnose_omp_blocks (gcc::context *ctxt)
13978 {
13979 return new pass_diagnose_omp_blocks (ctxt);
13980 }
13981
13982
13983 #include "gt-omp-low.h"
13984