1 /* Lowering pass for OMP directives. Converts OMP directives into explicit
2 calls to the runtime library (libgomp), data marshalling to implement data
3 sharing and copying clauses, offloading to accelerators, and more.
4
5 Contributed by Diego Novillo <dnovillo@redhat.com>
6
7 Copyright (C) 2005-2020 Free Software Foundation, Inc.
8
9 This file is part of GCC.
10
11 GCC is free software; you can redistribute it and/or modify it under
12 the terms of the GNU General Public License as published by the Free
13 Software Foundation; either version 3, or (at your option) any later
14 version.
15
16 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
17 WARRANTY; without even the implied warranty of MERCHANTABILITY or
18 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
19 for more details.
20
21 You should have received a copy of the GNU General Public License
22 along with GCC; see the file COPYING3. If not see
23 <http://www.gnu.org/licenses/>. */
24
25 #include "config.h"
26 #include "system.h"
27 #include "coretypes.h"
28 #include "backend.h"
29 #include "target.h"
30 #include "tree.h"
31 #include "gimple.h"
32 #include "tree-pass.h"
33 #include "ssa.h"
34 #include "cgraph.h"
35 #include "pretty-print.h"
36 #include "diagnostic-core.h"
37 #include "fold-const.h"
38 #include "stor-layout.h"
39 #include "internal-fn.h"
40 #include "gimple-fold.h"
41 #include "gimplify.h"
42 #include "gimple-iterator.h"
43 #include "gimplify-me.h"
44 #include "gimple-walk.h"
45 #include "tree-iterator.h"
46 #include "tree-inline.h"
47 #include "langhooks.h"
48 #include "tree-dfa.h"
49 #include "tree-ssa.h"
50 #include "splay-tree.h"
51 #include "omp-general.h"
52 #include "omp-low.h"
53 #include "omp-grid.h"
54 #include "gimple-low.h"
55 #include "alloc-pool.h"
56 #include "symbol-summary.h"
57 #include "tree-nested.h"
58 #include "context.h"
59 #include "gomp-constants.h"
60 #include "gimple-pretty-print.h"
61 #include "hsa-common.h"
62 #include "stringpool.h"
63 #include "attribs.h"
64
65 /* Lowering of OMP parallel and workshare constructs proceeds in two
66 phases. The first phase scans the function looking for OMP statements
67 and then for variables that must be replaced to satisfy data sharing
68 clauses. The second phase expands code for the constructs, as well as
69 re-gimplifying things when variables have been replaced with complex
70 expressions.
71
72 Final code generation is done by pass_expand_omp. The flowgraph is
73 scanned for regions which are then moved to a new
74 function, to be invoked by the thread library, or offloaded. */
75
/* Context structure.  Used to store information about each parallel
   directive in the code.  One omp_context is created per OMP construct
   encountered during scanning; contexts form a tree mirroring the
   construct nesting.  */

struct omp_context
{
  /* This field must be at the beginning, as we do "inheritance":  Some
     callback functions for tree-inline.c (e.g., omp_copy_decl)
     receive a copy_body_data pointer that is up-casted to an
     omp_context pointer.  Do not move or reorder this member.  */
  copy_body_data cb;

  /* The tree of contexts corresponding to the encountered constructs.  */
  struct omp_context *outer;
  gimple *stmt;

  /* Map variables to fields in a structure that allows communication
     between sending and receiving threads.  */
  splay_tree field_map;
  tree record_type;
  tree sender_decl;
  tree receiver_decl;

  /* These are used just by task contexts, if task firstprivate fn is
     needed.  srecord_type is used to communicate from the thread
     that encountered the task construct to task firstprivate fn,
     record_type is allocated by GOMP_task, initialized by task firstprivate
     fn and passed to the task body fn.  */
  splay_tree sfield_map;
  tree srecord_type;

  /* A chain of variables to add to the top-level block surrounding the
     construct.  In the case of a parallel, this is in the child function.  */
  tree block_vars;

  /* Label to which GOMP_cancel{,llation_point} and explicit and implicit
     barriers should jump to during omplower pass.  */
  tree cancel_label;

  /* The sibling GIMPLE_OMP_FOR simd with _simt_ clause or NULL
     otherwise.  */
  gimple *simt_stmt;

  /* For task reductions registered in this context, a vector containing
     the length of the private copies block (if constant, otherwise NULL)
     and then offsets (if constant, otherwise NULL) for each entry.  */
  vec<tree> task_reductions;

  /* A hash map from the reduction clauses to the registered array
     elts.  */
  hash_map<tree, unsigned> *task_reduction_map;

  /* And a hash map from the lastprivate(conditional:) variables to their
     corresponding tracking loop iteration variables.  */
  hash_map<tree, tree> *lastprivate_conditional_map;

  /* A tree_list of the reduction clauses in this context.  This is
    only used for checking the consistency of OpenACC reduction
    clauses in scan_omp_for and is not guaranteed to contain a valid
    value outside of this function.  */
  tree local_reduction_clauses;

  /* A tree_list of the reduction clauses in outer contexts.  This is
    only used for checking the consistency of OpenACC reduction
    clauses in scan_omp_for and is not guaranteed to contain a valid
    value outside of this function.  */
  tree outer_reduction_clauses;

  /* Nesting depth of this context.  Used to beautify error messages re
     invalid gotos.  The outermost ctx is depth 1, with depth 0 being
     reserved for the main body of the function.  */
  int depth;

  /* True if this parallel directive is nested within another.  */
  bool is_nested;

  /* True if this construct can be cancelled.  */
  bool cancellable;

  /* True if lower_omp_1 should look up lastprivate conditional in parent
     context.  */
  bool combined_into_simd_safelen1;

  /* True if there is nested scan context with inclusive clause.  */
  bool scan_inclusive;

  /* True if there is nested scan context with exclusive clause.  */
  bool scan_exclusive;

  /* True in the second simd loop of for simd with inscan reductions.  */
  bool for_simd_scan_phase;

  /* True if there is order(concurrent) clause on the construct.  */
  bool order_concurrent;

  /* True if there is bind clause on the construct (i.e. a loop construct).  */
  bool loop_p;
};
173
/* Splay tree mapping each OMP statement to its omp_context (see
   new_omp_context / delete_omp_context).  */
static splay_tree all_contexts;
/* Current nesting depth of parallel/task regions during scanning.  */
static int taskreg_nesting_level;
/* Current nesting depth of target regions during scanning.  */
static int target_nesting_level;
/* Bitmap of DECL_UIDs of variables made addressable only because a task
   needs their address (see use_pointer_for_field).  */
static bitmap task_shared_vars;
/* Bitmap of DECL_UIDs of global variables seen as non-addressable when
   the pass started; kept so later addressability changes (e.g. from
   reduction expansion) don't alter sharing decisions.  See PR91216.  */
static bitmap global_nonaddressable_vars;
/* Contexts of all parallel/task/host-teams regions, collected during
   scanning.  */
static vec<omp_context *> taskreg_contexts;

static void scan_omp (gimple_seq *, omp_context *);
static tree scan_omp_1_op (tree *, int *, void *);

/* Shared switch cases for gimple-walk callbacks: statements whose
   sub-statements should be walked rather than handled directly.  */
#define WALK_SUBSTMTS  \
    case GIMPLE_BIND: \
    case GIMPLE_TRY: \
    case GIMPLE_CATCH: \
    case GIMPLE_EH_FILTER: \
    case GIMPLE_TRANSACTION: \
      /* The sub-statements for these should be walked.  */ \
      *handled_ops_p = false; \
      break;
193
194 /* Return true if CTX corresponds to an OpenACC 'parallel' or 'serial'
195 region. */
196
197 static bool
is_oacc_parallel_or_serial(omp_context * ctx)198 is_oacc_parallel_or_serial (omp_context *ctx)
199 {
200 enum gimple_code outer_type = gimple_code (ctx->stmt);
201 return ((outer_type == GIMPLE_OMP_TARGET)
202 && ((gimple_omp_target_kind (ctx->stmt)
203 == GF_OMP_TARGET_KIND_OACC_PARALLEL)
204 || (gimple_omp_target_kind (ctx->stmt)
205 == GF_OMP_TARGET_KIND_OACC_SERIAL)));
206 }
207
208 /* Return true if CTX corresponds to an oacc kernels region. */
209
210 static bool
is_oacc_kernels(omp_context * ctx)211 is_oacc_kernels (omp_context *ctx)
212 {
213 enum gimple_code outer_type = gimple_code (ctx->stmt);
214 return ((outer_type == GIMPLE_OMP_TARGET)
215 && (gimple_omp_target_kind (ctx->stmt)
216 == GF_OMP_TARGET_KIND_OACC_KERNELS));
217 }
218
219 /* If DECL is the artificial dummy VAR_DECL created for non-static
220 data member privatization, return the underlying "this" parameter,
221 otherwise return NULL. */
222
223 tree
omp_member_access_dummy_var(tree decl)224 omp_member_access_dummy_var (tree decl)
225 {
226 if (!VAR_P (decl)
227 || !DECL_ARTIFICIAL (decl)
228 || !DECL_IGNORED_P (decl)
229 || !DECL_HAS_VALUE_EXPR_P (decl)
230 || !lang_hooks.decls.omp_disregard_value_expr (decl, false))
231 return NULL_TREE;
232
233 tree v = DECL_VALUE_EXPR (decl);
234 if (TREE_CODE (v) != COMPONENT_REF)
235 return NULL_TREE;
236
237 while (1)
238 switch (TREE_CODE (v))
239 {
240 case COMPONENT_REF:
241 case MEM_REF:
242 case INDIRECT_REF:
243 CASE_CONVERT:
244 case POINTER_PLUS_EXPR:
245 v = TREE_OPERAND (v, 0);
246 continue;
247 case PARM_DECL:
248 if (DECL_CONTEXT (v) == current_function_decl
249 && DECL_ARTIFICIAL (v)
250 && TREE_CODE (TREE_TYPE (v)) == POINTER_TYPE)
251 return v;
252 return NULL_TREE;
253 default:
254 return NULL_TREE;
255 }
256 }
257
258 /* Helper for unshare_and_remap, called through walk_tree. */
259
260 static tree
unshare_and_remap_1(tree * tp,int * walk_subtrees,void * data)261 unshare_and_remap_1 (tree *tp, int *walk_subtrees, void *data)
262 {
263 tree *pair = (tree *) data;
264 if (*tp == pair[0])
265 {
266 *tp = unshare_expr (pair[1]);
267 *walk_subtrees = 0;
268 }
269 else if (IS_TYPE_OR_DECL_P (*tp))
270 *walk_subtrees = 0;
271 return NULL_TREE;
272 }
273
274 /* Return unshare_expr (X) with all occurrences of FROM
275 replaced with TO. */
276
277 static tree
unshare_and_remap(tree x,tree from,tree to)278 unshare_and_remap (tree x, tree from, tree to)
279 {
280 tree pair[2] = { from, to };
281 x = unshare_expr (x);
282 walk_tree (&x, unshare_and_remap_1, pair, NULL);
283 return x;
284 }
285
286 /* Convenience function for calling scan_omp_1_op on tree operands. */
287
288 static inline tree
scan_omp_op(tree * tp,omp_context * ctx)289 scan_omp_op (tree *tp, omp_context *ctx)
290 {
291 struct walk_stmt_info wi;
292
293 memset (&wi, 0, sizeof (wi));
294 wi.info = ctx;
295 wi.want_locations = true;
296
297 return walk_tree (tp, scan_omp_1_op, &wi, NULL);
298 }
299
/* Forward declarations for routines defined later in this file.  */
static void lower_omp (gimple_seq *, omp_context *);
static tree lookup_decl_in_outer_ctx (tree, omp_context *);
static tree maybe_lookup_decl_in_outer_ctx (tree, omp_context *);
303
304 /* Return true if CTX is for an omp parallel. */
305
306 static inline bool
is_parallel_ctx(omp_context * ctx)307 is_parallel_ctx (omp_context *ctx)
308 {
309 return gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL;
310 }
311
312
313 /* Return true if CTX is for an omp task. */
314
315 static inline bool
is_task_ctx(omp_context * ctx)316 is_task_ctx (omp_context *ctx)
317 {
318 return gimple_code (ctx->stmt) == GIMPLE_OMP_TASK;
319 }
320
321
322 /* Return true if CTX is for an omp taskloop. */
323
324 static inline bool
is_taskloop_ctx(omp_context * ctx)325 is_taskloop_ctx (omp_context *ctx)
326 {
327 return gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
328 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP;
329 }
330
331
332 /* Return true if CTX is for a host omp teams. */
333
334 static inline bool
is_host_teams_ctx(omp_context * ctx)335 is_host_teams_ctx (omp_context *ctx)
336 {
337 return gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
338 && gimple_omp_teams_host (as_a <gomp_teams *> (ctx->stmt));
339 }
340
341 /* Return true if CTX is for an omp parallel or omp task or host omp teams
342 (the last one is strictly not a task region in OpenMP speak, but we
343 need to treat it similarly). */
344
345 static inline bool
is_taskreg_ctx(omp_context * ctx)346 is_taskreg_ctx (omp_context *ctx)
347 {
348 return is_parallel_ctx (ctx) || is_task_ctx (ctx) || is_host_teams_ctx (ctx);
349 }
350
351 /* Return true if EXPR is variable sized. */
352
353 static inline bool
is_variable_sized(const_tree expr)354 is_variable_sized (const_tree expr)
355 {
356 return !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
357 }
358
359 /* Lookup variables. The "maybe" form
360 allows for the variable form to not have been entered, otherwise we
361 assert that the variable must have been entered. */
362
363 static inline tree
lookup_decl(tree var,omp_context * ctx)364 lookup_decl (tree var, omp_context *ctx)
365 {
366 tree *n = ctx->cb.decl_map->get (var);
367 return *n;
368 }
369
370 static inline tree
maybe_lookup_decl(const_tree var,omp_context * ctx)371 maybe_lookup_decl (const_tree var, omp_context *ctx)
372 {
373 tree *n = ctx->cb.decl_map->get (const_cast<tree> (var));
374 return n ? *n : NULL_TREE;
375 }
376
377 static inline tree
lookup_field(tree var,omp_context * ctx)378 lookup_field (tree var, omp_context *ctx)
379 {
380 splay_tree_node n;
381 n = splay_tree_lookup (ctx->field_map, (splay_tree_key) var);
382 return (tree) n->value;
383 }
384
385 static inline tree
lookup_sfield(splay_tree_key key,omp_context * ctx)386 lookup_sfield (splay_tree_key key, omp_context *ctx)
387 {
388 splay_tree_node n;
389 n = splay_tree_lookup (ctx->sfield_map
390 ? ctx->sfield_map : ctx->field_map, key);
391 return (tree) n->value;
392 }
393
394 static inline tree
lookup_sfield(tree var,omp_context * ctx)395 lookup_sfield (tree var, omp_context *ctx)
396 {
397 return lookup_sfield ((splay_tree_key) var, ctx);
398 }
399
400 static inline tree
maybe_lookup_field(splay_tree_key key,omp_context * ctx)401 maybe_lookup_field (splay_tree_key key, omp_context *ctx)
402 {
403 splay_tree_node n;
404 n = splay_tree_lookup (ctx->field_map, key);
405 return n ? (tree) n->value : NULL_TREE;
406 }
407
408 static inline tree
maybe_lookup_field(tree var,omp_context * ctx)409 maybe_lookup_field (tree var, omp_context *ctx)
410 {
411 return maybe_lookup_field ((splay_tree_key) var, ctx);
412 }
413
/* Return true if DECL should be copied by pointer.  SHARED_CTX is
   the parallel context if DECL is to be shared.  Returning false means
   copy-in/copy-out by value is safe for DECL.  */

static bool
use_pointer_for_field (tree decl, omp_context *shared_ctx)
{
  /* Aggregates and atomic-qualified variables are always passed by
     reference.  */
  if (AGGREGATE_TYPE_P (TREE_TYPE (decl))
      || TYPE_ATOMIC (TREE_TYPE (decl)))
    return true;

  /* We can only use copy-in/copy-out semantics for shared variables
     when we know the value is not accessible from an outer scope.  */
  if (shared_ctx)
    {
      gcc_assert (!is_gimple_omp_oacc (shared_ctx->stmt));

      /* ??? Trivially accessible from anywhere.  But why would we even
	 be passing an address in this case?  Should we simply assert
	 this to be false, or should we have a cleanup pass that removes
	 these from the list of mappings?  */
      if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, shared_ctx)))
	return true;

      /* For variables with DECL_HAS_VALUE_EXPR_P set, we cannot tell
	 without analyzing the expression whether or not its location
	 is accessible to anyone else.  In the case of nested parallel
	 regions it certainly may be.  */
      if (TREE_CODE (decl) != RESULT_DECL && DECL_HAS_VALUE_EXPR_P (decl))
	return true;

      /* Do not use copy-in/copy-out for variables that have their
	 address taken.  */
      if (is_global_var (decl))
	{
	  /* For file scope vars, track whether we've seen them as
	     non-addressable initially and in that case, keep the same
	     answer for the duration of the pass, even when they are made
	     addressable later on e.g. through reduction expansion.  Global
	     variables which weren't addressable before the pass will not
	     have their privatized copies address taken.  See PR91216.  */
	  if (!TREE_ADDRESSABLE (decl))
	    {
	      if (!global_nonaddressable_vars)
		global_nonaddressable_vars = BITMAP_ALLOC (NULL);
	      bitmap_set_bit (global_nonaddressable_vars, DECL_UID (decl));
	    }
	  else if (!global_nonaddressable_vars
		   || !bitmap_bit_p (global_nonaddressable_vars,
				     DECL_UID (decl)))
	    return true;
	}
      else if (TREE_ADDRESSABLE (decl))
	return true;

      /* lower_send_shared_vars only uses copy-in, but not copy-out
	 for these.  */
      if (TREE_READONLY (decl)
	  || ((TREE_CODE (decl) == RESULT_DECL
	       || TREE_CODE (decl) == PARM_DECL)
	      && DECL_BY_REFERENCE (decl)))
	return false;

      /* Disallow copy-in/out in nested parallel if
	 decl is shared in outer parallel, otherwise
	 each thread could store the shared variable
	 in its own copy-in location, making the
	 variable no longer really shared.  */
      if (shared_ctx->is_nested)
	{
	  omp_context *up;

	  /* Find the closest enclosing taskreg or offloaded target
	     context that has a mapping for DECL.  */
	  for (up = shared_ctx->outer; up; up = up->outer)
	    if ((is_taskreg_ctx (up)
		 || (gimple_code (up->stmt) == GIMPLE_OMP_TARGET
		     && is_gimple_omp_offloaded (up->stmt)))
		&& maybe_lookup_decl (decl, up))
	      break;

	  if (up)
	    {
	      tree c;

	      /* Check whether DECL is actually mapped/shared on that
		 enclosing construct via an explicit clause.  */
	      if (gimple_code (up->stmt) == GIMPLE_OMP_TARGET)
		{
		  for (c = gimple_omp_target_clauses (up->stmt);
		       c; c = OMP_CLAUSE_CHAIN (c))
		    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
			&& OMP_CLAUSE_DECL (c) == decl)
		      break;
		}
	      else
		for (c = gimple_omp_taskreg_clauses (up->stmt);
		     c; c = OMP_CLAUSE_CHAIN (c))
		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
		      && OMP_CLAUSE_DECL (c) == decl)
		    break;

	      if (c)
		goto maybe_mark_addressable_and_ret;
	    }
	}

      /* For tasks avoid using copy-in/out.  As tasks can be
	 deferred or executed in different thread, when GOMP_task
	 returns, the task hasn't necessarily terminated.  */
      if (is_task_ctx (shared_ctx))
	{
	  tree outer;
	maybe_mark_addressable_and_ret:
	  /* Passing by reference means the sender will take OUTER's
	     address; record that so everything using it can be
	     regimplified, and make it addressable.  */
	  outer = maybe_lookup_decl_in_outer_ctx (decl, shared_ctx);
	  if (is_gimple_reg (outer) && !omp_member_access_dummy_var (outer))
	    {
	      /* Taking address of OUTER in lower_send_shared_vars
		 might need regimplification of everything that uses the
		 variable.  */
	      if (!task_shared_vars)
		task_shared_vars = BITMAP_ALLOC (NULL);
	      bitmap_set_bit (task_shared_vars, DECL_UID (outer));
	      TREE_ADDRESSABLE (outer) = 1;
	    }
	  return true;
	}
    }

  return false;
}
540
/* Construct a new automatic decl similar to VAR, named NAME and of type
   TYPE, and chain it onto CTX->BLOCK_VARS.  */

static tree
omp_copy_decl_2 (tree var, tree name, tree type, omp_context *ctx)
{
  tree copy = copy_var_decl (var, name, type);

  DECL_CONTEXT (copy) = current_function_decl;
  DECL_CHAIN (copy) = ctx->block_vars;
  /* If VAR is listed in task_shared_vars, it means it wasn't
     originally addressable and is just because task needs to take
     its address.  But we don't need to take address of privatizations
     from that var.  */
  if (TREE_ADDRESSABLE (var)
      && ((task_shared_vars
	   && bitmap_bit_p (task_shared_vars, DECL_UID (var)))
	  || (global_nonaddressable_vars
	      && bitmap_bit_p (global_nonaddressable_vars, DECL_UID (var)))))
    TREE_ADDRESSABLE (copy) = 0;
  ctx->block_vars = copy;

  return copy;
}
564
565 static tree
omp_copy_decl_1(tree var,omp_context * ctx)566 omp_copy_decl_1 (tree var, omp_context *ctx)
567 {
568 return omp_copy_decl_2 (var, DECL_NAME (var), TREE_TYPE (var), ctx);
569 }
570
571 /* Build COMPONENT_REF and set TREE_THIS_VOLATILE and TREE_READONLY on it
572 as appropriate. */
573 static tree
omp_build_component_ref(tree obj,tree field)574 omp_build_component_ref (tree obj, tree field)
575 {
576 tree ret = build3 (COMPONENT_REF, TREE_TYPE (field), obj, field, NULL);
577 if (TREE_THIS_VOLATILE (field))
578 TREE_THIS_VOLATILE (ret) |= 1;
579 if (TREE_READONLY (field))
580 TREE_READONLY (ret) |= 1;
581 return ret;
582 }
583
584 /* Build tree nodes to access the field for VAR on the receiver side. */
585
586 static tree
build_receiver_ref(tree var,bool by_ref,omp_context * ctx)587 build_receiver_ref (tree var, bool by_ref, omp_context *ctx)
588 {
589 tree x, field = lookup_field (var, ctx);
590
591 /* If the receiver record type was remapped in the child function,
592 remap the field into the new record type. */
593 x = maybe_lookup_field (field, ctx);
594 if (x != NULL)
595 field = x;
596
597 x = build_simple_mem_ref (ctx->receiver_decl);
598 TREE_THIS_NOTRAP (x) = 1;
599 x = omp_build_component_ref (x, field);
600 if (by_ref)
601 {
602 x = build_simple_mem_ref (x);
603 TREE_THIS_NOTRAP (x) = 1;
604 }
605
606 return x;
607 }
608
/* Build tree nodes to access VAR in the scope outer to CTX.  In the case
   of a parallel, this is a component reference; for workshare constructs
   this is some variable.  CODE, if not OMP_CLAUSE_ERROR, is the clause
   kind for which the outer reference is being built.  */

static tree
build_outer_var_ref (tree var, omp_context *ctx,
		     enum omp_clause_code code = OMP_CLAUSE_ERROR)
{
  tree x;
  omp_context *outer = ctx->outer;
  /* Taskgroups are transparent for data sharing; skip over them.  */
  while (outer && gimple_code (outer->stmt) == GIMPLE_OMP_TASKGROUP)
    outer = outer->outer;

  if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
    x = var;
  else if (is_variable_sized (var))
    {
      /* For variable-sized decls, work from the pointer stored in
	 DECL_VALUE_EXPR and dereference the outer reference to it.  */
      x = TREE_OPERAND (DECL_VALUE_EXPR (var), 0);
      x = build_outer_var_ref (x, ctx, code);
      x = build_simple_mem_ref (x);
    }
  else if (is_taskreg_ctx (ctx))
    {
      bool by_ref = use_pointer_for_field (var, NULL);
      x = build_receiver_ref (var, by_ref, ctx);
    }
  else if ((gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
	    && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
	   || ctx->loop_p
	   || (code == OMP_CLAUSE_PRIVATE
	       && (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
		   || gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS
		   || gimple_code (ctx->stmt) == GIMPLE_OMP_SINGLE)))
    {
      /* #pragma omp simd isn't a worksharing construct, and can reference
	 even private vars in its linear etc. clauses.
	 Similarly for OMP_CLAUSE_PRIVATE with outer ref, that can refer
	 to private vars in all worksharing constructs.  */
      x = NULL_TREE;
      if (outer && is_taskreg_ctx (outer))
	x = lookup_decl (var, outer);
      else if (outer)
	x = maybe_lookup_decl_in_outer_ctx (var, ctx);
      if (x == NULL_TREE)
	x = var;
    }
  else if (code == OMP_CLAUSE_LASTPRIVATE && is_taskloop_ctx (ctx))
    {
      gcc_assert (outer);
      /* Taskloop fields are keyed off &DECL_UID (see install_var_field
	 with bit 8 set in MASK).  */
      splay_tree_node n
	= splay_tree_lookup (outer->field_map,
			     (splay_tree_key) &DECL_UID (var));
      if (n == NULL)
	{
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, outer)))
	    x = var;
	  else
	    x = lookup_decl (var, outer);
	}
      else
	{
	  tree field = (tree) n->value;
	  /* If the receiver record type was remapped in the child function,
	     remap the field into the new record type.  */
	  x = maybe_lookup_field (field, outer);
	  if (x != NULL)
	    field = x;

	  x = build_simple_mem_ref (outer->receiver_decl);
	  x = omp_build_component_ref (x, field);
	  if (use_pointer_for_field (var, outer))
	    x = build_simple_mem_ref (x);
	}
    }
  else if (outer)
    {
      /* Grid body regions are transparent; look one level further out.  */
      if (gimple_code (outer->stmt) == GIMPLE_OMP_GRID_BODY)
	{
	  outer = outer->outer;
	  gcc_assert (outer
		      && gimple_code (outer->stmt) != GIMPLE_OMP_GRID_BODY);
	}
      x = lookup_decl (var, outer);
    }
  else if (omp_is_reference (var))
    /* This can happen with orphaned constructs.  If var is reference, it is
       possible it is shared and as such valid.  */
    x = var;
  else if (omp_member_access_dummy_var (var))
    x = var;
  else
    gcc_unreachable ();

  if (x == var)
    {
      /* For member-access dummies, substitute the remapped "this"
	 parameter into the decl's value expression.  */
      tree t = omp_member_access_dummy_var (var);
      if (t)
	{
	  x = DECL_VALUE_EXPR (var);
	  tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
	  if (o != t)
	    x = unshare_and_remap (x, t, o);
	  else
	    x = unshare_expr (x);
	}
    }

  if (omp_is_reference (var))
    x = build_simple_mem_ref (x);

  return x;
}
721
722 /* Build tree nodes to access the field for VAR on the sender side. */
723
724 static tree
build_sender_ref(splay_tree_key key,omp_context * ctx)725 build_sender_ref (splay_tree_key key, omp_context *ctx)
726 {
727 tree field = lookup_sfield (key, ctx);
728 return omp_build_component_ref (ctx->sender_decl, field);
729 }
730
731 static tree
build_sender_ref(tree var,omp_context * ctx)732 build_sender_ref (tree var, omp_context *ctx)
733 {
734 return build_sender_ref ((splay_tree_key) var, ctx);
735 }
736
/* Add a new field for VAR inside the structure CTX->SENDER_DECL and
   record it in CTX's field maps.  BY_REF means the field holds a pointer
   to VAR rather than VAR's value.  MASK is a bitmask of:
     1  - install the field into record_type / field_map.
     2  - install the field into srecord_type / sfield_map.
     4  - make the field a pointer to a pointer to VAR's array type.
     8  - key the map entry off &DECL_UID (VAR) instead of VAR itself.
     16 - key the map entry off &DECL_NAME (VAR) and give the field the
	  type returned by the omp_array_data language hook.  */

static void
install_var_field (tree var, bool by_ref, int mask, omp_context *ctx)
{
  tree field, type, sfield = NULL_TREE;
  splay_tree_key key = (splay_tree_key) var;

  if ((mask & 16) != 0)
    {
      key = (splay_tree_key) &DECL_NAME (var);
      gcc_checking_assert (key != (splay_tree_key) var);
    }
  if ((mask & 8) != 0)
    {
      key = (splay_tree_key) &DECL_UID (var);
      gcc_checking_assert (key != (splay_tree_key) var);
    }
  /* A field must not be installed twice under the same key.  */
  gcc_assert ((mask & 1) == 0
	      || !splay_tree_lookup (ctx->field_map, key));
  gcc_assert ((mask & 2) == 0 || !ctx->sfield_map
	      || !splay_tree_lookup (ctx->sfield_map, key));
  gcc_assert ((mask & 3) == 3
	      || !is_gimple_omp_oacc (ctx->stmt));

  type = TREE_TYPE (var);
  if ((mask & 16) != 0)
    type = lang_hooks.decls.omp_array_data (var, true);

  /* Prevent redeclaring the var in the split-off function with a restrict
     pointer type.  Note that we only clear type itself, restrict qualifiers in
     the pointed-to type will be ignored by points-to analysis.  */
  if (POINTER_TYPE_P (type)
      && TYPE_RESTRICT (type))
    type = build_qualified_type (type, TYPE_QUALS (type) & ~TYPE_QUAL_RESTRICT);

  if (mask & 4)
    {
      gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
      type = build_pointer_type (build_pointer_type (type));
    }
  else if (by_ref)
    type = build_pointer_type (type);
  else if ((mask & 3) == 1 && omp_is_reference (var))
    type = TREE_TYPE (type);

  field = build_decl (DECL_SOURCE_LOCATION (var),
		      FIELD_DECL, DECL_NAME (var), type);

  /* Remember what variable this field was created for.  This does have a
     side effect of making dwarf2out ignore this member, so for helpful
     debugging we clear it later in delete_omp_context.  */
  DECL_ABSTRACT_ORIGIN (field) = var;
  if ((mask & 16) == 0 && type == TREE_TYPE (var))
    {
      SET_DECL_ALIGN (field, DECL_ALIGN (var));
      DECL_USER_ALIGN (field) = DECL_USER_ALIGN (var);
      TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (var);
    }
  else
    SET_DECL_ALIGN (field, TYPE_ALIGN (type));

  if ((mask & 3) == 3)
    {
      insert_field_into_struct (ctx->record_type, field);
      if (ctx->srecord_type)
	{
	  sfield = build_decl (DECL_SOURCE_LOCATION (var),
			       FIELD_DECL, DECL_NAME (var), type);
	  DECL_ABSTRACT_ORIGIN (sfield) = var;
	  SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
	  DECL_USER_ALIGN (sfield) = DECL_USER_ALIGN (field);
	  TREE_THIS_VOLATILE (sfield) = TREE_THIS_VOLATILE (field);
	  insert_field_into_struct (ctx->srecord_type, sfield);
	}
    }
  else
    {
      if (ctx->srecord_type == NULL_TREE)
	{
	  tree t;

	  /* First srecord-only field: create srecord_type lazily and
	     mirror all existing record_type fields into it.  */
	  ctx->srecord_type = lang_hooks.types.make_type (RECORD_TYPE);
	  ctx->sfield_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
	  for (t = TYPE_FIELDS (ctx->record_type); t ; t = TREE_CHAIN (t))
	    {
	      sfield = build_decl (DECL_SOURCE_LOCATION (t),
				   FIELD_DECL, DECL_NAME (t), TREE_TYPE (t));
	      DECL_ABSTRACT_ORIGIN (sfield) = DECL_ABSTRACT_ORIGIN (t);
	      insert_field_into_struct (ctx->srecord_type, sfield);
	      splay_tree_insert (ctx->sfield_map,
				 (splay_tree_key) DECL_ABSTRACT_ORIGIN (t),
				 (splay_tree_value) sfield);
	    }
	}
      sfield = field;
      insert_field_into_struct ((mask & 1) ? ctx->record_type
				: ctx->srecord_type, field);
    }

  if (mask & 1)
    splay_tree_insert (ctx->field_map, key, (splay_tree_value) field);
  if ((mask & 2) && ctx->sfield_map)
    splay_tree_insert (ctx->sfield_map, key, (splay_tree_value) sfield);
}
843
844 static tree
install_var_local(tree var,omp_context * ctx)845 install_var_local (tree var, omp_context *ctx)
846 {
847 tree new_var = omp_copy_decl_1 (var, ctx);
848 insert_decl_map (&ctx->cb, var, new_var);
849 return new_var;
850 }
851
/* Adjust the replacement for DECL in CTX for the new context.  This means
   copying the DECL_VALUE_EXPR, and fixing up the type.  PRIVATE_DEBUG is
   true when the decl is a debug-only private copy, in which case the
   value expression is remapped even for constant-sized decls.  */

static void
fixup_remapped_decl (tree decl, omp_context *ctx, bool private_debug)
{
  tree new_decl, size;

  new_decl = lookup_decl (decl, ctx);

  TREE_TYPE (new_decl) = remap_type (TREE_TYPE (decl), &ctx->cb);

  if ((!TREE_CONSTANT (DECL_SIZE (new_decl)) || private_debug)
      && DECL_HAS_VALUE_EXPR_P (decl))
    {
      /* Remap the value expression into the new context.  */
      tree ve = DECL_VALUE_EXPR (decl);
      walk_tree (&ve, copy_tree_body_r, &ctx->cb, NULL);
      SET_DECL_VALUE_EXPR (new_decl, ve);
      DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
    }

  if (!TREE_CONSTANT (DECL_SIZE (new_decl)))
    {
      /* Variable-sized decl: remap its size expressions, falling back to
	 the (remapped) type's sizes if remapping failed.  */
      size = remap_decl (DECL_SIZE (decl), &ctx->cb);
      if (size == error_mark_node)
	size = TYPE_SIZE (TREE_TYPE (new_decl));
      DECL_SIZE (new_decl) = size;

      size = remap_decl (DECL_SIZE_UNIT (decl), &ctx->cb);
      if (size == error_mark_node)
	size = TYPE_SIZE_UNIT (TREE_TYPE (new_decl));
      DECL_SIZE_UNIT (new_decl) = size;
    }
}
886
/* The callback for remap_decl.  Search all containing contexts for a
   mapping of the variable; this avoids having to duplicate the splay
   tree ahead of time.  We know a mapping doesn't already exist in the
   given context.  Create new mappings to implement default semantics.  */

static tree
omp_copy_decl (tree var, copy_body_data *cb)
{
  /* CB is really the omp_context; see the comment on omp_context::cb.  */
  omp_context *ctx = (omp_context *) cb;
  tree new_var;

  if (TREE_CODE (var) == LABEL_DECL)
    {
      /* Labels that can be reached non-locally must not be copied.  */
      if (FORCED_LABEL (var) || DECL_NONLOCAL (var))
	return var;
      new_var = create_artificial_label (DECL_SOURCE_LOCATION (var));
      DECL_CONTEXT (new_var) = current_function_decl;
      insert_decl_map (&ctx->cb, var, new_var);
      return new_var;
    }

  /* Walk outward until we reach a parallel/task/host-teams context,
     returning any mapping found along the way.  */
  while (!is_taskreg_ctx (ctx))
    {
      ctx = ctx->outer;
      if (ctx == NULL)
	return var;
      new_var = maybe_lookup_decl (var, ctx);
      if (new_var)
	return new_var;
    }

  if (is_global_var (var) || decl_function_context (var) != ctx->cb.src_fn)
    return var;

  /* No mapping and not global: the caller must have handled this decl
     via the default data-sharing rules already.  */
  return error_mark_node;
}
923
/* Create a new context, with OUTER_CTX being the surrounding context.
   The new context is registered in ALL_CONTEXTS keyed by STMT.  */

static omp_context *
new_omp_context (gimple *stmt, omp_context *outer_ctx)
{
  omp_context *ctx = XCNEW (omp_context);

  splay_tree_insert (all_contexts, (splay_tree_key) stmt,
		     (splay_tree_value) ctx);
  ctx->stmt = stmt;

  if (outer_ctx)
    {
      /* Inherit the copy_body_data from the enclosing context.  */
      ctx->outer = outer_ctx;
      ctx->cb = outer_ctx->cb;
      ctx->cb.block = NULL;
      ctx->depth = outer_ctx->depth + 1;
    }
  else
    {
      /* Outermost context: initialize the copy_body_data from scratch,
	 remapping within the current function.  */
      ctx->cb.src_fn = current_function_decl;
      ctx->cb.dst_fn = current_function_decl;
      ctx->cb.src_node = cgraph_node::get (current_function_decl);
      gcc_checking_assert (ctx->cb.src_node);
      ctx->cb.dst_node = ctx->cb.src_node;
      ctx->cb.src_cfun = cfun;
      ctx->cb.copy_decl = omp_copy_decl;
      ctx->cb.eh_lp_nr = 0;
      ctx->cb.transform_call_graph_edges = CB_CGE_MOVE;
      ctx->cb.adjust_array_error_bounds = true;
      ctx->cb.dont_remap_vla_if_no_change = true;
      ctx->depth = 1;
    }

  ctx->cb.decl_map = new hash_map<tree, tree>;

  return ctx;
}
962
963 static gimple_seq maybe_catch_exception (gimple_seq);
964
/* Finalize task copyfn.  Gimplify the copy function of TASK_STMT (if
   any), wrap it in an EH region when needed, and register it with the
   callgraph.  */

static void
finalize_task_copyfn (gomp_task *task_stmt)
{
  struct function *child_cfun;
  tree child_fn;
  gimple_seq seq = NULL, new_seq;
  gbind *bind;

  child_fn = gimple_omp_task_copy_fn (task_stmt);
  if (child_fn == NULL_TREE)
    return;

  child_cfun = DECL_STRUCT_FUNCTION (child_fn);
  DECL_STRUCT_FUNCTION (child_fn)->curr_properties = cfun->curr_properties;

  /* Gimplification must happen with the child function's cfun pushed.  */
  push_cfun (child_cfun);
  bind = gimplify_body (child_fn, false);
  gimple_seq_add_stmt (&seq, bind);
  new_seq = maybe_catch_exception (seq);
  if (new_seq != seq)
    {
      /* maybe_catch_exception wrapped the body; rebuild the outer bind.  */
      bind = gimple_build_bind (NULL, new_seq, NULL);
      seq = NULL;
      gimple_seq_add_stmt (&seq, bind);
    }
  gimple_set_body (child_fn, seq);
  pop_cfun ();

  /* Inform the callgraph about the new function.  */
  cgraph_node *node = cgraph_node::get_create (child_fn);
  node->parallelized_function = 1;
  cgraph_node::add_new_function (child_fn, false);
}
1000
1001 /* Destroy a omp_context data structures. Called through the splay tree
1002 value delete callback. */
1003
1004 static void
delete_omp_context(splay_tree_value value)1005 delete_omp_context (splay_tree_value value)
1006 {
1007 omp_context *ctx = (omp_context *) value;
1008
1009 delete ctx->cb.decl_map;
1010
1011 if (ctx->field_map)
1012 splay_tree_delete (ctx->field_map);
1013 if (ctx->sfield_map)
1014 splay_tree_delete (ctx->sfield_map);
1015
1016 /* We hijacked DECL_ABSTRACT_ORIGIN earlier. We need to clear it before
1017 it produces corrupt debug information. */
1018 if (ctx->record_type)
1019 {
1020 tree t;
1021 for (t = TYPE_FIELDS (ctx->record_type); t ; t = DECL_CHAIN (t))
1022 DECL_ABSTRACT_ORIGIN (t) = NULL;
1023 }
1024 if (ctx->srecord_type)
1025 {
1026 tree t;
1027 for (t = TYPE_FIELDS (ctx->srecord_type); t ; t = DECL_CHAIN (t))
1028 DECL_ABSTRACT_ORIGIN (t) = NULL;
1029 }
1030
1031 if (is_task_ctx (ctx))
1032 finalize_task_copyfn (as_a <gomp_task *> (ctx->stmt));
1033
1034 if (ctx->task_reduction_map)
1035 {
1036 ctx->task_reductions.release ();
1037 delete ctx->task_reduction_map;
1038 }
1039
1040 delete ctx->lastprivate_conditional_map;
1041
1042 XDELETE (ctx);
1043 }
1044
1045 /* Fix up RECEIVER_DECL with a type that has been remapped to the child
1046 context. */
1047
/* Fix up RECEIVER_DECL with a type that has been remapped to the child
   context.  If the record contains variably modified fields, rebuild a
   fresh record with each field's type remapped into the child.  */

static void
fixup_child_record_type (omp_context *ctx)
{
  tree f, type = ctx->record_type;

  /* Nothing to fix up when there is no receiver parameter.  */
  if (!ctx->receiver_decl)
    return;
  /* ??? It isn't sufficient to just call remap_type here, because
     variably_modified_type_p doesn't work the way we expect for
     record types.  Testing each field for whether it needs remapping
     and creating a new record by hand works, however.  */
  for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
    if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
      break;
  if (f)
    {
      /* At least one field needs remapping: build a brand-new RECORD_TYPE
	 with the same name, copying each field with a remapped type.  */
      tree name, new_fields = NULL;

      type = lang_hooks.types.make_type (RECORD_TYPE);
      name = DECL_NAME (TYPE_NAME (ctx->record_type));
      name = build_decl (DECL_SOURCE_LOCATION (ctx->receiver_decl),
			 TYPE_DECL, name, type);
      TYPE_NAME (type) = name;

      for (f = TYPE_FIELDS (ctx->record_type); f ; f = DECL_CHAIN (f))
	{
	  tree new_f = copy_node (f);
	  DECL_CONTEXT (new_f) = type;
	  TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &ctx->cb);
	  DECL_CHAIN (new_f) = new_fields;
	  /* Size and offset expressions may reference decls that must be
	     remapped into the child as well.  */
	  walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &ctx->cb, NULL);
	  walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r,
		     &ctx->cb, NULL);
	  walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
		     &ctx->cb, NULL);
	  new_fields = new_f;

	  /* Arrange to be able to look up the receiver field
	     given the sender field.  */
	  splay_tree_insert (ctx->field_map, (splay_tree_key) f,
			     (splay_tree_value) new_f);
	}
      /* Fields were chained in reverse; restore source order.  */
      TYPE_FIELDS (type) = nreverse (new_fields);
      layout_type (type);
    }

  /* In a target region we never modify any of the pointers in *.omp_data_i,
     so attempt to help the optimizers.  */
  if (is_gimple_omp_offloaded (ctx->stmt))
    type = build_qualified_type (type, TYPE_QUAL_CONST);

  TREE_TYPE (ctx->receiver_decl)
    = build_qualified_type (build_reference_type (type), TYPE_QUAL_RESTRICT);
}
1102
1103 /* Instantiate decls as necessary in CTX to satisfy the data sharing
1104 specified by CLAUSES. */
1105
/* Instantiate decls as necessary in CTX to satisfy the data sharing
   specified by CLAUSES.  The clause chain is walked twice: the first
   pass installs record fields and local copies, the second fixes up
   decls that were remapped by the first; finally, any reduction /
   lastprivate / linear GIMPLE sequences collected along the way are
   scanned recursively.  */

static void
scan_sharing_clauses (tree clauses, omp_context *ctx)
{
  tree c, decl;
  bool scan_array_reductions = false;

  /* First pass: create sender/receiver record fields and local copies.  */
  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      bool by_ref;

      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_PRIVATE:
	  decl = OMP_CLAUSE_DECL (c);
	  if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
	    goto do_private;
	  else if (!is_variable_sized (decl))
	    install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_SHARED:
	  decl = OMP_CLAUSE_DECL (c);
	  /* Ignore shared directives in teams construct inside of
	     target construct.  */
	  if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
	      && !is_host_teams_ctx (ctx))
	    {
	      /* Global variables don't need to be copied,
		 the receiver side will use them directly.  */
	      tree odecl = maybe_lookup_decl_in_outer_ctx (decl, ctx);
	      if (is_global_var (odecl))
		break;
	      insert_decl_map (&ctx->cb, decl, odecl);
	      break;
	    }
	  gcc_assert (is_taskreg_ctx (ctx));
	  gcc_assert (!COMPLETE_TYPE_P (TREE_TYPE (decl))
		      || !is_variable_sized (decl));
	  /* Global variables don't need to be copied,
	     the receiver side will use them directly.  */
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
	    break;
	  if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	    {
	      use_pointer_for_field (decl, ctx);
	      break;
	    }
	  by_ref = use_pointer_for_field (decl, NULL);
	  if ((! TREE_READONLY (decl) && !OMP_CLAUSE_SHARED_READONLY (c))
	      || TREE_ADDRESSABLE (decl)
	      || by_ref
	      || omp_is_reference (decl))
	    {
	      by_ref = use_pointer_for_field (decl, ctx);
	      install_var_field (decl, by_ref, 3, ctx);
	      install_var_local (decl, ctx);
	      break;
	    }
	  /* We don't need to copy const scalar vars back.  */
	  OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_FIRSTPRIVATE);
	  goto do_private;

	case OMP_CLAUSE_REDUCTION:
	  /* Collect 'reduction' clauses on OpenACC compute construct.  */
	  if (is_gimple_omp_oacc (ctx->stmt)
	      && is_gimple_omp_offloaded (ctx->stmt))
	    {
	      /* No 'reduction' clauses on OpenACC 'kernels'.  */
	      gcc_checking_assert (!is_oacc_kernels (ctx));

	      ctx->local_reduction_clauses
		= tree_cons (NULL, c, ctx->local_reduction_clauses);
	    }
	  /* FALLTHRU */

	case OMP_CLAUSE_IN_REDUCTION:
	  decl = OMP_CLAUSE_DECL (c);
	  if (TREE_CODE (decl) == MEM_REF)
	    {
	      /* Array-section reduction: install the underlying base decl
		 rather than the MEM_REF itself.  */
	      tree t = TREE_OPERAND (decl, 0);
	      if (TREE_CODE (t) == POINTER_PLUS_EXPR)
		t = TREE_OPERAND (t, 0);
	      if (TREE_CODE (t) == INDIRECT_REF
		  || TREE_CODE (t) == ADDR_EXPR)
		t = TREE_OPERAND (t, 0);
	      install_var_local (t, ctx);
	      if (is_taskreg_ctx (ctx)
		  && (!is_global_var (maybe_lookup_decl_in_outer_ctx (t, ctx))
		      || (is_task_ctx (ctx)
			  && (TREE_CODE (TREE_TYPE (t)) == POINTER_TYPE
			      || (TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE
				  && (TREE_CODE (TREE_TYPE (TREE_TYPE (t)))
				      == POINTER_TYPE)))))
		  && !is_variable_sized (t)
		  && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
		      || (!OMP_CLAUSE_REDUCTION_TASK (c)
			  && !is_task_ctx (ctx))))
		{
		  by_ref = use_pointer_for_field (t, NULL);
		  if (is_task_ctx (ctx)
		      && TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE
		      && TREE_CODE (TREE_TYPE (TREE_TYPE (t))) == POINTER_TYPE)
		    {
		      install_var_field (t, false, 1, ctx);
		      install_var_field (t, by_ref, 2, ctx);
		    }
		  else
		    install_var_field (t, by_ref, 3, ctx);
		}
	      break;
	    }
	  if (is_task_ctx (ctx)
	      || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
		  && OMP_CLAUSE_REDUCTION_TASK (c)
		  && is_parallel_ctx (ctx)))
	    {
	      /* Global variables don't need to be copied,
		 the receiver side will use them directly.  */
	      if (!is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
		{
		  by_ref = use_pointer_for_field (decl, ctx);
		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION)
		    install_var_field (decl, by_ref, 3, ctx);
		}
	      install_var_local (decl, ctx);
	      break;
	    }
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	      && OMP_CLAUSE_REDUCTION_TASK (c))
	    {
	      install_var_local (decl, ctx);
	      break;
	    }
	  goto do_private;

	case OMP_CLAUSE_LASTPRIVATE:
	  /* Let the corresponding firstprivate clause create
	     the variable.  */
	  if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
	    break;
	  /* FALLTHRU */

	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_LINEAR:
	  decl = OMP_CLAUSE_DECL (c);
	do_private:
	  if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
	       || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
	      && is_gimple_omp_offloaded (ctx->stmt))
	    {
	      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
		install_var_field (decl, !omp_is_reference (decl), 3, ctx);
	      else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
		install_var_field (decl, true, 3, ctx);
	      else
		install_var_field (decl, false, 3, ctx);
	    }
	  if (is_variable_sized (decl))
	    {
	      /* Variable-sized decls get just the pointer field here;
		 their DECL_VALUE_EXPR base is handled in the second pass.  */
	      if (is_task_ctx (ctx))
		install_var_field (decl, false, 1, ctx);
	      break;
	    }
	  else if (is_taskreg_ctx (ctx))
	    {
	      bool global
		= is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx));
	      by_ref = use_pointer_for_field (decl, NULL);

	      if (is_task_ctx (ctx)
		  && (global || by_ref || omp_is_reference (decl)))
		{
		  install_var_field (decl, false, 1, ctx);
		  if (!global)
		    install_var_field (decl, by_ref, 2, ctx);
		}
	      else if (!global)
		install_var_field (decl, by_ref, 3, ctx);
	    }
	  install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_USE_DEVICE_PTR:
	case OMP_CLAUSE_USE_DEVICE_ADDR:
	  decl = OMP_CLAUSE_DECL (c);

	  /* Fortran array descriptors.  */
	  if (lang_hooks.decls.omp_array_data (decl, true))
	    install_var_field (decl, false, 19, ctx);
	  else if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
		    && !omp_is_reference (decl)
		    && !omp_is_allocatable_or_ptr (decl))
		   || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	    install_var_field (decl, true, 11, ctx);
	  else
	    install_var_field (decl, false, 11, ctx);
	  if (DECL_SIZE (decl)
	      && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
	    {
	      /* Variable-sized decl: also install the base decl hidden
		 behind its DECL_VALUE_EXPR.  */
	      tree decl2 = DECL_VALUE_EXPR (decl);
	      gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
	      decl2 = TREE_OPERAND (decl2, 0);
	      gcc_assert (DECL_P (decl2));
	      install_var_local (decl2, ctx);
	    }
	  install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_IS_DEVICE_PTR:
	  decl = OMP_CLAUSE_DECL (c);
	  goto do_private;

	case OMP_CLAUSE__LOOPTEMP_:
	case OMP_CLAUSE__REDUCTEMP_:
	  gcc_assert (is_taskreg_ctx (ctx));
	  decl = OMP_CLAUSE_DECL (c);
	  install_var_field (decl, false, 3, ctx);
	  install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_COPYIN:
	  decl = OMP_CLAUSE_DECL (c);
	  by_ref = use_pointer_for_field (decl, NULL);
	  install_var_field (decl, by_ref, 3, ctx);
	  break;

	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_DIST_SCHEDULE:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE_PRIORITY:
	case OMP_CLAUSE_GRAINSIZE:
	case OMP_CLAUSE_NUM_TASKS:
	case OMP_CLAUSE_NUM_GANGS:
	case OMP_CLAUSE_NUM_WORKERS:
	case OMP_CLAUSE_VECTOR_LENGTH:
	  /* These carry an expression operand that must be scanned in the
	     enclosing context, where it is evaluated.  */
	  if (ctx->outer)
	    scan_omp_op (&OMP_CLAUSE_OPERAND (c, 0), ctx->outer);
	  break;

	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	case OMP_CLAUSE_MAP:
	  if (ctx->outer)
	    scan_omp_op (&OMP_CLAUSE_SIZE (c), ctx->outer);
	  decl = OMP_CLAUSE_DECL (c);
	  /* Global variables with "omp declare target" attribute
	     don't need to be copied, the receiver side will use them
	     directly.  However, global variables with "omp declare target link"
	     attribute need to be copied.  Or when ALWAYS modifier is used.  */
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && DECL_P (decl)
	      && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
		   && (OMP_CLAUSE_MAP_KIND (c)
		       != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
		  || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	      && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_TO
	      && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_FROM
	      && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_TOFROM
	      && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
	      && varpool_node::get_create (decl)->offloadable
	      && !lookup_attribute ("omp declare target link",
				    DECL_ATTRIBUTES (decl)))
	    break;
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER)
	    {
	      /* Ignore GOMP_MAP_POINTER kind for arrays in regions that are
		 not offloaded; there is nothing to map for those.  */
	      if (!is_gimple_omp_offloaded (ctx->stmt)
		  && !POINTER_TYPE_P (TREE_TYPE (decl))
		  && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
		break;
	    }
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
		  || (OMP_CLAUSE_MAP_KIND (c)
		      == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
	    {
	      /* Firstprivate pointers/references only need local copies;
		 no record field is created.  */
	      if (TREE_CODE (decl) == COMPONENT_REF
		  || (TREE_CODE (decl) == INDIRECT_REF
		      && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
		      && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
			  == REFERENCE_TYPE)))
		break;
	      if (DECL_SIZE (decl)
		  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  install_var_local (decl2, ctx);
		}
	      install_var_local (decl, ctx);
	      break;
	    }
	  if (DECL_P (decl))
	    {
	      if (DECL_SIZE (decl)
		  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  install_var_field (decl2, true, 3, ctx);
		  install_var_local (decl2, ctx);
		  install_var_local (decl, ctx);
		}
	      else
		{
		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
		      && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
		      && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
		      && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
		    install_var_field (decl, true, 7, ctx);
		  else
		    install_var_field (decl, true, 3, ctx);
		  if (is_gimple_omp_offloaded (ctx->stmt)
		      && !OMP_CLAUSE_MAP_IN_REDUCTION (c))
		    install_var_local (decl, ctx);
		}
	    }
	  else
	    {
	      /* Non-decl map operand (e.g. an array section).  If it is
		 immediately followed by a zero-sized GOMP_MAP_POINTER of
		 its base, mark both as a zero-bias array section;
		 otherwise create an anonymous pointer field for it.  */
	      tree base = get_base_address (decl);
	      tree nc = OMP_CLAUSE_CHAIN (c);
	      if (DECL_P (base)
		  && nc != NULL_TREE
		  && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
		  && OMP_CLAUSE_DECL (nc) == base
		  && OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_POINTER
		  && integer_zerop (OMP_CLAUSE_SIZE (nc)))
		{
		  OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c) = 1;
		  OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (nc) = 1;
		}
	      else
		{
		  if (ctx->outer)
		    {
		      scan_omp_op (&OMP_CLAUSE_DECL (c), ctx->outer);
		      decl = OMP_CLAUSE_DECL (c);
		    }
		  gcc_assert (!splay_tree_lookup (ctx->field_map,
						  (splay_tree_key) decl));
		  tree field
		    = build_decl (OMP_CLAUSE_LOCATION (c),
				  FIELD_DECL, NULL_TREE, ptr_type_node);
		  SET_DECL_ALIGN (field, TYPE_ALIGN (ptr_type_node));
		  insert_field_into_struct (ctx->record_type, field);
		  splay_tree_insert (ctx->field_map, (splay_tree_key) decl,
				     (splay_tree_value) field);
		}
	    }
	  break;

	case OMP_CLAUSE__GRIDDIM_:
	  if (ctx->outer)
	    {
	      scan_omp_op (&OMP_CLAUSE__GRIDDIM__SIZE (c), ctx->outer);
	      scan_omp_op (&OMP_CLAUSE__GRIDDIM__GROUP (c), ctx->outer);
	    }
	  break;

	case OMP_CLAUSE_ORDER:
	  ctx->order_concurrent = true;
	  break;

	case OMP_CLAUSE_BIND:
	  ctx->loop_p = true;
	  break;

	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE_SIMDLEN:
	case OMP_CLAUSE_THREADS:
	case OMP_CLAUSE_SIMD:
	case OMP_CLAUSE_NOGROUP:
	case OMP_CLAUSE_DEFAULTMAP:
	case OMP_CLAUSE_ASYNC:
	case OMP_CLAUSE_WAIT:
	case OMP_CLAUSE_GANG:
	case OMP_CLAUSE_WORKER:
	case OMP_CLAUSE_VECTOR:
	case OMP_CLAUSE_INDEPENDENT:
	case OMP_CLAUSE_AUTO:
	case OMP_CLAUSE_SEQ:
	case OMP_CLAUSE_TILE:
	case OMP_CLAUSE__SIMT_:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_NONTEMPORAL:
	case OMP_CLAUSE_IF_PRESENT:
	case OMP_CLAUSE_FINALIZE:
	case OMP_CLAUSE_TASK_REDUCTION:
	  /* Nothing to instantiate for these in the first pass.  */
	  break;

	case OMP_CLAUSE_ALIGNED:
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_global_var (decl)
	      && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	    install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE__CONDTEMP_:
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_parallel_ctx (ctx))
	    {
	      install_var_field (decl, false, 3, ctx);
	      install_var_local (decl, ctx);
	    }
	  else if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
		   && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD
		   && !OMP_CLAUSE__CONDTEMP__ITER (c))
	    install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE__CACHE_:
	default:
	  gcc_unreachable ();
	}
    }

  /* Second pass: fix up decls remapped above and note GIMPLE sequences
     that need recursive scanning.  */
  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_LASTPRIVATE:
	  /* Let the corresponding firstprivate clause create
	     the variable.  */
	  if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
	    scan_array_reductions = true;
	  if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
	    break;
	  /* FALLTHRU */

	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_LINEAR:
	case OMP_CLAUSE_IS_DEVICE_PTR:
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_variable_sized (decl))
	    {
	      if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
		   || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
		  && is_gimple_omp_offloaded (ctx->stmt))
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  install_var_local (decl2, ctx);
		  fixup_remapped_decl (decl2, ctx, false);
		}
	      install_var_local (decl, ctx);
	    }
	  fixup_remapped_decl (decl, ctx,
			       OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
			       && OMP_CLAUSE_PRIVATE_DEBUG (c));
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
	      && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
	    scan_array_reductions = true;
	  break;

	case OMP_CLAUSE_REDUCTION:
	case OMP_CLAUSE_IN_REDUCTION:
	  decl = OMP_CLAUSE_DECL (c);
	  if (TREE_CODE (decl) != MEM_REF)
	    {
	      if (is_variable_sized (decl))
		install_var_local (decl, ctx);
	      fixup_remapped_decl (decl, ctx, false);
	    }
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	    scan_array_reductions = true;
	  break;

	case OMP_CLAUSE_TASK_REDUCTION:
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	    scan_array_reductions = true;
	  break;

	case OMP_CLAUSE_SHARED:
	  /* Ignore shared directives in teams construct inside of
	     target construct.  */
	  if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
	      && !is_host_teams_ctx (ctx))
	    break;
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
	    break;
	  if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	    {
	      if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl,
								 ctx->outer)))
		break;
	      bool by_ref = use_pointer_for_field (decl, ctx);
	      install_var_field (decl, by_ref, 11, ctx);
	      break;
	    }
	  fixup_remapped_decl (decl, ctx, false);
	  break;

	case OMP_CLAUSE_MAP:
	  if (!is_gimple_omp_offloaded (ctx->stmt))
	    break;
	  decl = OMP_CLAUSE_DECL (c);
	  if (DECL_P (decl)
	      && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
		   && (OMP_CLAUSE_MAP_KIND (c)
		       != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
		  || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	      && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
	      && varpool_node::get_create (decl)->offloadable)
	    break;
	  if (DECL_P (decl))
	    {
	      if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
		   || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
		  && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
		  && !COMPLETE_TYPE_P (TREE_TYPE (decl)))
		{
		  /* Incomplete array types must be remapped explicitly;
		     fixup_remapped_decl cannot handle them.  */
		  tree new_decl = lookup_decl (decl, ctx);
		  TREE_TYPE (new_decl)
		    = remap_type (TREE_TYPE (decl), &ctx->cb);
		}
	      else if (DECL_SIZE (decl)
		       && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  fixup_remapped_decl (decl2, ctx, false);
		  fixup_remapped_decl (decl, ctx, true);
		}
	      else
		fixup_remapped_decl (decl, ctx, false);
	    }
	  break;

	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_DIST_SCHEDULE:
	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE_SIMDLEN:
	case OMP_CLAUSE_ALIGNED:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE__LOOPTEMP_:
	case OMP_CLAUSE__REDUCTEMP_:
	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	case OMP_CLAUSE_PRIORITY:
	case OMP_CLAUSE_GRAINSIZE:
	case OMP_CLAUSE_NUM_TASKS:
	case OMP_CLAUSE_THREADS:
	case OMP_CLAUSE_SIMD:
	case OMP_CLAUSE_NOGROUP:
	case OMP_CLAUSE_DEFAULTMAP:
	case OMP_CLAUSE_ORDER:
	case OMP_CLAUSE_BIND:
	case OMP_CLAUSE_USE_DEVICE_PTR:
	case OMP_CLAUSE_USE_DEVICE_ADDR:
	case OMP_CLAUSE_NONTEMPORAL:
	case OMP_CLAUSE_ASYNC:
	case OMP_CLAUSE_WAIT:
	case OMP_CLAUSE_NUM_GANGS:
	case OMP_CLAUSE_NUM_WORKERS:
	case OMP_CLAUSE_VECTOR_LENGTH:
	case OMP_CLAUSE_GANG:
	case OMP_CLAUSE_WORKER:
	case OMP_CLAUSE_VECTOR:
	case OMP_CLAUSE_INDEPENDENT:
	case OMP_CLAUSE_AUTO:
	case OMP_CLAUSE_SEQ:
	case OMP_CLAUSE_TILE:
	case OMP_CLAUSE__GRIDDIM_:
	case OMP_CLAUSE__SIMT_:
	case OMP_CLAUSE_IF_PRESENT:
	case OMP_CLAUSE_FINALIZE:
	case OMP_CLAUSE__CONDTEMP_:
	  /* Nothing to fix up for these in the second pass.  */
	  break;

	case OMP_CLAUSE__CACHE_:
	default:
	  gcc_unreachable ();
	}
    }

  /* Finally, recursively scan any GIMPLE sequences hanging off reduction,
     lastprivate or linear clauses.  OpenACC never has such sequences.  */
  gcc_checking_assert (!scan_array_reductions
		       || !is_gimple_omp_oacc (ctx->stmt));
  if (scan_array_reductions)
    {
      for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
	if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	     || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
	     || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
	    && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	  {
	    scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c), ctx);
	    scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
	  }
	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
		 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
	  scan_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
		 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
	  scan_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
    }
}
1742
1743 /* Create a new name for omp child function. Returns an identifier. */
1744
1745 static tree
create_omp_child_function_name(bool task_copy)1746 create_omp_child_function_name (bool task_copy)
1747 {
1748 return clone_function_name_numbered (current_function_decl,
1749 task_copy ? "_omp_cpyfn" : "_omp_fn");
1750 }
1751
1752 /* Return true if CTX may belong to offloaded code: either if current function
1753 is offloaded, or any enclosing context corresponds to a target region. */
1754
1755 static bool
omp_maybe_offloaded_ctx(omp_context * ctx)1756 omp_maybe_offloaded_ctx (omp_context *ctx)
1757 {
1758 if (cgraph_node::get (current_function_decl)->offloadable)
1759 return true;
1760 for (; ctx; ctx = ctx->outer)
1761 if (is_gimple_omp_offloaded (ctx->stmt))
1762 return true;
1763 return false;
1764 }
1765
1766 /* Build a decl for the omp child function. It'll not contain a body
1767 yet, just the bare decl. */
1768
/* Build a decl for the omp child function.  It'll not contain a body
   yet, just the bare decl.  For a task copy function (TASK_COPY true)
   the function takes two pointer arguments (.omp_data_o in addition to
   .omp_data_i); otherwise it takes the single .omp_data_i pointer.  */

static void
create_omp_child_function (omp_context *ctx, bool task_copy)
{
  tree decl, type, name, t;

  name = create_omp_child_function_name (task_copy);
  if (task_copy)
    type = build_function_type_list (void_type_node, ptr_type_node,
				     ptr_type_node, NULL_TREE);
  else
    type = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);

  decl = build_decl (gimple_location (ctx->stmt), FUNCTION_DECL, name, type);

  gcc_checking_assert (!is_gimple_omp_oacc (ctx->stmt)
		       || !task_copy);
  if (!task_copy)
    ctx->cb.dst_fn = decl;
  else
    gimple_omp_task_set_copy_fn (ctx->stmt, decl);

  /* The outlined function is a compiler-generated, file-local,
     never-inlined function.  */
  TREE_STATIC (decl) = 1;
  TREE_USED (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  DECL_IGNORED_P (decl) = 0;
  TREE_PUBLIC (decl) = 0;
  DECL_UNINLINABLE (decl) = 1;
  DECL_EXTERNAL (decl) = 0;
  DECL_CONTEXT (decl) = NULL_TREE;
  DECL_INITIAL (decl) = make_node (BLOCK);
  BLOCK_SUPERCONTEXT (DECL_INITIAL (decl)) = decl;
  DECL_ATTRIBUTES (decl) = DECL_ATTRIBUTES (current_function_decl);
  /* Remove omp declare simd attribute from the new attributes.  */
  if (tree a = lookup_attribute ("omp declare simd", DECL_ATTRIBUTES (decl)))
    {
      /* Find the last "omp declare simd" attribute; everything past it
	 can be shared with the parent's attribute list.  */
      while (tree a2 = lookup_attribute ("omp declare simd", TREE_CHAIN (a)))
	a = a2;
      a = TREE_CHAIN (a);
      /* Copy the prefix of the list up to A, dropping the "omp declare
	 simd" entries, so the parent's attributes stay untouched.  */
      for (tree *p = &DECL_ATTRIBUTES (decl); *p != a;)
	if (is_attribute_p ("omp declare simd", get_attribute_name (*p)))
	  *p = TREE_CHAIN (*p);
	else
	  {
	    tree chain = TREE_CHAIN (*p);
	    *p = copy_node (*p);
	    p = &TREE_CHAIN (*p);
	    *p = chain;
	  }
    }
  /* Inherit optimization/target settings and versioning from the parent.  */
  DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)
    = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (current_function_decl);
  DECL_FUNCTION_SPECIFIC_TARGET (decl)
    = DECL_FUNCTION_SPECIFIC_TARGET (current_function_decl);
  DECL_FUNCTION_VERSIONED (decl)
    = DECL_FUNCTION_VERSIONED (current_function_decl);

  if (omp_maybe_offloaded_ctx (ctx))
    {
      cgraph_node::get_create (decl)->offloadable = 1;
      if (ENABLE_OFFLOADING)
	g->have_offload = true;
    }

  if (cgraph_node::get_create (decl)->offloadable
      && !lookup_attribute ("omp declare target",
			    DECL_ATTRIBUTES (current_function_decl)))
    {
      /* Mark target-region entry points specially so the offload
	 machinery can find them.  */
      const char *target_attr = (is_gimple_omp_offloaded (ctx->stmt)
				 ? "omp target entrypoint"
				 : "omp declare target");
      DECL_ATTRIBUTES (decl)
	= tree_cons (get_identifier (target_attr),
		     NULL_TREE, DECL_ATTRIBUTES (decl));
    }

  /* The child function returns void.  */
  t = build_decl (DECL_SOURCE_LOCATION (decl),
		  RESULT_DECL, NULL_TREE, void_type_node);
  DECL_ARTIFICIAL (t) = 1;
  DECL_IGNORED_P (t) = 1;
  DECL_CONTEXT (t) = decl;
  DECL_RESULT (decl) = t;

  /* First (and for non-copy functions, only) argument: the incoming
     data block pointer .omp_data_i.  */
  tree data_name = get_identifier (".omp_data_i");
  t = build_decl (DECL_SOURCE_LOCATION (decl), PARM_DECL, data_name,
		  ptr_type_node);
  DECL_ARTIFICIAL (t) = 1;
  DECL_NAMELESS (t) = 1;
  DECL_ARG_TYPE (t) = ptr_type_node;
  /* NOTE: DECL_CONTEXT is set to the parent here; it is fixed up when
     the body is moved into the child function.  */
  DECL_CONTEXT (t) = current_function_decl;
  TREE_USED (t) = 1;
  TREE_READONLY (t) = 1;
  DECL_ARGUMENTS (decl) = t;
  if (!task_copy)
    ctx->receiver_decl = t;
  else
    {
      /* Task copy functions additionally receive the outgoing data
	 block .omp_data_o, chained in front of .omp_data_i.  */
      t = build_decl (DECL_SOURCE_LOCATION (decl),
		      PARM_DECL, get_identifier (".omp_data_o"),
		      ptr_type_node);
      DECL_ARTIFICIAL (t) = 1;
      DECL_NAMELESS (t) = 1;
      DECL_ARG_TYPE (t) = ptr_type_node;
      DECL_CONTEXT (t) = current_function_decl;
      TREE_USED (t) = 1;
      TREE_ADDRESSABLE (t) = 1;
      DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
      DECL_ARGUMENTS (decl) = t;
    }

  /* Allocate memory for the function structure.  The call to
     allocate_struct_function clobbers CFUN, so we need to restore
     it afterward.  */
  push_struct_function (decl);
  cfun->function_end_locus = gimple_location (ctx->stmt);
  init_tree_ssa (cfun);
  pop_cfun ();
}
1886
1887 /* Callback for walk_gimple_seq. Check if combined parallel
1888 contains gimple_omp_for_combined_into_p OMP_FOR. */
1889
1890 tree
omp_find_combined_for(gimple_stmt_iterator * gsi_p,bool * handled_ops_p,struct walk_stmt_info * wi)1891 omp_find_combined_for (gimple_stmt_iterator *gsi_p,
1892 bool *handled_ops_p,
1893 struct walk_stmt_info *wi)
1894 {
1895 gimple *stmt = gsi_stmt (*gsi_p);
1896
1897 *handled_ops_p = true;
1898 switch (gimple_code (stmt))
1899 {
1900 WALK_SUBSTMTS;
1901
1902 case GIMPLE_OMP_FOR:
1903 if (gimple_omp_for_combined_into_p (stmt)
1904 && gimple_omp_for_kind (stmt)
1905 == *(const enum gf_mask *) (wi->info))
1906 {
1907 wi->info = stmt;
1908 return integer_zero_node;
1909 }
1910 break;
1911 default:
1912 break;
1913 }
1914 return NULL;
1915 }
1916
1917 /* Add _LOOPTEMP_/_REDUCTEMP_ clauses on OpenMP parallel or task. */
1918
/* Add _LOOPTEMP_/_REDUCTEMP_ clauses on OpenMP parallel or task STMT,
   which is combined with a loop of kind MSK.  OUTER_CTX is the enclosing
   context, whose decl map is primed with identity mappings for the new
   temporaries.  */

static void
add_taskreg_looptemp_clauses (enum gf_mask msk, gimple *stmt,
			      omp_context *outer_ctx)
{
  struct walk_stmt_info wi;

  /* Walk the body looking for the inner GIMPLE_OMP_FOR of kind MSK that
     this construct is combined with.  WI.INFO initially points at MSK;
     omp_find_combined_for replaces it with the found statement, so a
     changed pointer means success.  */
  memset (&wi, 0, sizeof (wi));
  wi.val_only = true;
  wi.info = (void *) &msk;
  walk_gimple_seq (gimple_omp_body (stmt), omp_find_combined_for, NULL, &wi);
  if (wi.info != (void *) &msk)
    {
      gomp_for *for_stmt = as_a <gomp_for *> ((gimple *) wi.info);
      struct omp_for_data fd;
      omp_extract_for_data (for_stmt, &fd, NULL);
      /* We need two temporaries with fd.loop.v type (istart/iend)
	 and then (fd.collapse - 1) temporaries with the same
	 type for count2 ... countN-1 vars if not constant.  */
      size_t count = 2, i;
      tree type = fd.iter_type;
      if (fd.collapse > 1
	  && TREE_CODE (fd.loop.n2) != INTEGER_CST)
	{
	  count += fd.collapse - 1;
	  /* If there are lastprivate clauses on the inner
	     GIMPLE_OMP_FOR, add one more temporaries for the total number
	     of iterations (product of count1 ... countN-1).  */
	  if (omp_find_clause (gimple_omp_for_clauses (for_stmt),
			       OMP_CLAUSE_LASTPRIVATE))
	    count++;
	  else if (msk == GF_OMP_FOR_KIND_FOR
		   && omp_find_clause (gimple_omp_parallel_clauses (stmt),
				       OMP_CLAUSE_LASTPRIVATE))
	    count++;
	}
      for (i = 0; i < count; i++)
	{
	  /* Each temporary maps to itself in the outer context and is
	     prepended to STMT's clause chain.  */
	  tree temp = create_tmp_var (type);
	  tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
	  insert_decl_map (&outer_ctx->cb, temp, temp);
	  OMP_CLAUSE_DECL (c) = temp;
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
	  gimple_omp_taskreg_set_clauses (stmt, c);
	}
    }
  /* Taskloops with reductions additionally need a _REDUCTEMP_ pointer
     temporary for the reduction bookkeeping data.  */
  if (msk == GF_OMP_FOR_KIND_TASKLOOP
      && omp_find_clause (gimple_omp_task_clauses (stmt),
			  OMP_CLAUSE_REDUCTION))
    {
      tree type = build_pointer_type (pointer_sized_int_node);
      tree temp = create_tmp_var (type);
      tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
      insert_decl_map (&outer_ctx->cb, temp, temp);
      OMP_CLAUSE_DECL (c) = temp;
      OMP_CLAUSE_CHAIN (c) = gimple_omp_task_clauses (stmt);
      gimple_omp_task_set_clauses (stmt, c);
    }
}
1977
1978 /* Scan an OpenMP parallel directive.  Builds the omp_context for the
   region (including the .omp_data_s receiver record and, unless the
   region is grid-phony, the outlined child function), or replaces the
   directive with a nop when the body is provably empty.  */
1979 
1980 static void
scan_omp_parallel(gimple_stmt_iterator * gsi,omp_context * outer_ctx)1981 scan_omp_parallel (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
1982 {
1983 omp_context *ctx;
1984 tree name;
1985 gomp_parallel *stmt = as_a <gomp_parallel *> (gsi_stmt (*gsi));
1986 
1987 /* Ignore parallel directives with empty bodies, unless there
1988 are copyin clauses. */
1989 if (optimize > 0
1990 && empty_body_p (gimple_omp_body (stmt))
1991 && omp_find_clause (gimple_omp_parallel_clauses (stmt),
1992 OMP_CLAUSE_COPYIN) == NULL)
1993 {
1994 gsi_replace (gsi, gimple_build_nop (), false);
1995 return;
1996 }
1997 
1998 if (gimple_omp_parallel_combined_p (stmt))
1999 add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_FOR, stmt, outer_ctx);
/* If any reduction clause is a task reduction, prepend a single
   _reductemp_ clause.  NB: the inner `tree c' deliberately shadows
   the loop variable; the loop exits via break right after.  */
2000 for (tree c = omp_find_clause (gimple_omp_parallel_clauses (stmt),
2001 OMP_CLAUSE_REDUCTION);
2002 c; c = omp_find_clause (OMP_CLAUSE_CHAIN (c), OMP_CLAUSE_REDUCTION))
2003 if (OMP_CLAUSE_REDUCTION_TASK (c))
2004 {
2005 tree type = build_pointer_type (pointer_sized_int_node);
2006 tree temp = create_tmp_var (type);
2007 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
2008 if (outer_ctx)
2009 insert_decl_map (&outer_ctx->cb, temp, temp);
2010 OMP_CLAUSE_DECL (c) = temp;
2011 OMP_CLAUSE_CHAIN (c) = gimple_omp_parallel_clauses (stmt);
2012 gimple_omp_parallel_set_clauses (stmt, c);
2013 break;
2014 }
2015 else if (OMP_CLAUSE_CHAIN (c) == NULL_TREE)
2016 break;
2017 
2018 ctx = new_omp_context (stmt, outer_ctx);
/* Record type layout is deferred to finish_taskreg_scan, hence the
   push onto taskreg_contexts.  */
2019 taskreg_contexts.safe_push (ctx);
2020 if (taskreg_nesting_level > 1)
2021 ctx->is_nested = true;
2022 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2023 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2024 name = create_tmp_var_name (".omp_data_s");
2025 name = build_decl (gimple_location (stmt),
2026 TYPE_DECL, name, ctx->record_type);
2027 DECL_ARTIFICIAL (name) = 1;
2028 DECL_NAMELESS (name) = 1;
2029 TYPE_NAME (ctx->record_type) = name;
2030 TYPE_ARTIFICIAL (ctx->record_type) = 1;
2031 if (!gimple_omp_parallel_grid_phony (stmt))
2032 {
2033 create_omp_child_function (ctx, false);
2034 gimple_omp_parallel_set_child_fn (stmt, ctx->cb.dst_fn);
2035 }
2036 
2037 scan_sharing_clauses (gimple_omp_parallel_clauses (stmt), ctx);
2038 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2039 
/* If no data-sharing fields were created, drop the receiver record.  */
2040 if (TYPE_FIELDS (ctx->record_type) == NULL)
2041 ctx->record_type = ctx->receiver_decl = NULL;
2042 }
2043
2044 /* Scan an OpenMP task directive.  Like scan_omp_parallel, but tasks
   may additionally need a sender record (.omp_data_a / srecord_type)
   with a second child function, and taskwait-with-depend tasks have
   no body to outline at all.  */
2045 
2046 static void
scan_omp_task(gimple_stmt_iterator * gsi,omp_context * outer_ctx)2047 scan_omp_task (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
2048 {
2049 omp_context *ctx;
2050 tree name, t;
2051 gomp_task *stmt = as_a <gomp_task *> (gsi_stmt (*gsi));
2052 
2053 /* Ignore task directives with empty bodies, unless they have depend
2054 clause. */
2055 if (optimize > 0
2056 && gimple_omp_body (stmt)
2057 && empty_body_p (gimple_omp_body (stmt))
2058 && !omp_find_clause (gimple_omp_task_clauses (stmt), OMP_CLAUSE_DEPEND))
2059 {
2060 gsi_replace (gsi, gimple_build_nop (), false);
2061 return;
2062 }
2063 
2064 if (gimple_omp_task_taskloop_p (stmt))
2065 add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_TASKLOOP, stmt, outer_ctx);
2066 
2067 ctx = new_omp_context (stmt, outer_ctx);
2068 
/* A taskwait with depend clauses has no body; only its clauses need
   scanning.  */
2069 if (gimple_omp_task_taskwait_p (stmt))
2070 {
2071 scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);
2072 return;
2073 }
2074 
2075 taskreg_contexts.safe_push (ctx);
2076 if (taskreg_nesting_level > 1)
2077 ctx->is_nested = true;
2078 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2079 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2080 name = create_tmp_var_name (".omp_data_s");
2081 name = build_decl (gimple_location (stmt),
2082 TYPE_DECL, name, ctx->record_type);
2083 DECL_ARTIFICIAL (name) = 1;
2084 DECL_NAMELESS (name) = 1;
2085 TYPE_NAME (ctx->record_type) = name;
2086 TYPE_ARTIFICIAL (ctx->record_type) = 1;
2087 create_omp_child_function (ctx, false);
2088 gimple_omp_task_set_child_fn (stmt, ctx->cb.dst_fn);
2089 
2090 scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);
2091 
/* scan_sharing_clauses may have created a sender record; if so, name
   it and create the second (sender-side) child function.  */
2092 if (ctx->srecord_type)
2093 {
2094 name = create_tmp_var_name (".omp_data_a");
2095 name = build_decl (gimple_location (stmt),
2096 TYPE_DECL, name, ctx->srecord_type);
2097 DECL_ARTIFICIAL (name) = 1;
2098 DECL_NAMELESS (name) = 1;
2099 TYPE_NAME (ctx->srecord_type) = name;
2100 TYPE_ARTIFICIAL (ctx->srecord_type) = 1;
2101 create_omp_child_function (ctx, true);
2102 }
2103 
2104 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2105 
/* No fields means no data to marshal; tell the runtime to allocate
   nothing (size 0, align 1).  */
2106 if (TYPE_FIELDS (ctx->record_type) == NULL)
2107 {
2108 ctx->record_type = ctx->receiver_decl = NULL;
2109 t = build_int_cst (long_integer_type_node, 0);
2110 gimple_omp_task_set_arg_size (stmt, t);
2111 t = build_int_cst (long_integer_type_node, 1);
2112 gimple_omp_task_set_arg_align (stmt, t);
2113 }
2114 }
2115
2116 /* Helper function for finish_taskreg_scan, called through walk_tree.
2117 If maybe_lookup_decl_in_outer_context returns non-NULL for some
2118 tree, replace it in the expression. */
2119
2120 static tree
finish_taskreg_remap(tree * tp,int * walk_subtrees,void * data)2121 finish_taskreg_remap (tree *tp, int *walk_subtrees, void *data)
2122 {
2123 if (VAR_P (*tp))
2124 {
2125 omp_context *ctx = (omp_context *) data;
2126 tree t = maybe_lookup_decl_in_outer_ctx (*tp, ctx);
2127 if (t != *tp)
2128 {
2129 if (DECL_HAS_VALUE_EXPR_P (t))
2130 t = unshare_expr (DECL_VALUE_EXPR (t));
2131 *tp = t;
2132 }
2133 *walk_subtrees = 0;
2134 }
2135 else if (IS_TYPE_OR_DECL_P (*tp))
2136 *walk_subtrees = 0;
2137 return NULL_TREE;
2138 }
2139
2140 /* If any decls have been made addressable during scan_omp,
2141 adjust their fields if needed, and layout record types
2142 of parallel/task constructs. */
2143 
2144 static void
finish_taskreg_scan(omp_context * ctx)2145 finish_taskreg_scan (omp_context *ctx)
2146 {
2147 if (ctx->record_type == NULL_TREE)
2148 return;
2149 
2150 /* If any task_shared_vars were needed, verify all
2151 OMP_CLAUSE_SHARED clauses on GIMPLE_OMP_{PARALLEL,TASK,TEAMS}
2152 statements if use_pointer_for_field hasn't changed
2153 because of that. If it did, update field types now. */
2154 if (task_shared_vars)
2155 {
2156 tree c;
2157 
2158 for (c = gimple_omp_taskreg_clauses (ctx->stmt);
2159 c; c = OMP_CLAUSE_CHAIN (c))
2160 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
2161 && !OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
2162 {
2163 tree decl = OMP_CLAUSE_DECL (c);
2164 
2165 /* Global variables don't need to be copied,
2166 the receiver side will use them directly. */
2167 if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
2168 continue;
/* Only decls that became addressable and now must be passed by
   reference need their field retyped.  */
2169 if (!bitmap_bit_p (task_shared_vars, DECL_UID (decl))
2170 || !use_pointer_for_field (decl, ctx))
2171 continue;
2172 tree field = lookup_field (decl, ctx);
2173 if (TREE_CODE (TREE_TYPE (field)) == POINTER_TYPE
2174 && TREE_TYPE (TREE_TYPE (field)) == TREE_TYPE (decl))
2175 continue;
/* Turn the by-value field into a pointer field and recompute its
   alignment (and the containing record's alignment).  */
2176 TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
2177 TREE_THIS_VOLATILE (field) = 0;
2178 DECL_USER_ALIGN (field) = 0;
2179 SET_DECL_ALIGN (field, TYPE_ALIGN (TREE_TYPE (field)));
2180 if (TYPE_ALIGN (ctx->record_type) < DECL_ALIGN (field))
2181 SET_TYPE_ALIGN (ctx->record_type, DECL_ALIGN (field));
2182 if (ctx->srecord_type)
2183 {
2184 tree sfield = lookup_sfield (decl, ctx);
2185 TREE_TYPE (sfield) = TREE_TYPE (field);
2186 TREE_THIS_VOLATILE (sfield) = 0;
2187 DECL_USER_ALIGN (sfield) = 0;
2188 SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
2189 if (TYPE_ALIGN (ctx->srecord_type) < DECL_ALIGN (sfield))
2190 SET_TYPE_ALIGN (ctx->srecord_type, DECL_ALIGN (sfield));
2191 }
2192 }
2193 }
2194 
2195 if (gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL)
2196 {
2197 tree clauses = gimple_omp_parallel_clauses (ctx->stmt);
2198 tree c = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
2199 if (c)
2200 {
2201 /* Move the _reductemp_ clause first. GOMP_parallel_reductions
2202 expects to find it at the start of data. */
2203 tree f = lookup_field (OMP_CLAUSE_DECL (c), ctx);
2204 tree *p = &TYPE_FIELDS (ctx->record_type);
/* Unlink F from wherever it is in the field list...  */
2205 while (*p)
2206 if (*p == f)
2207 {
2208 *p = DECL_CHAIN (*p);
2209 break;
2210 }
2211 else
2212 p = &DECL_CHAIN (*p);
/* ...and relink it at the head.  */
2213 DECL_CHAIN (f) = TYPE_FIELDS (ctx->record_type);
2214 TYPE_FIELDS (ctx->record_type) = f;
2215 }
2216 layout_type (ctx->record_type);
2217 fixup_child_record_type (ctx);
2218 }
2219 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
2220 {
2221 layout_type (ctx->record_type);
2222 fixup_child_record_type (ctx);
2223 }
2224 else
2225 {
/* GIMPLE_OMP_TASK (including taskloop).  */
2226 location_t loc = gimple_location (ctx->stmt);
2227 tree *p, vla_fields = NULL_TREE, *q = &vla_fields;
2228 /* Move VLA fields to the end. */
2229 p = &TYPE_FIELDS (ctx->record_type);
2230 while (*p)
2231 if (!TYPE_SIZE_UNIT (TREE_TYPE (*p))
2232 || ! TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (*p))))
2233 {
2234 *q = *p;
2235 *p = TREE_CHAIN (*p);
2236 TREE_CHAIN (*q) = NULL_TREE;
2237 q = &TREE_CHAIN (*q);
2238 }
2239 else
2240 p = &DECL_CHAIN (*p);
2241 *p = vla_fields;
2242 if (gimple_omp_task_taskloop_p (ctx->stmt))
2243 {
2244 /* Move fields corresponding to first and second _looptemp_
2245 clause first. These are filled by GOMP_taskloop
2246 and thus need to be in specific positions. */
2247 tree clauses = gimple_omp_task_clauses (ctx->stmt);
2248 tree c1 = omp_find_clause (clauses, OMP_CLAUSE__LOOPTEMP_);
2249 tree c2 = omp_find_clause (OMP_CLAUSE_CHAIN (c1),
2250 OMP_CLAUSE__LOOPTEMP_);
2251 tree c3 = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
2252 tree f1 = lookup_field (OMP_CLAUSE_DECL (c1), ctx);
2253 tree f2 = lookup_field (OMP_CLAUSE_DECL (c2), ctx);
2254 tree f3 = c3 ? lookup_field (OMP_CLAUSE_DECL (c3), ctx) : NULL_TREE;
/* Unlink f1/f2 (and f3 if present), then relink them in order at
   the head of the field list.  */
2255 p = &TYPE_FIELDS (ctx->record_type);
2256 while (*p)
2257 if (*p == f1 || *p == f2 || *p == f3)
2258 *p = DECL_CHAIN (*p);
2259 else
2260 p = &DECL_CHAIN (*p);
2261 DECL_CHAIN (f1) = f2;
2262 if (c3)
2263 {
2264 DECL_CHAIN (f2) = f3;
2265 DECL_CHAIN (f3) = TYPE_FIELDS (ctx->record_type);
2266 }
2267 else
2268 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->record_type);
2269 TYPE_FIELDS (ctx->record_type) = f1;
2270 if (ctx->srecord_type)
2271 {
/* Mirror the same reordering in the sender record.  */
2272 f1 = lookup_sfield (OMP_CLAUSE_DECL (c1), ctx);
2273 f2 = lookup_sfield (OMP_CLAUSE_DECL (c2), ctx);
2274 if (c3)
2275 f3 = lookup_sfield (OMP_CLAUSE_DECL (c3), ctx);
2276 p = &TYPE_FIELDS (ctx->srecord_type);
2277 while (*p)
2278 if (*p == f1 || *p == f2 || *p == f3)
2279 *p = DECL_CHAIN (*p);
2280 else
2281 p = &DECL_CHAIN (*p);
2282 DECL_CHAIN (f1) = f2;
2283 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
2284 if (c3)
2285 {
2286 DECL_CHAIN (f2) = f3;
2287 DECL_CHAIN (f3) = TYPE_FIELDS (ctx->srecord_type);
2288 }
2289 else
2290 DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
2291 TYPE_FIELDS (ctx->srecord_type) = f1;
2292 }
2293 }
2294 layout_type (ctx->record_type);
2295 fixup_child_record_type (ctx);
2296 if (ctx->srecord_type)
2297 layout_type (ctx->srecord_type);
/* The record size may reference VLA sizes; remap any such decls to
   their outer-context counterparts before handing the expression to
   the task statement.  */
2298 tree t = fold_convert_loc (loc, long_integer_type_node,
2299 TYPE_SIZE_UNIT (ctx->record_type));
2300 if (TREE_CODE (t) != INTEGER_CST)
2301 {
2302 t = unshare_expr (t);
2303 walk_tree (&t, finish_taskreg_remap, ctx, NULL);
2304 }
2305 gimple_omp_task_set_arg_size (ctx->stmt, t);
2306 t = build_int_cst (long_integer_type_node,
2307 TYPE_ALIGN_UNIT (ctx->record_type));
2308 gimple_omp_task_set_arg_align (ctx->stmt, t);
2309 }
2310 }
2311
2312 /* Find the enclosing offload context. */
2313
2314 static omp_context *
enclosing_target_ctx(omp_context * ctx)2315 enclosing_target_ctx (omp_context *ctx)
2316 {
2317 for (; ctx; ctx = ctx->outer)
2318 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET)
2319 break;
2320
2321 return ctx;
2322 }
2323
2324 /* Return true if ctx is part of an oacc kernels region. */
2325
2326 static bool
ctx_in_oacc_kernels_region(omp_context * ctx)2327 ctx_in_oacc_kernels_region (omp_context *ctx)
2328 {
2329 for (;ctx != NULL; ctx = ctx->outer)
2330 {
2331 gimple *stmt = ctx->stmt;
2332 if (gimple_code (stmt) == GIMPLE_OMP_TARGET
2333 && gimple_omp_target_kind (stmt) == GF_OMP_TARGET_KIND_OACC_KERNELS)
2334 return true;
2335 }
2336
2337 return false;
2338 }
2339
2340 /* Check the parallelism clauses inside a kernels regions.
2341 Until kernels handling moves to use the same loop indirection
2342 scheme as parallel, we need to do this checking early.
   Returns the union of gang/worker/vector masks used by STMT and all
   enclosing loops.  When called recursively with STMT == NULL, only
   collects masks from CTX without issuing diagnostics.  */
2343 
2344 static unsigned
check_oacc_kernel_gwv(gomp_for * stmt,omp_context * ctx)2345 check_oacc_kernel_gwv (gomp_for *stmt, omp_context *ctx)
2346 {
2347 bool checking = true;
2348 unsigned outer_mask = 0;
2349 unsigned this_mask = 0;
2350 bool has_seq = false, has_auto = false;
2351 
/* First accumulate the parallelism already claimed by enclosing
   contexts (recursive calls don't diagnose, they only collect).  */
2352 if (ctx->outer)
2353 outer_mask = check_oacc_kernel_gwv (NULL, ctx->outer);
2354 if (!stmt)
2355 {
2356 checking = false;
2357 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR)
2358 return outer_mask;
2359 stmt = as_a <gomp_for *> (ctx->stmt);
2360 }
2361 
/* Collect this loop's gang/worker/vector/seq/auto clauses.  */
2362 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2363 {
2364 switch (OMP_CLAUSE_CODE (c))
2365 {
2366 case OMP_CLAUSE_GANG:
2367 this_mask |= GOMP_DIM_MASK (GOMP_DIM_GANG);
2368 break;
2369 case OMP_CLAUSE_WORKER:
2370 this_mask |= GOMP_DIM_MASK (GOMP_DIM_WORKER);
2371 break;
2372 case OMP_CLAUSE_VECTOR:
2373 this_mask |= GOMP_DIM_MASK (GOMP_DIM_VECTOR);
2374 break;
2375 case OMP_CLAUSE_SEQ:
2376 has_seq = true;
2377 break;
2378 case OMP_CLAUSE_AUTO:
2379 has_auto = true;
2380 break;
2381 default:
2382 break;
2383 }
2384 }
2385 
/* Only the outermost (non-recursive) invocation diagnoses conflicts.  */
2386 if (checking)
2387 {
2388 if (has_seq && (this_mask || has_auto))
2389 error_at (gimple_location (stmt), "%<seq%> overrides other"
2390 " OpenACC loop specifiers");
2391 else if (has_auto && this_mask)
2392 error_at (gimple_location (stmt), "%<auto%> conflicts with other"
2393 " OpenACC loop specifiers");
2394 
2395 if (this_mask & outer_mask)
2396 error_at (gimple_location (stmt), "inner loop uses same"
2397 " OpenACC parallelism as containing loop");
2398 }
2399 
2400 return outer_mask | this_mask;
2401 }
2402
2403 /* Scan a GIMPLE_OMP_FOR.  Creates and returns the loop's omp_context.
   For OpenACC loops this additionally validates gang/worker/vector
   clause arguments, checks kernels-region parallelism, and verifies
   reduction-clause consistency across nested loops.  */
2404 
2405 static omp_context *
scan_omp_for(gomp_for * stmt,omp_context * outer_ctx)2406 scan_omp_for (gomp_for *stmt, omp_context *outer_ctx)
2407 {
2408 omp_context *ctx;
2409 size_t i;
2410 tree clauses = gimple_omp_for_clauses (stmt);
2411 
2412 ctx = new_omp_context (stmt, outer_ctx);
2413 
2414 if (is_gimple_omp_oacc (stmt))
2415 {
2416 omp_context *tgt = enclosing_target_ctx (outer_ctx);
2417 
/* Outside of kernels regions, gang/worker/vector clauses may not
   carry an argument expression.  */
2418 if (!(tgt && is_oacc_kernels (tgt)))
2419 for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
2420 {
2421 tree c_op0;
2422 switch (OMP_CLAUSE_CODE (c))
2423 {
2424 case OMP_CLAUSE_GANG:
2425 c_op0 = OMP_CLAUSE_GANG_EXPR (c);
2426 break;
2427 
2428 case OMP_CLAUSE_WORKER:
2429 c_op0 = OMP_CLAUSE_WORKER_EXPR (c);
2430 break;
2431 
2432 case OMP_CLAUSE_VECTOR:
2433 c_op0 = OMP_CLAUSE_VECTOR_EXPR (c);
2434 break;
2435 
2436 default:
2437 continue;
2438 }
2439 
2440 if (c_op0)
2441 {
2442 error_at (OMP_CLAUSE_LOCATION (c),
2443 "argument not permitted on %qs clause",
2444 omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
2445 if (tgt)
2446 inform (gimple_location (tgt->stmt),
2447 "enclosing parent compute construct");
2448 else if (oacc_get_fn_attrib (current_function_decl))
2449 inform (DECL_SOURCE_LOCATION (current_function_decl),
2450 "enclosing routine");
2451 else
2452 gcc_unreachable ();
2453 }
2454 }
2455 
2456 if (tgt && is_oacc_kernels (tgt))
2457 check_oacc_kernel_gwv (stmt, ctx);
2458 
2459 /* Collect all variables named in reductions on this loop. Ensure
2460 that, if this loop has a reduction on some variable v, and there is
2461 a reduction on v somewhere in an outer context, then there is a
2462 reduction on v on all intervening loops as well. */
2463 tree local_reduction_clauses = NULL;
2464 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2465 {
2466 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
2467 local_reduction_clauses
2468 = tree_cons (NULL, c, local_reduction_clauses)0;
2469 }
/* Lazily inherit the accumulated outer reduction list from the
   enclosing context.  */
2470 if (ctx->outer_reduction_clauses == NULL && ctx->outer != NULL)
2471 ctx->outer_reduction_clauses
2472 = chainon (unshare_expr (ctx->outer->local_reduction_clauses),
2473 ctx->outer->outer_reduction_clauses);
2474 tree outer_reduction_clauses = ctx->outer_reduction_clauses;
2475 tree local_iter = local_reduction_clauses;
2476 for (; local_iter; local_iter = TREE_CHAIN (local_iter))
2477 {
2478 tree local_clause = TREE_VALUE (local_iter);
2479 tree local_var = OMP_CLAUSE_DECL (local_clause);
2480 tree_code local_op = OMP_CLAUSE_REDUCTION_CODE (local_clause);
2481 bool have_outer_reduction = false;
2482 tree ctx_iter = outer_reduction_clauses;
2483 for (; ctx_iter; ctx_iter = TREE_CHAIN (ctx_iter))
2484 {
2485 tree outer_clause = TREE_VALUE (ctx_iter);
2486 tree outer_var = OMP_CLAUSE_DECL (outer_clause);
2487 tree_code outer_op = OMP_CLAUSE_REDUCTION_CODE (outer_clause);
/* Same variable, different operation: warn but keep going.  */
2488 if (outer_var == local_var && outer_op != local_op)
2489 {
2490 warning_at (OMP_CLAUSE_LOCATION (local_clause), 0,
2491 "conflicting reduction operations for %qE",
2492 local_var);
2493 inform (OMP_CLAUSE_LOCATION (outer_clause),
2494 "location of the previous reduction for %qE",
2495 outer_var);
2496 }
2497 if (outer_var == local_var)
2498 {
2499 have_outer_reduction = true;
2500 break;
2501 }
2502 }
2503 if (have_outer_reduction)
2504 {
2505 /* There is a reduction on outer_var both on this loop and on
2506 some enclosing loop. Walk up the context tree until such a
2507 loop with a reduction on outer_var is found, and complain
2508 about all intervening loops that do not have such a
2509 reduction. */
2510 struct omp_context *curr_loop = ctx->outer;
2511 bool found = false;
2512 while (curr_loop != NULL)
2513 {
2514 tree curr_iter = curr_loop->local_reduction_clauses;
2515 for (; curr_iter; curr_iter = TREE_CHAIN (curr_iter))
2516 {
2517 tree curr_clause = TREE_VALUE (curr_iter);
2518 tree curr_var = OMP_CLAUSE_DECL (curr_clause);
2519 if (curr_var == local_var)
2520 {
2521 found = true;
2522 break;
2523 }
2524 }
2525 if (!found)
2526 warning_at (gimple_location (curr_loop->stmt), 0,
2527 "nested loop in reduction needs "
2528 "reduction clause for %qE",
2529 local_var);
2530 else
2531 break;
2532 curr_loop = curr_loop->outer;
2533 }
2534 }
2535 }
/* Publish this loop's reductions for any loops nested inside it.  */
2536 ctx->local_reduction_clauses = local_reduction_clauses;
2537 ctx->outer_reduction_clauses
2538 = chainon (unshare_expr (ctx->local_reduction_clauses),
2539 ctx->outer_reduction_clauses);
2540 
2541 if (tgt && is_oacc_kernels (tgt))
2542 {
2543 /* Strip out reductions, as they are not handled yet. */
2544 tree *prev_ptr = &clauses;
2545 
2546 while (tree probe = *prev_ptr)
2547 {
2548 tree *next_ptr = &OMP_CLAUSE_CHAIN (probe);
2549 
2550 if (OMP_CLAUSE_CODE (probe) == OMP_CLAUSE_REDUCTION)
2551 *prev_ptr = *next_ptr;
2552 else
2553 prev_ptr = next_ptr;
2554 }
2555 
/* The local CLAUSES list head may have changed; write it back.  */
2556 gimple_omp_for_set_clauses (stmt, clauses);
2557 }
2558 }
2559 
2560 scan_sharing_clauses (clauses, ctx);
2561 
/* Scan the pre-body and every collapsed dimension's control
   expressions before the loop body itself.  */
2562 scan_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
2563 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
2564 {
2565 scan_omp_op (gimple_omp_for_index_ptr (stmt, i), ctx);
2566 scan_omp_op (gimple_omp_for_initial_ptr (stmt, i), ctx);
2567 scan_omp_op (gimple_omp_for_final_ptr (stmt, i), ctx);
2568 scan_omp_op (gimple_omp_for_incr_ptr (stmt, i), ctx);
2569 }
2570 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2571 return ctx;
2572 }
2573
2574 /* Duplicate #pragma omp simd, one for SIMT, another one for SIMD.
   Replaces STMT at *GSI with a GIMPLE_BIND of the shape:
     cond = GOMP_USE_SIMT ();
     if (cond) { <copy of loop, with _simt_ clause> }
     else      { <original loop> }
   so the target-dependent choice is resolved later.  */
2575 
2576 static void
scan_omp_simd(gimple_stmt_iterator * gsi,gomp_for * stmt,omp_context * outer_ctx)2577 scan_omp_simd (gimple_stmt_iterator *gsi, gomp_for *stmt,
2578 omp_context *outer_ctx)
2579 {
2580 gbind *bind = gimple_build_bind (NULL, NULL, NULL);
2581 gsi_replace (gsi, bind, false);
2582 gimple_seq seq = NULL;
/* cond = IFN_GOMP_USE_SIMT () — decided during later lowering.  */
2583 gimple *g = gimple_build_call_internal (IFN_GOMP_USE_SIMT, 0);
2584 tree cond = create_tmp_var_raw (integer_type_node);
2585 DECL_CONTEXT (cond) = current_function_decl;
2586 DECL_SEEN_IN_BIND_EXPR_P (cond) = 1;
2587 gimple_bind_set_vars (bind, cond);
2588 gimple_call_set_lhs (g, cond);
2589 gimple_seq_add_stmt (&seq, g);
2590 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
2591 tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
2592 tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
2593 g = gimple_build_cond (NE_EXPR, cond, integer_zero_node, lab1, lab2);
2594 gimple_seq_add_stmt (&seq, g);
2595 g = gimple_build_label (lab1);
2596 gimple_seq_add_stmt (&seq, g);
/* The SIMT copy gets an extra _simt_ clause prepended so later
   passes can tell the two versions apart.  */
2597 gimple_seq new_seq = copy_gimple_seq_and_replace_locals (stmt);
2598 gomp_for *new_stmt = as_a <gomp_for *> (new_seq);
2599 tree clause = build_omp_clause (gimple_location (stmt), OMP_CLAUSE__SIMT_);
2600 OMP_CLAUSE_CHAIN (clause) = gimple_omp_for_clauses (new_stmt);
2601 gimple_omp_for_set_clauses (new_stmt, clause);
2602 gimple_seq_add_stmt (&seq, new_stmt);
2603 g = gimple_build_goto (lab3);
2604 gimple_seq_add_stmt (&seq, g);
2605 g = gimple_build_label (lab2);
2606 gimple_seq_add_stmt (&seq, g);
2607 gimple_seq_add_stmt (&seq, stmt);
2608 g = gimple_build_label (lab3);
2609 gimple_seq_add_stmt (&seq, g);
2610 gimple_bind_set_body (bind, seq);
2611 update_stmt (bind);
/* Scan both versions; link the SIMD context to its SIMT twin.  */
2612 scan_omp_for (new_stmt, outer_ctx);
2613 scan_omp_for (stmt, outer_ctx)->simt_stmt = new_stmt;
2614 }
2615
2616 static tree omp_find_scan (gimple_stmt_iterator *, bool *,
2617 struct walk_stmt_info *);
2618 static omp_context *maybe_lookup_ctx (gimple *);
2619
2620 /* Duplicate #pragma omp simd, one for the scan input phase loop and one
2621 for scan phase loop.
   The original loop STMT becomes the input phase, wrapped in a
   GIMPLE_OMP_SCAN without clauses; a copy becomes the scan phase,
   wrapped in a GIMPLE_OMP_SCAN with an inclusive clause.  */
2622 
2623 static void
scan_omp_simd_scan(gimple_stmt_iterator * gsi,gomp_for * stmt,omp_context * outer_ctx)2624 scan_omp_simd_scan (gimple_stmt_iterator *gsi, gomp_for *stmt,
2625 omp_context *outer_ctx)
2626 {
2627 /* The only change between inclusive and exclusive scan will be
2628 within the first simd loop, so just use inclusive in the
2629 worksharing loop. */
2630 outer_ctx->scan_inclusive = true;
2631 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_INCLUSIVE);
2632 OMP_CLAUSE_DECL (c) = integer_zero_node;
2633 
2634 gomp_scan *input_stmt = gimple_build_omp_scan (NULL, NULL_TREE);
2635 gomp_scan *scan_stmt = gimple_build_omp_scan (NULL, c);
2636 gsi_replace (gsi, input_stmt, false);
2637 gimple_seq input_body = NULL;
2638 gimple_seq_add_stmt (&input_body, stmt);
2639 gsi_insert_after (gsi, scan_stmt, GSI_NEW_STMT);
2640 
/* Locate the inner scan separator inside the original loop body;
   omp_find_scan records its iterator in wi.info.  */
2641 gimple_stmt_iterator input1_gsi = gsi_none ();
2642 struct walk_stmt_info wi;
2643 memset (&wi, 0, sizeof (wi));
2644 wi.val_only = true;
2645 wi.info = (void *) &input1_gsi;
2646 walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), omp_find_scan, NULL, &wi);
2647 gcc_assert (!gsi_end_p (input1_gsi));
2648 
2649 gimple *input_stmt1 = gsi_stmt (input1_gsi);
2650 gsi_next (&input1_gsi);
2651 gimple *scan_stmt1 = gsi_stmt (input1_gsi);
2652 gcc_assert (scan_stmt1 && gimple_code (scan_stmt1) == GIMPLE_OMP_SCAN);
/* For exclusive scans the two inner halves are in the opposite
   order.  */
2653 c = gimple_omp_scan_clauses (as_a <gomp_scan *> (scan_stmt1));
2654 if (c && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_EXCLUSIVE)
2655 std::swap (input_stmt1, scan_stmt1);
2656 
/* Temporarily detach the input half's body so the copy below only
   duplicates what the scan phase needs, then reattach it.  */
2657 gimple_seq input_body1 = gimple_omp_body (input_stmt1);
2658 gimple_omp_set_body (input_stmt1, NULL);
2659 
2660 gimple_seq scan_body = copy_gimple_seq_and_replace_locals (stmt);
2661 gomp_for *new_stmt = as_a <gomp_for *> (scan_body);
2662 
2663 gimple_omp_set_body (input_stmt1, input_body1);
2664 gimple_omp_set_body (scan_stmt1, NULL);
2665 
/* Find the scan separator again, this time inside the copy.  */
2666 gimple_stmt_iterator input2_gsi = gsi_none ();
2667 memset (&wi, 0, sizeof (wi));
2668 wi.val_only = true;
2669 wi.info = (void *) &input2_gsi;
2670 walk_gimple_seq_mod (gimple_omp_body_ptr (new_stmt), omp_find_scan,
2671 NULL, &wi);
2672 gcc_assert (!gsi_end_p (input2_gsi));
2673 
2674 gimple *input_stmt2 = gsi_stmt (input2_gsi);
2675 gsi_next (&input2_gsi);
2676 gimple *scan_stmt2 = gsi_stmt (input2_gsi);
2677 gcc_assert (scan_stmt2 && gimple_code (scan_stmt2) == GIMPLE_OMP_SCAN);
2678 if (c && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_EXCLUSIVE)
2679 std::swap (input_stmt2, scan_stmt2);
2680 
2681 gimple_omp_set_body (input_stmt2, NULL);
2682 
2683 gimple_omp_set_body (input_stmt, input_body);
2684 gimple_omp_set_body (scan_stmt, scan_body);
2685 
/* Scan both phases in fresh contexts under OUTER_CTX.  */
2686 omp_context *ctx = new_omp_context (input_stmt, outer_ctx);
2687 scan_omp (gimple_omp_body_ptr (input_stmt), ctx);
2688 
2689 ctx = new_omp_context (scan_stmt, outer_ctx);
2690 scan_omp (gimple_omp_body_ptr (scan_stmt), ctx);
2691 
2692 maybe_lookup_ctx (new_stmt)->for_simd_scan_phase = true;
2693 }
2694
2695 /* Scan an OpenMP sections directive. */
2696
2697 static void
scan_omp_sections(gomp_sections * stmt,omp_context * outer_ctx)2698 scan_omp_sections (gomp_sections *stmt, omp_context *outer_ctx)
2699 {
2700 omp_context *ctx;
2701
2702 ctx = new_omp_context (stmt, outer_ctx);
2703 scan_sharing_clauses (gimple_omp_sections_clauses (stmt), ctx);
2704 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2705 }
2706
2707 /* Scan an OpenMP single directive. */
2708
2709 static void
scan_omp_single(gomp_single * stmt,omp_context * outer_ctx)2710 scan_omp_single (gomp_single *stmt, omp_context *outer_ctx)
2711 {
2712 omp_context *ctx;
2713 tree name;
2714
2715 ctx = new_omp_context (stmt, outer_ctx);
2716 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2717 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2718 name = create_tmp_var_name (".omp_copy_s");
2719 name = build_decl (gimple_location (stmt),
2720 TYPE_DECL, name, ctx->record_type);
2721 TYPE_NAME (ctx->record_type) = name;
2722
2723 scan_sharing_clauses (gimple_omp_single_clauses (stmt), ctx);
2724 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2725
2726 if (TYPE_FIELDS (ctx->record_type) == NULL)
2727 ctx->record_type = NULL;
2728 else
2729 layout_type (ctx->record_type);
2730 }
2731
2732 /* Scan a GIMPLE_OMP_TARGET.  Builds the region's context and the
   .omp_data_t receiver record; for offloaded regions also creates the
   outlined child function and fixes up its record type.  */
2733 
2734 static void
scan_omp_target(gomp_target * stmt,omp_context * outer_ctx)2735 scan_omp_target (gomp_target *stmt, omp_context *outer_ctx)
2736 {
2737 omp_context *ctx;
2738 tree name;
2739 bool offloaded = is_gimple_omp_offloaded (stmt);
2740 tree clauses = gimple_omp_target_clauses (stmt);
2741 
2742 ctx = new_omp_context (stmt, outer_ctx);
2743 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2744 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2745 name = create_tmp_var_name (".omp_data_t");
2746 name = build_decl (gimple_location (stmt),
2747 TYPE_DECL, name, ctx->record_type);
2748 DECL_ARTIFICIAL (name) = 1;
2749 DECL_NAMELESS (name) = 1;
2750 TYPE_NAME (ctx->record_type) = name;
2751 TYPE_ARTIFICIAL (ctx->record_type) = 1;
2752 
2753 if (offloaded)
2754 {
2755 create_omp_child_function (ctx, false);
2756 gimple_omp_target_set_child_fn (stmt, ctx->cb.dst_fn);
2757 }
2758 
2759 scan_sharing_clauses (clauses, ctx);
2760 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2761 
2762 if (TYPE_FIELDS (ctx->record_type) == NULL)
2763 ctx->record_type = ctx->receiver_decl = NULL;
2764 else
2765 {
/* Fields were prepended during scanning; restore source order.  */
2766 TYPE_FIELDS (ctx->record_type)
2767 = nreverse (TYPE_FIELDS (ctx->record_type));
/* All fields are expected to share one alignment; verify that in
   checking builds.  */
2768 if (flag_checking)
2769 {
2770 unsigned int align = DECL_ALIGN (TYPE_FIELDS (ctx->record_type));
2771 for (tree field = TYPE_FIELDS (ctx->record_type);
2772 field;
2773 field = DECL_CHAIN (field))
2774 gcc_assert (DECL_ALIGN (field) == align);
2775 }
2776 layout_type (ctx->record_type);
2777 if (offloaded)
2778 fixup_child_record_type (ctx);
2779 }
2780 }
2781
2782 /* Scan an OpenMP teams directive.  Non-host teams only need their
   clauses and body scanned; host teams are outlined like parallel
   regions (receiver record, child function, deferred layout).  */
2783 
2784 static void
scan_omp_teams(gomp_teams * stmt,omp_context * outer_ctx)2785 scan_omp_teams (gomp_teams *stmt, omp_context *outer_ctx)
2786 {
2787 omp_context *ctx = new_omp_context (stmt, outer_ctx);
2788 
2789 if (!gimple_omp_teams_host (stmt))
2790 {
2791 scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
2792 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2793 return;
2794 }
/* Host teams: outline like a parallel; layout happens later in
   finish_taskreg_scan, hence the taskreg_contexts push.  */
2795 taskreg_contexts.safe_push (ctx);
2796 gcc_assert (taskreg_nesting_level == 1);
2797 ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2798 ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2799 tree name = create_tmp_var_name (".omp_data_s");
2800 name = build_decl (gimple_location (stmt),
2801 TYPE_DECL, name, ctx->record_type);
2802 DECL_ARTIFICIAL (name) = 1;
2803 DECL_NAMELESS (name) = 1;
2804 TYPE_NAME (ctx->record_type) = name;
2805 TYPE_ARTIFICIAL (ctx->record_type) = 1;
2806 create_omp_child_function (ctx, false);
2807 gimple_omp_teams_set_child_fn (stmt, ctx->cb.dst_fn);
2808 
2809 scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
2810 scan_omp (gimple_omp_body_ptr (stmt), ctx);
2811 
2812 if (TYPE_FIELDS (ctx->record_type) == NULL)
2813 ctx->record_type = ctx->receiver_decl = NULL;
2814 }
2815
2816 /* Check nesting restrictions. */
2817 static bool
check_omp_nesting_restrictions(gimple * stmt,omp_context * ctx)2818 check_omp_nesting_restrictions (gimple *stmt, omp_context *ctx)
2819 {
2820 tree c;
2821
2822 if (ctx && gimple_code (ctx->stmt) == GIMPLE_OMP_GRID_BODY)
2823 /* GRID_BODY is an artificial construct, nesting rules will be checked in
2824 the original copy of its contents. */
2825 return true;
2826
2827 /* No nesting of non-OpenACC STMT (that is, an OpenMP one, or a GOMP builtin)
2828 inside an OpenACC CTX. */
2829 if (!(is_gimple_omp (stmt)
2830 && is_gimple_omp_oacc (stmt))
2831 /* Except for atomic codes that we share with OpenMP. */
2832 && !(gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
2833 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE))
2834 {
2835 if (oacc_get_fn_attrib (cfun->decl) != NULL)
2836 {
2837 error_at (gimple_location (stmt),
2838 "non-OpenACC construct inside of OpenACC routine");
2839 return false;
2840 }
2841 else
2842 for (omp_context *octx = ctx; octx != NULL; octx = octx->outer)
2843 if (is_gimple_omp (octx->stmt)
2844 && is_gimple_omp_oacc (octx->stmt))
2845 {
2846 error_at (gimple_location (stmt),
2847 "non-OpenACC construct inside of OpenACC region");
2848 return false;
2849 }
2850 }
2851
2852 if (ctx != NULL)
2853 {
2854 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SCAN
2855 && ctx->outer
2856 && gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
2857 ctx = ctx->outer;
2858 if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
2859 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD
2860 && !ctx->loop_p)
2861 {
2862 c = NULL_TREE;
2863 if (ctx->order_concurrent
2864 && (gimple_code (stmt) == GIMPLE_OMP_ORDERED
2865 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
2866 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE))
2867 {
2868 error_at (gimple_location (stmt),
2869 "OpenMP constructs other than %<parallel%>, %<loop%>"
2870 " or %<simd%> may not be nested inside a region with"
2871 " the %<order(concurrent)%> clause");
2872 return false;
2873 }
2874 if (gimple_code (stmt) == GIMPLE_OMP_ORDERED)
2875 {
2876 c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
2877 if (omp_find_clause (c, OMP_CLAUSE_SIMD))
2878 {
2879 if (omp_find_clause (c, OMP_CLAUSE_THREADS)
2880 && (ctx->outer == NULL
2881 || !gimple_omp_for_combined_into_p (ctx->stmt)
2882 || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR
2883 || (gimple_omp_for_kind (ctx->outer->stmt)
2884 != GF_OMP_FOR_KIND_FOR)
2885 || !gimple_omp_for_combined_p (ctx->outer->stmt)))
2886 {
2887 error_at (gimple_location (stmt),
2888 "%<ordered simd threads%> must be closely "
2889 "nested inside of %<for simd%> region");
2890 return false;
2891 }
2892 return true;
2893 }
2894 }
2895 else if (gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
2896 || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE
2897 || gimple_code (stmt) == GIMPLE_OMP_SCAN)
2898 return true;
2899 else if (gimple_code (stmt) == GIMPLE_OMP_FOR
2900 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
2901 return true;
2902 error_at (gimple_location (stmt),
2903 "OpenMP constructs other than "
2904 "%<ordered simd%>, %<simd%>, %<loop%> or %<atomic%> may "
2905 "not be nested inside %<simd%> region");
2906 return false;
2907 }
2908 else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
2909 {
2910 if ((gimple_code (stmt) != GIMPLE_OMP_FOR
2911 || (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_DISTRIBUTE
2912 && gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_GRID_LOOP
2913 && omp_find_clause (gimple_omp_for_clauses (stmt),
2914 OMP_CLAUSE_BIND) == NULL_TREE))
2915 && gimple_code (stmt) != GIMPLE_OMP_PARALLEL)
2916 {
2917 error_at (gimple_location (stmt),
2918 "only %<distribute%>, %<parallel%> or %<loop%> "
2919 "regions are allowed to be strictly nested inside "
2920 "%<teams%> region");
2921 return false;
2922 }
2923 }
2924 else if (ctx->order_concurrent
2925 && gimple_code (stmt) != GIMPLE_OMP_PARALLEL
2926 && (gimple_code (stmt) != GIMPLE_OMP_FOR
2927 || gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_SIMD)
2928 && gimple_code (stmt) != GIMPLE_OMP_SCAN)
2929 {
2930 if (ctx->loop_p)
2931 error_at (gimple_location (stmt),
2932 "OpenMP constructs other than %<parallel%>, %<loop%> or "
2933 "%<simd%> may not be nested inside a %<loop%> region");
2934 else
2935 error_at (gimple_location (stmt),
2936 "OpenMP constructs other than %<parallel%>, %<loop%> or "
2937 "%<simd%> may not be nested inside a region with "
2938 "the %<order(concurrent)%> clause");
2939 return false;
2940 }
2941 }
2942 switch (gimple_code (stmt))
2943 {
2944 case GIMPLE_OMP_FOR:
2945 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_SIMD)
2946 return true;
2947 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_DISTRIBUTE)
2948 {
2949 if (ctx != NULL && gimple_code (ctx->stmt) != GIMPLE_OMP_TEAMS)
2950 {
2951 error_at (gimple_location (stmt),
2952 "%<distribute%> region must be strictly nested "
2953 "inside %<teams%> construct");
2954 return false;
2955 }
2956 return true;
2957 }
2958 /* We split taskloop into task and nested taskloop in it. */
2959 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP)
2960 return true;
2961 /* For now, hope this will change and loop bind(parallel) will not
2962 be allowed in lots of contexts. */
2963 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR
2964 && omp_find_clause (gimple_omp_for_clauses (stmt), OMP_CLAUSE_BIND))
2965 return true;
2966 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_OACC_LOOP)
2967 {
2968 bool ok = false;
2969
2970 if (ctx)
2971 switch (gimple_code (ctx->stmt))
2972 {
2973 case GIMPLE_OMP_FOR:
2974 ok = (gimple_omp_for_kind (ctx->stmt)
2975 == GF_OMP_FOR_KIND_OACC_LOOP);
2976 break;
2977
2978 case GIMPLE_OMP_TARGET:
2979 switch (gimple_omp_target_kind (ctx->stmt))
2980 {
2981 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
2982 case GF_OMP_TARGET_KIND_OACC_KERNELS:
2983 case GF_OMP_TARGET_KIND_OACC_SERIAL:
2984 ok = true;
2985 break;
2986
2987 default:
2988 break;
2989 }
2990
2991 default:
2992 break;
2993 }
2994 else if (oacc_get_fn_attrib (current_function_decl))
2995 ok = true;
2996 if (!ok)
2997 {
2998 error_at (gimple_location (stmt),
2999 "OpenACC loop directive must be associated with"
3000 " an OpenACC compute region");
3001 return false;
3002 }
3003 }
3004 /* FALLTHRU */
3005 case GIMPLE_CALL:
3006 if (is_gimple_call (stmt)
3007 && (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3008 == BUILT_IN_GOMP_CANCEL
3009 || DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3010 == BUILT_IN_GOMP_CANCELLATION_POINT))
3011 {
3012 const char *bad = NULL;
3013 const char *kind = NULL;
3014 const char *construct
3015 = (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3016 == BUILT_IN_GOMP_CANCEL)
3017 ? "cancel"
3018 : "cancellation point";
3019 if (ctx == NULL)
3020 {
3021 error_at (gimple_location (stmt), "orphaned %qs construct",
3022 construct);
3023 return false;
3024 }
3025 switch (tree_fits_shwi_p (gimple_call_arg (stmt, 0))
3026 ? tree_to_shwi (gimple_call_arg (stmt, 0))
3027 : 0)
3028 {
3029 case 1:
3030 if (gimple_code (ctx->stmt) != GIMPLE_OMP_PARALLEL)
3031 bad = "parallel";
3032 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3033 == BUILT_IN_GOMP_CANCEL
3034 && !integer_zerop (gimple_call_arg (stmt, 1)))
3035 ctx->cancellable = true;
3036 kind = "parallel";
3037 break;
3038 case 2:
3039 if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
3040 || gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR)
3041 bad = "for";
3042 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3043 == BUILT_IN_GOMP_CANCEL
3044 && !integer_zerop (gimple_call_arg (stmt, 1)))
3045 {
3046 ctx->cancellable = true;
3047 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3048 OMP_CLAUSE_NOWAIT))
3049 warning_at (gimple_location (stmt), 0,
3050 "%<cancel for%> inside "
3051 "%<nowait%> for construct");
3052 if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3053 OMP_CLAUSE_ORDERED))
3054 warning_at (gimple_location (stmt), 0,
3055 "%<cancel for%> inside "
3056 "%<ordered%> for construct");
3057 }
3058 kind = "for";
3059 break;
3060 case 4:
3061 if (gimple_code (ctx->stmt) != GIMPLE_OMP_SECTIONS
3062 && gimple_code (ctx->stmt) != GIMPLE_OMP_SECTION)
3063 bad = "sections";
3064 else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3065 == BUILT_IN_GOMP_CANCEL
3066 && !integer_zerop (gimple_call_arg (stmt, 1)))
3067 {
3068 if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
3069 {
3070 ctx->cancellable = true;
3071 if (omp_find_clause (gimple_omp_sections_clauses
3072 (ctx->stmt),
3073 OMP_CLAUSE_NOWAIT))
3074 warning_at (gimple_location (stmt), 0,
3075 "%<cancel sections%> inside "
3076 "%<nowait%> sections construct");
3077 }
3078 else
3079 {
3080 gcc_assert (ctx->outer
3081 && gimple_code (ctx->outer->stmt)
3082 == GIMPLE_OMP_SECTIONS);
3083 ctx->outer->cancellable = true;
3084 if (omp_find_clause (gimple_omp_sections_clauses
3085 (ctx->outer->stmt),
3086 OMP_CLAUSE_NOWAIT))
3087 warning_at (gimple_location (stmt), 0,
3088 "%<cancel sections%> inside "
3089 "%<nowait%> sections construct");
3090 }
3091 }
3092 kind = "sections";
3093 break;
3094 case 8:
3095 if (!is_task_ctx (ctx)
3096 && (!is_taskloop_ctx (ctx)
3097 || ctx->outer == NULL
3098 || !is_task_ctx (ctx->outer)))
3099 bad = "task";
3100 else
3101 {
3102 for (omp_context *octx = ctx->outer;
3103 octx; octx = octx->outer)
3104 {
3105 switch (gimple_code (octx->stmt))
3106 {
3107 case GIMPLE_OMP_TASKGROUP:
3108 break;
3109 case GIMPLE_OMP_TARGET:
3110 if (gimple_omp_target_kind (octx->stmt)
3111 != GF_OMP_TARGET_KIND_REGION)
3112 continue;
3113 /* FALLTHRU */
3114 case GIMPLE_OMP_PARALLEL:
3115 case GIMPLE_OMP_TEAMS:
3116 error_at (gimple_location (stmt),
3117 "%<%s taskgroup%> construct not closely "
3118 "nested inside of %<taskgroup%> region",
3119 construct);
3120 return false;
3121 case GIMPLE_OMP_TASK:
3122 if (gimple_omp_task_taskloop_p (octx->stmt)
3123 && octx->outer
3124 && is_taskloop_ctx (octx->outer))
3125 {
3126 tree clauses
3127 = gimple_omp_for_clauses (octx->outer->stmt);
3128 if (!omp_find_clause (clauses, OMP_CLAUSE_NOGROUP))
3129 break;
3130 }
3131 continue;
3132 default:
3133 continue;
3134 }
3135 break;
3136 }
3137 ctx->cancellable = true;
3138 }
3139 kind = "taskgroup";
3140 break;
3141 default:
3142 error_at (gimple_location (stmt), "invalid arguments");
3143 return false;
3144 }
3145 if (bad)
3146 {
3147 error_at (gimple_location (stmt),
3148 "%<%s %s%> construct not closely nested inside of %qs",
3149 construct, kind, bad);
3150 return false;
3151 }
3152 }
3153 /* FALLTHRU */
3154 case GIMPLE_OMP_SECTIONS:
3155 case GIMPLE_OMP_SINGLE:
3156 for (; ctx != NULL; ctx = ctx->outer)
3157 switch (gimple_code (ctx->stmt))
3158 {
3159 case GIMPLE_OMP_FOR:
3160 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
3161 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
3162 break;
3163 /* FALLTHRU */
3164 case GIMPLE_OMP_SECTIONS:
3165 case GIMPLE_OMP_SINGLE:
3166 case GIMPLE_OMP_ORDERED:
3167 case GIMPLE_OMP_MASTER:
3168 case GIMPLE_OMP_TASK:
3169 case GIMPLE_OMP_CRITICAL:
3170 if (is_gimple_call (stmt))
3171 {
3172 if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3173 != BUILT_IN_GOMP_BARRIER)
3174 return true;
3175 error_at (gimple_location (stmt),
3176 "barrier region may not be closely nested inside "
3177 "of work-sharing, %<loop%>, %<critical%>, "
3178 "%<ordered%>, %<master%>, explicit %<task%> or "
3179 "%<taskloop%> region");
3180 return false;
3181 }
3182 error_at (gimple_location (stmt),
3183 "work-sharing region may not be closely nested inside "
3184 "of work-sharing, %<loop%>, %<critical%>, %<ordered%>, "
3185 "%<master%>, explicit %<task%> or %<taskloop%> region");
3186 return false;
3187 case GIMPLE_OMP_PARALLEL:
3188 case GIMPLE_OMP_TEAMS:
3189 return true;
3190 case GIMPLE_OMP_TARGET:
3191 if (gimple_omp_target_kind (ctx->stmt)
3192 == GF_OMP_TARGET_KIND_REGION)
3193 return true;
3194 break;
3195 default:
3196 break;
3197 }
3198 break;
3199 case GIMPLE_OMP_MASTER:
3200 for (; ctx != NULL; ctx = ctx->outer)
3201 switch (gimple_code (ctx->stmt))
3202 {
3203 case GIMPLE_OMP_FOR:
3204 if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
3205 && gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
3206 break;
3207 /* FALLTHRU */
3208 case GIMPLE_OMP_SECTIONS:
3209 case GIMPLE_OMP_SINGLE:
3210 case GIMPLE_OMP_TASK:
3211 error_at (gimple_location (stmt),
3212 "%<master%> region may not be closely nested inside "
3213 "of work-sharing, %<loop%>, explicit %<task%> or "
3214 "%<taskloop%> region");
3215 return false;
3216 case GIMPLE_OMP_PARALLEL:
3217 case GIMPLE_OMP_TEAMS:
3218 return true;
3219 case GIMPLE_OMP_TARGET:
3220 if (gimple_omp_target_kind (ctx->stmt)
3221 == GF_OMP_TARGET_KIND_REGION)
3222 return true;
3223 break;
3224 default:
3225 break;
3226 }
3227 break;
3228 case GIMPLE_OMP_TASK:
3229 for (c = gimple_omp_task_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
3230 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
3231 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
3232 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
3233 {
3234 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
3235 error_at (OMP_CLAUSE_LOCATION (c),
3236 "%<depend(%s)%> is only allowed in %<omp ordered%>",
3237 kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
3238 return false;
3239 }
3240 break;
3241 case GIMPLE_OMP_ORDERED:
3242 for (c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
3243 c; c = OMP_CLAUSE_CHAIN (c))
3244 {
3245 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
3246 {
3247 gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREADS
3248 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SIMD);
3249 continue;
3250 }
3251 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
3252 if (kind == OMP_CLAUSE_DEPEND_SOURCE
3253 || kind == OMP_CLAUSE_DEPEND_SINK)
3254 {
3255 tree oclause;
3256 /* Look for containing ordered(N) loop. */
3257 if (ctx == NULL
3258 || gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
3259 || (oclause
3260 = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3261 OMP_CLAUSE_ORDERED)) == NULL_TREE)
3262 {
3263 error_at (OMP_CLAUSE_LOCATION (c),
3264 "%<ordered%> construct with %<depend%> clause "
3265 "must be closely nested inside an %<ordered%> "
3266 "loop");
3267 return false;
3268 }
3269 else if (OMP_CLAUSE_ORDERED_EXPR (oclause) == NULL_TREE)
3270 {
3271 error_at (OMP_CLAUSE_LOCATION (c),
3272 "%<ordered%> construct with %<depend%> clause "
3273 "must be closely nested inside a loop with "
3274 "%<ordered%> clause with a parameter");
3275 return false;
3276 }
3277 }
3278 else
3279 {
3280 error_at (OMP_CLAUSE_LOCATION (c),
3281 "invalid depend kind in omp %<ordered%> %<depend%>");
3282 return false;
3283 }
3284 }
3285 c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
3286 if (omp_find_clause (c, OMP_CLAUSE_SIMD))
3287 {
3288 /* ordered simd must be closely nested inside of simd region,
3289 and simd region must not encounter constructs other than
3290 ordered simd, therefore ordered simd may be either orphaned,
3291 or ctx->stmt must be simd. The latter case is handled already
3292 earlier. */
3293 if (ctx != NULL)
3294 {
3295 error_at (gimple_location (stmt),
3296 "%<ordered%> %<simd%> must be closely nested inside "
3297 "%<simd%> region");
3298 return false;
3299 }
3300 }
3301 for (; ctx != NULL; ctx = ctx->outer)
3302 switch (gimple_code (ctx->stmt))
3303 {
3304 case GIMPLE_OMP_CRITICAL:
3305 case GIMPLE_OMP_TASK:
3306 case GIMPLE_OMP_ORDERED:
3307 ordered_in_taskloop:
3308 error_at (gimple_location (stmt),
3309 "%<ordered%> region may not be closely nested inside "
3310 "of %<critical%>, %<ordered%>, explicit %<task%> or "
3311 "%<taskloop%> region");
3312 return false;
3313 case GIMPLE_OMP_FOR:
3314 if (gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP)
3315 goto ordered_in_taskloop;
3316 tree o;
3317 o = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3318 OMP_CLAUSE_ORDERED);
3319 if (o == NULL)
3320 {
3321 error_at (gimple_location (stmt),
3322 "%<ordered%> region must be closely nested inside "
3323 "a loop region with an %<ordered%> clause");
3324 return false;
3325 }
3326 if (OMP_CLAUSE_ORDERED_EXPR (o) != NULL_TREE
3327 && omp_find_clause (c, OMP_CLAUSE_DEPEND) == NULL_TREE)
3328 {
3329 error_at (gimple_location (stmt),
3330 "%<ordered%> region without %<depend%> clause may "
3331 "not be closely nested inside a loop region with "
3332 "an %<ordered%> clause with a parameter");
3333 return false;
3334 }
3335 return true;
3336 case GIMPLE_OMP_TARGET:
3337 if (gimple_omp_target_kind (ctx->stmt)
3338 != GF_OMP_TARGET_KIND_REGION)
3339 break;
3340 /* FALLTHRU */
3341 case GIMPLE_OMP_PARALLEL:
3342 case GIMPLE_OMP_TEAMS:
3343 error_at (gimple_location (stmt),
3344 "%<ordered%> region must be closely nested inside "
3345 "a loop region with an %<ordered%> clause");
3346 return false;
3347 default:
3348 break;
3349 }
3350 break;
3351 case GIMPLE_OMP_CRITICAL:
3352 {
3353 tree this_stmt_name
3354 = gimple_omp_critical_name (as_a <gomp_critical *> (stmt));
3355 for (; ctx != NULL; ctx = ctx->outer)
3356 if (gomp_critical *other_crit
3357 = dyn_cast <gomp_critical *> (ctx->stmt))
3358 if (this_stmt_name == gimple_omp_critical_name (other_crit))
3359 {
3360 error_at (gimple_location (stmt),
3361 "%<critical%> region may not be nested inside "
3362 "a %<critical%> region with the same name");
3363 return false;
3364 }
3365 }
3366 break;
3367 case GIMPLE_OMP_TEAMS:
3368 if (ctx == NULL)
3369 break;
3370 else if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET
3371 || (gimple_omp_target_kind (ctx->stmt)
3372 != GF_OMP_TARGET_KIND_REGION))
3373 {
3374 /* Teams construct can appear either strictly nested inside of
3375 target construct with no intervening stmts, or can be encountered
3376 only by initial task (so must not appear inside any OpenMP
3377 construct. */
3378 error_at (gimple_location (stmt),
3379 "%<teams%> construct must be closely nested inside of "
3380 "%<target%> construct or not nested in any OpenMP "
3381 "construct");
3382 return false;
3383 }
3384 break;
3385 case GIMPLE_OMP_TARGET:
3386 for (c = gimple_omp_target_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
3387 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
3388 && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
3389 || OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
3390 {
3391 enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
3392 error_at (OMP_CLAUSE_LOCATION (c),
3393 "%<depend(%s)%> is only allowed in %<omp ordered%>",
3394 kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
3395 return false;
3396 }
3397 if (is_gimple_omp_offloaded (stmt)
3398 && oacc_get_fn_attrib (cfun->decl) != NULL)
3399 {
3400 error_at (gimple_location (stmt),
3401 "OpenACC region inside of OpenACC routine, nested "
3402 "parallelism not supported yet");
3403 return false;
3404 }
3405 for (; ctx != NULL; ctx = ctx->outer)
3406 {
3407 if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET)
3408 {
3409 if (is_gimple_omp (stmt)
3410 && is_gimple_omp_oacc (stmt)
3411 && is_gimple_omp (ctx->stmt))
3412 {
3413 error_at (gimple_location (stmt),
3414 "OpenACC construct inside of non-OpenACC region");
3415 return false;
3416 }
3417 continue;
3418 }
3419
3420 const char *stmt_name, *ctx_stmt_name;
3421 switch (gimple_omp_target_kind (stmt))
3422 {
3423 case GF_OMP_TARGET_KIND_REGION: stmt_name = "target"; break;
3424 case GF_OMP_TARGET_KIND_DATA: stmt_name = "target data"; break;
3425 case GF_OMP_TARGET_KIND_UPDATE: stmt_name = "target update"; break;
3426 case GF_OMP_TARGET_KIND_ENTER_DATA:
3427 stmt_name = "target enter data"; break;
3428 case GF_OMP_TARGET_KIND_EXIT_DATA:
3429 stmt_name = "target exit data"; break;
3430 case GF_OMP_TARGET_KIND_OACC_PARALLEL: stmt_name = "parallel"; break;
3431 case GF_OMP_TARGET_KIND_OACC_KERNELS: stmt_name = "kernels"; break;
3432 case GF_OMP_TARGET_KIND_OACC_SERIAL: stmt_name = "serial"; break;
3433 case GF_OMP_TARGET_KIND_OACC_DATA: stmt_name = "data"; break;
3434 case GF_OMP_TARGET_KIND_OACC_UPDATE: stmt_name = "update"; break;
3435 case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA:
3436 stmt_name = "enter/exit data"; break;
3437 case GF_OMP_TARGET_KIND_OACC_DECLARE: stmt_name = "declare"; break;
3438 case GF_OMP_TARGET_KIND_OACC_HOST_DATA: stmt_name = "host_data";
3439 break;
3440 default: gcc_unreachable ();
3441 }
3442 switch (gimple_omp_target_kind (ctx->stmt))
3443 {
3444 case GF_OMP_TARGET_KIND_REGION: ctx_stmt_name = "target"; break;
3445 case GF_OMP_TARGET_KIND_DATA: ctx_stmt_name = "target data"; break;
3446 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
3447 ctx_stmt_name = "parallel"; break;
3448 case GF_OMP_TARGET_KIND_OACC_KERNELS:
3449 ctx_stmt_name = "kernels"; break;
3450 case GF_OMP_TARGET_KIND_OACC_SERIAL:
3451 ctx_stmt_name = "serial"; break;
3452 case GF_OMP_TARGET_KIND_OACC_DATA: ctx_stmt_name = "data"; break;
3453 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
3454 ctx_stmt_name = "host_data"; break;
3455 default: gcc_unreachable ();
3456 }
3457
3458 /* OpenACC/OpenMP mismatch? */
3459 if (is_gimple_omp_oacc (stmt)
3460 != is_gimple_omp_oacc (ctx->stmt))
3461 {
3462 error_at (gimple_location (stmt),
3463 "%s %qs construct inside of %s %qs region",
3464 (is_gimple_omp_oacc (stmt)
3465 ? "OpenACC" : "OpenMP"), stmt_name,
3466 (is_gimple_omp_oacc (ctx->stmt)
3467 ? "OpenACC" : "OpenMP"), ctx_stmt_name);
3468 return false;
3469 }
3470 if (is_gimple_omp_offloaded (ctx->stmt))
3471 {
3472 /* No GIMPLE_OMP_TARGET inside offloaded OpenACC CTX. */
3473 if (is_gimple_omp_oacc (ctx->stmt))
3474 {
3475 error_at (gimple_location (stmt),
3476 "%qs construct inside of %qs region",
3477 stmt_name, ctx_stmt_name);
3478 return false;
3479 }
3480 else
3481 {
3482 warning_at (gimple_location (stmt), 0,
3483 "%qs construct inside of %qs region",
3484 stmt_name, ctx_stmt_name);
3485 }
3486 }
3487 }
3488 break;
3489 default:
3490 break;
3491 }
3492 return true;
3493 }
3494
3495
3496 /* Helper function scan_omp.
3497
3498 Callback for walk_tree or operators in walk_gimple_stmt used to
3499 scan for OMP directives in TP. */
3500
static tree
scan_omp_1_op (tree *tp, int *walk_subtrees, void *data)
{
  /* WI->info carries the omp_context set up by scan_omp; it is NULL
     when we are walking statements outside any OMP construct.  */
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  omp_context *ctx = (omp_context *) wi->info;
  tree t = *tp;

  switch (TREE_CODE (t))
    {
    case VAR_DECL:
    case PARM_DECL:
    case LABEL_DECL:
    case RESULT_DECL:
      if (ctx)
	{
	  /* Replace the decl with its mapping from the context's copy
	     body data (ctx->cb) and install the replacement in place.  */
	  tree repl = remap_decl (t, &ctx->cb);
	  gcc_checking_assert (TREE_CODE (repl) != ERROR_MARK);
	  *tp = repl;
	}
      break;

    default:
      /* Types themselves are remapped directly.  */
      if (ctx && TYPE_P (t))
	*tp = remap_type (t, &ctx->cb);
      else if (!DECL_P (t))
	{
	  /* Non-decl operands: keep walking into subtrees, but also
	     remap the operand's own type if the context changes it.  */
	  *walk_subtrees = 1;
	  if (ctx)
	    {
	      tree tem = remap_type (TREE_TYPE (t), &ctx->cb);
	      if (tem != TREE_TYPE (t))
		{
		  /* INTEGER_CSTs are shared, so build a fresh constant
		     of the remapped type rather than mutating in place.  */
		  if (TREE_CODE (t) == INTEGER_CST)
		    *tp = wide_int_to_tree (tem, wi::to_wide (t));
		  else
		    TREE_TYPE (t) = tem;
		}
	    }
	}
      break;
    }

  return NULL_TREE;
}
3545
3546 /* Return true if FNDECL is a setjmp or a longjmp. */
3547
3548 static bool
setjmp_or_longjmp_p(const_tree fndecl)3549 setjmp_or_longjmp_p (const_tree fndecl)
3550 {
3551 if (fndecl_built_in_p (fndecl, BUILT_IN_SETJMP)
3552 || fndecl_built_in_p (fndecl, BUILT_IN_LONGJMP))
3553 return true;
3554
3555 tree declname = DECL_NAME (fndecl);
3556 if (!declname
3557 || (DECL_CONTEXT (fndecl) != NULL_TREE
3558 && TREE_CODE (DECL_CONTEXT (fndecl)) != TRANSLATION_UNIT_DECL)
3559 || !TREE_PUBLIC (fndecl))
3560 return false;
3561
3562 const char *name = IDENTIFIER_POINTER (declname);
3563 return !strcmp (name, "setjmp") || !strcmp (name, "longjmp");
3564 }
3565
3566 /* Return true if FNDECL is an omp_* runtime API call. */
3567
3568 static bool
omp_runtime_api_call(const_tree fndecl)3569 omp_runtime_api_call (const_tree fndecl)
3570 {
3571 tree declname = DECL_NAME (fndecl);
3572 if (!declname
3573 || (DECL_CONTEXT (fndecl) != NULL_TREE
3574 && TREE_CODE (DECL_CONTEXT (fndecl)) != TRANSLATION_UNIT_DECL)
3575 || !TREE_PUBLIC (fndecl))
3576 return false;
3577
3578 const char *name = IDENTIFIER_POINTER (declname);
3579 if (strncmp (name, "omp_", 4) != 0)
3580 return false;
3581
3582 static const char *omp_runtime_apis[] =
3583 {
3584 /* This array has 3 sections. First omp_* calls that don't
3585 have any suffixes. */
3586 "target_alloc",
3587 "target_associate_ptr",
3588 "target_disassociate_ptr",
3589 "target_free",
3590 "target_is_present",
3591 "target_memcpy",
3592 "target_memcpy_rect",
3593 NULL,
3594 /* Now omp_* calls that are available as omp_* and omp_*_. */
3595 "capture_affinity",
3596 "destroy_lock",
3597 "destroy_nest_lock",
3598 "display_affinity",
3599 "get_active_level",
3600 "get_affinity_format",
3601 "get_cancellation",
3602 "get_default_device",
3603 "get_dynamic",
3604 "get_initial_device",
3605 "get_level",
3606 "get_max_active_levels",
3607 "get_max_task_priority",
3608 "get_max_threads",
3609 "get_nested",
3610 "get_num_devices",
3611 "get_num_places",
3612 "get_num_procs",
3613 "get_num_teams",
3614 "get_num_threads",
3615 "get_partition_num_places",
3616 "get_place_num",
3617 "get_proc_bind",
3618 "get_team_num",
3619 "get_thread_limit",
3620 "get_thread_num",
3621 "get_wtick",
3622 "get_wtime",
3623 "in_final",
3624 "in_parallel",
3625 "init_lock",
3626 "init_nest_lock",
3627 "is_initial_device",
3628 "pause_resource",
3629 "pause_resource_all",
3630 "set_affinity_format",
3631 "set_lock",
3632 "set_nest_lock",
3633 "test_lock",
3634 "test_nest_lock",
3635 "unset_lock",
3636 "unset_nest_lock",
3637 NULL,
3638 /* And finally calls available as omp_*, omp_*_ and omp_*_8_. */
3639 "get_ancestor_thread_num",
3640 "get_partition_place_nums",
3641 "get_place_num_procs",
3642 "get_place_proc_ids",
3643 "get_schedule",
3644 "get_team_size",
3645 "set_default_device",
3646 "set_dynamic",
3647 "set_max_active_levels",
3648 "set_nested",
3649 "set_num_threads",
3650 "set_schedule"
3651 };
3652
3653 int mode = 0;
3654 for (unsigned i = 0; i < ARRAY_SIZE (omp_runtime_apis); i++)
3655 {
3656 if (omp_runtime_apis[i] == NULL)
3657 {
3658 mode++;
3659 continue;
3660 }
3661 size_t len = strlen (omp_runtime_apis[i]);
3662 if (strncmp (name + 4, omp_runtime_apis[i], len) == 0
3663 && (name[4 + len] == '\0'
3664 || (mode > 0
3665 && name[4 + len] == '_'
3666 && (name[4 + len + 1] == '\0'
3667 || (mode > 1
3668 && strcmp (name + 4 + len + 1, "8_") == 0)))))
3669 return true;
3670 }
3671 return false;
3672 }
3673
3674 /* Helper function for scan_omp.
3675
3676 Callback for walk_gimple_stmt used to scan for OMP directives in
3677 the current statement in GSI. */
3678
static tree
scan_omp_1_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
		 struct walk_stmt_info *wi)
{
  gimple *stmt = gsi_stmt (*gsi);
  omp_context *ctx = (omp_context *) wi->info;

  /* Keep diagnostics pointing at the statement being scanned.  */
  if (gimple_has_location (stmt))
    input_location = gimple_location (stmt);

  /* Check the nesting restrictions.  Statements that violate them are
     diagnosed and then replaced by a nop below so lowering can proceed.  */
  bool remove = false;
  if (is_gimple_omp (stmt))
    remove = !check_omp_nesting_restrictions (stmt, ctx);
  else if (is_gimple_call (stmt))
    {
      tree fndecl = gimple_call_fndecl (stmt);
      if (fndecl)
	{
	  /* setjmp/longjmp are rejected inside simd regions (but a
	     loop-construct context, ctx->loop_p, is exempt).  */
	  if (ctx
	      && gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
	      && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD
	      && setjmp_or_longjmp_p (fndecl)
	      && !ctx->loop_p)
	    {
	      remove = true;
	      error_at (gimple_location (stmt),
			"setjmp/longjmp inside %<simd%> construct");
	    }
	  /* These GOMP builtins stand in for OMP constructs and must
	     obey the same nesting rules.  */
	  else if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
	    switch (DECL_FUNCTION_CODE (fndecl))
	      {
	      case BUILT_IN_GOMP_BARRIER:
	      case BUILT_IN_GOMP_CANCEL:
	      case BUILT_IN_GOMP_CANCELLATION_POINT:
	      case BUILT_IN_GOMP_TASKYIELD:
	      case BUILT_IN_GOMP_TASKWAIT:
	      case BUILT_IN_GOMP_TASKGROUP_START:
	      case BUILT_IN_GOMP_TASKGROUP_END:
		remove = !check_omp_nesting_restrictions (stmt, ctx);
		break;
	      default:
		break;
	      }
	  else if (ctx)
	    {
	      /* omp_* runtime API calls are not allowed in a region with
		 the order(concurrent) clause; look through a GIMPLE_OMP_SCAN
		 wrapper to the enclosing context when checking.  */
	      omp_context *octx = ctx;
	      if (gimple_code (ctx->stmt) == GIMPLE_OMP_SCAN && ctx->outer)
		octx = ctx->outer;
	      if (octx->order_concurrent && omp_runtime_api_call (fndecl))
		{
		  remove = true;
		  error_at (gimple_location (stmt),
			    "OpenMP runtime API call %qD in a region with "
			    "%<order(concurrent)%> clause", fndecl);
		}
	    }
	}
    }
  if (remove)
    {
      /* Invalid statement: replace it with a nop so scanning/lowering
	 of the remainder can continue after the error.  */
      stmt = gimple_build_nop ();
      gsi_replace (gsi, stmt, false);
    }

  *handled_ops_p = true;

  /* Dispatch to the construct-specific scanner; most cases create a new
     omp_context for the construct's body.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_OMP_PARALLEL:
      taskreg_nesting_level++;
      scan_omp_parallel (gsi, ctx);
      taskreg_nesting_level--;
      break;

    case GIMPLE_OMP_TASK:
      taskreg_nesting_level++;
      scan_omp_task (gsi, ctx);
      taskreg_nesting_level--;
      break;

    case GIMPLE_OMP_FOR:
      /* A combined simd with an inscan reduction gets the dedicated
	 simd-scan treatment (unless errors were already seen).  */
      if ((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
	   == GF_OMP_FOR_KIND_SIMD)
	  && gimple_omp_for_combined_into_p (stmt)
	  && gimple_code (ctx->stmt) != GIMPLE_OMP_SCAN)
	{
	  tree clauses = gimple_omp_for_clauses (as_a <gomp_for *> (stmt));
	  tree c = omp_find_clause (clauses, OMP_CLAUSE_REDUCTION);
	  if (c && OMP_CLAUSE_REDUCTION_INSCAN (c) && !seen_error ())
	    {
	      scan_omp_simd_scan (gsi, as_a <gomp_for *> (stmt), ctx);
	      break;
	    }
	}
      /* A simd loop that may be offloaded to a SIMT target is scanned
	 specially; otherwise use the generic OMP_FOR scanner.  */
      if ((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
	   == GF_OMP_FOR_KIND_SIMD)
	  && omp_maybe_offloaded_ctx (ctx)
	  && omp_max_simt_vf ())
	scan_omp_simd (gsi, as_a <gomp_for *> (stmt), ctx);
      else
	scan_omp_for (as_a <gomp_for *> (stmt), ctx);
      break;

    case GIMPLE_OMP_SECTIONS:
      scan_omp_sections (as_a <gomp_sections *> (stmt), ctx);
      break;

    case GIMPLE_OMP_SINGLE:
      scan_omp_single (as_a <gomp_single *> (stmt), ctx);
      break;

    case GIMPLE_OMP_SCAN:
      /* Record on the enclosing context whether this scan directive
	 carries an inclusive or exclusive clause.  */
      if (tree clauses = gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt)))
	{
	  if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_INCLUSIVE)
	    ctx->scan_inclusive = true;
	  else if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_EXCLUSIVE)
	    ctx->scan_exclusive = true;
	}
      /* FALLTHRU */
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_GRID_BODY:
      /* Simple body-only constructs: new context, then scan the body.  */
      ctx = new_omp_context (stmt, ctx);
      scan_omp (gimple_omp_body_ptr (stmt), ctx);
      break;

    case GIMPLE_OMP_TASKGROUP:
      ctx = new_omp_context (stmt, ctx);
      scan_sharing_clauses (gimple_omp_taskgroup_clauses (stmt), ctx);
      scan_omp (gimple_omp_body_ptr (stmt), ctx);
      break;

    case GIMPLE_OMP_TARGET:
      /* Offloaded target regions count as a taskreg nesting level.  */
      if (is_gimple_omp_offloaded (stmt))
	{
	  taskreg_nesting_level++;
	  scan_omp_target (as_a <gomp_target *> (stmt), ctx);
	  taskreg_nesting_level--;
	}
      else
	scan_omp_target (as_a <gomp_target *> (stmt), ctx);
      break;

    case GIMPLE_OMP_TEAMS:
      /* Host teams constructs count as a taskreg nesting level.  */
      if (gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
	{
	  taskreg_nesting_level++;
	  scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
	  taskreg_nesting_level--;
	}
      else
	scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
      break;

    case GIMPLE_BIND:
      {
	tree var;

	/* Let the walker descend into the bind body; just seed identity
	   mappings for the bind's variables in the current context.  */
	*handled_ops_p = false;
	if (ctx)
	  for (var = gimple_bind_vars (as_a <gbind *> (stmt));
	       var ;
	       var = DECL_CHAIN (var))
	    insert_decl_map (&ctx->cb, var, var);
      }
      break;
    default:
      *handled_ops_p = false;
      break;
    }

  return NULL_TREE;
}
3856
3857
3858 /* Scan all the statements starting at the current statement. CTX
3859 contains context information about the OMP directives and
3860 clauses found during the scan. */
3861
3862 static void
scan_omp(gimple_seq * body_p,omp_context * ctx)3863 scan_omp (gimple_seq *body_p, omp_context *ctx)
3864 {
3865 location_t saved_location;
3866 struct walk_stmt_info wi;
3867
3868 memset (&wi, 0, sizeof (wi));
3869 wi.info = ctx;
3870 wi.want_locations = true;
3871
3872 saved_location = input_location;
3873 walk_gimple_seq_mod (body_p, scan_omp_1_stmt, scan_omp_1_op, &wi);
3874 input_location = saved_location;
3875 }
3876
3877 /* Re-gimplification and code generation routines. */
3878
3879 /* Remove omp_member_access_dummy_var variables from gimple_bind_vars
3880 of BIND if in a method. */
3881
3882 static void
maybe_remove_omp_member_access_dummy_vars(gbind * bind)3883 maybe_remove_omp_member_access_dummy_vars (gbind *bind)
3884 {
3885 if (DECL_ARGUMENTS (current_function_decl)
3886 && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
3887 && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
3888 == POINTER_TYPE))
3889 {
3890 tree vars = gimple_bind_vars (bind);
3891 for (tree *pvar = &vars; *pvar; )
3892 if (omp_member_access_dummy_var (*pvar))
3893 *pvar = DECL_CHAIN (*pvar);
3894 else
3895 pvar = &DECL_CHAIN (*pvar);
3896 gimple_bind_set_vars (bind, vars);
3897 }
3898 }
3899
3900 /* Remove omp_member_access_dummy_var variables from BLOCK_VARS of
3901 block and its subblocks. */
3902
3903 static void
remove_member_access_dummy_vars(tree block)3904 remove_member_access_dummy_vars (tree block)
3905 {
3906 for (tree *pvar = &BLOCK_VARS (block); *pvar; )
3907 if (omp_member_access_dummy_var (*pvar))
3908 *pvar = DECL_CHAIN (*pvar);
3909 else
3910 pvar = &DECL_CHAIN (*pvar);
3911
3912 for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
3913 remove_member_access_dummy_vars (block);
3914 }
3915
3916 /* If a context was created for STMT when it was scanned, return it. */
3917
3918 static omp_context *
maybe_lookup_ctx(gimple * stmt)3919 maybe_lookup_ctx (gimple *stmt)
3920 {
3921 splay_tree_node n;
3922 n = splay_tree_lookup (all_contexts, (splay_tree_key) stmt);
3923 return n ? (omp_context *) n->value : NULL;
3924 }
3925
3926
3927 /* Find the mapping for DECL in CTX or the immediately enclosing
3928 context that has a mapping for DECL.
3929
3930 If CTX is a nested parallel directive, we may have to use the decl
3931 mappings created in CTX's parent context. Suppose that we have the
3932 following parallel nesting (variable UIDs showed for clarity):
3933
3934 iD.1562 = 0;
3935 #omp parallel shared(iD.1562) -> outer parallel
3936 iD.1562 = iD.1562 + 1;
3937
3938 #omp parallel shared (iD.1562) -> inner parallel
3939 iD.1562 = iD.1562 - 1;
3940
3941 Each parallel structure will create a distinct .omp_data_s structure
3942 for copying iD.1562 in/out of the directive:
3943
3944 outer parallel .omp_data_s.1.i -> iD.1562
3945 inner parallel .omp_data_s.2.i -> iD.1562
3946
3947 A shared variable mapping will produce a copy-out operation before
3948 the parallel directive and a copy-in operation after it. So, in
3949 this case we would have:
3950
3951 iD.1562 = 0;
3952 .omp_data_o.1.i = iD.1562;
3953 #omp parallel shared(iD.1562) -> outer parallel
3954 .omp_data_i.1 = &.omp_data_o.1
3955 .omp_data_i.1->i = .omp_data_i.1->i + 1;
3956
3957 .omp_data_o.2.i = iD.1562; -> **
3958 #omp parallel shared(iD.1562) -> inner parallel
3959 .omp_data_i.2 = &.omp_data_o.2
3960 .omp_data_i.2->i = .omp_data_i.2->i - 1;
3961
3962
3963 ** This is a problem. The symbol iD.1562 cannot be referenced
3964 inside the body of the outer parallel region. But since we are
3965 emitting this copy operation while expanding the inner parallel
3966 directive, we need to access the CTX structure of the outer
3967 parallel directive to get the correct mapping:
3968
3969 .omp_data_o.2.i = .omp_data_i.1->i
3970
3971 Since there may be other workshare or parallel directives enclosing
3972 the parallel directive, it may be necessary to walk up the context
3973 parent chain. This is not a problem in general because nested
3974 parallelism happens only rarely. */
3975
3976 static tree
lookup_decl_in_outer_ctx(tree decl,omp_context * ctx)3977 lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
3978 {
3979 tree t;
3980 omp_context *up;
3981
3982 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
3983 t = maybe_lookup_decl (decl, up);
3984
3985 gcc_assert (!ctx->is_nested || t || is_global_var (decl));
3986
3987 return t ? t : decl;
3988 }
3989
3990
3991 /* Similar to lookup_decl_in_outer_ctx, but return DECL if not found
3992 in outer contexts. */
3993
3994 static tree
maybe_lookup_decl_in_outer_ctx(tree decl,omp_context * ctx)3995 maybe_lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
3996 {
3997 tree t = NULL;
3998 omp_context *up;
3999
4000 for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
4001 t = maybe_lookup_decl (decl, up);
4002
4003 return t ? t : decl;
4004 }
4005
4006
/* Construct the initialization value for reduction operation OP of
   type TYPE: the neutral initializer OpenMP prescribes for OP, so
   that combining it with the first real value leaves that value
   unchanged.  LOC is used for location-sensitive folding.  */

tree
omp_reduction_init_op (location_t loc, enum tree_code op, tree type)
{
  switch (op)
    {
    /* Additive and OR-like operations are initialized to zero.  */
    case PLUS_EXPR:
    case MINUS_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_ORIF_EXPR:
    case TRUTH_XOR_EXPR:
    case NE_EXPR:
      return build_zero_cst (type);

    /* Multiplicative and AND-like operations are initialized to one.  */
    case MULT_EXPR:
    case TRUTH_AND_EXPR:
    case TRUTH_ANDIF_EXPR:
    case EQ_EXPR:
      return fold_convert_loc (loc, type, integer_one_node);

    /* Bitwise AND is initialized to all-ones.  */
    case BIT_AND_EXPR:
      return fold_convert_loc (loc, type, integer_minus_one_node);

    case MAX_EXPR:
      /* MAX is initialized to the smallest representable value:
	 -inf for floats honoring infinities, otherwise the most
	 negative finite value; the type minimum for pointers and
	 integers.  */
      if (SCALAR_FLOAT_TYPE_P (type))
	{
	  REAL_VALUE_TYPE max, min;
	  if (HONOR_INFINITIES (type))
	    {
	      real_inf (&max);
	      /* min = -max, i.e. -inf.  */
	      real_arithmetic (&min, NEGATE_EXPR, &max, NULL);
	    }
	  else
	    /* Sign argument 1: the largest-magnitude negative
	       finite value of this mode.  */
	    real_maxval (&min, 1, TYPE_MODE (type));
	  return build_real (type, min);
	}
      else if (POINTER_TYPE_P (type))
	{
	  wide_int min
	    = wi::min_value (TYPE_PRECISION (type), TYPE_SIGN (type));
	  return wide_int_to_tree (type, min);
	}
      else
	{
	  gcc_assert (INTEGRAL_TYPE_P (type));
	  return TYPE_MIN_VALUE (type);
	}

    case MIN_EXPR:
      /* MIN is initialized to the largest representable value,
	 mirroring the MAX_EXPR handling above.  */
      if (SCALAR_FLOAT_TYPE_P (type))
	{
	  REAL_VALUE_TYPE max;
	  if (HONOR_INFINITIES (type))
	    real_inf (&max);
	  else
	    real_maxval (&max, 0, TYPE_MODE (type));
	  return build_real (type, max);
	}
      else if (POINTER_TYPE_P (type))
	{
	  wide_int max
	    = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
	  return wide_int_to_tree (type, max);
	}
      else
	{
	  gcc_assert (INTEGRAL_TYPE_P (type));
	  return TYPE_MAX_VALUE (type);
	}

    default:
      gcc_unreachable ();
    }
}
4084
4085 /* Construct the initialization value for reduction CLAUSE. */
4086
4087 tree
omp_reduction_init(tree clause,tree type)4088 omp_reduction_init (tree clause, tree type)
4089 {
4090 return omp_reduction_init_op (OMP_CLAUSE_LOCATION (clause),
4091 OMP_CLAUSE_REDUCTION_CODE (clause), type);
4092 }
4093
/* Return alignment to be assumed for var in CLAUSE, which should be
   OMP_CLAUSE_ALIGNED.  Returns an INTEGER_CST in bytes.  */

static tree
omp_clause_aligned_alignment (tree clause)
{
  /* Honor an explicit alignment given on the clause.  */
  if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
    return OMP_CLAUSE_ALIGNED_ALIGNMENT (clause);

  /* Otherwise return implementation defined alignment: the largest
     unit alignment among the vector types built for the scalar
     integer and float modes the target prefers for SIMD.  */
  unsigned int al = 1;
  opt_scalar_mode mode_iter;
  auto_vector_modes modes;
  targetm.vectorize.autovectorize_vector_modes (&modes, true);
  /* Pairs of (scalar mode class, matching vector mode class).  */
  static enum mode_class classes[]
    = { MODE_INT, MODE_VECTOR_INT, MODE_FLOAT, MODE_VECTOR_FLOAT };
  for (int i = 0; i < 4; i += 2)
    /* The for loop above dictates that we only walk through scalar classes. */
    FOR_EACH_MODE_IN_CLASS (mode_iter, classes[i])
      {
	scalar_mode mode = mode_iter.require ();
	machine_mode vmode = targetm.vectorize.preferred_simd_mode (mode);
	/* Skip modes whose preferred SIMD mode isn't of the matching
	   vector class.  */
	if (GET_MODE_CLASS (vmode) != classes[i + 1])
	  continue;
	/* Prefer a related autovectorization mode when it is at least
	   as wide as the preferred SIMD mode.  */
	machine_mode alt_vmode;
	for (unsigned int j = 0; j < modes.length (); ++j)
	  if (related_vector_mode (modes[j], mode).exists (&alt_vmode)
	      && known_ge (GET_MODE_SIZE (alt_vmode), GET_MODE_SIZE (vmode)))
	    vmode = alt_vmode;

	/* Build the corresponding vector type and make sure the
	   modes round-trip before trusting its alignment.  */
	tree type = lang_hooks.types.type_for_mode (mode, 1);
	if (type == NULL_TREE || TYPE_MODE (type) != mode)
	  continue;
	type = build_vector_type_for_mode (type, vmode);
	if (TYPE_MODE (type) != vmode)
	  continue;
	if (TYPE_ALIGN_UNIT (type) > al)
	  al = TYPE_ALIGN_UNIT (type);
      }
  return build_int_cst (integer_type_node, al);
}
4135
4136
/* This structure is part of the interface between lower_rec_simd_input_clauses
   and lower_rec_input_clauses.  */

class omplow_simd_context {
public:
  /* Zero-initialize every member.  */
  omplow_simd_context () { memset (this, 0, sizeof (*this)); }
  /* Per-iteration index into the "omp simd array" temporaries.  */
  tree idx;
  /* Lane variable used when indexing the per-lane arrays.  */
  tree lane;
  /* Lane index used to read back the reduced value of inscan
     reductions (see lower_rec_simd_input_clauses).  */
  tree lastlane;
  /* Extra arguments collected for SIMT privatization: addresses of
     privatized variables; the first slot is a placeholder for the
     simduid.  */
  vec<tree, va_heap> simt_eargs;
  /* Statement list accumulating SIMT cleanup (clobber) assignments.  */
  gimple_seq simt_dlist;
  /* Maximum vectorization factor; 0 until computed, 1 means SIMD
     privatization is disabled.  */
  poly_uint64_pod max_vf;
  /* True when lowering for SIMT rather than vector SIMD.  */
  bool is_simt;
};
4151
/* Helper function of lower_rec_input_clauses, used for #pragma omp simd
   privatization.  Privatize NEW_VAR per SIMD lane: on success return
   true with IVAR set to the per-iteration reference and LVAR to the
   per-lane reference (which also becomes NEW_VAR's DECL_VALUE_EXPR
   when NEW_VAR is a decl).  Return false when the computed max_vf is 1,
   i.e. no SIMD privatization is possible.  For inscan reductions *RVAR
   is set to the reference holding the reduced value and, for exclusive
   scans, *RVAR2 to the reference used during the scan phase.  */

static bool
lower_rec_simd_input_clauses (tree new_var, omp_context *ctx,
			      omplow_simd_context *sctx, tree &ivar,
			      tree &lvar, tree *rvar = NULL,
			      tree *rvar2 = NULL)
{
  /* On the first call compute max_vf, clamped by any safelen clause,
     and create the shared idx/lane temporaries.  */
  if (known_eq (sctx->max_vf, 0U))
    {
      sctx->max_vf = sctx->is_simt ? omp_max_simt_vf () : omp_max_vf ();
      if (maybe_gt (sctx->max_vf, 1U))
	{
	  tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
				    OMP_CLAUSE_SAFELEN);
	  if (c)
	    {
	      poly_uint64 safe_len;
	      /* Non-constant or sub-1 safelen disables privatization.  */
	      if (!poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
		  || maybe_lt (safe_len, 1U))
		sctx->max_vf = 1;
	      else
		sctx->max_vf = lower_bound (sctx->max_vf, safe_len);
	    }
	}
      if (maybe_gt (sctx->max_vf, 1U))
	{
	  sctx->idx = create_tmp_var (unsigned_type_node);
	  sctx->lane = create_tmp_var (unsigned_type_node);
	}
    }
  if (known_eq (sctx->max_vf, 1U))
    return false;

  if (sctx->is_simt)
    {
      /* SIMT: registers are privatized implicitly per thread.  */
      if (is_gimple_reg (new_var))
	{
	  ivar = lvar = new_var;
	  return true;
	}
      /* Addressable vars get a marked local copy whose address is
	 passed to the SIMT entry arguments, plus a clobber in the
	 SIMT cleanup sequence.  */
      tree type = TREE_TYPE (new_var), ptype = build_pointer_type (type);
      ivar = lvar = create_tmp_var (type);
      TREE_ADDRESSABLE (ivar) = 1;
      DECL_ATTRIBUTES (ivar) = tree_cons (get_identifier ("omp simt private"),
					  NULL, DECL_ATTRIBUTES (ivar));
      sctx->simt_eargs.safe_push (build1 (ADDR_EXPR, ptype, ivar));
      tree clobber = build_clobber (type);
      gimple *g = gimple_build_assign (ivar, clobber);
      gimple_seq_add_stmt (&sctx->simt_dlist, g);
    }
  else
    {
      /* SIMD: create a max_vf-element "omp simd array" and refer to
	 one element per lane.  */
      tree atype = build_array_type_nelts (TREE_TYPE (new_var), sctx->max_vf);
      tree avar = create_tmp_var_raw (atype);
      if (TREE_ADDRESSABLE (new_var))
	TREE_ADDRESSABLE (avar) = 1;
      DECL_ATTRIBUTES (avar)
	= tree_cons (get_identifier ("omp simd array"), NULL,
		     DECL_ATTRIBUTES (avar));
      gimple_add_tmp_var (avar);
      tree iavar = avar;
      if (rvar && !ctx->for_simd_scan_phase)
	{
	  /* For inscan reductions, create another array temporary,
	     which will hold the reduced value.  */
	  iavar = create_tmp_var_raw (atype);
	  if (TREE_ADDRESSABLE (new_var))
	    TREE_ADDRESSABLE (iavar) = 1;
	  DECL_ATTRIBUTES (iavar)
	    = tree_cons (get_identifier ("omp simd array"), NULL,
			 tree_cons (get_identifier ("omp simd inscan"), NULL,
				    DECL_ATTRIBUTES (iavar)));
	  gimple_add_tmp_var (iavar);
	  ctx->cb.decl_map->put (avar, iavar);
	  if (sctx->lastlane == NULL_TREE)
	    sctx->lastlane = create_tmp_var (unsigned_type_node);
	  /* *RVAR reads the reduced value at lastlane.  */
	  *rvar = build4 (ARRAY_REF, TREE_TYPE (new_var), iavar,
			  sctx->lastlane, NULL_TREE, NULL_TREE);
	  TREE_THIS_NOTRAP (*rvar) = 1;

	  if (ctx->scan_exclusive)
	    {
	      /* And for exclusive scan yet another one, which will
		 hold the value during the scan phase.  */
	      tree savar = create_tmp_var_raw (atype);
	      if (TREE_ADDRESSABLE (new_var))
		TREE_ADDRESSABLE (savar) = 1;
	      DECL_ATTRIBUTES (savar)
		= tree_cons (get_identifier ("omp simd array"), NULL,
			     tree_cons (get_identifier ("omp simd inscan "
							"exclusive"), NULL,
					DECL_ATTRIBUTES (savar)));
	      gimple_add_tmp_var (savar);
	      ctx->cb.decl_map->put (iavar, savar);
	      *rvar2 = build4 (ARRAY_REF, TREE_TYPE (new_var), savar,
			       sctx->idx, NULL_TREE, NULL_TREE);
	      TREE_THIS_NOTRAP (*rvar2) = 1;
	    }
	}
      /* Per-iteration reference indexes with idx, per-lane reference
	 with lane.  */
      ivar = build4 (ARRAY_REF, TREE_TYPE (new_var), iavar, sctx->idx,
		     NULL_TREE, NULL_TREE);
      lvar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, sctx->lane,
		     NULL_TREE, NULL_TREE);
      TREE_THIS_NOTRAP (ivar) = 1;
      TREE_THIS_NOTRAP (lvar) = 1;
    }
  /* Redirect uses of NEW_VAR to the per-lane reference.  */
  if (DECL_P (new_var))
    {
      SET_DECL_VALUE_EXPR (new_var, lvar);
      DECL_HAS_VALUE_EXPR_P (new_var) = 1;
    }
  return true;
}
4267
4268 /* Helper function of lower_rec_input_clauses. For a reference
4269 in simd reduction, add an underlying variable it will reference. */
4270
4271 static void
handle_simd_reference(location_t loc,tree new_vard,gimple_seq * ilist)4272 handle_simd_reference (location_t loc, tree new_vard, gimple_seq *ilist)
4273 {
4274 tree z = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_vard)));
4275 if (TREE_CONSTANT (z))
4276 {
4277 z = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_vard)),
4278 get_name (new_vard));
4279 gimple_add_tmp_var (z);
4280 TREE_ADDRESSABLE (z) = 1;
4281 z = build_fold_addr_expr_loc (loc, z);
4282 gimplify_assign (new_vard, z, ilist);
4283 }
4284 }
4285
4286 /* Helper function for lower_rec_input_clauses. Emit into ilist sequence
4287 code to emit (type) (tskred_temp[idx]). */
4288
4289 static tree
task_reduction_read(gimple_seq * ilist,tree tskred_temp,tree type,unsigned idx)4290 task_reduction_read (gimple_seq *ilist, tree tskred_temp, tree type,
4291 unsigned idx)
4292 {
4293 unsigned HOST_WIDE_INT sz
4294 = tree_to_uhwi (TYPE_SIZE_UNIT (pointer_sized_int_node));
4295 tree r = build2 (MEM_REF, pointer_sized_int_node,
4296 tskred_temp, build_int_cst (TREE_TYPE (tskred_temp),
4297 idx * sz));
4298 tree v = create_tmp_var (pointer_sized_int_node);
4299 gimple *g = gimple_build_assign (v, r);
4300 gimple_seq_add_stmt (ilist, g);
4301 if (!useless_type_conversion_p (type, pointer_sized_int_node))
4302 {
4303 v = create_tmp_var (type);
4304 g = gimple_build_assign (v, NOP_EXPR, gimple_assign_lhs (g));
4305 gimple_seq_add_stmt (ilist, g);
4306 }
4307 return v;
4308 }
4309
4310 /* Generate code to implement the input clauses, FIRSTPRIVATE and COPYIN,
4311 from the receiver (aka child) side and initializers for REFERENCE_TYPE
4312 private variables. Initialization statements go in ILIST, while calls
4313 to destructors go in DLIST. */
4314
4315 static void
lower_rec_input_clauses(tree clauses,gimple_seq * ilist,gimple_seq * dlist,omp_context * ctx,struct omp_for_data * fd)4316 lower_rec_input_clauses (tree clauses, gimple_seq *ilist, gimple_seq *dlist,
4317 omp_context *ctx, struct omp_for_data *fd)
4318 {
4319 tree c, copyin_seq, x, ptr;
4320 bool copyin_by_ref = false;
4321 bool lastprivate_firstprivate = false;
4322 bool reduction_omp_orig_ref = false;
4323 int pass;
4324 bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
4325 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD);
4326 omplow_simd_context sctx = omplow_simd_context ();
4327 tree simt_lane = NULL_TREE, simtrec = NULL_TREE;
4328 tree ivar = NULL_TREE, lvar = NULL_TREE, uid = NULL_TREE;
4329 gimple_seq llist[4] = { };
4330 tree nonconst_simd_if = NULL_TREE;
4331
4332 copyin_seq = NULL;
4333 sctx.is_simt = is_simd && omp_find_clause (clauses, OMP_CLAUSE__SIMT_);
4334
4335 /* Set max_vf=1 (which will later enforce safelen=1) in simd loops
4336 with data sharing clauses referencing variable sized vars. That
4337 is unnecessarily hard to support and very unlikely to result in
4338 vectorized code anyway. */
4339 if (is_simd)
4340 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
4341 switch (OMP_CLAUSE_CODE (c))
4342 {
4343 case OMP_CLAUSE_LINEAR:
4344 if (OMP_CLAUSE_LINEAR_ARRAY (c))
4345 sctx.max_vf = 1;
4346 /* FALLTHRU */
4347 case OMP_CLAUSE_PRIVATE:
4348 case OMP_CLAUSE_FIRSTPRIVATE:
4349 case OMP_CLAUSE_LASTPRIVATE:
4350 if (is_variable_sized (OMP_CLAUSE_DECL (c)))
4351 sctx.max_vf = 1;
4352 else if (omp_is_reference (OMP_CLAUSE_DECL (c)))
4353 {
4354 tree rtype = TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c)));
4355 if (!TREE_CONSTANT (TYPE_SIZE_UNIT (rtype)))
4356 sctx.max_vf = 1;
4357 }
4358 break;
4359 case OMP_CLAUSE_REDUCTION:
4360 case OMP_CLAUSE_IN_REDUCTION:
4361 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
4362 || is_variable_sized (OMP_CLAUSE_DECL (c)))
4363 sctx.max_vf = 1;
4364 else if (omp_is_reference (OMP_CLAUSE_DECL (c)))
4365 {
4366 tree rtype = TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c)));
4367 if (!TREE_CONSTANT (TYPE_SIZE_UNIT (rtype)))
4368 sctx.max_vf = 1;
4369 }
4370 break;
4371 case OMP_CLAUSE_IF:
4372 if (integer_zerop (OMP_CLAUSE_IF_EXPR (c)))
4373 sctx.max_vf = 1;
4374 else if (TREE_CODE (OMP_CLAUSE_IF_EXPR (c)) != INTEGER_CST)
4375 nonconst_simd_if = OMP_CLAUSE_IF_EXPR (c);
4376 break;
4377 case OMP_CLAUSE_SIMDLEN:
4378 if (integer_onep (OMP_CLAUSE_SIMDLEN_EXPR (c)))
4379 sctx.max_vf = 1;
4380 break;
4381 case OMP_CLAUSE__CONDTEMP_:
4382 /* FIXME: lastprivate(conditional:) not handled for SIMT yet. */
4383 if (sctx.is_simt)
4384 sctx.max_vf = 1;
4385 break;
4386 default:
4387 continue;
4388 }
4389
4390 /* Add a placeholder for simduid. */
4391 if (sctx.is_simt && maybe_ne (sctx.max_vf, 1U))
4392 sctx.simt_eargs.safe_push (NULL_TREE);
4393
4394 unsigned task_reduction_cnt = 0;
4395 unsigned task_reduction_cntorig = 0;
4396 unsigned task_reduction_cnt_full = 0;
4397 unsigned task_reduction_cntorig_full = 0;
4398 unsigned task_reduction_other_cnt = 0;
4399 tree tskred_atype = NULL_TREE, tskred_avar = NULL_TREE;
4400 tree tskred_base = NULL_TREE, tskred_temp = NULL_TREE;
4401 /* Do all the fixed sized types in the first pass, and the variable sized
4402 types in the second pass. This makes sure that the scalar arguments to
4403 the variable sized types are processed before we use them in the
4404 variable sized operations. For task reductions we use 4 passes, in the
4405 first two we ignore them, in the third one gather arguments for
4406 GOMP_task_reduction_remap call and in the last pass actually handle
4407 the task reductions. */
4408 for (pass = 0; pass < ((task_reduction_cnt || task_reduction_other_cnt)
4409 ? 4 : 2); ++pass)
4410 {
4411 if (pass == 2 && task_reduction_cnt)
4412 {
4413 tskred_atype
4414 = build_array_type_nelts (ptr_type_node, task_reduction_cnt
4415 + task_reduction_cntorig);
4416 tskred_avar = create_tmp_var_raw (tskred_atype);
4417 gimple_add_tmp_var (tskred_avar);
4418 TREE_ADDRESSABLE (tskred_avar) = 1;
4419 task_reduction_cnt_full = task_reduction_cnt;
4420 task_reduction_cntorig_full = task_reduction_cntorig;
4421 }
4422 else if (pass == 3 && task_reduction_cnt)
4423 {
4424 x = builtin_decl_explicit (BUILT_IN_GOMP_TASK_REDUCTION_REMAP);
4425 gimple *g
4426 = gimple_build_call (x, 3, size_int (task_reduction_cnt),
4427 size_int (task_reduction_cntorig),
4428 build_fold_addr_expr (tskred_avar));
4429 gimple_seq_add_stmt (ilist, g);
4430 }
4431 if (pass == 3 && task_reduction_other_cnt)
4432 {
4433 /* For reduction clauses, build
4434 tskred_base = (void *) tskred_temp[2]
4435 + omp_get_thread_num () * tskred_temp[1]
4436 or if tskred_temp[1] is known to be constant, that constant
4437 directly. This is the start of the private reduction copy block
4438 for the current thread. */
4439 tree v = create_tmp_var (integer_type_node);
4440 x = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
4441 gimple *g = gimple_build_call (x, 0);
4442 gimple_call_set_lhs (g, v);
4443 gimple_seq_add_stmt (ilist, g);
4444 c = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
4445 tskred_temp = OMP_CLAUSE_DECL (c);
4446 if (is_taskreg_ctx (ctx))
4447 tskred_temp = lookup_decl (tskred_temp, ctx);
4448 tree v2 = create_tmp_var (sizetype);
4449 g = gimple_build_assign (v2, NOP_EXPR, v);
4450 gimple_seq_add_stmt (ilist, g);
4451 if (ctx->task_reductions[0])
4452 v = fold_convert (sizetype, ctx->task_reductions[0]);
4453 else
4454 v = task_reduction_read (ilist, tskred_temp, sizetype, 1);
4455 tree v3 = create_tmp_var (sizetype);
4456 g = gimple_build_assign (v3, MULT_EXPR, v2, v);
4457 gimple_seq_add_stmt (ilist, g);
4458 v = task_reduction_read (ilist, tskred_temp, ptr_type_node, 2);
4459 tskred_base = create_tmp_var (ptr_type_node);
4460 g = gimple_build_assign (tskred_base, POINTER_PLUS_EXPR, v, v3);
4461 gimple_seq_add_stmt (ilist, g);
4462 }
4463 task_reduction_cnt = 0;
4464 task_reduction_cntorig = 0;
4465 task_reduction_other_cnt = 0;
4466 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
4467 {
4468 enum omp_clause_code c_kind = OMP_CLAUSE_CODE (c);
4469 tree var, new_var;
4470 bool by_ref;
4471 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
4472 bool task_reduction_p = false;
4473 bool task_reduction_needs_orig_p = false;
4474 tree cond = NULL_TREE;
4475
4476 switch (c_kind)
4477 {
4478 case OMP_CLAUSE_PRIVATE:
4479 if (OMP_CLAUSE_PRIVATE_DEBUG (c))
4480 continue;
4481 break;
4482 case OMP_CLAUSE_SHARED:
4483 /* Ignore shared directives in teams construct inside
4484 of target construct. */
4485 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
4486 && !is_host_teams_ctx (ctx))
4487 continue;
4488 if (maybe_lookup_decl (OMP_CLAUSE_DECL (c), ctx) == NULL)
4489 {
4490 gcc_assert (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c)
4491 || is_global_var (OMP_CLAUSE_DECL (c)));
4492 continue;
4493 }
4494 case OMP_CLAUSE_FIRSTPRIVATE:
4495 case OMP_CLAUSE_COPYIN:
4496 break;
4497 case OMP_CLAUSE_LINEAR:
4498 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
4499 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
4500 lastprivate_firstprivate = true;
4501 break;
4502 case OMP_CLAUSE_REDUCTION:
4503 case OMP_CLAUSE_IN_REDUCTION:
4504 if (is_task_ctx (ctx) || OMP_CLAUSE_REDUCTION_TASK (c))
4505 {
4506 task_reduction_p = true;
4507 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
4508 {
4509 task_reduction_other_cnt++;
4510 if (pass == 2)
4511 continue;
4512 }
4513 else
4514 task_reduction_cnt++;
4515 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
4516 {
4517 var = OMP_CLAUSE_DECL (c);
4518 /* If var is a global variable that isn't privatized
4519 in outer contexts, we don't need to look up the
4520 original address, it is always the address of the
4521 global variable itself. */
4522 if (!DECL_P (var)
4523 || omp_is_reference (var)
4524 || !is_global_var
4525 (maybe_lookup_decl_in_outer_ctx (var, ctx)))
4526 {
4527 task_reduction_needs_orig_p = true;
4528 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
4529 task_reduction_cntorig++;
4530 }
4531 }
4532 }
4533 else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
4534 reduction_omp_orig_ref = true;
4535 break;
4536 case OMP_CLAUSE__REDUCTEMP_:
4537 if (!is_taskreg_ctx (ctx))
4538 continue;
4539 /* FALLTHRU */
4540 case OMP_CLAUSE__LOOPTEMP_:
4541 /* Handle _looptemp_/_reductemp_ clauses only on
4542 parallel/task. */
4543 if (fd)
4544 continue;
4545 break;
4546 case OMP_CLAUSE_LASTPRIVATE:
4547 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
4548 {
4549 lastprivate_firstprivate = true;
4550 if (pass != 0 || is_taskloop_ctx (ctx))
4551 continue;
4552 }
4553 /* Even without corresponding firstprivate, if
4554 decl is Fortran allocatable, it needs outer var
4555 reference. */
4556 else if (pass == 0
4557 && lang_hooks.decls.omp_private_outer_ref
4558 (OMP_CLAUSE_DECL (c)))
4559 lastprivate_firstprivate = true;
4560 break;
4561 case OMP_CLAUSE_ALIGNED:
4562 if (pass != 1)
4563 continue;
4564 var = OMP_CLAUSE_DECL (c);
4565 if (TREE_CODE (TREE_TYPE (var)) == POINTER_TYPE
4566 && !is_global_var (var))
4567 {
4568 new_var = maybe_lookup_decl (var, ctx);
4569 if (new_var == NULL_TREE)
4570 new_var = maybe_lookup_decl_in_outer_ctx (var, ctx);
4571 x = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
4572 tree alarg = omp_clause_aligned_alignment (c);
4573 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
4574 x = build_call_expr_loc (clause_loc, x, 2, new_var, alarg);
4575 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
4576 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
4577 gimplify_and_add (x, ilist);
4578 }
4579 else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
4580 && is_global_var (var))
4581 {
4582 tree ptype = build_pointer_type (TREE_TYPE (var)), t, t2;
4583 new_var = lookup_decl (var, ctx);
4584 t = maybe_lookup_decl_in_outer_ctx (var, ctx);
4585 t = build_fold_addr_expr_loc (clause_loc, t);
4586 t2 = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
4587 tree alarg = omp_clause_aligned_alignment (c);
4588 alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
4589 t = build_call_expr_loc (clause_loc, t2, 2, t, alarg);
4590 t = fold_convert_loc (clause_loc, ptype, t);
4591 x = create_tmp_var (ptype);
4592 t = build2 (MODIFY_EXPR, ptype, x, t);
4593 gimplify_and_add (t, ilist);
4594 t = build_simple_mem_ref_loc (clause_loc, x);
4595 SET_DECL_VALUE_EXPR (new_var, t);
4596 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4597 }
4598 continue;
4599 case OMP_CLAUSE__CONDTEMP_:
4600 if (is_parallel_ctx (ctx)
4601 || (is_simd && !OMP_CLAUSE__CONDTEMP__ITER (c)))
4602 break;
4603 continue;
4604 default:
4605 continue;
4606 }
4607
4608 if (task_reduction_p != (pass >= 2))
4609 continue;
4610
4611 new_var = var = OMP_CLAUSE_DECL (c);
4612 if ((c_kind == OMP_CLAUSE_REDUCTION
4613 || c_kind == OMP_CLAUSE_IN_REDUCTION)
4614 && TREE_CODE (var) == MEM_REF)
4615 {
4616 var = TREE_OPERAND (var, 0);
4617 if (TREE_CODE (var) == POINTER_PLUS_EXPR)
4618 var = TREE_OPERAND (var, 0);
4619 if (TREE_CODE (var) == INDIRECT_REF
4620 || TREE_CODE (var) == ADDR_EXPR)
4621 var = TREE_OPERAND (var, 0);
4622 if (is_variable_sized (var))
4623 {
4624 gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
4625 var = DECL_VALUE_EXPR (var);
4626 gcc_assert (TREE_CODE (var) == INDIRECT_REF);
4627 var = TREE_OPERAND (var, 0);
4628 gcc_assert (DECL_P (var));
4629 }
4630 new_var = var;
4631 }
4632 if (c_kind != OMP_CLAUSE_COPYIN)
4633 new_var = lookup_decl (var, ctx);
4634
4635 if (c_kind == OMP_CLAUSE_SHARED || c_kind == OMP_CLAUSE_COPYIN)
4636 {
4637 if (pass != 0)
4638 continue;
4639 }
4640 /* C/C++ array section reductions. */
4641 else if ((c_kind == OMP_CLAUSE_REDUCTION
4642 || c_kind == OMP_CLAUSE_IN_REDUCTION)
4643 && var != OMP_CLAUSE_DECL (c))
4644 {
4645 if (pass == 0)
4646 continue;
4647
4648 tree bias = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
4649 tree orig_var = TREE_OPERAND (OMP_CLAUSE_DECL (c), 0);
4650
4651 if (TREE_CODE (orig_var) == POINTER_PLUS_EXPR)
4652 {
4653 tree b = TREE_OPERAND (orig_var, 1);
4654 b = maybe_lookup_decl (b, ctx);
4655 if (b == NULL)
4656 {
4657 b = TREE_OPERAND (orig_var, 1);
4658 b = maybe_lookup_decl_in_outer_ctx (b, ctx);
4659 }
4660 if (integer_zerop (bias))
4661 bias = b;
4662 else
4663 {
4664 bias = fold_convert_loc (clause_loc,
4665 TREE_TYPE (b), bias);
4666 bias = fold_build2_loc (clause_loc, PLUS_EXPR,
4667 TREE_TYPE (b), b, bias);
4668 }
4669 orig_var = TREE_OPERAND (orig_var, 0);
4670 }
4671 if (pass == 2)
4672 {
4673 tree out = maybe_lookup_decl_in_outer_ctx (var, ctx);
4674 if (is_global_var (out)
4675 && TREE_CODE (TREE_TYPE (out)) != POINTER_TYPE
4676 && (TREE_CODE (TREE_TYPE (out)) != REFERENCE_TYPE
4677 || (TREE_CODE (TREE_TYPE (TREE_TYPE (out)))
4678 != POINTER_TYPE)))
4679 x = var;
4680 else
4681 {
4682 bool by_ref = use_pointer_for_field (var, NULL);
4683 x = build_receiver_ref (var, by_ref, ctx);
4684 if (TREE_CODE (TREE_TYPE (var)) == REFERENCE_TYPE
4685 && (TREE_CODE (TREE_TYPE (TREE_TYPE (var)))
4686 == POINTER_TYPE))
4687 x = build_fold_addr_expr (x);
4688 }
4689 if (TREE_CODE (orig_var) == INDIRECT_REF)
4690 x = build_simple_mem_ref (x);
4691 else if (TREE_CODE (orig_var) == ADDR_EXPR)
4692 {
4693 if (var == TREE_OPERAND (orig_var, 0))
4694 x = build_fold_addr_expr (x);
4695 }
4696 bias = fold_convert (sizetype, bias);
4697 x = fold_convert (ptr_type_node, x);
4698 x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
4699 TREE_TYPE (x), x, bias);
4700 unsigned cnt = task_reduction_cnt - 1;
4701 if (!task_reduction_needs_orig_p)
4702 cnt += (task_reduction_cntorig_full
4703 - task_reduction_cntorig);
4704 else
4705 cnt = task_reduction_cntorig - 1;
4706 tree r = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
4707 size_int (cnt), NULL_TREE, NULL_TREE);
4708 gimplify_assign (r, x, ilist);
4709 continue;
4710 }
4711
4712 if (TREE_CODE (orig_var) == INDIRECT_REF
4713 || TREE_CODE (orig_var) == ADDR_EXPR)
4714 orig_var = TREE_OPERAND (orig_var, 0);
4715 tree d = OMP_CLAUSE_DECL (c);
4716 tree type = TREE_TYPE (d);
4717 gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
4718 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
4719 const char *name = get_name (orig_var);
4720 if (pass == 3)
4721 {
4722 tree xv = create_tmp_var (ptr_type_node);
4723 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
4724 {
4725 unsigned cnt = task_reduction_cnt - 1;
4726 if (!task_reduction_needs_orig_p)
4727 cnt += (task_reduction_cntorig_full
4728 - task_reduction_cntorig);
4729 else
4730 cnt = task_reduction_cntorig - 1;
4731 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
4732 size_int (cnt), NULL_TREE, NULL_TREE);
4733
4734 gimple *g = gimple_build_assign (xv, x);
4735 gimple_seq_add_stmt (ilist, g);
4736 }
4737 else
4738 {
4739 unsigned int idx = *ctx->task_reduction_map->get (c);
4740 tree off;
4741 if (ctx->task_reductions[1 + idx])
4742 off = fold_convert (sizetype,
4743 ctx->task_reductions[1 + idx]);
4744 else
4745 off = task_reduction_read (ilist, tskred_temp, sizetype,
4746 7 + 3 * idx + 1);
4747 gimple *g = gimple_build_assign (xv, POINTER_PLUS_EXPR,
4748 tskred_base, off);
4749 gimple_seq_add_stmt (ilist, g);
4750 }
4751 x = fold_convert (build_pointer_type (boolean_type_node),
4752 xv);
4753 if (TREE_CONSTANT (v))
4754 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (x), x,
4755 TYPE_SIZE_UNIT (type));
4756 else
4757 {
4758 tree t = maybe_lookup_decl (v, ctx);
4759 if (t)
4760 v = t;
4761 else
4762 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
4763 gimplify_expr (&v, ilist, NULL, is_gimple_val,
4764 fb_rvalue);
4765 t = fold_build2_loc (clause_loc, PLUS_EXPR,
4766 TREE_TYPE (v), v,
4767 build_int_cst (TREE_TYPE (v), 1));
4768 t = fold_build2_loc (clause_loc, MULT_EXPR,
4769 TREE_TYPE (v), t,
4770 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4771 x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (x), x, t);
4772 }
4773 cond = create_tmp_var (TREE_TYPE (x));
4774 gimplify_assign (cond, x, ilist);
4775 x = xv;
4776 }
4777 else if (TREE_CONSTANT (v))
4778 {
4779 x = create_tmp_var_raw (type, name);
4780 gimple_add_tmp_var (x);
4781 TREE_ADDRESSABLE (x) = 1;
4782 x = build_fold_addr_expr_loc (clause_loc, x);
4783 }
4784 else
4785 {
4786 tree atmp
4787 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
4788 tree t = maybe_lookup_decl (v, ctx);
4789 if (t)
4790 v = t;
4791 else
4792 v = maybe_lookup_decl_in_outer_ctx (v, ctx);
4793 gimplify_expr (&v, ilist, NULL, is_gimple_val, fb_rvalue);
4794 t = fold_build2_loc (clause_loc, PLUS_EXPR,
4795 TREE_TYPE (v), v,
4796 build_int_cst (TREE_TYPE (v), 1));
4797 t = fold_build2_loc (clause_loc, MULT_EXPR,
4798 TREE_TYPE (v), t,
4799 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4800 tree al = size_int (TYPE_ALIGN (TREE_TYPE (type)));
4801 x = build_call_expr_loc (clause_loc, atmp, 2, t, al);
4802 }
4803
4804 tree ptype = build_pointer_type (TREE_TYPE (type));
4805 x = fold_convert_loc (clause_loc, ptype, x);
4806 tree y = create_tmp_var (ptype, name);
4807 gimplify_assign (y, x, ilist);
4808 x = y;
4809 tree yb = y;
4810
4811 if (!integer_zerop (bias))
4812 {
4813 bias = fold_convert_loc (clause_loc, pointer_sized_int_node,
4814 bias);
4815 yb = fold_convert_loc (clause_loc, pointer_sized_int_node,
4816 x);
4817 yb = fold_build2_loc (clause_loc, MINUS_EXPR,
4818 pointer_sized_int_node, yb, bias);
4819 x = fold_convert_loc (clause_loc, TREE_TYPE (x), yb);
4820 yb = create_tmp_var (ptype, name);
4821 gimplify_assign (yb, x, ilist);
4822 x = yb;
4823 }
4824
4825 d = TREE_OPERAND (d, 0);
4826 if (TREE_CODE (d) == POINTER_PLUS_EXPR)
4827 d = TREE_OPERAND (d, 0);
4828 if (TREE_CODE (d) == ADDR_EXPR)
4829 {
4830 if (orig_var != var)
4831 {
4832 gcc_assert (is_variable_sized (orig_var));
4833 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var),
4834 x);
4835 gimplify_assign (new_var, x, ilist);
4836 tree new_orig_var = lookup_decl (orig_var, ctx);
4837 tree t = build_fold_indirect_ref (new_var);
4838 DECL_IGNORED_P (new_var) = 0;
4839 TREE_THIS_NOTRAP (t) = 1;
4840 SET_DECL_VALUE_EXPR (new_orig_var, t);
4841 DECL_HAS_VALUE_EXPR_P (new_orig_var) = 1;
4842 }
4843 else
4844 {
4845 x = build2 (MEM_REF, TREE_TYPE (new_var), x,
4846 build_int_cst (ptype, 0));
4847 SET_DECL_VALUE_EXPR (new_var, x);
4848 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4849 }
4850 }
4851 else
4852 {
4853 gcc_assert (orig_var == var);
4854 if (TREE_CODE (d) == INDIRECT_REF)
4855 {
4856 x = create_tmp_var (ptype, name);
4857 TREE_ADDRESSABLE (x) = 1;
4858 gimplify_assign (x, yb, ilist);
4859 x = build_fold_addr_expr_loc (clause_loc, x);
4860 }
4861 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
4862 gimplify_assign (new_var, x, ilist);
4863 }
4864 /* GOMP_taskgroup_reduction_register memsets the whole
4865 array to zero. If the initializer is zero, we don't
4866 need to initialize it again, just mark it as ever
4867 used unconditionally, i.e. cond = true. */
4868 if (cond
4869 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) == NULL_TREE
4870 && initializer_zerop (omp_reduction_init (c,
4871 TREE_TYPE (type))))
4872 {
4873 gimple *g = gimple_build_assign (build_simple_mem_ref (cond),
4874 boolean_true_node);
4875 gimple_seq_add_stmt (ilist, g);
4876 continue;
4877 }
4878 tree end = create_artificial_label (UNKNOWN_LOCATION);
4879 if (cond)
4880 {
4881 gimple *g;
4882 if (!is_parallel_ctx (ctx))
4883 {
4884 tree condv = create_tmp_var (boolean_type_node);
4885 g = gimple_build_assign (condv,
4886 build_simple_mem_ref (cond));
4887 gimple_seq_add_stmt (ilist, g);
4888 tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
4889 g = gimple_build_cond (NE_EXPR, condv,
4890 boolean_false_node, end, lab1);
4891 gimple_seq_add_stmt (ilist, g);
4892 gimple_seq_add_stmt (ilist, gimple_build_label (lab1));
4893 }
4894 g = gimple_build_assign (build_simple_mem_ref (cond),
4895 boolean_true_node);
4896 gimple_seq_add_stmt (ilist, g);
4897 }
4898
4899 tree y1 = create_tmp_var (ptype);
4900 gimplify_assign (y1, y, ilist);
4901 tree i2 = NULL_TREE, y2 = NULL_TREE;
4902 tree body2 = NULL_TREE, end2 = NULL_TREE;
4903 tree y3 = NULL_TREE, y4 = NULL_TREE;
4904 if (task_reduction_needs_orig_p)
4905 {
4906 y3 = create_tmp_var (ptype);
4907 tree ref;
4908 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
4909 ref = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
4910 size_int (task_reduction_cnt_full
4911 + task_reduction_cntorig - 1),
4912 NULL_TREE, NULL_TREE);
4913 else
4914 {
4915 unsigned int idx = *ctx->task_reduction_map->get (c);
4916 ref = task_reduction_read (ilist, tskred_temp, ptype,
4917 7 + 3 * idx);
4918 }
4919 gimplify_assign (y3, ref, ilist);
4920 }
4921 else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) || is_simd)
4922 {
4923 if (pass != 3)
4924 {
4925 y2 = create_tmp_var (ptype);
4926 gimplify_assign (y2, y, ilist);
4927 }
4928 if (is_simd || OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
4929 {
4930 tree ref = build_outer_var_ref (var, ctx);
4931 /* For ref build_outer_var_ref already performs this. */
4932 if (TREE_CODE (d) == INDIRECT_REF)
4933 gcc_assert (omp_is_reference (var));
4934 else if (TREE_CODE (d) == ADDR_EXPR)
4935 ref = build_fold_addr_expr (ref);
4936 else if (omp_is_reference (var))
4937 ref = build_fold_addr_expr (ref);
4938 ref = fold_convert_loc (clause_loc, ptype, ref);
4939 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
4940 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
4941 {
4942 y3 = create_tmp_var (ptype);
4943 gimplify_assign (y3, unshare_expr (ref), ilist);
4944 }
4945 if (is_simd)
4946 {
4947 y4 = create_tmp_var (ptype);
4948 gimplify_assign (y4, ref, dlist);
4949 }
4950 }
4951 }
4952 tree i = create_tmp_var (TREE_TYPE (v));
4953 gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), ilist);
4954 tree body = create_artificial_label (UNKNOWN_LOCATION);
4955 gimple_seq_add_stmt (ilist, gimple_build_label (body));
4956 if (y2)
4957 {
4958 i2 = create_tmp_var (TREE_TYPE (v));
4959 gimplify_assign (i2, build_int_cst (TREE_TYPE (v), 0), dlist);
4960 body2 = create_artificial_label (UNKNOWN_LOCATION);
4961 end2 = create_artificial_label (UNKNOWN_LOCATION);
4962 gimple_seq_add_stmt (dlist, gimple_build_label (body2));
4963 }
4964 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
4965 {
4966 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
4967 tree decl_placeholder
4968 = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
4969 SET_DECL_VALUE_EXPR (decl_placeholder,
4970 build_simple_mem_ref (y1));
4971 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
4972 SET_DECL_VALUE_EXPR (placeholder,
4973 y3 ? build_simple_mem_ref (y3)
4974 : error_mark_node);
4975 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
4976 x = lang_hooks.decls.omp_clause_default_ctor
4977 (c, build_simple_mem_ref (y1),
4978 y3 ? build_simple_mem_ref (y3) : NULL_TREE);
4979 if (x)
4980 gimplify_and_add (x, ilist);
4981 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
4982 {
4983 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
4984 lower_omp (&tseq, ctx);
4985 gimple_seq_add_seq (ilist, tseq);
4986 }
4987 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
4988 if (is_simd)
4989 {
4990 SET_DECL_VALUE_EXPR (decl_placeholder,
4991 build_simple_mem_ref (y2));
4992 SET_DECL_VALUE_EXPR (placeholder,
4993 build_simple_mem_ref (y4));
4994 gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
4995 lower_omp (&tseq, ctx);
4996 gimple_seq_add_seq (dlist, tseq);
4997 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
4998 }
4999 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
5000 DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 0;
5001 if (y2)
5002 {
5003 x = lang_hooks.decls.omp_clause_dtor
5004 (c, build_simple_mem_ref (y2));
5005 if (x)
5006 gimplify_and_add (x, dlist);
5007 }
5008 }
5009 else
5010 {
5011 x = omp_reduction_init (c, TREE_TYPE (type));
5012 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
5013
5014 /* reduction(-:var) sums up the partial results, so it
5015 acts identically to reduction(+:var). */
5016 if (code == MINUS_EXPR)
5017 code = PLUS_EXPR;
5018
5019 gimplify_assign (build_simple_mem_ref (y1), x, ilist);
5020 if (is_simd)
5021 {
5022 x = build2 (code, TREE_TYPE (type),
5023 build_simple_mem_ref (y4),
5024 build_simple_mem_ref (y2));
5025 gimplify_assign (build_simple_mem_ref (y4), x, dlist);
5026 }
5027 }
5028 gimple *g
5029 = gimple_build_assign (y1, POINTER_PLUS_EXPR, y1,
5030 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5031 gimple_seq_add_stmt (ilist, g);
5032 if (y3)
5033 {
5034 g = gimple_build_assign (y3, POINTER_PLUS_EXPR, y3,
5035 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5036 gimple_seq_add_stmt (ilist, g);
5037 }
5038 g = gimple_build_assign (i, PLUS_EXPR, i,
5039 build_int_cst (TREE_TYPE (i), 1));
5040 gimple_seq_add_stmt (ilist, g);
5041 g = gimple_build_cond (LE_EXPR, i, v, body, end);
5042 gimple_seq_add_stmt (ilist, g);
5043 gimple_seq_add_stmt (ilist, gimple_build_label (end));
5044 if (y2)
5045 {
5046 g = gimple_build_assign (y2, POINTER_PLUS_EXPR, y2,
5047 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5048 gimple_seq_add_stmt (dlist, g);
5049 if (y4)
5050 {
5051 g = gimple_build_assign
5052 (y4, POINTER_PLUS_EXPR, y4,
5053 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5054 gimple_seq_add_stmt (dlist, g);
5055 }
5056 g = gimple_build_assign (i2, PLUS_EXPR, i2,
5057 build_int_cst (TREE_TYPE (i2), 1));
5058 gimple_seq_add_stmt (dlist, g);
5059 g = gimple_build_cond (LE_EXPR, i2, v, body2, end2);
5060 gimple_seq_add_stmt (dlist, g);
5061 gimple_seq_add_stmt (dlist, gimple_build_label (end2));
5062 }
5063 continue;
5064 }
5065 else if (pass == 2)
5066 {
5067 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
5068 x = var;
5069 else
5070 {
5071 bool by_ref = use_pointer_for_field (var, ctx);
5072 x = build_receiver_ref (var, by_ref, ctx);
5073 }
5074 if (!omp_is_reference (var))
5075 x = build_fold_addr_expr (x);
5076 x = fold_convert (ptr_type_node, x);
5077 unsigned cnt = task_reduction_cnt - 1;
5078 if (!task_reduction_needs_orig_p)
5079 cnt += task_reduction_cntorig_full - task_reduction_cntorig;
5080 else
5081 cnt = task_reduction_cntorig - 1;
5082 tree r = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5083 size_int (cnt), NULL_TREE, NULL_TREE);
5084 gimplify_assign (r, x, ilist);
5085 continue;
5086 }
5087 else if (pass == 3)
5088 {
5089 tree type = TREE_TYPE (new_var);
5090 if (!omp_is_reference (var))
5091 type = build_pointer_type (type);
5092 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5093 {
5094 unsigned cnt = task_reduction_cnt - 1;
5095 if (!task_reduction_needs_orig_p)
5096 cnt += (task_reduction_cntorig_full
5097 - task_reduction_cntorig);
5098 else
5099 cnt = task_reduction_cntorig - 1;
5100 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5101 size_int (cnt), NULL_TREE, NULL_TREE);
5102 }
5103 else
5104 {
5105 unsigned int idx = *ctx->task_reduction_map->get (c);
5106 tree off;
5107 if (ctx->task_reductions[1 + idx])
5108 off = fold_convert (sizetype,
5109 ctx->task_reductions[1 + idx]);
5110 else
5111 off = task_reduction_read (ilist, tskred_temp, sizetype,
5112 7 + 3 * idx + 1);
5113 x = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node,
5114 tskred_base, off);
5115 }
5116 x = fold_convert (type, x);
5117 tree t;
5118 if (omp_is_reference (var))
5119 {
5120 gimplify_assign (new_var, x, ilist);
5121 t = new_var;
5122 new_var = build_simple_mem_ref (new_var);
5123 }
5124 else
5125 {
5126 t = create_tmp_var (type);
5127 gimplify_assign (t, x, ilist);
5128 SET_DECL_VALUE_EXPR (new_var, build_simple_mem_ref (t));
5129 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5130 }
5131 t = fold_convert (build_pointer_type (boolean_type_node), t);
5132 t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t,
5133 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5134 cond = create_tmp_var (TREE_TYPE (t));
5135 gimplify_assign (cond, t, ilist);
5136 }
5137 else if (is_variable_sized (var))
5138 {
5139 /* For variable sized types, we need to allocate the
5140 actual storage here. Call alloca and store the
5141 result in the pointer decl that we created elsewhere. */
5142 if (pass == 0)
5143 continue;
5144
5145 if (c_kind != OMP_CLAUSE_FIRSTPRIVATE || !is_task_ctx (ctx))
5146 {
5147 gcall *stmt;
5148 tree tmp, atmp;
5149
5150 ptr = DECL_VALUE_EXPR (new_var);
5151 gcc_assert (TREE_CODE (ptr) == INDIRECT_REF);
5152 ptr = TREE_OPERAND (ptr, 0);
5153 gcc_assert (DECL_P (ptr));
5154 x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
5155
5156 /* void *tmp = __builtin_alloca */
5157 atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
5158 stmt = gimple_build_call (atmp, 2, x,
5159 size_int (DECL_ALIGN (var)));
5160 cfun->calls_alloca = 1;
5161 tmp = create_tmp_var_raw (ptr_type_node);
5162 gimple_add_tmp_var (tmp);
5163 gimple_call_set_lhs (stmt, tmp);
5164
5165 gimple_seq_add_stmt (ilist, stmt);
5166
5167 x = fold_convert_loc (clause_loc, TREE_TYPE (ptr), tmp);
5168 gimplify_assign (ptr, x, ilist);
5169 }
5170 }
5171 else if (omp_is_reference (var)
5172 && (c_kind != OMP_CLAUSE_FIRSTPRIVATE
5173 || !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c)))
5174 {
5175 /* For references that are being privatized for Fortran,
5176 allocate new backing storage for the new pointer
5177 variable. This allows us to avoid changing all the
5178 code that expects a pointer to something that expects
5179 a direct variable. */
5180 if (pass == 0)
5181 continue;
5182
5183 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
5184 if (c_kind == OMP_CLAUSE_FIRSTPRIVATE && is_task_ctx (ctx))
5185 {
5186 x = build_receiver_ref (var, false, ctx);
5187 x = build_fold_addr_expr_loc (clause_loc, x);
5188 }
5189 else if (TREE_CONSTANT (x))
5190 {
5191 /* For reduction in SIMD loop, defer adding the
5192 initialization of the reference, because if we decide
			 to use SIMD array for it, the initialization could cause
5194 expansion ICE. Ditto for other privatization clauses. */
5195 if (is_simd)
5196 x = NULL_TREE;
5197 else
5198 {
5199 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
5200 get_name (var));
5201 gimple_add_tmp_var (x);
5202 TREE_ADDRESSABLE (x) = 1;
5203 x = build_fold_addr_expr_loc (clause_loc, x);
5204 }
5205 }
5206 else
5207 {
5208 tree atmp
5209 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
5210 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
5211 tree al = size_int (TYPE_ALIGN (rtype));
5212 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
5213 }
5214
5215 if (x)
5216 {
5217 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
5218 gimplify_assign (new_var, x, ilist);
5219 }
5220
5221 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
5222 }
5223 else if ((c_kind == OMP_CLAUSE_REDUCTION
5224 || c_kind == OMP_CLAUSE_IN_REDUCTION)
5225 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5226 {
5227 if (pass == 0)
5228 continue;
5229 }
5230 else if (pass != 0)
5231 continue;
5232
5233 switch (OMP_CLAUSE_CODE (c))
5234 {
5235 case OMP_CLAUSE_SHARED:
5236 /* Ignore shared directives in teams construct inside
5237 target construct. */
5238 if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
5239 && !is_host_teams_ctx (ctx))
5240 continue;
5241 /* Shared global vars are just accessed directly. */
5242 if (is_global_var (new_var))
5243 break;
5244 /* For taskloop firstprivate/lastprivate, represented
5245 as firstprivate and shared clause on the task, new_var
5246 is the firstprivate var. */
5247 if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
5248 break;
5249 /* Set up the DECL_VALUE_EXPR for shared variables now. This
5250 needs to be delayed until after fixup_child_record_type so
5251 that we get the correct type during the dereference. */
5252 by_ref = use_pointer_for_field (var, ctx);
5253 x = build_receiver_ref (var, by_ref, ctx);
5254 SET_DECL_VALUE_EXPR (new_var, x);
5255 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5256
5257 /* ??? If VAR is not passed by reference, and the variable
5258 hasn't been initialized yet, then we'll get a warning for
5259 the store into the omp_data_s structure. Ideally, we'd be
5260 able to notice this and not store anything at all, but
5261 we're generating code too early. Suppress the warning. */
5262 if (!by_ref)
5263 TREE_NO_WARNING (var) = 1;
5264 break;
5265
5266 case OMP_CLAUSE__CONDTEMP_:
5267 if (is_parallel_ctx (ctx))
5268 {
5269 x = build_receiver_ref (var, false, ctx);
5270 SET_DECL_VALUE_EXPR (new_var, x);
5271 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5272 }
5273 else if (is_simd && !OMP_CLAUSE__CONDTEMP__ITER (c))
5274 {
5275 x = build_zero_cst (TREE_TYPE (var));
5276 goto do_private;
5277 }
5278 break;
5279
5280 case OMP_CLAUSE_LASTPRIVATE:
5281 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
5282 break;
5283 /* FALLTHRU */
5284
5285 case OMP_CLAUSE_PRIVATE:
5286 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE)
5287 x = build_outer_var_ref (var, ctx);
5288 else if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
5289 {
5290 if (is_task_ctx (ctx))
5291 x = build_receiver_ref (var, false, ctx);
5292 else
5293 x = build_outer_var_ref (var, ctx, OMP_CLAUSE_PRIVATE);
5294 }
5295 else
5296 x = NULL;
5297 do_private:
5298 tree nx;
5299 bool copy_ctor;
5300 copy_ctor = false;
5301 nx = unshare_expr (new_var);
5302 if (is_simd
5303 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5304 && OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c))
5305 copy_ctor = true;
5306 if (copy_ctor)
5307 nx = lang_hooks.decls.omp_clause_copy_ctor (c, nx, x);
5308 else
5309 nx = lang_hooks.decls.omp_clause_default_ctor (c, nx, x);
5310 if (is_simd)
5311 {
5312 tree y = lang_hooks.decls.omp_clause_dtor (c, new_var);
5313 if ((TREE_ADDRESSABLE (new_var) || nx || y
5314 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5315 && (gimple_omp_for_collapse (ctx->stmt) != 1
5316 || (gimple_omp_for_index (ctx->stmt, 0)
5317 != new_var)))
5318 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE__CONDTEMP_
5319 || omp_is_reference (var))
5320 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
5321 ivar, lvar))
5322 {
5323 if (omp_is_reference (var))
5324 {
5325 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5326 tree new_vard = TREE_OPERAND (new_var, 0);
5327 gcc_assert (DECL_P (new_vard));
5328 SET_DECL_VALUE_EXPR (new_vard,
5329 build_fold_addr_expr (lvar));
5330 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
5331 }
5332
5333 if (nx)
5334 {
5335 tree iv = unshare_expr (ivar);
5336 if (copy_ctor)
5337 x = lang_hooks.decls.omp_clause_copy_ctor (c, iv,
5338 x);
5339 else
5340 x = lang_hooks.decls.omp_clause_default_ctor (c,
5341 iv,
5342 x);
5343 }
5344 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE__CONDTEMP_)
5345 {
5346 x = build2 (MODIFY_EXPR, TREE_TYPE (ivar),
5347 unshare_expr (ivar), x);
5348 nx = x;
5349 }
5350 if (nx && x)
5351 gimplify_and_add (x, &llist[0]);
5352 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5353 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
5354 {
5355 tree v = new_var;
5356 if (!DECL_P (v))
5357 {
5358 gcc_assert (TREE_CODE (v) == MEM_REF);
5359 v = TREE_OPERAND (v, 0);
5360 gcc_assert (DECL_P (v));
5361 }
5362 v = *ctx->lastprivate_conditional_map->get (v);
5363 tree t = create_tmp_var (TREE_TYPE (v));
5364 tree z = build_zero_cst (TREE_TYPE (v));
5365 tree orig_v
5366 = build_outer_var_ref (var, ctx,
5367 OMP_CLAUSE_LASTPRIVATE);
5368 gimple_seq_add_stmt (dlist,
5369 gimple_build_assign (t, z));
5370 gcc_assert (DECL_HAS_VALUE_EXPR_P (v));
5371 tree civar = DECL_VALUE_EXPR (v);
5372 gcc_assert (TREE_CODE (civar) == ARRAY_REF);
5373 civar = unshare_expr (civar);
5374 TREE_OPERAND (civar, 1) = sctx.idx;
5375 x = build2 (MODIFY_EXPR, TREE_TYPE (t), t,
5376 unshare_expr (civar));
5377 x = build2 (COMPOUND_EXPR, TREE_TYPE (orig_v), x,
5378 build2 (MODIFY_EXPR, TREE_TYPE (orig_v),
5379 orig_v, unshare_expr (ivar)));
5380 tree cond = build2 (LT_EXPR, boolean_type_node, t,
5381 civar);
5382 x = build3 (COND_EXPR, void_type_node, cond, x,
5383 void_node);
5384 gimple_seq tseq = NULL;
5385 gimplify_and_add (x, &tseq);
5386 if (ctx->outer)
5387 lower_omp (&tseq, ctx->outer);
5388 gimple_seq_add_seq (&llist[1], tseq);
5389 }
5390 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5391 && ctx->for_simd_scan_phase)
5392 {
5393 x = unshare_expr (ivar);
5394 tree orig_v
5395 = build_outer_var_ref (var, ctx,
5396 OMP_CLAUSE_LASTPRIVATE);
5397 x = lang_hooks.decls.omp_clause_assign_op (c, x,
5398 orig_v);
5399 gimplify_and_add (x, &llist[0]);
5400 }
5401 if (y)
5402 {
5403 y = lang_hooks.decls.omp_clause_dtor (c, ivar);
5404 if (y)
5405 gimplify_and_add (y, &llist[1]);
5406 }
5407 break;
5408 }
5409 if (omp_is_reference (var))
5410 {
5411 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5412 tree new_vard = TREE_OPERAND (new_var, 0);
5413 gcc_assert (DECL_P (new_vard));
5414 tree type = TREE_TYPE (TREE_TYPE (new_vard));
5415 x = TYPE_SIZE_UNIT (type);
5416 if (TREE_CONSTANT (x))
5417 {
5418 x = create_tmp_var_raw (type, get_name (var));
5419 gimple_add_tmp_var (x);
5420 TREE_ADDRESSABLE (x) = 1;
5421 x = build_fold_addr_expr_loc (clause_loc, x);
5422 x = fold_convert_loc (clause_loc,
5423 TREE_TYPE (new_vard), x);
5424 gimplify_assign (new_vard, x, ilist);
5425 }
5426 }
5427 }
5428 if (nx)
5429 gimplify_and_add (nx, ilist);
5430 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5431 && is_simd
5432 && ctx->for_simd_scan_phase)
5433 {
5434 tree orig_v = build_outer_var_ref (var, ctx,
5435 OMP_CLAUSE_LASTPRIVATE);
5436 x = lang_hooks.decls.omp_clause_assign_op (c, new_var,
5437 orig_v);
5438 gimplify_and_add (x, ilist);
5439 }
5440 /* FALLTHRU */
5441
5442 do_dtor:
5443 x = lang_hooks.decls.omp_clause_dtor (c, new_var);
5444 if (x)
5445 gimplify_and_add (x, dlist);
5446 break;
5447
5448 case OMP_CLAUSE_LINEAR:
5449 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
5450 goto do_firstprivate;
5451 if (OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
5452 x = NULL;
5453 else
5454 x = build_outer_var_ref (var, ctx);
5455 goto do_private;
5456
5457 case OMP_CLAUSE_FIRSTPRIVATE:
5458 if (is_task_ctx (ctx))
5459 {
5460 if ((omp_is_reference (var)
5461 && !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c))
5462 || is_variable_sized (var))
5463 goto do_dtor;
5464 else if (is_global_var (maybe_lookup_decl_in_outer_ctx (var,
5465 ctx))
5466 || use_pointer_for_field (var, NULL))
5467 {
5468 x = build_receiver_ref (var, false, ctx);
5469 SET_DECL_VALUE_EXPR (new_var, x);
5470 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5471 goto do_dtor;
5472 }
5473 }
5474 if (OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c)
5475 && omp_is_reference (var))
5476 {
5477 x = build_outer_var_ref (var, ctx);
5478 gcc_assert (TREE_CODE (x) == MEM_REF
5479 && integer_zerop (TREE_OPERAND (x, 1)));
5480 x = TREE_OPERAND (x, 0);
5481 x = lang_hooks.decls.omp_clause_copy_ctor
5482 (c, unshare_expr (new_var), x);
5483 gimplify_and_add (x, ilist);
5484 goto do_dtor;
5485 }
5486 do_firstprivate:
5487 x = build_outer_var_ref (var, ctx);
5488 if (is_simd)
5489 {
5490 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
5491 && gimple_omp_for_combined_into_p (ctx->stmt))
5492 {
5493 tree t = OMP_CLAUSE_LINEAR_STEP (c);
5494 tree stept = TREE_TYPE (t);
5495 tree ct = omp_find_clause (clauses,
5496 OMP_CLAUSE__LOOPTEMP_);
5497 gcc_assert (ct);
5498 tree l = OMP_CLAUSE_DECL (ct);
5499 tree n1 = fd->loop.n1;
5500 tree step = fd->loop.step;
5501 tree itype = TREE_TYPE (l);
5502 if (POINTER_TYPE_P (itype))
5503 itype = signed_type_for (itype);
5504 l = fold_build2 (MINUS_EXPR, itype, l, n1);
5505 if (TYPE_UNSIGNED (itype)
5506 && fd->loop.cond_code == GT_EXPR)
5507 l = fold_build2 (TRUNC_DIV_EXPR, itype,
5508 fold_build1 (NEGATE_EXPR, itype, l),
5509 fold_build1 (NEGATE_EXPR,
5510 itype, step));
5511 else
5512 l = fold_build2 (TRUNC_DIV_EXPR, itype, l, step);
5513 t = fold_build2 (MULT_EXPR, stept,
5514 fold_convert (stept, l), t);
5515
5516 if (OMP_CLAUSE_LINEAR_ARRAY (c))
5517 {
5518 if (omp_is_reference (var))
5519 {
5520 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5521 tree new_vard = TREE_OPERAND (new_var, 0);
5522 gcc_assert (DECL_P (new_vard));
5523 tree type = TREE_TYPE (TREE_TYPE (new_vard));
5524 nx = TYPE_SIZE_UNIT (type);
5525 if (TREE_CONSTANT (nx))
5526 {
5527 nx = create_tmp_var_raw (type,
5528 get_name (var));
5529 gimple_add_tmp_var (nx);
5530 TREE_ADDRESSABLE (nx) = 1;
5531 nx = build_fold_addr_expr_loc (clause_loc,
5532 nx);
5533 nx = fold_convert_loc (clause_loc,
5534 TREE_TYPE (new_vard),
5535 nx);
5536 gimplify_assign (new_vard, nx, ilist);
5537 }
5538 }
5539
5540 x = lang_hooks.decls.omp_clause_linear_ctor
5541 (c, new_var, x, t);
5542 gimplify_and_add (x, ilist);
5543 goto do_dtor;
5544 }
5545
5546 if (POINTER_TYPE_P (TREE_TYPE (x)))
5547 x = fold_build2 (POINTER_PLUS_EXPR,
5548 TREE_TYPE (x), x, t);
5549 else
5550 x = fold_build2 (PLUS_EXPR, TREE_TYPE (x), x, t);
5551 }
5552
5553 if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_LINEAR
5554 || TREE_ADDRESSABLE (new_var)
5555 || omp_is_reference (var))
5556 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
5557 ivar, lvar))
5558 {
5559 if (omp_is_reference (var))
5560 {
5561 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5562 tree new_vard = TREE_OPERAND (new_var, 0);
5563 gcc_assert (DECL_P (new_vard));
5564 SET_DECL_VALUE_EXPR (new_vard,
5565 build_fold_addr_expr (lvar));
5566 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
5567 }
5568 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR)
5569 {
5570 tree iv = create_tmp_var (TREE_TYPE (new_var));
5571 x = lang_hooks.decls.omp_clause_copy_ctor (c, iv, x);
5572 gimplify_and_add (x, ilist);
5573 gimple_stmt_iterator gsi
5574 = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
5575 gassign *g
5576 = gimple_build_assign (unshare_expr (lvar), iv);
5577 gsi_insert_before_without_update (&gsi, g,
5578 GSI_SAME_STMT);
5579 tree t = OMP_CLAUSE_LINEAR_STEP (c);
5580 enum tree_code code = PLUS_EXPR;
5581 if (POINTER_TYPE_P (TREE_TYPE (new_var)))
5582 code = POINTER_PLUS_EXPR;
5583 g = gimple_build_assign (iv, code, iv, t);
5584 gsi_insert_before_without_update (&gsi, g,
5585 GSI_SAME_STMT);
5586 break;
5587 }
5588 x = lang_hooks.decls.omp_clause_copy_ctor
5589 (c, unshare_expr (ivar), x);
5590 gimplify_and_add (x, &llist[0]);
5591 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
5592 if (x)
5593 gimplify_and_add (x, &llist[1]);
5594 break;
5595 }
5596 if (omp_is_reference (var))
5597 {
5598 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5599 tree new_vard = TREE_OPERAND (new_var, 0);
5600 gcc_assert (DECL_P (new_vard));
5601 tree type = TREE_TYPE (TREE_TYPE (new_vard));
5602 nx = TYPE_SIZE_UNIT (type);
5603 if (TREE_CONSTANT (nx))
5604 {
5605 nx = create_tmp_var_raw (type, get_name (var));
5606 gimple_add_tmp_var (nx);
5607 TREE_ADDRESSABLE (nx) = 1;
5608 nx = build_fold_addr_expr_loc (clause_loc, nx);
5609 nx = fold_convert_loc (clause_loc,
5610 TREE_TYPE (new_vard), nx);
5611 gimplify_assign (new_vard, nx, ilist);
5612 }
5613 }
5614 }
5615 x = lang_hooks.decls.omp_clause_copy_ctor
5616 (c, unshare_expr (new_var), x);
5617 gimplify_and_add (x, ilist);
5618 goto do_dtor;
5619
5620 case OMP_CLAUSE__LOOPTEMP_:
5621 case OMP_CLAUSE__REDUCTEMP_:
5622 gcc_assert (is_taskreg_ctx (ctx));
5623 x = build_outer_var_ref (var, ctx);
5624 x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
5625 gimplify_and_add (x, ilist);
5626 break;
5627
5628 case OMP_CLAUSE_COPYIN:
5629 by_ref = use_pointer_for_field (var, NULL);
5630 x = build_receiver_ref (var, by_ref, ctx);
5631 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, x);
5632 append_to_statement_list (x, ©in_seq);
5633 copyin_by_ref |= by_ref;
5634 break;
5635
5636 case OMP_CLAUSE_REDUCTION:
5637 case OMP_CLAUSE_IN_REDUCTION:
5638 /* OpenACC reductions are initialized using the
5639 GOACC_REDUCTION internal function. */
5640 if (is_gimple_omp_oacc (ctx->stmt))
5641 break;
5642 if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5643 {
5644 tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
5645 gimple *tseq;
5646 tree ptype = TREE_TYPE (placeholder);
5647 if (cond)
5648 {
5649 x = error_mark_node;
5650 if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c)
5651 && !task_reduction_needs_orig_p)
5652 x = var;
5653 else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
5654 {
5655 tree pptype = build_pointer_type (ptype);
5656 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5657 x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5658 size_int (task_reduction_cnt_full
5659 + task_reduction_cntorig - 1),
5660 NULL_TREE, NULL_TREE);
5661 else
5662 {
5663 unsigned int idx
5664 = *ctx->task_reduction_map->get (c);
5665 x = task_reduction_read (ilist, tskred_temp,
5666 pptype, 7 + 3 * idx);
5667 }
5668 x = fold_convert (pptype, x);
5669 x = build_simple_mem_ref (x);
5670 }
5671 }
5672 else
5673 {
5674 x = build_outer_var_ref (var, ctx);
5675
5676 if (omp_is_reference (var)
5677 && !useless_type_conversion_p (ptype, TREE_TYPE (x)))
5678 x = build_fold_addr_expr_loc (clause_loc, x);
5679 }
5680 SET_DECL_VALUE_EXPR (placeholder, x);
5681 DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
5682 tree new_vard = new_var;
5683 if (omp_is_reference (var))
5684 {
5685 gcc_assert (TREE_CODE (new_var) == MEM_REF);
5686 new_vard = TREE_OPERAND (new_var, 0);
5687 gcc_assert (DECL_P (new_vard));
5688 }
5689 tree rvar = NULL_TREE, *rvarp = NULL, rvar2 = NULL_TREE;
5690 if (is_simd
5691 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
5692 && OMP_CLAUSE_REDUCTION_INSCAN (c))
5693 rvarp = &rvar;
5694 if (is_simd
5695 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
5696 ivar, lvar, rvarp,
5697 &rvar2))
5698 {
5699 if (new_vard == new_var)
5700 {
5701 gcc_assert (DECL_VALUE_EXPR (new_var) == lvar);
5702 SET_DECL_VALUE_EXPR (new_var, ivar);
5703 }
5704 else
5705 {
5706 SET_DECL_VALUE_EXPR (new_vard,
5707 build_fold_addr_expr (ivar));
5708 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
5709 }
5710 x = lang_hooks.decls.omp_clause_default_ctor
5711 (c, unshare_expr (ivar),
5712 build_outer_var_ref (var, ctx));
5713 if (rvarp && ctx->for_simd_scan_phase)
5714 {
5715 if (x)
5716 gimplify_and_add (x, &llist[0]);
5717 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
5718 if (x)
5719 gimplify_and_add (x, &llist[1]);
5720 break;
5721 }
5722 else if (rvarp)
5723 {
5724 if (x)
5725 {
5726 gimplify_and_add (x, &llist[0]);
5727
5728 tree ivar2 = unshare_expr (lvar);
5729 TREE_OPERAND (ivar2, 1) = sctx.idx;
5730 x = lang_hooks.decls.omp_clause_default_ctor
5731 (c, ivar2, build_outer_var_ref (var, ctx));
5732 gimplify_and_add (x, &llist[0]);
5733
5734 if (rvar2)
5735 {
5736 x = lang_hooks.decls.omp_clause_default_ctor
5737 (c, unshare_expr (rvar2),
5738 build_outer_var_ref (var, ctx));
5739 gimplify_and_add (x, &llist[0]);
5740 }
5741
5742 /* For types that need construction, add another
5743 private var which will be default constructed
5744 and optionally initialized with
5745 OMP_CLAUSE_REDUCTION_GIMPLE_INIT, as in the
5746 loop we want to assign this value instead of
5747 constructing and destructing it in each
5748 iteration. */
5749 tree nv = create_tmp_var_raw (TREE_TYPE (ivar));
5750 gimple_add_tmp_var (nv);
5751 ctx->cb.decl_map->put (TREE_OPERAND (rvar2
5752 ? rvar2
5753 : ivar, 0),
5754 nv);
5755 x = lang_hooks.decls.omp_clause_default_ctor
5756 (c, nv, build_outer_var_ref (var, ctx));
5757 gimplify_and_add (x, ilist);
5758
5759 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
5760 {
5761 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
5762 x = DECL_VALUE_EXPR (new_vard);
5763 tree vexpr = nv;
5764 if (new_vard != new_var)
5765 vexpr = build_fold_addr_expr (nv);
5766 SET_DECL_VALUE_EXPR (new_vard, vexpr);
5767 lower_omp (&tseq, ctx);
5768 SET_DECL_VALUE_EXPR (new_vard, x);
5769 gimple_seq_add_seq (ilist, tseq);
5770 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
5771 }
5772
5773 x = lang_hooks.decls.omp_clause_dtor (c, nv);
5774 if (x)
5775 gimplify_and_add (x, dlist);
5776 }
5777
5778 tree ref = build_outer_var_ref (var, ctx);
5779 x = unshare_expr (ivar);
5780 x = lang_hooks.decls.omp_clause_assign_op (c, x,
5781 ref);
5782 gimplify_and_add (x, &llist[0]);
5783
5784 ref = build_outer_var_ref (var, ctx);
5785 x = lang_hooks.decls.omp_clause_assign_op (c, ref,
5786 rvar);
5787 gimplify_and_add (x, &llist[3]);
5788
5789 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
5790 if (new_vard == new_var)
5791 SET_DECL_VALUE_EXPR (new_var, lvar);
5792 else
5793 SET_DECL_VALUE_EXPR (new_vard,
5794 build_fold_addr_expr (lvar));
5795
5796 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
5797 if (x)
5798 gimplify_and_add (x, &llist[1]);
5799
5800 tree ivar2 = unshare_expr (lvar);
5801 TREE_OPERAND (ivar2, 1) = sctx.idx;
5802 x = lang_hooks.decls.omp_clause_dtor (c, ivar2);
5803 if (x)
5804 gimplify_and_add (x, &llist[1]);
5805
5806 if (rvar2)
5807 {
5808 x = lang_hooks.decls.omp_clause_dtor (c, rvar2);
5809 if (x)
5810 gimplify_and_add (x, &llist[1]);
5811 }
5812 break;
5813 }
5814 if (x)
5815 gimplify_and_add (x, &llist[0]);
5816 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
5817 {
5818 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
5819 lower_omp (&tseq, ctx);
5820 gimple_seq_add_seq (&llist[0], tseq);
5821 }
5822 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
5823 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
5824 lower_omp (&tseq, ctx);
5825 gimple_seq_add_seq (&llist[1], tseq);
5826 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5827 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
5828 if (new_vard == new_var)
5829 SET_DECL_VALUE_EXPR (new_var, lvar);
5830 else
5831 SET_DECL_VALUE_EXPR (new_vard,
5832 build_fold_addr_expr (lvar));
5833 x = lang_hooks.decls.omp_clause_dtor (c, ivar);
5834 if (x)
5835 gimplify_and_add (x, &llist[1]);
5836 break;
5837 }
5838 /* If this is a reference to constant size reduction var
5839 with placeholder, we haven't emitted the initializer
5840 for it because it is undesirable if SIMD arrays are used.
5841 But if they aren't used, we need to emit the deferred
5842 initialization now. */
5843 else if (omp_is_reference (var) && is_simd)
5844 handle_simd_reference (clause_loc, new_vard, ilist);
5845
5846 tree lab2 = NULL_TREE;
5847 if (cond)
5848 {
5849 gimple *g;
5850 if (!is_parallel_ctx (ctx))
5851 {
5852 tree condv = create_tmp_var (boolean_type_node);
5853 tree m = build_simple_mem_ref (cond);
5854 g = gimple_build_assign (condv, m);
5855 gimple_seq_add_stmt (ilist, g);
5856 tree lab1
5857 = create_artificial_label (UNKNOWN_LOCATION);
5858 lab2 = create_artificial_label (UNKNOWN_LOCATION);
5859 g = gimple_build_cond (NE_EXPR, condv,
5860 boolean_false_node,
5861 lab2, lab1);
5862 gimple_seq_add_stmt (ilist, g);
5863 gimple_seq_add_stmt (ilist,
5864 gimple_build_label (lab1));
5865 }
5866 g = gimple_build_assign (build_simple_mem_ref (cond),
5867 boolean_true_node);
5868 gimple_seq_add_stmt (ilist, g);
5869 }
5870 x = lang_hooks.decls.omp_clause_default_ctor
5871 (c, unshare_expr (new_var),
5872 cond ? NULL_TREE
5873 : build_outer_var_ref (var, ctx));
5874 if (x)
5875 gimplify_and_add (x, ilist);
5876
5877 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
5878 && OMP_CLAUSE_REDUCTION_INSCAN (c))
5879 {
5880 if (ctx->for_simd_scan_phase)
5881 goto do_dtor;
5882 if (x || (!is_simd
5883 && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c)))
5884 {
5885 tree nv = create_tmp_var_raw (TREE_TYPE (new_var));
5886 gimple_add_tmp_var (nv);
5887 ctx->cb.decl_map->put (new_vard, nv);
5888 x = lang_hooks.decls.omp_clause_default_ctor
5889 (c, nv, build_outer_var_ref (var, ctx));
5890 if (x)
5891 gimplify_and_add (x, ilist);
5892 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
5893 {
5894 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
5895 tree vexpr = nv;
5896 if (new_vard != new_var)
5897 vexpr = build_fold_addr_expr (nv);
5898 SET_DECL_VALUE_EXPR (new_vard, vexpr);
5899 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
5900 lower_omp (&tseq, ctx);
5901 SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
5902 DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
5903 gimple_seq_add_seq (ilist, tseq);
5904 }
5905 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
5906 if (is_simd && ctx->scan_exclusive)
5907 {
5908 tree nv2
5909 = create_tmp_var_raw (TREE_TYPE (new_var));
5910 gimple_add_tmp_var (nv2);
5911 ctx->cb.decl_map->put (nv, nv2);
5912 x = lang_hooks.decls.omp_clause_default_ctor
5913 (c, nv2, build_outer_var_ref (var, ctx));
5914 gimplify_and_add (x, ilist);
5915 x = lang_hooks.decls.omp_clause_dtor (c, nv2);
5916 if (x)
5917 gimplify_and_add (x, dlist);
5918 }
5919 x = lang_hooks.decls.omp_clause_dtor (c, nv);
5920 if (x)
5921 gimplify_and_add (x, dlist);
5922 }
5923 else if (is_simd
5924 && ctx->scan_exclusive
5925 && TREE_ADDRESSABLE (TREE_TYPE (new_var)))
5926 {
5927 tree nv2 = create_tmp_var_raw (TREE_TYPE (new_var));
5928 gimple_add_tmp_var (nv2);
5929 ctx->cb.decl_map->put (new_vard, nv2);
5930 x = lang_hooks.decls.omp_clause_dtor (c, nv2);
5931 if (x)
5932 gimplify_and_add (x, dlist);
5933 }
5934 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
5935 goto do_dtor;
5936 }
5937
5938 if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
5939 {
5940 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
5941 lower_omp (&tseq, ctx);
5942 gimple_seq_add_seq (ilist, tseq);
5943 }
5944 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
5945 if (is_simd)
5946 {
5947 tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
5948 lower_omp (&tseq, ctx);
5949 gimple_seq_add_seq (dlist, tseq);
5950 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5951 }
5952 DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
5953 if (cond)
5954 {
5955 if (lab2)
5956 gimple_seq_add_stmt (ilist, gimple_build_label (lab2));
5957 break;
5958 }
5959 goto do_dtor;
5960 }
5961 else
5962 {
5963 x = omp_reduction_init (c, TREE_TYPE (new_var));
5964 gcc_assert (TREE_CODE (TREE_TYPE (new_var)) != ARRAY_TYPE);
5965 enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
5966
5967 if (cond)
5968 {
5969 gimple *g;
5970 tree lab2 = NULL_TREE;
5971 /* GOMP_taskgroup_reduction_register memsets the whole
5972 array to zero. If the initializer is zero, we don't
5973 need to initialize it again, just mark it as ever
5974 used unconditionally, i.e. cond = true. */
5975 if (initializer_zerop (x))
5976 {
5977 g = gimple_build_assign (build_simple_mem_ref (cond),
5978 boolean_true_node);
5979 gimple_seq_add_stmt (ilist, g);
5980 break;
5981 }
5982
5983 /* Otherwise, emit
5984 if (!cond) { cond = true; new_var = x; } */
5985 if (!is_parallel_ctx (ctx))
5986 {
5987 tree condv = create_tmp_var (boolean_type_node);
5988 tree m = build_simple_mem_ref (cond);
5989 g = gimple_build_assign (condv, m);
5990 gimple_seq_add_stmt (ilist, g);
5991 tree lab1
5992 = create_artificial_label (UNKNOWN_LOCATION);
5993 lab2 = create_artificial_label (UNKNOWN_LOCATION);
5994 g = gimple_build_cond (NE_EXPR, condv,
5995 boolean_false_node,
5996 lab2, lab1);
5997 gimple_seq_add_stmt (ilist, g);
5998 gimple_seq_add_stmt (ilist,
5999 gimple_build_label (lab1));
6000 }
6001 g = gimple_build_assign (build_simple_mem_ref (cond),
6002 boolean_true_node);
6003 gimple_seq_add_stmt (ilist, g);
6004 gimplify_assign (new_var, x, ilist);
6005 if (lab2)
6006 gimple_seq_add_stmt (ilist, gimple_build_label (lab2));
6007 break;
6008 }
6009
6010 /* reduction(-:var) sums up the partial results, so it
6011 acts identically to reduction(+:var). */
6012 if (code == MINUS_EXPR)
6013 code = PLUS_EXPR;
6014
6015 tree new_vard = new_var;
6016 if (is_simd && omp_is_reference (var))
6017 {
6018 gcc_assert (TREE_CODE (new_var) == MEM_REF);
6019 new_vard = TREE_OPERAND (new_var, 0);
6020 gcc_assert (DECL_P (new_vard));
6021 }
6022 tree rvar = NULL_TREE, *rvarp = NULL, rvar2 = NULL_TREE;
6023 if (is_simd
6024 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6025 && OMP_CLAUSE_REDUCTION_INSCAN (c))
6026 rvarp = &rvar;
6027 if (is_simd
6028 && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
6029 ivar, lvar, rvarp,
6030 &rvar2))
6031 {
6032 if (new_vard != new_var)
6033 {
6034 SET_DECL_VALUE_EXPR (new_vard,
6035 build_fold_addr_expr (lvar));
6036 DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
6037 }
6038
6039 tree ref = build_outer_var_ref (var, ctx);
6040
6041 if (rvarp)
6042 {
6043 if (ctx->for_simd_scan_phase)
6044 break;
6045 gimplify_assign (ivar, ref, &llist[0]);
6046 ref = build_outer_var_ref (var, ctx);
6047 gimplify_assign (ref, rvar, &llist[3]);
6048 break;
6049 }
6050
6051 gimplify_assign (unshare_expr (ivar), x, &llist[0]);
6052
6053 if (sctx.is_simt)
6054 {
6055 if (!simt_lane)
6056 simt_lane = create_tmp_var (unsigned_type_node);
6057 x = build_call_expr_internal_loc
6058 (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_BFLY,
6059 TREE_TYPE (ivar), 2, ivar, simt_lane);
6060 x = build2 (code, TREE_TYPE (ivar), ivar, x);
6061 gimplify_assign (ivar, x, &llist[2]);
6062 }
6063 x = build2 (code, TREE_TYPE (ref), ref, ivar);
6064 ref = build_outer_var_ref (var, ctx);
6065 gimplify_assign (ref, x, &llist[1]);
6066
6067 }
6068 else
6069 {
6070 if (omp_is_reference (var) && is_simd)
6071 handle_simd_reference (clause_loc, new_vard, ilist);
6072 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6073 && OMP_CLAUSE_REDUCTION_INSCAN (c))
6074 break;
6075 gimplify_assign (new_var, x, ilist);
6076 if (is_simd)
6077 {
6078 tree ref = build_outer_var_ref (var, ctx);
6079
6080 x = build2 (code, TREE_TYPE (ref), ref, new_var);
6081 ref = build_outer_var_ref (var, ctx);
6082 gimplify_assign (ref, x, dlist);
6083 }
6084 }
6085 }
6086 break;
6087
6088 default:
6089 gcc_unreachable ();
6090 }
6091 }
6092 }
6093 if (tskred_avar)
6094 {
6095 tree clobber = build_clobber (TREE_TYPE (tskred_avar));
6096 gimple_seq_add_stmt (ilist, gimple_build_assign (tskred_avar, clobber));
6097 }
6098
6099 if (known_eq (sctx.max_vf, 1U))
6100 {
6101 sctx.is_simt = false;
6102 if (ctx->lastprivate_conditional_map)
6103 {
6104 if (gimple_omp_for_combined_into_p (ctx->stmt))
6105 {
6106 /* Signal to lower_omp_1 that it should use parent context. */
6107 ctx->combined_into_simd_safelen1 = true;
6108 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
6109 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6110 && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
6111 {
6112 tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
6113 omp_context *outer = ctx->outer;
6114 if (gimple_code (outer->stmt) == GIMPLE_OMP_SCAN)
6115 outer = outer->outer;
6116 tree *v = ctx->lastprivate_conditional_map->get (o);
6117 tree po = lookup_decl (OMP_CLAUSE_DECL (c), outer);
6118 tree *pv = outer->lastprivate_conditional_map->get (po);
6119 *v = *pv;
6120 }
6121 }
6122 else
6123 {
6124 /* When not vectorized, treat lastprivate(conditional:) like
6125 normal lastprivate, as there will be just one simd lane
6126 writing the privatized variable. */
6127 delete ctx->lastprivate_conditional_map;
6128 ctx->lastprivate_conditional_map = NULL;
6129 }
6130 }
6131 }
6132
6133 if (nonconst_simd_if)
6134 {
6135 if (sctx.lane == NULL_TREE)
6136 {
6137 sctx.idx = create_tmp_var (unsigned_type_node);
6138 sctx.lane = create_tmp_var (unsigned_type_node);
6139 }
6140 /* FIXME: For now. */
6141 sctx.is_simt = false;
6142 }
6143
6144 if (sctx.lane || sctx.is_simt)
6145 {
6146 uid = create_tmp_var (ptr_type_node, "simduid");
6147 /* Don't want uninit warnings on simduid, it is always uninitialized,
6148 but we use it not for the value, but for the DECL_UID only. */
6149 TREE_NO_WARNING (uid) = 1;
6150 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SIMDUID_);
6151 OMP_CLAUSE__SIMDUID__DECL (c) = uid;
6152 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
6153 gimple_omp_for_set_clauses (ctx->stmt, c);
6154 }
6155 /* Emit calls denoting privatized variables and initializing a pointer to
6156 structure that holds private variables as fields after ompdevlow pass. */
6157 if (sctx.is_simt)
6158 {
6159 sctx.simt_eargs[0] = uid;
6160 gimple *g
6161 = gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, sctx.simt_eargs);
6162 gimple_call_set_lhs (g, uid);
6163 gimple_seq_add_stmt (ilist, g);
6164 sctx.simt_eargs.release ();
6165
6166 simtrec = create_tmp_var (ptr_type_node, ".omp_simt");
6167 g = gimple_build_call_internal (IFN_GOMP_SIMT_ENTER_ALLOC, 1, uid);
6168 gimple_call_set_lhs (g, simtrec);
6169 gimple_seq_add_stmt (ilist, g);
6170 }
6171 if (sctx.lane)
6172 {
6173 gimple *g = gimple_build_call_internal (IFN_GOMP_SIMD_LANE,
6174 2 + (nonconst_simd_if != NULL),
6175 uid, integer_zero_node,
6176 nonconst_simd_if);
6177 gimple_call_set_lhs (g, sctx.lane);
6178 gimple_stmt_iterator gsi = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
6179 gsi_insert_before_without_update (&gsi, g, GSI_SAME_STMT);
6180 g = gimple_build_assign (sctx.lane, INTEGER_CST,
6181 build_int_cst (unsigned_type_node, 0));
6182 gimple_seq_add_stmt (ilist, g);
6183 if (sctx.lastlane)
6184 {
6185 g = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
6186 2, uid, sctx.lane);
6187 gimple_call_set_lhs (g, sctx.lastlane);
6188 gimple_seq_add_stmt (dlist, g);
6189 gimple_seq_add_seq (dlist, llist[3]);
6190 }
6191 /* Emit reductions across SIMT lanes in log_2(simt_vf) steps. */
6192 if (llist[2])
6193 {
6194 tree simt_vf = create_tmp_var (unsigned_type_node);
6195 g = gimple_build_call_internal (IFN_GOMP_SIMT_VF, 0);
6196 gimple_call_set_lhs (g, simt_vf);
6197 gimple_seq_add_stmt (dlist, g);
6198
6199 tree t = build_int_cst (unsigned_type_node, 1);
6200 g = gimple_build_assign (simt_lane, INTEGER_CST, t);
6201 gimple_seq_add_stmt (dlist, g);
6202
6203 t = build_int_cst (unsigned_type_node, 0);
6204 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
6205 gimple_seq_add_stmt (dlist, g);
6206
6207 tree body = create_artificial_label (UNKNOWN_LOCATION);
6208 tree header = create_artificial_label (UNKNOWN_LOCATION);
6209 tree end = create_artificial_label (UNKNOWN_LOCATION);
6210 gimple_seq_add_stmt (dlist, gimple_build_goto (header));
6211 gimple_seq_add_stmt (dlist, gimple_build_label (body));
6212
6213 gimple_seq_add_seq (dlist, llist[2]);
6214
6215 g = gimple_build_assign (simt_lane, LSHIFT_EXPR, simt_lane, integer_one_node);
6216 gimple_seq_add_stmt (dlist, g);
6217
6218 gimple_seq_add_stmt (dlist, gimple_build_label (header));
6219 g = gimple_build_cond (LT_EXPR, simt_lane, simt_vf, body, end);
6220 gimple_seq_add_stmt (dlist, g);
6221
6222 gimple_seq_add_stmt (dlist, gimple_build_label (end));
6223 }
6224 for (int i = 0; i < 2; i++)
6225 if (llist[i])
6226 {
6227 tree vf = create_tmp_var (unsigned_type_node);
6228 g = gimple_build_call_internal (IFN_GOMP_SIMD_VF, 1, uid);
6229 gimple_call_set_lhs (g, vf);
6230 gimple_seq *seq = i == 0 ? ilist : dlist;
6231 gimple_seq_add_stmt (seq, g);
6232 tree t = build_int_cst (unsigned_type_node, 0);
6233 g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
6234 gimple_seq_add_stmt (seq, g);
6235 tree body = create_artificial_label (UNKNOWN_LOCATION);
6236 tree header = create_artificial_label (UNKNOWN_LOCATION);
6237 tree end = create_artificial_label (UNKNOWN_LOCATION);
6238 gimple_seq_add_stmt (seq, gimple_build_goto (header));
6239 gimple_seq_add_stmt (seq, gimple_build_label (body));
6240 gimple_seq_add_seq (seq, llist[i]);
6241 t = build_int_cst (unsigned_type_node, 1);
6242 g = gimple_build_assign (sctx.idx, PLUS_EXPR, sctx.idx, t);
6243 gimple_seq_add_stmt (seq, g);
6244 gimple_seq_add_stmt (seq, gimple_build_label (header));
6245 g = gimple_build_cond (LT_EXPR, sctx.idx, vf, body, end);
6246 gimple_seq_add_stmt (seq, g);
6247 gimple_seq_add_stmt (seq, gimple_build_label (end));
6248 }
6249 }
6250 if (sctx.is_simt)
6251 {
6252 gimple_seq_add_seq (dlist, sctx.simt_dlist);
6253 gimple *g
6254 = gimple_build_call_internal (IFN_GOMP_SIMT_EXIT, 1, simtrec);
6255 gimple_seq_add_stmt (dlist, g);
6256 }
6257
6258 /* The copyin sequence is not to be executed by the main thread, since
6259 that would result in self-copies. Perhaps not visible to scalars,
6260 but it certainly is to C++ operator=. */
6261 if (copyin_seq)
6262 {
6263 x = build_call_expr (builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM),
6264 0);
6265 x = build2 (NE_EXPR, boolean_type_node, x,
6266 build_int_cst (TREE_TYPE (x), 0));
6267 x = build3 (COND_EXPR, void_type_node, x, copyin_seq, NULL);
6268 gimplify_and_add (x, ilist);
6269 }
6270
6271 /* If any copyin variable is passed by reference, we must ensure the
6272 master thread doesn't modify it before it is copied over in all
6273 threads. Similarly for variables in both firstprivate and
6274 lastprivate clauses we need to ensure the lastprivate copying
6275 happens after firstprivate copying in all threads. And similarly
6276 for UDRs if initializer expression refers to omp_orig. */
6277 if (copyin_by_ref || lastprivate_firstprivate
6278 || (reduction_omp_orig_ref
6279 && !ctx->scan_inclusive
6280 && !ctx->scan_exclusive))
6281 {
6282 /* Don't add any barrier for #pragma omp simd or
6283 #pragma omp distribute. */
6284 if (!is_task_ctx (ctx)
6285 && (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
6286 || gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_FOR))
6287 gimple_seq_add_stmt (ilist, omp_build_barrier (NULL_TREE));
6288 }
6289
6290 /* If max_vf is non-zero, then we can use only a vectorization factor
6291 up to the max_vf we chose. So stick it into the safelen clause. */
6292 if (maybe_ne (sctx.max_vf, 0U))
6293 {
6294 tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
6295 OMP_CLAUSE_SAFELEN);
6296 poly_uint64 safe_len;
6297 if (c == NULL_TREE
6298 || (poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
6299 && maybe_gt (safe_len, sctx.max_vf)))
6300 {
6301 c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_SAFELEN);
6302 OMP_CLAUSE_SAFELEN_EXPR (c) = build_int_cst (integer_type_node,
6303 sctx.max_vf);
6304 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
6305 gimple_omp_for_set_clauses (ctx->stmt, c);
6306 }
6307 }
6308 }
6309
6310 /* Create temporary variables for lastprivate(conditional:) implementation
6311 in context CTX with CLAUSES. */
6312
6313 static void
lower_lastprivate_conditional_clauses(tree * clauses,omp_context * ctx)6314 lower_lastprivate_conditional_clauses (tree *clauses, omp_context *ctx)
6315 {
6316   tree iter_type = NULL_TREE;
6317   tree cond_ptr = NULL_TREE;
6318   tree iter_var = NULL_TREE;
     /* SIMD constructs are lowered differently below: they reuse the
	_condtemp_ "omp simd array" temporaries the gimplifier already added,
	rather than a separate per-variable counter.  */
6319   bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
6320 		  && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD);
6321   tree next = *clauses;
6322   for (tree c = *clauses; c; c = OMP_CLAUSE_CHAIN (c))
6323     if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6324 	&& OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
6325       {
6326 	if (is_simd)
6327 	  {
	    /* Each lastprivate(conditional:) clause is expected to be paired
	       with an _condtemp_ clause; NEXT advances so successive
	       lastprivates pick up successive _condtemp_s.  */
6328 	    tree cc = omp_find_clause (next, OMP_CLAUSE__CONDTEMP_);
6329 	    gcc_assert (cc);
6330 	    if (iter_type == NULL_TREE)
6331 	      {
		/* First conditional lastprivate seen: create the iteration
		   counter temporary and prepend an _condtemp_ clause marked
		   as the iterator (OMP_CLAUSE__CONDTEMP__ITER) for it.  */
6332 		iter_type = TREE_TYPE (OMP_CLAUSE_DECL (cc));
6333 		iter_var = create_tmp_var_raw (iter_type);
6334 		DECL_CONTEXT (iter_var) = current_function_decl;
6335 		DECL_SEEN_IN_BIND_EXPR_P (iter_var) = 1;
6336 		DECL_CHAIN (iter_var) = ctx->block_vars;
6337 		ctx->block_vars = iter_var;
6338 		tree c3
6339 		  = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_);
6340 		OMP_CLAUSE__CONDTEMP__ITER (c3) = 1;
6341 		OMP_CLAUSE_DECL (c3) = iter_var;
6342 		OMP_CLAUSE_CHAIN (c3) = *clauses;
6343 		*clauses = c3;
6344 		ctx->lastprivate_conditional_map = new hash_map<tree, tree>;
6345 	      }
6346 	    next = OMP_CLAUSE_CHAIN (cc);
6347 	    tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
6348 	    tree v = lookup_decl (OMP_CLAUSE_DECL (cc), ctx);
	    /* Record which _condtemp_ temporary tracks this privatized
	       variable; lower_lastprivate_clauses consults this map.  */
6349 	    ctx->lastprivate_conditional_map->put (o, v);
6350 	    continue;
6351 	  }
6352 	if (iter_type == NULL)
6353 	  {
	    /* Non-SIMD, first conditional lastprivate: choose an unsigned
	       counter type matching the construct's iteration space.  */
6354 	    if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR)
6355 	      {
6356 		struct omp_for_data fd;
6357 		omp_extract_for_data (as_a <gomp_for *> (ctx->stmt), &fd,
6358 				      NULL);
6359 		iter_type = unsigned_type_for (fd.iter_type);
6360 	      }
6361 	    else if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
6362 	      iter_type = unsigned_type_node;
6363 	    tree c2 = omp_find_clause (*clauses, OMP_CLAUSE__CONDTEMP_);
6364 	    if (c2)
6365 	      {
		/* An _condtemp_ already exists; redirect its decl to the
		   version looked up in the enclosing context.  */
6366 		cond_ptr
6367 		  = lookup_decl_in_outer_ctx (OMP_CLAUSE_DECL (c2), ctx);
6368 		OMP_CLAUSE_DECL (c2) = cond_ptr;
6369 	      }
6370 	    else
6371 	      {
		/* Otherwise create the _condtemp_ pointer temporary through
		   which the per-variable counters are accessed, and prepend
		   a clause for it.  */
6372 		cond_ptr = create_tmp_var_raw (build_pointer_type (iter_type));
6373 		DECL_CONTEXT (cond_ptr) = current_function_decl;
6374 		DECL_SEEN_IN_BIND_EXPR_P (cond_ptr) = 1;
6375 		DECL_CHAIN (cond_ptr) = ctx->block_vars;
6376 		ctx->block_vars = cond_ptr;
6377 		c2 = build_omp_clause (UNKNOWN_LOCATION,
6378 				       OMP_CLAUSE__CONDTEMP_);
6379 		OMP_CLAUSE_DECL (c2) = cond_ptr;
6380 		OMP_CLAUSE_CHAIN (c2) = *clauses;
6381 		*clauses = c2;
6382 	      }
	    /* The iteration counter itself, published via an iterator-marked
	       _condtemp_ clause chained right after C2.  */
6383 	    iter_var = create_tmp_var_raw (iter_type);
6384 	    DECL_CONTEXT (iter_var) = current_function_decl;
6385 	    DECL_SEEN_IN_BIND_EXPR_P (iter_var) = 1;
6386 	    DECL_CHAIN (iter_var) = ctx->block_vars;
6387 	    ctx->block_vars = iter_var;
6388 	    tree c3
6389 	      = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_);
6390 	    OMP_CLAUSE__CONDTEMP__ITER (c3) = 1;
6391 	    OMP_CLAUSE_DECL (c3) = iter_var;
6392 	    OMP_CLAUSE_CHAIN (c3) = OMP_CLAUSE_CHAIN (c2);
6393 	    OMP_CLAUSE_CHAIN (c2) = c3;
6394 	    ctx->lastprivate_conditional_map = new hash_map<tree, tree>;
6395 	  }
	/* Per-clause private counter; lower_lastprivate_clauses compares it
	   against the shared counter reached through COND_PTR.  */
6396 	tree v = create_tmp_var_raw (iter_type);
6397 	DECL_CONTEXT (v) = current_function_decl;
6398 	DECL_SEEN_IN_BIND_EXPR_P (v) = 1;
6399 	DECL_CHAIN (v) = ctx->block_vars;
6400 	ctx->block_vars = v;
6401 	tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
6402 	ctx->lastprivate_conditional_map->put (o, v);
6403       }
6404 }
6405
6406
6407 /* Generate code to implement the LASTPRIVATE clauses. This is used for
6408 both parallel and workshare constructs. PREDICATE may be NULL if it's
6409 always true. BODY_P is the sequence to insert early initialization
6410 if needed, STMT_LIST is where the non-conditional lastprivate handling
6411 goes into and CSTMT_LIST is a sequence that needs to be run in a critical
6412 section. */
6413
6414 static void
lower_lastprivate_clauses(tree clauses,tree predicate,gimple_seq * body_p,gimple_seq * stmt_list,gimple_seq * cstmt_list,omp_context * ctx)6415 lower_lastprivate_clauses (tree clauses, tree predicate, gimple_seq *body_p,
6416 			   gimple_seq *stmt_list, gimple_seq *cstmt_list,
6417 			   omp_context *ctx)
6418 {
6419   tree x, c, label = NULL, orig_clauses = clauses;
6420   bool par_clauses = false;
6421   tree simduid = NULL, lastlane = NULL, simtcond = NULL, simtlast = NULL;
   /* Running byte (pointer case) or element (array case) offset into the
      lastprivate(conditional:) counter buffer reached through cond_ptr.  */
6422   unsigned HOST_WIDE_INT conditional_off = 0;
   /* Statements to be emitted after LABEL, i.e. outside the predicate
      guard (used for the combined_into_simd_safelen1 case below).  */
6423   gimple_seq post_stmt_list = NULL;
6424 
6425   /* Early exit if there are no lastprivate or linear clauses.  */
6426   for (; clauses ; clauses = OMP_CLAUSE_CHAIN (clauses))
6427     if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LASTPRIVATE
6428 	|| (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LINEAR
6429 	    && !OMP_CLAUSE_LINEAR_NO_COPYOUT (clauses)))
6430       break;
6431   if (clauses == NULL)
6432     {
6433       /* If this was a workshare clause, see if it had been combined
6434 	 with its parallel.  In that case, look for the clauses on the
6435 	 parallel statement itself.  */
6436       if (is_parallel_ctx (ctx))
6437 	return;
6438 
6439       ctx = ctx->outer;
6440       if (ctx == NULL || !is_parallel_ctx (ctx))
6441 	return;
6442 
6443       clauses = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
6444 				 OMP_CLAUSE_LASTPRIVATE);
6445       if (clauses == NULL)
6446 	return;
6447       par_clauses = true;
6448     }
6449 
6450   bool maybe_simt = false;
6451   if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
6452       && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
6453     {
6454       maybe_simt = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMT_);
6455       simduid = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMDUID_);
6456       if (simduid)
6457 	simduid = OMP_CLAUSE__SIMDUID__DECL (simduid);
6458     }
6459 
   /* Guard the copy-out code with "if (PREDICATE) { ... } LABEL:" so it
      only runs when the predicate holds.  */
6460   if (predicate)
6461     {
6462       gcond *stmt;
6463       tree label_true, arm1, arm2;
6464       enum tree_code pred_code = TREE_CODE (predicate);
6465 
6466       label = create_artificial_label (UNKNOWN_LOCATION);
6467       label_true = create_artificial_label (UNKNOWN_LOCATION);
6468       if (TREE_CODE_CLASS (pred_code) == tcc_comparison)
6469 	{
6470 	  arm1 = TREE_OPERAND (predicate, 0);
6471 	  arm2 = TREE_OPERAND (predicate, 1);
6472 	  gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
6473 	  gimplify_expr (&arm2, stmt_list, NULL, is_gimple_val, fb_rvalue);
6474 	}
6475       else
6476 	{
	  /* Non-comparison predicate: test it against false instead.  */
6477 	  arm1 = predicate;
6478 	  gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
6479 	  arm2 = boolean_false_node;
6480 	  pred_code = NE_EXPR;
6481 	}
6482       if (maybe_simt)
6483 	{
	  /* For SIMT, combine the per-lane predicate across lanes with
	     GOMP_SIMT_VOTE_ANY and branch on the vote's result.  */
6484 	  c = build2 (pred_code, boolean_type_node, arm1, arm2);
6485 	  c = fold_convert (integer_type_node, c);
6486 	  simtcond = create_tmp_var (integer_type_node);
6487 	  gimplify_assign (simtcond, c, stmt_list);
6488 	  gcall *g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY,
6489 						 1, simtcond);
6490 	  c = create_tmp_var (integer_type_node);
6491 	  gimple_call_set_lhs (g, c);
6492 	  gimple_seq_add_stmt (stmt_list, g);
6493 	  stmt = gimple_build_cond (NE_EXPR, c, integer_zero_node,
6494 				    label_true, label);
6495 	}
6496       else
6497 	stmt = gimple_build_cond (pred_code, arm1, arm2, label_true, label);
6498       gimple_seq_add_stmt (stmt_list, stmt);
6499       gimple_seq_add_stmt (stmt_list, gimple_build_label (label_true));
6500     }
6501 
6502   tree cond_ptr = NULL_TREE;
6503   for (c = clauses; c ;)
6504     {
6505       tree var, new_var;
6506       location_t clause_loc = OMP_CLAUSE_LOCATION (c);
6507       gimple_seq *this_stmt_list = stmt_list;
6508       tree lab2 = NULL_TREE;
6509 
      /* lastprivate(conditional:): compare this clause's private counter V
	 (zero-initialized early, in BODY_P) against the shared counter in
	 the buffer behind COND_PTR and, inside the critical section
	 (CSTMT_LIST), overwrite the shared counter when V is greater, so
	 the copy-out below picks the lexically last writer.  */
6510       if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6511 	  && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
6512 	  && ctx->lastprivate_conditional_map
6513 	  && !ctx->combined_into_simd_safelen1)
6514 	{
6515 	  gcc_assert (body_p);
	  /* SIMD handles conditional lastprivate elsewhere.  */
6516 	  if (simduid)
6517 	    goto next;
6518 	  if (cond_ptr == NULL_TREE)
6519 	    {
6520 	      cond_ptr = omp_find_clause (orig_clauses, OMP_CLAUSE__CONDTEMP_);
6521 	      cond_ptr = OMP_CLAUSE_DECL (cond_ptr);
6522 	    }
6523 	  tree type = TREE_TYPE (TREE_TYPE (cond_ptr));
6524 	  tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
6525 	  tree v = *ctx->lastprivate_conditional_map->get (o);
6526 	  gimplify_assign (v, build_zero_cst (type), body_p);
6527 	  this_stmt_list = cstmt_list;
6528 	  tree mem;
6529 	  if (POINTER_TYPE_P (TREE_TYPE (cond_ptr)))
6530 	    {
6531 	      mem = build2 (MEM_REF, type, cond_ptr,
6532 			    build_int_cst (TREE_TYPE (cond_ptr),
6533 					   conditional_off));
6534 	      conditional_off += tree_to_uhwi (TYPE_SIZE_UNIT (type));
6535 	    }
6536 	  else
6537 	    mem = build4 (ARRAY_REF, type, cond_ptr,
6538 			  size_int (conditional_off++), NULL_TREE, NULL_TREE);
6539 	  tree mem2 = copy_node (mem);
6540 	  gimple_seq seq = NULL;
6541 	  mem = force_gimple_operand (mem, &seq, true, NULL_TREE);
6542 	  gimple_seq_add_seq (this_stmt_list, seq);
6543 	  tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
6544 	  lab2 = create_artificial_label (UNKNOWN_LOCATION);
6545 	  gimple *g = gimple_build_cond (GT_EXPR, v, mem, lab1, lab2);
6546 	  gimple_seq_add_stmt (this_stmt_list, g);
6547 	  gimple_seq_add_stmt (this_stmt_list, gimple_build_label (lab1));
6548 	  gimplify_assign (mem2, v, this_stmt_list);
6549 	}
      /* safelen(1)-combined SIMD: emit the conditional copy-out after
	 LABEL, outside the predicate guard.  */
6550       else if (predicate
6551 	       && ctx->combined_into_simd_safelen1
6552 	       && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6553 	       && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
6554 	       && ctx->lastprivate_conditional_map)
6555 	this_stmt_list = &post_stmt_list;
6556 
6557       if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6558 	  || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
6559 	      && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
6560 	{
6561 	  var = OMP_CLAUSE_DECL (c);
6562 	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6563 	      && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
6564 	      && is_taskloop_ctx (ctx))
6565 	    {
	      /* Taskloop firstprivate+lastprivate: the private copy lives
		 in the enclosing task context.  */
6566 	      gcc_checking_assert (ctx->outer && is_task_ctx (ctx->outer));
6567 	      new_var = lookup_decl (var, ctx->outer);
6568 	    }
6569 	  else
6570 	    {
6571 	      new_var = lookup_decl (var, ctx);
6572 	      /* Avoid uninitialized warnings for lastprivate and
6573 		 for linear iterators.  */
6574 	      if (predicate
6575 		  && (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6576 		      || OMP_CLAUSE_LINEAR_NO_COPYIN (c)))
6577 		TREE_NO_WARNING (new_var) = 1;
6578 	    }
6579 
	  /* If the private copy is an "omp simd array" element, copy out
	     from the lane that executed the last iteration, obtained once
	     per construct via GOMP_SIMD_LAST_LANE.  */
6580 	  if (!maybe_simt && simduid && DECL_HAS_VALUE_EXPR_P (new_var))
6581 	    {
6582 	      tree val = DECL_VALUE_EXPR (new_var);
6583 	      if (TREE_CODE (val) == ARRAY_REF
6584 		  && VAR_P (TREE_OPERAND (val, 0))
6585 		  && lookup_attribute ("omp simd array",
6586 				       DECL_ATTRIBUTES (TREE_OPERAND (val,
6587 								      0))))
6588 		{
6589 		  if (lastlane == NULL)
6590 		    {
6591 		      lastlane = create_tmp_var (unsigned_type_node);
6592 		      gcall *g
6593 			= gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
6594 						      2, simduid,
6595 						      TREE_OPERAND (val, 1));
6596 		      gimple_call_set_lhs (g, lastlane);
6597 		      gimple_seq_add_stmt (this_stmt_list, g);
6598 		    }
6599 		  new_var = build4 (ARRAY_REF, TREE_TYPE (val),
6600 				    TREE_OPERAND (val, 0), lastlane,
6601 				    NULL_TREE, NULL_TREE);
6602 		  TREE_THIS_NOTRAP (new_var) = 1;
6603 		}
6604 	    }
6605 	  else if (maybe_simt)
6606 	    {
	      /* SIMT: find the last lane satisfying the predicate and pull
		 the value across lanes with GOMP_SIMT_XCHG_IDX.  */
6607 	      tree val = (DECL_HAS_VALUE_EXPR_P (new_var)
6608 			  ? DECL_VALUE_EXPR (new_var)
6609 			  : new_var);
6610 	      if (simtlast == NULL)
6611 		{
6612 		  simtlast = create_tmp_var (unsigned_type_node);
6613 		  gcall *g = gimple_build_call_internal
6614 		    (IFN_GOMP_SIMT_LAST_LANE, 1, simtcond);
6615 		  gimple_call_set_lhs (g, simtlast);
6616 		  gimple_seq_add_stmt (this_stmt_list, g);
6617 		}
6618 	      x = build_call_expr_internal_loc
6619 		(UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_IDX,
6620 		 TREE_TYPE (val), 2, val, simtlast);
6621 	      new_var = unshare_expr (new_var);
6622 	      gimplify_assign (new_var, x, this_stmt_list);
6623 	      new_var = unshare_expr (new_var);
6624 	    }
6625 
	  /* Lower and emit any clause-attached sequence (e.g. a linear
	     step update) before the copy-out assignment.  */
6626 	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6627 	      && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
6628 	    {
6629 	      lower_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
6630 	      gimple_seq_add_seq (this_stmt_list,
6631 				  OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
6632 	      OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) = NULL;
6633 	    }
6634 	  else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
6635 		   && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
6636 	    {
6637 	      lower_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
6638 	      gimple_seq_add_seq (this_stmt_list,
6639 				  OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
6640 	      OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) = NULL;
6641 	    }
6642 
	  /* Destination of the copy-out: normally the outer var reference;
	     for a taskloop loop IV whose outer variable is global, use the
	     global directly.  */
6643 	  x = NULL_TREE;
6644 	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6645 	      && OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c)
6646 	      && is_taskloop_ctx (ctx))
6647 	    {
6648 	      tree ovar = maybe_lookup_decl_in_outer_ctx (var,
6649 							  ctx->outer->outer);
6650 	      if (is_global_var (ovar))
6651 		x = ovar;
6652 	    }
6653 	  if (!x)
6654 	    x = build_outer_var_ref (var, ctx, OMP_CLAUSE_LASTPRIVATE);
6655 	  if (omp_is_reference (var))
6656 	    new_var = build_simple_mem_ref_loc (clause_loc, new_var);
6657 	  x = lang_hooks.decls.omp_clause_assign_op (c, x, new_var);
6658 	  gimplify_and_add (x, this_stmt_list);
6659 
	  /* LAB2 closes the counter-update guard opened above.  */
6660 	  if (lab2)
6661 	    gimple_seq_add_stmt (this_stmt_list, gimple_build_label (lab2));
6662 	}
6663 
6664      next:
6665       c = OMP_CLAUSE_CHAIN (c);
6666       if (c == NULL && !par_clauses)
6667 	{
6668 	  /* If this was a workshare clause, see if it had been combined
6669 	     with its parallel.  In that case, continue looking for the
6670 	     clauses also on the parallel statement itself.  */
6671 	  if (is_parallel_ctx (ctx))
6672 	    break;
6673 
6674 	  ctx = ctx->outer;
6675 	  if (ctx == NULL || !is_parallel_ctx (ctx))
6676 	    break;
6677 
6678 	  c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
6679 			       OMP_CLAUSE_LASTPRIVATE);
6680 	  par_clauses = true;
6681 	}
6682     }
6683 
6684   if (label)
6685     gimple_seq_add_stmt (stmt_list, gimple_build_label (label));
6686   gimple_seq_add_seq (stmt_list, post_stmt_list);
6687 }
6688
6689 /* Lower the OpenACC reductions of CLAUSES for compute axis LEVEL
6690 (which might be a placeholder). INNER is true if this is an inner
6691 axis of a multi-axis loop. FORK and JOIN are (optional) fork and
6692 join markers. Generate the before-loop forking sequence in
6693 FORK_SEQ and the after-loop joining sequence to JOIN_SEQ. The
6694 general form of these sequences is
6695
6696 GOACC_REDUCTION_SETUP
6697 GOACC_FORK
6698 GOACC_REDUCTION_INIT
6699 ...
6700 GOACC_REDUCTION_FINI
6701 GOACC_JOIN
6702 GOACC_REDUCTION_TEARDOWN. */
6703
6704 static void
lower_oacc_reductions(location_t loc,tree clauses,tree level,bool inner,gcall * fork,gcall * join,gimple_seq * fork_seq,gimple_seq * join_seq,omp_context * ctx)6705 lower_oacc_reductions (location_t loc, tree clauses, tree level, bool inner,
6706 		       gcall *fork, gcall *join, gimple_seq *fork_seq,
6707 		       gimple_seq *join_seq, omp_context *ctx)
6708 {
   /* The four sequences bracket the FORK and JOIN markers; they are
      stitched into FORK_SEQ/JOIN_SEQ at the end.  */
6709   gimple_seq before_fork = NULL;
6710   gimple_seq after_fork = NULL;
6711   gimple_seq before_join = NULL;
6712   gimple_seq after_join = NULL;
6713   tree init_code = NULL_TREE, fini_code = NULL_TREE,
6714     setup_code = NULL_TREE, teardown_code = NULL_TREE;
   /* Byte offset of each reduction variable in the reduction buffer.  */
6715   unsigned offset = 0;
6716 
6717   for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
6718     if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
6719       {
6720 	/* No 'reduction' clauses on OpenACC 'kernels'.  */
6721 	gcc_checking_assert (!is_oacc_kernels (ctx));
6722 
6723 	tree orig = OMP_CLAUSE_DECL (c);
6724 	tree var = maybe_lookup_decl (orig, ctx);
6725 	tree ref_to_res = NULL_TREE;
6726 	tree incoming, outgoing, v1, v2, v3;
6727 	bool is_private = false;
6728 
	/* Normalize the reduction operator: '-' reduces like '+', and the
	   short-circuit logical operators are encoded as their bitwise
	   counterparts.  OP carries the code as an integer argument for
	   the IFN_GOACC_REDUCTION calls below.  */
6729 	enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
6730 	if (rcode == MINUS_EXPR)
6731 	  rcode = PLUS_EXPR;
6732 	else if (rcode == TRUTH_ANDIF_EXPR)
6733 	  rcode = BIT_AND_EXPR;
6734 	else if (rcode == TRUTH_ORIF_EXPR)
6735 	  rcode = BIT_IOR_EXPR;
6736 	tree op = build_int_cst (unsigned_type_node, rcode);
6737 
6738 	if (!var)
6739 	  var = orig;
6740 
6741 	incoming = outgoing = var;
6742 
6743 	if (!inner)
6744 	  {
6745 	    /* See if an outer construct also reduces this variable.  */
6746 	    omp_context *outer = ctx;
6747 
6748 	    while (omp_context *probe = outer->outer)
6749 	      {
6750 		enum gimple_code type = gimple_code (probe->stmt);
6751 		tree cls;
6752 
		/* Only loop and offloaded target constructs can carry an
		   outer reduction of the same variable; anything else ends
		   the walk.  */
6753 		switch (type)
6754 		  {
6755 		  case GIMPLE_OMP_FOR:
6756 		    cls = gimple_omp_for_clauses (probe->stmt);
6757 		    break;
6758 
6759 		  case GIMPLE_OMP_TARGET:
6760 		    /* No 'reduction' clauses inside OpenACC 'kernels'
6761 		       regions.  */
6762 		    gcc_checking_assert (!is_oacc_kernels (probe));
6763 
6764 		    if (!is_gimple_omp_offloaded (probe->stmt))
6765 		      goto do_lookup;
6766 
6767 		    cls = gimple_omp_target_clauses (probe->stmt);
6768 		    break;
6769 
6770 		  default:
6771 		    goto do_lookup;
6772 		  }
6773 
6774 		outer = probe;
6775 		for (; cls; cls = OMP_CLAUSE_CHAIN (cls))
6776 		  if (OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_REDUCTION
6777 		      && orig == OMP_CLAUSE_DECL (cls))
6778 		    {
		      /* Outer construct reduces the same variable; feed its
			 private copy in and out of this reduction.  */
6779 		      incoming = outgoing = lookup_decl (orig, probe);
6780 		      goto has_outer_reduction;
6781 		    }
6782 		  else if ((OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_FIRSTPRIVATE
6783 			    || OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_PRIVATE)
6784 			   && orig == OMP_CLAUSE_DECL (cls))
6785 		    {
6786 		      is_private = true;
6787 		      goto do_lookup;
6788 		    }
6789 	      }
6790 
6791 	  do_lookup:
6792 	    /* This is the outermost construct with this reduction,
6793 	       see if there's a mapping for it.  */
6794 	    if (gimple_code (outer->stmt) == GIMPLE_OMP_TARGET
6795 		&& maybe_lookup_field (orig, outer) && !is_private)
6796 	      {
		/* Mapped on the target: start from the operator's neutral
		   value and write the result back through the receiver
		   reference.  */
6797 		ref_to_res = build_receiver_ref (orig, false, outer);
6798 		if (omp_is_reference (orig))
6799 		  ref_to_res = build_simple_mem_ref (ref_to_res);
6800 
6801 		tree type = TREE_TYPE (var);
6802 		if (POINTER_TYPE_P (type))
6803 		  type = TREE_TYPE (type);
6804 
6805 		outgoing = var;
6806 		incoming = omp_reduction_init_op (loc, rcode, type);
6807 	      }
6808 	    else
6809 	      {
6810 		/* Try to look at enclosing contexts for reduction var,
6811 		   use original if no mapping found.  */
6812 		tree t = NULL_TREE;
6813 		omp_context *c = ctx->outer;
6814 		while (c && !t)
6815 		  {
6816 		    t = maybe_lookup_decl (orig, c);
6817 		    c = c->outer;
6818 		  }
6819 		incoming = outgoing = (t ? t : orig);
6820 	      }
6821 
6822 	  has_outer_reduction:;
6823 	  }
6824 
6825 	if (!ref_to_res)
6826 	  ref_to_res = integer_zero_node;
6827 
	/* For a by-reference reduction, make three private copies of the
	   pointer (one per SETUP/INIT/FINI call) and dereference them so
	   the calls operate on the pointed-to values.  */
6828 	if (omp_is_reference (orig))
6829 	  {
6830 	    tree type = TREE_TYPE (var);
6831 	    const char *id = IDENTIFIER_POINTER (DECL_NAME (var));
6832 
6833 	    if (!inner)
6834 	      {
		/* Point VAR at fresh local storage for the reduction.  */
6835 		tree x = create_tmp_var (TREE_TYPE (type), id);
6836 		gimplify_assign (var, build_fold_addr_expr (x), fork_seq);
6837 	      }
6838 
6839 	    v1 = create_tmp_var (type, id);
6840 	    v2 = create_tmp_var (type, id);
6841 	    v3 = create_tmp_var (type, id);
6842 
6843 	    gimplify_assign (v1, var, fork_seq);
6844 	    gimplify_assign (v2, var, fork_seq);
6845 	    gimplify_assign (v3, var, fork_seq);
6846 
6847 	    var = build_simple_mem_ref (var);
6848 	    v1 = build_simple_mem_ref (v1);
6849 	    v2 = build_simple_mem_ref (v2);
6850 	    v3 = build_simple_mem_ref (v3);
6851 	    outgoing = build_simple_mem_ref (outgoing);
6852 
6853 	    if (!TREE_CONSTANT (incoming))
6854 	      incoming = build_simple_mem_ref (incoming);
6855 	  }
6856 	else
6857 	  v1 = v2 = v3 = var;
6858 
6859 	/* Determine position in reduction buffer, which may be used
6860 	   by target.  The parser has ensured that this is not a
6861 	   variable-sized type.  */
6862 	fixed_size_mode mode
6863 	  = as_a <fixed_size_mode> (TYPE_MODE (TREE_TYPE (var)));
6864 	unsigned align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	/* Round OFFSET up to the variable's natural alignment.  */
6865 	offset = (offset + align - 1) & ~(align - 1);
6866 	tree off = build_int_cst (sizetype, offset);
6867 	offset += GET_MODE_SIZE (mode);
6868 
	/* Build the operation-code constants lazily, once for all
	   reduction clauses.  */
6869 	if (!init_code)
6870 	  {
6871 	    init_code = build_int_cst (integer_type_node,
6872 				       IFN_GOACC_REDUCTION_INIT);
6873 	    fini_code = build_int_cst (integer_type_node,
6874 				       IFN_GOACC_REDUCTION_FINI);
6875 	    setup_code = build_int_cst (integer_type_node,
6876 					IFN_GOACC_REDUCTION_SETUP);
6877 	    teardown_code = build_int_cst (integer_type_node,
6878 					   IFN_GOACC_REDUCTION_TEARDOWN);
6879 	  }
6880 
	/* Each IFN_GOACC_REDUCTION call takes: the sub-operation code, the
	   mapped result reference (or 0), the local value, the compute
	   LEVEL, the reduction operator OP, and the buffer offset OFF.  */
6881 	tree setup_call
6882 	  = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
6883 					  TREE_TYPE (var), 6, setup_code,
6884 					  unshare_expr (ref_to_res),
6885 					  incoming, level, op, off);
6886 	tree init_call
6887 	  = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
6888 					  TREE_TYPE (var), 6, init_code,
6889 					  unshare_expr (ref_to_res),
6890 					  v1, level, op, off);
6891 	tree fini_call
6892 	  = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
6893 					  TREE_TYPE (var), 6, fini_code,
6894 					  unshare_expr (ref_to_res),
6895 					  v2, level, op, off);
6896 	tree teardown_call
6897 	  = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
6898 					  TREE_TYPE (var), 6, teardown_code,
6899 					  ref_to_res, v3, level, op, off);
6900 
	/* SETUP before the fork, INIT after it, FINI before the join,
	   TEARDOWN (writing the final value) after it.  */
6901 	gimplify_assign (v1, setup_call, &before_fork);
6902 	gimplify_assign (v2, init_call, &after_fork);
6903 	gimplify_assign (v3, fini_call, &before_join);
6904 	gimplify_assign (outgoing, teardown_call, &after_join);
6905       }
6906 
6907   /* Now stitch things together.  */
6908   gimple_seq_add_seq (fork_seq, before_fork);
6909   if (fork)
6910     gimple_seq_add_stmt (fork_seq, fork);
6911   gimple_seq_add_seq (fork_seq, after_fork);
6912 
6913   gimple_seq_add_seq (join_seq, before_join);
6914   if (join)
6915     gimple_seq_add_stmt (join_seq, join);
6916   gimple_seq_add_seq (join_seq, after_join);
6917 }
6918
/* Generate code to implement the REDUCTION clauses, append it
   to STMT_SEQP.  CLIST if non-NULL is a pointer to a sequence
   that should be emitted also inside of the critical section,
   in that case clear *CLIST afterwards, otherwise leave it as is
   and let the caller emit it itself.

   The merging of each thread's private copy into the shared variable
   is emitted either as a single OMP_ATOMIC update (exactly one scalar
   reduction clause) or wrapped in a GOMP_atomic_start/end critical
   section (multiple clauses, array sections, or UDRs).  */

static void
lower_reduction_clauses (tree clauses, gimple_seq *stmt_seqp,
			 gimple_seq *clist, omp_context *ctx)
{
  gimple_seq sub_seq = NULL;
  gimple *stmt;
  tree x, c;
  int count = 0;

  /* OpenACC loop reductions are handled elsewhere.  */
  if (is_gimple_omp_oacc (ctx->stmt))
    return;

  /* SIMD reductions are handled in lower_rec_input_clauses.  */
  if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
      && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
    return;

  /* inscan reductions are handled elsewhere.  */
  if (ctx->scan_inclusive || ctx->scan_exclusive)
    return;

  /* First see if there is exactly one reduction clause.  Use OMP_ATOMIC
     update in that case, otherwise use a lock.  COUNT ends up as 0 (no
     reductions), 1 (single atomic-capable reduction) or -1/2+ (use the
     critical section path).  */
  for (c = clauses; c && count < 2; c = OMP_CLAUSE_CHAIN (c))
    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	&& !OMP_CLAUSE_REDUCTION_TASK (c))
      {
	if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
	    || TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
	  {
	    /* Never use OMP_ATOMIC for array reductions or UDRs.  */
	    count = -1;
	    break;
	  }
	count++;
      }

  if (count == 0)
    return;

  for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
    {
      tree var, ref, new_var, orig_var;
      enum tree_code code;
      location_t clause_loc = OMP_CLAUSE_LOCATION (c);

      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
	  || OMP_CLAUSE_REDUCTION_TASK (c))
	continue;

      enum omp_clause_code ccode = OMP_CLAUSE_REDUCTION;
      orig_var = var = OMP_CLAUSE_DECL (c);
      if (TREE_CODE (var) == MEM_REF)
	{
	  /* Array section reduction: strip the address computation
	     (MEM_REF, optional POINTER_PLUS_EXPR, ADDR_EXPR/INDIRECT_REF)
	     to get at the underlying base decl.  */
	  var = TREE_OPERAND (var, 0);
	  if (TREE_CODE (var) == POINTER_PLUS_EXPR)
	    var = TREE_OPERAND (var, 0);
	  if (TREE_CODE (var) == ADDR_EXPR)
	    var = TREE_OPERAND (var, 0);
	  else
	    {
	      /* If this is a pointer or referenced based array
		 section, the var could be private in the outer
		 context e.g. on orphaned loop construct.  Pretend this
		 is private variable's outer reference.  */
	      ccode = OMP_CLAUSE_PRIVATE;
	      if (TREE_CODE (var) == INDIRECT_REF)
		var = TREE_OPERAND (var, 0);
	    }
	  orig_var = var;
	  if (is_variable_sized (var))
	    {
	      /* VLA bases are accessed through their DECL_VALUE_EXPR,
		 which must be *ptr; use the pointer decl instead.  */
	      gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
	      var = DECL_VALUE_EXPR (var);
	      gcc_assert (TREE_CODE (var) == INDIRECT_REF);
	      var = TREE_OPERAND (var, 0);
	      gcc_assert (DECL_P (var));
	    }
	}
      new_var = lookup_decl (var, ctx);
      if (var == OMP_CLAUSE_DECL (c) && omp_is_reference (var))
	new_var = build_simple_mem_ref_loc (clause_loc, new_var);
      ref = build_outer_var_ref (var, ctx, ccode);
      code = OMP_CLAUSE_REDUCTION_CODE (c);

      /* reduction(-:var) sums up the partial results, so it acts
	 identically to reduction(+:var).  */
      if (code == MINUS_EXPR)
	code = PLUS_EXPR;

      if (count == 1)
	{
	  /* Single scalar reduction: emit one relaxed OMP_ATOMIC
	     read-modify-write on the outer variable and we are done.  */
	  tree addr = build_fold_addr_expr_loc (clause_loc, ref);

	  addr = save_expr (addr);
	  ref = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (addr)), addr);
	  x = fold_build2_loc (clause_loc, code, TREE_TYPE (ref), ref, new_var);
	  x = build2 (OMP_ATOMIC, void_type_node, addr, x);
	  OMP_ATOMIC_MEMORY_ORDER (x) = OMP_MEMORY_ORDER_RELAXED;
	  gimplify_and_add (x, stmt_seqp);
	  return;
	}
      else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
	{
	  /* Array section reduction: emit an element-wise merge loop
	     (I = 0 .. V) walking a private pointer and an outer pointer
	     in lockstep.  */
	  tree d = OMP_CLAUSE_DECL (c);
	  tree type = TREE_TYPE (d);
	  tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
	  tree i = create_tmp_var (TREE_TYPE (v));
	  tree ptype = build_pointer_type (TREE_TYPE (type));
	  tree bias = TREE_OPERAND (d, 1);
	  d = TREE_OPERAND (d, 0);
	  if (TREE_CODE (d) == POINTER_PLUS_EXPR)
	    {
	      /* Fold the section's start offset into BIAS, remapping the
		 offset decl into this context if needed.  */
	      tree b = TREE_OPERAND (d, 1);
	      b = maybe_lookup_decl (b, ctx);
	      if (b == NULL)
		{
		  b = TREE_OPERAND (d, 1);
		  b = maybe_lookup_decl_in_outer_ctx (b, ctx);
		}
	      if (integer_zerop (bias))
		bias = b;
	      else
		{
		  bias = fold_convert_loc (clause_loc, TREE_TYPE (b), bias);
		  bias = fold_build2_loc (clause_loc, PLUS_EXPR,
					  TREE_TYPE (b), b, bias);
		}
	      d = TREE_OPERAND (d, 0);
	    }
	  /* For ref build_outer_var_ref already performs this, so
	     only new_var needs a dereference.  */
	  if (TREE_CODE (d) == INDIRECT_REF)
	    {
	      new_var = build_simple_mem_ref_loc (clause_loc, new_var);
	      gcc_assert (omp_is_reference (var) && var == orig_var);
	    }
	  else if (TREE_CODE (d) == ADDR_EXPR)
	    {
	      if (orig_var == var)
		{
		  new_var = build_fold_addr_expr (new_var);
		  ref = build_fold_addr_expr (ref);
		}
	    }
	  else
	    {
	      gcc_assert (orig_var == var);
	      if (omp_is_reference (var))
		ref = build_fold_addr_expr (ref);
	    }
	  if (DECL_P (v))
	    {
	      /* The upper bound may itself be a remapped decl; gimplify
		 it to a value usable in this context.  */
	      tree t = maybe_lookup_decl (v, ctx);
	      if (t)
		v = t;
	      else
		v = maybe_lookup_decl_in_outer_ctx (v, ctx);
	      gimplify_expr (&v, stmt_seqp, NULL, is_gimple_val, fb_rvalue);
	    }
	  if (!integer_zerop (bias))
	    {
	      bias = fold_convert_loc (clause_loc, sizetype, bias);
	      new_var = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
					 TREE_TYPE (new_var), new_var,
					 unshare_expr (bias));
	      ref = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
				     TREE_TYPE (ref), ref, bias);
	    }
	  new_var = fold_convert_loc (clause_loc, ptype, new_var);
	  ref = fold_convert_loc (clause_loc, ptype, ref);
	  tree m = create_tmp_var (ptype);
	  gimplify_assign (m, new_var, stmt_seqp);
	  new_var = m;
	  m = create_tmp_var (ptype);
	  gimplify_assign (m, ref, stmt_seqp);
	  ref = m;
	  gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), stmt_seqp);
	  tree body = create_artificial_label (UNKNOWN_LOCATION);
	  tree end = create_artificial_label (UNKNOWN_LOCATION);
	  gimple_seq_add_stmt (&sub_seq, gimple_build_label (body));
	  tree priv = build_simple_mem_ref_loc (clause_loc, new_var);
	  tree out = build_simple_mem_ref_loc (clause_loc, ref);
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	    {
	      /* User-defined reduction: splice in the pre-lowered merge
		 sequence, binding the placeholders to the current
		 element via DECL_VALUE_EXPR.  */
	      tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
	      tree decl_placeholder
		= OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
	      SET_DECL_VALUE_EXPR (placeholder, out);
	      DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
	      SET_DECL_VALUE_EXPR (decl_placeholder, priv);
	      DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
	      lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
	      gimple_seq_add_seq (&sub_seq,
				  OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
	      OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
	      OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
	      OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
	    }
	  else
	    {
	      /* Built-in operator: *out = *out CODE *priv.  */
	      x = build2 (code, TREE_TYPE (out), out, priv);
	      out = unshare_expr (out);
	      gimplify_assign (out, x, &sub_seq);
	    }
	  /* Advance both pointers by one element and loop while I <= V.  */
	  gimple *g = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
					  TYPE_SIZE_UNIT (TREE_TYPE (type)));
	  gimple_seq_add_stmt (&sub_seq, g);
	  g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
				   TYPE_SIZE_UNIT (TREE_TYPE (type)));
	  gimple_seq_add_stmt (&sub_seq, g);
	  g = gimple_build_assign (i, PLUS_EXPR, i,
				   build_int_cst (TREE_TYPE (i), 1));
	  gimple_seq_add_stmt (&sub_seq, g);
	  g = gimple_build_cond (LE_EXPR, i, v, body, end);
	  gimple_seq_add_stmt (&sub_seq, g);
	  gimple_seq_add_stmt (&sub_seq, gimple_build_label (end));
	}
      else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	{
	  /* Scalar user-defined reduction: bind the placeholder to the
	     outer variable and splice in the merge sequence.  */
	  tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);

	  if (omp_is_reference (var)
	      && !useless_type_conversion_p (TREE_TYPE (placeholder),
					     TREE_TYPE (ref)))
	    ref = build_fold_addr_expr_loc (clause_loc, ref);
	  SET_DECL_VALUE_EXPR (placeholder, ref);
	  DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
	  lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
	  gimple_seq_add_seq (&sub_seq, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
	  OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
	  OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
	}
      else
	{
	  /* Plain scalar reduction: outer = outer CODE private.  */
	  x = build2 (code, TREE_TYPE (ref), ref, new_var);
	  ref = build_outer_var_ref (var, ctx);
	  gimplify_assign (ref, x, &sub_seq);
	}
    }

  /* Wrap all accumulated merge code in a GOMP atomic critical section,
     appending the caller's CLIST (if any) inside the same section.  */
  stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START),
			    0);
  gimple_seq_add_stmt (stmt_seqp, stmt);

  gimple_seq_add_seq (stmt_seqp, sub_seq);

  if (clist)
    {
      gimple_seq_add_seq (stmt_seqp, *clist);
      *clist = NULL;
    }

  stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END),
			    0);
  gimple_seq_add_stmt (stmt_seqp, stmt);
}
7183
7184
7185 /* Generate code to implement the COPYPRIVATE clauses. */
7186
7187 static void
lower_copyprivate_clauses(tree clauses,gimple_seq * slist,gimple_seq * rlist,omp_context * ctx)7188 lower_copyprivate_clauses (tree clauses, gimple_seq *slist, gimple_seq *rlist,
7189 omp_context *ctx)
7190 {
7191 tree c;
7192
7193 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7194 {
7195 tree var, new_var, ref, x;
7196 bool by_ref;
7197 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
7198
7199 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYPRIVATE)
7200 continue;
7201
7202 var = OMP_CLAUSE_DECL (c);
7203 by_ref = use_pointer_for_field (var, NULL);
7204
7205 ref = build_sender_ref (var, ctx);
7206 x = new_var = lookup_decl_in_outer_ctx (var, ctx);
7207 if (by_ref)
7208 {
7209 x = build_fold_addr_expr_loc (clause_loc, new_var);
7210 x = fold_convert_loc (clause_loc, TREE_TYPE (ref), x);
7211 }
7212 gimplify_assign (ref, x, slist);
7213
7214 ref = build_receiver_ref (var, false, ctx);
7215 if (by_ref)
7216 {
7217 ref = fold_convert_loc (clause_loc,
7218 build_pointer_type (TREE_TYPE (new_var)),
7219 ref);
7220 ref = build_fold_indirect_ref_loc (clause_loc, ref);
7221 }
7222 if (omp_is_reference (var))
7223 {
7224 ref = fold_convert_loc (clause_loc, TREE_TYPE (new_var), ref);
7225 ref = build_simple_mem_ref_loc (clause_loc, ref);
7226 new_var = build_simple_mem_ref_loc (clause_loc, new_var);
7227 }
7228 x = lang_hooks.decls.omp_clause_assign_op (c, new_var, ref);
7229 gimplify_and_add (x, rlist);
7230 }
7231 }
7232
7233
/* Generate code to implement the clauses, FIRSTPRIVATE, COPYIN, LASTPRIVATE,
   and REDUCTION from the sender (aka parent) side.  Initialization stores
   into the communication record go to ILIST; stores copying results back
   out of the record after the region go to OLIST.  */

static void
lower_send_clauses (tree clauses, gimple_seq *ilist, gimple_seq *olist,
		    omp_context *ctx)
{
  tree c, t;
  int ignored_looptemp = 0;
  bool is_taskloop = false;

  /* For taskloop, ignore first two _looptemp_ clauses, those are initialized
     by GOMP_taskloop.  */
  if (is_task_ctx (ctx) && gimple_omp_task_taskloop_p (ctx->stmt))
    {
      ignored_looptemp = 2;
      is_taskloop = true;
    }

  for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
    {
      tree val, ref, x, var;
      bool by_ref, do_in = false, do_out = false;
      location_t clause_loc = OMP_CLAUSE_LOCATION (c);

      /* First switch: filter to the clause kinds that need sender-side
	 data movement; 'continue' skips the clause entirely.  */
      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_PRIVATE:
	  if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
	    break;
	  continue;
	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_LASTPRIVATE:
	case OMP_CLAUSE_IN_REDUCTION:
	case OMP_CLAUSE__REDUCTEMP_:
	  break;
	case OMP_CLAUSE_REDUCTION:
	  if (is_task_ctx (ctx) || OMP_CLAUSE_REDUCTION_TASK (c))
	    continue;
	  break;
	case OMP_CLAUSE_SHARED:
	  if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	    break;
	  continue;
	case OMP_CLAUSE__LOOPTEMP_:
	  if (ignored_looptemp)
	    {
	      ignored_looptemp--;
	      continue;
	    }
	  break;
	default:
	  continue;
	}

      val = OMP_CLAUSE_DECL (c);
      if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	   || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION)
	  && TREE_CODE (val) == MEM_REF)
	{
	  /* Array section reduction: strip the address computation to
	     find the base decl to be sent.  */
	  val = TREE_OPERAND (val, 0);
	  if (TREE_CODE (val) == POINTER_PLUS_EXPR)
	    val = TREE_OPERAND (val, 0);
	  if (TREE_CODE (val) == INDIRECT_REF
	      || TREE_CODE (val) == ADDR_EXPR)
	    val = TREE_OPERAND (val, 0);
	  if (is_variable_sized (val))
	    continue;
	}

      /* For OMP_CLAUSE_SHARED_FIRSTPRIVATE, look beyond the
	 outer taskloop region.  */
      omp_context *ctx_for_o = ctx;
      if (is_taskloop
	  && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
	  && OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	ctx_for_o = ctx->outer;

      var = lookup_decl_in_outer_ctx (val, ctx_for_o);

      /* Globals are accessible directly and usually need no
	 marshalling, except in the task cases spelled out here.  */
      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYIN
	  && is_global_var (var)
	  && (val == OMP_CLAUSE_DECL (c)
	      || !is_task_ctx (ctx)
	      || (TREE_CODE (TREE_TYPE (val)) != POINTER_TYPE
		  && (TREE_CODE (TREE_TYPE (val)) != REFERENCE_TYPE
		      || (TREE_CODE (TREE_TYPE (TREE_TYPE (val)))
			  != POINTER_TYPE)))))
	continue;

      t = omp_member_access_dummy_var (var);
      if (t)
	{
	  /* Member access (this->field): send the underlying access
	     expression, remapped into the right context.  */
	  var = DECL_VALUE_EXPR (var);
	  tree o = maybe_lookup_decl_in_outer_ctx (t, ctx_for_o);
	  if (o != t)
	    var = unshare_and_remap (var, t, o);
	  else
	    var = unshare_expr (var);
	}

      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
	{
	  /* Handle taskloop firstprivate/lastprivate, where the
	     lastprivate on GIMPLE_OMP_TASK is represented as
	     OMP_CLAUSE_SHARED_FIRSTPRIVATE.  */
	  tree f = lookup_sfield ((splay_tree_key) &DECL_UID (val), ctx);
	  x = omp_build_component_ref (ctx->sender_decl, f);
	  if (use_pointer_for_field (val, ctx))
	    var = build_fold_addr_expr (var);
	  gimplify_assign (x, var, ilist);
	  DECL_ABSTRACT_ORIGIN (f) = NULL;
	  continue;
	}

      if (((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
	    && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IN_REDUCTION)
	   || val == OMP_CLAUSE_DECL (c))
	  && is_variable_sized (val))
	continue;
      by_ref = use_pointer_for_field (val, NULL);

      /* Second switch: decide the direction of data movement —
	 DO_IN copies into the record, DO_OUT copies back out.  */
      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_FIRSTPRIVATE:
	  if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c)
	      && !by_ref
	      && is_task_ctx (ctx))
	    TREE_NO_WARNING (var) = 1;
	  do_in = true;
	  break;

	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE__LOOPTEMP_:
	case OMP_CLAUSE__REDUCTEMP_:
	  do_in = true;
	  break;

	case OMP_CLAUSE_LASTPRIVATE:
	  if (by_ref || omp_is_reference (val))
	    {
	      if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
		continue;
	      do_in = true;
	    }
	  else
	    {
	      do_out = true;
	      if (lang_hooks.decls.omp_private_outer_ref (val))
		do_in = true;
	    }
	  break;

	case OMP_CLAUSE_REDUCTION:
	case OMP_CLAUSE_IN_REDUCTION:
	  do_in = true;
	  if (val == OMP_CLAUSE_DECL (c))
	    {
	      if (is_task_ctx (ctx))
		by_ref = use_pointer_for_field (val, ctx);
	      else
		do_out = !(by_ref || omp_is_reference (val));
	    }
	  else
	    by_ref = TREE_CODE (TREE_TYPE (val)) == ARRAY_TYPE;
	  break;

	default:
	  gcc_unreachable ();
	}

      if (do_in)
	{
	  ref = build_sender_ref (val, ctx);
	  x = by_ref ? build_fold_addr_expr_loc (clause_loc, var) : var;
	  gimplify_assign (ref, x, ilist);
	  if (is_task_ctx (ctx))
	    DECL_ABSTRACT_ORIGIN (TREE_OPERAND (ref, 1)) = NULL;
	}

      if (do_out)
	{
	  ref = build_sender_ref (val, ctx);
	  gimplify_assign (var, ref, olist);
	}
    }
}
7423
/* Generate code to implement SHARED from the sender (aka parent)
   side.  This is trickier, since GIMPLE_OMP_PARALLEL_CLAUSES doesn't
   list things that got automatically shared.  Instead we walk the
   fields of the communication record type, which were created with
   DECL_ABSTRACT_ORIGIN pointing at the shared decl they stand for.  */

static void
lower_send_shared_vars (gimple_seq *ilist, gimple_seq *olist, omp_context *ctx)
{
  tree var, ovar, nvar, t, f, x, record_type;

  if (ctx->record_type == NULL)
    return;

  record_type = ctx->srecord_type ? ctx->srecord_type : ctx->record_type;
  for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
    {
      /* Each field remembers the original shared decl; skip fields
	 without one (or whose origin is itself a field).  */
      ovar = DECL_ABSTRACT_ORIGIN (f);
      if (!ovar || TREE_CODE (ovar) == FIELD_DECL)
	continue;

      /* Only vars actually remapped in this context (i.e. accessed
	 through the record, as witnessed by a value expr) matter.  */
      nvar = maybe_lookup_decl (ovar, ctx);
      if (!nvar || !DECL_HAS_VALUE_EXPR_P (nvar))
	continue;

      /* If CTX is a nested parallel directive.  Find the immediately
	 enclosing parallel or workshare construct that contains a
	 mapping for OVAR.  */
      var = lookup_decl_in_outer_ctx (ovar, ctx);

      t = omp_member_access_dummy_var (var);
      if (t)
	{
	  /* Member access (this->field): send the underlying access
	     expression, remapped into the right context.  */
	  var = DECL_VALUE_EXPR (var);
	  tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
	  if (o != t)
	    var = unshare_and_remap (var, t, o);
	  else
	    var = unshare_expr (var);
	}

      if (use_pointer_for_field (ovar, ctx))
	{
	  /* Shared by address: store &VAR into the record.  */
	  x = build_sender_ref (ovar, ctx);
	  if (TREE_CODE (TREE_TYPE (f)) == ARRAY_TYPE
	      && TREE_TYPE (f) == TREE_TYPE (ovar))
	    {
	      gcc_assert (is_parallel_ctx (ctx)
			  && DECL_ARTIFICIAL (ovar));
	      /* _condtemp_ clause.  */
	      var = build_constructor (TREE_TYPE (x), NULL);
	    }
	  else
	    var = build_fold_addr_expr (var);
	  gimplify_assign (x, var, ilist);
	}
      else
	{
	  /* Shared by value: copy in before the region and (when the
	     region may have modified it) back out afterwards.  */
	  x = build_sender_ref (ovar, ctx);
	  gimplify_assign (x, var, ilist);

	  if (!TREE_READONLY (var)
	      /* We don't need to receive a new reference to a result
	         or parm decl.  In fact we may not store to it as we will
		 invalidate any pending RSO and generate wrong gimple
		 during inlining.  */
	      && !((TREE_CODE (var) == RESULT_DECL
		    || TREE_CODE (var) == PARM_DECL)
		   && DECL_BY_REFERENCE (var)))
	    {
	      x = build_sender_ref (ovar, ctx);
	      gimplify_assign (var, x, olist);
	    }
	}
    }
}
7498
/* Emit an OpenACC head marker call, encapsulating the partitioning and
   other information that must be processed by the target compiler.
   Return the maximum number of dimensions the associated loop might
   be partitioned over.  The marker is an IFN_UNIQUE call carrying
   (kind, ddvar, levels, tag [, gang-static-operand]), where TAG is a
   bitmask of OLF_* flags describing the loop's partitioning clauses.  */

static unsigned
lower_oacc_head_mark (location_t loc, tree ddvar, tree clauses,
		      gimple_seq *seq, omp_context *ctx)
{
  unsigned levels = 0;
  unsigned tag = 0;
  tree gang_static = NULL_TREE;
  auto_vec<tree, 5> args;

  args.quick_push (build_int_cst
		   (integer_type_node, IFN_UNIQUE_OACC_HEAD_MARK));
  args.quick_push (ddvar);
  /* Accumulate the OLF_* tag bits and count explicitly requested
     partitioning levels (gang/worker/vector).  */
  for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_GANG:
	  tag |= OLF_DIM_GANG;
	  gang_static = OMP_CLAUSE_GANG_STATIC_EXPR (c);
	  /* static:* is represented by -1, and we can ignore it, as
	     scheduling is always static.  */
	  if (gang_static && integer_minus_onep (gang_static))
	    gang_static = NULL_TREE;
	  levels++;
	  break;

	case OMP_CLAUSE_WORKER:
	  tag |= OLF_DIM_WORKER;
	  levels++;
	  break;

	case OMP_CLAUSE_VECTOR:
	  tag |= OLF_DIM_VECTOR;
	  levels++;
	  break;

	case OMP_CLAUSE_SEQ:
	  tag |= OLF_SEQ;
	  break;

	case OMP_CLAUSE_AUTO:
	  tag |= OLF_AUTO;
	  break;

	case OMP_CLAUSE_INDEPENDENT:
	  tag |= OLF_INDEPENDENT;
	  break;

	case OMP_CLAUSE_TILE:
	  tag |= OLF_TILE;
	  break;

	default:
	  continue;
	}
    }

  if (gang_static)
    {
      if (DECL_P (gang_static))
	gang_static = build_outer_var_ref (gang_static, ctx);
      tag |= OLF_GANG_STATIC;
    }

  /* Sanity-check the enclosing compute construct; only parallel/serial
     (or an orphaned loop, TGT == NULL) are expected here.  */
  omp_context *tgt = enclosing_target_ctx (ctx);
  if (!tgt || is_oacc_parallel_or_serial (tgt))
    ;
  else if (is_oacc_kernels (tgt))
    /* Not using this loops handling inside OpenACC 'kernels' regions.  */
    gcc_unreachable ();
  else
    gcc_unreachable ();

  /* In a parallel region, loops are implicitly INDEPENDENT.  */
  if (!tgt || is_oacc_parallel_or_serial (tgt))
    tag |= OLF_INDEPENDENT;

  if (tag & OLF_TILE)
    /* Tiling could use all 3 levels.  */
    levels = 3;
  else
    {
      /* A loop lacking SEQ, GANG, WORKER and/or VECTOR could be AUTO.
	 Ensure at least one level, or 2 for possible auto
	 partitioning */
      bool maybe_auto = !(tag & (((GOMP_DIM_MASK (GOMP_DIM_MAX) - 1)
				  << OLF_DIM_BASE) | OLF_SEQ));

      if (levels < 1u + maybe_auto)
	levels = 1u + maybe_auto;
    }

  args.quick_push (build_int_cst (integer_type_node, levels));
  args.quick_push (build_int_cst (integer_type_node, tag));
  if (gang_static)
    args.quick_push (gang_static);

  gcall *call = gimple_build_call_internal_vec (IFN_UNIQUE, args);
  gimple_set_location (call, loc);
  gimple_set_lhs (call, ddvar);
  gimple_seq_add_stmt (seq, call);

  return levels;
}
7608
7609 /* Emit an OpenACC lopp head or tail marker to SEQ. LEVEL is the
7610 partitioning level of the enclosed region. */
7611
7612 static void
lower_oacc_loop_marker(location_t loc,tree ddvar,bool head,tree tofollow,gimple_seq * seq)7613 lower_oacc_loop_marker (location_t loc, tree ddvar, bool head,
7614 tree tofollow, gimple_seq *seq)
7615 {
7616 int marker_kind = (head ? IFN_UNIQUE_OACC_HEAD_MARK
7617 : IFN_UNIQUE_OACC_TAIL_MARK);
7618 tree marker = build_int_cst (integer_type_node, marker_kind);
7619 int nargs = 2 + (tofollow != NULL_TREE);
7620 gcall *call = gimple_build_call_internal (IFN_UNIQUE, nargs,
7621 marker, ddvar, tofollow);
7622 gimple_set_location (call, loc);
7623 gimple_set_lhs (call, ddvar);
7624 gimple_seq_add_stmt (seq, call);
7625 }
7626
/* Generate the before and after OpenACC loop sequences.  CLAUSES are
   the loop clauses, from which we extract reductions.  Initialize
   HEAD and TAIL.  For each partitioning level we emit a matched
   fork/join pair plus the reduction setup/teardown around them; HEAD
   grows forward (outermost level first) while TAIL is built in reverse
   so joins nest correctly inside one another.  */

static void
lower_oacc_head_tail (location_t loc, tree clauses,
		      gimple_seq *head, gimple_seq *tail, omp_context *ctx)
{
  bool inner = false;
  tree ddvar = create_tmp_var (integer_type_node, ".data_dep");
  gimple_seq_add_stmt (head, gimple_build_assign (ddvar, integer_zero_node));

  unsigned count = lower_oacc_head_mark (loc, ddvar, clauses, head, ctx);
  tree fork_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_FORK);
  tree join_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_JOIN);

  gcc_assert (count);
  for (unsigned done = 1; count; count--, done++)
    {
      gimple_seq fork_seq = NULL;
      gimple_seq join_seq = NULL;

      /* The actual partitioning level (-1 here) is filled in later by
	 the oaccdevlow pass.  */
      tree place = build_int_cst (integer_type_node, -1);
      gcall *fork = gimple_build_call_internal (IFN_UNIQUE, 3,
						fork_kind, ddvar, place);
      gimple_set_location (fork, loc);
      gimple_set_lhs (fork, ddvar);

      gcall *join = gimple_build_call_internal (IFN_UNIQUE, 3,
						join_kind, ddvar, place);
      gimple_set_location (join, loc);
      gimple_set_lhs (join, ddvar);

      /* Mark the beginning of this level sequence.  */
      if (inner)
	lower_oacc_loop_marker (loc, ddvar, true,
				build_int_cst (integer_type_node, count),
				&fork_seq);
      lower_oacc_loop_marker (loc, ddvar, false,
			      build_int_cst (integer_type_node, done),
			      &join_seq);

      /* Emit reduction setup/init around the fork and fini/teardown
	 around the join for this level.  */
      lower_oacc_reductions (loc, clauses, place, inner,
			     fork, join, &fork_seq, &join_seq, ctx);

      /* Append this level to head.  */
      gimple_seq_add_seq (head, fork_seq);
      /* Prepend it to tail.  */
      gimple_seq_add_seq (&join_seq, *tail);
      *tail = join_seq;

      inner = true;
    }

  /* Mark the end of the sequence.  */
  lower_oacc_loop_marker (loc, ddvar, true, NULL_TREE, head);
  lower_oacc_loop_marker (loc, ddvar, false, NULL_TREE, tail);
}
7685
7686 /* If exceptions are enabled, wrap the statements in BODY in a MUST_NOT_THROW
7687 catch handler and return it. This prevents programs from violating the
7688 structured block semantics with throws. */
7689
7690 static gimple_seq
maybe_catch_exception(gimple_seq body)7691 maybe_catch_exception (gimple_seq body)
7692 {
7693 gimple *g;
7694 tree decl;
7695
7696 if (!flag_exceptions)
7697 return body;
7698
7699 if (lang_hooks.eh_protect_cleanup_actions != NULL)
7700 decl = lang_hooks.eh_protect_cleanup_actions ();
7701 else
7702 decl = builtin_decl_explicit (BUILT_IN_TRAP);
7703
7704 g = gimple_build_eh_must_not_throw (decl);
7705 g = gimple_build_try (body, gimple_seq_alloc_with_stmt (g),
7706 GIMPLE_TRY_CATCH);
7707
7708 return gimple_seq_alloc_with_stmt (g);
7709 }
7710
7711
7712 /* Routines to lower OMP directives into OMP-GIMPLE. */
7713
/* If ctx is a worksharing context inside of a cancellable parallel
   region and it isn't nowait, add lhs to its GIMPLE_OMP_RETURN
   and conditional branch to parallel's cancel_label to handle
   cancellation in the implicit barrier.  */

static void
maybe_add_implicit_barrier_cancel (omp_context *ctx, gimple *omp_return,
				   gimple_seq *body)
{
  gcc_assert (gimple_code (omp_return) == GIMPLE_OMP_RETURN);
  /* nowait regions have no implicit barrier, hence nothing to do.  */
  if (gimple_omp_return_nowait_p (omp_return))
    return;
  /* Walk outwards looking for a cancellable parallel; give up as soon
     as we cross anything other than a taskgroup, since cancellation
     does not propagate through other construct kinds.  */
  for (omp_context *outer = ctx->outer; outer; outer = outer->outer)
    if (gimple_code (outer->stmt) == GIMPLE_OMP_PARALLEL
	&& outer->cancellable)
      {
	/* The barrier's return value (a boolean) tells us whether the
	   region was cancelled; branch to the cancel label if so.  */
	tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_CANCEL);
	tree c_bool_type = TREE_TYPE (TREE_TYPE (fndecl));
	tree lhs = create_tmp_var (c_bool_type);
	gimple_omp_return_set_lhs (omp_return, lhs);
	tree fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
	gimple *g = gimple_build_cond (NE_EXPR, lhs,
				       fold_convert (c_bool_type,
						     boolean_false_node),
				       outer->cancel_label, fallthru_label);
	gimple_seq_add_stmt (body, g);
	gimple_seq_add_stmt (body, gimple_build_label (fallthru_label));
      }
    else if (gimple_code (outer->stmt) != GIMPLE_OMP_TASKGROUP)
      return;
}
7745
7746 /* Find the first task_reduction or reduction clause or return NULL
7747 if there are none. */
7748
7749 static inline tree
omp_task_reductions_find_first(tree clauses,enum tree_code code,enum omp_clause_code ccode)7750 omp_task_reductions_find_first (tree clauses, enum tree_code code,
7751 enum omp_clause_code ccode)
7752 {
7753 while (1)
7754 {
7755 clauses = omp_find_clause (clauses, ccode);
7756 if (clauses == NULL_TREE)
7757 return NULL_TREE;
7758 if (ccode != OMP_CLAUSE_REDUCTION
7759 || code == OMP_TASKLOOP
7760 || OMP_CLAUSE_REDUCTION_TASK (clauses))
7761 return clauses;
7762 clauses = OMP_CLAUSE_CHAIN (clauses);
7763 }
7764 }
7765
7766 static void lower_omp_task_reductions (omp_context *, enum tree_code, tree,
7767 gimple_seq *, gimple_seq *);
7768
/* Lower the OpenMP sections directive in the current statement in GSI_P.
   CTX is the enclosing OMP context for the current statement.  Replaces
   the GIMPLE_OMP_SECTIONS statement with a bind containing the clause
   setup code, the sections statement itself, the sections switch, the
   lowered section bodies, the continue/return markers, and the clause
   teardown code.  */

static void
lower_omp_sections (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree block, control;
  gimple_stmt_iterator tgsi;
  gomp_sections *stmt;
  gimple *t;
  gbind *new_stmt, *bind;
  gimple_seq ilist, dlist, olist, tred_dlist = NULL, clist = NULL, new_body;

  stmt = as_a <gomp_sections *> (gsi_stmt (*gsi_p));

  push_gimplify_context ();

  dlist = NULL;
  ilist = NULL;

  /* Task reductions need a _reductemp_ clause holding a pointer to the
     reduction buffer; set it up and lower the bookkeeping code.  */
  tree rclauses
    = omp_task_reductions_find_first (gimple_omp_sections_clauses (stmt),
				      OMP_SECTIONS, OMP_CLAUSE_REDUCTION);
  tree rtmp = NULL_TREE;
  if (rclauses)
    {
      tree type = build_pointer_type (pointer_sized_int_node);
      tree temp = create_tmp_var (type);
      tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
      OMP_CLAUSE_DECL (c) = temp;
      OMP_CLAUSE_CHAIN (c) = gimple_omp_sections_clauses (stmt);
      gimple_omp_sections_set_clauses (stmt, c);
      lower_omp_task_reductions (ctx, OMP_SECTIONS,
				 gimple_omp_sections_clauses (stmt),
				 &ilist, &tred_dlist);
      rclauses = c;
      rtmp = make_ssa_name (type);
      gimple_seq_add_stmt (&ilist, gimple_build_assign (rtmp, temp));
    }

  tree *clauses_ptr = gimple_omp_sections_clauses_ptr (stmt);
  lower_lastprivate_conditional_clauses (clauses_ptr, ctx);

  lower_rec_input_clauses (gimple_omp_sections_clauses (stmt),
			   &ilist, &dlist, ctx, NULL);

  /* CONTROL carries the index of the section a thread executes; the
     expander builds the dispatch switch on it.  */
  control = create_tmp_var (unsigned_type_node, ".section");
  gimple_omp_sections_set_control (stmt, control);

  /* Lower each GIMPLE_OMP_SECTION body in place, appending the body
     after its section statement and terminating it with a return.  */
  new_body = gimple_omp_body (stmt);
  gimple_omp_set_body (stmt, NULL);
  tgsi = gsi_start (new_body);
  for (; !gsi_end_p (tgsi); gsi_next (&tgsi))
    {
      omp_context *sctx;
      gimple *sec_start;

      sec_start = gsi_stmt (tgsi);
      sctx = maybe_lookup_ctx (sec_start);
      gcc_assert (sctx);

      lower_omp (gimple_omp_body_ptr (sec_start), sctx);
      gsi_insert_seq_after (&tgsi, gimple_omp_body (sec_start),
			    GSI_CONTINUE_LINKING);
      gimple_omp_set_body (sec_start, NULL);

      if (gsi_one_before_end_p (tgsi))
	{
	  /* Lastprivate copy-out code belongs at the end of the last
	     section.  */
	  gimple_seq l = NULL;
	  lower_lastprivate_clauses (gimple_omp_sections_clauses (stmt), NULL,
				     &ilist, &l, &clist, ctx);
	  gsi_insert_seq_after (&tgsi, l, GSI_CONTINUE_LINKING);
	  gimple_omp_section_set_last (sec_start);
	}

      gsi_insert_after (&tgsi, gimple_build_omp_return (false),
			GSI_CONTINUE_LINKING);
    }

  block = make_node (BLOCK);
  bind = gimple_build_bind (NULL, new_body, block);

  /* Reductions are merged after the region body; any conditional
     lastprivate code in CLIST must run inside the same critical
     section as the reduction merges.  */
  olist = NULL;
  lower_reduction_clauses (gimple_omp_sections_clauses (stmt), &olist,
			   &clist, ctx);
  if (clist)
    {
      tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START);
      gcall *g = gimple_build_call (fndecl, 0);
      gimple_seq_add_stmt (&olist, g);
      gimple_seq_add_seq (&olist, clist);
      fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END);
      g = gimple_build_call (fndecl, 0);
      gimple_seq_add_stmt (&olist, g);
    }

  block = make_node (BLOCK);
  new_stmt = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, new_stmt, true);

  pop_gimplify_context (new_stmt);
  gimple_bind_append_vars (new_stmt, ctx->block_vars);
  BLOCK_VARS (block) = gimple_bind_vars (bind);
  if (BLOCK_VARS (block))
    TREE_USED (block) = 1;

  /* Stitch everything together in execution order: clause setup,
     sections + switch + bodies, continue, reductions, destructors,
     and the final barrier/return.  */
  new_body = NULL;
  gimple_seq_add_seq (&new_body, ilist);
  gimple_seq_add_stmt (&new_body, stmt);
  gimple_seq_add_stmt (&new_body, gimple_build_omp_sections_switch ());
  gimple_seq_add_stmt (&new_body, bind);

  t = gimple_build_omp_continue (control, control);
  gimple_seq_add_stmt (&new_body, t);

  gimple_seq_add_seq (&new_body, olist);
  if (ctx->cancellable)
    gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
  gimple_seq_add_seq (&new_body, dlist);

  new_body = maybe_catch_exception (new_body);

  bool nowait = omp_find_clause (gimple_omp_sections_clauses (stmt),
				 OMP_CLAUSE_NOWAIT) != NULL_TREE;
  t = gimple_build_omp_return (nowait);
  gimple_seq_add_stmt (&new_body, t);
  gimple_seq_add_seq (&new_body, tred_dlist);
  maybe_add_implicit_barrier_cancel (ctx, t, &new_body);

  if (rclauses)
    OMP_CLAUSE_DECL (rclauses) = rtmp;

  gimple_bind_set_body (new_stmt, new_body);
}
7903
7904
7905 /* A subroutine of lower_omp_single. Expand the simple form of
7906 a GIMPLE_OMP_SINGLE, without a copyprivate clause:
7907
7908 if (GOMP_single_start ())
7909 BODY;
7910 [ GOMP_barrier (); ] -> unless 'nowait' is present.
7911
7912 FIXME. It may be better to delay expanding the logic of this until
7913 pass_expand_omp. The expanded logic may make the job more difficult
7914 to a synchronization analysis pass. */
7915
7916 static void
lower_omp_single_simple(gomp_single * single_stmt,gimple_seq * pre_p)7917 lower_omp_single_simple (gomp_single *single_stmt, gimple_seq *pre_p)
7918 {
7919 location_t loc = gimple_location (single_stmt);
7920 tree tlabel = create_artificial_label (loc);
7921 tree flabel = create_artificial_label (loc);
7922 gimple *call, *cond;
7923 tree lhs, decl;
7924
7925 decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_START);
7926 lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (decl)));
7927 call = gimple_build_call (decl, 0);
7928 gimple_call_set_lhs (call, lhs);
7929 gimple_seq_add_stmt (pre_p, call);
7930
7931 cond = gimple_build_cond (EQ_EXPR, lhs,
7932 fold_convert_loc (loc, TREE_TYPE (lhs),
7933 boolean_true_node),
7934 tlabel, flabel);
7935 gimple_seq_add_stmt (pre_p, cond);
7936 gimple_seq_add_stmt (pre_p, gimple_build_label (tlabel));
7937 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
7938 gimple_seq_add_stmt (pre_p, gimple_build_label (flabel));
7939 }
7940
7941
7942 /* A subroutine of lower_omp_single. Expand the simple form of
7943 a GIMPLE_OMP_SINGLE, with a copyprivate clause:
7944
7945 #pragma omp single copyprivate (a, b, c)
7946
7947 Create a new structure to hold copies of 'a', 'b' and 'c' and emit:
7948
7949 {
7950 if ((copyout_p = GOMP_single_copy_start ()) == NULL)
7951 {
7952 BODY;
7953 copyout.a = a;
7954 copyout.b = b;
7955 copyout.c = c;
7956 GOMP_single_copy_end (©out);
7957 }
7958 else
7959 {
7960 a = copyout_p->a;
7961 b = copyout_p->b;
7962 c = copyout_p->c;
7963 }
7964 GOMP_barrier ();
7965 }
7966
7967 FIXME. It may be better to delay expanding the logic of this until
7968 pass_expand_omp. The expanded logic may make the job more difficult
7969 to a synchronization analysis pass. */
7970
7971 static void
lower_omp_single_copy(gomp_single * single_stmt,gimple_seq * pre_p,omp_context * ctx)7972 lower_omp_single_copy (gomp_single *single_stmt, gimple_seq *pre_p,
7973 omp_context *ctx)
7974 {
7975 tree ptr_type, t, l0, l1, l2, bfn_decl;
7976 gimple_seq copyin_seq;
7977 location_t loc = gimple_location (single_stmt);
7978
7979 ctx->sender_decl = create_tmp_var (ctx->record_type, ".omp_copy_o");
7980
7981 ptr_type = build_pointer_type (ctx->record_type);
7982 ctx->receiver_decl = create_tmp_var (ptr_type, ".omp_copy_i");
7983
7984 l0 = create_artificial_label (loc);
7985 l1 = create_artificial_label (loc);
7986 l2 = create_artificial_label (loc);
7987
7988 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_START);
7989 t = build_call_expr_loc (loc, bfn_decl, 0);
7990 t = fold_convert_loc (loc, ptr_type, t);
7991 gimplify_assign (ctx->receiver_decl, t, pre_p);
7992
7993 t = build2 (EQ_EXPR, boolean_type_node, ctx->receiver_decl,
7994 build_int_cst (ptr_type, 0));
7995 t = build3 (COND_EXPR, void_type_node, t,
7996 build_and_jump (&l0), build_and_jump (&l1));
7997 gimplify_and_add (t, pre_p);
7998
7999 gimple_seq_add_stmt (pre_p, gimple_build_label (l0));
8000
8001 gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
8002
8003 copyin_seq = NULL;
8004 lower_copyprivate_clauses (gimple_omp_single_clauses (single_stmt), pre_p,
8005 ©in_seq, ctx);
8006
8007 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
8008 bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_END);
8009 t = build_call_expr_loc (loc, bfn_decl, 1, t);
8010 gimplify_and_add (t, pre_p);
8011
8012 t = build_and_jump (&l2);
8013 gimplify_and_add (t, pre_p);
8014
8015 gimple_seq_add_stmt (pre_p, gimple_build_label (l1));
8016
8017 gimple_seq_add_seq (pre_p, copyin_seq);
8018
8019 gimple_seq_add_stmt (pre_p, gimple_build_label (l2));
8020 }
8021
8022
8023 /* Expand code for an OpenMP single directive. */
8024
8025 static void
lower_omp_single(gimple_stmt_iterator * gsi_p,omp_context * ctx)8026 lower_omp_single (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8027 {
8028 tree block;
8029 gomp_single *single_stmt = as_a <gomp_single *> (gsi_stmt (*gsi_p));
8030 gbind *bind;
8031 gimple_seq bind_body, bind_body_tail = NULL, dlist;
8032
8033 push_gimplify_context ();
8034
8035 block = make_node (BLOCK);
8036 bind = gimple_build_bind (NULL, NULL, block);
8037 gsi_replace (gsi_p, bind, true);
8038 bind_body = NULL;
8039 dlist = NULL;
8040 lower_rec_input_clauses (gimple_omp_single_clauses (single_stmt),
8041 &bind_body, &dlist, ctx, NULL);
8042 lower_omp (gimple_omp_body_ptr (single_stmt), ctx);
8043
8044 gimple_seq_add_stmt (&bind_body, single_stmt);
8045
8046 if (ctx->record_type)
8047 lower_omp_single_copy (single_stmt, &bind_body, ctx);
8048 else
8049 lower_omp_single_simple (single_stmt, &bind_body);
8050
8051 gimple_omp_set_body (single_stmt, NULL);
8052
8053 gimple_seq_add_seq (&bind_body, dlist);
8054
8055 bind_body = maybe_catch_exception (bind_body);
8056
8057 bool nowait = omp_find_clause (gimple_omp_single_clauses (single_stmt),
8058 OMP_CLAUSE_NOWAIT) != NULL_TREE;
8059 gimple *g = gimple_build_omp_return (nowait);
8060 gimple_seq_add_stmt (&bind_body_tail, g);
8061 maybe_add_implicit_barrier_cancel (ctx, g, &bind_body_tail);
8062 if (ctx->record_type)
8063 {
8064 gimple_stmt_iterator gsi = gsi_start (bind_body_tail);
8065 tree clobber = build_clobber (ctx->record_type);
8066 gsi_insert_after (&gsi, gimple_build_assign (ctx->sender_decl,
8067 clobber), GSI_SAME_STMT);
8068 }
8069 gimple_seq_add_seq (&bind_body, bind_body_tail);
8070 gimple_bind_set_body (bind, bind_body);
8071
8072 pop_gimplify_context (bind);
8073
8074 gimple_bind_append_vars (bind, ctx->block_vars);
8075 BLOCK_VARS (block) = ctx->block_vars;
8076 if (BLOCK_VARS (block))
8077 TREE_USED (block) = 1;
8078 }
8079
8080
8081 /* Expand code for an OpenMP master directive. */
8082
8083 static void
lower_omp_master(gimple_stmt_iterator * gsi_p,omp_context * ctx)8084 lower_omp_master (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8085 {
8086 tree block, lab = NULL, x, bfn_decl;
8087 gimple *stmt = gsi_stmt (*gsi_p);
8088 gbind *bind;
8089 location_t loc = gimple_location (stmt);
8090 gimple_seq tseq;
8091
8092 push_gimplify_context ();
8093
8094 block = make_node (BLOCK);
8095 bind = gimple_build_bind (NULL, NULL, block);
8096 gsi_replace (gsi_p, bind, true);
8097 gimple_bind_add_stmt (bind, stmt);
8098
8099 bfn_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
8100 x = build_call_expr_loc (loc, bfn_decl, 0);
8101 x = build2 (EQ_EXPR, boolean_type_node, x, integer_zero_node);
8102 x = build3 (COND_EXPR, void_type_node, x, NULL, build_and_jump (&lab));
8103 tseq = NULL;
8104 gimplify_and_add (x, &tseq);
8105 gimple_bind_add_seq (bind, tseq);
8106
8107 lower_omp (gimple_omp_body_ptr (stmt), ctx);
8108 gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
8109 gimple_bind_add_seq (bind, gimple_omp_body (stmt));
8110 gimple_omp_set_body (stmt, NULL);
8111
8112 gimple_bind_add_stmt (bind, gimple_build_label (lab));
8113
8114 gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
8115
8116 pop_gimplify_context (bind);
8117
8118 gimple_bind_append_vars (bind, ctx->block_vars);
8119 BLOCK_VARS (block) = ctx->block_vars;
8120 }
8121
8122 /* Helper function for lower_omp_task_reductions. For a specific PASS
8123 find out the current clause it should be processed, or return false
8124 if all have been processed already. */
8125
8126 static inline bool
omp_task_reduction_iterate(int pass,enum tree_code code,enum omp_clause_code ccode,tree * c,tree * decl,tree * type,tree * next)8127 omp_task_reduction_iterate (int pass, enum tree_code code,
8128 enum omp_clause_code ccode, tree *c, tree *decl,
8129 tree *type, tree *next)
8130 {
8131 for (; *c; *c = omp_find_clause (OMP_CLAUSE_CHAIN (*c), ccode))
8132 {
8133 if (ccode == OMP_CLAUSE_REDUCTION
8134 && code != OMP_TASKLOOP
8135 && !OMP_CLAUSE_REDUCTION_TASK (*c))
8136 continue;
8137 *decl = OMP_CLAUSE_DECL (*c);
8138 *type = TREE_TYPE (*decl);
8139 if (TREE_CODE (*decl) == MEM_REF)
8140 {
8141 if (pass != 1)
8142 continue;
8143 }
8144 else
8145 {
8146 if (omp_is_reference (*decl))
8147 *type = TREE_TYPE (*type);
8148 if (pass != (!TREE_CONSTANT (TYPE_SIZE_UNIT (*type))))
8149 continue;
8150 }
8151 *next = omp_find_clause (OMP_CLAUSE_CHAIN (*c), ccode);
8152 return true;
8153 }
8154 *decl = NULL_TREE;
8155 *type = NULL_TREE;
8156 *next = NULL_TREE;
8157 return false;
8158 }
8159
8160 /* Lower task_reduction and reduction clauses (the latter unless CODE is
8161 OMP_TASKGROUP only with task modifier). Register mapping of those in
8162 START sequence and reducing them and unregister them in the END sequence. */
8163
static void
lower_omp_task_reductions (omp_context *ctx, enum tree_code code, tree clauses,
			   gimple_seq *start, gimple_seq *end)
{
  /* TASKGROUP collects task_reduction clauses; the other constructs use
     reduction clauses (filtered to task ones by
     omp_task_reduction_iterate).  */
  enum omp_clause_code ccode
    = (code == OMP_TASKGROUP
       ? OMP_CLAUSE_TASK_REDUCTION : OMP_CLAUSE_REDUCTION);
  tree cancellable = NULL_TREE;
  clauses = omp_task_reductions_find_first (clauses, code, ccode);
  if (clauses == NULL_TREE)
    return;
  if (code == OMP_FOR || code == OMP_SECTIONS)
    {
      /* Detect a worksharing construct nested (possibly through
	 taskgroups) inside a cancellable parallel; the END sequence must
	 then cope with cancellation.  CANCELLABLE is a placeholder
	 (error_mark_node) for now and is replaced by the _REDUCTEMP_
	 decl further below.  */
      for (omp_context *outer = ctx->outer; outer; outer = outer->outer)
	if (gimple_code (outer->stmt) == GIMPLE_OMP_PARALLEL
	    && outer->cancellable)
	  {
	    cancellable = error_mark_node;
	    break;
	  }
	else if (gimple_code (outer->stmt) != GIMPLE_OMP_TASKGROUP)
	  break;
    }
  /* Lay out the per-thread chunk record: one data field plus one bool
     flag field per reduction, constant-sized decls first (pass 0), then
     variable-sized / array-section ones (pass 1).  */
  tree record_type = lang_hooks.types.make_type (RECORD_TYPE);
  tree *last = &TYPE_FIELDS (record_type);
  unsigned cnt = 0;
  if (cancellable)
    {
      /* Two extra leading fields (a pointer and an int) used only in the
	 cancellable case.  */
      tree field = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
			       ptr_type_node);
      tree ifield = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
				integer_type_node);
      *last = field;
      DECL_CHAIN (field) = ifield;
      last = &DECL_CHAIN (ifield);
      DECL_CONTEXT (field) = record_type;
      if (TYPE_ALIGN (record_type) < DECL_ALIGN (field))
	SET_TYPE_ALIGN (record_type, DECL_ALIGN (field));
      DECL_CONTEXT (ifield) = record_type;
      if (TYPE_ALIGN (record_type) < DECL_ALIGN (ifield))
	SET_TYPE_ALIGN (record_type, DECL_ALIGN (ifield));
    }
  for (int pass = 0; pass < 2; pass++)
    {
      tree decl, type, next;
      for (tree c = clauses;
	   omp_task_reduction_iterate (pass, code, ccode,
				       &c, &decl, &type, &next); c = next)
	{
	  ++cnt;
	  tree new_type = type;
	  if (ctx->outer)
	    new_type = remap_type (type, &ctx->outer->cb);
	  tree field
	    = build_decl (OMP_CLAUSE_LOCATION (c), FIELD_DECL,
			  DECL_P (decl) ? DECL_NAME (decl) : NULL_TREE,
			  new_type);
	  if (DECL_P (decl) && type == TREE_TYPE (decl))
	    {
	      SET_DECL_ALIGN (field, DECL_ALIGN (decl));
	      DECL_USER_ALIGN (field) = DECL_USER_ALIGN (decl);
	      TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (decl);
	    }
	  else
	    SET_DECL_ALIGN (field, TYPE_ALIGN (type));
	  DECL_CONTEXT (field) = record_type;
	  if (TYPE_ALIGN (record_type) < DECL_ALIGN (field))
	    SET_TYPE_ALIGN (record_type, DECL_ALIGN (field));
	  *last = field;
	  last = &DECL_CHAIN (field);
	  /* The bool field right after each data field records whether
	     that thread's private copy has been initialized (and thus
	     needs reducing and destructing) — see the big comment before
	     the END loop below.  */
	  tree bfield
	    = build_decl (OMP_CLAUSE_LOCATION (c), FIELD_DECL, NULL_TREE,
			  boolean_type_node);
	  DECL_CONTEXT (bfield) = record_type;
	  if (TYPE_ALIGN (record_type) < DECL_ALIGN (bfield))
	    SET_TYPE_ALIGN (record_type, DECL_ALIGN (bfield));
	  *last = bfield;
	  last = &DECL_CHAIN (bfield);
	}
    }
  *last = NULL_TREE;
  layout_type (record_type);

  /* Build up an array which registers with the runtime all the reductions
     and deregisters them at the end.  Format documented in libgomp/task.c.  */
  tree atype = build_array_type_nelts (pointer_sized_int_node, 7 + cnt * 3);
  tree avar = create_tmp_var_raw (atype);
  gimple_add_tmp_var (avar);
  TREE_ADDRESSABLE (avar) = 1;
  /* avar[0] = number of reductions.  */
  tree r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_zero_node,
		   NULL_TREE, NULL_TREE);
  tree t = build_int_cst (pointer_sized_int_node, cnt);
  gimple_seq_add_stmt (start, gimple_build_assign (r, t));
  gimple_seq seq = NULL;
  /* avar[1] = per-thread chunk size: sizeof (record_type) rounded up to
     a multiple of CACHESZ (assumed cache line size).  */
  tree sz = fold_convert (pointer_sized_int_node,
			  TYPE_SIZE_UNIT (record_type));
  int cachesz = 64;
  sz = fold_build2 (PLUS_EXPR, pointer_sized_int_node, sz,
		    build_int_cst (pointer_sized_int_node, cachesz - 1));
  sz = fold_build2 (BIT_AND_EXPR, pointer_sized_int_node, sz,
		    build_int_cst (pointer_sized_int_node, ~(cachesz - 1)));
  ctx->task_reductions.create (1 + cnt);
  ctx->task_reduction_map = new hash_map<tree, unsigned>;
  /* Record compile-time constant sizes/offsets (NULL_TREE when not
     constant) for later lookups through task_reduction_map.  */
  ctx->task_reductions.quick_push (TREE_CODE (sz) == INTEGER_CST
				   ? sz : NULL_TREE);
  sz = force_gimple_operand (sz, &seq, true, NULL_TREE);
  gimple_seq_add_seq (start, seq);
  r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_one_node,
	      NULL_TREE, NULL_TREE);
  gimple_seq_add_stmt (start, gimple_build_assign (r, sz));
  /* avar[2] = alignment; the END sequence below re-reads this slot into
     DATA (presumably by then set by the runtime to the base of the
     allocated per-thread array — see the format in libgomp/task.c).  */
  r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (2),
	      NULL_TREE, NULL_TREE);
  t = build_int_cst (pointer_sized_int_node,
		     MAX (TYPE_ALIGN_UNIT (record_type), (unsigned) cachesz));
  gimple_seq_add_stmt (start, gimple_build_assign (r, t));
  /* avar[3] = -1, avar[4] = 0 (slot meanings per libgomp/task.c).  */
  r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (3),
	      NULL_TREE, NULL_TREE);
  t = build_int_cst (pointer_sized_int_node, -1);
  gimple_seq_add_stmt (start, gimple_build_assign (r, t));
  r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (4),
	      NULL_TREE, NULL_TREE);
  t = build_int_cst (pointer_sized_int_node, 0);
  gimple_seq_add_stmt (start, gimple_build_assign (r, t));

  /* In end, build a loop that iterates from 0 to < omp_get_num_threads ()
     and for each task reduction checks a bool right after the private variable
     within that thread's chunk; if the bool is clear, it hasn't been
     initialized and thus isn't going to be reduced nor destructed, otherwise
     reduce and destruct it.  */
  tree idx = create_tmp_var (size_type_node);
  gimple_seq_add_stmt (end, gimple_build_assign (idx, size_zero_node));
  tree num_thr_sz = create_tmp_var (size_type_node);
  tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
  tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
  tree lab3 = NULL_TREE;
  gimple *g;
  if (code == OMP_FOR || code == OMP_SECTIONS)
    {
      /* For worksharing constructs, only perform it in the master thread,
	 with the exception of cancelled implicit barriers - then only handle
	 the current thread.  */
      tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
      t = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
      tree thr_num = create_tmp_var (integer_type_node);
      g = gimple_build_call (t, 0);
      gimple_call_set_lhs (g, thr_num);
      gimple_seq_add_stmt (end, g);
      if (cancellable)
	{
	  tree c;
	  tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
	  tree lab6 = create_artificial_label (UNKNOWN_LOCATION);
	  lab3 = create_artificial_label (UNKNOWN_LOCATION);
	  if (code == OMP_FOR)
	    c = gimple_omp_for_clauses (ctx->stmt);
	  else /* if (code == OMP_SECTIONS) */
	    c = gimple_omp_sections_clauses (ctx->stmt);
	  c = OMP_CLAUSE_DECL (omp_find_clause (c, OMP_CLAUSE__REDUCTEMP_));
	  /* From here on CANCELLABLE is the _REDUCTEMP_ decl, tested at
	     runtime against zero to detect cancellation.  */
	  cancellable = c;
	  g = gimple_build_cond (NE_EXPR, c, build_zero_cst (TREE_TYPE (c)),
				 lab5, lab6);
	  gimple_seq_add_stmt (end, g);
	  gimple_seq_add_stmt (end, gimple_build_label (lab5));
	  /* Cancelled: restrict the loop to just this thread's chunk,
	     i.e. [thr_num, thr_num + 1).  */
	  g = gimple_build_assign (idx, NOP_EXPR, thr_num);
	  gimple_seq_add_stmt (end, g);
	  g = gimple_build_assign (num_thr_sz, PLUS_EXPR, idx,
				   build_one_cst (TREE_TYPE (idx)));
	  gimple_seq_add_stmt (end, g);
	  gimple_seq_add_stmt (end, gimple_build_goto (lab3));
	  gimple_seq_add_stmt (end, gimple_build_label (lab6));
	}
      g = gimple_build_cond (NE_EXPR, thr_num, integer_zero_node, lab2, lab4);
      gimple_seq_add_stmt (end, g);
      gimple_seq_add_stmt (end, gimple_build_label (lab4));
    }
  if (code != OMP_PARALLEL)
    {
      t = builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS);
      tree num_thr = create_tmp_var (integer_type_node);
      g = gimple_build_call (t, 0);
      gimple_call_set_lhs (g, num_thr);
      gimple_seq_add_stmt (end, g);
      g = gimple_build_assign (num_thr_sz, NOP_EXPR, num_thr);
      gimple_seq_add_stmt (end, g);
      if (cancellable)
	gimple_seq_add_stmt (end, gimple_build_label (lab3));
    }
  else
    {
      /* For parallel, the iteration count comes from the _REDUCTEMP_
	 temporary instead of omp_get_num_threads.  */
      tree c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
				OMP_CLAUSE__REDUCTEMP_);
      t = fold_convert (pointer_sized_int_node, OMP_CLAUSE_DECL (c));
      t = fold_convert (size_type_node, t);
      gimplify_assign (num_thr_sz, t, end);
    }
  t = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (2),
	      NULL_TREE, NULL_TREE);
  tree data = create_tmp_var (pointer_sized_int_node);
  gimple_seq_add_stmt (end, gimple_build_assign (data, t));
  gimple_seq_add_stmt (end, gimple_build_label (lab1));
  tree ptr;
  /* A typed pointer is only usable when the record has constant size;
     otherwise fields are addressed by byte offsets off a void *.  */
  if (TREE_CODE (TYPE_SIZE_UNIT (record_type)) == INTEGER_CST)
    ptr = create_tmp_var (build_pointer_type (record_type));
  else
    ptr = create_tmp_var (ptr_type_node);
  gimple_seq_add_stmt (end, gimple_build_assign (ptr, NOP_EXPR, data));

  tree field = TYPE_FIELDS (record_type);
  cnt = 0;
  if (cancellable)
    /* Skip the two extra leading fields added above.  */
    field = DECL_CHAIN (DECL_CHAIN (field));
  for (int pass = 0; pass < 2; pass++)
    {
      tree decl, type, next;
      for (tree c = clauses;
	   omp_task_reduction_iterate (pass, code, ccode,
				       &c, &decl, &type, &next); c = next)
	{
	  tree var = decl, ref;
	  if (TREE_CODE (decl) == MEM_REF)
	    {
	      /* Array section reduction: dig the base variable out of the
		 MEM_REF's address operand.  */
	      var = TREE_OPERAND (var, 0);
	      if (TREE_CODE (var) == POINTER_PLUS_EXPR)
		var = TREE_OPERAND (var, 0);
	      tree v = var;
	      if (TREE_CODE (var) == ADDR_EXPR)
		var = TREE_OPERAND (var, 0);
	      else if (TREE_CODE (var) == INDIRECT_REF)
		var = TREE_OPERAND (var, 0);
	      tree orig_var = var;
	      if (is_variable_sized (var))
		{
		  gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
		  var = DECL_VALUE_EXPR (var);
		  gcc_assert (TREE_CODE (var) == INDIRECT_REF);
		  var = TREE_OPERAND (var, 0);
		  gcc_assert (DECL_P (var));
		}
	      t = ref = maybe_lookup_decl_in_outer_ctx (var, ctx);
	      if (orig_var != var)
		gcc_assert (TREE_CODE (v) == ADDR_EXPR);
	      else if (TREE_CODE (v) == ADDR_EXPR)
		t = build_fold_addr_expr (t);
	      else if (TREE_CODE (v) == INDIRECT_REF)
		t = build_fold_indirect_ref (t);
	      if (TREE_CODE (TREE_OPERAND (decl, 0)) == POINTER_PLUS_EXPR)
		{
		  tree b = TREE_OPERAND (TREE_OPERAND (decl, 0), 1);
		  b = maybe_lookup_decl_in_outer_ctx (b, ctx);
		  t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, b);
		}
	      if (!integer_zerop (TREE_OPERAND (decl, 1)))
		t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t,
				 fold_convert (size_type_node,
					       TREE_OPERAND (decl, 1)));
	    }
	  else
	    {
	      t = ref = maybe_lookup_decl_in_outer_ctx (var, ctx);
	      if (!omp_is_reference (decl))
		t = build_fold_addr_expr (t);
	    }
	  /* avar[7 + cnt * 3] = address of the original variable.  */
	  t = fold_convert (pointer_sized_int_node, t);
	  seq = NULL;
	  t = force_gimple_operand (t, &seq, true, NULL_TREE);
	  gimple_seq_add_seq (start, seq);
	  r = build4 (ARRAY_REF, pointer_sized_int_node, avar,
		      size_int (7 + cnt * 3), NULL_TREE, NULL_TREE);
	  gimple_seq_add_stmt (start, gimple_build_assign (r, t));
	  /* avar[7 + cnt * 3 + 1] = byte offset of this reduction's field
	     within the per-thread chunk.  */
	  t = unshare_expr (byte_position (field));
	  t = fold_convert (pointer_sized_int_node, t);
	  ctx->task_reduction_map->put (c, cnt);
	  ctx->task_reductions.quick_push (TREE_CODE (t) == INTEGER_CST
					   ? t : NULL_TREE);
	  seq = NULL;
	  t = force_gimple_operand (t, &seq, true, NULL_TREE);
	  gimple_seq_add_seq (start, seq);
	  r = build4 (ARRAY_REF, pointer_sized_int_node, avar,
		      size_int (7 + cnt * 3 + 1), NULL_TREE, NULL_TREE);
	  gimple_seq_add_stmt (start, gimple_build_assign (r, t));

	  /* COND tests the "initialized" bool flag stored right after
	     the field in this thread's chunk.  */
	  tree bfield = DECL_CHAIN (field);
	  tree cond;
	  if (code == OMP_PARALLEL || code == OMP_FOR || code == OMP_SECTIONS)
	    /* In parallel or worksharing all threads unconditionally
	       initialize all their task reduction private variables.  */
	    cond = boolean_true_node;
	  else if (TREE_TYPE (ptr) == ptr_type_node)
	    {
	      cond = build2 (POINTER_PLUS_EXPR, ptr_type_node, ptr,
			     unshare_expr (byte_position (bfield)));
	      seq = NULL;
	      cond = force_gimple_operand (cond, &seq, true, NULL_TREE);
	      gimple_seq_add_seq (end, seq);
	      tree pbool = build_pointer_type (TREE_TYPE (bfield));
	      cond = build2 (MEM_REF, TREE_TYPE (bfield), cond,
			     build_int_cst (pbool, 0));
	    }
	  else
	    cond = build3 (COMPONENT_REF, TREE_TYPE (bfield),
			   build_simple_mem_ref (ptr), bfield, NULL_TREE);
	  tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
	  tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
	  tree condv = create_tmp_var (boolean_type_node);
	  gimple_seq_add_stmt (end, gimple_build_assign (condv, cond));
	  g = gimple_build_cond (NE_EXPR, condv, boolean_false_node,
				 lab3, lab4);
	  gimple_seq_add_stmt (end, g);
	  gimple_seq_add_stmt (end, gimple_build_label (lab3));
	  if (cancellable && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) == NULL_TREE)
	    {
	      /* If this reduction doesn't need destruction and parallel
		 has been cancelled, there is nothing to do for this
		 reduction, so jump around the merge operation.  */
	      tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
	      g = gimple_build_cond (NE_EXPR, cancellable,
				     build_zero_cst (TREE_TYPE (cancellable)),
				     lab4, lab5);
	      gimple_seq_add_stmt (end, g);
	      gimple_seq_add_stmt (end, gimple_build_label (lab5));
	    }

	  /* NEW_VAR is this thread's private copy within the chunk.  */
	  tree new_var;
	  if (TREE_TYPE (ptr) == ptr_type_node)
	    {
	      new_var = build2 (POINTER_PLUS_EXPR, ptr_type_node, ptr,
				unshare_expr (byte_position (field)));
	      seq = NULL;
	      new_var = force_gimple_operand (new_var, &seq, true, NULL_TREE);
	      gimple_seq_add_seq (end, seq);
	      tree pbool = build_pointer_type (TREE_TYPE (field));
	      new_var = build2 (MEM_REF, TREE_TYPE (field), new_var,
				build_int_cst (pbool, 0));
	    }
	  else
	    new_var = build3 (COMPONENT_REF, TREE_TYPE (field),
			      build_simple_mem_ref (ptr), field, NULL_TREE);

	  enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
	  if (TREE_CODE (decl) != MEM_REF && omp_is_reference (decl))
	    ref = build_simple_mem_ref (ref);
	  /* reduction(-:var) sums up the partial results, so it acts
	     identically to reduction(+:var).  */
	  if (rcode == MINUS_EXPR)
	    rcode = PLUS_EXPR;
	  if (TREE_CODE (decl) == MEM_REF)
	    {
	      /* Array section: loop over the elements merging (and if
		 needed destructing) each one.  */
	      tree type = TREE_TYPE (new_var);
	      tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
	      tree i = create_tmp_var (TREE_TYPE (v));
	      tree ptype = build_pointer_type (TREE_TYPE (type));
	      if (DECL_P (v))
		{
		  v = maybe_lookup_decl_in_outer_ctx (v, ctx);
		  tree vv = create_tmp_var (TREE_TYPE (v));
		  gimplify_assign (vv, v, start);
		  v = vv;
		}
	      ref = build4 (ARRAY_REF, pointer_sized_int_node, avar,
			    size_int (7 + cnt * 3), NULL_TREE, NULL_TREE);
	      new_var = build_fold_addr_expr (new_var);
	      new_var = fold_convert (ptype, new_var);
	      ref = fold_convert (ptype, ref);
	      tree m = create_tmp_var (ptype);
	      gimplify_assign (m, new_var, end);
	      new_var = m;
	      m = create_tmp_var (ptype);
	      gimplify_assign (m, ref, end);
	      ref = m;
	      gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), end);
	      tree body = create_artificial_label (UNKNOWN_LOCATION);
	      tree endl = create_artificial_label (UNKNOWN_LOCATION);
	      gimple_seq_add_stmt (end, gimple_build_label (body));
	      tree priv = build_simple_mem_ref (new_var);
	      tree out = build_simple_mem_ref (ref);
	      if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
		{
		  /* User-defined reduction: splice in the lowered
		     combiner with the placeholders bound to the shared
		     and private elements.  */
		  tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
		  tree decl_placeholder
		    = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
		  tree lab6 = NULL_TREE;
		  if (cancellable)
		    {
		      /* If this reduction needs destruction and parallel
			 has been cancelled, jump around the merge operation
			 to the destruction.  */
		      tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
		      lab6 = create_artificial_label (UNKNOWN_LOCATION);
		      tree zero = build_zero_cst (TREE_TYPE (cancellable));
		      g = gimple_build_cond (NE_EXPR, cancellable, zero,
					     lab6, lab5);
		      gimple_seq_add_stmt (end, g);
		      gimple_seq_add_stmt (end, gimple_build_label (lab5));
		    }
		  SET_DECL_VALUE_EXPR (placeholder, out);
		  DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
		  SET_DECL_VALUE_EXPR (decl_placeholder, priv);
		  DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
		  lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
		  gimple_seq_add_seq (end,
				      OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
		  OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
		    {
		      OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
		      OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
		    }
		  if (cancellable)
		    gimple_seq_add_stmt (end, gimple_build_label (lab6));
		  tree x = lang_hooks.decls.omp_clause_dtor (c, priv);
		  if (x)
		    {
		      gimple_seq tseq = NULL;
		      gimplify_stmt (&x, &tseq);
		      gimple_seq_add_seq (end, tseq);
		    }
		}
	      else
		{
		  /* Built-in combiner: out = out RCODE priv.  */
		  tree x = build2 (rcode, TREE_TYPE (out), out, priv);
		  out = unshare_expr (out);
		  gimplify_assign (out, x, end);
		}
	      /* Advance both element pointers and the index, loop while
		 i <= max index.  */
	      gimple *g
		= gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
				       TYPE_SIZE_UNIT (TREE_TYPE (type)));
	      gimple_seq_add_stmt (end, g);
	      g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
				       TYPE_SIZE_UNIT (TREE_TYPE (type)));
	      gimple_seq_add_stmt (end, g);
	      g = gimple_build_assign (i, PLUS_EXPR, i,
				       build_int_cst (TREE_TYPE (i), 1));
	      gimple_seq_add_stmt (end, g);
	      g = gimple_build_cond (LE_EXPR, i, v, body, endl);
	      gimple_seq_add_stmt (end, g);
	      gimple_seq_add_stmt (end, gimple_build_label (endl));
	    }
	  else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	    {
	      /* Scalar user-defined reduction.  */
	      tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
	      tree oldv = NULL_TREE;
	      tree lab6 = NULL_TREE;
	      if (cancellable)
		{
		  /* If this reduction needs destruction and parallel
		     has been cancelled, jump around the merge operation
		     to the destruction.  */
		  tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
		  lab6 = create_artificial_label (UNKNOWN_LOCATION);
		  tree zero = build_zero_cst (TREE_TYPE (cancellable));
		  g = gimple_build_cond (NE_EXPR, cancellable, zero,
					 lab6, lab5);
		  gimple_seq_add_stmt (end, g);
		  gimple_seq_add_stmt (end, gimple_build_label (lab5));
		}
	      if (omp_is_reference (decl)
		  && !useless_type_conversion_p (TREE_TYPE (placeholder),
						 TREE_TYPE (ref)))
		ref = build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c), ref);
	      ref = build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c), ref);
	      tree refv = create_tmp_var (TREE_TYPE (ref));
	      gimplify_assign (refv, ref, end);
	      ref = build_simple_mem_ref_loc (OMP_CLAUSE_LOCATION (c), refv);
	      SET_DECL_VALUE_EXPR (placeholder, ref);
	      DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
	      /* Temporarily redirect D's value expr at NEW_VAR while
		 lowering the combiner, restoring it afterwards.  */
	      tree d = maybe_lookup_decl (decl, ctx);
	      gcc_assert (d);
	      if (DECL_HAS_VALUE_EXPR_P (d))
		oldv = DECL_VALUE_EXPR (d);
	      if (omp_is_reference (var))
		{
		  tree v = fold_convert (TREE_TYPE (d),
					 build_fold_addr_expr (new_var));
		  SET_DECL_VALUE_EXPR (d, v);
		}
	      else
		SET_DECL_VALUE_EXPR (d, new_var);
	      DECL_HAS_VALUE_EXPR_P (d) = 1;
	      lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
	      if (oldv)
		SET_DECL_VALUE_EXPR (d, oldv);
	      else
		{
		  SET_DECL_VALUE_EXPR (d, NULL_TREE);
		  DECL_HAS_VALUE_EXPR_P (d) = 0;
		}
	      gimple_seq_add_seq (end, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
	      OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
	      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
		OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
	      if (cancellable)
		gimple_seq_add_stmt (end, gimple_build_label (lab6));
	      tree x = lang_hooks.decls.omp_clause_dtor (c, new_var);
	      if (x)
		{
		  gimple_seq tseq = NULL;
		  gimplify_stmt (&x, &tseq);
		  gimple_seq_add_seq (end, tseq);
		}
	    }
	  else
	    {
	      /* Scalar built-in combiner: ref = ref RCODE new_var.  */
	      tree x = build2 (rcode, TREE_TYPE (ref), ref, new_var);
	      ref = unshare_expr (ref);
	      gimplify_assign (ref, x, end);
	    }
	  gimple_seq_add_stmt (end, gimple_build_label (lab4));
	  ++cnt;
	  field = DECL_CHAIN (bfield);
	}
    }

  /* Register the array with the runtime in the START sequence.  */
  if (code == OMP_TASKGROUP)
    {
      t = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_REGISTER);
      g = gimple_build_call (t, 1, build_fold_addr_expr (avar));
      gimple_seq_add_stmt (start, g);
    }
  else
    {
      /* For the other constructs the address is passed through the
	 _REDUCTEMP_ clause instead of an explicit call.  */
      tree c;
      if (code == OMP_FOR)
	c = gimple_omp_for_clauses (ctx->stmt);
      else if (code == OMP_SECTIONS)
	c = gimple_omp_sections_clauses (ctx->stmt);
      else
	c = gimple_omp_taskreg_clauses (ctx->stmt);
      c = omp_find_clause (c, OMP_CLAUSE__REDUCTEMP_);
      t = fold_convert (TREE_TYPE (OMP_CLAUSE_DECL (c)),
			build_fold_addr_expr (avar));
      gimplify_assign (OMP_CLAUSE_DECL (c), t, start);
    }

  /* Close the END loop: advance to the next thread's chunk and iterate
     while idx != num_thr_sz.  */
  gimple_seq_add_stmt (end, gimple_build_assign (data, PLUS_EXPR, data, sz));
  gimple_seq_add_stmt (end, gimple_build_assign (idx, PLUS_EXPR, idx,
						 size_one_node));
  g = gimple_build_cond (NE_EXPR, idx, num_thr_sz, lab1, lab2);
  gimple_seq_add_stmt (end, g);
  gimple_seq_add_stmt (end, gimple_build_label (lab2));
  /* Unregister the reductions with the runtime.  */
  if (code == OMP_FOR || code == OMP_SECTIONS)
    {
      enum built_in_function bfn
	= BUILT_IN_GOMP_WORKSHARE_TASK_REDUCTION_UNREGISTER;
      t = builtin_decl_explicit (bfn);
      tree c_bool_type = TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (t)));
      tree arg;
      if (cancellable)
	{
	  arg = create_tmp_var (c_bool_type);
	  gimple_seq_add_stmt (end, gimple_build_assign (arg, NOP_EXPR,
							 cancellable));
	}
      else
	arg = build_int_cst (c_bool_type, 0);
      g = gimple_build_call (t, 1, arg);
    }
  else
    {
      t = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_UNREGISTER);
      g = gimple_build_call (t, 1, build_fold_addr_expr (avar));
    }
  gimple_seq_add_stmt (end, g);
  /* Store a volatile empty CONSTRUCTOR into AVAR at the end — presumably
     a clobber-like kill of the array; confirm against how expansion
     treats TREE_THIS_VOLATILE stores.  */
  t = build_constructor (atype, NULL);
  TREE_THIS_VOLATILE (t) = 1;
  gimple_seq_add_stmt (end, gimple_build_assign (avar, t));
}
8730
/* Expand code for an OpenMP taskgroup directive.  Wraps the taskgroup
   body in a GIMPLE_BIND bracketed by a GOMP_taskgroup_start () call at
   entry and a GIMPLE_OMP_RETURN at the end (the matching taskgroup-end
   is emitted during expansion), and hooks up any task_reduction
   bookkeeping via lower_omp_task_reductions.  */

static void
lower_omp_taskgroup (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  gimple *stmt = gsi_stmt (*gsi_p);
  gcall *x;
  gbind *bind;
  /* Sequence for the task-reduction teardown code, filled in by
     lower_omp_task_reductions and appended after the OMP return.  */
  gimple_seq dseq = NULL;
  tree block = make_node (BLOCK);

  /* Replace the taskgroup stmt in the enclosing sequence with a new
     bind that contains it, so setup/teardown code can surround it.  */
  bind = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, bind, true);
  gimple_bind_add_stmt (bind, stmt);

  push_gimplify_context ();

  x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_START),
			 0);
  gimple_bind_add_stmt (bind, x);

  /* Emit registration code for any task_reduction clauses into the bind
     body (start) and collect the unregistration code into DSEQ.  */
  lower_omp_task_reductions (ctx, OMP_TASKGROUP,
			     gimple_omp_taskgroup_clauses (stmt),
			     gimple_bind_body_ptr (bind), &dseq);

  /* Recursively lower the taskgroup body, then splice it into the bind
     and clear it on the stmt itself.  */
  lower_omp (gimple_omp_body_ptr (stmt), ctx);
  gimple_bind_add_seq (bind, gimple_omp_body (stmt));
  gimple_omp_set_body (stmt, NULL);

  gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
  gimple_bind_add_seq (bind, dseq);

  pop_gimplify_context (bind);

  gimple_bind_append_vars (bind, ctx->block_vars);
  BLOCK_VARS (block) = ctx->block_vars;
}
8768
8769
/* Fold the OMP_ORDERED_CLAUSES for the OMP_ORDERED in ORD_STMT if possible.
   First merges depend(sink:) clauses of immediately following ordered
   constructs into ORD_STMT, then tries to fold all sink vectors into a
   single canonical one (GCD of first elements, lexicographic minimum of
   the rest).  If every clause is removed, ORD_STMT is replaced by a nop.  */

static void
lower_omp_ordered_clauses (gimple_stmt_iterator *gsi_p, gomp_ordered *ord_stmt,
			   omp_context *ctx)
{
  struct omp_for_data fd;
  /* Only meaningful when directly nested in an OMP for loop.  */
  if (!ctx->outer || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR)
    return;

  unsigned int len = gimple_omp_for_collapse (ctx->outer->stmt);
  struct omp_for_data_loop *loops = XALLOCAVEC (struct omp_for_data_loop, len);
  omp_extract_for_data (as_a <gomp_for *> (ctx->outer->stmt), &fd, loops);
  if (!fd.ordered)
    return;

  tree *list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
  tree c = gimple_omp_ordered_clauses (ord_stmt);
  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
      && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
    {
      /* Merge depend clauses from multiple adjacent
	 #pragma omp ordered depend(sink:...) constructs
	 into one #pragma omp ordered depend(sink:...), so that
	 we can optimize them together.  */
      gimple_stmt_iterator gsi = *gsi_p;
      gsi_next (&gsi);
      while (!gsi_end_p (gsi))
	{
	  gimple *stmt = gsi_stmt (gsi);
	  /* Debug stmts and nops between the ordereds don't break
	     adjacency.  */
	  if (is_gimple_debug (stmt)
	      || gimple_code (stmt) == GIMPLE_NOP)
	    {
	      gsi_next (&gsi);
	      continue;
	    }
	  if (gimple_code (stmt) != GIMPLE_OMP_ORDERED)
	    break;
	  gomp_ordered *ord_stmt2 = as_a <gomp_ordered *> (stmt);
	  c = gimple_omp_ordered_clauses (ord_stmt2);
	  if (c == NULL_TREE
	      || OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND
	      || OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
	    break;
	  /* Splice the following construct's clauses onto the end of
	     ORD_STMT's clause chain, then delete the construct.  */
	  while (*list_p)
	    list_p = &OMP_CLAUSE_CHAIN (*list_p);
	  *list_p = c;
	  gsi_remove (&gsi, true);
	}
    }

  /* Canonicalize sink dependence clauses into one folded clause if
     possible.

     The basic algorithm is to create a sink vector whose first
     element is the GCD of all the first elements, and whose remaining
     elements are the minimum of the subsequent columns.

     We ignore dependence vectors whose first element is zero because
     such dependencies are known to be executed by the same thread.

     We take into account the direction of the loop, so a minimum
     becomes a maximum if the loop is iterating forwards.  We also
     ignore sink clauses where the loop direction is unknown, or where
     the offsets are clearly invalid because they are not a multiple
     of the loop increment.

     For example:

	#pragma omp for ordered(2)
	for (i=0; i < N; ++i)
	  for (j=0; j < M; ++j)
	    {
	      #pragma omp ordered \
		depend(sink:i-8,j-2) \
		depend(sink:i,j-1) \	// Completely ignored because i+0.
		depend(sink:i-4,j-3) \
		depend(sink:i-6,j-4)
	      #pragma omp ordered depend(source)
	    }

     Folded clause is:

	depend(sink:-gcd(8,4,6),-min(2,3,4))
	  -or-
	depend(sink:-2,-2)
  */

  /* FIXME: Computing GCD's where the first element is zero is
     non-trivial in the presence of collapsed loops.  Do this later.  */
  if (fd.collapse > 1)
    return;

  wide_int *folded_deps = XALLOCAVEC (wide_int, 2 * len - 1);

  /* wide_int is not a POD so it must be default-constructed.  */
  for (unsigned i = 0; i != 2 * len - 1; ++i)
    new (static_cast<void*>(folded_deps + i)) wide_int ();

  tree folded_dep = NULL_TREE;
  /* TRUE if the first dimension's offset is negative.  */
  bool neg_offset_p = false;

  list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
  unsigned int i;
  while ((c = *list_p) != NULL)
    {
      bool remove = false;

      gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND);
      if (OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
	goto next_ordered_clause;

      tree vec;
      /* Walk the TREE_LIST of (offset, iterator) pairs making up this
	 sink vector, one entry per collapsed/ordered dimension.  */
      for (vec = OMP_CLAUSE_DECL (c), i = 0;
	   vec && TREE_CODE (vec) == TREE_LIST;
	   vec = TREE_CHAIN (vec), ++i)
	{
	  gcc_assert (i < len);

	  /* omp_extract_for_data has canonicalized the condition.  */
	  gcc_assert (fd.loops[i].cond_code == LT_EXPR
		      || fd.loops[i].cond_code == GT_EXPR);
	  bool forward = fd.loops[i].cond_code == LT_EXPR;
	  bool maybe_lexically_later = true;

	  /* While the committee makes up its mind, bail if we have any
	     non-constant steps.  */
	  if (TREE_CODE (fd.loops[i].step) != INTEGER_CST)
	    goto lower_omp_ordered_ret;

	  tree itype = TREE_TYPE (TREE_VALUE (vec));
	  if (POINTER_TYPE_P (itype))
	    itype = sizetype;
	  wide_int offset = wide_int::from (wi::to_wide (TREE_PURPOSE (vec)),
					    TYPE_PRECISION (itype),
					    TYPE_SIGN (itype));

	  /* Ignore invalid offsets that are not multiples of the step.  */
	  if (!wi::multiple_of_p (wi::abs (offset),
				  wi::abs (wi::to_wide (fd.loops[i].step)),
				  UNSIGNED))
	    {
	      warning_at (OMP_CLAUSE_LOCATION (c), 0,
			  "ignoring sink clause with offset that is not "
			  "a multiple of the loop step");
	      remove = true;
	      goto next_ordered_clause;
	    }

	  /* Calculate the first dimension.  The first dimension of
	     the folded dependency vector is the GCD of the first
	     elements, while ignoring any first elements whose offset
	     is 0.  */
	  if (i == 0)
	    {
	      /* Ignore dependence vectors whose first dimension is 0.  */
	      if (offset == 0)
		{
		  remove = true;
		  goto next_ordered_clause;
		}
	      else
		{
		  if (!TYPE_UNSIGNED (itype) && (forward ^ wi::neg_p (offset)))
		    {
		      error_at (OMP_CLAUSE_LOCATION (c),
				"first offset must be in opposite direction "
				"of loop iterations");
		      goto lower_omp_ordered_ret;
		    }
		  /* Normalize so folded_deps[0] is a positive magnitude;
		     NEG_OFFSET_P records whether to negate it back at
		     the end.  */
		  if (forward)
		    offset = -offset;
		  neg_offset_p = forward;
		  /* Initialize the first time around.  */
		  if (folded_dep == NULL_TREE)
		    {
		      folded_dep = c;
		      folded_deps[0] = offset;
		    }
		  else
		    folded_deps[0] = wi::gcd (folded_deps[0],
					      offset, UNSIGNED);
		}
	    }
	  /* Calculate minimum for the remaining dimensions.  */
	  else
	    {
	      /* folded_deps[len + i - 1] temporarily caches this
		 clause's i-th offset in case it becomes the new
		 lexicographic minimum below.  */
	      folded_deps[len + i - 1] = offset;
	      if (folded_dep == c)
		folded_deps[i] = offset;
	      else if (maybe_lexically_later
		       && !wi::eq_p (folded_deps[i], offset))
		{
		  if (forward ^ wi::gts_p (folded_deps[i], offset))
		    {
		      unsigned int j;
		      folded_dep = c;
		      for (j = 1; j <= i; j++)
			folded_deps[j] = folded_deps[len + j - 1];
		    }
		  else
		    maybe_lexically_later = false;
		}
	    }
	}
      gcc_assert (i == len);

      /* A fully-processed sink clause has been folded in; drop it.  */
      remove = true;

    next_ordered_clause:
      if (remove)
	*list_p = OMP_CLAUSE_CHAIN (c);
      else
	list_p = &OMP_CLAUSE_CHAIN (c);
    }

  /* If anything was folded, rewrite FOLDED_DEP's first offset and make
     it the sole surviving sink clause at the head of the chain.  */
  if (folded_dep)
    {
      if (neg_offset_p)
	folded_deps[0] = -folded_deps[0];

      tree itype = TREE_TYPE (TREE_VALUE (OMP_CLAUSE_DECL (folded_dep)));
      if (POINTER_TYPE_P (itype))
	itype = sizetype;

      TREE_PURPOSE (OMP_CLAUSE_DECL (folded_dep))
	= wide_int_to_tree (itype, folded_deps[0]);
      OMP_CLAUSE_CHAIN (folded_dep) = gimple_omp_ordered_clauses (ord_stmt);
      *gimple_omp_ordered_clauses_ptr (ord_stmt) = folded_dep;
    }

 lower_omp_ordered_ret:

  /* Ordered without clauses is #pragma omp ordered threads, while we want
     a nop instead if we remove all clauses.  */
  if (gimple_omp_ordered_clauses (ord_stmt) == NULL_TREE)
    gsi_replace (gsi_p, gimple_build_nop (), true);
}
9009
9010
/* Expand code for an OpenMP ordered directive.  Wraps the body in
   GOMP_ordered_start/GOMP_ordered_end runtime calls, or in the
   GOMP_SIMD_ORDERED_START/END internal fns when a simd clause is
   present.  For possibly-SIMT offloaded regions, additionally emits a
   per-lane loop so lanes execute the ordered body one at a time.
   Ordered constructs with depend clauses are left alone here (handled
   at expansion time).  */

static void
lower_omp_ordered (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree block;
  gimple *stmt = gsi_stmt (*gsi_p), *g;
  gomp_ordered *ord_stmt = as_a <gomp_ordered *> (stmt);
  gcall *x;
  gbind *bind;
  bool simd = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
			       OMP_CLAUSE_SIMD);
  /* FIXME: this should check presence of OMP_CLAUSE__SIMT_ on the enclosing
     loop.  */
  bool maybe_simt
    = simd && omp_maybe_offloaded_ctx (ctx) && omp_max_simt_vf () > 1;
  bool threads = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
				  OMP_CLAUSE_THREADS);

  if (omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
		       OMP_CLAUSE_DEPEND))
    {
      /* FIXME: This needs to be moved to the expansion to verify various
	 conditions only testable on cfg with dominators computed, and also
	 all the depend clauses to be merged still might need to be available
	 for the runtime checks.  */
      if (0)
	lower_omp_ordered_clauses (gsi_p, ord_stmt, ctx);
      return;
    }

  push_gimplify_context ();

  block = make_node (BLOCK);
  bind = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, bind, true);
  gimple_bind_add_stmt (bind, stmt);

  if (simd)
    {
      /* THREADS is passed as a flag argument to the internal fn so the
	 vectorizer can distinguish ordered threads from ordered simd.  */
      x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_START, 1,
				      build_int_cst (NULL_TREE, threads));
      cfun->has_simduid_loops = true;
    }
  else
    x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_START),
			   0);
  gimple_bind_add_stmt (bind, x);

  tree counter = NULL_TREE, test = NULL_TREE, body = NULL_TREE;
  if (maybe_simt)
    {
      /* SIMT prologue: fetch this lane's number and loop over lanes,
	 executing the body only when GOMP_SIMT_ORDERED_PRED says it is
	 this lane's turn.  */
      counter = create_tmp_var (integer_type_node);
      g = gimple_build_call_internal (IFN_GOMP_SIMT_LANE, 0);
      gimple_call_set_lhs (g, counter);
      gimple_bind_add_stmt (bind, g);

      body = create_artificial_label (UNKNOWN_LOCATION);
      test = create_artificial_label (UNKNOWN_LOCATION);
      gimple_bind_add_stmt (bind, gimple_build_label (body));

      tree simt_pred = create_tmp_var (integer_type_node);
      g = gimple_build_call_internal (IFN_GOMP_SIMT_ORDERED_PRED, 1, counter);
      gimple_call_set_lhs (g, simt_pred);
      gimple_bind_add_stmt (bind, g);

      tree t = create_artificial_label (UNKNOWN_LOCATION);
      g = gimple_build_cond (EQ_EXPR, simt_pred, integer_zero_node, t, test);
      gimple_bind_add_stmt (bind, g);

      gimple_bind_add_stmt (bind, gimple_build_label (t));
    }
  lower_omp (gimple_omp_body_ptr (stmt), ctx);
  gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
  gimple_bind_add_seq (bind, gimple_omp_body (stmt));
  gimple_omp_set_body (stmt, NULL);

  if (maybe_simt)
    {
      /* SIMT epilogue: decrement the lane counter and loop back while
	 any lane still has a non-negative counter (voted across the
	 warp via GOMP_SIMT_VOTE_ANY).  */
      gimple_bind_add_stmt (bind, gimple_build_label (test));
      g = gimple_build_assign (counter, MINUS_EXPR, counter, integer_one_node);
      gimple_bind_add_stmt (bind, g);

      tree c = build2 (GE_EXPR, boolean_type_node, counter, integer_zero_node);
      tree nonneg = create_tmp_var (integer_type_node);
      gimple_seq tseq = NULL;
      gimplify_assign (nonneg, fold_convert (integer_type_node, c), &tseq);
      gimple_bind_add_seq (bind, tseq);

      g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY, 1, nonneg);
      gimple_call_set_lhs (g, nonneg);
      gimple_bind_add_stmt (bind, g);

      tree end = create_artificial_label (UNKNOWN_LOCATION);
      g = gimple_build_cond (NE_EXPR, nonneg, integer_zero_node, body, end);
      gimple_bind_add_stmt (bind, g);

      gimple_bind_add_stmt (bind, gimple_build_label (end));
    }
  if (simd)
    x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_END, 1,
				    build_int_cst (NULL_TREE, threads));
  else
    x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_END),
			   0);
  gimple_bind_add_stmt (bind, x);

  gimple_bind_add_stmt (bind, gimple_build_omp_return (true));

  pop_gimplify_context (bind);

  gimple_bind_append_vars (bind, ctx->block_vars);
  BLOCK_VARS (block) = gimple_bind_vars (bind);
}
9125
9126
/* Expand code for an OpenMP scan directive and the structured block
   before the scan directive.

   For each reduction(inscan) clause on the enclosing worksharing/simd
   loop this emits, into BEFORE, the code that implements either the
   input phase (initialize the private copy) or the scan phase (combine
   into the running accumulator and publish the prefix value),
   depending on which side of the scan separator this GIMPLE_OMP_SCAN
   marks.  Which phase this is follows from HAS_CLAUSES xor the
   enclosing context's scan_inclusive flag.  */

static void
lower_omp_scan (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  gimple *stmt = gsi_stmt (*gsi_p);
  bool has_clauses
    = gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt)) != NULL;
  tree lane = NULL_TREE;
  gimple_seq before = NULL;
  omp_context *octx = ctx->outer;
  gcc_assert (octx);
  if (octx->scan_exclusive && !has_clauses)
    {
      gimple_stmt_iterator gsi2 = *gsi_p;
      gsi_next (&gsi2);
      gimple *stmt2 = gsi_stmt (gsi2);
      /* For exclusive scan, swap GIMPLE_OMP_SCAN without clauses
	 with following GIMPLE_OMP_SCAN with clauses, so that input_phase,
	 the one with exclusive clause(s), comes first.  */
      if (stmt2
	  && gimple_code (stmt2) == GIMPLE_OMP_SCAN
	  && gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt2)) != NULL)
	{
	  gsi_remove (gsi_p, false);
	  gsi_insert_after (gsi_p, stmt, GSI_SAME_STMT);
	  ctx = maybe_lookup_ctx (stmt2);
	  gcc_assert (ctx);
	  /* Recurse to handle the clause-bearing scan that is now at
	     the iterator position.  */
	  lower_omp_scan (gsi_p, ctx);
	  return;
	}
    }

  bool input_phase = has_clauses ^ octx->scan_inclusive;
  bool is_simd = (gimple_code (octx->stmt) == GIMPLE_OMP_FOR
		  && gimple_omp_for_kind (octx->stmt) == GF_OMP_FOR_KIND_SIMD);
  bool is_for = (gimple_code (octx->stmt) == GIMPLE_OMP_FOR
		 && gimple_omp_for_kind (octx->stmt) == GF_OMP_FOR_KIND_FOR
		 && !gimple_omp_for_combined_p (octx->stmt));
  bool is_for_simd = is_simd && gimple_omp_for_combined_into_p (octx->stmt);
  if (is_for_simd && octx->for_simd_scan_phase)
    is_simd = false;
  if (is_simd)
    /* For a simd loop, ask for a lane number dedicated to the current
       phase (1 = input, 2 = inclusive scan, 3 = exclusive scan) via
       the GOMP_SIMD_LANE internal fn keyed off the loop's simduid.  */
    if (tree c = omp_find_clause (gimple_omp_for_clauses (octx->stmt),
				  OMP_CLAUSE__SIMDUID_))
      {
	tree uid = OMP_CLAUSE__SIMDUID__DECL (c);
	lane = create_tmp_var (unsigned_type_node);
	tree t = build_int_cst (integer_type_node,
				input_phase ? 1
				: octx->scan_inclusive ? 2 : 3);
	gimple *g
	  = gimple_build_call_internal (IFN_GOMP_SIMD_LANE, 2, uid, t);
	gimple_call_set_lhs (g, lane);
	gimple_seq_add_stmt (&before, g);
      }

  if (is_simd || is_for)
    {
      for (tree c = gimple_omp_for_clauses (octx->stmt);
	   c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	    && OMP_CLAUSE_REDUCTION_INSCAN (c))
	  {
	    location_t clause_loc = OMP_CLAUSE_LOCATION (c);
	    tree var = OMP_CLAUSE_DECL (c);
	    tree new_var = lookup_decl (var, octx);
	    /* VAL is the object actually read/written for this clause;
	       VAR2 the accumulator; VAR3 a separate identity-element
	       copy (UDRs); VAR4 the extra temporary used by exclusive
	       scan; LANE0 the original lane index when NEW_VAR has
	       been mapped onto an "omp simd array".  */
	    tree val = new_var;
	    tree var2 = NULL_TREE;
	    tree var3 = NULL_TREE;
	    tree var4 = NULL_TREE;
	    tree lane0 = NULL_TREE;
	    tree new_vard = new_var;
	    if (omp_is_reference (var))
	      {
		new_var = build_simple_mem_ref_loc (clause_loc, new_var);
		val = new_var;
	      }
	    if (DECL_HAS_VALUE_EXPR_P (new_vard))
	      {
		val = DECL_VALUE_EXPR (new_vard);
		if (new_vard != new_var)
		  {
		    gcc_assert (TREE_CODE (val) == ADDR_EXPR);
		    val = TREE_OPERAND (val, 0);
		  }
		if (TREE_CODE (val) == ARRAY_REF
		    && VAR_P (TREE_OPERAND (val, 0)))
		  {
		    tree v = TREE_OPERAND (val, 0);
		    if (lookup_attribute ("omp simd array",
					  DECL_ATTRIBUTES (v)))
		      {
			/* NEW_VAR lives in a per-lane simd array; index
			   it with the phase-specific LANE instead of
			   the original lane (remembered in LANE0).  */
			val = unshare_expr (val);
			lane0 = TREE_OPERAND (val, 1);
			TREE_OPERAND (val, 1) = lane;
			var2 = lookup_decl (v, octx);
			if (octx->scan_exclusive)
			  var4 = lookup_decl (var2, octx);
			if (input_phase
			    && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
			  var3 = maybe_lookup_decl (var4 ? var4 : var2, octx);
			if (!input_phase)
			  {
			    var2 = build4 (ARRAY_REF, TREE_TYPE (val),
					   var2, lane, NULL_TREE, NULL_TREE);
			    TREE_THIS_NOTRAP (var2) = 1;
			    if (octx->scan_exclusive)
			      {
				var4 = build4 (ARRAY_REF, TREE_TYPE (val),
					       var4, lane, NULL_TREE,
					       NULL_TREE);
				TREE_THIS_NOTRAP (var4) = 1;
			      }
			  }
			else
			  var2 = val;
		      }
		  }
		gcc_assert (var2);
	      }
	    else
	      {
		var2 = build_outer_var_ref (var, octx);
		if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
		  {
		    var3 = maybe_lookup_decl (new_vard, octx);
		    if (var3 == new_vard || var3 == NULL_TREE)
		      var3 = NULL_TREE;
		    else if (is_simd && octx->scan_exclusive && !input_phase)
		      {
			var4 = maybe_lookup_decl (var3, octx);
			if (var4 == var3 || var4 == NULL_TREE)
			  {
			    if (TREE_ADDRESSABLE (TREE_TYPE (new_var)))
			      {
				var4 = var3;
				var3 = NULL_TREE;
			      }
			    else
			      var4 = NULL_TREE;
			  }
		      }
		  }
		if (is_simd
		    && octx->scan_exclusive
		    && !input_phase
		    && var4 == NULL_TREE)
		  var4 = create_tmp_var (TREE_TYPE (val));
	      }
	    if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	      {
		/* User-defined reduction: splice the recorded init /
		   merge sequences, retargeting the placeholder (and
		   possibly NEW_VARD) via DECL_VALUE_EXPR.  */
		tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
		if (input_phase)
		  {
		    if (var3)
		      {
			/* If we've added a separate identity element
			   variable, copy it over into val.  */
			tree x = lang_hooks.decls.omp_clause_assign_op (c, val,
									var3);
			gimplify_and_add (x, &before);
		      }
		    else if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
		      {
			/* Otherwise, assign to it the identity element.  */
			gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
			if (is_for)
			  tseq = copy_gimple_seq_and_replace_locals (tseq);
			tree ref = build_outer_var_ref (var, octx);
			tree x = (DECL_HAS_VALUE_EXPR_P (new_vard)
				  ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
			if (x)
			  {
			    if (new_vard != new_var)
			      val = build_fold_addr_expr_loc (clause_loc, val);
			    SET_DECL_VALUE_EXPR (new_vard, val);
			  }
			SET_DECL_VALUE_EXPR (placeholder, ref);
			DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
			lower_omp (&tseq, octx);
			if (x)
			  SET_DECL_VALUE_EXPR (new_vard, x);
			SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
			DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
			gimple_seq_add_seq (&before, tseq);
			if (is_simd)
			  OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
		      }
		  }
		else if (is_simd)
		  {
		    tree x;
		    if (octx->scan_exclusive)
		      {
			/* Exclusive scan: save the pre-merge value into
			   VAR4 first, so VAL can later read it.  */
			tree v4 = unshare_expr (var4);
			tree v2 = unshare_expr (var2);
			x = lang_hooks.decls.omp_clause_assign_op (c, v4, v2);
			gimplify_and_add (x, &before);
		      }
		    gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
		    x = (DECL_HAS_VALUE_EXPR_P (new_vard)
			 ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
		    tree vexpr = val;
		    if (x && new_vard != new_var)
		      vexpr = build_fold_addr_expr_loc (clause_loc, val);
		    if (x)
		      SET_DECL_VALUE_EXPR (new_vard, vexpr);
		    SET_DECL_VALUE_EXPR (placeholder, var2);
		    DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
		    lower_omp (&tseq, octx);
		    gimple_seq_add_seq (&before, tseq);
		    OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
		    if (x)
		      SET_DECL_VALUE_EXPR (new_vard, x);
		    SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
		    DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
		    if (octx->scan_inclusive)
		      {
			x = lang_hooks.decls.omp_clause_assign_op (c, val,
								   var2);
			gimplify_and_add (x, &before);
		      }
		    else if (lane0 == NULL_TREE)
		      {
			x = lang_hooks.decls.omp_clause_assign_op (c, val,
								   var4);
			gimplify_and_add (x, &before);
		      }
		  }
	      }
	    else
	      {
		if (input_phase)
		  {
		    /* input phase.  Set val to initializer before
		       the body.  */
		    tree x = omp_reduction_init (c, TREE_TYPE (new_var));
		    gimplify_assign (val, x, &before);
		  }
		else if (is_simd)
		  {
		    /* scan phase.  */
		    enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
		    /* MINUS is scanned as PLUS of the negated inputs.  */
		    if (code == MINUS_EXPR)
		      code = PLUS_EXPR;

		    tree x = build2 (code, TREE_TYPE (var2),
				     unshare_expr (var2), unshare_expr (val));
		    if (octx->scan_inclusive)
		      {
			gimplify_assign (unshare_expr (var2), x, &before);
			gimplify_assign (val, var2, &before);
		      }
		    else
		      {
			gimplify_assign (unshare_expr (var4),
					 unshare_expr (var2), &before);
			gimplify_assign (var2, x, &before);
			if (lane0 == NULL_TREE)
			  gimplify_assign (val, var4, &before);
		      }
		  }
	      }
	    if (octx->scan_exclusive && !input_phase && lane0)
	      {
		/* Exclusive scan with a simd array: reads of NEW_VARD
		   after the separator should see the saved pre-merge
		   value at the original lane.  */
		tree vexpr = unshare_expr (var4);
		TREE_OPERAND (vexpr, 1) = lane0;
		if (new_vard != new_var)
		  vexpr = build_fold_addr_expr_loc (clause_loc, vexpr);
		SET_DECL_VALUE_EXPR (new_vard, vexpr);
	      }
	  }
    }
  if (is_simd && !is_for_simd)
    {
      /* Pure simd: splice the scan body and the generated code inline
	 and drop the GIMPLE_OMP_SCAN statement itself.  */
      gsi_insert_seq_after (gsi_p, gimple_omp_body (stmt), GSI_SAME_STMT);
      gsi_insert_seq_after (gsi_p, before, GSI_SAME_STMT);
      gsi_replace (gsi_p, gimple_build_nop (), true);
      return;
    }
  lower_omp (gimple_omp_body_ptr (stmt), octx);
  if (before)
    {
      /* Otherwise keep the scan stmt and prepend the generated code to
	 its body.  */
      gimple_stmt_iterator gsi = gsi_start_1 (gimple_omp_body_ptr (stmt));
      gsi_insert_seq_before (&gsi, before, GSI_SAME_STMT);
    }
}
9416
9417
/* Gimplify a GIMPLE_OMP_CRITICAL statement.  This is a relatively simple
   substitution of a couple of function calls.  But in the NAMED case,
   requires that languages coordinate a symbol name.  It is therefore
   best put here in common code.  */

/* Map from critical-section name IDENTIFIER to the TREE_PUBLIC common
   symbol (".gomp_critical_user_<name>") used as its lock, shared by all
   functions in this TU.  GTY-rooted so it survives garbage collection.  */
static GTY(()) hash_map<tree, tree> *critical_name_mutexes;

static void
lower_omp_critical (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree block;
  tree name, lock, unlock;
  gomp_critical *stmt = as_a <gomp_critical *> (gsi_stmt (*gsi_p));
  gbind *bind;
  location_t loc = gimple_location (stmt);
  gimple_seq tbody;

  name = gimple_omp_critical_name (stmt);
  if (name)
    {
      tree decl;

      if (!critical_name_mutexes)
	critical_name_mutexes = hash_map<tree, tree>::create_ggc (10);

      tree *n = critical_name_mutexes->get (name);
      if (n == NULL)
	{
	  char *new_str;

	  decl = create_tmp_var_raw (ptr_type_node);

	  /* TREE_PUBLIC + DECL_COMMON so that every TU using the same
	     critical name links against one shared lock object.  */
	  new_str = ACONCAT ((".gomp_critical_user_",
			      IDENTIFIER_POINTER (name), NULL));
	  DECL_NAME (decl) = get_identifier (new_str);
	  TREE_PUBLIC (decl) = 1;
	  TREE_STATIC (decl) = 1;
	  DECL_COMMON (decl) = 1;
	  DECL_ARTIFICIAL (decl) = 1;
	  DECL_IGNORED_P (decl) = 1;

	  varpool_node::finalize_decl (decl);

	  critical_name_mutexes->put (name, decl);
	}
      else
	decl = *n;

      /* If '#pragma omp critical' is inside offloaded region or
	 inside function marked as offloadable, the symbol must be
	 marked as offloadable too.  */
      omp_context *octx;
      if (cgraph_node::get (current_function_decl)->offloadable)
	varpool_node::get_create (decl)->offloadable = 1;
      else
	for (octx = ctx->outer; octx; octx = octx->outer)
	  if (is_gimple_omp_offloaded (octx->stmt))
	    {
	      varpool_node::get_create (decl)->offloadable = 1;
	      break;
	    }

      lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_START);
      lock = build_call_expr_loc (loc, lock, 1,
				  build_fold_addr_expr_loc (loc, decl));

      unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_END);
      unlock = build_call_expr_loc (loc, unlock, 1,
				    build_fold_addr_expr_loc (loc, decl));
    }
  else
    {
      /* Unnamed critical uses the global GOMP_critical_start/end pair.  */
      lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_START);
      lock = build_call_expr_loc (loc, lock, 0);

      unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_END);
      unlock = build_call_expr_loc (loc, unlock, 0);
    }

  push_gimplify_context ();

  block = make_node (BLOCK);
  bind = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, bind, true);
  gimple_bind_add_stmt (bind, stmt);

  tbody = gimple_bind_body (bind);
  gimplify_and_add (lock, &tbody);
  gimple_bind_set_body (bind, tbody);

  /* Lower the body and wrap it so exceptions cannot escape with the
     lock held.  */
  lower_omp (gimple_omp_body_ptr (stmt), ctx);
  gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
  gimple_bind_add_seq (bind, gimple_omp_body (stmt));
  gimple_omp_set_body (stmt, NULL);

  tbody = gimple_bind_body (bind);
  gimplify_and_add (unlock, &tbody);
  gimple_bind_set_body (bind, tbody);

  gimple_bind_add_stmt (bind, gimple_build_omp_return (true));

  pop_gimplify_context (bind);
  gimple_bind_append_vars (bind, ctx->block_vars);
  BLOCK_VARS (block) = gimple_bind_vars (bind);
}
9523
/* A subroutine of lower_omp_for.  Generate code to emit the predicate
   for a lastprivate clause.  Given a loop control predicate of (V
   cond N2), we gate the clause on (!(V cond N2)).  The lowered form
   is appended to *DLIST, iterator initialization is appended to
   *BODY_P.  *CLIST is for lastprivate(conditional:) code that needs
   to be emitted in a critical section.  */

static void
lower_omp_for_lastprivate (struct omp_for_data *fd, gimple_seq *body_p,
			   gimple_seq *dlist, gimple_seq *clist,
			   struct omp_context *ctx)
{
  tree clauses, cond, vinit;
  enum tree_code cond_code;
  gimple_seq stmts;

  /* Negate the loop condition: LT becomes GE, GT becomes LE.  */
  cond_code = fd->loop.cond_code;
  cond_code = cond_code == LT_EXPR ? GE_EXPR : LE_EXPR;

  /* When possible, use a strict equality expression.  This can let VRP
     type optimizations deduce the value and remove a copy.  */
  if (tree_fits_shwi_p (fd->loop.step))
    {
      HOST_WIDE_INT step = tree_to_shwi (fd->loop.step);
      if (step == 1 || step == -1)
	cond_code = EQ_EXPR;
    }

  if (gimple_omp_for_kind (fd->for_stmt) == GF_OMP_FOR_KIND_GRID_LOOP
      || gimple_omp_for_grid_phony (fd->for_stmt))
    cond = omp_grid_lastprivate_predicate (fd);
  else
    {
      tree n2 = fd->loop.n2;
      if (fd->collapse > 1
	  && TREE_CODE (n2) != INTEGER_CST
	  && gimple_omp_for_combined_into_p (fd->for_stmt))
	{
	  /* For a collapsed loop combined into an outer construct the
	     real iteration count lives in a _looptemp_ of the enclosing
	     taskreg construct (or in the outer for's own bound); find
	     the context that holds it.  */
	  struct omp_context *taskreg_ctx = NULL;
	  if (gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
	    {
	      gomp_for *gfor = as_a <gomp_for *> (ctx->outer->stmt);
	      if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_FOR
		  || gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_DISTRIBUTE)
		{
		  if (gimple_omp_for_combined_into_p (gfor))
		    {
		      gcc_assert (ctx->outer->outer
				  && is_parallel_ctx (ctx->outer->outer));
		      taskreg_ctx = ctx->outer->outer;
		    }
		  else
		    {
		      struct omp_for_data outer_fd;
		      omp_extract_for_data (gfor, &outer_fd, NULL);
		      n2 = fold_convert (TREE_TYPE (n2), outer_fd.loop.n2);
		    }
		}
	      else if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_TASKLOOP)
		taskreg_ctx = ctx->outer->outer;
	    }
	  else if (is_taskreg_ctx (ctx->outer))
	    taskreg_ctx = ctx->outer;
	  if (taskreg_ctx)
	    {
	      int i;
	      tree taskreg_clauses
		= gimple_omp_taskreg_clauses (taskreg_ctx->stmt);
	      tree innerc = omp_find_clause (taskreg_clauses,
					     OMP_CLAUSE__LOOPTEMP_);
	      gcc_assert (innerc);
	      /* Skip over the per-dimension _looptemp_ clauses to reach
		 the one that carries the overall bound.  */
	      for (i = 0; i < fd->collapse; i++)
		{
		  innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
					    OMP_CLAUSE__LOOPTEMP_);
		  gcc_assert (innerc);
		}
	      innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
					OMP_CLAUSE__LOOPTEMP_);
	      if (innerc)
		n2 = fold_convert (TREE_TYPE (n2),
				   lookup_decl (OMP_CLAUSE_DECL (innerc),
						taskreg_ctx));
	    }
	}
      cond = build2 (cond_code, boolean_type_node, fd->loop.v, n2);
    }

  clauses = gimple_omp_for_clauses (fd->for_stmt);
  stmts = NULL;
  lower_lastprivate_clauses (clauses, cond, body_p, &stmts, clist, ctx);
  if (!gimple_seq_empty_p (stmts))
    {
      gimple_seq_add_seq (&stmts, *dlist);
      *dlist = stmts;

      /* Optimize: v = 0; is usually cheaper than v = some_other_constant.  */
      vinit = fd->loop.n1;
      if (cond_code == EQ_EXPR
	  && tree_fits_shwi_p (fd->loop.n2)
	  && ! integer_zerop (fd->loop.n2))
	vinit = build_int_cst (TREE_TYPE (fd->loop.v), 0);
      else
	vinit = unshare_expr (vinit);

      /* Initialize the iterator variable, so that threads that don't execute
	 any iterations don't execute the lastprivate clauses by accident.  */
      gimplify_assign (fd->loop.v, vinit, body_p);
    }
}
9634
/* Callback for walk_gimple_seq.  Find #pragma omp scan statement.
   On a match, stores the iterator pointing at the GIMPLE_OMP_SCAN into
   WI->info and returns non-NULL to stop the walk; otherwise returns
   NULL to continue.  */

static tree
omp_find_scan (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
	       struct walk_stmt_info *wi)
{
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;
  switch (gimple_code (stmt))
    {
    WALK_SUBSTMTS;

    case GIMPLE_OMP_FOR:
      /* Descend only into the innermost simd half of a combined
	 construct; the scan directive lives inside it.  */
      if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_SIMD
	  && gimple_omp_for_combined_into_p (stmt))
	*handled_ops_p = false;
      break;

    case GIMPLE_OMP_SCAN:
      /* Report the location of the scan back to the caller.  */
      *(gimple_stmt_iterator *) (wi->info) = *gsi_p;
      return integer_zero_node;
    default:
      break;
    }
  return NULL;
}
9662
9663 /* Helper function for lower_omp_for, add transformations for a worksharing
9664 loop with scan directives inside of it.
9665 For worksharing loop not combined with simd, transform:
9666 #pragma omp for reduction(inscan,+:r) private(i)
9667 for (i = 0; i < n; i = i + 1)
9668 {
9669 {
9670 update (r);
9671 }
9672 #pragma omp scan inclusive(r)
9673 {
9674 use (r);
9675 }
9676 }
9677
9678 into two worksharing loops + code to merge results:
9679
9680 num_threads = omp_get_num_threads ();
9681 thread_num = omp_get_thread_num ();
9682 if (thread_num == 0) goto <D.2099>; else goto <D.2100>;
9683 <D.2099>:
9684 var2 = r;
9685 goto <D.2101>;
9686 <D.2100>:
9687 // For UDRs this is UDR init, or if ctors are needed, copy from
9688 // var3 that has been constructed to contain the neutral element.
9689 var2 = 0;
9690 <D.2101>:
9691 ivar = 0;
9692 // The _scantemp_ clauses will arrange for rpriva to be initialized to
9693 // a shared array with num_threads elements and rprivb to a local array
9694 // number of elements equal to the number of (contiguous) iterations the
9695 // current thread will perform. controlb and controlp variables are
9696 // temporaries to handle deallocation of rprivb at the end of second
9697 // GOMP_FOR.
9698 #pragma omp for _scantemp_(rpriva) _scantemp_(rprivb) _scantemp_(controlb) \
9699 _scantemp_(controlp) reduction(inscan,+:r) private(i) nowait
9700 for (i = 0; i < n; i = i + 1)
9701 {
9702 {
9703 // For UDRs this is UDR init or copy from var3.
9704 r = 0;
9705 // This is the input phase from user code.
9706 update (r);
9707 }
9708 {
9709 // For UDRs this is UDR merge.
9710 var2 = var2 + r;
9711 // Rather than handing it over to the user, save to local thread's
9712 // array.
9713 rprivb[ivar] = var2;
9714 // For exclusive scan, the above two statements are swapped.
9715 ivar = ivar + 1;
9716 }
9717 }
9718 // And remember the final value from this thread's into the shared
9719 // rpriva array.
9720 rpriva[(sizetype) thread_num] = var2;
9721 // If more than one thread, compute using Work-Efficient prefix sum
9722 // the inclusive parallel scan of the rpriva array.
9723 if (num_threads > 1) goto <D.2102>; else goto <D.2103>;
9724 <D.2102>:
9725 GOMP_barrier ();
9726 down = 0;
9727 k = 1;
9728 num_threadsu = (unsigned int) num_threads;
thread_nump1 = (unsigned int) thread_num + 1;
9730 <D.2108>:
9731 twok = k << 1;
9732 if (twok > num_threadsu) goto <D.2110>; else goto <D.2111>;
9733 <D.2110>:
9734 down = 4294967295;
9735 k = k >> 1;
9736 if (k == num_threadsu) goto <D.2112>; else goto <D.2111>;
9737 <D.2112>:
9738 k = k >> 1;
9739 <D.2111>:
9740 twok = k << 1;
9741 cplx = .MUL_OVERFLOW (thread_nump1, twok);
9742 mul = REALPART_EXPR <cplx>;
9743 ovf = IMAGPART_EXPR <cplx>;
9744 if (ovf == 0) goto <D.2116>; else goto <D.2117>;
9745 <D.2116>:
9746 andv = k & down;
9747 andvm1 = andv + 4294967295;
9748 l = mul + andvm1;
9749 if (l < num_threadsu) goto <D.2120>; else goto <D.2117>;
9750 <D.2120>:
9751 // For UDRs this is UDR merge, performed using var2 variable as temporary,
9752 // i.e. var2 = rpriva[l - k]; UDR merge (var2, rpriva[l]); rpriva[l] = var2;
9753 rpriva[l] = rpriva[l - k] + rpriva[l];
9754 <D.2117>:
9755 if (down == 0) goto <D.2121>; else goto <D.2122>;
9756 <D.2121>:
9757 k = k << 1;
9758 goto <D.2123>;
9759 <D.2122>:
9760 k = k >> 1;
9761 <D.2123>:
9762 GOMP_barrier ();
9763 if (k != 0) goto <D.2108>; else goto <D.2103>;
9764 <D.2103>:
9765 if (thread_num == 0) goto <D.2124>; else goto <D.2125>;
9766 <D.2124>:
9767 // For UDRs this is UDR init or copy from var3.
9768 var2 = 0;
9769 goto <D.2126>;
9770 <D.2125>:
9771 var2 = rpriva[thread_num - 1];
9772 <D.2126>:
9773 ivar = 0;
9774 #pragma omp for _scantemp_(controlb) _scantemp_(controlp) \
9775 reduction(inscan,+:r) private(i)
9776 for (i = 0; i < n; i = i + 1)
9777 {
9778 {
9779 // For UDRs, this is r = var2; UDR merge (r, rprivb[ivar]);
9780 r = var2 + rprivb[ivar];
9781 }
9782 {
9783 // This is the scan phase from user code.
9784 use (r);
9785 // Plus a bump of the iterator.
9786 ivar = ivar + 1;
9787 }
9788 } */
9789
static void
lower_omp_for_scan (gimple_seq *body_p, gimple_seq *dlist, gomp_for *stmt,
		    struct omp_for_data *fd, omp_context *ctx)
{
  /* STMT is a worksharing loop containing a GIMPLE_OMP_SCAN with an
     inscan reduction; rewrite it into the two-pass form described in
     the big comment above this function.  Generated statements are
     appended to *BODY_P; destructor/cleanup code is prepended to
     *DLIST.  */
  bool is_for_simd = gimple_omp_for_combined_p (stmt);
  gcc_assert (ctx->scan_inclusive || ctx->scan_exclusive);

  /* Step 1: find the GIMPLE_OMP_SCAN that separates the input phase
     from the scan phase in the original loop body.  input_stmt1 is the
     statement immediately preceding the scan (the input-phase block).  */
  gimple_seq body = gimple_omp_body (stmt);
  gimple_stmt_iterator input1_gsi = gsi_none ();
  struct walk_stmt_info wi;
  memset (&wi, 0, sizeof (wi));
  wi.val_only = true;
  wi.info = (void *) &input1_gsi;
  walk_gimple_seq_mod (&body, omp_find_scan, NULL, &wi);
  gcc_assert (!gsi_end_p (input1_gsi));

  gimple *input_stmt1 = gsi_stmt (input1_gsi);
  gimple_stmt_iterator gsi = input1_gsi;
  gsi_next (&gsi);
  gimple_stmt_iterator scan1_gsi = gsi;
  gimple *scan_stmt1 = gsi_stmt (gsi);
  gcc_assert (scan_stmt1 && gimple_code (scan_stmt1) == GIMPLE_OMP_SCAN);

  gimple_seq input_body = gimple_omp_body (input_stmt1);
  gimple_seq scan_body = gimple_omp_body (scan_stmt1);
  gimple_omp_set_body (input_stmt1, NULL);
  gimple_omp_set_body (scan_stmt1, NULL);
  gimple_omp_set_body (stmt, NULL);

  /* Step 2: clone the loop for the second GOMP_FOR.  The bodies of
     input_stmt1/scan_stmt1 are detached above so the copy gets empty
     phase bodies; afterwards the first loop keeps the input phase and
     the copy (via scan_stmt2 below) receives the scan phase.  */
  gomp_for *new_stmt = as_a <gomp_for *> (gimple_copy (stmt));
  gimple_seq new_body = copy_gimple_seq_and_replace_locals (body);
  gimple_omp_set_body (stmt, body);
  gimple_omp_set_body (input_stmt1, input_body);

  /* Find the scan separator again inside the copied body.  */
  gimple_stmt_iterator input2_gsi = gsi_none ();
  memset (&wi, 0, sizeof (wi));
  wi.val_only = true;
  wi.info = (void *) &input2_gsi;
  walk_gimple_seq_mod (&new_body, omp_find_scan, NULL, &wi);
  gcc_assert (!gsi_end_p (input2_gsi));

  gimple *input_stmt2 = gsi_stmt (input2_gsi);
  gsi = input2_gsi;
  gsi_next (&gsi);
  gimple_stmt_iterator scan2_gsi = gsi;
  gimple *scan_stmt2 = gsi_stmt (gsi);
  gcc_assert (scan_stmt2 && gimple_code (scan_stmt2) == GIMPLE_OMP_SCAN);
  gimple_omp_set_body (scan_stmt2, scan_body);

  /* For a combined for-simd, each phase body contains a further nested
     scan separator (inside the inner SIMD loop); locate those too.  */
  gimple_stmt_iterator input3_gsi = gsi_none ();
  gimple_stmt_iterator scan3_gsi = gsi_none ();
  gimple_stmt_iterator input4_gsi = gsi_none ();
  gimple_stmt_iterator scan4_gsi = gsi_none ();
  gimple *input_stmt3 = NULL, *scan_stmt3 = NULL;
  gimple *input_stmt4 = NULL, *scan_stmt4 = NULL;
  omp_context *input_simd_ctx = NULL, *scan_simd_ctx = NULL;
  if (is_for_simd)
    {
      memset (&wi, 0, sizeof (wi));
      wi.val_only = true;
      wi.info = (void *) &input3_gsi;
      walk_gimple_seq_mod (&input_body, omp_find_scan, NULL, &wi);
      gcc_assert (!gsi_end_p (input3_gsi));

      input_stmt3 = gsi_stmt (input3_gsi);
      gsi = input3_gsi;
      gsi_next (&gsi);
      scan3_gsi = gsi;
      scan_stmt3 = gsi_stmt (gsi);
      gcc_assert (scan_stmt3 && gimple_code (scan_stmt3) == GIMPLE_OMP_SCAN);

      memset (&wi, 0, sizeof (wi));
      wi.val_only = true;
      wi.info = (void *) &input4_gsi;
      walk_gimple_seq_mod (&scan_body, omp_find_scan, NULL, &wi);
      gcc_assert (!gsi_end_p (input4_gsi));

      input_stmt4 = gsi_stmt (input4_gsi);
      gsi = input4_gsi;
      gsi_next (&gsi);
      scan4_gsi = gsi;
      scan_stmt4 = gsi_stmt (gsi);
      gcc_assert (scan_stmt4 && gimple_code (scan_stmt4) == GIMPLE_OMP_SCAN);

      input_simd_ctx = maybe_lookup_ctx (input_stmt3)->outer;
      scan_simd_ctx = maybe_lookup_ctx (input_stmt4)->outer;
    }

  /* Step 3: emit num_threads = omp_get_num_threads () and
     thread_num = omp_get_thread_num () at the start of the lowered
     sequence.  */
  tree num_threads = create_tmp_var (integer_type_node);
  tree thread_num = create_tmp_var (integer_type_node);
  tree nthreads_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS);
  tree threadnum_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
  gimple *g = gimple_build_call (nthreads_decl, 0);
  gimple_call_set_lhs (g, num_threads);
  gimple_seq_add_stmt (body_p, g);
  g = gimple_build_call (threadnum_decl, 0);
  gimple_call_set_lhs (g, thread_num);
  gimple_seq_add_stmt (body_p, g);

  /* ivar indexes the per-thread rprivb array; k and l are the strides
     and indices of the work-efficient prefix-scan loop below.  */
  tree ivar = create_tmp_var (sizetype);
  tree new_clauses1 = NULL_TREE, new_clauses2 = NULL_TREE;
  tree *cp1 = &new_clauses1, *cp2 = &new_clauses2;
  tree k = create_tmp_var (unsigned_type_node);
  tree l = create_tmp_var (unsigned_type_node);

  /* Step 4: for every inscan reduction clause, build the helper
     statement sequences that are later stitched into the control flow:
       clist      - constructors (UDR default ctor for var2)
       mdlist     - rpriva[thread_num] = var2 after the first loop
       thr01_list - thread 0 init before the first loop
       thrn1_list - other-thread init before the first loop
       thr02_list - thread 0 init before the second loop
       thrn2_list - other-thread init (from rpriva[thread_num-1])
       scan1_list - per-iteration merge/store in the first loop
       input2_list- per-iteration combine in the second loop
       last_list  - final write-back of the reduction result
       reduc_list - body of the parallel prefix-scan step.  */
  gimple_seq clist = NULL, mdlist = NULL;
  gimple_seq thr01_list = NULL, thrn1_list = NULL;
  gimple_seq thr02_list = NULL, thrn2_list = NULL;
  gimple_seq scan1_list = NULL, input2_list = NULL;
  gimple_seq last_list = NULL, reduc_list = NULL;
  for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	&& OMP_CLAUSE_REDUCTION_INSCAN (c))
      {
	location_t clause_loc = OMP_CLAUSE_LOCATION (c);
	tree var = OMP_CLAUSE_DECL (c);
	tree new_var = lookup_decl (var, ctx);
	tree var3 = NULL_TREE;
	tree new_vard = new_var;
	if (omp_is_reference (var))
	  new_var = build_simple_mem_ref_loc (clause_loc, new_var);
	if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	  {
	    /* var3, when distinct from the privatized decl, holds a
	       pre-constructed copy of the UDR neutral element.  */
	    var3 = maybe_lookup_decl (new_vard, ctx);
	    if (var3 == new_vard)
	      var3 = NULL_TREE;
	  }

	/* rpriva: shared array of per-thread partial results, indexed
	   by thread number; advertised via a _scantemp_ clause.  */
	tree ptype = build_pointer_type (TREE_TYPE (new_var));
	tree rpriva = create_tmp_var (ptype);
	tree nc = build_omp_clause (clause_loc, OMP_CLAUSE__SCANTEMP_);
	OMP_CLAUSE_DECL (nc) = rpriva;
	*cp1 = nc;
	cp1 = &OMP_CLAUSE_CHAIN (nc);

	/* rprivb: per-thread array of per-iteration values
	   (_SCANTEMP__ALLOC requests allocation of it).  */
	tree rprivb = create_tmp_var (ptype);
	nc = build_omp_clause (clause_loc, OMP_CLAUSE__SCANTEMP_);
	OMP_CLAUSE_DECL (nc) = rprivb;
	OMP_CLAUSE__SCANTEMP__ALLOC (nc) = 1;
	*cp1 = nc;
	cp1 = &OMP_CLAUSE_CHAIN (nc);

	/* var2: the running accumulator for this thread.  */
	tree var2 = create_tmp_var_raw (TREE_TYPE (new_var));
	if (new_vard != new_var)
	  TREE_ADDRESSABLE (var2) = 1;
	gimple_add_tmp_var (var2);

	/* Build the various array element references used below:
	   rpriva[thread_num] ...  */
	tree x = fold_convert_loc (clause_loc, sizetype, thread_num);
	x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
			     TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
	x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
	tree rpriva_ref = build_simple_mem_ref_loc (clause_loc, x);

	/* ... rpriva[thread_num - 1] ...  */
	x = fold_build2_loc (clause_loc, PLUS_EXPR, integer_type_node,
			     thread_num, integer_minus_one_node);
	x = fold_convert_loc (clause_loc, sizetype, x);
	x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
			     TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
	x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
	tree rprivam1_ref = build_simple_mem_ref_loc (clause_loc, x);

	/* ... rpriva[l] ...  */
	x = fold_convert_loc (clause_loc, sizetype, l);
	x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
			     TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
	x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
	tree rprival_ref = build_simple_mem_ref_loc (clause_loc, x);

	/* ... rpriva[l - k] ...  */
	x = fold_build2_loc (clause_loc, MINUS_EXPR, unsigned_type_node, l, k);
	x = fold_convert_loc (clause_loc, sizetype, x);
	x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
			     TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
	x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
	tree rprivalmk_ref = build_simple_mem_ref_loc (clause_loc, x);

	/* ... and rprivb[ivar].  */
	x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, ivar,
			     TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
	x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rprivb), rprivb, x);
	tree rprivb_ref = build_simple_mem_ref_loc (clause_loc, x);

	/* var4 is the object the init/merge sequences target; for the
	   for-simd case that is the privatized var itself, otherwise
	   the accumulator var2.  var5/var6 are the SIMD-context copies
	   of the variable in the input resp. scan phase.  */
	tree var4 = is_for_simd ? new_var : var2;
	tree var5 = NULL_TREE, var6 = NULL_TREE;
	if (is_for_simd)
	  {
	    var5 = lookup_decl (var, input_simd_ctx);
	    var6 = lookup_decl (var, scan_simd_ctx);
	    if (new_vard != new_var)
	      {
		var5 = build_simple_mem_ref_loc (clause_loc, var5);
		var6 = build_simple_mem_ref_loc (clause_loc, var6);
	      }
	  }
	if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	  {
	    /* User-defined reduction: drive the UDR ctor/init/merge
	       sequences through the language hooks, temporarily
	       rebinding the placeholder and the privatized decl via
	       DECL_VALUE_EXPR.  */
	    tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
	    tree val = var2;

	    x = lang_hooks.decls.omp_clause_default_ctor
		(c, var2, build_outer_var_ref (var, ctx));
	    if (x)
	      gimplify_and_add (x, &clist);

	    /* Thread 0 starts from the original variable's value.  */
	    x = build_outer_var_ref (var, ctx);
	    x = lang_hooks.decls.omp_clause_assign_op (c, unshare_expr (var4),
						       x);
	    gimplify_and_add (x, &thr01_list);

	    /* Remember any pre-existing DECL_VALUE_EXPR so it can be
	       restored after the temporary rebindings below.  */
	    tree y = (DECL_HAS_VALUE_EXPR_P (new_vard)
		      ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
	    if (var3)
	      {
		/* Neutral element is pre-constructed in var3; copy it.  */
		x = unshare_expr (var4);
		x = lang_hooks.decls.omp_clause_assign_op (c, x, var3);
		gimplify_and_add (x, &thrn1_list);
		x = unshare_expr (var4);
		x = lang_hooks.decls.omp_clause_assign_op (c, x, var3);
		gimplify_and_add (x, &thr02_list);
	      }
	    else if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
	      {
		/* Otherwise, assign to it the identity element.  */
		gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
		tseq = copy_gimple_seq_and_replace_locals (tseq);
		if (!is_for_simd)
		  {
		    if (new_vard != new_var)
		      val = build_fold_addr_expr_loc (clause_loc, val);
		    SET_DECL_VALUE_EXPR (new_vard, val);
		    DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
		  }
		/* error_mark_node: the init sequence must not refer to
		   the placeholder (the omp_orig operand) here.  */
		SET_DECL_VALUE_EXPR (placeholder, error_mark_node);
		DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
		lower_omp (&tseq, ctx);
		gimple_seq_add_seq (&thrn1_list, tseq);
		tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
		lower_omp (&tseq, ctx);
		gimple_seq_add_seq (&thr02_list, tseq);
		SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
		DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
		OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
		if (y)
		  SET_DECL_VALUE_EXPR (new_vard, y);
		else
		  {
		    DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
		    SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
		  }
	      }

	    /* Non-zero threads resume from rpriva[thread_num - 1]
	       before the second loop.  */
	    x = unshare_expr (var4);
	    x = lang_hooks.decls.omp_clause_assign_op (c, x, rprivam1_ref);
	    gimplify_and_add (x, &thrn2_list);

	    if (is_for_simd)
	      {
		/* SIMD case: the first loop just stores the lane value.  */
		x = unshare_expr (rprivb_ref);
		x = lang_hooks.decls.omp_clause_assign_op (c, x, var5);
		gimplify_and_add (x, &scan1_list);
	      }
	    else
	      {
		/* Exclusive scan stores the accumulator BEFORE the
		   merge, inclusive AFTER it.  */
		if (ctx->scan_exclusive)
		  {
		    x = unshare_expr (rprivb_ref);
		    x = lang_hooks.decls.omp_clause_assign_op (c, x, var2);
		    gimplify_and_add (x, &scan1_list);
		  }

		gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
		tseq = copy_gimple_seq_and_replace_locals (tseq);
		SET_DECL_VALUE_EXPR (placeholder, var2);
		DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
		lower_omp (&tseq, ctx);
		gimple_seq_add_seq (&scan1_list, tseq);

		if (ctx->scan_inclusive)
		  {
		    x = unshare_expr (rprivb_ref);
		    x = lang_hooks.decls.omp_clause_assign_op (c, x, var2);
		    gimplify_and_add (x, &scan1_list);
		  }
	      }

	    /* After the first loop: rpriva[thread_num] = var4.  */
	    x = unshare_expr (rpriva_ref);
	    x = lang_hooks.decls.omp_clause_assign_op (c, x,
						       unshare_expr (var4));
	    gimplify_and_add (x, &mdlist);

	    /* Second loop input phase: merge the saved per-iteration
	       value into the running prefix.  */
	    x = unshare_expr (is_for_simd ? var6 : new_var);
	    x = lang_hooks.decls.omp_clause_assign_op (c, x, var4);
	    gimplify_and_add (x, &input2_list);

	    val = rprivb_ref;
	    if (new_vard != new_var)
	      val = build_fold_addr_expr_loc (clause_loc, val);

	    gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
	    tseq = copy_gimple_seq_and_replace_locals (tseq);
	    SET_DECL_VALUE_EXPR (new_vard, val);
	    DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
	    if (is_for_simd)
	      {
		SET_DECL_VALUE_EXPR (placeholder, var6);
		DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
	      }
	    else
	      DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
	    lower_omp (&tseq, ctx);
	    if (y)
	      SET_DECL_VALUE_EXPR (new_vard, y);
	    else
	      {
		DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
		SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
	      }
	    if (!is_for_simd)
	      {
		SET_DECL_VALUE_EXPR (placeholder, new_var);
		DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
		lower_omp (&tseq, ctx);
	      }
	    gimple_seq_add_seq (&input2_list, tseq);

	    /* Final write-back of the complete reduction result.  */
	    x = build_outer_var_ref (var, ctx);
	    x = lang_hooks.decls.omp_clause_assign_op (c, x, rpriva_ref);
	    gimplify_and_add (x, &last_list);

	    /* Prefix-scan step over rpriva, using var2 as a temporary:
	       var2 = rpriva[l - k]; merge (var2, rpriva[l]);
	       rpriva[l] = var2;  */
	    x = lang_hooks.decls.omp_clause_assign_op (c, var2, rprivalmk_ref);
	    gimplify_and_add (x, &reduc_list);
	    tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
	    tseq = copy_gimple_seq_and_replace_locals (tseq);
	    val = rprival_ref;
	    if (new_vard != new_var)
	      val = build_fold_addr_expr_loc (clause_loc, val);
	    SET_DECL_VALUE_EXPR (new_vard, val);
	    DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
	    SET_DECL_VALUE_EXPR (placeholder, var2);
	    lower_omp (&tseq, ctx);
	    OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
	    SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
	    DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
	    if (y)
	      SET_DECL_VALUE_EXPR (new_vard, y);
	    else
	      {
		DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
		SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
	      }
	    gimple_seq_add_seq (&reduc_list, tseq);
	    x = lang_hooks.decls.omp_clause_assign_op (c, rprival_ref, var2);
	    gimplify_and_add (x, &reduc_list);

	    /* Destroy the accumulator at the end.  */
	    x = lang_hooks.decls.omp_clause_dtor (c, var2);
	    if (x)
	      gimplify_and_add (x, dlist);
	  }
	else
	  {
	    /* Built-in reduction operator: same structure as the UDR
	       case above, but with plain assignments and BUILD2 of the
	       reduction code.  */
	    x = build_outer_var_ref (var, ctx);
	    gimplify_assign (unshare_expr (var4), x, &thr01_list);

	    x = omp_reduction_init (c, TREE_TYPE (new_var));
	    gimplify_assign (unshare_expr (var4), unshare_expr (x),
			     &thrn1_list);
	    gimplify_assign (unshare_expr (var4), x, &thr02_list);

	    gimplify_assign (unshare_expr (var4), rprivam1_ref, &thrn2_list);

	    /* Subtraction reductions scan with addition.  */
	    enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
	    if (code == MINUS_EXPR)
	      code = PLUS_EXPR;

	    if (is_for_simd)
	      gimplify_assign (unshare_expr (rprivb_ref), var5, &scan1_list);
	    else
	      {
		/* Exclusive: store before updating; inclusive: after.  */
		if (ctx->scan_exclusive)
		  gimplify_assign (unshare_expr (rprivb_ref), var2,
				   &scan1_list);
		x = build2 (code, TREE_TYPE (new_var), var2, new_var);
		gimplify_assign (var2, x, &scan1_list);
		if (ctx->scan_inclusive)
		  gimplify_assign (unshare_expr (rprivb_ref), var2,
				   &scan1_list);
	      }

	    gimplify_assign (unshare_expr (rpriva_ref), unshare_expr (var4),
			     &mdlist);

	    x = build2 (code, TREE_TYPE (new_var), var4, rprivb_ref);
	    gimplify_assign (is_for_simd ? var6 : new_var, x, &input2_list);

	    gimplify_assign (build_outer_var_ref (var, ctx), rpriva_ref,
			     &last_list);

	    x = build2 (code, TREE_TYPE (new_var), rprivalmk_ref,
			unshare_expr (rprival_ref));
	    gimplify_assign (rprival_ref, x, &reduc_list);
	  }
      }

  /* Bump ivar at the end of each iteration of both loops.  */
  g = gimple_build_assign (ivar, PLUS_EXPR, ivar, size_one_node);
  gimple_seq_add_stmt (&scan1_list, g);
  g = gimple_build_assign (ivar, PLUS_EXPR, ivar, size_one_node);
  gimple_seq_add_stmt (gimple_omp_body_ptr (is_for_simd
					    ? scan_stmt4 : scan_stmt2), g);

  /* Step 5: add the controlb/controlp _scantemp_ clauses to BOTH loops
     (they coordinate deallocation of rprivb after the second loop),
     then prepend all the new clauses to each loop's clause chain.  */
  tree controlb = create_tmp_var (boolean_type_node);
  tree controlp = create_tmp_var (ptr_type_node);
  tree nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
  OMP_CLAUSE_DECL (nc) = controlb;
  OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
  *cp1 = nc;
  cp1 = &OMP_CLAUSE_CHAIN (nc);
  nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
  OMP_CLAUSE_DECL (nc) = controlp;
  OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
  *cp1 = nc;
  cp1 = &OMP_CLAUSE_CHAIN (nc);
  nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
  OMP_CLAUSE_DECL (nc) = controlb;
  OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
  *cp2 = nc;
  cp2 = &OMP_CLAUSE_CHAIN (nc);
  nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
  OMP_CLAUSE_DECL (nc) = controlp;
  OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
  *cp2 = nc;
  cp2 = &OMP_CLAUSE_CHAIN (nc);

  *cp1 = gimple_omp_for_clauses (stmt);
  gimple_omp_for_set_clauses (stmt, new_clauses1);
  *cp2 = gimple_omp_for_clauses (new_stmt);
  gimple_omp_for_set_clauses (new_stmt, new_clauses2);

  /* Step 6: splice the generated phase bodies back in place of the
     input/scan separator statements, which are then removed.  */
  if (is_for_simd)
    {
      gimple_seq_add_seq (gimple_omp_body_ptr (scan_stmt3), scan1_list);
      gimple_seq_add_seq (gimple_omp_body_ptr (input_stmt4), input2_list);

      gsi_insert_seq_after (&input3_gsi, gimple_omp_body (input_stmt3),
			    GSI_SAME_STMT);
      gsi_remove (&input3_gsi, true);
      gsi_insert_seq_after (&scan3_gsi, gimple_omp_body (scan_stmt3),
			    GSI_SAME_STMT);
      gsi_remove (&scan3_gsi, true);
      gsi_insert_seq_after (&input4_gsi, gimple_omp_body (input_stmt4),
			    GSI_SAME_STMT);
      gsi_remove (&input4_gsi, true);
      gsi_insert_seq_after (&scan4_gsi, gimple_omp_body (scan_stmt4),
			    GSI_SAME_STMT);
      gsi_remove (&scan4_gsi, true);
    }
  else
    {
      gimple_omp_set_body (scan_stmt1, scan1_list);
      gimple_omp_set_body (input_stmt2, input2_list);
    }

  gsi_insert_seq_after (&input1_gsi, gimple_omp_body (input_stmt1),
			GSI_SAME_STMT);
  gsi_remove (&input1_gsi, true);
  gsi_insert_seq_after (&scan1_gsi, gimple_omp_body (scan_stmt1),
			GSI_SAME_STMT);
  gsi_remove (&scan1_gsi, true);
  gsi_insert_seq_after (&input2_gsi, gimple_omp_body (input_stmt2),
			GSI_SAME_STMT);
  gsi_remove (&input2_gsi, true);
  gsi_insert_seq_after (&scan2_gsi, gimple_omp_body (scan_stmt2),
			GSI_SAME_STMT);
  gsi_remove (&scan2_gsi, true);

  /* Step 7: emit the surrounding control flow (see the function
     comment above for the overall shape).  First the constructors and
     the thread_num == 0 dispatch before the first loop.  */
  gimple_seq_add_seq (body_p, clist);

  tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
  tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
  tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
  g = gimple_build_cond (EQ_EXPR, thread_num, integer_zero_node, lab1, lab2);
  gimple_seq_add_stmt (body_p, g);
  g = gimple_build_label (lab1);
  gimple_seq_add_stmt (body_p, g);
  gimple_seq_add_seq (body_p, thr01_list);
  g = gimple_build_goto (lab3);
  gimple_seq_add_stmt (body_p, g);
  g = gimple_build_label (lab2);
  gimple_seq_add_stmt (body_p, g);
  gimple_seq_add_seq (body_p, thrn1_list);
  g = gimple_build_label (lab3);
  gimple_seq_add_stmt (body_p, g);

  g = gimple_build_assign (ivar, size_zero_node);
  gimple_seq_add_stmt (body_p, g);

  /* The first (input-phase) loop, nowait-returned.  */
  gimple_seq_add_stmt (body_p, stmt);
  gimple_seq_add_seq (body_p, body);
  gimple_seq_add_stmt (body_p, gimple_build_omp_continue (fd->loop.v,
							  fd->loop.v));

  g = gimple_build_omp_return (true);
  gimple_seq_add_stmt (body_p, g);
  gimple_seq_add_seq (body_p, mdlist);

  /* If num_threads > 1, run the barrier-synchronized work-efficient
     parallel prefix scan over rpriva.  */
  lab1 = create_artificial_label (UNKNOWN_LOCATION);
  lab2 = create_artificial_label (UNKNOWN_LOCATION);
  g = gimple_build_cond (GT_EXPR, num_threads, integer_one_node, lab1, lab2);
  gimple_seq_add_stmt (body_p, g);
  g = gimple_build_label (lab1);
  gimple_seq_add_stmt (body_p, g);

  g = omp_build_barrier (NULL);
  gimple_seq_add_stmt (body_p, g);

  /* down acts as a flag (0 / all-ones) selecting the up-sweep vs
     down-sweep phase of the scan; k is the current stride.  */
  tree down = create_tmp_var (unsigned_type_node);
  g = gimple_build_assign (down, build_zero_cst (unsigned_type_node));
  gimple_seq_add_stmt (body_p, g);

  g = gimple_build_assign (k, build_one_cst (unsigned_type_node));
  gimple_seq_add_stmt (body_p, g);

  tree num_threadsu = create_tmp_var (unsigned_type_node);
  g = gimple_build_assign (num_threadsu, NOP_EXPR, num_threads);
  gimple_seq_add_stmt (body_p, g);

  tree thread_numu = create_tmp_var (unsigned_type_node);
  g = gimple_build_assign (thread_numu, NOP_EXPR, thread_num);
  gimple_seq_add_stmt (body_p, g);

  tree thread_nump1 = create_tmp_var (unsigned_type_node);
  g = gimple_build_assign (thread_nump1, PLUS_EXPR, thread_numu,
			   build_int_cst (unsigned_type_node, 1));
  gimple_seq_add_stmt (body_p, g);

  /* Top of the scan loop (<D.2108> in the function comment).  */
  lab3 = create_artificial_label (UNKNOWN_LOCATION);
  g = gimple_build_label (lab3);
  gimple_seq_add_stmt (body_p, g);

  tree twok = create_tmp_var (unsigned_type_node);
  g = gimple_build_assign (twok, LSHIFT_EXPR, k, integer_one_node);
  gimple_seq_add_stmt (body_p, g);

  /* When 2*k exceeds the thread count, switch to the down-sweep.  */
  tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
  tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
  tree lab6 = create_artificial_label (UNKNOWN_LOCATION);
  g = gimple_build_cond (GT_EXPR, twok, num_threadsu, lab4, lab5);
  gimple_seq_add_stmt (body_p, g);
  g = gimple_build_label (lab4);
  gimple_seq_add_stmt (body_p, g);
  g = gimple_build_assign (down, build_all_ones_cst (unsigned_type_node));
  gimple_seq_add_stmt (body_p, g);
  g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
  gimple_seq_add_stmt (body_p, g);

  g = gimple_build_cond (EQ_EXPR, k, num_threadsu, lab6, lab5);
  gimple_seq_add_stmt (body_p, g);
  g = gimple_build_label (lab6);
  gimple_seq_add_stmt (body_p, g);

  g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
  gimple_seq_add_stmt (body_p, g);

  g = gimple_build_label (lab5);
  gimple_seq_add_stmt (body_p, g);

  g = gimple_build_assign (twok, LSHIFT_EXPR, k, integer_one_node);
  gimple_seq_add_stmt (body_p, g);

  /* l = (thread_num + 1) * 2k + ((k & down) - 1), computed with
     overflow checking so out-of-range threads skip the merge.  */
  tree cplx = create_tmp_var (build_complex_type (unsigned_type_node, false));
  DECL_GIMPLE_REG_P (cplx) = 1;
  g = gimple_build_call_internal (IFN_MUL_OVERFLOW, 2, thread_nump1, twok);
  gimple_call_set_lhs (g, cplx);
  gimple_seq_add_stmt (body_p, g);
  tree mul = create_tmp_var (unsigned_type_node);
  g = gimple_build_assign (mul, REALPART_EXPR,
			   build1 (REALPART_EXPR, unsigned_type_node, cplx));
  gimple_seq_add_stmt (body_p, g);
  tree ovf = create_tmp_var (unsigned_type_node);
  g = gimple_build_assign (ovf, IMAGPART_EXPR,
			   build1 (IMAGPART_EXPR, unsigned_type_node, cplx));
  gimple_seq_add_stmt (body_p, g);

  tree lab7 = create_artificial_label (UNKNOWN_LOCATION);
  tree lab8 = create_artificial_label (UNKNOWN_LOCATION);
  g = gimple_build_cond (EQ_EXPR, ovf, build_zero_cst (unsigned_type_node),
			 lab7, lab8);
  gimple_seq_add_stmt (body_p, g);
  g = gimple_build_label (lab7);
  gimple_seq_add_stmt (body_p, g);

  tree andv = create_tmp_var (unsigned_type_node);
  g = gimple_build_assign (andv, BIT_AND_EXPR, k, down);
  gimple_seq_add_stmt (body_p, g);
  tree andvm1 = create_tmp_var (unsigned_type_node);
  g = gimple_build_assign (andvm1, PLUS_EXPR, andv,
			   build_minus_one_cst (unsigned_type_node));
  gimple_seq_add_stmt (body_p, g);

  g = gimple_build_assign (l, PLUS_EXPR, mul, andvm1);
  gimple_seq_add_stmt (body_p, g);

  /* Guarded merge step: rpriva[l] op= rpriva[l - k] when l is in
     range.  */
  tree lab9 = create_artificial_label (UNKNOWN_LOCATION);
  g = gimple_build_cond (LT_EXPR, l, num_threadsu, lab9, lab8);
  gimple_seq_add_stmt (body_p, g);
  g = gimple_build_label (lab9);
  gimple_seq_add_stmt (body_p, g);
  gimple_seq_add_seq (body_p, reduc_list);
  g = gimple_build_label (lab8);
  gimple_seq_add_stmt (body_p, g);

  /* Double the stride during the up-sweep, halve during down-sweep.  */
  tree lab10 = create_artificial_label (UNKNOWN_LOCATION);
  tree lab11 = create_artificial_label (UNKNOWN_LOCATION);
  tree lab12 = create_artificial_label (UNKNOWN_LOCATION);
  g = gimple_build_cond (EQ_EXPR, down, build_zero_cst (unsigned_type_node),
			 lab10, lab11);
  gimple_seq_add_stmt (body_p, g);
  g = gimple_build_label (lab10);
  gimple_seq_add_stmt (body_p, g);
  g = gimple_build_assign (k, LSHIFT_EXPR, k, integer_one_node);
  gimple_seq_add_stmt (body_p, g);
  g = gimple_build_goto (lab12);
  gimple_seq_add_stmt (body_p, g);
  g = gimple_build_label (lab11);
  gimple_seq_add_stmt (body_p, g);
  g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
  gimple_seq_add_stmt (body_p, g);
  g = gimple_build_label (lab12);
  gimple_seq_add_stmt (body_p, g);

  g = omp_build_barrier (NULL);
  gimple_seq_add_stmt (body_p, g);

  /* Loop while the stride is non-zero.  */
  g = gimple_build_cond (NE_EXPR, k, build_zero_cst (unsigned_type_node),
			 lab3, lab2);
  gimple_seq_add_stmt (body_p, g);

  g = gimple_build_label (lab2);
  gimple_seq_add_stmt (body_p, g);

  /* Step 8: per-thread re-initialization before the second loop:
     thread 0 from the identity (thr02_list), others from
     rpriva[thread_num - 1] (thrn2_list).  */
  lab1 = create_artificial_label (UNKNOWN_LOCATION);
  lab2 = create_artificial_label (UNKNOWN_LOCATION);
  lab3 = create_artificial_label (UNKNOWN_LOCATION);
  g = gimple_build_cond (EQ_EXPR, thread_num, integer_zero_node, lab1, lab2);
  gimple_seq_add_stmt (body_p, g);
  g = gimple_build_label (lab1);
  gimple_seq_add_stmt (body_p, g);
  gimple_seq_add_seq (body_p, thr02_list);
  g = gimple_build_goto (lab3);
  gimple_seq_add_stmt (body_p, g);
  g = gimple_build_label (lab2);
  gimple_seq_add_stmt (body_p, g);
  gimple_seq_add_seq (body_p, thrn2_list);
  g = gimple_build_label (lab3);
  gimple_seq_add_stmt (body_p, g);

  /* The second (scan-phase) loop.  */
  g = gimple_build_assign (ivar, size_zero_node);
  gimple_seq_add_stmt (body_p, g);
  gimple_seq_add_stmt (body_p, new_stmt);
  gimple_seq_add_seq (body_p, new_body);

  /* Step 9: prepend to *DLIST the final write-back (last_list), done
     only by the last thread (thread_num == num_threads - 1).  */
  gimple_seq new_dlist = NULL;
  lab1 = create_artificial_label (UNKNOWN_LOCATION);
  lab2 = create_artificial_label (UNKNOWN_LOCATION);
  tree num_threadsm1 = create_tmp_var (integer_type_node);
  g = gimple_build_assign (num_threadsm1, PLUS_EXPR, num_threads,
			   integer_minus_one_node);
  gimple_seq_add_stmt (&new_dlist, g);
  g = gimple_build_cond (EQ_EXPR, thread_num, num_threadsm1, lab1, lab2);
  gimple_seq_add_stmt (&new_dlist, g);
  g = gimple_build_label (lab1);
  gimple_seq_add_stmt (&new_dlist, g);
  gimple_seq_add_seq (&new_dlist, last_list);
  g = gimple_build_label (lab2);
  gimple_seq_add_stmt (&new_dlist, g);
  gimple_seq_add_seq (&new_dlist, *dlist);
  *dlist = new_dlist;
}
10464
10465 /* Lower code for an OMP loop directive. */
10466
10467 static void
lower_omp_for(gimple_stmt_iterator * gsi_p,omp_context * ctx)10468 lower_omp_for (gimple_stmt_iterator *gsi_p, omp_context *ctx)
10469 {
10470 tree *rhs_p, block;
10471 struct omp_for_data fd, *fdp = NULL;
10472 gomp_for *stmt = as_a <gomp_for *> (gsi_stmt (*gsi_p));
10473 gbind *new_stmt;
10474 gimple_seq omp_for_body, body, dlist, tred_ilist = NULL, tred_dlist = NULL;
10475 gimple_seq cnt_list = NULL, clist = NULL;
10476 gimple_seq oacc_head = NULL, oacc_tail = NULL;
10477 size_t i;
10478
10479 push_gimplify_context ();
10480
10481 lower_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
10482
10483 block = make_node (BLOCK);
10484 new_stmt = gimple_build_bind (NULL, NULL, block);
10485 /* Replace at gsi right away, so that 'stmt' is no member
10486 of a sequence anymore as we're going to add to a different
10487 one below. */
10488 gsi_replace (gsi_p, new_stmt, true);
10489
10490 /* Move declaration of temporaries in the loop body before we make
10491 it go away. */
10492 omp_for_body = gimple_omp_body (stmt);
10493 if (!gimple_seq_empty_p (omp_for_body)
10494 && gimple_code (gimple_seq_first_stmt (omp_for_body)) == GIMPLE_BIND)
10495 {
10496 gbind *inner_bind
10497 = as_a <gbind *> (gimple_seq_first_stmt (omp_for_body));
10498 tree vars = gimple_bind_vars (inner_bind);
10499 gimple_bind_append_vars (new_stmt, vars);
10500 /* bind_vars/BLOCK_VARS are being moved to new_stmt/block, don't
10501 keep them on the inner_bind and it's block. */
10502 gimple_bind_set_vars (inner_bind, NULL_TREE);
10503 if (gimple_bind_block (inner_bind))
10504 BLOCK_VARS (gimple_bind_block (inner_bind)) = NULL_TREE;
10505 }
10506
10507 if (gimple_omp_for_combined_into_p (stmt))
10508 {
10509 omp_extract_for_data (stmt, &fd, NULL);
10510 fdp = &fd;
10511
10512 /* We need two temporaries with fd.loop.v type (istart/iend)
10513 and then (fd.collapse - 1) temporaries with the same
10514 type for count2 ... countN-1 vars if not constant. */
10515 size_t count = 2;
10516 tree type = fd.iter_type;
10517 if (fd.collapse > 1
10518 && TREE_CODE (fd.loop.n2) != INTEGER_CST)
10519 count += fd.collapse - 1;
10520 bool taskreg_for
10521 = (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR
10522 || gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP);
10523 tree outerc = NULL, *pc = gimple_omp_for_clauses_ptr (stmt);
10524 tree simtc = NULL;
10525 tree clauses = *pc;
10526 if (taskreg_for)
10527 outerc
10528 = omp_find_clause (gimple_omp_taskreg_clauses (ctx->outer->stmt),
10529 OMP_CLAUSE__LOOPTEMP_);
10530 if (ctx->simt_stmt)
10531 simtc = omp_find_clause (gimple_omp_for_clauses (ctx->simt_stmt),
10532 OMP_CLAUSE__LOOPTEMP_);
10533 for (i = 0; i < count; i++)
10534 {
10535 tree temp;
10536 if (taskreg_for)
10537 {
10538 gcc_assert (outerc);
10539 temp = lookup_decl (OMP_CLAUSE_DECL (outerc), ctx->outer);
10540 outerc = omp_find_clause (OMP_CLAUSE_CHAIN (outerc),
10541 OMP_CLAUSE__LOOPTEMP_);
10542 }
10543 else
10544 {
10545 /* If there are 2 adjacent SIMD stmts, one with _simt_
10546 clause, another without, make sure they have the same
10547 decls in _looptemp_ clauses, because the outer stmt
10548 they are combined into will look up just one inner_stmt. */
10549 if (ctx->simt_stmt)
10550 temp = OMP_CLAUSE_DECL (simtc);
10551 else
10552 temp = create_tmp_var (type);
10553 insert_decl_map (&ctx->outer->cb, temp, temp);
10554 }
10555 *pc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
10556 OMP_CLAUSE_DECL (*pc) = temp;
10557 pc = &OMP_CLAUSE_CHAIN (*pc);
10558 if (ctx->simt_stmt)
10559 simtc = omp_find_clause (OMP_CLAUSE_CHAIN (simtc),
10560 OMP_CLAUSE__LOOPTEMP_);
10561 }
10562 *pc = clauses;
10563 }
10564
10565 /* The pre-body and input clauses go before the lowered GIMPLE_OMP_FOR. */
10566 dlist = NULL;
10567 body = NULL;
10568 tree rclauses
10569 = omp_task_reductions_find_first (gimple_omp_for_clauses (stmt), OMP_FOR,
10570 OMP_CLAUSE_REDUCTION);
10571 tree rtmp = NULL_TREE;
10572 if (rclauses)
10573 {
10574 tree type = build_pointer_type (pointer_sized_int_node);
10575 tree temp = create_tmp_var (type);
10576 tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
10577 OMP_CLAUSE_DECL (c) = temp;
10578 OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (stmt);
10579 gimple_omp_for_set_clauses (stmt, c);
10580 lower_omp_task_reductions (ctx, OMP_FOR,
10581 gimple_omp_for_clauses (stmt),
10582 &tred_ilist, &tred_dlist);
10583 rclauses = c;
10584 rtmp = make_ssa_name (type);
10585 gimple_seq_add_stmt (&body, gimple_build_assign (rtmp, temp));
10586 }
10587
10588 lower_lastprivate_conditional_clauses (gimple_omp_for_clauses_ptr (stmt),
10589 ctx);
10590
10591 lower_rec_input_clauses (gimple_omp_for_clauses (stmt), &body, &dlist, ctx,
10592 fdp);
10593 gimple_seq_add_seq (rclauses ? &tred_ilist : &body,
10594 gimple_omp_for_pre_body (stmt));
10595
10596 lower_omp (gimple_omp_body_ptr (stmt), ctx);
10597
10598 /* Lower the header expressions. At this point, we can assume that
10599 the header is of the form:
10600
10601 #pragma omp for (V = VAL1; V {<|>|<=|>=} VAL2; V = V [+-] VAL3)
10602
10603 We just need to make sure that VAL1, VAL2 and VAL3 are lowered
10604 using the .omp_data_s mapping, if needed. */
10605 for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
10606 {
10607 rhs_p = gimple_omp_for_initial_ptr (stmt, i);
10608 if (!is_gimple_min_invariant (*rhs_p))
10609 *rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
10610 else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
10611 recompute_tree_invariant_for_addr_expr (*rhs_p);
10612
10613 rhs_p = gimple_omp_for_final_ptr (stmt, i);
10614 if (!is_gimple_min_invariant (*rhs_p))
10615 *rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
10616 else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
10617 recompute_tree_invariant_for_addr_expr (*rhs_p);
10618
10619 rhs_p = &TREE_OPERAND (gimple_omp_for_incr (stmt, i), 1);
10620 if (!is_gimple_min_invariant (*rhs_p))
10621 *rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
10622 }
10623 if (rclauses)
10624 gimple_seq_add_seq (&tred_ilist, cnt_list);
10625 else
10626 gimple_seq_add_seq (&body, cnt_list);
10627
10628 /* Once lowered, extract the bounds and clauses. */
10629 omp_extract_for_data (stmt, &fd, NULL);
10630
10631 if (is_gimple_omp_oacc (ctx->stmt)
10632 && !ctx_in_oacc_kernels_region (ctx))
10633 lower_oacc_head_tail (gimple_location (stmt),
10634 gimple_omp_for_clauses (stmt),
10635 &oacc_head, &oacc_tail, ctx);
10636
10637 /* Add OpenACC partitioning and reduction markers just before the loop. */
10638 if (oacc_head)
10639 gimple_seq_add_seq (&body, oacc_head);
10640
10641 lower_omp_for_lastprivate (&fd, &body, &dlist, &clist, ctx);
10642
10643 if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
10644 for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
10645 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
10646 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
10647 {
10648 OMP_CLAUSE_DECL (c) = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
10649 if (DECL_P (OMP_CLAUSE_LINEAR_STEP (c)))
10650 OMP_CLAUSE_LINEAR_STEP (c)
10651 = maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_LINEAR_STEP (c),
10652 ctx);
10653 }
10654
10655 bool phony_loop = (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_GRID_LOOP
10656 && gimple_omp_for_grid_phony (stmt));
10657 if ((ctx->scan_inclusive || ctx->scan_exclusive)
10658 && gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
10659 {
10660 gcc_assert (!phony_loop);
10661 lower_omp_for_scan (&body, &dlist, stmt, &fd, ctx);
10662 }
10663 else
10664 {
10665 if (!phony_loop)
10666 gimple_seq_add_stmt (&body, stmt);
10667 gimple_seq_add_seq (&body, gimple_omp_body (stmt));
10668 }
10669
10670 if (!phony_loop)
10671 gimple_seq_add_stmt (&body, gimple_build_omp_continue (fd.loop.v,
10672 fd.loop.v));
10673
10674 /* After the loop, add exit clauses. */
10675 lower_reduction_clauses (gimple_omp_for_clauses (stmt), &body, &clist, ctx);
10676
10677 if (clist)
10678 {
10679 tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START);
10680 gcall *g = gimple_build_call (fndecl, 0);
10681 gimple_seq_add_stmt (&body, g);
10682 gimple_seq_add_seq (&body, clist);
10683 fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END);
10684 g = gimple_build_call (fndecl, 0);
10685 gimple_seq_add_stmt (&body, g);
10686 }
10687
10688 if (ctx->cancellable)
10689 gimple_seq_add_stmt (&body, gimple_build_label (ctx->cancel_label));
10690
10691 gimple_seq_add_seq (&body, dlist);
10692
10693 if (rclauses)
10694 {
10695 gimple_seq_add_seq (&tred_ilist, body);
10696 body = tred_ilist;
10697 }
10698
10699 body = maybe_catch_exception (body);
10700
10701 if (!phony_loop)
10702 {
10703 /* Region exit marker goes at the end of the loop body. */
10704 gimple *g = gimple_build_omp_return (fd.have_nowait);
10705 gimple_seq_add_stmt (&body, g);
10706
10707 gimple_seq_add_seq (&body, tred_dlist);
10708
10709 maybe_add_implicit_barrier_cancel (ctx, g, &body);
10710
10711 if (rclauses)
10712 OMP_CLAUSE_DECL (rclauses) = rtmp;
10713 }
10714
10715 /* Add OpenACC joining and reduction markers just after the loop. */
10716 if (oacc_tail)
10717 gimple_seq_add_seq (&body, oacc_tail);
10718
10719 pop_gimplify_context (new_stmt);
10720
10721 gimple_bind_append_vars (new_stmt, ctx->block_vars);
10722 maybe_remove_omp_member_access_dummy_vars (new_stmt);
10723 BLOCK_VARS (block) = gimple_bind_vars (new_stmt);
10724 if (BLOCK_VARS (block))
10725 TREE_USED (block) = 1;
10726
10727 gimple_bind_set_body (new_stmt, body);
10728 gimple_omp_set_body (stmt, NULL);
10729 gimple_omp_for_set_pre_body (stmt, NULL);
10730 }
10731
10732 /* Callback for walk_stmts. Check if the current statement only contains
10733 GIMPLE_OMP_FOR or GIMPLE_OMP_SECTIONS. */
10734
10735 static tree
check_combined_parallel(gimple_stmt_iterator * gsi_p,bool * handled_ops_p,struct walk_stmt_info * wi)10736 check_combined_parallel (gimple_stmt_iterator *gsi_p,
10737 bool *handled_ops_p,
10738 struct walk_stmt_info *wi)
10739 {
10740 int *info = (int *) wi->info;
10741 gimple *stmt = gsi_stmt (*gsi_p);
10742
10743 *handled_ops_p = true;
10744 switch (gimple_code (stmt))
10745 {
10746 WALK_SUBSTMTS;
10747
10748 case GIMPLE_DEBUG:
10749 break;
10750 case GIMPLE_OMP_FOR:
10751 case GIMPLE_OMP_SECTIONS:
10752 *info = *info == 0 ? 1 : -1;
10753 break;
10754 default:
10755 *info = -1;
10756 break;
10757 }
10758 return NULL;
10759 }
10760
/* Context passed to the tree-inline remapping callbacks while building
   a task copy function (see create_task_copyfn).  */

struct omp_taskcopy_context
{
  /* This field must be at the beginning, as we do "inheritance": Some
     callback functions for tree-inline.c (e.g., omp_copy_decl)
     receive a copy_body_data pointer that is up-casted to an
     omp_context pointer.  */
  copy_body_data cb;
  /* The OMP context of the task whose copy function is being built;
     its sfield_map is consulted when remapping decls.  */
  omp_context *ctx;
};
10770
10771 static tree
task_copyfn_copy_decl(tree var,copy_body_data * cb)10772 task_copyfn_copy_decl (tree var, copy_body_data *cb)
10773 {
10774 struct omp_taskcopy_context *tcctx = (struct omp_taskcopy_context *) cb;
10775
10776 if (splay_tree_lookup (tcctx->ctx->sfield_map, (splay_tree_key) var))
10777 return create_tmp_var (TREE_TYPE (var));
10778
10779 return var;
10780 }
10781
10782 static tree
task_copyfn_remap_type(struct omp_taskcopy_context * tcctx,tree orig_type)10783 task_copyfn_remap_type (struct omp_taskcopy_context *tcctx, tree orig_type)
10784 {
10785 tree name, new_fields = NULL, type, f;
10786
10787 type = lang_hooks.types.make_type (RECORD_TYPE);
10788 name = DECL_NAME (TYPE_NAME (orig_type));
10789 name = build_decl (gimple_location (tcctx->ctx->stmt),
10790 TYPE_DECL, name, type);
10791 TYPE_NAME (type) = name;
10792
10793 for (f = TYPE_FIELDS (orig_type); f ; f = TREE_CHAIN (f))
10794 {
10795 tree new_f = copy_node (f);
10796 DECL_CONTEXT (new_f) = type;
10797 TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &tcctx->cb);
10798 TREE_CHAIN (new_f) = new_fields;
10799 walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &tcctx->cb, NULL);
10800 walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r, &tcctx->cb, NULL);
10801 walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
10802 &tcctx->cb, NULL);
10803 new_fields = new_f;
10804 tcctx->cb.decl_map->put (f, new_f);
10805 }
10806 TYPE_FIELDS (type) = nreverse (new_fields);
10807 layout_type (type);
10808 return type;
10809 }
10810
/* Create task copyfn.  Populate the body of the copy function for
   TASK_STMT: the function libgomp calls to initialize the task's data
   block (first argument ARG, pointing to CTX->record_type) from the
   parent's sender record (second argument SARG, pointing to
   CTX->srecord_type), running language copy constructors for
   firstprivate variables where required.  */

static void
create_task_copyfn (gomp_task *task_stmt, omp_context *ctx)
{
  struct function *child_cfun;
  tree child_fn, t, c, src, dst, f, sf, arg, sarg, decl;
  tree record_type, srecord_type, bind, list;
  bool record_needs_remap = false, srecord_needs_remap = false;
  splay_tree_node n;
  struct omp_taskcopy_context tcctx;
  location_t loc = gimple_location (task_stmt);
  /* Counter of _looptemp_ clauses seen so far; see below.  */
  size_t looptempno = 0;

  child_fn = gimple_omp_task_copy_fn (task_stmt);
  child_cfun = DECL_STRUCT_FUNCTION (child_fn);
  gcc_assert (child_cfun->cfg == NULL);
  DECL_SAVED_TREE (child_fn) = alloc_stmt_list ();

  /* Reset DECL_CONTEXT on function arguments.  */
  for (t = DECL_ARGUMENTS (child_fn); t; t = DECL_CHAIN (t))
    DECL_CONTEXT (t) = child_fn;

  /* Populate the function.  */
  push_gimplify_context ();
  push_cfun (child_cfun);

  bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
  TREE_SIDE_EFFECTS (bind) = 1;
  list = NULL;
  DECL_SAVED_TREE (child_fn) = bind;
  DECL_SOURCE_LOCATION (child_fn) = gimple_location (task_stmt);

  /* Remap src and dst argument types if needed.  A remap is needed when
     a record contains variably modified types, whose sizes reference
     decls of the parent function.  */
  record_type = ctx->record_type;
  srecord_type = ctx->srecord_type;
  for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
    if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
      {
	record_needs_remap = true;
	break;
      }
  for (f = TYPE_FIELDS (srecord_type); f ; f = DECL_CHAIN (f))
    if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
      {
	srecord_needs_remap = true;
	break;
      }

  if (record_needs_remap || srecord_needs_remap)
    {
      /* Set up a copy_body_data for the remapping callbacks; decl_map
	 doubles as the flag that remapping took place (checked below).  */
      memset (&tcctx, '\0', sizeof (tcctx));
      tcctx.cb.src_fn = ctx->cb.src_fn;
      tcctx.cb.dst_fn = child_fn;
      tcctx.cb.src_node = cgraph_node::get (tcctx.cb.src_fn);
      gcc_checking_assert (tcctx.cb.src_node);
      tcctx.cb.dst_node = tcctx.cb.src_node;
      tcctx.cb.src_cfun = ctx->cb.src_cfun;
      tcctx.cb.copy_decl = task_copyfn_copy_decl;
      tcctx.cb.eh_lp_nr = 0;
      tcctx.cb.transform_call_graph_edges = CB_CGE_MOVE;
      tcctx.cb.decl_map = new hash_map<tree, tree>;
      tcctx.ctx = ctx;

      if (record_needs_remap)
	record_type = task_copyfn_remap_type (&tcctx, record_type);
      if (srecord_needs_remap)
	srecord_type = task_copyfn_remap_type (&tcctx, srecord_type);
    }
  else
    tcctx.cb.decl_map = NULL;

  /* Give the two pointer arguments their (possibly remapped) types.  */
  arg = DECL_ARGUMENTS (child_fn);
  TREE_TYPE (arg) = build_pointer_type (record_type);
  sarg = DECL_CHAIN (arg);
  TREE_TYPE (sarg) = build_pointer_type (srecord_type);

  /* First pass: initialize temporaries used in record_type and srecord_type
     sizes and field offsets.  */
  if (tcctx.cb.decl_map)
    for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
	{
	  tree *p;

	  decl = OMP_CLAUSE_DECL (c);
	  p = tcctx.cb.decl_map->get (decl);
	  if (p == NULL)
	    continue;
	  /* Load the value from the corresponding sender field into the
	     remapped temporary: *p = sarg->sf.  */
	  n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
	  sf = (tree) n->value;
	  sf = *tcctx.cb.decl_map->get (sf);
	  src = build_simple_mem_ref_loc (loc, sarg);
	  src = omp_build_component_ref (src, sf);
	  t = build2 (MODIFY_EXPR, TREE_TYPE (*p), *p, src);
	  append_to_statement_list (t, &list);
	}

  /* Second pass: copy shared var pointers and copy construct non-VLA
     firstprivate vars.  */
  for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
    switch (OMP_CLAUSE_CODE (c))
      {
	splay_tree_key key;
      case OMP_CLAUSE_SHARED:
	decl = OMP_CLAUSE_DECL (c);
	key = (splay_tree_key) decl;
	/* SHARED_FIRSTPRIVATE decls are keyed by &DECL_UID rather than
	   by the decl itself elsewhere in this file; match that here.  */
	if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	  key = (splay_tree_key) &DECL_UID (decl);
	n = splay_tree_lookup (ctx->field_map, key);
	if (n == NULL)
	  break;
	f = (tree) n->value;
	if (tcctx.cb.decl_map)
	  f = *tcctx.cb.decl_map->get (f);
	n = splay_tree_lookup (ctx->sfield_map, key);
	sf = (tree) n->value;
	if (tcctx.cb.decl_map)
	  sf = *tcctx.cb.decl_map->get (sf);
	/* arg->f = sarg->sf (plain pointer copy).  */
	src = build_simple_mem_ref_loc (loc, sarg);
	src = omp_build_component_ref (src, sf);
	dst = build_simple_mem_ref_loc (loc, arg);
	dst = omp_build_component_ref (dst, f);
	t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
	append_to_statement_list (t, &list);
	break;
      case OMP_CLAUSE_REDUCTION:
      case OMP_CLAUSE_IN_REDUCTION:
	decl = OMP_CLAUSE_DECL (c);
	/* For array-section reductions, strip the MEM_REF wrapping to
	   get at the underlying base decl used as the map key.  */
	if (TREE_CODE (decl) == MEM_REF)
	  {
	    decl = TREE_OPERAND (decl, 0);
	    if (TREE_CODE (decl) == POINTER_PLUS_EXPR)
	      decl = TREE_OPERAND (decl, 0);
	    if (TREE_CODE (decl) == INDIRECT_REF
		|| TREE_CODE (decl) == ADDR_EXPR)
	      decl = TREE_OPERAND (decl, 0);
	  }
	key = (splay_tree_key) decl;
	n = splay_tree_lookup (ctx->field_map, key);
	if (n == NULL)
	  break;
	f = (tree) n->value;
	if (tcctx.cb.decl_map)
	  f = *tcctx.cb.decl_map->get (f);
	n = splay_tree_lookup (ctx->sfield_map, key);
	sf = (tree) n->value;
	if (tcctx.cb.decl_map)
	  sf = *tcctx.cb.decl_map->get (sf);
	src = build_simple_mem_ref_loc (loc, sarg);
	src = omp_build_component_ref (src, sf);
	/* Extra dereference for reference-to-pointer bases of stripped
	   array sections.  */
	if (decl != OMP_CLAUSE_DECL (c)
	    && TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
	    && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE)
	  src = build_simple_mem_ref_loc (loc, src);
	dst = build_simple_mem_ref_loc (loc, arg);
	dst = omp_build_component_ref (dst, f);
	t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
	append_to_statement_list (t, &list);
	break;
      case OMP_CLAUSE__LOOPTEMP_:
	/* Fields for first two _looptemp_ clauses are initialized by
	   GOMP_taskloop*, the rest are handled like firstprivate.  */
	if (looptempno < 2)
	  {
	    looptempno++;
	    break;
	  }
	/* FALLTHRU */
      case OMP_CLAUSE__REDUCTEMP_:
      case OMP_CLAUSE_FIRSTPRIVATE:
	decl = OMP_CLAUSE_DECL (c);
	/* VLA firstprivates are dealt with in the last pass below.  */
	if (is_variable_sized (decl))
	  break;
	n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
	if (n == NULL)
	  break;
	f = (tree) n->value;
	if (tcctx.cb.decl_map)
	  f = *tcctx.cb.decl_map->get (f);
	n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
	if (n != NULL)
	  {
	    /* Source is the sender record field, dereferenced once more
	       if it is passed by pointer/reference.  */
	    sf = (tree) n->value;
	    if (tcctx.cb.decl_map)
	      sf = *tcctx.cb.decl_map->get (sf);
	    src = build_simple_mem_ref_loc (loc, sarg);
	    src = omp_build_component_ref (src, sf);
	    if (use_pointer_for_field (decl, NULL) || omp_is_reference (decl))
	      src = build_simple_mem_ref_loc (loc, src);
	  }
	else
	  /* No sender field: copy from the decl itself.  */
	  src = decl;
	dst = build_simple_mem_ref_loc (loc, arg);
	dst = omp_build_component_ref (dst, f);
	/* Only FIRSTPRIVATE uses the language copy constructor; the
	   internal temp clauses are plain assignments.  */
	if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
	  t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
	else
	  t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
	append_to_statement_list (t, &list);
	break;
      case OMP_CLAUSE_PRIVATE:
	if (! OMP_CLAUSE_PRIVATE_OUTER_REF (c))
	  break;
	decl = OMP_CLAUSE_DECL (c);
	n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
	f = (tree) n->value;
	if (tcctx.cb.decl_map)
	  f = *tcctx.cb.decl_map->get (f);
	n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
	if (n != NULL)
	  {
	    sf = (tree) n->value;
	    if (tcctx.cb.decl_map)
	      sf = *tcctx.cb.decl_map->get (sf);
	    src = build_simple_mem_ref_loc (loc, sarg);
	    src = omp_build_component_ref (src, sf);
	    if (use_pointer_for_field (decl, NULL))
	      src = build_simple_mem_ref_loc (loc, src);
	  }
	else
	  src = decl;
	dst = build_simple_mem_ref_loc (loc, arg);
	dst = omp_build_component_ref (dst, f);
	t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
	append_to_statement_list (t, &list);
	break;
      default:
	break;
      }

  /* Last pass: handle VLA firstprivates.  */
  if (tcctx.cb.decl_map)
    for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
	{
	  tree ind, ptr, df;

	  decl = OMP_CLAUSE_DECL (c);
	  if (!is_variable_sized (decl))
	    continue;
	  n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
	  if (n == NULL)
	    continue;
	  f = (tree) n->value;
	  f = *tcctx.cb.decl_map->get (f);
	  /* A VLA decl has a DECL_VALUE_EXPR of the form *ptr_decl;
	     the pointer decl is what the maps are keyed on.  */
	  gcc_assert (DECL_HAS_VALUE_EXPR_P (decl));
	  ind = DECL_VALUE_EXPR (decl);
	  gcc_assert (TREE_CODE (ind) == INDIRECT_REF);
	  gcc_assert (DECL_P (TREE_OPERAND (ind, 0)));
	  n = splay_tree_lookup (ctx->sfield_map,
				 (splay_tree_key) TREE_OPERAND (ind, 0));
	  sf = (tree) n->value;
	  sf = *tcctx.cb.decl_map->get (sf);
	  /* Copy-construct the VLA data: arg->f = copy (*sarg->sf).  */
	  src = build_simple_mem_ref_loc (loc, sarg);
	  src = omp_build_component_ref (src, sf);
	  src = build_simple_mem_ref_loc (loc, src);
	  dst = build_simple_mem_ref_loc (loc, arg);
	  dst = omp_build_component_ref (dst, f);
	  t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
	  append_to_statement_list (t, &list);
	  /* And point the receiver's pointer field at the copy:
	     arg->df = &arg->f.  */
	  n = splay_tree_lookup (ctx->field_map,
				 (splay_tree_key) TREE_OPERAND (ind, 0));
	  df = (tree) n->value;
	  df = *tcctx.cb.decl_map->get (df);
	  ptr = build_simple_mem_ref_loc (loc, arg);
	  ptr = omp_build_component_ref (ptr, df);
	  t = build2 (MODIFY_EXPR, TREE_TYPE (ptr), ptr,
		      build_fold_addr_expr_loc (loc, dst));
	  append_to_statement_list (t, &list);
	}

  t = build1 (RETURN_EXPR, void_type_node, NULL);
  append_to_statement_list (t, &list);

  if (tcctx.cb.decl_map)
    delete tcctx.cb.decl_map;
  pop_gimplify_context (NULL);
  BIND_EXPR_BODY (bind) = list;
  pop_cfun ();
}
11092
/* Lower the OMP_CLAUSE_DEPEND clauses in *PCLAUSES into the pointer
   array the GOMP_task-family runtime entry points expect.  The array is
   built and filled in *ISEQ (a short header of counts followed by the
   addresses of the dependence objects, grouped by dependence kind), a
   clobber for it is appended to *OSEQ, and a new OMP_CLAUSE_DEPEND_LAST
   clause whose decl is the address of the array is prepended to
   *PCLAUSES to mark the lowering as done.  */

static void
lower_depend_clauses (tree *pclauses, gimple_seq *iseq, gimple_seq *oseq)
{
  tree c, clauses;
  gimple *g;
  /* cnt[0]: out/inout, cnt[1]: mutexinoutset, cnt[2]: in,
     cnt[3]: depobj dependence counts.  IDX is the number of header
     elements: 2 for the legacy layout, 5 when mutexinoutset or depobj
     dependences require the newer layout.  */
  size_t cnt[4] = { 0, 0, 0, 0 }, idx = 2, i;

  clauses = omp_find_clause (*pclauses, OMP_CLAUSE_DEPEND);
  gcc_assert (clauses);
  /* Count the dependences of each kind.  */
  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
      switch (OMP_CLAUSE_DEPEND_KIND (c))
	{
	case OMP_CLAUSE_DEPEND_LAST:
	  /* Lowering already done at gimplification.  */
	  return;
	case OMP_CLAUSE_DEPEND_IN:
	  cnt[2]++;
	  break;
	case OMP_CLAUSE_DEPEND_OUT:
	case OMP_CLAUSE_DEPEND_INOUT:
	  cnt[0]++;
	  break;
	case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
	  cnt[1]++;
	  break;
	case OMP_CLAUSE_DEPEND_DEPOBJ:
	  cnt[3]++;
	  break;
	case OMP_CLAUSE_DEPEND_SOURCE:
	case OMP_CLAUSE_DEPEND_SINK:
	  /* FALLTHRU */
	default:
	  /* source/sink belong to ordered, not task, constructs.  */
	  gcc_unreachable ();
	}
  if (cnt[1] || cnt[3])
    idx = 5;
  size_t total = cnt[0] + cnt[1] + cnt[2] + cnt[3];
  tree type = build_array_type_nelts (ptr_type_node, total + idx);
  tree array = create_tmp_var (type);
  /* The runtime receives the array by address.  */
  TREE_ADDRESSABLE (array) = 1;
  tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
		   NULL_TREE);
  if (idx == 5)
    {
      /* Newer layout: element 0 holds 0 as a marker distinguishing it
	 from the legacy layout (whose element 0 is the nonzero total),
	 and the total moves to element 1.  */
      g = gimple_build_assign (r, build_int_cst (ptr_type_node, 0));
      gimple_seq_add_stmt (iseq, g);
      r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
		  NULL_TREE);
    }
  g = gimple_build_assign (r, build_int_cst (ptr_type_node, total));
  gimple_seq_add_stmt (iseq, g);
  /* Legacy layout stores just the out/inout count; the newer layout
     stores the out/inout, mutexinoutset and in counts (the depobj
     count is then implied by the total).  */
  for (i = 0; i < (idx == 5 ? 3 : 1); i++)
    {
      r = build4 (ARRAY_REF, ptr_type_node, array,
		  size_int (i + 1 + (idx == 5)), NULL_TREE, NULL_TREE);
      g = gimple_build_assign (r, build_int_cst (ptr_type_node, cnt[i]));
      gimple_seq_add_stmt (iseq, g);
    }
  /* Store the dependence addresses, one kind group per outer iteration
     so they end up contiguous in the required order.  */
  for (i = 0; i < 4; i++)
    {
      if (cnt[i] == 0)
	continue;
      for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
	  continue;
	else
	  {
	    switch (OMP_CLAUSE_DEPEND_KIND (c))
	      {
	      case OMP_CLAUSE_DEPEND_IN:
		if (i != 2)
		  continue;
		break;
	      case OMP_CLAUSE_DEPEND_OUT:
	      case OMP_CLAUSE_DEPEND_INOUT:
		if (i != 0)
		  continue;
		break;
	      case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
		if (i != 1)
		  continue;
		break;
	      case OMP_CLAUSE_DEPEND_DEPOBJ:
		if (i != 3)
		  continue;
		break;
	      default:
		gcc_unreachable ();
	      }
	    tree t = OMP_CLAUSE_DECL (c);
	    t = fold_convert (ptr_type_node, t);
	    gimplify_expr (&t, iseq, NULL, is_gimple_val, fb_rvalue);
	    r = build4 (ARRAY_REF, ptr_type_node, array, size_int (idx++),
			NULL_TREE, NULL_TREE);
	    g = gimple_build_assign (r, t);
	    gimple_seq_add_stmt (iseq, g);
	  }
    }
  /* Prepend an OMP_CLAUSE_DEPEND_LAST clause carrying the array's
     address; construct lowering passes this to the runtime, and seeing
     DEPEND_LAST above makes this function idempotent.  */
  c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
  OMP_CLAUSE_DEPEND_KIND (c) = OMP_CLAUSE_DEPEND_LAST;
  OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
  OMP_CLAUSE_CHAIN (c) = *pclauses;
  *pclauses = c;
  /* The array is dead once the construct completes.  */
  tree clobber = build_clobber (type);
  g = gimple_build_assign (array, clobber);
  gimple_seq_add_stmt (oseq, g);
}
11201
/* Lower the OpenMP parallel or task directive in the current statement
   in GSI_P.  CTX holds context information for the directive.  */

static void
lower_omp_taskreg (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree clauses;
  tree child_fn, t;
  gimple *stmt = gsi_stmt (*gsi_p);
  gbind *par_bind, *bind, *dep_bind = NULL;
  gimple_seq par_body;
  location_t loc = gimple_location (stmt);

  clauses = gimple_omp_taskreg_clauses (stmt);
  /* A taskwait-with-depend task has no body to lower.  */
  if (gimple_code (stmt) == GIMPLE_OMP_TASK
      && gimple_omp_task_taskwait_p (stmt))
    {
      par_bind = NULL;
      par_body = NULL;
    }
  else
    {
      par_bind
	= as_a <gbind *> (gimple_seq_first_stmt (gimple_omp_body (stmt)));
      par_body = gimple_bind_body (par_bind);
    }
  child_fn = ctx->cb.dst_fn;
  /* Detect a parallel whose body is exactly one worksharing construct
     and mark it combined (check_combined_parallel leaves 1 in ws_num
     in that case).  */
  if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL
      && !gimple_omp_parallel_combined_p (stmt))
    {
      struct walk_stmt_info wi;
      int ws_num = 0;

      memset (&wi, 0, sizeof (wi));
      wi.info = &ws_num;
      wi.val_only = true;
      walk_gimple_seq (par_body, check_combined_parallel, NULL, &wi);
      if (ws_num == 1)
	gimple_omp_parallel_set_combined_p (stmt, true);
    }
  /* Lower depend clauses of a task into the runtime's array form; the
     whole lowered construct is then wrapped into DEP_BIND below.  */
  gimple_seq dep_ilist = NULL;
  gimple_seq dep_olist = NULL;
  if (gimple_code (stmt) == GIMPLE_OMP_TASK
      && omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
    {
      push_gimplify_context ();
      dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
      lower_depend_clauses (gimple_omp_task_clauses_ptr (stmt),
			    &dep_ilist, &dep_olist);
    }

  /* For taskwait-with-depend nothing else needs lowering: emit the
     dependence setup, the stmt itself and the teardown, and return.  */
  if (gimple_code (stmt) == GIMPLE_OMP_TASK
      && gimple_omp_task_taskwait_p (stmt))
    {
      if (dep_bind)
	{
	  gsi_replace (gsi_p, dep_bind, true);
	  gimple_bind_add_seq (dep_bind, dep_ilist);
	  gimple_bind_add_stmt (dep_bind, stmt);
	  gimple_bind_add_seq (dep_bind, dep_olist);
	  pop_gimplify_context (dep_bind);
	}
      return;
    }

  /* A sender record implies firstprivate copying is needed: build the
     task copy function.  */
  if (ctx->srecord_type)
    create_task_copyfn (as_a <gomp_task *> (stmt), ctx);

  /* Task reductions (taskloop reduction / parallel _reductemp_) need
     setup/teardown sequences around the construct.  */
  gimple_seq tskred_ilist = NULL;
  gimple_seq tskred_olist = NULL;
  if ((is_task_ctx (ctx)
       && gimple_omp_task_taskloop_p (ctx->stmt)
       && omp_find_clause (gimple_omp_task_clauses (ctx->stmt),
			   OMP_CLAUSE_REDUCTION))
      || (is_parallel_ctx (ctx)
	  && omp_find_clause (gimple_omp_parallel_clauses (stmt),
			      OMP_CLAUSE__REDUCTEMP_)))
    {
      if (dep_bind == NULL)
	{
	  push_gimplify_context ();
	  dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
	}
      lower_omp_task_reductions (ctx, is_task_ctx (ctx) ? OMP_TASKLOOP
							: OMP_PARALLEL,
				 gimple_omp_taskreg_clauses (ctx->stmt),
				 &tskred_ilist, &tskred_olist);
    }

  push_gimplify_context ();

  gimple_seq par_olist = NULL;
  gimple_seq par_ilist = NULL;
  gimple_seq par_rlist = NULL;
  /* A phony (gridified) parallel is lowered in place rather than
     outlined.  */
  bool phony_construct = gimple_code (stmt) == GIMPLE_OMP_PARALLEL
    && gimple_omp_parallel_grid_phony (as_a <gomp_parallel *> (stmt));
  if (phony_construct && ctx->record_type)
    {
      gcc_checking_assert (!ctx->receiver_decl);
      ctx->receiver_decl = create_tmp_var
	(build_reference_type (ctx->record_type), ".omp_rec");
    }
  lower_rec_input_clauses (clauses, &par_ilist, &par_olist, ctx, NULL);
  lower_omp (&par_body, ctx);
  if (gimple_code (stmt) != GIMPLE_OMP_TASK)
    lower_reduction_clauses (clauses, &par_rlist, NULL, ctx);

  /* Declare all the variables created by mapping and the variables
     declared in the scope of the parallel body.  */
  record_vars_into (ctx->block_vars, child_fn);
  maybe_remove_omp_member_access_dummy_vars (par_bind);
  record_vars_into (gimple_bind_vars (par_bind), child_fn);

  /* The sender record is the .omp_data_o object the parent fills in
     and passes to the outlined child function.  */
  if (ctx->record_type)
    {
      ctx->sender_decl
	= create_tmp_var (ctx->srecord_type ? ctx->srecord_type
			  : ctx->record_type, ".omp_data_o");
      DECL_NAMELESS (ctx->sender_decl) = 1;
      TREE_ADDRESSABLE (ctx->sender_decl) = 1;
      gimple_omp_taskreg_set_data_arg (stmt, ctx->sender_decl);
    }

  /* ILIST fills the sender record before the construct; OLIST copies
     results back after it.  */
  gimple_seq olist = NULL;
  gimple_seq ilist = NULL;
  lower_send_clauses (clauses, &ilist, &olist, ctx);
  lower_send_shared_vars (&ilist, &olist, ctx);

  if (ctx->record_type)
    {
      /* Clobber the sender record after the construct; it is dead.  */
      tree clobber = build_clobber (TREE_TYPE (ctx->sender_decl));
      gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
							clobber));
    }

  /* Once all the expansions are done, sequence all the different
     fragments inside gimple_omp_body.  */

  gimple_seq new_body = NULL;

  if (ctx->record_type)
    {
      t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
      /* fixup_child_record_type might have changed receiver_decl's type.  */
      t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
      gimple_seq_add_stmt (&new_body,
			   gimple_build_assign (ctx->receiver_decl, t));
    }

  gimple_seq_add_seq (&new_body, par_ilist);
  gimple_seq_add_seq (&new_body, par_body);
  gimple_seq_add_seq (&new_body, par_rlist);
  /* Cancellation jumps here, past the body but before the olist.  */
  if (ctx->cancellable)
    gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
  gimple_seq_add_seq (&new_body, par_olist);
  new_body = maybe_catch_exception (new_body);
  if (gimple_code (stmt) == GIMPLE_OMP_TASK)
    gimple_seq_add_stmt (&new_body,
			 gimple_build_omp_continue (integer_zero_node,
						    integer_zero_node));
  if (!phony_construct)
    {
      gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
      gimple_omp_set_body (stmt, new_body);
    }

  if (dep_bind && gimple_bind_block (par_bind) == NULL_TREE)
    bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
  else
    bind = gimple_build_bind (NULL, NULL, gimple_bind_block (par_bind));
  gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
  gimple_bind_add_seq (bind, ilist);
  /* A phony construct's body is emitted inline instead of the stmt.  */
  if (!phony_construct)
    gimple_bind_add_stmt (bind, stmt);
  else
    gimple_bind_add_seq (bind, new_body);
  gimple_bind_add_seq (bind, olist);

  pop_gimplify_context (NULL);

  if (dep_bind)
    {
      /* Dependence and task-reduction setup wrap everything else.  */
      gimple_bind_add_seq (dep_bind, dep_ilist);
      gimple_bind_add_seq (dep_bind, tskred_ilist);
      gimple_bind_add_stmt (dep_bind, bind);
      gimple_bind_add_seq (dep_bind, tskred_olist);
      gimple_bind_add_seq (dep_bind, dep_olist);
      pop_gimplify_context (dep_bind);
    }
}
11392
11393 /* Lower the GIMPLE_OMP_TARGET in the current statement
11394 in GSI_P. CTX holds context information for the directive. */
11395
11396 static void
lower_omp_target(gimple_stmt_iterator * gsi_p,omp_context * ctx)11397 lower_omp_target (gimple_stmt_iterator *gsi_p, omp_context *ctx)
11398 {
11399 tree clauses;
11400 tree child_fn, t, c;
11401 gomp_target *stmt = as_a <gomp_target *> (gsi_stmt (*gsi_p));
11402 gbind *tgt_bind, *bind, *dep_bind = NULL;
11403 gimple_seq tgt_body, olist, ilist, fplist, new_body;
11404 location_t loc = gimple_location (stmt);
11405 bool offloaded, data_region;
11406 unsigned int map_cnt = 0;
11407
11408 offloaded = is_gimple_omp_offloaded (stmt);
11409 switch (gimple_omp_target_kind (stmt))
11410 {
11411 case GF_OMP_TARGET_KIND_REGION:
11412 case GF_OMP_TARGET_KIND_UPDATE:
11413 case GF_OMP_TARGET_KIND_ENTER_DATA:
11414 case GF_OMP_TARGET_KIND_EXIT_DATA:
11415 case GF_OMP_TARGET_KIND_OACC_PARALLEL:
11416 case GF_OMP_TARGET_KIND_OACC_KERNELS:
11417 case GF_OMP_TARGET_KIND_OACC_SERIAL:
11418 case GF_OMP_TARGET_KIND_OACC_UPDATE:
11419 case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA:
11420 case GF_OMP_TARGET_KIND_OACC_DECLARE:
11421 data_region = false;
11422 break;
11423 case GF_OMP_TARGET_KIND_DATA:
11424 case GF_OMP_TARGET_KIND_OACC_DATA:
11425 case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
11426 data_region = true;
11427 break;
11428 default:
11429 gcc_unreachable ();
11430 }
11431
11432 clauses = gimple_omp_target_clauses (stmt);
11433
11434 gimple_seq dep_ilist = NULL;
11435 gimple_seq dep_olist = NULL;
11436 if (omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
11437 {
11438 push_gimplify_context ();
11439 dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
11440 lower_depend_clauses (gimple_omp_target_clauses_ptr (stmt),
11441 &dep_ilist, &dep_olist);
11442 }
11443
11444 tgt_bind = NULL;
11445 tgt_body = NULL;
11446 if (offloaded)
11447 {
11448 tgt_bind = gimple_seq_first_stmt_as_a_bind (gimple_omp_body (stmt));
11449 tgt_body = gimple_bind_body (tgt_bind);
11450 }
11451 else if (data_region)
11452 tgt_body = gimple_omp_body (stmt);
11453 child_fn = ctx->cb.dst_fn;
11454
11455 push_gimplify_context ();
11456 fplist = NULL;
11457
11458 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
11459 switch (OMP_CLAUSE_CODE (c))
11460 {
11461 tree var, x;
11462
11463 default:
11464 break;
11465 case OMP_CLAUSE_MAP:
11466 #if CHECKING_P
11467 /* First check what we're prepared to handle in the following. */
11468 switch (OMP_CLAUSE_MAP_KIND (c))
11469 {
11470 case GOMP_MAP_ALLOC:
11471 case GOMP_MAP_TO:
11472 case GOMP_MAP_FROM:
11473 case GOMP_MAP_TOFROM:
11474 case GOMP_MAP_POINTER:
11475 case GOMP_MAP_TO_PSET:
11476 case GOMP_MAP_DELETE:
11477 case GOMP_MAP_RELEASE:
11478 case GOMP_MAP_ALWAYS_TO:
11479 case GOMP_MAP_ALWAYS_FROM:
11480 case GOMP_MAP_ALWAYS_TOFROM:
11481 case GOMP_MAP_FIRSTPRIVATE_POINTER:
11482 case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
11483 case GOMP_MAP_STRUCT:
11484 case GOMP_MAP_ALWAYS_POINTER:
11485 break;
11486 case GOMP_MAP_IF_PRESENT:
11487 case GOMP_MAP_FORCE_ALLOC:
11488 case GOMP_MAP_FORCE_TO:
11489 case GOMP_MAP_FORCE_FROM:
11490 case GOMP_MAP_FORCE_TOFROM:
11491 case GOMP_MAP_FORCE_PRESENT:
11492 case GOMP_MAP_FORCE_DEVICEPTR:
11493 case GOMP_MAP_DEVICE_RESIDENT:
11494 case GOMP_MAP_LINK:
11495 case GOMP_MAP_ATTACH:
11496 case GOMP_MAP_DETACH:
11497 case GOMP_MAP_FORCE_DETACH:
11498 gcc_assert (is_gimple_omp_oacc (stmt));
11499 break;
11500 default:
11501 gcc_unreachable ();
11502 }
11503 #endif
11504 /* FALLTHRU */
11505 case OMP_CLAUSE_TO:
11506 case OMP_CLAUSE_FROM:
11507 oacc_firstprivate:
11508 var = OMP_CLAUSE_DECL (c);
11509 if (!DECL_P (var))
11510 {
11511 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP
11512 || (!OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
11513 && (OMP_CLAUSE_MAP_KIND (c)
11514 != GOMP_MAP_FIRSTPRIVATE_POINTER)))
11515 map_cnt++;
11516 continue;
11517 }
11518
11519 if (DECL_SIZE (var)
11520 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
11521 {
11522 tree var2 = DECL_VALUE_EXPR (var);
11523 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
11524 var2 = TREE_OPERAND (var2, 0);
11525 gcc_assert (DECL_P (var2));
11526 var = var2;
11527 }
11528
11529 if (offloaded
11530 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
11531 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
11532 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
11533 {
11534 if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
11535 {
11536 if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx))
11537 && varpool_node::get_create (var)->offloadable)
11538 continue;
11539
11540 tree type = build_pointer_type (TREE_TYPE (var));
11541 tree new_var = lookup_decl (var, ctx);
11542 x = create_tmp_var_raw (type, get_name (new_var));
11543 gimple_add_tmp_var (x);
11544 x = build_simple_mem_ref (x);
11545 SET_DECL_VALUE_EXPR (new_var, x);
11546 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
11547 }
11548 continue;
11549 }
11550
11551 if (!maybe_lookup_field (var, ctx))
11552 continue;
11553
11554 /* Don't remap compute constructs' reduction variables, because the
11555 intermediate result must be local to each gang. */
11556 if (offloaded && !(OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
11557 && OMP_CLAUSE_MAP_IN_REDUCTION (c)))
11558 {
11559 x = build_receiver_ref (var, true, ctx);
11560 tree new_var = lookup_decl (var, ctx);
11561
11562 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
11563 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
11564 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
11565 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
11566 x = build_simple_mem_ref (x);
11567 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
11568 {
11569 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
11570 if (omp_is_reference (new_var)
11571 && (TREE_CODE (TREE_TYPE (new_var)) != POINTER_TYPE
11572 || DECL_BY_REFERENCE (var)))
11573 {
11574 /* Create a local object to hold the instance
11575 value. */
11576 tree type = TREE_TYPE (TREE_TYPE (new_var));
11577 const char *id = IDENTIFIER_POINTER (DECL_NAME (new_var));
11578 tree inst = create_tmp_var (type, id);
11579 gimplify_assign (inst, fold_indirect_ref (x), &fplist);
11580 x = build_fold_addr_expr (inst);
11581 }
11582 gimplify_assign (new_var, x, &fplist);
11583 }
11584 else if (DECL_P (new_var))
11585 {
11586 SET_DECL_VALUE_EXPR (new_var, x);
11587 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
11588 }
11589 else
11590 gcc_unreachable ();
11591 }
11592 map_cnt++;
11593 break;
11594
11595 case OMP_CLAUSE_FIRSTPRIVATE:
11596 gcc_checking_assert (offloaded);
11597 if (is_gimple_omp_oacc (ctx->stmt))
11598 {
11599 /* No 'firstprivate' clauses on OpenACC 'kernels'. */
11600 gcc_checking_assert (!is_oacc_kernels (ctx));
11601
11602 goto oacc_firstprivate;
11603 }
11604 map_cnt++;
11605 var = OMP_CLAUSE_DECL (c);
11606 if (!omp_is_reference (var)
11607 && !is_gimple_reg_type (TREE_TYPE (var)))
11608 {
11609 tree new_var = lookup_decl (var, ctx);
11610 if (is_variable_sized (var))
11611 {
11612 tree pvar = DECL_VALUE_EXPR (var);
11613 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
11614 pvar = TREE_OPERAND (pvar, 0);
11615 gcc_assert (DECL_P (pvar));
11616 tree new_pvar = lookup_decl (pvar, ctx);
11617 x = build_fold_indirect_ref (new_pvar);
11618 TREE_THIS_NOTRAP (x) = 1;
11619 }
11620 else
11621 x = build_receiver_ref (var, true, ctx);
11622 SET_DECL_VALUE_EXPR (new_var, x);
11623 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
11624 }
11625 break;
11626
11627 case OMP_CLAUSE_PRIVATE:
11628 gcc_checking_assert (offloaded);
11629 if (is_gimple_omp_oacc (ctx->stmt))
11630 {
11631 /* No 'private' clauses on OpenACC 'kernels'. */
11632 gcc_checking_assert (!is_oacc_kernels (ctx));
11633
11634 break;
11635 }
11636 var = OMP_CLAUSE_DECL (c);
11637 if (is_variable_sized (var))
11638 {
11639 tree new_var = lookup_decl (var, ctx);
11640 tree pvar = DECL_VALUE_EXPR (var);
11641 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
11642 pvar = TREE_OPERAND (pvar, 0);
11643 gcc_assert (DECL_P (pvar));
11644 tree new_pvar = lookup_decl (pvar, ctx);
11645 x = build_fold_indirect_ref (new_pvar);
11646 TREE_THIS_NOTRAP (x) = 1;
11647 SET_DECL_VALUE_EXPR (new_var, x);
11648 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
11649 }
11650 break;
11651
11652 case OMP_CLAUSE_USE_DEVICE_PTR:
11653 case OMP_CLAUSE_USE_DEVICE_ADDR:
11654 case OMP_CLAUSE_IS_DEVICE_PTR:
11655 var = OMP_CLAUSE_DECL (c);
11656 map_cnt++;
11657 if (is_variable_sized (var))
11658 {
11659 tree new_var = lookup_decl (var, ctx);
11660 tree pvar = DECL_VALUE_EXPR (var);
11661 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
11662 pvar = TREE_OPERAND (pvar, 0);
11663 gcc_assert (DECL_P (pvar));
11664 tree new_pvar = lookup_decl (pvar, ctx);
11665 x = build_fold_indirect_ref (new_pvar);
11666 TREE_THIS_NOTRAP (x) = 1;
11667 SET_DECL_VALUE_EXPR (new_var, x);
11668 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
11669 }
11670 else if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
11671 && !omp_is_reference (var)
11672 && !omp_is_allocatable_or_ptr (var)
11673 && !lang_hooks.decls.omp_array_data (var, true))
11674 || TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
11675 {
11676 tree new_var = lookup_decl (var, ctx);
11677 tree type = build_pointer_type (TREE_TYPE (var));
11678 x = create_tmp_var_raw (type, get_name (new_var));
11679 gimple_add_tmp_var (x);
11680 x = build_simple_mem_ref (x);
11681 SET_DECL_VALUE_EXPR (new_var, x);
11682 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
11683 }
11684 else
11685 {
11686 tree new_var = lookup_decl (var, ctx);
11687 x = create_tmp_var_raw (TREE_TYPE (new_var), get_name (new_var));
11688 gimple_add_tmp_var (x);
11689 SET_DECL_VALUE_EXPR (new_var, x);
11690 DECL_HAS_VALUE_EXPR_P (new_var) = 1;
11691 }
11692 break;
11693 }
11694
11695 if (offloaded)
11696 {
11697 target_nesting_level++;
11698 lower_omp (&tgt_body, ctx);
11699 target_nesting_level--;
11700 }
11701 else if (data_region)
11702 lower_omp (&tgt_body, ctx);
11703
11704 if (offloaded)
11705 {
11706 /* Declare all the variables created by mapping and the variables
11707 declared in the scope of the target body. */
11708 record_vars_into (ctx->block_vars, child_fn);
11709 maybe_remove_omp_member_access_dummy_vars (tgt_bind);
11710 record_vars_into (gimple_bind_vars (tgt_bind), child_fn);
11711 }
11712
11713 olist = NULL;
11714 ilist = NULL;
11715 if (ctx->record_type)
11716 {
11717 ctx->sender_decl
11718 = create_tmp_var (ctx->record_type, ".omp_data_arr");
11719 DECL_NAMELESS (ctx->sender_decl) = 1;
11720 TREE_ADDRESSABLE (ctx->sender_decl) = 1;
11721 t = make_tree_vec (3);
11722 TREE_VEC_ELT (t, 0) = ctx->sender_decl;
11723 TREE_VEC_ELT (t, 1)
11724 = create_tmp_var (build_array_type_nelts (size_type_node, map_cnt),
11725 ".omp_data_sizes");
11726 DECL_NAMELESS (TREE_VEC_ELT (t, 1)) = 1;
11727 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 1)) = 1;
11728 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 1;
11729 tree tkind_type = short_unsigned_type_node;
11730 int talign_shift = 8;
11731 TREE_VEC_ELT (t, 2)
11732 = create_tmp_var (build_array_type_nelts (tkind_type, map_cnt),
11733 ".omp_data_kinds");
11734 DECL_NAMELESS (TREE_VEC_ELT (t, 2)) = 1;
11735 TREE_ADDRESSABLE (TREE_VEC_ELT (t, 2)) = 1;
11736 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 1;
11737 gimple_omp_target_set_data_arg (stmt, t);
11738
11739 vec<constructor_elt, va_gc> *vsize;
11740 vec<constructor_elt, va_gc> *vkind;
11741 vec_alloc (vsize, map_cnt);
11742 vec_alloc (vkind, map_cnt);
11743 unsigned int map_idx = 0;
11744
11745 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
11746 switch (OMP_CLAUSE_CODE (c))
11747 {
11748 tree ovar, nc, s, purpose, var, x, type;
11749 unsigned int talign;
11750
11751 default:
11752 break;
11753
11754 case OMP_CLAUSE_MAP:
11755 case OMP_CLAUSE_TO:
11756 case OMP_CLAUSE_FROM:
11757 oacc_firstprivate_map:
11758 nc = c;
11759 ovar = OMP_CLAUSE_DECL (c);
11760 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
11761 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
11762 || (OMP_CLAUSE_MAP_KIND (c)
11763 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
11764 break;
11765 if (!DECL_P (ovar))
11766 {
11767 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
11768 && OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
11769 {
11770 gcc_checking_assert (OMP_CLAUSE_DECL (OMP_CLAUSE_CHAIN (c))
11771 == get_base_address (ovar));
11772 nc = OMP_CLAUSE_CHAIN (c);
11773 ovar = OMP_CLAUSE_DECL (nc);
11774 }
11775 else
11776 {
11777 tree x = build_sender_ref (ovar, ctx);
11778 tree v
11779 = build_fold_addr_expr_with_type (ovar, ptr_type_node);
11780 gimplify_assign (x, v, &ilist);
11781 nc = NULL_TREE;
11782 }
11783 }
11784 else
11785 {
11786 if (DECL_SIZE (ovar)
11787 && TREE_CODE (DECL_SIZE (ovar)) != INTEGER_CST)
11788 {
11789 tree ovar2 = DECL_VALUE_EXPR (ovar);
11790 gcc_assert (TREE_CODE (ovar2) == INDIRECT_REF);
11791 ovar2 = TREE_OPERAND (ovar2, 0);
11792 gcc_assert (DECL_P (ovar2));
11793 ovar = ovar2;
11794 }
11795 if (!maybe_lookup_field (ovar, ctx))
11796 continue;
11797 }
11798
11799 talign = TYPE_ALIGN_UNIT (TREE_TYPE (ovar));
11800 if (DECL_P (ovar) && DECL_ALIGN_UNIT (ovar) > talign)
11801 talign = DECL_ALIGN_UNIT (ovar);
11802 if (nc)
11803 {
11804 var = lookup_decl_in_outer_ctx (ovar, ctx);
11805 x = build_sender_ref (ovar, ctx);
11806
11807 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
11808 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
11809 && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
11810 && TREE_CODE (TREE_TYPE (ovar)) == ARRAY_TYPE)
11811 {
11812 gcc_assert (offloaded);
11813 tree avar
11814 = create_tmp_var (TREE_TYPE (TREE_TYPE (x)));
11815 mark_addressable (avar);
11816 gimplify_assign (avar, build_fold_addr_expr (var), &ilist);
11817 talign = DECL_ALIGN_UNIT (avar);
11818 avar = build_fold_addr_expr (avar);
11819 gimplify_assign (x, avar, &ilist);
11820 }
11821 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
11822 {
11823 gcc_assert (is_gimple_omp_oacc (ctx->stmt));
11824 if (!omp_is_reference (var))
11825 {
11826 if (is_gimple_reg (var)
11827 && OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
11828 TREE_NO_WARNING (var) = 1;
11829 var = build_fold_addr_expr (var);
11830 }
11831 else
11832 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
11833 gimplify_assign (x, var, &ilist);
11834 }
11835 else if (is_gimple_reg (var))
11836 {
11837 gcc_assert (offloaded);
11838 tree avar = create_tmp_var (TREE_TYPE (var));
11839 mark_addressable (avar);
11840 enum gomp_map_kind map_kind = OMP_CLAUSE_MAP_KIND (c);
11841 if (GOMP_MAP_COPY_TO_P (map_kind)
11842 || map_kind == GOMP_MAP_POINTER
11843 || map_kind == GOMP_MAP_TO_PSET
11844 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
11845 {
11846 /* If we need to initialize a temporary
11847 with VAR because it is not addressable, and
11848 the variable hasn't been initialized yet, then
11849 we'll get a warning for the store to avar.
11850 Don't warn in that case, the mapping might
11851 be implicit. */
11852 TREE_NO_WARNING (var) = 1;
11853 gimplify_assign (avar, var, &ilist);
11854 }
11855 avar = build_fold_addr_expr (avar);
11856 gimplify_assign (x, avar, &ilist);
11857 if ((GOMP_MAP_COPY_FROM_P (map_kind)
11858 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
11859 && !TYPE_READONLY (TREE_TYPE (var)))
11860 {
11861 x = unshare_expr (x);
11862 x = build_simple_mem_ref (x);
11863 gimplify_assign (var, x, &olist);
11864 }
11865 }
11866 else
11867 {
11868 /* While MAP is handled explicitly by the FE,
11869 for 'target update', only the identified is passed. */
11870 if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FROM
11871 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TO)
11872 && (omp_is_allocatable_or_ptr (var)
11873 && omp_check_optional_argument (var, false)))
11874 var = build_fold_indirect_ref (var);
11875 else if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FROM
11876 && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_TO)
11877 || (!omp_is_allocatable_or_ptr (var)
11878 && !omp_check_optional_argument (var, false)))
11879 var = build_fold_addr_expr (var);
11880 gimplify_assign (x, var, &ilist);
11881 }
11882 }
11883 s = NULL_TREE;
11884 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
11885 {
11886 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
11887 s = TREE_TYPE (ovar);
11888 if (TREE_CODE (s) == REFERENCE_TYPE
11889 || omp_check_optional_argument (ovar, false))
11890 s = TREE_TYPE (s);
11891 s = TYPE_SIZE_UNIT (s);
11892 }
11893 else
11894 s = OMP_CLAUSE_SIZE (c);
11895 if (s == NULL_TREE)
11896 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
11897 s = fold_convert (size_type_node, s);
11898 purpose = size_int (map_idx++);
11899 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
11900 if (TREE_CODE (s) != INTEGER_CST)
11901 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
11902
11903 unsigned HOST_WIDE_INT tkind, tkind_zero;
11904 switch (OMP_CLAUSE_CODE (c))
11905 {
11906 case OMP_CLAUSE_MAP:
11907 tkind = OMP_CLAUSE_MAP_KIND (c);
11908 tkind_zero = tkind;
11909 if (OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c))
11910 switch (tkind)
11911 {
11912 case GOMP_MAP_ALLOC:
11913 case GOMP_MAP_IF_PRESENT:
11914 case GOMP_MAP_TO:
11915 case GOMP_MAP_FROM:
11916 case GOMP_MAP_TOFROM:
11917 case GOMP_MAP_ALWAYS_TO:
11918 case GOMP_MAP_ALWAYS_FROM:
11919 case GOMP_MAP_ALWAYS_TOFROM:
11920 case GOMP_MAP_RELEASE:
11921 case GOMP_MAP_FORCE_TO:
11922 case GOMP_MAP_FORCE_FROM:
11923 case GOMP_MAP_FORCE_TOFROM:
11924 case GOMP_MAP_FORCE_PRESENT:
11925 tkind_zero = GOMP_MAP_ZERO_LEN_ARRAY_SECTION;
11926 break;
11927 case GOMP_MAP_DELETE:
11928 tkind_zero = GOMP_MAP_DELETE_ZERO_LEN_ARRAY_SECTION;
11929 default:
11930 break;
11931 }
11932 if (tkind_zero != tkind)
11933 {
11934 if (integer_zerop (s))
11935 tkind = tkind_zero;
11936 else if (integer_nonzerop (s))
11937 tkind_zero = tkind;
11938 }
11939 break;
11940 case OMP_CLAUSE_FIRSTPRIVATE:
11941 gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
11942 tkind = GOMP_MAP_TO;
11943 tkind_zero = tkind;
11944 break;
11945 case OMP_CLAUSE_TO:
11946 tkind = GOMP_MAP_TO;
11947 tkind_zero = tkind;
11948 break;
11949 case OMP_CLAUSE_FROM:
11950 tkind = GOMP_MAP_FROM;
11951 tkind_zero = tkind;
11952 break;
11953 default:
11954 gcc_unreachable ();
11955 }
11956 gcc_checking_assert (tkind
11957 < (HOST_WIDE_INT_C (1U) << talign_shift));
11958 gcc_checking_assert (tkind_zero
11959 < (HOST_WIDE_INT_C (1U) << talign_shift));
11960 talign = ceil_log2 (talign);
11961 tkind |= talign << talign_shift;
11962 tkind_zero |= talign << talign_shift;
11963 gcc_checking_assert (tkind
11964 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
11965 gcc_checking_assert (tkind_zero
11966 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
11967 if (tkind == tkind_zero)
11968 x = build_int_cstu (tkind_type, tkind);
11969 else
11970 {
11971 TREE_STATIC (TREE_VEC_ELT (t, 2)) = 0;
11972 x = build3 (COND_EXPR, tkind_type,
11973 fold_build2 (EQ_EXPR, boolean_type_node,
11974 unshare_expr (s), size_zero_node),
11975 build_int_cstu (tkind_type, tkind_zero),
11976 build_int_cstu (tkind_type, tkind));
11977 }
11978 CONSTRUCTOR_APPEND_ELT (vkind, purpose, x);
11979 if (nc && nc != c)
11980 c = nc;
11981 break;
11982
11983 case OMP_CLAUSE_FIRSTPRIVATE:
11984 if (is_gimple_omp_oacc (ctx->stmt))
11985 goto oacc_firstprivate_map;
11986 ovar = OMP_CLAUSE_DECL (c);
11987 if (omp_is_reference (ovar))
11988 talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
11989 else
11990 talign = DECL_ALIGN_UNIT (ovar);
11991 var = lookup_decl_in_outer_ctx (ovar, ctx);
11992 x = build_sender_ref (ovar, ctx);
11993 tkind = GOMP_MAP_FIRSTPRIVATE;
11994 type = TREE_TYPE (ovar);
11995 if (omp_is_reference (ovar))
11996 type = TREE_TYPE (type);
11997 if ((INTEGRAL_TYPE_P (type)
11998 && TYPE_PRECISION (type) <= POINTER_SIZE)
11999 || TREE_CODE (type) == POINTER_TYPE)
12000 {
12001 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
12002 tree t = var;
12003 if (omp_is_reference (var))
12004 t = build_simple_mem_ref (var);
12005 else if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
12006 TREE_NO_WARNING (var) = 1;
12007 if (TREE_CODE (type) != POINTER_TYPE)
12008 t = fold_convert (pointer_sized_int_node, t);
12009 t = fold_convert (TREE_TYPE (x), t);
12010 gimplify_assign (x, t, &ilist);
12011 }
12012 else if (omp_is_reference (var))
12013 gimplify_assign (x, var, &ilist);
12014 else if (is_gimple_reg (var))
12015 {
12016 tree avar = create_tmp_var (TREE_TYPE (var));
12017 mark_addressable (avar);
12018 if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
12019 TREE_NO_WARNING (var) = 1;
12020 gimplify_assign (avar, var, &ilist);
12021 avar = build_fold_addr_expr (avar);
12022 gimplify_assign (x, avar, &ilist);
12023 }
12024 else
12025 {
12026 var = build_fold_addr_expr (var);
12027 gimplify_assign (x, var, &ilist);
12028 }
12029 if (tkind == GOMP_MAP_FIRSTPRIVATE_INT)
12030 s = size_int (0);
12031 else if (omp_is_reference (ovar))
12032 s = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
12033 else
12034 s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
12035 s = fold_convert (size_type_node, s);
12036 purpose = size_int (map_idx++);
12037 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
12038 if (TREE_CODE (s) != INTEGER_CST)
12039 TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
12040
12041 gcc_checking_assert (tkind
12042 < (HOST_WIDE_INT_C (1U) << talign_shift));
12043 talign = ceil_log2 (talign);
12044 tkind |= talign << talign_shift;
12045 gcc_checking_assert (tkind
12046 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
12047 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
12048 build_int_cstu (tkind_type, tkind));
12049 break;
12050
12051 case OMP_CLAUSE_USE_DEVICE_PTR:
12052 case OMP_CLAUSE_USE_DEVICE_ADDR:
12053 case OMP_CLAUSE_IS_DEVICE_PTR:
12054 ovar = OMP_CLAUSE_DECL (c);
12055 var = lookup_decl_in_outer_ctx (ovar, ctx);
12056
12057 if (lang_hooks.decls.omp_array_data (ovar, true))
12058 {
12059 tkind = (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IS_DEVICE_PTR
12060 ? GOMP_MAP_USE_DEVICE_PTR : GOMP_MAP_FIRSTPRIVATE_INT);
12061 x = build_sender_ref ((splay_tree_key) &DECL_NAME (ovar), ctx);
12062 }
12063 else if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IS_DEVICE_PTR)
12064 {
12065 tkind = GOMP_MAP_USE_DEVICE_PTR;
12066 x = build_sender_ref ((splay_tree_key) &DECL_UID (ovar), ctx);
12067 }
12068 else
12069 {
12070 tkind = GOMP_MAP_FIRSTPRIVATE_INT;
12071 x = build_sender_ref (ovar, ctx);
12072 }
12073
12074 if (is_gimple_omp_oacc (ctx->stmt))
12075 {
12076 gcc_assert (tkind == GOMP_MAP_USE_DEVICE_PTR);
12077
12078 if (OMP_CLAUSE_USE_DEVICE_PTR_IF_PRESENT (c))
12079 tkind = GOMP_MAP_USE_DEVICE_PTR_IF_PRESENT;
12080 }
12081
12082 type = TREE_TYPE (ovar);
12083 if (lang_hooks.decls.omp_array_data (ovar, true))
12084 var = lang_hooks.decls.omp_array_data (ovar, false);
12085 else if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
12086 && !omp_is_reference (ovar)
12087 && !omp_is_allocatable_or_ptr (ovar))
12088 || TREE_CODE (type) == ARRAY_TYPE)
12089 var = build_fold_addr_expr (var);
12090 else
12091 {
12092 if (omp_is_reference (ovar)
12093 || omp_check_optional_argument (ovar, false)
12094 || omp_is_allocatable_or_ptr (ovar))
12095 {
12096 type = TREE_TYPE (type);
12097 if (TREE_CODE (type) != ARRAY_TYPE
12098 && ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_USE_DEVICE_ADDR
12099 && !omp_is_allocatable_or_ptr (ovar))
12100 || (omp_is_reference (ovar)
12101 && omp_is_allocatable_or_ptr (ovar))))
12102 var = build_simple_mem_ref (var);
12103 var = fold_convert (TREE_TYPE (x), var);
12104 }
12105 }
12106 tree present;
12107 present = omp_check_optional_argument (ovar, true);
12108 if (present)
12109 {
12110 tree null_label = create_artificial_label (UNKNOWN_LOCATION);
12111 tree notnull_label = create_artificial_label (UNKNOWN_LOCATION);
12112 tree opt_arg_label = create_artificial_label (UNKNOWN_LOCATION);
12113 tree new_x = unshare_expr (x);
12114 gimplify_expr (&present, &ilist, NULL, is_gimple_val,
12115 fb_rvalue);
12116 gcond *cond = gimple_build_cond_from_tree (present,
12117 notnull_label,
12118 null_label);
12119 gimple_seq_add_stmt (&ilist, cond);
12120 gimple_seq_add_stmt (&ilist, gimple_build_label (null_label));
12121 gimplify_assign (new_x, null_pointer_node, &ilist);
12122 gimple_seq_add_stmt (&ilist, gimple_build_goto (opt_arg_label));
12123 gimple_seq_add_stmt (&ilist,
12124 gimple_build_label (notnull_label));
12125 gimplify_assign (x, var, &ilist);
12126 gimple_seq_add_stmt (&ilist,
12127 gimple_build_label (opt_arg_label));
12128 }
12129 else
12130 gimplify_assign (x, var, &ilist);
12131 s = size_int (0);
12132 purpose = size_int (map_idx++);
12133 CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
12134 gcc_checking_assert (tkind
12135 < (HOST_WIDE_INT_C (1U) << talign_shift));
12136 gcc_checking_assert (tkind
12137 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
12138 CONSTRUCTOR_APPEND_ELT (vkind, purpose,
12139 build_int_cstu (tkind_type, tkind));
12140 break;
12141 }
12142
12143 gcc_assert (map_idx == map_cnt);
12144
12145 DECL_INITIAL (TREE_VEC_ELT (t, 1))
12146 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 1)), vsize);
12147 DECL_INITIAL (TREE_VEC_ELT (t, 2))
12148 = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 2)), vkind);
12149 for (int i = 1; i <= 2; i++)
12150 if (!TREE_STATIC (TREE_VEC_ELT (t, i)))
12151 {
12152 gimple_seq initlist = NULL;
12153 force_gimple_operand (build1 (DECL_EXPR, void_type_node,
12154 TREE_VEC_ELT (t, i)),
12155 &initlist, true, NULL_TREE);
12156 gimple_seq_add_seq (&ilist, initlist);
12157
12158 tree clobber = build_clobber (TREE_TYPE (TREE_VEC_ELT (t, i)));
12159 gimple_seq_add_stmt (&olist,
12160 gimple_build_assign (TREE_VEC_ELT (t, i),
12161 clobber));
12162 }
12163
12164 tree clobber = build_clobber (ctx->record_type);
12165 gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
12166 clobber));
12167 }
12168
12169 /* Once all the expansions are done, sequence all the different
12170 fragments inside gimple_omp_body. */
12171
12172 new_body = NULL;
12173
12174 if (offloaded
12175 && ctx->record_type)
12176 {
12177 t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
12178 /* fixup_child_record_type might have changed receiver_decl's type. */
12179 t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
12180 gimple_seq_add_stmt (&new_body,
12181 gimple_build_assign (ctx->receiver_decl, t));
12182 }
12183 gimple_seq_add_seq (&new_body, fplist);
12184
12185 if (offloaded || data_region)
12186 {
12187 tree prev = NULL_TREE;
12188 for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
12189 switch (OMP_CLAUSE_CODE (c))
12190 {
12191 tree var, x;
12192 default:
12193 break;
12194 case OMP_CLAUSE_FIRSTPRIVATE:
12195 if (is_gimple_omp_oacc (ctx->stmt))
12196 break;
12197 var = OMP_CLAUSE_DECL (c);
12198 if (omp_is_reference (var)
12199 || is_gimple_reg_type (TREE_TYPE (var)))
12200 {
12201 tree new_var = lookup_decl (var, ctx);
12202 tree type;
12203 type = TREE_TYPE (var);
12204 if (omp_is_reference (var))
12205 type = TREE_TYPE (type);
12206 if ((INTEGRAL_TYPE_P (type)
12207 && TYPE_PRECISION (type) <= POINTER_SIZE)
12208 || TREE_CODE (type) == POINTER_TYPE)
12209 {
12210 x = build_receiver_ref (var, false, ctx);
12211 if (TREE_CODE (type) != POINTER_TYPE)
12212 x = fold_convert (pointer_sized_int_node, x);
12213 x = fold_convert (type, x);
12214 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
12215 fb_rvalue);
12216 if (omp_is_reference (var))
12217 {
12218 tree v = create_tmp_var_raw (type, get_name (var));
12219 gimple_add_tmp_var (v);
12220 TREE_ADDRESSABLE (v) = 1;
12221 gimple_seq_add_stmt (&new_body,
12222 gimple_build_assign (v, x));
12223 x = build_fold_addr_expr (v);
12224 }
12225 gimple_seq_add_stmt (&new_body,
12226 gimple_build_assign (new_var, x));
12227 }
12228 else
12229 {
12230 x = build_receiver_ref (var, !omp_is_reference (var), ctx);
12231 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
12232 fb_rvalue);
12233 gimple_seq_add_stmt (&new_body,
12234 gimple_build_assign (new_var, x));
12235 }
12236 }
12237 else if (is_variable_sized (var))
12238 {
12239 tree pvar = DECL_VALUE_EXPR (var);
12240 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
12241 pvar = TREE_OPERAND (pvar, 0);
12242 gcc_assert (DECL_P (pvar));
12243 tree new_var = lookup_decl (pvar, ctx);
12244 x = build_receiver_ref (var, false, ctx);
12245 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
12246 gimple_seq_add_stmt (&new_body,
12247 gimple_build_assign (new_var, x));
12248 }
12249 break;
12250 case OMP_CLAUSE_PRIVATE:
12251 if (is_gimple_omp_oacc (ctx->stmt))
12252 break;
12253 var = OMP_CLAUSE_DECL (c);
12254 if (omp_is_reference (var))
12255 {
12256 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
12257 tree new_var = lookup_decl (var, ctx);
12258 x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
12259 if (TREE_CONSTANT (x))
12260 {
12261 x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
12262 get_name (var));
12263 gimple_add_tmp_var (x);
12264 TREE_ADDRESSABLE (x) = 1;
12265 x = build_fold_addr_expr_loc (clause_loc, x);
12266 }
12267 else
12268 break;
12269
12270 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
12271 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
12272 gimple_seq_add_stmt (&new_body,
12273 gimple_build_assign (new_var, x));
12274 }
12275 break;
12276 case OMP_CLAUSE_USE_DEVICE_PTR:
12277 case OMP_CLAUSE_USE_DEVICE_ADDR:
12278 case OMP_CLAUSE_IS_DEVICE_PTR:
12279 tree new_var;
12280 gimple_seq assign_body;
12281 bool is_array_data;
12282 bool do_optional_check;
12283 assign_body = NULL;
12284 do_optional_check = false;
12285 var = OMP_CLAUSE_DECL (c);
12286 is_array_data = lang_hooks.decls.omp_array_data (var, true) != NULL;
12287
12288 if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IS_DEVICE_PTR)
12289 x = build_sender_ref (is_array_data
12290 ? (splay_tree_key) &DECL_NAME (var)
12291 : (splay_tree_key) &DECL_UID (var), ctx);
12292 else
12293 x = build_receiver_ref (var, false, ctx);
12294
12295 if (is_array_data)
12296 {
12297 bool is_ref = omp_is_reference (var);
12298 do_optional_check = true;
12299 /* First, we copy the descriptor data from the host; then
12300 we update its data to point to the target address. */
12301 new_var = lookup_decl (var, ctx);
12302 new_var = DECL_VALUE_EXPR (new_var);
12303 tree v = new_var;
12304
12305 if (is_ref)
12306 {
12307 var = build_fold_indirect_ref (var);
12308 gimplify_expr (&var, &assign_body, NULL, is_gimple_val,
12309 fb_rvalue);
12310 v = create_tmp_var_raw (TREE_TYPE (var), get_name (var));
12311 gimple_add_tmp_var (v);
12312 TREE_ADDRESSABLE (v) = 1;
12313 gimple_seq_add_stmt (&assign_body,
12314 gimple_build_assign (v, var));
12315 tree rhs = build_fold_addr_expr (v);
12316 gimple_seq_add_stmt (&assign_body,
12317 gimple_build_assign (new_var, rhs));
12318 }
12319 else
12320 gimple_seq_add_stmt (&assign_body,
12321 gimple_build_assign (new_var, var));
12322
12323 tree v2 = lang_hooks.decls.omp_array_data (unshare_expr (v), false);
12324 gcc_assert (v2);
12325 gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
12326 gimple_seq_add_stmt (&assign_body,
12327 gimple_build_assign (v2, x));
12328 }
12329 else if (is_variable_sized (var))
12330 {
12331 tree pvar = DECL_VALUE_EXPR (var);
12332 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
12333 pvar = TREE_OPERAND (pvar, 0);
12334 gcc_assert (DECL_P (pvar));
12335 new_var = lookup_decl (pvar, ctx);
12336 gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
12337 gimple_seq_add_stmt (&assign_body,
12338 gimple_build_assign (new_var, x));
12339 }
12340 else if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
12341 && !omp_is_reference (var)
12342 && !omp_is_allocatable_or_ptr (var))
12343 || TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
12344 {
12345 new_var = lookup_decl (var, ctx);
12346 new_var = DECL_VALUE_EXPR (new_var);
12347 gcc_assert (TREE_CODE (new_var) == MEM_REF);
12348 new_var = TREE_OPERAND (new_var, 0);
12349 gcc_assert (DECL_P (new_var));
12350 gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
12351 gimple_seq_add_stmt (&assign_body,
12352 gimple_build_assign (new_var, x));
12353 }
12354 else
12355 {
12356 tree type = TREE_TYPE (var);
12357 new_var = lookup_decl (var, ctx);
12358 if (omp_is_reference (var))
12359 {
12360 type = TREE_TYPE (type);
12361 if (TREE_CODE (type) != ARRAY_TYPE
12362 && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_USE_DEVICE_ADDR
12363 || (omp_is_reference (var)
12364 && omp_is_allocatable_or_ptr (var))))
12365 {
12366 tree v = create_tmp_var_raw (type, get_name (var));
12367 gimple_add_tmp_var (v);
12368 TREE_ADDRESSABLE (v) = 1;
12369 x = fold_convert (type, x);
12370 gimplify_expr (&x, &assign_body, NULL, is_gimple_val,
12371 fb_rvalue);
12372 gimple_seq_add_stmt (&assign_body,
12373 gimple_build_assign (v, x));
12374 x = build_fold_addr_expr (v);
12375 do_optional_check = true;
12376 }
12377 }
12378 new_var = DECL_VALUE_EXPR (new_var);
12379 x = fold_convert (TREE_TYPE (new_var), x);
12380 gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
12381 gimple_seq_add_stmt (&assign_body,
12382 gimple_build_assign (new_var, x));
12383 }
12384 tree present;
12385 present = (do_optional_check
12386 ? omp_check_optional_argument (OMP_CLAUSE_DECL (c), true)
12387 : NULL_TREE);
12388 if (present)
12389 {
12390 tree null_label = create_artificial_label (UNKNOWN_LOCATION);
12391 tree notnull_label = create_artificial_label (UNKNOWN_LOCATION);
12392 tree opt_arg_label = create_artificial_label (UNKNOWN_LOCATION);
12393 glabel *null_glabel = gimple_build_label (null_label);
12394 glabel *notnull_glabel = gimple_build_label (notnull_label);
12395 ggoto *opt_arg_ggoto = gimple_build_goto (opt_arg_label);
12396 gimplify_expr (&x, &new_body, NULL, is_gimple_val,
12397 fb_rvalue);
12398 gimplify_expr (&present, &new_body, NULL, is_gimple_val,
12399 fb_rvalue);
12400 gcond *cond = gimple_build_cond_from_tree (present,
12401 notnull_label,
12402 null_label);
12403 gimple_seq_add_stmt (&new_body, cond);
12404 gimple_seq_add_stmt (&new_body, null_glabel);
12405 gimplify_assign (new_var, null_pointer_node, &new_body);
12406 gimple_seq_add_stmt (&new_body, opt_arg_ggoto);
12407 gimple_seq_add_stmt (&new_body, notnull_glabel);
12408 gimple_seq_add_seq (&new_body, assign_body);
12409 gimple_seq_add_stmt (&new_body,
12410 gimple_build_label (opt_arg_label));
12411 }
12412 else
12413 gimple_seq_add_seq (&new_body, assign_body);
12414 break;
12415 }
12416 /* Handle GOMP_MAP_FIRSTPRIVATE_{POINTER,REFERENCE} in second pass,
12417 so that firstprivate vars holding OMP_CLAUSE_SIZE if needed
12418 are already handled. Similarly OMP_CLAUSE_PRIVATE for VLAs
12419 or references to VLAs. */
12420 for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
12421 switch (OMP_CLAUSE_CODE (c))
12422 {
12423 tree var;
12424 default:
12425 break;
12426 case OMP_CLAUSE_MAP:
12427 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
12428 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
12429 {
12430 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
12431 poly_int64 offset = 0;
12432 gcc_assert (prev);
12433 var = OMP_CLAUSE_DECL (c);
12434 if (DECL_P (var)
12435 && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
12436 && is_global_var (maybe_lookup_decl_in_outer_ctx (var,
12437 ctx))
12438 && varpool_node::get_create (var)->offloadable)
12439 break;
12440 if (TREE_CODE (var) == INDIRECT_REF
12441 && TREE_CODE (TREE_OPERAND (var, 0)) == COMPONENT_REF)
12442 var = TREE_OPERAND (var, 0);
12443 if (TREE_CODE (var) == COMPONENT_REF)
12444 {
12445 var = get_addr_base_and_unit_offset (var, &offset);
12446 gcc_assert (var != NULL_TREE && DECL_P (var));
12447 }
12448 else if (DECL_SIZE (var)
12449 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
12450 {
12451 tree var2 = DECL_VALUE_EXPR (var);
12452 gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
12453 var2 = TREE_OPERAND (var2, 0);
12454 gcc_assert (DECL_P (var2));
12455 var = var2;
12456 }
12457 tree new_var = lookup_decl (var, ctx), x;
12458 tree type = TREE_TYPE (new_var);
12459 bool is_ref;
12460 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == INDIRECT_REF
12461 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
12462 == COMPONENT_REF))
12463 {
12464 type = TREE_TYPE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0));
12465 is_ref = true;
12466 new_var = build2 (MEM_REF, type,
12467 build_fold_addr_expr (new_var),
12468 build_int_cst (build_pointer_type (type),
12469 offset));
12470 }
12471 else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPONENT_REF)
12472 {
12473 type = TREE_TYPE (OMP_CLAUSE_DECL (c));
12474 is_ref = TREE_CODE (type) == REFERENCE_TYPE;
12475 new_var = build2 (MEM_REF, type,
12476 build_fold_addr_expr (new_var),
12477 build_int_cst (build_pointer_type (type),
12478 offset));
12479 }
12480 else
12481 is_ref = omp_is_reference (var);
12482 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
12483 is_ref = false;
12484 bool ref_to_array = false;
12485 if (is_ref)
12486 {
12487 type = TREE_TYPE (type);
12488 if (TREE_CODE (type) == ARRAY_TYPE)
12489 {
12490 type = build_pointer_type (type);
12491 ref_to_array = true;
12492 }
12493 }
12494 else if (TREE_CODE (type) == ARRAY_TYPE)
12495 {
12496 tree decl2 = DECL_VALUE_EXPR (new_var);
12497 gcc_assert (TREE_CODE (decl2) == MEM_REF);
12498 decl2 = TREE_OPERAND (decl2, 0);
12499 gcc_assert (DECL_P (decl2));
12500 new_var = decl2;
12501 type = TREE_TYPE (new_var);
12502 }
12503 x = build_receiver_ref (OMP_CLAUSE_DECL (prev), false, ctx);
12504 x = fold_convert_loc (clause_loc, type, x);
12505 if (!integer_zerop (OMP_CLAUSE_SIZE (c)))
12506 {
12507 tree bias = OMP_CLAUSE_SIZE (c);
12508 if (DECL_P (bias))
12509 bias = lookup_decl (bias, ctx);
12510 bias = fold_convert_loc (clause_loc, sizetype, bias);
12511 bias = fold_build1_loc (clause_loc, NEGATE_EXPR, sizetype,
12512 bias);
12513 x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
12514 TREE_TYPE (x), x, bias);
12515 }
12516 if (ref_to_array)
12517 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
12518 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
12519 if (is_ref && !ref_to_array)
12520 {
12521 tree t = create_tmp_var_raw (type, get_name (var));
12522 gimple_add_tmp_var (t);
12523 TREE_ADDRESSABLE (t) = 1;
12524 gimple_seq_add_stmt (&new_body,
12525 gimple_build_assign (t, x));
12526 x = build_fold_addr_expr_loc (clause_loc, t);
12527 }
12528 gimple_seq_add_stmt (&new_body,
12529 gimple_build_assign (new_var, x));
12530 prev = NULL_TREE;
12531 }
12532 else if (OMP_CLAUSE_CHAIN (c)
12533 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c))
12534 == OMP_CLAUSE_MAP
12535 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
12536 == GOMP_MAP_FIRSTPRIVATE_POINTER
12537 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
12538 == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
12539 prev = c;
12540 break;
12541 case OMP_CLAUSE_PRIVATE:
12542 var = OMP_CLAUSE_DECL (c);
12543 if (is_variable_sized (var))
12544 {
12545 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
12546 tree new_var = lookup_decl (var, ctx);
12547 tree pvar = DECL_VALUE_EXPR (var);
12548 gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
12549 pvar = TREE_OPERAND (pvar, 0);
12550 gcc_assert (DECL_P (pvar));
12551 tree new_pvar = lookup_decl (pvar, ctx);
12552 tree atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
12553 tree al = size_int (DECL_ALIGN (var));
12554 tree x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
12555 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
12556 x = fold_convert_loc (clause_loc, TREE_TYPE (new_pvar), x);
12557 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
12558 gimple_seq_add_stmt (&new_body,
12559 gimple_build_assign (new_pvar, x));
12560 }
12561 else if (omp_is_reference (var) && !is_gimple_omp_oacc (ctx->stmt))
12562 {
12563 location_t clause_loc = OMP_CLAUSE_LOCATION (c);
12564 tree new_var = lookup_decl (var, ctx);
12565 tree x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
12566 if (TREE_CONSTANT (x))
12567 break;
12568 else
12569 {
12570 tree atmp
12571 = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
12572 tree rtype = TREE_TYPE (TREE_TYPE (new_var));
12573 tree al = size_int (TYPE_ALIGN (rtype));
12574 x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
12575 }
12576
12577 x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
12578 gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
12579 gimple_seq_add_stmt (&new_body,
12580 gimple_build_assign (new_var, x));
12581 }
12582 break;
12583 }
12584
12585 gimple_seq fork_seq = NULL;
12586 gimple_seq join_seq = NULL;
12587
12588 if (offloaded && is_gimple_omp_oacc (ctx->stmt))
12589 {
12590 /* If there are reductions on the offloaded region itself, treat
12591 them as a dummy GANG loop. */
12592 tree level = build_int_cst (integer_type_node, GOMP_DIM_GANG);
12593
12594 lower_oacc_reductions (gimple_location (ctx->stmt), clauses, level,
12595 false, NULL, NULL, &fork_seq, &join_seq, ctx);
12596 }
12597
12598 gimple_seq_add_seq (&new_body, fork_seq);
12599 gimple_seq_add_seq (&new_body, tgt_body);
12600 gimple_seq_add_seq (&new_body, join_seq);
12601
12602 if (offloaded)
12603 {
12604 new_body = maybe_catch_exception (new_body);
12605 gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
12606 }
12607 gimple_omp_set_body (stmt, new_body);
12608 }
12609
12610 bind = gimple_build_bind (NULL, NULL,
12611 tgt_bind ? gimple_bind_block (tgt_bind)
12612 : NULL_TREE);
12613 gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
12614 gimple_bind_add_seq (bind, ilist);
12615 gimple_bind_add_stmt (bind, stmt);
12616 gimple_bind_add_seq (bind, olist);
12617
12618 pop_gimplify_context (NULL);
12619
12620 if (dep_bind)
12621 {
12622 gimple_bind_add_seq (dep_bind, dep_ilist);
12623 gimple_bind_add_stmt (dep_bind, bind);
12624 gimple_bind_add_seq (dep_bind, dep_olist);
12625 pop_gimplify_context (dep_bind);
12626 }
12627 }
12628
/* Expand code for an OpenMP teams directive.  Replaces the GIMPLE_OMP_TEAMS
   statement at *GSI_P with a GIMPLE_BIND that evaluates the num_teams and
   thread_limit clause expressions, calls GOMP_TEAMS, and contains the
   lowered teams body.  CTX is the omp_context created for the directive
   during scanning.  */

static void
lower_omp_teams (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  gomp_teams *teams_stmt = as_a <gomp_teams *> (gsi_stmt (*gsi_p));
  push_gimplify_context ();

  tree block = make_node (BLOCK);
  gbind *bind = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, bind, true);
  gimple_seq bind_body = NULL;
  gimple_seq dlist = NULL;
  gimple_seq olist = NULL;

  /* Evaluate the num_teams clause expression up front; a zero argument
     is passed to GOMP_TEAMS when the clause is absent (presumably letting
     the runtime choose -- confirm against libgomp).  */
  tree num_teams = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
				    OMP_CLAUSE_NUM_TEAMS);
  if (num_teams == NULL_TREE)
    num_teams = build_int_cst (unsigned_type_node, 0);
  else
    {
      num_teams = OMP_CLAUSE_NUM_TEAMS_EXPR (num_teams);
      num_teams = fold_convert (unsigned_type_node, num_teams);
      gimplify_expr (&num_teams, &bind_body, NULL, is_gimple_val, fb_rvalue);
    }
  /* Likewise for thread_limit; 0 again stands for "no clause".  */
  tree thread_limit = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
				       OMP_CLAUSE_THREAD_LIMIT);
  if (thread_limit == NULL_TREE)
    thread_limit = build_int_cst (unsigned_type_node, 0);
  else
    {
      thread_limit = OMP_CLAUSE_THREAD_LIMIT_EXPR (thread_limit);
      thread_limit = fold_convert (unsigned_type_node, thread_limit);
      gimplify_expr (&thread_limit, &bind_body, NULL, is_gimple_val,
		     fb_rvalue);
    }

  /* Lower data-sharing clauses into BIND_BODY (setup) and DLIST (teardown),
     then recursively lower the body and the reduction clauses.  */
  lower_rec_input_clauses (gimple_omp_teams_clauses (teams_stmt),
			   &bind_body, &dlist, ctx, NULL);
  lower_omp (gimple_omp_body_ptr (teams_stmt), ctx);
  lower_reduction_clauses (gimple_omp_teams_clauses (teams_stmt), &olist,
			   NULL, ctx);
  if (!gimple_omp_teams_grid_phony (teams_stmt))
    {
      /* Keep the (now body-less) teams statement as a region marker and
	 emit the GOMP_TEAMS runtime call right after it.  */
      gimple_seq_add_stmt (&bind_body, teams_stmt);
      location_t loc = gimple_location (teams_stmt);
      tree decl = builtin_decl_explicit (BUILT_IN_GOMP_TEAMS);
      gimple *call = gimple_build_call (decl, 2, num_teams, thread_limit);
      gimple_set_location (call, loc);
      gimple_seq_add_stmt (&bind_body, call);
    }

  /* Assemble: body, reduction merge code, destructor/teardown code, and
     (for a real, non-grid-phony region) the closing OMP return.  */
  gimple_seq_add_seq (&bind_body, gimple_omp_body (teams_stmt));
  gimple_omp_set_body (teams_stmt, NULL);
  gimple_seq_add_seq (&bind_body, olist);
  gimple_seq_add_seq (&bind_body, dlist);
  if (!gimple_omp_teams_grid_phony (teams_stmt))
    gimple_seq_add_stmt (&bind_body, gimple_build_omp_return (true));
  gimple_bind_set_body (bind, bind_body);

  pop_gimplify_context (bind);

  /* Attach the variables created while lowering to the new bind/block.  */
  gimple_bind_append_vars (bind, ctx->block_vars);
  BLOCK_VARS (block) = ctx->block_vars;
  if (BLOCK_VARS (block))
    TREE_USED (block) = 1;
}
12696
12697 /* Expand code within an artificial GIMPLE_OMP_GRID_BODY OMP construct. */
12698
12699 static void
lower_omp_grid_body(gimple_stmt_iterator * gsi_p,omp_context * ctx)12700 lower_omp_grid_body (gimple_stmt_iterator *gsi_p, omp_context *ctx)
12701 {
12702 gimple *stmt = gsi_stmt (*gsi_p);
12703 lower_omp (gimple_omp_body_ptr (stmt), ctx);
12704 gimple_seq_add_stmt (gimple_omp_body_ptr (stmt),
12705 gimple_build_omp_return (false));
12706 }
12707
12708
12709 /* Callback for lower_omp_1. Return non-NULL if *tp needs to be
12710 regimplified. If DATA is non-NULL, lower_omp_1 is outside
12711 of OMP context, but with task_shared_vars set. */
12712
12713 static tree
lower_omp_regimplify_p(tree * tp,int * walk_subtrees,void * data)12714 lower_omp_regimplify_p (tree *tp, int *walk_subtrees,
12715 void *data)
12716 {
12717 tree t = *tp;
12718
12719 /* Any variable with DECL_VALUE_EXPR needs to be regimplified. */
12720 if (VAR_P (t) && data == NULL && DECL_HAS_VALUE_EXPR_P (t))
12721 return t;
12722
12723 if (task_shared_vars
12724 && DECL_P (t)
12725 && bitmap_bit_p (task_shared_vars, DECL_UID (t)))
12726 return t;
12727
12728 /* If a global variable has been privatized, TREE_CONSTANT on
12729 ADDR_EXPR might be wrong. */
12730 if (data == NULL && TREE_CODE (t) == ADDR_EXPR)
12731 recompute_tree_invariant_for_addr_expr (t);
12732
12733 *walk_subtrees = !IS_TYPE_OR_DECL_P (t);
12734 return NULL_TREE;
12735 }
12736
/* Data to be communicated between lower_omp_regimplify_operands and
   lower_omp_regimplify_operands_p.  */

struct lower_omp_regimplify_operands_data
{
  /* Context in which the statement being regimplified appears; used to
     look up the privatized copy of each dummy var.  */
  omp_context *ctx;
  /* Flat record of (DECL_VALUE_EXPR, decl) pairs whose DECL_VALUE_EXPR
     was temporarily replaced, so the caller can restore them afterwards.  */
  vec<tree> *decls;
};
12745
12746 /* Helper function for lower_omp_regimplify_operands. Find
12747 omp_member_access_dummy_var vars and adjust temporarily their
12748 DECL_VALUE_EXPRs if needed. */
12749
12750 static tree
lower_omp_regimplify_operands_p(tree * tp,int * walk_subtrees,void * data)12751 lower_omp_regimplify_operands_p (tree *tp, int *walk_subtrees,
12752 void *data)
12753 {
12754 tree t = omp_member_access_dummy_var (*tp);
12755 if (t)
12756 {
12757 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
12758 lower_omp_regimplify_operands_data *ldata
12759 = (lower_omp_regimplify_operands_data *) wi->info;
12760 tree o = maybe_lookup_decl (t, ldata->ctx);
12761 if (o != t)
12762 {
12763 ldata->decls->safe_push (DECL_VALUE_EXPR (*tp));
12764 ldata->decls->safe_push (*tp);
12765 tree v = unshare_and_remap (DECL_VALUE_EXPR (*tp), t, o);
12766 SET_DECL_VALUE_EXPR (*tp, v);
12767 }
12768 }
12769 *walk_subtrees = !IS_TYPE_OR_DECL_P (*tp);
12770 return NULL_TREE;
12771 }
12772
12773 /* Wrapper around gimple_regimplify_operands that adjusts DECL_VALUE_EXPRs
12774 of omp_member_access_dummy_var vars during regimplification. */
12775
12776 static void
lower_omp_regimplify_operands(omp_context * ctx,gimple * stmt,gimple_stmt_iterator * gsi_p)12777 lower_omp_regimplify_operands (omp_context *ctx, gimple *stmt,
12778 gimple_stmt_iterator *gsi_p)
12779 {
12780 auto_vec<tree, 10> decls;
12781 if (ctx)
12782 {
12783 struct walk_stmt_info wi;
12784 memset (&wi, '\0', sizeof (wi));
12785 struct lower_omp_regimplify_operands_data data;
12786 data.ctx = ctx;
12787 data.decls = &decls;
12788 wi.info = &data;
12789 walk_gimple_op (stmt, lower_omp_regimplify_operands_p, &wi);
12790 }
12791 gimple_regimplify_operands (stmt, gsi_p);
12792 while (!decls.is_empty ())
12793 {
12794 tree t = decls.pop ();
12795 tree v = decls.pop ();
12796 SET_DECL_VALUE_EXPR (t, v);
12797 }
12798 }
12799
/* Lower the single statement at *GSI_P inside OMP context CTX (NULL when
   outside any OMP region).  Dispatches on the gimple code: OMP directives
   are handed to their dedicated lower_omp_* routine, container statements
   (bind/try/catch/...) have their bodies lowered recursively, and everything
   else is regimplified if it mentions variables whose DECL_VALUE_EXPR or
   task-shared status changed during lowering.  */

static void
lower_omp_1 (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  gimple *stmt = gsi_stmt (*gsi_p);
  struct walk_stmt_info wi;
  gcall *call_stmt;

  if (gimple_has_location (stmt))
    input_location = gimple_location (stmt);

  /* WI is only consulted when CTX is NULL but task_shared_vars is set
     (see lower_omp_regimplify_p), so it only needs clearing then.  */
  if (task_shared_vars)
    memset (&wi, '\0', sizeof (wi));

  /* If we have issued syntax errors, avoid doing any heavy lifting.
     Just replace the OMP directives with a NOP to avoid
     confusing RTL expansion.  */
  if (seen_error () && is_gimple_omp (stmt))
    {
      gsi_replace (gsi_p, gimple_build_nop (), true);
      return;
    }

  switch (gimple_code (stmt))
    {
    case GIMPLE_COND:
      {
	/* Regimplify both operands of the condition if either needs it.  */
	gcond *cond_stmt = as_a <gcond *> (stmt);
	if ((ctx || task_shared_vars)
	    && (walk_tree (gimple_cond_lhs_ptr (cond_stmt),
			   lower_omp_regimplify_p,
			   ctx ? NULL : &wi, NULL)
		|| walk_tree (gimple_cond_rhs_ptr (cond_stmt),
			      lower_omp_regimplify_p,
			      ctx ? NULL : &wi, NULL)))
	  lower_omp_regimplify_operands (ctx, cond_stmt, gsi_p);
      }
      break;
    /* Container statements: recurse into their bodies in the same CTX.  */
    case GIMPLE_CATCH:
      lower_omp (gimple_catch_handler_ptr (as_a <gcatch *> (stmt)), ctx);
      break;
    case GIMPLE_EH_FILTER:
      lower_omp (gimple_eh_filter_failure_ptr (stmt), ctx);
      break;
    case GIMPLE_TRY:
      lower_omp (gimple_try_eval_ptr (stmt), ctx);
      lower_omp (gimple_try_cleanup_ptr (stmt), ctx);
      break;
    case GIMPLE_TRANSACTION:
      lower_omp (gimple_transaction_body_ptr (as_a <gtransaction *> (stmt)),
		 ctx);
      break;
    case GIMPLE_BIND:
      lower_omp (gimple_bind_body_ptr (as_a <gbind *> (stmt)), ctx);
      maybe_remove_omp_member_access_dummy_vars (as_a <gbind *> (stmt));
      break;
    /* OMP directives: switch to the directive's own context (recorded
       during scanning) and dispatch to the matching lowering routine.
       Cancellable regions get the label that cancellation branches to.  */
    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      if (ctx->cancellable)
	ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
      lower_omp_taskreg (gsi_p, ctx);
      break;
    case GIMPLE_OMP_FOR:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      if (ctx->cancellable)
	ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
      lower_omp_for (gsi_p, ctx);
      break;
    case GIMPLE_OMP_SECTIONS:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      if (ctx->cancellable)
	ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
      lower_omp_sections (gsi_p, ctx);
      break;
    case GIMPLE_OMP_SINGLE:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_single (gsi_p, ctx);
      break;
    case GIMPLE_OMP_MASTER:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_master (gsi_p, ctx);
      break;
    case GIMPLE_OMP_TASKGROUP:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_taskgroup (gsi_p, ctx);
      break;
    case GIMPLE_OMP_ORDERED:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_ordered (gsi_p, ctx);
      break;
    case GIMPLE_OMP_SCAN:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_scan (gsi_p, ctx);
      break;
    case GIMPLE_OMP_CRITICAL:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_critical (gsi_p, ctx);
      break;
    case GIMPLE_OMP_ATOMIC_LOAD:
      if ((ctx || task_shared_vars)
	  && walk_tree (gimple_omp_atomic_load_rhs_ptr (
			  as_a <gomp_atomic_load *> (stmt)),
			lower_omp_regimplify_p, ctx ? NULL : &wi, NULL))
	lower_omp_regimplify_operands (ctx, stmt, gsi_p);
      break;
    case GIMPLE_OMP_TARGET:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_target (gsi_p, ctx);
      break;
    case GIMPLE_OMP_TEAMS:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      /* A host teams construct is lowered like a task region.  */
      if (gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
	lower_omp_taskreg (gsi_p, ctx);
      else
	lower_omp_teams (gsi_p, ctx);
      break;
    case GIMPLE_OMP_GRID_BODY:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_grid_body (gsi_p, ctx);
      break;
    case GIMPLE_CALL:
      tree fndecl;
      call_stmt = as_a <gcall *> (stmt);
      fndecl = gimple_call_fndecl (call_stmt);
      if (fndecl
	  && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
	switch (DECL_FUNCTION_CODE (fndecl))
	  {
	  case BUILT_IN_GOMP_BARRIER:
	    if (ctx == NULL)
	      break;
	    /* FALLTHRU */
	  case BUILT_IN_GOMP_CANCEL:
	  case BUILT_IN_GOMP_CANCELLATION_POINT:
	    omp_context *cctx;
	    cctx = ctx;
	    /* A section's cancellability lives on the enclosing
	       sections region.  */
	    if (gimple_code (cctx->stmt) == GIMPLE_OMP_SECTION)
	      cctx = cctx->outer;
	    gcc_assert (gimple_call_lhs (call_stmt) == NULL_TREE);
	    if (!cctx->cancellable)
	      {
		/* In a non-cancellable region a cancellation point is a
		   no-op; a plain barrier is left untouched.  */
		if (DECL_FUNCTION_CODE (fndecl)
		    == BUILT_IN_GOMP_CANCELLATION_POINT)
		  {
		    stmt = gimple_build_nop ();
		    gsi_replace (gsi_p, stmt, false);
		  }
		break;
	      }
	    if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_GOMP_BARRIER)
	      {
		/* In a cancellable region, use the cancel-aware barrier
		   entry point instead.  */
		fndecl = builtin_decl_explicit (BUILT_IN_GOMP_BARRIER_CANCEL);
		gimple_call_set_fndecl (call_stmt, fndecl);
		gimple_call_set_fntype (call_stmt, TREE_TYPE (fndecl));
	      }
	    /* Capture the call's return value and branch to the region's
	       cancel label when it is true (cancellation observed).  */
	    tree lhs;
	    lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (fndecl)));
	    gimple_call_set_lhs (call_stmt, lhs);
	    tree fallthru_label;
	    fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
	    gimple *g;
	    g = gimple_build_label (fallthru_label);
	    gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
	    g = gimple_build_cond (NE_EXPR, lhs,
				   fold_convert (TREE_TYPE (lhs),
						 boolean_false_node),
				   cctx->cancel_label, fallthru_label);
	    gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
	    break;
	  default:
	    break;
	  }
      goto regimplify;

    case GIMPLE_ASSIGN:
      /* For conditional lastprivate, record an "iteration stamp" into the
	 clause's conditional temporary whenever the tracked variable is
	 assigned inside the relevant region.  */
      for (omp_context *up = ctx; up; up = up->outer)
	{
	  if (gimple_code (up->stmt) == GIMPLE_OMP_ORDERED
	      || gimple_code (up->stmt) == GIMPLE_OMP_CRITICAL
	      || gimple_code (up->stmt) == GIMPLE_OMP_TASKGROUP
	      || gimple_code (up->stmt) == GIMPLE_OMP_SECTION
	      || gimple_code (up->stmt) == GIMPLE_OMP_SCAN
	      || (gimple_code (up->stmt) == GIMPLE_OMP_TARGET
		  && (gimple_omp_target_kind (up->stmt)
		      == GF_OMP_TARGET_KIND_DATA)))
	    continue;
	  else if (!up->lastprivate_conditional_map)
	    break;
	  tree lhs = get_base_address (gimple_assign_lhs (stmt));
	  if (TREE_CODE (lhs) == MEM_REF
	      && DECL_P (TREE_OPERAND (lhs, 0))
	      && TREE_CODE (TREE_TYPE (TREE_OPERAND (lhs,
						     0))) == REFERENCE_TYPE)
	    lhs = TREE_OPERAND (lhs, 0);
	  if (DECL_P (lhs))
	    if (tree *v = up->lastprivate_conditional_map->get (lhs))
	      {
		tree clauses;
		if (up->combined_into_simd_safelen1)
		  {
		    up = up->outer;
		    if (gimple_code (up->stmt) == GIMPLE_OMP_SCAN)
		      up = up->outer;
		  }
		if (gimple_code (up->stmt) == GIMPLE_OMP_FOR)
		  clauses = gimple_omp_for_clauses (up->stmt);
		else
		  clauses = gimple_omp_sections_clauses (up->stmt);
		/* Find the _condtemp_ clause that carries the iterator
		   counter (OMP_CLAUSE__CONDTEMP__ITER set).  */
		tree c = omp_find_clause (clauses, OMP_CLAUSE__CONDTEMP_);
		if (!OMP_CLAUSE__CONDTEMP__ITER (c))
		  c = omp_find_clause (OMP_CLAUSE_CHAIN (c),
				       OMP_CLAUSE__CONDTEMP_);
		gcc_assert (OMP_CLAUSE__CONDTEMP__ITER (c));
		gimple *g = gimple_build_assign (*v, OMP_CLAUSE_DECL (c));
		gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
	      }
	}
      /* FALLTHRU */

    default:
    regimplify:
      if ((ctx || task_shared_vars)
	  && walk_gimple_op (stmt, lower_omp_regimplify_p,
			     ctx ? NULL : &wi))
	{
	  /* Just remove clobbers, this should happen only if we have
	     "privatized" local addressable variables in SIMD regions,
	     the clobber isn't needed in that case and gimplifying address
	     of the ARRAY_REF into a pointer and creating MEM_REF based
	     clobber would create worse code than we get with the clobber
	     dropped.  */
	  if (gimple_clobber_p (stmt))
	    {
	      gsi_replace (gsi_p, gimple_build_nop (), true);
	      break;
	    }
	  lower_omp_regimplify_operands (ctx, stmt, gsi_p);
	}
      break;
    }
}
13053
13054 static void
lower_omp(gimple_seq * body,omp_context * ctx)13055 lower_omp (gimple_seq *body, omp_context *ctx)
13056 {
13057 location_t saved_location = input_location;
13058 gimple_stmt_iterator gsi;
13059 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
13060 lower_omp_1 (&gsi, ctx);
13061 /* During gimplification, we haven't folded statments inside offloading
13062 or taskreg regions (gimplify.c:maybe_fold_stmt); do that now. */
13063 if (target_nesting_level || taskreg_nesting_level)
13064 for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
13065 fold_stmt (&gsi);
13066 input_location = saved_location;
13067 }
13068
/* Main entry point.  Runs the omplower pass over the current function:
   scan all OMP constructs to build omp_contexts, then lower them into
   runtime calls and explicit data-sharing code.  Returns 0 (no extra
   TODO flags).  */

static unsigned int
execute_lower_omp (void)
{
  gimple_seq body;
  int i;
  omp_context *ctx;

  /* This pass always runs, to provide PROP_gimple_lomp.
     But often, there is nothing to do.  */
  if (flag_openacc == 0 && flag_openmp == 0
      && flag_openmp_simd == 0)
    return 0;

  all_contexts = splay_tree_new (splay_tree_compare_pointers, 0,
				 delete_omp_context);

  body = gimple_body (current_function_decl);

  /* When HSA offloading was requested, reshape target regions into grid
     form before scanning.  */
  if (hsa_gen_requested_p ())
    omp_grid_gridify_all_targets (&body);

  /* Phase 1: scan, building omp_context records for each construct.  */
  scan_omp (&body, NULL);
  gcc_assert (taskreg_nesting_level == 0);
  FOR_EACH_VEC_ELT (taskreg_contexts, i, ctx)
    finish_taskreg_scan (ctx);
  taskreg_contexts.release ();

  /* Phase 2: lower, but only if scanning actually found any construct.  */
  if (all_contexts->root)
    {
      if (task_shared_vars)
	push_gimplify_context ();
      lower_omp (&body, NULL);
      if (task_shared_vars)
	pop_gimplify_context (NULL);
    }

  /* Tear down per-function pass state.  */
  if (all_contexts)
    {
      splay_tree_delete (all_contexts);
      all_contexts = NULL;
    }
  BITMAP_FREE (task_shared_vars);
  BITMAP_FREE (global_nonaddressable_vars);

  /* If current function is a method, remove artificial dummy VAR_DECL created
     for non-static data member privatization, they aren't needed for
     debuginfo nor anything else, have been already replaced everywhere in the
     IL and cause problems with LTO.  */
  if (DECL_ARGUMENTS (current_function_decl)
      && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
      && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
	  == POINTER_TYPE))
    remove_member_access_dummy_vars (DECL_INITIAL (current_function_decl));
  return 0;
}
13126
namespace {

/* Pass descriptor for the "omplower" pass.  Requires any GIMPLE and
   provides PROP_gimple_lomp / PROP_gimple_lomp_dev (see execute_lower_omp,
   which always runs so those properties are always established).  */

const pass_data pass_data_lower_omp =
{
  GIMPLE_PASS, /* type */
  "omplower", /* name */
  OPTGROUP_OMP, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  PROP_gimple_lomp | PROP_gimple_lomp_dev, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* Pass-manager wrapper; all of the work happens in execute_lower_omp.  */

class pass_lower_omp : public gimple_opt_pass
{
public:
  pass_lower_omp (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_lower_omp, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return execute_lower_omp (); }

}; // class pass_lower_omp

} // anon namespace
13155
13156 gimple_opt_pass *
make_pass_lower_omp(gcc::context * ctxt)13157 make_pass_lower_omp (gcc::context *ctxt)
13158 {
13159 return new pass_lower_omp (ctxt);
13160 }
13161
13162 /* The following is a utility to diagnose structured block violations.
13163 It is not part of the "omplower" pass, as that's invoked too late. It
13164 should be invoked by the respective front ends after gimplification. */
13165
13166 static splay_tree all_labels;
13167
/* Check for mismatched contexts and generate an error if needed.  Return
   true if an error is detected.  BRANCH_CTX and LABEL_CTX are the innermost
   OMP constructs enclosing the branch and its target label (NULL when
   outside any construct); on error the branch at *GSI_P is replaced by
   a NOP.  */

static bool
diagnose_sb_0 (gimple_stmt_iterator *gsi_p,
	       gimple *branch_ctx, gimple *label_ctx)
{
  gcc_checking_assert (!branch_ctx || is_gimple_omp (branch_ctx));
  gcc_checking_assert (!label_ctx || is_gimple_omp (label_ctx));

  /* Same (possibly NULL) context on both ends: the branch is fine.  */
  if (label_ctx == branch_ctx)
    return false;

  /* Pick the wording for the diagnostic: OpenACC if either end is inside
     an OpenACC construct, OpenMP otherwise.  */
  const char* kind = NULL;

  if (flag_openacc)
    {
      if ((branch_ctx && is_gimple_omp_oacc (branch_ctx))
	  || (label_ctx && is_gimple_omp_oacc (label_ctx)))
	{
	  gcc_checking_assert (kind == NULL);
	  kind = "OpenACC";
	}
    }
  if (kind == NULL)
    {
      gcc_checking_assert (flag_openmp || flag_openmp_simd);
      kind = "OpenMP";
    }

  /* Previously we kept track of the label's entire context in diagnose_sb_[12]
     so we could traverse it and issue a correct "exit" or "enter" error
     message upon a structured block violation.

     We built the context by building a list with tree_cons'ing, but there is
     no easy counterpart in gimple tuples.  It seems like far too much work
     for issuing exit/enter error messages.  If someone really misses the
     distinct error message... patches welcome.  */

#if 0
  /* Try to avoid confusing the user by producing and error message
     with correct "exit" or "enter" verbiage.  We prefer "exit"
     unless we can show that LABEL_CTX is nested within BRANCH_CTX.  */
  if (branch_ctx == NULL)
    exit_p = false;
  else
    {
      while (label_ctx)
	{
	  if (TREE_VALUE (label_ctx) == branch_ctx)
	    {
	      exit_p = false;
	      break;
	    }
	  label_ctx = TREE_CHAIN (label_ctx);
	}
    }

  if (exit_p)
    error ("invalid exit from %s structured block", kind);
  else
    error ("invalid entry to %s structured block", kind);
#endif

  /* If it's obvious we have an invalid entry, be specific about the error.  */
  if (branch_ctx == NULL)
    error ("invalid entry to %s structured block", kind);
  else
    {
      /* Otherwise, be vague and lazy, but efficient.  */
      error ("invalid branch to/from %s structured block", kind);
    }

  gsi_replace (gsi_p, gimple_build_nop (), false);
  return true;
}
13244
13245 /* Pass 1: Create a minimal tree of structured blocks, and record
13246 where each label is found. */
13247
13248 static tree
diagnose_sb_1(gimple_stmt_iterator * gsi_p,bool * handled_ops_p,struct walk_stmt_info * wi)13249 diagnose_sb_1 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
13250 struct walk_stmt_info *wi)
13251 {
13252 gimple *context = (gimple *) wi->info;
13253 gimple *inner_context;
13254 gimple *stmt = gsi_stmt (*gsi_p);
13255
13256 *handled_ops_p = true;
13257
13258 switch (gimple_code (stmt))
13259 {
13260 WALK_SUBSTMTS;
13261
13262 case GIMPLE_OMP_PARALLEL:
13263 case GIMPLE_OMP_TASK:
13264 case GIMPLE_OMP_SECTIONS:
13265 case GIMPLE_OMP_SINGLE:
13266 case GIMPLE_OMP_SECTION:
13267 case GIMPLE_OMP_MASTER:
13268 case GIMPLE_OMP_ORDERED:
13269 case GIMPLE_OMP_SCAN:
13270 case GIMPLE_OMP_CRITICAL:
13271 case GIMPLE_OMP_TARGET:
13272 case GIMPLE_OMP_TEAMS:
13273 case GIMPLE_OMP_TASKGROUP:
13274 /* The minimal context here is just the current OMP construct. */
13275 inner_context = stmt;
13276 wi->info = inner_context;
13277 walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
13278 wi->info = context;
13279 break;
13280
13281 case GIMPLE_OMP_FOR:
13282 inner_context = stmt;
13283 wi->info = inner_context;
13284 /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
13285 walk them. */
13286 walk_gimple_seq (gimple_omp_for_pre_body (stmt),
13287 diagnose_sb_1, NULL, wi);
13288 walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
13289 wi->info = context;
13290 break;
13291
13292 case GIMPLE_LABEL:
13293 splay_tree_insert (all_labels,
13294 (splay_tree_key) gimple_label_label (
13295 as_a <glabel *> (stmt)),
13296 (splay_tree_value) context);
13297 break;
13298
13299 default:
13300 break;
13301 }
13302
13303 return NULL_TREE;
13304 }
13305
/* Pass 2: Check each branch and see if its context differs from that of
   the destination label's context.  WI->info again tracks the innermost
   enclosing OMP construct; every control-transfer statement (cond, goto,
   switch, return) is checked via diagnose_sb_0 against the context its
   target label was recorded in by pass 1.  */

static tree
diagnose_sb_2 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
	       struct walk_stmt_info *wi)
{
  gimple *context = (gimple *) wi->info;
  splay_tree_node n;
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;

  switch (gimple_code (stmt))
    {
    WALK_SUBSTMTS;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_SCAN:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_TARGET:
    case GIMPLE_OMP_TEAMS:
    case GIMPLE_OMP_TASKGROUP:
      /* Descend with this construct as the current context.  */
      wi->info = stmt;
      walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_OMP_FOR:
      wi->info = stmt;
      /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
	 walk them.  */
      walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt),
			   diagnose_sb_2, NULL, wi);
      walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_COND:
      {
	/* Both edges of a conditional branch must stay in context.  */
	gcond *cond_stmt = as_a <gcond *> (stmt);
	tree lab = gimple_cond_true_label (cond_stmt);
	if (lab)
	  {
	    n = splay_tree_lookup (all_labels,
				   (splay_tree_key) lab);
	    diagnose_sb_0 (gsi_p, context,
			   n ? (gimple *) n->value : NULL);
	  }
	lab = gimple_cond_false_label (cond_stmt);
	if (lab)
	  {
	    n = splay_tree_lookup (all_labels,
				   (splay_tree_key) lab);
	    diagnose_sb_0 (gsi_p, context,
			   n ? (gimple *) n->value : NULL);
	  }
      }
      break;

    case GIMPLE_GOTO:
      {
	/* Computed gotos (non-LABEL_DECL destinations) are not checked.  */
	tree lab = gimple_goto_dest (stmt);
	if (TREE_CODE (lab) != LABEL_DECL)
	  break;

	n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
	diagnose_sb_0 (gsi_p, context, n ? (gimple *) n->value : NULL);
      }
      break;

    case GIMPLE_SWITCH:
      {
	/* Check every case label; stop at the first diagnosed error
	   (diagnose_sb_0 has already replaced the statement).  */
	gswitch *switch_stmt = as_a <gswitch *> (stmt);
	unsigned int i;
	for (i = 0; i < gimple_switch_num_labels (switch_stmt); ++i)
	  {
	    tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
	    n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
	    if (n && diagnose_sb_0 (gsi_p, context, (gimple *) n->value))
	      break;
	  }
      }
      break;

    case GIMPLE_RETURN:
      /* A return always leaves the construct: label context is NULL, so
	 this errors whenever CONTEXT is non-NULL.  */
      diagnose_sb_0 (gsi_p, context, NULL);
      break;

    default:
      break;
    }

  return NULL_TREE;
}
13407
13408 static unsigned int
diagnose_omp_structured_block_errors(void)13409 diagnose_omp_structured_block_errors (void)
13410 {
13411 struct walk_stmt_info wi;
13412 gimple_seq body = gimple_body (current_function_decl);
13413
13414 all_labels = splay_tree_new (splay_tree_compare_pointers, 0, 0);
13415
13416 memset (&wi, 0, sizeof (wi));
13417 walk_gimple_seq (body, diagnose_sb_1, NULL, &wi);
13418
13419 memset (&wi, 0, sizeof (wi));
13420 wi.want_locations = true;
13421 walk_gimple_seq_mod (&body, diagnose_sb_2, NULL, &wi);
13422
13423 gimple_set_body (current_function_decl, body);
13424
13425 splay_tree_delete (all_labels);
13426 all_labels = NULL;
13427
13428 return 0;
13429 }
13430
namespace {

/* Pass-manager registration data for the OMP structured-block diagnostic
   pass.  The leading '*' in the name marks it as a pass that is never
   dumped by -fdump-tree-all on its own.  */
const pass_data pass_data_diagnose_omp_blocks =
{
  GIMPLE_PASS, /* type */
  "*diagnose_omp_blocks", /* name */
  OPTGROUP_OMP, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* Thin opt_pass wrapper that gates on any OpenMP/OpenACC mode being
   enabled and delegates to diagnose_omp_structured_block_errors.  */
class pass_diagnose_omp_blocks : public gimple_opt_pass
{
public:
  pass_diagnose_omp_blocks (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_diagnose_omp_blocks, ctxt)
  {}

  /* opt_pass methods: */
  /* Run only when -fopenacc, -fopenmp or -fopenmp-simd is in effect.  */
  virtual bool gate (function *)
  {
    return flag_openacc || flag_openmp || flag_openmp_simd;
  }
  /* Per-function entry point; return value is the pass's TODO flags.  */
  virtual unsigned int execute (function *)
  {
    return diagnose_omp_structured_block_errors ();
  }

}; // class pass_diagnose_omp_blocks

} // anon namespace
13466
/* Factory function called by the pass manager (see passes.def) to
   instantiate the diagnostic pass; caller takes ownership.  */
gimple_opt_pass *
make_pass_diagnose_omp_blocks (gcc::context *ctxt)
{
  return new pass_diagnose_omp_blocks (ctxt);
}
13472
13473
13474 #include "gt-omp-low.h"
13475