1 /* Lowering pass for OMP directives.  Converts OMP directives into explicit
2    calls to the runtime library (libgomp), data marshalling to implement data
3    sharing and copying clauses, offloading to accelerators, and more.
4 
5    Contributed by Diego Novillo <dnovillo@redhat.com>
6 
7    Copyright (C) 2005-2019 Free Software Foundation, Inc.
8 
9 This file is part of GCC.
10 
11 GCC is free software; you can redistribute it and/or modify it under
12 the terms of the GNU General Public License as published by the Free
13 Software Foundation; either version 3, or (at your option) any later
14 version.
15 
16 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
17 WARRANTY; without even the implied warranty of MERCHANTABILITY or
18 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
19 for more details.
20 
21 You should have received a copy of the GNU General Public License
22 along with GCC; see the file COPYING3.  If not see
23 <http://www.gnu.org/licenses/>.  */
24 
25 #include "config.h"
26 #include "system.h"
27 #include "coretypes.h"
28 #include "backend.h"
29 #include "target.h"
30 #include "tree.h"
31 #include "gimple.h"
32 #include "tree-pass.h"
33 #include "ssa.h"
34 #include "cgraph.h"
35 #include "pretty-print.h"
36 #include "diagnostic-core.h"
37 #include "fold-const.h"
38 #include "stor-layout.h"
39 #include "internal-fn.h"
40 #include "gimple-fold.h"
41 #include "gimplify.h"
42 #include "gimple-iterator.h"
43 #include "gimplify-me.h"
44 #include "gimple-walk.h"
45 #include "tree-iterator.h"
46 #include "tree-inline.h"
47 #include "langhooks.h"
48 #include "tree-dfa.h"
49 #include "tree-ssa.h"
50 #include "splay-tree.h"
51 #include "omp-general.h"
52 #include "omp-low.h"
53 #include "omp-grid.h"
54 #include "gimple-low.h"
55 #include "symbol-summary.h"
56 #include "tree-nested.h"
57 #include "context.h"
58 #include "gomp-constants.h"
59 #include "gimple-pretty-print.h"
60 #include "hsa-common.h"
61 #include "stringpool.h"
62 #include "attribs.h"
63 
64 /* Lowering of OMP parallel and workshare constructs proceeds in two
65    phases.  The first phase scans the function looking for OMP statements
66    and then for variables that must be replaced to satisfy data sharing
67    clauses.  The second phase expands code for the constructs, as well as
68    re-gimplifying things when variables have been replaced with complex
69    expressions.
70 
71    Final code generation is done by pass_expand_omp.  The flowgraph is
72    scanned for regions which are then moved to a new
73    function, to be invoked by the thread library, or offloaded.  */
74 
75 /* Context structure.  Used to store information about each parallel
76    directive in the code.  */
77 
78 struct omp_context
79 {
80   /* This field must be at the beginning, as we do "inheritance": Some
81      callback functions for tree-inline.c (e.g., omp_copy_decl)
82      receive a copy_body_data pointer that is up-casted to an
83      omp_context pointer.  */
84   copy_body_data cb;
85 
86   /* The tree of contexts corresponding to the encountered constructs.  */
87   struct omp_context *outer;
88   gimple *stmt;
89 
90   /* Map variables to fields in a structure that allows communication
91      between sending and receiving threads.  */
92   splay_tree field_map;
93   tree record_type;
94   tree sender_decl;
95   tree receiver_decl;
96 
97   /* These are used just by task contexts, if task firstprivate fn is
98      needed.  srecord_type is used to communicate from the thread
99      that encountered the task construct to task firstprivate fn,
100      record_type is allocated by GOMP_task, initialized by task firstprivate
101      fn and passed to the task body fn.  */
102   splay_tree sfield_map;
103   tree srecord_type;
104 
105   /* A chain of variables to add to the top-level block surrounding the
106      construct.  In the case of a parallel, this is in the child function.  */
107   tree block_vars;
108 
109   /* Label to which GOMP_cancel{,llation_point} and explicit and implicit
110      barriers should jump to during omplower pass.  */
111   tree cancel_label;
112 
113   /* The sibling GIMPLE_OMP_FOR simd with _simt_ clause or NULL
114      otherwise.  */
115   gimple *simt_stmt;
116 
117   /* For task reductions registered in this context, a vector containing
118      the length of the private copies block (if constant, otherwise NULL)
119      and then offsets (if constant, otherwise NULL) for each entry.  */
120   vec<tree> task_reductions;
121 
122   /* And a hash map from the reduction clauses to the registered array
123      elts.  */
124   hash_map<tree, unsigned> *task_reduction_map;
125 
126   /* Nesting depth of this context.  Used to beautify error messages re
127      invalid gotos.  The outermost ctx is depth 1, with depth 0 being
128      reserved for the main body of the function.  */
129   int depth;
130 
131   /* True if this parallel directive is nested within another.  */
132   bool is_nested;
133 
134   /* True if this construct can be cancelled.  */
135   bool cancellable;
136 };
137 
138 static splay_tree all_contexts;
139 static int taskreg_nesting_level;
140 static int target_nesting_level;
141 static bitmap task_shared_vars;
142 static bitmap global_nonaddressable_vars;
143 static vec<omp_context *> taskreg_contexts;
144 
145 static void scan_omp (gimple_seq *, omp_context *);
146 static tree scan_omp_1_op (tree *, int *, void *);
147 
148 #define WALK_SUBSTMTS  \
149     case GIMPLE_BIND: \
150     case GIMPLE_TRY: \
151     case GIMPLE_CATCH: \
152     case GIMPLE_EH_FILTER: \
153     case GIMPLE_TRANSACTION: \
154       /* The sub-statements for these should be walked.  */ \
155       *handled_ops_p = false; \
156       break;
157 
158 /* Return true if CTX corresponds to an oacc parallel region.  */
159 
160 static bool
is_oacc_parallel(omp_context * ctx)161 is_oacc_parallel (omp_context *ctx)
162 {
163   enum gimple_code outer_type = gimple_code (ctx->stmt);
164   return ((outer_type == GIMPLE_OMP_TARGET)
165 	  && (gimple_omp_target_kind (ctx->stmt)
166 	      == GF_OMP_TARGET_KIND_OACC_PARALLEL));
167 }
168 
169 /* Return true if CTX corresponds to an oacc kernels region.  */
170 
171 static bool
is_oacc_kernels(omp_context * ctx)172 is_oacc_kernels (omp_context *ctx)
173 {
174   enum gimple_code outer_type = gimple_code (ctx->stmt);
175   return ((outer_type == GIMPLE_OMP_TARGET)
176 	  && (gimple_omp_target_kind (ctx->stmt)
177 	      == GF_OMP_TARGET_KIND_OACC_KERNELS));
178 }
179 
180 /* If DECL is the artificial dummy VAR_DECL created for non-static
181    data member privatization, return the underlying "this" parameter,
182    otherwise return NULL.  */
183 
184 tree
omp_member_access_dummy_var(tree decl)185 omp_member_access_dummy_var (tree decl)
186 {
187   if (!VAR_P (decl)
188       || !DECL_ARTIFICIAL (decl)
189       || !DECL_IGNORED_P (decl)
190       || !DECL_HAS_VALUE_EXPR_P (decl)
191       || !lang_hooks.decls.omp_disregard_value_expr (decl, false))
192     return NULL_TREE;
193 
194   tree v = DECL_VALUE_EXPR (decl);
195   if (TREE_CODE (v) != COMPONENT_REF)
196     return NULL_TREE;
197 
198   while (1)
199     switch (TREE_CODE (v))
200       {
201       case COMPONENT_REF:
202       case MEM_REF:
203       case INDIRECT_REF:
204       CASE_CONVERT:
205       case POINTER_PLUS_EXPR:
206 	v = TREE_OPERAND (v, 0);
207 	continue;
208       case PARM_DECL:
209 	if (DECL_CONTEXT (v) == current_function_decl
210 	    && DECL_ARTIFICIAL (v)
211 	    && TREE_CODE (TREE_TYPE (v)) == POINTER_TYPE)
212 	  return v;
213 	return NULL_TREE;
214       default:
215 	return NULL_TREE;
216       }
217 }
218 
219 /* Helper for unshare_and_remap, called through walk_tree.  */
220 
221 static tree
unshare_and_remap_1(tree * tp,int * walk_subtrees,void * data)222 unshare_and_remap_1 (tree *tp, int *walk_subtrees, void *data)
223 {
224   tree *pair = (tree *) data;
225   if (*tp == pair[0])
226     {
227       *tp = unshare_expr (pair[1]);
228       *walk_subtrees = 0;
229     }
230   else if (IS_TYPE_OR_DECL_P (*tp))
231     *walk_subtrees = 0;
232   return NULL_TREE;
233 }
234 
235 /* Return unshare_expr (X) with all occurrences of FROM
236    replaced with TO.  */
237 
238 static tree
unshare_and_remap(tree x,tree from,tree to)239 unshare_and_remap (tree x, tree from, tree to)
240 {
241   tree pair[2] = { from, to };
242   x = unshare_expr (x);
243   walk_tree (&x, unshare_and_remap_1, pair, NULL);
244   return x;
245 }
246 
247 /* Convenience function for calling scan_omp_1_op on tree operands.  */
248 
249 static inline tree
scan_omp_op(tree * tp,omp_context * ctx)250 scan_omp_op (tree *tp, omp_context *ctx)
251 {
252   struct walk_stmt_info wi;
253 
254   memset (&wi, 0, sizeof (wi));
255   wi.info = ctx;
256   wi.want_locations = true;
257 
258   return walk_tree (tp, scan_omp_1_op, &wi, NULL);
259 }
260 
261 static void lower_omp (gimple_seq *, omp_context *);
262 static tree lookup_decl_in_outer_ctx (tree, omp_context *);
263 static tree maybe_lookup_decl_in_outer_ctx (tree, omp_context *);
264 
265 /* Return true if CTX is for an omp parallel.  */
266 
267 static inline bool
is_parallel_ctx(omp_context * ctx)268 is_parallel_ctx (omp_context *ctx)
269 {
270   return gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL;
271 }
272 
273 
274 /* Return true if CTX is for an omp task.  */
275 
276 static inline bool
is_task_ctx(omp_context * ctx)277 is_task_ctx (omp_context *ctx)
278 {
279   return gimple_code (ctx->stmt) == GIMPLE_OMP_TASK;
280 }
281 
282 
283 /* Return true if CTX is for an omp taskloop.  */
284 
285 static inline bool
is_taskloop_ctx(omp_context * ctx)286 is_taskloop_ctx (omp_context *ctx)
287 {
288   return gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
289 	 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP;
290 }
291 
292 
293 /* Return true if CTX is for a host omp teams.  */
294 
295 static inline bool
is_host_teams_ctx(omp_context * ctx)296 is_host_teams_ctx (omp_context *ctx)
297 {
298   return gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
299 	 && gimple_omp_teams_host (as_a <gomp_teams *> (ctx->stmt));
300 }
301 
302 /* Return true if CTX is for an omp parallel or omp task or host omp teams
303    (the last one is strictly not a task region in OpenMP speak, but we
304    need to treat it similarly).  */
305 
306 static inline bool
is_taskreg_ctx(omp_context * ctx)307 is_taskreg_ctx (omp_context *ctx)
308 {
309   return is_parallel_ctx (ctx) || is_task_ctx (ctx) || is_host_teams_ctx (ctx);
310 }
311 
312 /* Return true if EXPR is variable sized.  */
313 
314 static inline bool
is_variable_sized(const_tree expr)315 is_variable_sized (const_tree expr)
316 {
317   return !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
318 }
319 
320 /* Lookup variables.  The "maybe" form
321    allows for the variable form to not have been entered, otherwise we
322    assert that the variable must have been entered.  */
323 
324 static inline tree
lookup_decl(tree var,omp_context * ctx)325 lookup_decl (tree var, omp_context *ctx)
326 {
327   tree *n = ctx->cb.decl_map->get (var);
328   return *n;
329 }
330 
331 static inline tree
maybe_lookup_decl(const_tree var,omp_context * ctx)332 maybe_lookup_decl (const_tree var, omp_context *ctx)
333 {
334   tree *n = ctx->cb.decl_map->get (const_cast<tree> (var));
335   return n ? *n : NULL_TREE;
336 }
337 
338 static inline tree
lookup_field(tree var,omp_context * ctx)339 lookup_field (tree var, omp_context *ctx)
340 {
341   splay_tree_node n;
342   n = splay_tree_lookup (ctx->field_map, (splay_tree_key) var);
343   return (tree) n->value;
344 }
345 
346 static inline tree
lookup_sfield(splay_tree_key key,omp_context * ctx)347 lookup_sfield (splay_tree_key key, omp_context *ctx)
348 {
349   splay_tree_node n;
350   n = splay_tree_lookup (ctx->sfield_map
351 			 ? ctx->sfield_map : ctx->field_map, key);
352   return (tree) n->value;
353 }
354 
355 static inline tree
lookup_sfield(tree var,omp_context * ctx)356 lookup_sfield (tree var, omp_context *ctx)
357 {
358   return lookup_sfield ((splay_tree_key) var, ctx);
359 }
360 
361 static inline tree
maybe_lookup_field(splay_tree_key key,omp_context * ctx)362 maybe_lookup_field (splay_tree_key key, omp_context *ctx)
363 {
364   splay_tree_node n;
365   n = splay_tree_lookup (ctx->field_map, key);
366   return n ? (tree) n->value : NULL_TREE;
367 }
368 
369 static inline tree
maybe_lookup_field(tree var,omp_context * ctx)370 maybe_lookup_field (tree var, omp_context *ctx)
371 {
372   return maybe_lookup_field ((splay_tree_key) var, ctx);
373 }
374 
375 /* Return true if DECL should be copied by pointer.  SHARED_CTX is
376    the parallel context if DECL is to be shared.  */
377 
378 static bool
use_pointer_for_field(tree decl,omp_context * shared_ctx)379 use_pointer_for_field (tree decl, omp_context *shared_ctx)
380 {
381   if (AGGREGATE_TYPE_P (TREE_TYPE (decl))
382       || TYPE_ATOMIC (TREE_TYPE (decl)))
383     return true;
384 
385   /* We can only use copy-in/copy-out semantics for shared variables
386      when we know the value is not accessible from an outer scope.  */
387   if (shared_ctx)
388     {
389       gcc_assert (!is_gimple_omp_oacc (shared_ctx->stmt));
390 
391       /* ??? Trivially accessible from anywhere.  But why would we even
392 	 be passing an address in this case?  Should we simply assert
393 	 this to be false, or should we have a cleanup pass that removes
394 	 these from the list of mappings?  */
395       if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, shared_ctx)))
396 	return true;
397 
398       /* For variables with DECL_HAS_VALUE_EXPR_P set, we cannot tell
399 	 without analyzing the expression whether or not its location
400 	 is accessible to anyone else.  In the case of nested parallel
401 	 regions it certainly may be.  */
402       if (TREE_CODE (decl) != RESULT_DECL && DECL_HAS_VALUE_EXPR_P (decl))
403 	return true;
404 
405       /* Do not use copy-in/copy-out for variables that have their
406 	 address taken.  */
407       if (is_global_var (decl))
408 	{
409 	  /* For file scope vars, track whether we've seen them as
410 	     non-addressable initially and in that case, keep the same
411 	     answer for the duration of the pass, even when they are made
412 	     addressable later on e.g. through reduction expansion.  Global
413 	     variables which weren't addressable before the pass will not
414 	     have their privatized copies address taken.  See PR91216.  */
415 	  if (!TREE_ADDRESSABLE (decl))
416 	    {
417 	      if (!global_nonaddressable_vars)
418 		global_nonaddressable_vars = BITMAP_ALLOC (NULL);
419 	      bitmap_set_bit (global_nonaddressable_vars, DECL_UID (decl));
420 	    }
421 	  else if (!global_nonaddressable_vars
422 		   || !bitmap_bit_p (global_nonaddressable_vars,
423 				     DECL_UID (decl)))
424 	    return true;
425 	}
426       else if (TREE_ADDRESSABLE (decl))
427 	return true;
428 
429       /* lower_send_shared_vars only uses copy-in, but not copy-out
430 	 for these.  */
431       if (TREE_READONLY (decl)
432 	  || ((TREE_CODE (decl) == RESULT_DECL
433 	       || TREE_CODE (decl) == PARM_DECL)
434 	      && DECL_BY_REFERENCE (decl)))
435 	return false;
436 
437       /* Disallow copy-in/out in nested parallel if
438 	 decl is shared in outer parallel, otherwise
439 	 each thread could store the shared variable
440 	 in its own copy-in location, making the
441 	 variable no longer really shared.  */
442       if (shared_ctx->is_nested)
443 	{
444 	  omp_context *up;
445 
446 	  for (up = shared_ctx->outer; up; up = up->outer)
447 	    if ((is_taskreg_ctx (up)
448 		 || (gimple_code (up->stmt) == GIMPLE_OMP_TARGET
449 		     && is_gimple_omp_offloaded (up->stmt)))
450 		&& maybe_lookup_decl (decl, up))
451 	      break;
452 
453 	  if (up)
454 	    {
455 	      tree c;
456 
457 	      if (gimple_code (up->stmt) == GIMPLE_OMP_TARGET)
458 		{
459 		  for (c = gimple_omp_target_clauses (up->stmt);
460 		       c; c = OMP_CLAUSE_CHAIN (c))
461 		    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
462 			&& OMP_CLAUSE_DECL (c) == decl)
463 		      break;
464 		}
465 	      else
466 		for (c = gimple_omp_taskreg_clauses (up->stmt);
467 		     c; c = OMP_CLAUSE_CHAIN (c))
468 		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
469 		      && OMP_CLAUSE_DECL (c) == decl)
470 		    break;
471 
472 	      if (c)
473 		goto maybe_mark_addressable_and_ret;
474 	    }
475 	}
476 
477       /* For tasks avoid using copy-in/out.  As tasks can be
478 	 deferred or executed in different thread, when GOMP_task
479 	 returns, the task hasn't necessarily terminated.  */
480       if (is_task_ctx (shared_ctx))
481 	{
482 	  tree outer;
483 	maybe_mark_addressable_and_ret:
484 	  outer = maybe_lookup_decl_in_outer_ctx (decl, shared_ctx);
485 	  if (is_gimple_reg (outer) && !omp_member_access_dummy_var (outer))
486 	    {
487 	      /* Taking address of OUTER in lower_send_shared_vars
488 		 might need regimplification of everything that uses the
489 		 variable.  */
490 	      if (!task_shared_vars)
491 		task_shared_vars = BITMAP_ALLOC (NULL);
492 	      bitmap_set_bit (task_shared_vars, DECL_UID (outer));
493 	      TREE_ADDRESSABLE (outer) = 1;
494 	    }
495 	  return true;
496 	}
497     }
498 
499   return false;
500 }
501 
502 /* Construct a new automatic decl similar to VAR.  */
503 
504 static tree
omp_copy_decl_2(tree var,tree name,tree type,omp_context * ctx)505 omp_copy_decl_2 (tree var, tree name, tree type, omp_context *ctx)
506 {
507   tree copy = copy_var_decl (var, name, type);
508 
509   DECL_CONTEXT (copy) = current_function_decl;
510   DECL_CHAIN (copy) = ctx->block_vars;
511   /* If VAR is listed in task_shared_vars, it means it wasn't
512      originally addressable and is just because task needs to take
513      it's address.  But we don't need to take address of privatizations
514      from that var.  */
515   if (TREE_ADDRESSABLE (var)
516       && ((task_shared_vars
517 	   && bitmap_bit_p (task_shared_vars, DECL_UID (var)))
518 	  || (global_nonaddressable_vars
519 	      && bitmap_bit_p (global_nonaddressable_vars, DECL_UID (var)))))
520     TREE_ADDRESSABLE (copy) = 0;
521   ctx->block_vars = copy;
522 
523   return copy;
524 }
525 
526 static tree
omp_copy_decl_1(tree var,omp_context * ctx)527 omp_copy_decl_1 (tree var, omp_context *ctx)
528 {
529   return omp_copy_decl_2 (var, DECL_NAME (var), TREE_TYPE (var), ctx);
530 }
531 
532 /* Build COMPONENT_REF and set TREE_THIS_VOLATILE and TREE_READONLY on it
533    as appropriate.  */
534 static tree
omp_build_component_ref(tree obj,tree field)535 omp_build_component_ref (tree obj, tree field)
536 {
537   tree ret = build3 (COMPONENT_REF, TREE_TYPE (field), obj, field, NULL);
538   if (TREE_THIS_VOLATILE (field))
539     TREE_THIS_VOLATILE (ret) |= 1;
540   if (TREE_READONLY (field))
541     TREE_READONLY (ret) |= 1;
542   return ret;
543 }
544 
545 /* Build tree nodes to access the field for VAR on the receiver side.  */
546 
547 static tree
build_receiver_ref(tree var,bool by_ref,omp_context * ctx)548 build_receiver_ref (tree var, bool by_ref, omp_context *ctx)
549 {
550   tree x, field = lookup_field (var, ctx);
551 
552   /* If the receiver record type was remapped in the child function,
553      remap the field into the new record type.  */
554   x = maybe_lookup_field (field, ctx);
555   if (x != NULL)
556     field = x;
557 
558   x = build_simple_mem_ref (ctx->receiver_decl);
559   TREE_THIS_NOTRAP (x) = 1;
560   x = omp_build_component_ref (x, field);
561   if (by_ref)
562     {
563       x = build_simple_mem_ref (x);
564       TREE_THIS_NOTRAP (x) = 1;
565     }
566 
567   return x;
568 }
569 
570 /* Build tree nodes to access VAR in the scope outer to CTX.  In the case
571    of a parallel, this is a component reference; for workshare constructs
572    this is some variable.  */
573 
574 static tree
575 build_outer_var_ref (tree var, omp_context *ctx,
576 		     enum omp_clause_code code = OMP_CLAUSE_ERROR)
577 {
578   tree x;
579   omp_context *outer = ctx->outer;
580   while (outer && gimple_code (outer->stmt) == GIMPLE_OMP_TASKGROUP)
581     outer = outer->outer;
582 
583   if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
584     x = var;
585   else if (is_variable_sized (var))
586     {
587       x = TREE_OPERAND (DECL_VALUE_EXPR (var), 0);
588       x = build_outer_var_ref (x, ctx, code);
589       x = build_simple_mem_ref (x);
590     }
591   else if (is_taskreg_ctx (ctx))
592     {
593       bool by_ref = use_pointer_for_field (var, NULL);
594       x = build_receiver_ref (var, by_ref, ctx);
595     }
596   else if ((gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
597 	    && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
598 	   || (code == OMP_CLAUSE_PRIVATE
599 	       && (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
600 		   || gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS
601 		   || gimple_code (ctx->stmt) == GIMPLE_OMP_SINGLE)))
602     {
603       /* #pragma omp simd isn't a worksharing construct, and can reference
604 	 even private vars in its linear etc. clauses.
605 	 Similarly for OMP_CLAUSE_PRIVATE with outer ref, that can refer
606 	 to private vars in all worksharing constructs.  */
607       x = NULL_TREE;
608       if (outer && is_taskreg_ctx (outer))
609 	x = lookup_decl (var, outer);
610       else if (outer)
611 	x = maybe_lookup_decl_in_outer_ctx (var, ctx);
612       if (x == NULL_TREE)
613 	x = var;
614     }
615   else if (code == OMP_CLAUSE_LASTPRIVATE && is_taskloop_ctx (ctx))
616     {
617       gcc_assert (outer);
618       splay_tree_node n
619 	= splay_tree_lookup (outer->field_map,
620 			     (splay_tree_key) &DECL_UID (var));
621       if (n == NULL)
622 	{
623 	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, outer)))
624 	    x = var;
625 	  else
626 	    x = lookup_decl (var, outer);
627 	}
628       else
629 	{
630 	  tree field = (tree) n->value;
631 	  /* If the receiver record type was remapped in the child function,
632 	     remap the field into the new record type.  */
633 	  x = maybe_lookup_field (field, outer);
634 	  if (x != NULL)
635 	    field = x;
636 
637 	  x = build_simple_mem_ref (outer->receiver_decl);
638 	  x = omp_build_component_ref (x, field);
639 	  if (use_pointer_for_field (var, outer))
640 	    x = build_simple_mem_ref (x);
641 	}
642     }
643   else if (outer)
644     {
645       if (gimple_code (outer->stmt) == GIMPLE_OMP_GRID_BODY)
646 	{
647 	  outer = outer->outer;
648 	  gcc_assert (outer
649 		      && gimple_code (outer->stmt) != GIMPLE_OMP_GRID_BODY);
650 	}
651       x = lookup_decl (var, outer);
652     }
653   else if (omp_is_reference (var))
654     /* This can happen with orphaned constructs.  If var is reference, it is
655        possible it is shared and as such valid.  */
656     x = var;
657   else if (omp_member_access_dummy_var (var))
658     x = var;
659   else
660     gcc_unreachable ();
661 
662   if (x == var)
663     {
664       tree t = omp_member_access_dummy_var (var);
665       if (t)
666 	{
667 	  x = DECL_VALUE_EXPR (var);
668 	  tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
669 	  if (o != t)
670 	    x = unshare_and_remap (x, t, o);
671 	  else
672 	    x = unshare_expr (x);
673 	}
674     }
675 
676   if (omp_is_reference (var))
677     x = build_simple_mem_ref (x);
678 
679   return x;
680 }
681 
682 /* Build tree nodes to access the field for VAR on the sender side.  */
683 
684 static tree
build_sender_ref(splay_tree_key key,omp_context * ctx)685 build_sender_ref (splay_tree_key key, omp_context *ctx)
686 {
687   tree field = lookup_sfield (key, ctx);
688   return omp_build_component_ref (ctx->sender_decl, field);
689 }
690 
691 static tree
build_sender_ref(tree var,omp_context * ctx)692 build_sender_ref (tree var, omp_context *ctx)
693 {
694   return build_sender_ref ((splay_tree_key) var, ctx);
695 }
696 
697 /* Add a new field for VAR inside the structure CTX->SENDER_DECL.  If
698    BASE_POINTERS_RESTRICT, declare the field with restrict.  */
699 
700 static void
install_var_field(tree var,bool by_ref,int mask,omp_context * ctx)701 install_var_field (tree var, bool by_ref, int mask, omp_context *ctx)
702 {
703   tree field, type, sfield = NULL_TREE;
704   splay_tree_key key = (splay_tree_key) var;
705 
706   if ((mask & 8) != 0)
707     {
708       key = (splay_tree_key) &DECL_UID (var);
709       gcc_checking_assert (key != (splay_tree_key) var);
710     }
711   gcc_assert ((mask & 1) == 0
712 	      || !splay_tree_lookup (ctx->field_map, key));
713   gcc_assert ((mask & 2) == 0 || !ctx->sfield_map
714 	      || !splay_tree_lookup (ctx->sfield_map, key));
715   gcc_assert ((mask & 3) == 3
716 	      || !is_gimple_omp_oacc (ctx->stmt));
717 
718   type = TREE_TYPE (var);
719   /* Prevent redeclaring the var in the split-off function with a restrict
720      pointer type.  Note that we only clear type itself, restrict qualifiers in
721      the pointed-to type will be ignored by points-to analysis.  */
722   if (POINTER_TYPE_P (type)
723       && TYPE_RESTRICT (type))
724     type = build_qualified_type (type, TYPE_QUALS (type) & ~TYPE_QUAL_RESTRICT);
725 
726   if (mask & 4)
727     {
728       gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
729       type = build_pointer_type (build_pointer_type (type));
730     }
731   else if (by_ref)
732     type = build_pointer_type (type);
733   else if ((mask & 3) == 1 && omp_is_reference (var))
734     type = TREE_TYPE (type);
735 
736   field = build_decl (DECL_SOURCE_LOCATION (var),
737 		      FIELD_DECL, DECL_NAME (var), type);
738 
739   /* Remember what variable this field was created for.  This does have a
740      side effect of making dwarf2out ignore this member, so for helpful
741      debugging we clear it later in delete_omp_context.  */
742   DECL_ABSTRACT_ORIGIN (field) = var;
743   if (type == TREE_TYPE (var))
744     {
745       SET_DECL_ALIGN (field, DECL_ALIGN (var));
746       DECL_USER_ALIGN (field) = DECL_USER_ALIGN (var);
747       TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (var);
748     }
749   else
750     SET_DECL_ALIGN (field, TYPE_ALIGN (type));
751 
752   if ((mask & 3) == 3)
753     {
754       insert_field_into_struct (ctx->record_type, field);
755       if (ctx->srecord_type)
756 	{
757 	  sfield = build_decl (DECL_SOURCE_LOCATION (var),
758 			       FIELD_DECL, DECL_NAME (var), type);
759 	  DECL_ABSTRACT_ORIGIN (sfield) = var;
760 	  SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
761 	  DECL_USER_ALIGN (sfield) = DECL_USER_ALIGN (field);
762 	  TREE_THIS_VOLATILE (sfield) = TREE_THIS_VOLATILE (field);
763 	  insert_field_into_struct (ctx->srecord_type, sfield);
764 	}
765     }
766   else
767     {
768       if (ctx->srecord_type == NULL_TREE)
769 	{
770 	  tree t;
771 
772 	  ctx->srecord_type = lang_hooks.types.make_type (RECORD_TYPE);
773 	  ctx->sfield_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
774 	  for (t = TYPE_FIELDS (ctx->record_type); t ; t = TREE_CHAIN (t))
775 	    {
776 	      sfield = build_decl (DECL_SOURCE_LOCATION (t),
777 				   FIELD_DECL, DECL_NAME (t), TREE_TYPE (t));
778 	      DECL_ABSTRACT_ORIGIN (sfield) = DECL_ABSTRACT_ORIGIN (t);
779 	      insert_field_into_struct (ctx->srecord_type, sfield);
780 	      splay_tree_insert (ctx->sfield_map,
781 				 (splay_tree_key) DECL_ABSTRACT_ORIGIN (t),
782 				 (splay_tree_value) sfield);
783 	    }
784 	}
785       sfield = field;
786       insert_field_into_struct ((mask & 1) ? ctx->record_type
787 				: ctx->srecord_type, field);
788     }
789 
790   if (mask & 1)
791     splay_tree_insert (ctx->field_map, key, (splay_tree_value) field);
792   if ((mask & 2) && ctx->sfield_map)
793     splay_tree_insert (ctx->sfield_map, key, (splay_tree_value) sfield);
794 }
795 
796 static tree
install_var_local(tree var,omp_context * ctx)797 install_var_local (tree var, omp_context *ctx)
798 {
799   tree new_var = omp_copy_decl_1 (var, ctx);
800   insert_decl_map (&ctx->cb, var, new_var);
801   return new_var;
802 }
803 
804 /* Adjust the replacement for DECL in CTX for the new context.  This means
805    copying the DECL_VALUE_EXPR, and fixing up the type.  */
806 
807 static void
fixup_remapped_decl(tree decl,omp_context * ctx,bool private_debug)808 fixup_remapped_decl (tree decl, omp_context *ctx, bool private_debug)
809 {
810   tree new_decl, size;
811 
812   new_decl = lookup_decl (decl, ctx);
813 
814   TREE_TYPE (new_decl) = remap_type (TREE_TYPE (decl), &ctx->cb);
815 
816   if ((!TREE_CONSTANT (DECL_SIZE (new_decl)) || private_debug)
817       && DECL_HAS_VALUE_EXPR_P (decl))
818     {
819       tree ve = DECL_VALUE_EXPR (decl);
820       walk_tree (&ve, copy_tree_body_r, &ctx->cb, NULL);
821       SET_DECL_VALUE_EXPR (new_decl, ve);
822       DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
823     }
824 
825   if (!TREE_CONSTANT (DECL_SIZE (new_decl)))
826     {
827       size = remap_decl (DECL_SIZE (decl), &ctx->cb);
828       if (size == error_mark_node)
829 	size = TYPE_SIZE (TREE_TYPE (new_decl));
830       DECL_SIZE (new_decl) = size;
831 
832       size = remap_decl (DECL_SIZE_UNIT (decl), &ctx->cb);
833       if (size == error_mark_node)
834 	size = TYPE_SIZE_UNIT (TREE_TYPE (new_decl));
835       DECL_SIZE_UNIT (new_decl) = size;
836     }
837 }
838 
839 /* The callback for remap_decl.  Search all containing contexts for a
840    mapping of the variable; this avoids having to duplicate the splay
841    tree ahead of time.  We know a mapping doesn't already exist in the
842    given context.  Create new mappings to implement default semantics.  */
843 
844 static tree
omp_copy_decl(tree var,copy_body_data * cb)845 omp_copy_decl (tree var, copy_body_data *cb)
846 {
847   omp_context *ctx = (omp_context *) cb;
848   tree new_var;
849 
850   if (TREE_CODE (var) == LABEL_DECL)
851     {
852       if (FORCED_LABEL (var) || DECL_NONLOCAL (var))
853 	return var;
854       new_var = create_artificial_label (DECL_SOURCE_LOCATION (var));
855       DECL_CONTEXT (new_var) = current_function_decl;
856       insert_decl_map (&ctx->cb, var, new_var);
857       return new_var;
858     }
859 
860   while (!is_taskreg_ctx (ctx))
861     {
862       ctx = ctx->outer;
863       if (ctx == NULL)
864 	return var;
865       new_var = maybe_lookup_decl (var, ctx);
866       if (new_var)
867 	return new_var;
868     }
869 
870   if (is_global_var (var) || decl_function_context (var) != ctx->cb.src_fn)
871     return var;
872 
873   return error_mark_node;
874 }
875 
876 /* Create a new context, with OUTER_CTX being the surrounding context.  */
877 
878 static omp_context *
new_omp_context(gimple * stmt,omp_context * outer_ctx)879 new_omp_context (gimple *stmt, omp_context *outer_ctx)
880 {
881   omp_context *ctx = XCNEW (omp_context);
882 
883   splay_tree_insert (all_contexts, (splay_tree_key) stmt,
884 		     (splay_tree_value) ctx);
885   ctx->stmt = stmt;
886 
887   if (outer_ctx)
888     {
889       ctx->outer = outer_ctx;
890       ctx->cb = outer_ctx->cb;
891       ctx->cb.block = NULL;
892       ctx->depth = outer_ctx->depth + 1;
893     }
894   else
895     {
896       ctx->cb.src_fn = current_function_decl;
897       ctx->cb.dst_fn = current_function_decl;
898       ctx->cb.src_node = cgraph_node::get (current_function_decl);
899       gcc_checking_assert (ctx->cb.src_node);
900       ctx->cb.dst_node = ctx->cb.src_node;
901       ctx->cb.src_cfun = cfun;
902       ctx->cb.copy_decl = omp_copy_decl;
903       ctx->cb.eh_lp_nr = 0;
904       ctx->cb.transform_call_graph_edges = CB_CGE_MOVE;
905       ctx->cb.adjust_array_error_bounds = true;
906       ctx->cb.dont_remap_vla_if_no_change = true;
907       ctx->depth = 1;
908     }
909 
910   ctx->cb.decl_map = new hash_map<tree, tree>;
911 
912   return ctx;
913 }
914 
915 static gimple_seq maybe_catch_exception (gimple_seq);
916 
917 /* Finalize task copyfn.  */
918 
919 static void
finalize_task_copyfn(gomp_task * task_stmt)920 finalize_task_copyfn (gomp_task *task_stmt)
921 {
922   struct function *child_cfun;
923   tree child_fn;
924   gimple_seq seq = NULL, new_seq;
925   gbind *bind;
926 
927   child_fn = gimple_omp_task_copy_fn (task_stmt);
928   if (child_fn == NULL_TREE)
929     return;
930 
931   child_cfun = DECL_STRUCT_FUNCTION (child_fn);
932   DECL_STRUCT_FUNCTION (child_fn)->curr_properties = cfun->curr_properties;
933 
934   push_cfun (child_cfun);
935   bind = gimplify_body (child_fn, false);
936   gimple_seq_add_stmt (&seq, bind);
937   new_seq = maybe_catch_exception (seq);
938   if (new_seq != seq)
939     {
940       bind = gimple_build_bind (NULL, new_seq, NULL);
941       seq = NULL;
942       gimple_seq_add_stmt (&seq, bind);
943     }
944   gimple_set_body (child_fn, seq);
945   pop_cfun ();
946 
947   /* Inform the callgraph about the new function.  */
948   cgraph_node *node = cgraph_node::get_create (child_fn);
949   node->parallelized_function = 1;
950   cgraph_node::add_new_function (child_fn, false);
951 }
952 
953 /* Destroy a omp_context data structures.  Called through the splay tree
954    value delete callback.  */
955 
956 static void
delete_omp_context(splay_tree_value value)957 delete_omp_context (splay_tree_value value)
958 {
959   omp_context *ctx = (omp_context *) value;
960 
961   delete ctx->cb.decl_map;
962 
963   if (ctx->field_map)
964     splay_tree_delete (ctx->field_map);
965   if (ctx->sfield_map)
966     splay_tree_delete (ctx->sfield_map);
967 
968   /* We hijacked DECL_ABSTRACT_ORIGIN earlier.  We need to clear it before
969      it produces corrupt debug information.  */
970   if (ctx->record_type)
971     {
972       tree t;
973       for (t = TYPE_FIELDS (ctx->record_type); t ; t = DECL_CHAIN (t))
974 	DECL_ABSTRACT_ORIGIN (t) = NULL;
975     }
976   if (ctx->srecord_type)
977     {
978       tree t;
979       for (t = TYPE_FIELDS (ctx->srecord_type); t ; t = DECL_CHAIN (t))
980 	DECL_ABSTRACT_ORIGIN (t) = NULL;
981     }
982 
983   if (is_task_ctx (ctx))
984     finalize_task_copyfn (as_a <gomp_task *> (ctx->stmt));
985 
986   if (ctx->task_reduction_map)
987     {
988       ctx->task_reductions.release ();
989       delete ctx->task_reduction_map;
990     }
991 
992   XDELETE (ctx);
993 }
994 
995 /* Fix up RECEIVER_DECL with a type that has been remapped to the child
996    context.  */
997 
998 static void
fixup_child_record_type(omp_context * ctx)999 fixup_child_record_type (omp_context *ctx)
1000 {
1001   tree f, type = ctx->record_type;
1002 
1003   if (!ctx->receiver_decl)
1004     return;
1005   /* ??? It isn't sufficient to just call remap_type here, because
1006      variably_modified_type_p doesn't work the way we expect for
1007      record types.  Testing each field for whether it needs remapping
1008      and creating a new record by hand works, however.  */
1009   for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
1010     if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
1011       break;
1012   if (f)
1013     {
1014       tree name, new_fields = NULL;
1015 
1016       type = lang_hooks.types.make_type (RECORD_TYPE);
1017       name = DECL_NAME (TYPE_NAME (ctx->record_type));
1018       name = build_decl (DECL_SOURCE_LOCATION (ctx->receiver_decl),
1019 			 TYPE_DECL, name, type);
1020       TYPE_NAME (type) = name;
1021 
1022       for (f = TYPE_FIELDS (ctx->record_type); f ; f = DECL_CHAIN (f))
1023 	{
1024 	  tree new_f = copy_node (f);
1025 	  DECL_CONTEXT (new_f) = type;
1026 	  TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &ctx->cb);
1027 	  DECL_CHAIN (new_f) = new_fields;
1028 	  walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &ctx->cb, NULL);
1029 	  walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r,
1030 		     &ctx->cb, NULL);
1031 	  walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
1032 		     &ctx->cb, NULL);
1033 	  new_fields = new_f;
1034 
1035 	  /* Arrange to be able to look up the receiver field
1036 	     given the sender field.  */
1037 	  splay_tree_insert (ctx->field_map, (splay_tree_key) f,
1038 			     (splay_tree_value) new_f);
1039 	}
1040       TYPE_FIELDS (type) = nreverse (new_fields);
1041       layout_type (type);
1042     }
1043 
1044   /* In a target region we never modify any of the pointers in *.omp_data_i,
1045      so attempt to help the optimizers.  */
1046   if (is_gimple_omp_offloaded (ctx->stmt))
1047     type = build_qualified_type (type, TYPE_QUAL_CONST);
1048 
1049   TREE_TYPE (ctx->receiver_decl)
1050     = build_qualified_type (build_reference_type (type), TYPE_QUAL_RESTRICT);
1051 }
1052 
1053 /* Instantiate decls as necessary in CTX to satisfy the data sharing
1054    specified by CLAUSES.  */
1055 
1056 static void
scan_sharing_clauses(tree clauses,omp_context * ctx)1057 scan_sharing_clauses (tree clauses, omp_context *ctx)
1058 {
1059   tree c, decl;
1060   bool scan_array_reductions = false;
1061 
1062   for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1063     {
1064       bool by_ref;
1065 
1066       switch (OMP_CLAUSE_CODE (c))
1067 	{
1068 	case OMP_CLAUSE_PRIVATE:
1069 	  decl = OMP_CLAUSE_DECL (c);
1070 	  if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
1071 	    goto do_private;
1072 	  else if (!is_variable_sized (decl))
1073 	    install_var_local (decl, ctx);
1074 	  break;
1075 
1076 	case OMP_CLAUSE_SHARED:
1077 	  decl = OMP_CLAUSE_DECL (c);
1078 	  /* Ignore shared directives in teams construct inside of
1079 	     target construct.  */
1080 	  if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
1081 	      && !is_host_teams_ctx (ctx))
1082 	    {
1083 	      /* Global variables don't need to be copied,
1084 		 the receiver side will use them directly.  */
1085 	      tree odecl = maybe_lookup_decl_in_outer_ctx (decl, ctx);
1086 	      if (is_global_var (odecl))
1087 		break;
1088 	      insert_decl_map (&ctx->cb, decl, odecl);
1089 	      break;
1090 	    }
1091 	  gcc_assert (is_taskreg_ctx (ctx));
1092 	  gcc_assert (!COMPLETE_TYPE_P (TREE_TYPE (decl))
1093 		      || !is_variable_sized (decl));
1094 	  /* Global variables don't need to be copied,
1095 	     the receiver side will use them directly.  */
1096 	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1097 	    break;
1098 	  if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
1099 	    {
1100 	      use_pointer_for_field (decl, ctx);
1101 	      break;
1102 	    }
1103 	  by_ref = use_pointer_for_field (decl, NULL);
1104 	  if ((! TREE_READONLY (decl) && !OMP_CLAUSE_SHARED_READONLY (c))
1105 	      || TREE_ADDRESSABLE (decl)
1106 	      || by_ref
1107 	      || omp_is_reference (decl))
1108 	    {
1109 	      by_ref = use_pointer_for_field (decl, ctx);
1110 	      install_var_field (decl, by_ref, 3, ctx);
1111 	      install_var_local (decl, ctx);
1112 	      break;
1113 	    }
1114 	  /* We don't need to copy const scalar vars back.  */
1115 	  OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_FIRSTPRIVATE);
1116 	  goto do_private;
1117 
1118 	case OMP_CLAUSE_REDUCTION:
1119 	case OMP_CLAUSE_IN_REDUCTION:
1120 	  decl = OMP_CLAUSE_DECL (c);
1121 	  if (TREE_CODE (decl) == MEM_REF)
1122 	    {
1123 	      tree t = TREE_OPERAND (decl, 0);
1124 	      if (TREE_CODE (t) == POINTER_PLUS_EXPR)
1125 		t = TREE_OPERAND (t, 0);
1126 	      if (TREE_CODE (t) == INDIRECT_REF
1127 		  || TREE_CODE (t) == ADDR_EXPR)
1128 		t = TREE_OPERAND (t, 0);
1129 	      install_var_local (t, ctx);
1130 	      if (is_taskreg_ctx (ctx)
1131 		  && (!is_global_var (maybe_lookup_decl_in_outer_ctx (t, ctx))
1132 		      || (is_task_ctx (ctx)
1133 			  && (TREE_CODE (TREE_TYPE (t)) == POINTER_TYPE
1134 			      || (TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE
1135 				  && (TREE_CODE (TREE_TYPE (TREE_TYPE (t)))
1136 				      == POINTER_TYPE)))))
1137 		  && !is_variable_sized (t)
1138 		  && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
1139 		      || (!OMP_CLAUSE_REDUCTION_TASK (c)
1140 			  && !is_task_ctx (ctx))))
1141 		{
1142 		  by_ref = use_pointer_for_field (t, NULL);
1143 		  if (is_task_ctx (ctx)
1144 		      && TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE
1145 		      && TREE_CODE (TREE_TYPE (TREE_TYPE (t))) == POINTER_TYPE)
1146 		    {
1147 		      install_var_field (t, false, 1, ctx);
1148 		      install_var_field (t, by_ref, 2, ctx);
1149 		    }
1150 		  else
1151 		    install_var_field (t, by_ref, 3, ctx);
1152 		}
1153 	      break;
1154 	    }
1155 	  if (is_task_ctx (ctx)
1156 	      || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
1157 		  && OMP_CLAUSE_REDUCTION_TASK (c)
1158 		  && is_parallel_ctx (ctx)))
1159 	    {
1160 	      /* Global variables don't need to be copied,
1161 		 the receiver side will use them directly.  */
1162 	      if (!is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1163 		{
1164 		  by_ref = use_pointer_for_field (decl, ctx);
1165 		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION)
1166 		    install_var_field (decl, by_ref, 3, ctx);
1167 		}
1168 	      install_var_local (decl, ctx);
1169 	      break;
1170 	    }
1171 	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
1172 	      && OMP_CLAUSE_REDUCTION_TASK (c))
1173 	    {
1174 	      install_var_local (decl, ctx);
1175 	      break;
1176 	    }
1177 	  goto do_private;
1178 
1179 	case OMP_CLAUSE_LASTPRIVATE:
1180 	  /* Let the corresponding firstprivate clause create
1181 	     the variable.  */
1182 	  if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
1183 	    break;
1184 	  /* FALLTHRU */
1185 
1186 	case OMP_CLAUSE_FIRSTPRIVATE:
1187 	case OMP_CLAUSE_LINEAR:
1188 	  decl = OMP_CLAUSE_DECL (c);
1189 	do_private:
1190 	  if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
1191 	       || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
1192 	      && is_gimple_omp_offloaded (ctx->stmt))
1193 	    {
1194 	      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
1195 		install_var_field (decl, !omp_is_reference (decl), 3, ctx);
1196 	      else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1197 		install_var_field (decl, true, 3, ctx);
1198 	      else
1199 		install_var_field (decl, false, 3, ctx);
1200 	    }
1201 	  if (is_variable_sized (decl))
1202 	    {
1203 	      if (is_task_ctx (ctx))
1204 		install_var_field (decl, false, 1, ctx);
1205 	      break;
1206 	    }
1207 	  else if (is_taskreg_ctx (ctx))
1208 	    {
1209 	      bool global
1210 		= is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx));
1211 	      by_ref = use_pointer_for_field (decl, NULL);
1212 
1213 	      if (is_task_ctx (ctx)
1214 		  && (global || by_ref || omp_is_reference (decl)))
1215 		{
1216 		  install_var_field (decl, false, 1, ctx);
1217 		  if (!global)
1218 		    install_var_field (decl, by_ref, 2, ctx);
1219 		}
1220 	      else if (!global)
1221 		install_var_field (decl, by_ref, 3, ctx);
1222 	    }
1223 	  install_var_local (decl, ctx);
1224 	  break;
1225 
1226 	case OMP_CLAUSE_USE_DEVICE_PTR:
1227 	  decl = OMP_CLAUSE_DECL (c);
1228 	  if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1229 	    install_var_field (decl, true, 3, ctx);
1230 	  else
1231 	    install_var_field (decl, false, 3, ctx);
1232 	  if (DECL_SIZE (decl)
1233 	      && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1234 	    {
1235 	      tree decl2 = DECL_VALUE_EXPR (decl);
1236 	      gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1237 	      decl2 = TREE_OPERAND (decl2, 0);
1238 	      gcc_assert (DECL_P (decl2));
1239 	      install_var_local (decl2, ctx);
1240 	    }
1241 	  install_var_local (decl, ctx);
1242 	  break;
1243 
1244 	case OMP_CLAUSE_IS_DEVICE_PTR:
1245 	  decl = OMP_CLAUSE_DECL (c);
1246 	  goto do_private;
1247 
1248 	case OMP_CLAUSE__LOOPTEMP_:
1249 	case OMP_CLAUSE__REDUCTEMP_:
1250 	  gcc_assert (is_taskreg_ctx (ctx));
1251 	  decl = OMP_CLAUSE_DECL (c);
1252 	  install_var_field (decl, false, 3, ctx);
1253 	  install_var_local (decl, ctx);
1254 	  break;
1255 
1256 	case OMP_CLAUSE_COPYPRIVATE:
1257 	case OMP_CLAUSE_COPYIN:
1258 	  decl = OMP_CLAUSE_DECL (c);
1259 	  by_ref = use_pointer_for_field (decl, NULL);
1260 	  install_var_field (decl, by_ref, 3, ctx);
1261 	  break;
1262 
1263 	case OMP_CLAUSE_FINAL:
1264 	case OMP_CLAUSE_IF:
1265 	case OMP_CLAUSE_NUM_THREADS:
1266 	case OMP_CLAUSE_NUM_TEAMS:
1267 	case OMP_CLAUSE_THREAD_LIMIT:
1268 	case OMP_CLAUSE_DEVICE:
1269 	case OMP_CLAUSE_SCHEDULE:
1270 	case OMP_CLAUSE_DIST_SCHEDULE:
1271 	case OMP_CLAUSE_DEPEND:
1272 	case OMP_CLAUSE_PRIORITY:
1273 	case OMP_CLAUSE_GRAINSIZE:
1274 	case OMP_CLAUSE_NUM_TASKS:
1275 	case OMP_CLAUSE_NUM_GANGS:
1276 	case OMP_CLAUSE_NUM_WORKERS:
1277 	case OMP_CLAUSE_VECTOR_LENGTH:
1278 	  if (ctx->outer)
1279 	    scan_omp_op (&OMP_CLAUSE_OPERAND (c, 0), ctx->outer);
1280 	  break;
1281 
1282 	case OMP_CLAUSE_TO:
1283 	case OMP_CLAUSE_FROM:
1284 	case OMP_CLAUSE_MAP:
1285 	  if (ctx->outer)
1286 	    scan_omp_op (&OMP_CLAUSE_SIZE (c), ctx->outer);
1287 	  decl = OMP_CLAUSE_DECL (c);
1288 	  /* Global variables with "omp declare target" attribute
1289 	     don't need to be copied, the receiver side will use them
1290 	     directly.  However, global variables with "omp declare target link"
1291 	     attribute need to be copied.  Or when ALWAYS modifier is used.  */
1292 	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1293 	      && DECL_P (decl)
1294 	      && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
1295 		   && (OMP_CLAUSE_MAP_KIND (c)
1296 		       != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
1297 		  || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1298 	      && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_TO
1299 	      && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_FROM
1300 	      && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_TOFROM
1301 	      && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
1302 	      && varpool_node::get_create (decl)->offloadable
1303 	      && !lookup_attribute ("omp declare target link",
1304 				    DECL_ATTRIBUTES (decl)))
1305 	    break;
1306 	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1307 	      && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER)
1308 	    {
1309 	      /* Ignore GOMP_MAP_POINTER kind for arrays in regions that are
1310 		 not offloaded; there is nothing to map for those.  */
1311 	      if (!is_gimple_omp_offloaded (ctx->stmt)
1312 		  && !POINTER_TYPE_P (TREE_TYPE (decl))
1313 		  && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
1314 		break;
1315 	    }
1316 	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1317 	      && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
1318 		  || (OMP_CLAUSE_MAP_KIND (c)
1319 		      == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
1320 	    {
1321 	      if (TREE_CODE (decl) == COMPONENT_REF
1322 		  || (TREE_CODE (decl) == INDIRECT_REF
1323 		      && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
1324 		      && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
1325 			  == REFERENCE_TYPE)))
1326 		break;
1327 	      if (DECL_SIZE (decl)
1328 		  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1329 		{
1330 		  tree decl2 = DECL_VALUE_EXPR (decl);
1331 		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1332 		  decl2 = TREE_OPERAND (decl2, 0);
1333 		  gcc_assert (DECL_P (decl2));
1334 		  install_var_local (decl2, ctx);
1335 		}
1336 	      install_var_local (decl, ctx);
1337 	      break;
1338 	    }
1339 	  if (DECL_P (decl))
1340 	    {
1341 	      if (DECL_SIZE (decl)
1342 		  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1343 		{
1344 		  tree decl2 = DECL_VALUE_EXPR (decl);
1345 		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1346 		  decl2 = TREE_OPERAND (decl2, 0);
1347 		  gcc_assert (DECL_P (decl2));
1348 		  install_var_field (decl2, true, 3, ctx);
1349 		  install_var_local (decl2, ctx);
1350 		  install_var_local (decl, ctx);
1351 		}
1352 	      else
1353 		{
1354 		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1355 		      && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
1356 		      && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
1357 		      && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1358 		    install_var_field (decl, true, 7, ctx);
1359 		  else
1360 		    install_var_field (decl, true, 3, ctx);
1361 		  if (is_gimple_omp_offloaded (ctx->stmt)
1362 		      && !OMP_CLAUSE_MAP_IN_REDUCTION (c))
1363 		    install_var_local (decl, ctx);
1364 		}
1365 	    }
1366 	  else
1367 	    {
1368 	      tree base = get_base_address (decl);
1369 	      tree nc = OMP_CLAUSE_CHAIN (c);
1370 	      if (DECL_P (base)
1371 		  && nc != NULL_TREE
1372 		  && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
1373 		  && OMP_CLAUSE_DECL (nc) == base
1374 		  && OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_POINTER
1375 		  && integer_zerop (OMP_CLAUSE_SIZE (nc)))
1376 		{
1377 		  OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c) = 1;
1378 		  OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (nc) = 1;
1379 		}
1380 	      else
1381 		{
1382 		  if (ctx->outer)
1383 		    {
1384 		      scan_omp_op (&OMP_CLAUSE_DECL (c), ctx->outer);
1385 		      decl = OMP_CLAUSE_DECL (c);
1386 		    }
1387 		  gcc_assert (!splay_tree_lookup (ctx->field_map,
1388 						  (splay_tree_key) decl));
1389 		  tree field
1390 		    = build_decl (OMP_CLAUSE_LOCATION (c),
1391 				  FIELD_DECL, NULL_TREE, ptr_type_node);
1392 		  SET_DECL_ALIGN (field, TYPE_ALIGN (ptr_type_node));
1393 		  insert_field_into_struct (ctx->record_type, field);
1394 		  splay_tree_insert (ctx->field_map, (splay_tree_key) decl,
1395 				     (splay_tree_value) field);
1396 		}
1397 	    }
1398 	  break;
1399 
1400 	case OMP_CLAUSE__GRIDDIM_:
1401 	  if (ctx->outer)
1402 	    {
1403 	      scan_omp_op (&OMP_CLAUSE__GRIDDIM__SIZE (c), ctx->outer);
1404 	      scan_omp_op (&OMP_CLAUSE__GRIDDIM__GROUP (c), ctx->outer);
1405 	    }
1406 	  break;
1407 
1408 	case OMP_CLAUSE_NOWAIT:
1409 	case OMP_CLAUSE_ORDERED:
1410 	case OMP_CLAUSE_COLLAPSE:
1411 	case OMP_CLAUSE_UNTIED:
1412 	case OMP_CLAUSE_MERGEABLE:
1413 	case OMP_CLAUSE_PROC_BIND:
1414 	case OMP_CLAUSE_SAFELEN:
1415 	case OMP_CLAUSE_SIMDLEN:
1416 	case OMP_CLAUSE_THREADS:
1417 	case OMP_CLAUSE_SIMD:
1418 	case OMP_CLAUSE_NOGROUP:
1419 	case OMP_CLAUSE_DEFAULTMAP:
1420 	case OMP_CLAUSE_ASYNC:
1421 	case OMP_CLAUSE_WAIT:
1422 	case OMP_CLAUSE_GANG:
1423 	case OMP_CLAUSE_WORKER:
1424 	case OMP_CLAUSE_VECTOR:
1425 	case OMP_CLAUSE_INDEPENDENT:
1426 	case OMP_CLAUSE_AUTO:
1427 	case OMP_CLAUSE_SEQ:
1428 	case OMP_CLAUSE_TILE:
1429 	case OMP_CLAUSE__SIMT_:
1430 	case OMP_CLAUSE_DEFAULT:
1431 	case OMP_CLAUSE_NONTEMPORAL:
1432 	case OMP_CLAUSE_IF_PRESENT:
1433 	case OMP_CLAUSE_FINALIZE:
1434 	case OMP_CLAUSE_TASK_REDUCTION:
1435 	  break;
1436 
1437 	case OMP_CLAUSE_ALIGNED:
1438 	  decl = OMP_CLAUSE_DECL (c);
1439 	  if (is_global_var (decl)
1440 	      && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1441 	    install_var_local (decl, ctx);
1442 	  break;
1443 
1444 	case OMP_CLAUSE__CACHE_:
1445 	default:
1446 	  gcc_unreachable ();
1447 	}
1448     }
1449 
1450   for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1451     {
1452       switch (OMP_CLAUSE_CODE (c))
1453 	{
1454 	case OMP_CLAUSE_LASTPRIVATE:
1455 	  /* Let the corresponding firstprivate clause create
1456 	     the variable.  */
1457 	  if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
1458 	    scan_array_reductions = true;
1459 	  if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
1460 	    break;
1461 	  /* FALLTHRU */
1462 
1463 	case OMP_CLAUSE_FIRSTPRIVATE:
1464 	case OMP_CLAUSE_PRIVATE:
1465 	case OMP_CLAUSE_LINEAR:
1466 	case OMP_CLAUSE_IS_DEVICE_PTR:
1467 	  decl = OMP_CLAUSE_DECL (c);
1468 	  if (is_variable_sized (decl))
1469 	    {
1470 	      if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
1471 		   || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
1472 		  && is_gimple_omp_offloaded (ctx->stmt))
1473 		{
1474 		  tree decl2 = DECL_VALUE_EXPR (decl);
1475 		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1476 		  decl2 = TREE_OPERAND (decl2, 0);
1477 		  gcc_assert (DECL_P (decl2));
1478 		  install_var_local (decl2, ctx);
1479 		  fixup_remapped_decl (decl2, ctx, false);
1480 		}
1481 	      install_var_local (decl, ctx);
1482 	    }
1483 	  fixup_remapped_decl (decl, ctx,
1484 			       OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
1485 			       && OMP_CLAUSE_PRIVATE_DEBUG (c));
1486 	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
1487 	      && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
1488 	    scan_array_reductions = true;
1489 	  break;
1490 
1491 	case OMP_CLAUSE_REDUCTION:
1492 	case OMP_CLAUSE_IN_REDUCTION:
1493 	  decl = OMP_CLAUSE_DECL (c);
1494 	  if (TREE_CODE (decl) != MEM_REF)
1495 	    {
1496 	      if (is_variable_sized (decl))
1497 		install_var_local (decl, ctx);
1498 	      fixup_remapped_decl (decl, ctx, false);
1499 	    }
1500 	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
1501 	    scan_array_reductions = true;
1502 	  break;
1503 
1504 	case OMP_CLAUSE_TASK_REDUCTION:
1505 	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
1506 	    scan_array_reductions = true;
1507 	  break;
1508 
1509 	case OMP_CLAUSE_SHARED:
1510 	  /* Ignore shared directives in teams construct inside of
1511 	     target construct.  */
1512 	  if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
1513 	      && !is_host_teams_ctx (ctx))
1514 	    break;
1515 	  decl = OMP_CLAUSE_DECL (c);
1516 	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1517 	    break;
1518 	  if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
1519 	    {
1520 	      if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl,
1521 								 ctx->outer)))
1522 		break;
1523 	      bool by_ref = use_pointer_for_field (decl, ctx);
1524 	      install_var_field (decl, by_ref, 11, ctx);
1525 	      break;
1526 	    }
1527 	  fixup_remapped_decl (decl, ctx, false);
1528 	  break;
1529 
1530 	case OMP_CLAUSE_MAP:
1531 	  if (!is_gimple_omp_offloaded (ctx->stmt))
1532 	    break;
1533 	  decl = OMP_CLAUSE_DECL (c);
1534 	  if (DECL_P (decl)
1535 	      && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
1536 		   && (OMP_CLAUSE_MAP_KIND (c)
1537 		       != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
1538 		  || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1539 	      && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
1540 	      && varpool_node::get_create (decl)->offloadable)
1541 	    break;
1542 	  if (DECL_P (decl))
1543 	    {
1544 	      if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
1545 		   || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
1546 		  && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
1547 		  && !COMPLETE_TYPE_P (TREE_TYPE (decl)))
1548 		{
1549 		  tree new_decl = lookup_decl (decl, ctx);
1550 		  TREE_TYPE (new_decl)
1551 		    = remap_type (TREE_TYPE (decl), &ctx->cb);
1552 		}
1553 	      else if (DECL_SIZE (decl)
1554 		       && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1555 		{
1556 		  tree decl2 = DECL_VALUE_EXPR (decl);
1557 		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1558 		  decl2 = TREE_OPERAND (decl2, 0);
1559 		  gcc_assert (DECL_P (decl2));
1560 		  fixup_remapped_decl (decl2, ctx, false);
1561 		  fixup_remapped_decl (decl, ctx, true);
1562 		}
1563 	      else
1564 		fixup_remapped_decl (decl, ctx, false);
1565 	    }
1566 	  break;
1567 
1568 	case OMP_CLAUSE_COPYPRIVATE:
1569 	case OMP_CLAUSE_COPYIN:
1570 	case OMP_CLAUSE_DEFAULT:
1571 	case OMP_CLAUSE_IF:
1572 	case OMP_CLAUSE_NUM_THREADS:
1573 	case OMP_CLAUSE_NUM_TEAMS:
1574 	case OMP_CLAUSE_THREAD_LIMIT:
1575 	case OMP_CLAUSE_DEVICE:
1576 	case OMP_CLAUSE_SCHEDULE:
1577 	case OMP_CLAUSE_DIST_SCHEDULE:
1578 	case OMP_CLAUSE_NOWAIT:
1579 	case OMP_CLAUSE_ORDERED:
1580 	case OMP_CLAUSE_COLLAPSE:
1581 	case OMP_CLAUSE_UNTIED:
1582 	case OMP_CLAUSE_FINAL:
1583 	case OMP_CLAUSE_MERGEABLE:
1584 	case OMP_CLAUSE_PROC_BIND:
1585 	case OMP_CLAUSE_SAFELEN:
1586 	case OMP_CLAUSE_SIMDLEN:
1587 	case OMP_CLAUSE_ALIGNED:
1588 	case OMP_CLAUSE_DEPEND:
1589 	case OMP_CLAUSE__LOOPTEMP_:
1590 	case OMP_CLAUSE__REDUCTEMP_:
1591 	case OMP_CLAUSE_TO:
1592 	case OMP_CLAUSE_FROM:
1593 	case OMP_CLAUSE_PRIORITY:
1594 	case OMP_CLAUSE_GRAINSIZE:
1595 	case OMP_CLAUSE_NUM_TASKS:
1596 	case OMP_CLAUSE_THREADS:
1597 	case OMP_CLAUSE_SIMD:
1598 	case OMP_CLAUSE_NOGROUP:
1599 	case OMP_CLAUSE_DEFAULTMAP:
1600 	case OMP_CLAUSE_USE_DEVICE_PTR:
1601 	case OMP_CLAUSE_NONTEMPORAL:
1602 	case OMP_CLAUSE_ASYNC:
1603 	case OMP_CLAUSE_WAIT:
1604 	case OMP_CLAUSE_NUM_GANGS:
1605 	case OMP_CLAUSE_NUM_WORKERS:
1606 	case OMP_CLAUSE_VECTOR_LENGTH:
1607 	case OMP_CLAUSE_GANG:
1608 	case OMP_CLAUSE_WORKER:
1609 	case OMP_CLAUSE_VECTOR:
1610 	case OMP_CLAUSE_INDEPENDENT:
1611 	case OMP_CLAUSE_AUTO:
1612 	case OMP_CLAUSE_SEQ:
1613 	case OMP_CLAUSE_TILE:
1614 	case OMP_CLAUSE__GRIDDIM_:
1615 	case OMP_CLAUSE__SIMT_:
1616 	case OMP_CLAUSE_IF_PRESENT:
1617 	case OMP_CLAUSE_FINALIZE:
1618 	  break;
1619 
1620 	case OMP_CLAUSE__CACHE_:
1621 	default:
1622 	  gcc_unreachable ();
1623 	}
1624     }
1625 
1626   gcc_checking_assert (!scan_array_reductions
1627 		       || !is_gimple_omp_oacc (ctx->stmt));
1628   if (scan_array_reductions)
1629     {
1630       for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1631 	if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
1632 	     || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
1633 	     || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
1634 	    && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
1635 	  {
1636 	    scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c), ctx);
1637 	    scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
1638 	  }
1639 	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
1640 		 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
1641 	  scan_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
1642 	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
1643 		 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
1644 	  scan_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
1645     }
1646 }
1647 
1648 /* Create a new name for omp child function.  Returns an identifier. */
1649 
1650 static tree
create_omp_child_function_name(bool task_copy)1651 create_omp_child_function_name (bool task_copy)
1652 {
1653   return clone_function_name_numbered (current_function_decl,
1654 				       task_copy ? "_omp_cpyfn" : "_omp_fn");
1655 }
1656 
1657 /* Return true if CTX may belong to offloaded code: either if current function
1658    is offloaded, or any enclosing context corresponds to a target region.  */
1659 
1660 static bool
omp_maybe_offloaded_ctx(omp_context * ctx)1661 omp_maybe_offloaded_ctx (omp_context *ctx)
1662 {
1663   if (cgraph_node::get (current_function_decl)->offloadable)
1664     return true;
1665   for (; ctx; ctx = ctx->outer)
1666     if (is_gimple_omp_offloaded (ctx->stmt))
1667       return true;
1668   return false;
1669 }
1670 
1671 /* Build a decl for the omp child function.  It'll not contain a body
1672    yet, just the bare decl.  */
1673 
1674 static void
create_omp_child_function(omp_context * ctx,bool task_copy)1675 create_omp_child_function (omp_context *ctx, bool task_copy)
1676 {
1677   tree decl, type, name, t;
1678 
1679   name = create_omp_child_function_name (task_copy);
1680   if (task_copy)
1681     type = build_function_type_list (void_type_node, ptr_type_node,
1682 				     ptr_type_node, NULL_TREE);
1683   else
1684     type = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
1685 
1686   decl = build_decl (gimple_location (ctx->stmt), FUNCTION_DECL, name, type);
1687 
1688   gcc_checking_assert (!is_gimple_omp_oacc (ctx->stmt)
1689 		       || !task_copy);
1690   if (!task_copy)
1691     ctx->cb.dst_fn = decl;
1692   else
1693     gimple_omp_task_set_copy_fn (ctx->stmt, decl);
1694 
1695   TREE_STATIC (decl) = 1;
1696   TREE_USED (decl) = 1;
1697   DECL_ARTIFICIAL (decl) = 1;
1698   DECL_IGNORED_P (decl) = 0;
1699   TREE_PUBLIC (decl) = 0;
1700   DECL_UNINLINABLE (decl) = 1;
1701   DECL_EXTERNAL (decl) = 0;
1702   DECL_CONTEXT (decl) = NULL_TREE;
1703   DECL_INITIAL (decl) = make_node (BLOCK);
1704   BLOCK_SUPERCONTEXT (DECL_INITIAL (decl)) = decl;
1705   DECL_ATTRIBUTES (decl) = DECL_ATTRIBUTES (current_function_decl);
1706   /* Remove omp declare simd attribute from the new attributes.  */
1707   if (tree a = lookup_attribute ("omp declare simd", DECL_ATTRIBUTES (decl)))
1708     {
1709       while (tree a2 = lookup_attribute ("omp declare simd", TREE_CHAIN (a)))
1710 	a = a2;
1711       a = TREE_CHAIN (a);
1712       for (tree *p = &DECL_ATTRIBUTES (decl); *p != a;)
1713 	if (is_attribute_p ("omp declare simd", get_attribute_name (*p)))
1714 	  *p = TREE_CHAIN (*p);
1715 	else
1716 	  {
1717 	    tree chain = TREE_CHAIN (*p);
1718 	    *p = copy_node (*p);
1719 	    p = &TREE_CHAIN (*p);
1720 	    *p = chain;
1721 	  }
1722     }
1723   DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)
1724     = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (current_function_decl);
1725   DECL_FUNCTION_SPECIFIC_TARGET (decl)
1726     = DECL_FUNCTION_SPECIFIC_TARGET (current_function_decl);
1727   DECL_FUNCTION_VERSIONED (decl)
1728     = DECL_FUNCTION_VERSIONED (current_function_decl);
1729 
1730   if (omp_maybe_offloaded_ctx (ctx))
1731     {
1732       cgraph_node::get_create (decl)->offloadable = 1;
1733       if (ENABLE_OFFLOADING)
1734 	g->have_offload = true;
1735     }
1736 
1737   if (cgraph_node::get_create (decl)->offloadable
1738       && !lookup_attribute ("omp declare target",
1739                            DECL_ATTRIBUTES (current_function_decl)))
1740     {
1741       const char *target_attr = (is_gimple_omp_offloaded (ctx->stmt)
1742 				 ? "omp target entrypoint"
1743 				 : "omp declare target");
1744       DECL_ATTRIBUTES (decl)
1745 	= tree_cons (get_identifier (target_attr),
1746 		     NULL_TREE, DECL_ATTRIBUTES (decl));
1747     }
1748 
1749   t = build_decl (DECL_SOURCE_LOCATION (decl),
1750 		  RESULT_DECL, NULL_TREE, void_type_node);
1751   DECL_ARTIFICIAL (t) = 1;
1752   DECL_IGNORED_P (t) = 1;
1753   DECL_CONTEXT (t) = decl;
1754   DECL_RESULT (decl) = t;
1755 
1756   tree data_name = get_identifier (".omp_data_i");
1757   t = build_decl (DECL_SOURCE_LOCATION (decl), PARM_DECL, data_name,
1758 		  ptr_type_node);
1759   DECL_ARTIFICIAL (t) = 1;
1760   DECL_NAMELESS (t) = 1;
1761   DECL_ARG_TYPE (t) = ptr_type_node;
1762   DECL_CONTEXT (t) = current_function_decl;
1763   TREE_USED (t) = 1;
1764   TREE_READONLY (t) = 1;
1765   DECL_ARGUMENTS (decl) = t;
1766   if (!task_copy)
1767     ctx->receiver_decl = t;
1768   else
1769     {
1770       t = build_decl (DECL_SOURCE_LOCATION (decl),
1771 		      PARM_DECL, get_identifier (".omp_data_o"),
1772 		      ptr_type_node);
1773       DECL_ARTIFICIAL (t) = 1;
1774       DECL_NAMELESS (t) = 1;
1775       DECL_ARG_TYPE (t) = ptr_type_node;
1776       DECL_CONTEXT (t) = current_function_decl;
1777       TREE_USED (t) = 1;
1778       TREE_ADDRESSABLE (t) = 1;
1779       DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
1780       DECL_ARGUMENTS (decl) = t;
1781     }
1782 
1783   /* Allocate memory for the function structure.  The call to
1784      allocate_struct_function clobbers CFUN, so we need to restore
1785      it afterward.  */
1786   push_struct_function (decl);
1787   cfun->function_end_locus = gimple_location (ctx->stmt);
1788   init_tree_ssa (cfun);
1789   pop_cfun ();
1790 }
1791 
1792 /* Callback for walk_gimple_seq.  Check if combined parallel
1793    contains gimple_omp_for_combined_into_p OMP_FOR.  */
1794 
1795 tree
omp_find_combined_for(gimple_stmt_iterator * gsi_p,bool * handled_ops_p,struct walk_stmt_info * wi)1796 omp_find_combined_for (gimple_stmt_iterator *gsi_p,
1797 		       bool *handled_ops_p,
1798 		       struct walk_stmt_info *wi)
1799 {
1800   gimple *stmt = gsi_stmt (*gsi_p);
1801 
1802   *handled_ops_p = true;
1803   switch (gimple_code (stmt))
1804     {
1805     WALK_SUBSTMTS;
1806 
1807     case GIMPLE_OMP_FOR:
1808       if (gimple_omp_for_combined_into_p (stmt)
1809 	  && gimple_omp_for_kind (stmt)
1810 	     == *(const enum gf_mask *) (wi->info))
1811 	{
1812 	  wi->info = stmt;
1813 	  return integer_zero_node;
1814 	}
1815       break;
1816     default:
1817       break;
1818     }
1819   return NULL;
1820 }
1821 
1822 /* Add _LOOPTEMP_/_REDUCTEMP_ clauses on OpenMP parallel or task.  */
1823 
1824 static void
add_taskreg_looptemp_clauses(enum gf_mask msk,gimple * stmt,omp_context * outer_ctx)1825 add_taskreg_looptemp_clauses (enum gf_mask msk, gimple *stmt,
1826 			      omp_context *outer_ctx)
1827 {
1828   struct walk_stmt_info wi;
1829 
1830   memset (&wi, 0, sizeof (wi));
1831   wi.val_only = true;
1832   wi.info = (void *) &msk;
1833   walk_gimple_seq (gimple_omp_body (stmt), omp_find_combined_for, NULL, &wi);
1834   if (wi.info != (void *) &msk)
1835     {
1836       gomp_for *for_stmt = as_a <gomp_for *> ((gimple *) wi.info);
1837       struct omp_for_data fd;
1838       omp_extract_for_data (for_stmt, &fd, NULL);
1839       /* We need two temporaries with fd.loop.v type (istart/iend)
1840 	 and then (fd.collapse - 1) temporaries with the same
1841 	 type for count2 ... countN-1 vars if not constant.  */
1842       size_t count = 2, i;
1843       tree type = fd.iter_type;
1844       if (fd.collapse > 1
1845 	  && TREE_CODE (fd.loop.n2) != INTEGER_CST)
1846 	{
1847 	  count += fd.collapse - 1;
1848 	  /* If there are lastprivate clauses on the inner
1849 	     GIMPLE_OMP_FOR, add one more temporaries for the total number
1850 	     of iterations (product of count1 ... countN-1).  */
1851 	  if (omp_find_clause (gimple_omp_for_clauses (for_stmt),
1852 			       OMP_CLAUSE_LASTPRIVATE))
1853 	    count++;
1854 	  else if (msk == GF_OMP_FOR_KIND_FOR
1855 		   && omp_find_clause (gimple_omp_parallel_clauses (stmt),
1856 				       OMP_CLAUSE_LASTPRIVATE))
1857 	    count++;
1858 	}
1859       for (i = 0; i < count; i++)
1860 	{
1861 	  tree temp = create_tmp_var (type);
1862 	  tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
1863 	  insert_decl_map (&outer_ctx->cb, temp, temp);
1864 	  OMP_CLAUSE_DECL (c) = temp;
1865 	  OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
1866 	  gimple_omp_taskreg_set_clauses (stmt, c);
1867 	}
1868     }
1869   if (msk == GF_OMP_FOR_KIND_TASKLOOP
1870       && omp_find_clause (gimple_omp_task_clauses (stmt),
1871 			  OMP_CLAUSE_REDUCTION))
1872     {
1873       tree type = build_pointer_type (pointer_sized_int_node);
1874       tree temp = create_tmp_var (type);
1875       tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
1876       insert_decl_map (&outer_ctx->cb, temp, temp);
1877       OMP_CLAUSE_DECL (c) = temp;
1878       OMP_CLAUSE_CHAIN (c) = gimple_omp_task_clauses (stmt);
1879       gimple_omp_task_set_clauses (stmt, c);
1880     }
1881 }
1882 
1883 /* Scan an OpenMP parallel directive.  */
1884 
1885 static void
scan_omp_parallel(gimple_stmt_iterator * gsi,omp_context * outer_ctx)1886 scan_omp_parallel (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
1887 {
1888   omp_context *ctx;
1889   tree name;
1890   gomp_parallel *stmt = as_a <gomp_parallel *> (gsi_stmt (*gsi));
1891 
1892   /* Ignore parallel directives with empty bodies, unless there
1893      are copyin clauses.  */
1894   if (optimize > 0
1895       && empty_body_p (gimple_omp_body (stmt))
1896       && omp_find_clause (gimple_omp_parallel_clauses (stmt),
1897 			  OMP_CLAUSE_COPYIN) == NULL)
1898     {
1899       gsi_replace (gsi, gimple_build_nop (), false);
1900       return;
1901     }
1902 
1903   if (gimple_omp_parallel_combined_p (stmt))
1904     add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_FOR, stmt, outer_ctx);
1905   for (tree c = omp_find_clause (gimple_omp_parallel_clauses (stmt),
1906 				 OMP_CLAUSE_REDUCTION);
1907        c; c = omp_find_clause (OMP_CLAUSE_CHAIN (c), OMP_CLAUSE_REDUCTION))
1908     if (OMP_CLAUSE_REDUCTION_TASK (c))
1909       {
1910 	tree type = build_pointer_type (pointer_sized_int_node);
1911 	tree temp = create_tmp_var (type);
1912 	tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
1913 	if (outer_ctx)
1914 	  insert_decl_map (&outer_ctx->cb, temp, temp);
1915 	OMP_CLAUSE_DECL (c) = temp;
1916 	OMP_CLAUSE_CHAIN (c) = gimple_omp_parallel_clauses (stmt);
1917 	gimple_omp_parallel_set_clauses (stmt, c);
1918 	break;
1919       }
1920     else if (OMP_CLAUSE_CHAIN (c) == NULL_TREE)
1921       break;
1922 
1923   ctx = new_omp_context (stmt, outer_ctx);
1924   taskreg_contexts.safe_push (ctx);
1925   if (taskreg_nesting_level > 1)
1926     ctx->is_nested = true;
1927   ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
1928   ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
1929   name = create_tmp_var_name (".omp_data_s");
1930   name = build_decl (gimple_location (stmt),
1931 		     TYPE_DECL, name, ctx->record_type);
1932   DECL_ARTIFICIAL (name) = 1;
1933   DECL_NAMELESS (name) = 1;
1934   TYPE_NAME (ctx->record_type) = name;
1935   TYPE_ARTIFICIAL (ctx->record_type) = 1;
1936   if (!gimple_omp_parallel_grid_phony (stmt))
1937     {
1938       create_omp_child_function (ctx, false);
1939       gimple_omp_parallel_set_child_fn (stmt, ctx->cb.dst_fn);
1940     }
1941 
1942   scan_sharing_clauses (gimple_omp_parallel_clauses (stmt), ctx);
1943   scan_omp (gimple_omp_body_ptr (stmt), ctx);
1944 
1945   if (TYPE_FIELDS (ctx->record_type) == NULL)
1946     ctx->record_type = ctx->receiver_decl = NULL;
1947 }
1948 
1949 /* Scan an OpenMP task directive.  */
1950 
1951 static void
scan_omp_task(gimple_stmt_iterator * gsi,omp_context * outer_ctx)1952 scan_omp_task (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
1953 {
1954   omp_context *ctx;
1955   tree name, t;
1956   gomp_task *stmt = as_a <gomp_task *> (gsi_stmt (*gsi));
1957 
1958   /* Ignore task directives with empty bodies, unless they have depend
1959      clause.  */
1960   if (optimize > 0
1961       && gimple_omp_body (stmt)
1962       && empty_body_p (gimple_omp_body (stmt))
1963       && !omp_find_clause (gimple_omp_task_clauses (stmt), OMP_CLAUSE_DEPEND))
1964     {
1965       gsi_replace (gsi, gimple_build_nop (), false);
1966       return;
1967     }
1968 
1969   if (gimple_omp_task_taskloop_p (stmt))
1970     add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_TASKLOOP, stmt, outer_ctx);
1971 
1972   ctx = new_omp_context (stmt, outer_ctx);
1973 
1974   if (gimple_omp_task_taskwait_p (stmt))
1975     {
1976       scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);
1977       return;
1978     }
1979 
1980   taskreg_contexts.safe_push (ctx);
1981   if (taskreg_nesting_level > 1)
1982     ctx->is_nested = true;
1983   ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
1984   ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
1985   name = create_tmp_var_name (".omp_data_s");
1986   name = build_decl (gimple_location (stmt),
1987 		     TYPE_DECL, name, ctx->record_type);
1988   DECL_ARTIFICIAL (name) = 1;
1989   DECL_NAMELESS (name) = 1;
1990   TYPE_NAME (ctx->record_type) = name;
1991   TYPE_ARTIFICIAL (ctx->record_type) = 1;
1992   create_omp_child_function (ctx, false);
1993   gimple_omp_task_set_child_fn (stmt, ctx->cb.dst_fn);
1994 
1995   scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);
1996 
1997   if (ctx->srecord_type)
1998     {
1999       name = create_tmp_var_name (".omp_data_a");
2000       name = build_decl (gimple_location (stmt),
2001 			 TYPE_DECL, name, ctx->srecord_type);
2002       DECL_ARTIFICIAL (name) = 1;
2003       DECL_NAMELESS (name) = 1;
2004       TYPE_NAME (ctx->srecord_type) = name;
2005       TYPE_ARTIFICIAL (ctx->srecord_type) = 1;
2006       create_omp_child_function (ctx, true);
2007     }
2008 
2009   scan_omp (gimple_omp_body_ptr (stmt), ctx);
2010 
2011   if (TYPE_FIELDS (ctx->record_type) == NULL)
2012     {
2013       ctx->record_type = ctx->receiver_decl = NULL;
2014       t = build_int_cst (long_integer_type_node, 0);
2015       gimple_omp_task_set_arg_size (stmt, t);
2016       t = build_int_cst (long_integer_type_node, 1);
2017       gimple_omp_task_set_arg_align (stmt, t);
2018     }
2019 }
2020 
2021 /* Helper function for finish_taskreg_scan, called through walk_tree.
2022    If maybe_lookup_decl_in_outer_context returns non-NULL for some
2023    tree, replace it in the expression.  */
2024 
2025 static tree
finish_taskreg_remap(tree * tp,int * walk_subtrees,void * data)2026 finish_taskreg_remap (tree *tp, int *walk_subtrees, void *data)
2027 {
2028   if (VAR_P (*tp))
2029     {
2030       omp_context *ctx = (omp_context *) data;
2031       tree t = maybe_lookup_decl_in_outer_ctx (*tp, ctx);
2032       if (t != *tp)
2033 	{
2034 	  if (DECL_HAS_VALUE_EXPR_P (t))
2035 	    t = unshare_expr (DECL_VALUE_EXPR (t));
2036 	  *tp = t;
2037 	}
2038       *walk_subtrees = 0;
2039     }
2040   else if (IS_TYPE_OR_DECL_P (*tp))
2041     *walk_subtrees = 0;
2042   return NULL_TREE;
2043 }
2044 
2045 /* If any decls have been made addressable during scan_omp,
2046    adjust their fields if needed, and layout record types
2047    of parallel/task constructs.  */
2048 
2049 static void
finish_taskreg_scan(omp_context * ctx)2050 finish_taskreg_scan (omp_context *ctx)
2051 {
2052   if (ctx->record_type == NULL_TREE)
2053     return;
2054 
2055   /* If any task_shared_vars were needed, verify all
2056      OMP_CLAUSE_SHARED clauses on GIMPLE_OMP_{PARALLEL,TASK,TEAMS}
2057      statements if use_pointer_for_field hasn't changed
2058      because of that.  If it did, update field types now.  */
2059   if (task_shared_vars)
2060     {
2061       tree c;
2062 
2063       for (c = gimple_omp_taskreg_clauses (ctx->stmt);
2064 	   c; c = OMP_CLAUSE_CHAIN (c))
2065 	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
2066 	    && !OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
2067 	  {
2068 	    tree decl = OMP_CLAUSE_DECL (c);
2069 
2070 	    /* Global variables don't need to be copied,
2071 	       the receiver side will use them directly.  */
2072 	    if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
2073 	      continue;
2074 	    if (!bitmap_bit_p (task_shared_vars, DECL_UID (decl))
2075 		|| !use_pointer_for_field (decl, ctx))
2076 	      continue;
2077 	    tree field = lookup_field (decl, ctx);
2078 	    if (TREE_CODE (TREE_TYPE (field)) == POINTER_TYPE
2079 		&& TREE_TYPE (TREE_TYPE (field)) == TREE_TYPE (decl))
2080 	      continue;
2081 	    TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
2082 	    TREE_THIS_VOLATILE (field) = 0;
2083 	    DECL_USER_ALIGN (field) = 0;
2084 	    SET_DECL_ALIGN (field, TYPE_ALIGN (TREE_TYPE (field)));
2085 	    if (TYPE_ALIGN (ctx->record_type) < DECL_ALIGN (field))
2086 	      SET_TYPE_ALIGN (ctx->record_type, DECL_ALIGN (field));
2087 	    if (ctx->srecord_type)
2088 	      {
2089 		tree sfield = lookup_sfield (decl, ctx);
2090 		TREE_TYPE (sfield) = TREE_TYPE (field);
2091 		TREE_THIS_VOLATILE (sfield) = 0;
2092 		DECL_USER_ALIGN (sfield) = 0;
2093 		SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
2094 		if (TYPE_ALIGN (ctx->srecord_type) < DECL_ALIGN (sfield))
2095 		  SET_TYPE_ALIGN (ctx->srecord_type, DECL_ALIGN (sfield));
2096 	      }
2097 	  }
2098     }
2099 
2100   if (gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL)
2101     {
2102       tree clauses = gimple_omp_parallel_clauses (ctx->stmt);
2103       tree c = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
2104       if (c)
2105 	{
2106 	  /* Move the _reductemp_ clause first.  GOMP_parallel_reductions
2107 	     expects to find it at the start of data.  */
2108 	  tree f = lookup_field (OMP_CLAUSE_DECL (c), ctx);
2109 	  tree *p = &TYPE_FIELDS (ctx->record_type);
2110 	  while (*p)
2111 	    if (*p == f)
2112 	      {
2113 		*p = DECL_CHAIN (*p);
2114 		break;
2115 	      }
2116 	    else
2117 	      p = &DECL_CHAIN (*p);
2118 	  DECL_CHAIN (f) = TYPE_FIELDS (ctx->record_type);
2119 	  TYPE_FIELDS (ctx->record_type) = f;
2120 	}
2121       layout_type (ctx->record_type);
2122       fixup_child_record_type (ctx);
2123     }
2124   else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
2125     {
2126       layout_type (ctx->record_type);
2127       fixup_child_record_type (ctx);
2128     }
2129   else
2130     {
2131       location_t loc = gimple_location (ctx->stmt);
2132       tree *p, vla_fields = NULL_TREE, *q = &vla_fields;
2133       /* Move VLA fields to the end.  */
2134       p = &TYPE_FIELDS (ctx->record_type);
2135       while (*p)
2136 	if (!TYPE_SIZE_UNIT (TREE_TYPE (*p))
2137 	    || ! TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (*p))))
2138 	  {
2139 	    *q = *p;
2140 	    *p = TREE_CHAIN (*p);
2141 	    TREE_CHAIN (*q) = NULL_TREE;
2142 	    q = &TREE_CHAIN (*q);
2143 	  }
2144 	else
2145 	  p = &DECL_CHAIN (*p);
2146       *p = vla_fields;
2147       if (gimple_omp_task_taskloop_p (ctx->stmt))
2148 	{
2149 	  /* Move fields corresponding to first and second _looptemp_
2150 	     clause first.  There are filled by GOMP_taskloop
2151 	     and thus need to be in specific positions.  */
2152 	  tree clauses = gimple_omp_task_clauses (ctx->stmt);
2153 	  tree c1 = omp_find_clause (clauses, OMP_CLAUSE__LOOPTEMP_);
2154 	  tree c2 = omp_find_clause (OMP_CLAUSE_CHAIN (c1),
2155 				     OMP_CLAUSE__LOOPTEMP_);
2156 	  tree c3 = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
2157 	  tree f1 = lookup_field (OMP_CLAUSE_DECL (c1), ctx);
2158 	  tree f2 = lookup_field (OMP_CLAUSE_DECL (c2), ctx);
2159 	  tree f3 = c3 ? lookup_field (OMP_CLAUSE_DECL (c3), ctx) : NULL_TREE;
2160 	  p = &TYPE_FIELDS (ctx->record_type);
2161 	  while (*p)
2162 	    if (*p == f1 || *p == f2 || *p == f3)
2163 	      *p = DECL_CHAIN (*p);
2164 	    else
2165 	      p = &DECL_CHAIN (*p);
2166 	  DECL_CHAIN (f1) = f2;
2167 	  if (c3)
2168 	    {
2169 	      DECL_CHAIN (f2) = f3;
2170 	      DECL_CHAIN (f3) = TYPE_FIELDS (ctx->record_type);
2171 	    }
2172 	  else
2173 	    DECL_CHAIN (f2) = TYPE_FIELDS (ctx->record_type);
2174 	  TYPE_FIELDS (ctx->record_type) = f1;
2175 	  if (ctx->srecord_type)
2176 	    {
2177 	      f1 = lookup_sfield (OMP_CLAUSE_DECL (c1), ctx);
2178 	      f2 = lookup_sfield (OMP_CLAUSE_DECL (c2), ctx);
2179 	      if (c3)
2180 		f3 = lookup_sfield (OMP_CLAUSE_DECL (c3), ctx);
2181 	      p = &TYPE_FIELDS (ctx->srecord_type);
2182 	      while (*p)
2183 		if (*p == f1 || *p == f2 || *p == f3)
2184 		  *p = DECL_CHAIN (*p);
2185 		else
2186 		  p = &DECL_CHAIN (*p);
2187 	      DECL_CHAIN (f1) = f2;
2188 	      DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
2189 	      if (c3)
2190 		{
2191 		  DECL_CHAIN (f2) = f3;
2192 		  DECL_CHAIN (f3) = TYPE_FIELDS (ctx->srecord_type);
2193 		}
2194 	      else
2195 		DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
2196 	      TYPE_FIELDS (ctx->srecord_type) = f1;
2197 	    }
2198 	}
2199       layout_type (ctx->record_type);
2200       fixup_child_record_type (ctx);
2201       if (ctx->srecord_type)
2202 	layout_type (ctx->srecord_type);
2203       tree t = fold_convert_loc (loc, long_integer_type_node,
2204 				 TYPE_SIZE_UNIT (ctx->record_type));
2205       if (TREE_CODE (t) != INTEGER_CST)
2206 	{
2207 	  t = unshare_expr (t);
2208 	  walk_tree (&t, finish_taskreg_remap, ctx, NULL);
2209 	}
2210       gimple_omp_task_set_arg_size (ctx->stmt, t);
2211       t = build_int_cst (long_integer_type_node,
2212 			 TYPE_ALIGN_UNIT (ctx->record_type));
2213       gimple_omp_task_set_arg_align (ctx->stmt, t);
2214     }
2215 }
2216 
2217 /* Find the enclosing offload context.  */
2218 
2219 static omp_context *
enclosing_target_ctx(omp_context * ctx)2220 enclosing_target_ctx (omp_context *ctx)
2221 {
2222   for (; ctx; ctx = ctx->outer)
2223     if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET)
2224       break;
2225 
2226   return ctx;
2227 }
2228 
2229 /* Return true if ctx is part of an oacc kernels region.  */
2230 
2231 static bool
ctx_in_oacc_kernels_region(omp_context * ctx)2232 ctx_in_oacc_kernels_region (omp_context *ctx)
2233 {
2234   for (;ctx != NULL; ctx = ctx->outer)
2235     {
2236       gimple *stmt = ctx->stmt;
2237       if (gimple_code (stmt) == GIMPLE_OMP_TARGET
2238 	  && gimple_omp_target_kind (stmt) == GF_OMP_TARGET_KIND_OACC_KERNELS)
2239 	return true;
2240     }
2241 
2242   return false;
2243 }
2244 
2245 /* Check the parallelism clauses inside a kernels regions.
2246    Until kernels handling moves to use the same loop indirection
2247    scheme as parallel, we need to do this checking early.  */
2248 
2249 static unsigned
check_oacc_kernel_gwv(gomp_for * stmt,omp_context * ctx)2250 check_oacc_kernel_gwv (gomp_for *stmt, omp_context *ctx)
2251 {
2252   bool checking = true;
2253   unsigned outer_mask = 0;
2254   unsigned this_mask = 0;
2255   bool has_seq = false, has_auto = false;
2256 
2257   if (ctx->outer)
2258     outer_mask = check_oacc_kernel_gwv (NULL,  ctx->outer);
2259   if (!stmt)
2260     {
2261       checking = false;
2262       if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR)
2263 	return outer_mask;
2264       stmt = as_a <gomp_for *> (ctx->stmt);
2265     }
2266 
2267   for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2268     {
2269       switch (OMP_CLAUSE_CODE (c))
2270 	{
2271 	case OMP_CLAUSE_GANG:
2272 	  this_mask |= GOMP_DIM_MASK (GOMP_DIM_GANG);
2273 	  break;
2274 	case OMP_CLAUSE_WORKER:
2275 	  this_mask |= GOMP_DIM_MASK (GOMP_DIM_WORKER);
2276 	  break;
2277 	case OMP_CLAUSE_VECTOR:
2278 	  this_mask |= GOMP_DIM_MASK (GOMP_DIM_VECTOR);
2279 	  break;
2280 	case OMP_CLAUSE_SEQ:
2281 	  has_seq = true;
2282 	  break;
2283 	case OMP_CLAUSE_AUTO:
2284 	  has_auto = true;
2285 	  break;
2286 	default:
2287 	  break;
2288 	}
2289     }
2290 
2291   if (checking)
2292     {
2293       if (has_seq && (this_mask || has_auto))
2294 	error_at (gimple_location (stmt), "%<seq%> overrides other"
2295 		  " OpenACC loop specifiers");
2296       else if (has_auto && this_mask)
2297 	error_at (gimple_location (stmt), "%<auto%> conflicts with other"
2298 		  " OpenACC loop specifiers");
2299 
2300       if (this_mask & outer_mask)
2301 	error_at (gimple_location (stmt), "inner loop uses same"
2302 		  " OpenACC parallelism as containing loop");
2303     }
2304 
2305   return outer_mask | this_mask;
2306 }
2307 
2308 /* Scan a GIMPLE_OMP_FOR.  */
2309 
2310 static omp_context *
scan_omp_for(gomp_for * stmt,omp_context * outer_ctx)2311 scan_omp_for (gomp_for *stmt, omp_context *outer_ctx)
2312 {
2313   omp_context *ctx;
2314   size_t i;
2315   tree clauses = gimple_omp_for_clauses (stmt);
2316 
2317   ctx = new_omp_context (stmt, outer_ctx);
2318 
2319   if (is_gimple_omp_oacc (stmt))
2320     {
2321       omp_context *tgt = enclosing_target_ctx (outer_ctx);
2322 
2323       if (!tgt || is_oacc_parallel (tgt))
2324 	for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
2325 	  {
2326 	    char const *check = NULL;
2327 
2328 	    switch (OMP_CLAUSE_CODE (c))
2329 	      {
2330 	      case OMP_CLAUSE_GANG:
2331 		check = "gang";
2332 		break;
2333 
2334 	      case OMP_CLAUSE_WORKER:
2335 		check = "worker";
2336 		break;
2337 
2338 	      case OMP_CLAUSE_VECTOR:
2339 		check = "vector";
2340 		break;
2341 
2342 	      default:
2343 		break;
2344 	      }
2345 
2346 	    if (check && OMP_CLAUSE_OPERAND (c, 0))
2347 	      error_at (gimple_location (stmt),
2348 			"argument not permitted on %qs clause in"
2349 			" OpenACC %<parallel%>", check);
2350 	  }
2351 
2352       if (tgt && is_oacc_kernels (tgt))
2353 	{
2354 	  /* Strip out reductions, as they are not handled yet.  */
2355 	  tree *prev_ptr = &clauses;
2356 
2357 	  while (tree probe = *prev_ptr)
2358 	    {
2359 	      tree *next_ptr = &OMP_CLAUSE_CHAIN (probe);
2360 
2361 	      if (OMP_CLAUSE_CODE (probe) == OMP_CLAUSE_REDUCTION)
2362 		*prev_ptr = *next_ptr;
2363 	      else
2364 		prev_ptr = next_ptr;
2365 	    }
2366 
2367 	  gimple_omp_for_set_clauses (stmt, clauses);
2368 	  check_oacc_kernel_gwv (stmt, ctx);
2369 	}
2370     }
2371 
2372   scan_sharing_clauses (clauses, ctx);
2373 
2374   scan_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
2375   for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
2376     {
2377       scan_omp_op (gimple_omp_for_index_ptr (stmt, i), ctx);
2378       scan_omp_op (gimple_omp_for_initial_ptr (stmt, i), ctx);
2379       scan_omp_op (gimple_omp_for_final_ptr (stmt, i), ctx);
2380       scan_omp_op (gimple_omp_for_incr_ptr (stmt, i), ctx);
2381     }
2382   scan_omp (gimple_omp_body_ptr (stmt), ctx);
2383   return ctx;
2384 }
2385 
2386 /* Duplicate #pragma omp simd, one for SIMT, another one for SIMD.  */
2387 
2388 static void
scan_omp_simd(gimple_stmt_iterator * gsi,gomp_for * stmt,omp_context * outer_ctx)2389 scan_omp_simd (gimple_stmt_iterator *gsi, gomp_for *stmt,
2390 	       omp_context *outer_ctx)
2391 {
2392   gbind *bind = gimple_build_bind (NULL, NULL, NULL);
2393   gsi_replace (gsi, bind, false);
2394   gimple_seq seq = NULL;
2395   gimple *g = gimple_build_call_internal (IFN_GOMP_USE_SIMT, 0);
2396   tree cond = create_tmp_var_raw (integer_type_node);
2397   DECL_CONTEXT (cond) = current_function_decl;
2398   DECL_SEEN_IN_BIND_EXPR_P (cond) = 1;
2399   gimple_bind_set_vars (bind, cond);
2400   gimple_call_set_lhs (g, cond);
2401   gimple_seq_add_stmt (&seq, g);
2402   tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
2403   tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
2404   tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
2405   g = gimple_build_cond (NE_EXPR, cond, integer_zero_node, lab1, lab2);
2406   gimple_seq_add_stmt (&seq, g);
2407   g = gimple_build_label (lab1);
2408   gimple_seq_add_stmt (&seq, g);
2409   gimple_seq new_seq = copy_gimple_seq_and_replace_locals (stmt);
2410   gomp_for *new_stmt = as_a <gomp_for *> (new_seq);
2411   tree clause = build_omp_clause (gimple_location (stmt), OMP_CLAUSE__SIMT_);
2412   OMP_CLAUSE_CHAIN (clause) = gimple_omp_for_clauses (new_stmt);
2413   gimple_omp_for_set_clauses (new_stmt, clause);
2414   gimple_seq_add_stmt (&seq, new_stmt);
2415   g = gimple_build_goto (lab3);
2416   gimple_seq_add_stmt (&seq, g);
2417   g = gimple_build_label (lab2);
2418   gimple_seq_add_stmt (&seq, g);
2419   gimple_seq_add_stmt (&seq, stmt);
2420   g = gimple_build_label (lab3);
2421   gimple_seq_add_stmt (&seq, g);
2422   gimple_bind_set_body (bind, seq);
2423   update_stmt (bind);
2424   scan_omp_for (new_stmt, outer_ctx);
2425   scan_omp_for (stmt, outer_ctx)->simt_stmt = new_stmt;
2426 }
2427 
2428 /* Scan an OpenMP sections directive.  */
2429 
2430 static void
scan_omp_sections(gomp_sections * stmt,omp_context * outer_ctx)2431 scan_omp_sections (gomp_sections *stmt, omp_context *outer_ctx)
2432 {
2433   omp_context *ctx;
2434 
2435   ctx = new_omp_context (stmt, outer_ctx);
2436   scan_sharing_clauses (gimple_omp_sections_clauses (stmt), ctx);
2437   scan_omp (gimple_omp_body_ptr (stmt), ctx);
2438 }
2439 
2440 /* Scan an OpenMP single directive.  */
2441 
2442 static void
scan_omp_single(gomp_single * stmt,omp_context * outer_ctx)2443 scan_omp_single (gomp_single *stmt, omp_context *outer_ctx)
2444 {
2445   omp_context *ctx;
2446   tree name;
2447 
2448   ctx = new_omp_context (stmt, outer_ctx);
2449   ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2450   ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2451   name = create_tmp_var_name (".omp_copy_s");
2452   name = build_decl (gimple_location (stmt),
2453 		     TYPE_DECL, name, ctx->record_type);
2454   TYPE_NAME (ctx->record_type) = name;
2455 
2456   scan_sharing_clauses (gimple_omp_single_clauses (stmt), ctx);
2457   scan_omp (gimple_omp_body_ptr (stmt), ctx);
2458 
2459   if (TYPE_FIELDS (ctx->record_type) == NULL)
2460     ctx->record_type = NULL;
2461   else
2462     layout_type (ctx->record_type);
2463 }
2464 
2465 /* Scan a GIMPLE_OMP_TARGET.  */
2466 
2467 static void
scan_omp_target(gomp_target * stmt,omp_context * outer_ctx)2468 scan_omp_target (gomp_target *stmt, omp_context *outer_ctx)
2469 {
2470   omp_context *ctx;
2471   tree name;
2472   bool offloaded = is_gimple_omp_offloaded (stmt);
2473   tree clauses = gimple_omp_target_clauses (stmt);
2474 
2475   ctx = new_omp_context (stmt, outer_ctx);
2476   ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2477   ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2478   name = create_tmp_var_name (".omp_data_t");
2479   name = build_decl (gimple_location (stmt),
2480 		     TYPE_DECL, name, ctx->record_type);
2481   DECL_ARTIFICIAL (name) = 1;
2482   DECL_NAMELESS (name) = 1;
2483   TYPE_NAME (ctx->record_type) = name;
2484   TYPE_ARTIFICIAL (ctx->record_type) = 1;
2485 
2486   if (offloaded)
2487     {
2488       create_omp_child_function (ctx, false);
2489       gimple_omp_target_set_child_fn (stmt, ctx->cb.dst_fn);
2490     }
2491 
2492   scan_sharing_clauses (clauses, ctx);
2493   scan_omp (gimple_omp_body_ptr (stmt), ctx);
2494 
2495   if (TYPE_FIELDS (ctx->record_type) == NULL)
2496     ctx->record_type = ctx->receiver_decl = NULL;
2497   else
2498     {
2499       TYPE_FIELDS (ctx->record_type)
2500 	= nreverse (TYPE_FIELDS (ctx->record_type));
2501       if (flag_checking)
2502 	{
2503 	  unsigned int align = DECL_ALIGN (TYPE_FIELDS (ctx->record_type));
2504 	  for (tree field = TYPE_FIELDS (ctx->record_type);
2505 	       field;
2506 	       field = DECL_CHAIN (field))
2507 	    gcc_assert (DECL_ALIGN (field) == align);
2508 	}
2509       layout_type (ctx->record_type);
2510       if (offloaded)
2511 	fixup_child_record_type (ctx);
2512     }
2513 }
2514 
2515 /* Scan an OpenMP teams directive.  */
2516 
2517 static void
scan_omp_teams(gomp_teams * stmt,omp_context * outer_ctx)2518 scan_omp_teams (gomp_teams *stmt, omp_context *outer_ctx)
2519 {
2520   omp_context *ctx = new_omp_context (stmt, outer_ctx);
2521 
2522   if (!gimple_omp_teams_host (stmt))
2523     {
2524       scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
2525       scan_omp (gimple_omp_body_ptr (stmt), ctx);
2526       return;
2527     }
2528   taskreg_contexts.safe_push (ctx);
2529   gcc_assert (taskreg_nesting_level == 1);
2530   ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2531   ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2532   tree name = create_tmp_var_name (".omp_data_s");
2533   name = build_decl (gimple_location (stmt),
2534 		     TYPE_DECL, name, ctx->record_type);
2535   DECL_ARTIFICIAL (name) = 1;
2536   DECL_NAMELESS (name) = 1;
2537   TYPE_NAME (ctx->record_type) = name;
2538   TYPE_ARTIFICIAL (ctx->record_type) = 1;
2539   create_omp_child_function (ctx, false);
2540   gimple_omp_teams_set_child_fn (stmt, ctx->cb.dst_fn);
2541 
2542   scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
2543   scan_omp (gimple_omp_body_ptr (stmt), ctx);
2544 
2545   if (TYPE_FIELDS (ctx->record_type) == NULL)
2546     ctx->record_type = ctx->receiver_decl = NULL;
2547 }
2548 
2549 /* Check nesting restrictions.  */
2550 static bool
check_omp_nesting_restrictions(gimple * stmt,omp_context * ctx)2551 check_omp_nesting_restrictions (gimple *stmt, omp_context *ctx)
2552 {
2553   tree c;
2554 
2555   if (ctx && gimple_code (ctx->stmt) == GIMPLE_OMP_GRID_BODY)
2556     /* GRID_BODY is an artificial construct, nesting rules will be checked in
2557        the original copy of its contents.  */
2558     return true;
2559 
2560   /* No nesting of non-OpenACC STMT (that is, an OpenMP one, or a GOMP builtin)
2561      inside an OpenACC CTX.  */
2562   if (!(is_gimple_omp (stmt)
2563 	&& is_gimple_omp_oacc (stmt))
2564       /* Except for atomic codes that we share with OpenMP.  */
2565       && !(gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
2566 	   || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE))
2567     {
2568       if (oacc_get_fn_attrib (cfun->decl) != NULL)
2569 	{
2570 	  error_at (gimple_location (stmt),
2571 		    "non-OpenACC construct inside of OpenACC routine");
2572 	  return false;
2573 	}
2574       else
2575 	for (omp_context *octx = ctx; octx != NULL; octx = octx->outer)
2576 	  if (is_gimple_omp (octx->stmt)
2577 	      && is_gimple_omp_oacc (octx->stmt))
2578 	    {
2579 	      error_at (gimple_location (stmt),
2580 			"non-OpenACC construct inside of OpenACC region");
2581 	      return false;
2582 	    }
2583     }
2584 
2585   if (ctx != NULL)
2586     {
2587       if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
2588 	  && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
2589 	{
2590 	  c = NULL_TREE;
2591 	  if (gimple_code (stmt) == GIMPLE_OMP_ORDERED)
2592 	    {
2593 	      c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
2594 	      if (omp_find_clause (c, OMP_CLAUSE_SIMD))
2595 		{
2596 		  if (omp_find_clause (c, OMP_CLAUSE_THREADS)
2597 		      && (ctx->outer == NULL
2598 			  || !gimple_omp_for_combined_into_p (ctx->stmt)
2599 			  || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR
2600 			  || (gimple_omp_for_kind (ctx->outer->stmt)
2601 			      != GF_OMP_FOR_KIND_FOR)
2602 			  || !gimple_omp_for_combined_p (ctx->outer->stmt)))
2603 		    {
2604 		      error_at (gimple_location (stmt),
2605 				"%<ordered simd threads%> must be closely "
2606 				"nested inside of %<for simd%> region");
2607 		      return false;
2608 		    }
2609 		  return true;
2610 		}
2611 	    }
2612 	  else if (gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
2613 		   || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE)
2614 	    return true;
2615 	  error_at (gimple_location (stmt),
2616 		    "OpenMP constructs other than %<#pragma omp ordered simd%>"
2617 		    " or %<#pragma omp atomic%> may not be nested inside"
2618 		    " %<simd%> region");
2619 	  return false;
2620 	}
2621       else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
2622 	{
2623 	  if ((gimple_code (stmt) != GIMPLE_OMP_FOR
2624 	       || ((gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_DISTRIBUTE)
2625 		   && (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_GRID_LOOP)))
2626 	      && gimple_code (stmt) != GIMPLE_OMP_PARALLEL)
2627 	    {
2628 	      error_at (gimple_location (stmt),
2629 			"only %<distribute%> or %<parallel%> regions are "
2630 			"allowed to be strictly nested inside %<teams%> "
2631 			"region");
2632 	      return false;
2633 	    }
2634 	}
2635     }
2636   switch (gimple_code (stmt))
2637     {
2638     case GIMPLE_OMP_FOR:
2639       if (gimple_omp_for_kind (stmt) & GF_OMP_FOR_SIMD)
2640 	return true;
2641       if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_DISTRIBUTE)
2642 	{
2643 	  if (ctx != NULL && gimple_code (ctx->stmt) != GIMPLE_OMP_TEAMS)
2644 	    {
2645 	      error_at (gimple_location (stmt),
2646 			"%<distribute%> region must be strictly nested "
2647 			"inside %<teams%> construct");
2648 	      return false;
2649 	    }
2650 	  return true;
2651 	}
2652       /* We split taskloop into task and nested taskloop in it.  */
2653       if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP)
2654 	return true;
2655       if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_OACC_LOOP)
2656 	{
2657 	  bool ok = false;
2658 
2659 	  if (ctx)
2660 	    switch (gimple_code (ctx->stmt))
2661 	      {
2662 	      case GIMPLE_OMP_FOR:
2663 		ok = (gimple_omp_for_kind (ctx->stmt)
2664 		      == GF_OMP_FOR_KIND_OACC_LOOP);
2665 		break;
2666 
2667 	      case GIMPLE_OMP_TARGET:
2668 		switch (gimple_omp_target_kind (ctx->stmt))
2669 		  {
2670 		  case GF_OMP_TARGET_KIND_OACC_PARALLEL:
2671 		  case GF_OMP_TARGET_KIND_OACC_KERNELS:
2672 		    ok = true;
2673 		    break;
2674 
2675 		  default:
2676 		    break;
2677 		  }
2678 
2679 	      default:
2680 		break;
2681 	      }
2682 	  else if (oacc_get_fn_attrib (current_function_decl))
2683 	    ok = true;
2684 	  if (!ok)
2685 	    {
2686 	      error_at (gimple_location (stmt),
2687 			"OpenACC loop directive must be associated with"
2688 			" an OpenACC compute region");
2689 	      return false;
2690 	    }
2691 	}
2692       /* FALLTHRU */
2693     case GIMPLE_CALL:
2694       if (is_gimple_call (stmt)
2695 	  && (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2696 	      == BUILT_IN_GOMP_CANCEL
2697 	      || DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2698 		 == BUILT_IN_GOMP_CANCELLATION_POINT))
2699 	{
2700 	  const char *bad = NULL;
2701 	  const char *kind = NULL;
2702 	  const char *construct
2703 	    = (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2704 	       == BUILT_IN_GOMP_CANCEL)
2705 	      ? "#pragma omp cancel"
2706 	      : "#pragma omp cancellation point";
2707 	  if (ctx == NULL)
2708 	    {
2709 	      error_at (gimple_location (stmt), "orphaned %qs construct",
2710 			construct);
2711 	      return false;
2712 	    }
2713 	  switch (tree_fits_shwi_p (gimple_call_arg (stmt, 0))
2714 		  ? tree_to_shwi (gimple_call_arg (stmt, 0))
2715 		  : 0)
2716 	    {
2717 	    case 1:
2718 	      if (gimple_code (ctx->stmt) != GIMPLE_OMP_PARALLEL)
2719 		bad = "#pragma omp parallel";
2720 	      else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2721 		       == BUILT_IN_GOMP_CANCEL
2722 		       && !integer_zerop (gimple_call_arg (stmt, 1)))
2723 		ctx->cancellable = true;
2724 	      kind = "parallel";
2725 	      break;
2726 	    case 2:
2727 	      if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
2728 		  || gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR)
2729 		bad = "#pragma omp for";
2730 	      else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2731 		       == BUILT_IN_GOMP_CANCEL
2732 		       && !integer_zerop (gimple_call_arg (stmt, 1)))
2733 		{
2734 		  ctx->cancellable = true;
2735 		  if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2736 				       OMP_CLAUSE_NOWAIT))
2737 		    warning_at (gimple_location (stmt), 0,
2738 				"%<#pragma omp cancel for%> inside "
2739 				"%<nowait%> for construct");
2740 		  if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2741 				       OMP_CLAUSE_ORDERED))
2742 		    warning_at (gimple_location (stmt), 0,
2743 				"%<#pragma omp cancel for%> inside "
2744 				"%<ordered%> for construct");
2745 		}
2746 	      kind = "for";
2747 	      break;
2748 	    case 4:
2749 	      if (gimple_code (ctx->stmt) != GIMPLE_OMP_SECTIONS
2750 		  && gimple_code (ctx->stmt) != GIMPLE_OMP_SECTION)
2751 		bad = "#pragma omp sections";
2752 	      else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2753 		       == BUILT_IN_GOMP_CANCEL
2754 		       && !integer_zerop (gimple_call_arg (stmt, 1)))
2755 		{
2756 		  if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
2757 		    {
2758 		      ctx->cancellable = true;
2759 		      if (omp_find_clause (gimple_omp_sections_clauses
2760 								(ctx->stmt),
2761 					   OMP_CLAUSE_NOWAIT))
2762 			warning_at (gimple_location (stmt), 0,
2763 				    "%<#pragma omp cancel sections%> inside "
2764 				    "%<nowait%> sections construct");
2765 		    }
2766 		  else
2767 		    {
2768 		      gcc_assert (ctx->outer
2769 				  && gimple_code (ctx->outer->stmt)
2770 				     == GIMPLE_OMP_SECTIONS);
2771 		      ctx->outer->cancellable = true;
2772 		      if (omp_find_clause (gimple_omp_sections_clauses
2773 							(ctx->outer->stmt),
2774 					   OMP_CLAUSE_NOWAIT))
2775 			warning_at (gimple_location (stmt), 0,
2776 				    "%<#pragma omp cancel sections%> inside "
2777 				    "%<nowait%> sections construct");
2778 		    }
2779 		}
2780 	      kind = "sections";
2781 	      break;
2782 	    case 8:
2783 	      if (!is_task_ctx (ctx)
2784 		  && (!is_taskloop_ctx (ctx)
2785 		      || ctx->outer == NULL
2786 		      || !is_task_ctx (ctx->outer)))
2787 		bad = "#pragma omp task";
2788 	      else
2789 		{
2790 		  for (omp_context *octx = ctx->outer;
2791 		       octx; octx = octx->outer)
2792 		    {
2793 		      switch (gimple_code (octx->stmt))
2794 			{
2795 			case GIMPLE_OMP_TASKGROUP:
2796 			  break;
2797 			case GIMPLE_OMP_TARGET:
2798 			  if (gimple_omp_target_kind (octx->stmt)
2799 			      != GF_OMP_TARGET_KIND_REGION)
2800 			    continue;
2801 			  /* FALLTHRU */
2802 			case GIMPLE_OMP_PARALLEL:
2803 			case GIMPLE_OMP_TEAMS:
2804 			  error_at (gimple_location (stmt),
2805 				    "%<%s taskgroup%> construct not closely "
2806 				    "nested inside of %<taskgroup%> region",
2807 				    construct);
2808 			  return false;
2809 			case GIMPLE_OMP_TASK:
2810 			  if (gimple_omp_task_taskloop_p (octx->stmt)
2811 			      && octx->outer
2812 			      && is_taskloop_ctx (octx->outer))
2813 			    {
2814 			      tree clauses
2815 				= gimple_omp_for_clauses (octx->outer->stmt);
2816 			      if (!omp_find_clause (clauses, OMP_CLAUSE_NOGROUP))
2817 				break;
2818 			    }
2819 			  continue;
2820 			default:
2821 			  continue;
2822 			}
2823 		      break;
2824 		    }
2825 		  ctx->cancellable = true;
2826 		}
2827 	      kind = "taskgroup";
2828 	      break;
2829 	    default:
2830 	      error_at (gimple_location (stmt), "invalid arguments");
2831 	      return false;
2832 	    }
2833 	  if (bad)
2834 	    {
2835 	      error_at (gimple_location (stmt),
2836 			"%<%s %s%> construct not closely nested inside of %qs",
2837 			construct, kind, bad);
2838 	      return false;
2839 	    }
2840 	}
2841       /* FALLTHRU */
2842     case GIMPLE_OMP_SECTIONS:
2843     case GIMPLE_OMP_SINGLE:
2844       for (; ctx != NULL; ctx = ctx->outer)
2845 	switch (gimple_code (ctx->stmt))
2846 	  {
2847 	  case GIMPLE_OMP_FOR:
2848 	    if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
2849 		&& gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
2850 	      break;
2851 	    /* FALLTHRU */
2852 	  case GIMPLE_OMP_SECTIONS:
2853 	  case GIMPLE_OMP_SINGLE:
2854 	  case GIMPLE_OMP_ORDERED:
2855 	  case GIMPLE_OMP_MASTER:
2856 	  case GIMPLE_OMP_TASK:
2857 	  case GIMPLE_OMP_CRITICAL:
2858 	    if (is_gimple_call (stmt))
2859 	      {
2860 		if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2861 		    != BUILT_IN_GOMP_BARRIER)
2862 		  return true;
2863 		error_at (gimple_location (stmt),
2864 			  "barrier region may not be closely nested inside "
2865 			  "of work-sharing, %<critical%>, %<ordered%>, "
2866 			  "%<master%>, explicit %<task%> or %<taskloop%> "
2867 			  "region");
2868 		return false;
2869 	      }
2870 	    error_at (gimple_location (stmt),
2871 		      "work-sharing region may not be closely nested inside "
2872 		      "of work-sharing, %<critical%>, %<ordered%>, "
2873 		      "%<master%>, explicit %<task%> or %<taskloop%> region");
2874 	    return false;
2875 	  case GIMPLE_OMP_PARALLEL:
2876 	  case GIMPLE_OMP_TEAMS:
2877 	    return true;
2878 	  case GIMPLE_OMP_TARGET:
2879 	    if (gimple_omp_target_kind (ctx->stmt)
2880 		== GF_OMP_TARGET_KIND_REGION)
2881 	      return true;
2882 	    break;
2883 	  default:
2884 	    break;
2885 	  }
2886       break;
2887     case GIMPLE_OMP_MASTER:
2888       for (; ctx != NULL; ctx = ctx->outer)
2889 	switch (gimple_code (ctx->stmt))
2890 	  {
2891 	  case GIMPLE_OMP_FOR:
2892 	    if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
2893 		&& gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
2894 	      break;
2895 	    /* FALLTHRU */
2896 	  case GIMPLE_OMP_SECTIONS:
2897 	  case GIMPLE_OMP_SINGLE:
2898 	  case GIMPLE_OMP_TASK:
2899 	    error_at (gimple_location (stmt),
2900 		      "%<master%> region may not be closely nested inside "
2901 		      "of work-sharing, explicit %<task%> or %<taskloop%> "
2902 		      "region");
2903 	    return false;
2904 	  case GIMPLE_OMP_PARALLEL:
2905 	  case GIMPLE_OMP_TEAMS:
2906 	    return true;
2907 	  case GIMPLE_OMP_TARGET:
2908 	    if (gimple_omp_target_kind (ctx->stmt)
2909 		== GF_OMP_TARGET_KIND_REGION)
2910 	      return true;
2911 	    break;
2912 	  default:
2913 	    break;
2914 	  }
2915       break;
2916     case GIMPLE_OMP_TASK:
2917       for (c = gimple_omp_task_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2918 	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
2919 	    && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
2920 		|| OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
2921 	  {
2922 	    enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
2923 	    error_at (OMP_CLAUSE_LOCATION (c),
2924 		      "%<depend(%s)%> is only allowed in %<omp ordered%>",
2925 		      kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
2926 	    return false;
2927 	  }
2928       break;
2929     case GIMPLE_OMP_ORDERED:
2930       for (c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
2931 	   c; c = OMP_CLAUSE_CHAIN (c))
2932 	{
2933 	  if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
2934 	    {
2935 	      gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREADS
2936 			  || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SIMD);
2937 	      continue;
2938 	    }
2939 	  enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
2940 	  if (kind == OMP_CLAUSE_DEPEND_SOURCE
2941 	      || kind == OMP_CLAUSE_DEPEND_SINK)
2942 	    {
2943 	      tree oclause;
2944 	      /* Look for containing ordered(N) loop.  */
2945 	      if (ctx == NULL
2946 		  || gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
2947 		  || (oclause
2948 			= omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2949 					   OMP_CLAUSE_ORDERED)) == NULL_TREE)
2950 		{
2951 		  error_at (OMP_CLAUSE_LOCATION (c),
2952 			    "%<ordered%> construct with %<depend%> clause "
2953 			    "must be closely nested inside an %<ordered%> "
2954 			    "loop");
2955 		  return false;
2956 		}
2957 	      else if (OMP_CLAUSE_ORDERED_EXPR (oclause) == NULL_TREE)
2958 		{
2959 		  error_at (OMP_CLAUSE_LOCATION (c),
2960 			    "%<ordered%> construct with %<depend%> clause "
2961 			    "must be closely nested inside a loop with "
2962 			    "%<ordered%> clause with a parameter");
2963 		  return false;
2964 		}
2965 	    }
2966 	  else
2967 	    {
2968 	      error_at (OMP_CLAUSE_LOCATION (c),
2969 			"invalid depend kind in omp %<ordered%> %<depend%>");
2970 	      return false;
2971 	    }
2972 	}
2973       c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
2974       if (omp_find_clause (c, OMP_CLAUSE_SIMD))
2975 	{
2976 	  /* ordered simd must be closely nested inside of simd region,
2977 	     and simd region must not encounter constructs other than
2978 	     ordered simd, therefore ordered simd may be either orphaned,
2979 	     or ctx->stmt must be simd.  The latter case is handled already
2980 	     earlier.  */
2981 	  if (ctx != NULL)
2982 	    {
2983 	      error_at (gimple_location (stmt),
2984 			"%<ordered%> %<simd%> must be closely nested inside "
2985 			"%<simd%> region");
2986 	      return false;
2987 	    }
2988 	}
2989       for (; ctx != NULL; ctx = ctx->outer)
2990 	switch (gimple_code (ctx->stmt))
2991 	  {
2992 	  case GIMPLE_OMP_CRITICAL:
2993 	  case GIMPLE_OMP_TASK:
2994 	  case GIMPLE_OMP_ORDERED:
2995 	  ordered_in_taskloop:
2996 	    error_at (gimple_location (stmt),
2997 		      "%<ordered%> region may not be closely nested inside "
2998 		      "of %<critical%>, %<ordered%>, explicit %<task%> or "
2999 		      "%<taskloop%> region");
3000 	    return false;
3001 	  case GIMPLE_OMP_FOR:
3002 	    if (gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP)
3003 	      goto ordered_in_taskloop;
3004 	    tree o;
3005 	    o = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3006 				 OMP_CLAUSE_ORDERED);
3007 	    if (o == NULL)
3008 	      {
3009 		error_at (gimple_location (stmt),
3010 			  "%<ordered%> region must be closely nested inside "
3011 			  "a loop region with an %<ordered%> clause");
3012 		return false;
3013 	      }
3014 	    if (OMP_CLAUSE_ORDERED_EXPR (o) != NULL_TREE
3015 		&& omp_find_clause (c, OMP_CLAUSE_DEPEND) == NULL_TREE)
3016 	      {
3017 		error_at (gimple_location (stmt),
3018 			  "%<ordered%> region without %<depend%> clause may "
3019 			  "not be closely nested inside a loop region with "
3020 			  "an %<ordered%> clause with a parameter");
3021 		return false;
3022 	      }
3023 	    return true;
3024 	  case GIMPLE_OMP_TARGET:
3025 	    if (gimple_omp_target_kind (ctx->stmt)
3026 		!= GF_OMP_TARGET_KIND_REGION)
3027 	      break;
3028 	    /* FALLTHRU */
3029 	  case GIMPLE_OMP_PARALLEL:
3030 	  case GIMPLE_OMP_TEAMS:
3031 	    error_at (gimple_location (stmt),
3032 		      "%<ordered%> region must be closely nested inside "
3033 		      "a loop region with an %<ordered%> clause");
3034 	    return false;
3035 	  default:
3036 	    break;
3037 	  }
3038       break;
3039     case GIMPLE_OMP_CRITICAL:
3040       {
3041 	tree this_stmt_name
3042 	  = gimple_omp_critical_name (as_a <gomp_critical *> (stmt));
3043 	for (; ctx != NULL; ctx = ctx->outer)
3044 	  if (gomp_critical *other_crit
3045 	        = dyn_cast <gomp_critical *> (ctx->stmt))
3046 	    if (this_stmt_name == gimple_omp_critical_name (other_crit))
3047 	      {
3048 		error_at (gimple_location (stmt),
3049 			  "%<critical%> region may not be nested inside "
3050 			   "a %<critical%> region with the same name");
3051 		return false;
3052 	      }
3053       }
3054       break;
3055     case GIMPLE_OMP_TEAMS:
3056       if ((ctx == NULL
3057            || gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET
3058            || gimple_omp_target_kind (ctx->stmt) != GF_OMP_TARGET_KIND_REGION)
3059 	  && lang_GNU_Fortran ())
3060 	{
3061 	  error_at (gimple_location (stmt),
3062 		    "%<teams%> construct not closely nested inside of "
3063 		    "%<target%> construct");
3064 	  return false;
3065 	}
3066       if (ctx == NULL)
3067 	break;
3068       else if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET
3069 	       || (gimple_omp_target_kind (ctx->stmt)
3070 		   != GF_OMP_TARGET_KIND_REGION))
3071 	{
3072 	  /* Teams construct can appear either strictly nested inside of
3073 	     target construct with no intervening stmts, or can be encountered
3074 	     only by initial task (so must not appear inside any OpenMP
3075 	     construct.  */
3076 	  error_at (gimple_location (stmt),
3077 		    "%<teams%> construct must be closely nested inside of "
3078 		    "%<target%> construct or not nested in any OpenMP "
3079 		    "construct");
3080 	  return false;
3081 	}
3082       break;
3083     case GIMPLE_OMP_TARGET:
3084       for (c = gimple_omp_target_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
3085 	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
3086 	    && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
3087 		|| OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
3088 	  {
3089 	    enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
3090 	    error_at (OMP_CLAUSE_LOCATION (c),
3091 		      "%<depend(%s)%> is only allowed in %<omp ordered%>",
3092 		      kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
3093 	    return false;
3094 	  }
3095       if (is_gimple_omp_offloaded (stmt)
3096 	  && oacc_get_fn_attrib (cfun->decl) != NULL)
3097 	{
3098 	  error_at (gimple_location (stmt),
3099 		    "OpenACC region inside of OpenACC routine, nested "
3100 		    "parallelism not supported yet");
3101 	  return false;
3102 	}
3103       for (; ctx != NULL; ctx = ctx->outer)
3104 	{
3105 	  if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET)
3106 	    {
3107 	      if (is_gimple_omp (stmt)
3108 		  && is_gimple_omp_oacc (stmt)
3109 		  && is_gimple_omp (ctx->stmt))
3110 		{
3111 		  error_at (gimple_location (stmt),
3112 			    "OpenACC construct inside of non-OpenACC region");
3113 		  return false;
3114 		}
3115 	      continue;
3116 	    }
3117 
3118 	  const char *stmt_name, *ctx_stmt_name;
3119 	  switch (gimple_omp_target_kind (stmt))
3120 	    {
3121 	    case GF_OMP_TARGET_KIND_REGION: stmt_name = "target"; break;
3122 	    case GF_OMP_TARGET_KIND_DATA: stmt_name = "target data"; break;
3123 	    case GF_OMP_TARGET_KIND_UPDATE: stmt_name = "target update"; break;
3124 	    case GF_OMP_TARGET_KIND_ENTER_DATA:
3125 	      stmt_name = "target enter data"; break;
3126 	    case GF_OMP_TARGET_KIND_EXIT_DATA:
3127 	      stmt_name = "target exit data"; break;
3128 	    case GF_OMP_TARGET_KIND_OACC_PARALLEL: stmt_name = "parallel"; break;
3129 	    case GF_OMP_TARGET_KIND_OACC_KERNELS: stmt_name = "kernels"; break;
3130 	    case GF_OMP_TARGET_KIND_OACC_DATA: stmt_name = "data"; break;
3131 	    case GF_OMP_TARGET_KIND_OACC_UPDATE: stmt_name = "update"; break;
3132 	    case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA:
3133 	      stmt_name = "enter/exit data"; break;
3134 	    case GF_OMP_TARGET_KIND_OACC_HOST_DATA: stmt_name = "host_data";
3135 	      break;
3136 	    default: gcc_unreachable ();
3137 	    }
3138 	  switch (gimple_omp_target_kind (ctx->stmt))
3139 	    {
3140 	    case GF_OMP_TARGET_KIND_REGION: ctx_stmt_name = "target"; break;
3141 	    case GF_OMP_TARGET_KIND_DATA: ctx_stmt_name = "target data"; break;
3142 	    case GF_OMP_TARGET_KIND_OACC_PARALLEL:
3143 	      ctx_stmt_name = "parallel"; break;
3144 	    case GF_OMP_TARGET_KIND_OACC_KERNELS:
3145 	      ctx_stmt_name = "kernels"; break;
3146 	    case GF_OMP_TARGET_KIND_OACC_DATA: ctx_stmt_name = "data"; break;
3147 	    case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
3148 	      ctx_stmt_name = "host_data"; break;
3149 	    default: gcc_unreachable ();
3150 	    }
3151 
3152 	  /* OpenACC/OpenMP mismatch?  */
3153 	  if (is_gimple_omp_oacc (stmt)
3154 	      != is_gimple_omp_oacc (ctx->stmt))
3155 	    {
3156 	      error_at (gimple_location (stmt),
3157 			"%s %qs construct inside of %s %qs region",
3158 			(is_gimple_omp_oacc (stmt)
3159 			 ? "OpenACC" : "OpenMP"), stmt_name,
3160 			(is_gimple_omp_oacc (ctx->stmt)
3161 			 ? "OpenACC" : "OpenMP"), ctx_stmt_name);
3162 	      return false;
3163 	    }
3164 	  if (is_gimple_omp_offloaded (ctx->stmt))
3165 	    {
3166 	      /* No GIMPLE_OMP_TARGET inside offloaded OpenACC CTX.  */
3167 	      if (is_gimple_omp_oacc (ctx->stmt))
3168 		{
3169 		  error_at (gimple_location (stmt),
3170 			    "%qs construct inside of %qs region",
3171 			    stmt_name, ctx_stmt_name);
3172 		  return false;
3173 		}
3174 	      else
3175 		{
3176 		  warning_at (gimple_location (stmt), 0,
3177 			      "%qs construct inside of %qs region",
3178 			      stmt_name, ctx_stmt_name);
3179 		}
3180 	    }
3181 	}
3182       break;
3183     default:
3184       break;
3185     }
3186   return true;
3187 }
3188 
3189 
3190 /* Helper function scan_omp.
3191 
3192    Callback for walk_tree or operators in walk_gimple_stmt used to
3193    scan for OMP directives in TP.  */
3194 
3195 static tree
scan_omp_1_op(tree * tp,int * walk_subtrees,void * data)3196 scan_omp_1_op (tree *tp, int *walk_subtrees, void *data)
3197 {
3198   struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
3199   omp_context *ctx = (omp_context *) wi->info;
3200   tree t = *tp;
3201 
3202   switch (TREE_CODE (t))
3203     {
3204     case VAR_DECL:
3205     case PARM_DECL:
3206     case LABEL_DECL:
3207     case RESULT_DECL:
3208       if (ctx)
3209 	{
3210 	  tree repl = remap_decl (t, &ctx->cb);
3211 	  gcc_checking_assert (TREE_CODE (repl) != ERROR_MARK);
3212 	  *tp = repl;
3213 	}
3214       break;
3215 
3216     default:
3217       if (ctx && TYPE_P (t))
3218 	*tp = remap_type (t, &ctx->cb);
3219       else if (!DECL_P (t))
3220 	{
3221 	  *walk_subtrees = 1;
3222 	  if (ctx)
3223 	    {
3224 	      tree tem = remap_type (TREE_TYPE (t), &ctx->cb);
3225 	      if (tem != TREE_TYPE (t))
3226 		{
3227 		  if (TREE_CODE (t) == INTEGER_CST)
3228 		    *tp = wide_int_to_tree (tem, wi::to_wide (t));
3229 		  else
3230 		    TREE_TYPE (t) = tem;
3231 		}
3232 	    }
3233 	}
3234       break;
3235     }
3236 
3237   return NULL_TREE;
3238 }
3239 
3240 /* Return true if FNDECL is a setjmp or a longjmp.  */
3241 
3242 static bool
setjmp_or_longjmp_p(const_tree fndecl)3243 setjmp_or_longjmp_p (const_tree fndecl)
3244 {
3245   if (fndecl_built_in_p (fndecl, BUILT_IN_SETJMP)
3246       || fndecl_built_in_p (fndecl, BUILT_IN_LONGJMP))
3247     return true;
3248 
3249   tree declname = DECL_NAME (fndecl);
3250   if (!declname)
3251     return false;
3252   const char *name = IDENTIFIER_POINTER (declname);
3253   return !strcmp (name, "setjmp") || !strcmp (name, "longjmp");
3254 }
3255 
3256 
3257 /* Helper function for scan_omp.
3258 
3259    Callback for walk_gimple_stmt used to scan for OMP directives in
3260    the current statement in GSI.  */
3261 
3262 static tree
scan_omp_1_stmt(gimple_stmt_iterator * gsi,bool * handled_ops_p,struct walk_stmt_info * wi)3263 scan_omp_1_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
3264 		 struct walk_stmt_info *wi)
3265 {
3266   gimple *stmt = gsi_stmt (*gsi);
3267   omp_context *ctx = (omp_context *) wi->info;
3268 
3269   if (gimple_has_location (stmt))
3270     input_location = gimple_location (stmt);
3271 
3272   /* Check the nesting restrictions.  */
3273   bool remove = false;
3274   if (is_gimple_omp (stmt))
3275     remove = !check_omp_nesting_restrictions (stmt, ctx);
3276   else if (is_gimple_call (stmt))
3277     {
3278       tree fndecl = gimple_call_fndecl (stmt);
3279       if (fndecl)
3280 	{
3281 	  if (setjmp_or_longjmp_p (fndecl)
3282 	      && ctx
3283 	      && gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
3284 	      && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
3285 	    {
3286 	      remove = true;
3287 	      error_at (gimple_location (stmt),
3288 			"setjmp/longjmp inside simd construct");
3289 	    }
3290 	  else if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3291 	    switch (DECL_FUNCTION_CODE (fndecl))
3292 	      {
3293 	      case BUILT_IN_GOMP_BARRIER:
3294 	      case BUILT_IN_GOMP_CANCEL:
3295 	      case BUILT_IN_GOMP_CANCELLATION_POINT:
3296 	      case BUILT_IN_GOMP_TASKYIELD:
3297 	      case BUILT_IN_GOMP_TASKWAIT:
3298 	      case BUILT_IN_GOMP_TASKGROUP_START:
3299 	      case BUILT_IN_GOMP_TASKGROUP_END:
3300 		remove = !check_omp_nesting_restrictions (stmt, ctx);
3301 		break;
3302 	      default:
3303 		break;
3304 	      }
3305 	}
3306     }
3307   if (remove)
3308     {
3309       stmt = gimple_build_nop ();
3310       gsi_replace (gsi, stmt, false);
3311     }
3312 
3313   *handled_ops_p = true;
3314 
3315   switch (gimple_code (stmt))
3316     {
3317     case GIMPLE_OMP_PARALLEL:
3318       taskreg_nesting_level++;
3319       scan_omp_parallel (gsi, ctx);
3320       taskreg_nesting_level--;
3321       break;
3322 
3323     case GIMPLE_OMP_TASK:
3324       taskreg_nesting_level++;
3325       scan_omp_task (gsi, ctx);
3326       taskreg_nesting_level--;
3327       break;
3328 
3329     case GIMPLE_OMP_FOR:
3330       if (((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
3331 	    & GF_OMP_FOR_KIND_MASK) == GF_OMP_FOR_KIND_SIMD)
3332 	  && omp_maybe_offloaded_ctx (ctx)
3333 	  && omp_max_simt_vf ())
3334 	scan_omp_simd (gsi, as_a <gomp_for *> (stmt), ctx);
3335       else
3336 	scan_omp_for (as_a <gomp_for *> (stmt), ctx);
3337       break;
3338 
3339     case GIMPLE_OMP_SECTIONS:
3340       scan_omp_sections (as_a <gomp_sections *> (stmt), ctx);
3341       break;
3342 
3343     case GIMPLE_OMP_SINGLE:
3344       scan_omp_single (as_a <gomp_single *> (stmt), ctx);
3345       break;
3346 
3347     case GIMPLE_OMP_SECTION:
3348     case GIMPLE_OMP_MASTER:
3349     case GIMPLE_OMP_ORDERED:
3350     case GIMPLE_OMP_CRITICAL:
3351     case GIMPLE_OMP_GRID_BODY:
3352       ctx = new_omp_context (stmt, ctx);
3353       scan_omp (gimple_omp_body_ptr (stmt), ctx);
3354       break;
3355 
3356     case GIMPLE_OMP_TASKGROUP:
3357       ctx = new_omp_context (stmt, ctx);
3358       scan_sharing_clauses (gimple_omp_taskgroup_clauses (stmt), ctx);
3359       scan_omp (gimple_omp_body_ptr (stmt), ctx);
3360       break;
3361 
3362     case GIMPLE_OMP_TARGET:
3363       if (is_gimple_omp_offloaded (stmt))
3364 	{
3365 	  taskreg_nesting_level++;
3366 	  scan_omp_target (as_a <gomp_target *> (stmt), ctx);
3367 	  taskreg_nesting_level--;
3368 	}
3369       else
3370 	scan_omp_target (as_a <gomp_target *> (stmt), ctx);
3371       break;
3372 
3373     case GIMPLE_OMP_TEAMS:
3374       if (gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
3375 	{
3376 	  taskreg_nesting_level++;
3377 	  scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
3378 	  taskreg_nesting_level--;
3379 	}
3380       else
3381 	scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
3382       break;
3383 
3384     case GIMPLE_BIND:
3385       {
3386 	tree var;
3387 
3388 	*handled_ops_p = false;
3389 	if (ctx)
3390 	  for (var = gimple_bind_vars (as_a <gbind *> (stmt));
3391 	       var ;
3392 	       var = DECL_CHAIN (var))
3393 	    insert_decl_map (&ctx->cb, var, var);
3394       }
3395       break;
3396     default:
3397       *handled_ops_p = false;
3398       break;
3399     }
3400 
3401   return NULL_TREE;
3402 }
3403 
3404 
3405 /* Scan all the statements starting at the current statement.  CTX
3406    contains context information about the OMP directives and
3407    clauses found during the scan.  */
3408 
3409 static void
scan_omp(gimple_seq * body_p,omp_context * ctx)3410 scan_omp (gimple_seq *body_p, omp_context *ctx)
3411 {
3412   location_t saved_location;
3413   struct walk_stmt_info wi;
3414 
3415   memset (&wi, 0, sizeof (wi));
3416   wi.info = ctx;
3417   wi.want_locations = true;
3418 
3419   saved_location = input_location;
3420   walk_gimple_seq_mod (body_p, scan_omp_1_stmt, scan_omp_1_op, &wi);
3421   input_location = saved_location;
3422 }
3423 
3424 /* Re-gimplification and code generation routines.  */
3425 
3426 /* Remove omp_member_access_dummy_var variables from gimple_bind_vars
3427    of BIND if in a method.  */
3428 
3429 static void
maybe_remove_omp_member_access_dummy_vars(gbind * bind)3430 maybe_remove_omp_member_access_dummy_vars (gbind *bind)
3431 {
3432   if (DECL_ARGUMENTS (current_function_decl)
3433       && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
3434       && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
3435 	  == POINTER_TYPE))
3436     {
3437       tree vars = gimple_bind_vars (bind);
3438       for (tree *pvar = &vars; *pvar; )
3439 	if (omp_member_access_dummy_var (*pvar))
3440 	  *pvar = DECL_CHAIN (*pvar);
3441 	else
3442 	  pvar = &DECL_CHAIN (*pvar);
3443       gimple_bind_set_vars (bind, vars);
3444     }
3445 }
3446 
3447 /* Remove omp_member_access_dummy_var variables from BLOCK_VARS of
3448    block and its subblocks.  */
3449 
3450 static void
remove_member_access_dummy_vars(tree block)3451 remove_member_access_dummy_vars (tree block)
3452 {
3453   for (tree *pvar = &BLOCK_VARS (block); *pvar; )
3454     if (omp_member_access_dummy_var (*pvar))
3455       *pvar = DECL_CHAIN (*pvar);
3456     else
3457       pvar = &DECL_CHAIN (*pvar);
3458 
3459   for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
3460     remove_member_access_dummy_vars (block);
3461 }
3462 
3463 /* If a context was created for STMT when it was scanned, return it.  */
3464 
3465 static omp_context *
maybe_lookup_ctx(gimple * stmt)3466 maybe_lookup_ctx (gimple *stmt)
3467 {
3468   splay_tree_node n;
3469   n = splay_tree_lookup (all_contexts, (splay_tree_key) stmt);
3470   return n ? (omp_context *) n->value : NULL;
3471 }
3472 
3473 
3474 /* Find the mapping for DECL in CTX or the immediately enclosing
3475    context that has a mapping for DECL.
3476 
3477    If CTX is a nested parallel directive, we may have to use the decl
3478    mappings created in CTX's parent context.  Suppose that we have the
3479    following parallel nesting (variable UIDs showed for clarity):
3480 
3481 	iD.1562 = 0;
3482      	#omp parallel shared(iD.1562)		-> outer parallel
3483 	  iD.1562 = iD.1562 + 1;
3484 
3485 	  #omp parallel shared (iD.1562)	-> inner parallel
3486 	     iD.1562 = iD.1562 - 1;
3487 
3488    Each parallel structure will create a distinct .omp_data_s structure
3489    for copying iD.1562 in/out of the directive:
3490 
3491   	outer parallel		.omp_data_s.1.i -> iD.1562
3492 	inner parallel		.omp_data_s.2.i -> iD.1562
3493 
3494    A shared variable mapping will produce a copy-out operation before
3495    the parallel directive and a copy-in operation after it.  So, in
3496    this case we would have:
3497 
3498   	iD.1562 = 0;
3499 	.omp_data_o.1.i = iD.1562;
3500 	#omp parallel shared(iD.1562)		-> outer parallel
3501 	  .omp_data_i.1 = &.omp_data_o.1
3502 	  .omp_data_i.1->i = .omp_data_i.1->i + 1;
3503 
3504 	  .omp_data_o.2.i = iD.1562;		-> **
3505 	  #omp parallel shared(iD.1562)		-> inner parallel
3506 	    .omp_data_i.2 = &.omp_data_o.2
3507 	    .omp_data_i.2->i = .omp_data_i.2->i - 1;
3508 
3509 
3510     ** This is a problem.  The symbol iD.1562 cannot be referenced
3511        inside the body of the outer parallel region.  But since we are
3512        emitting this copy operation while expanding the inner parallel
3513        directive, we need to access the CTX structure of the outer
3514        parallel directive to get the correct mapping:
3515 
3516 	  .omp_data_o.2.i = .omp_data_i.1->i
3517 
3518     Since there may be other workshare or parallel directives enclosing
3519     the parallel directive, it may be necessary to walk up the context
3520     parent chain.  This is not a problem in general because nested
3521     parallelism happens only rarely.  */
3522 
3523 static tree
lookup_decl_in_outer_ctx(tree decl,omp_context * ctx)3524 lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
3525 {
3526   tree t;
3527   omp_context *up;
3528 
3529   for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
3530     t = maybe_lookup_decl (decl, up);
3531 
3532   gcc_assert (!ctx->is_nested || t || is_global_var (decl));
3533 
3534   return t ? t : decl;
3535 }
3536 
3537 
3538 /* Similar to lookup_decl_in_outer_ctx, but return DECL if not found
3539    in outer contexts.  */
3540 
3541 static tree
maybe_lookup_decl_in_outer_ctx(tree decl,omp_context * ctx)3542 maybe_lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
3543 {
3544   tree t = NULL;
3545   omp_context *up;
3546 
3547   for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
3548     t = maybe_lookup_decl (decl, up);
3549 
3550   return t ? t : decl;
3551 }
3552 
3553 
3554 /* Construct the initialization value for reduction operation OP.  */
3555 
3556 tree
omp_reduction_init_op(location_t loc,enum tree_code op,tree type)3557 omp_reduction_init_op (location_t loc, enum tree_code op, tree type)
3558 {
3559   switch (op)
3560     {
3561     case PLUS_EXPR:
3562     case MINUS_EXPR:
3563     case BIT_IOR_EXPR:
3564     case BIT_XOR_EXPR:
3565     case TRUTH_OR_EXPR:
3566     case TRUTH_ORIF_EXPR:
3567     case TRUTH_XOR_EXPR:
3568     case NE_EXPR:
3569       return build_zero_cst (type);
3570 
3571     case MULT_EXPR:
3572     case TRUTH_AND_EXPR:
3573     case TRUTH_ANDIF_EXPR:
3574     case EQ_EXPR:
3575       return fold_convert_loc (loc, type, integer_one_node);
3576 
3577     case BIT_AND_EXPR:
3578       return fold_convert_loc (loc, type, integer_minus_one_node);
3579 
3580     case MAX_EXPR:
3581       if (SCALAR_FLOAT_TYPE_P (type))
3582 	{
3583 	  REAL_VALUE_TYPE max, min;
3584 	  if (HONOR_INFINITIES (type))
3585 	    {
3586 	      real_inf (&max);
3587 	      real_arithmetic (&min, NEGATE_EXPR, &max, NULL);
3588 	    }
3589 	  else
3590 	    real_maxval (&min, 1, TYPE_MODE (type));
3591 	  return build_real (type, min);
3592 	}
3593       else if (POINTER_TYPE_P (type))
3594 	{
3595 	  wide_int min
3596 	    = wi::min_value (TYPE_PRECISION (type), TYPE_SIGN (type));
3597 	  return wide_int_to_tree (type, min);
3598 	}
3599       else
3600 	{
3601 	  gcc_assert (INTEGRAL_TYPE_P (type));
3602 	  return TYPE_MIN_VALUE (type);
3603 	}
3604 
3605     case MIN_EXPR:
3606       if (SCALAR_FLOAT_TYPE_P (type))
3607 	{
3608 	  REAL_VALUE_TYPE max;
3609 	  if (HONOR_INFINITIES (type))
3610 	    real_inf (&max);
3611 	  else
3612 	    real_maxval (&max, 0, TYPE_MODE (type));
3613 	  return build_real (type, max);
3614 	}
3615       else if (POINTER_TYPE_P (type))
3616 	{
3617 	  wide_int max
3618 	    = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
3619 	  return wide_int_to_tree (type, max);
3620 	}
3621       else
3622 	{
3623 	  gcc_assert (INTEGRAL_TYPE_P (type));
3624 	  return TYPE_MAX_VALUE (type);
3625 	}
3626 
3627     default:
3628       gcc_unreachable ();
3629     }
3630 }
3631 
3632 /* Construct the initialization value for reduction CLAUSE.  */
3633 
3634 tree
omp_reduction_init(tree clause,tree type)3635 omp_reduction_init (tree clause, tree type)
3636 {
3637   return omp_reduction_init_op (OMP_CLAUSE_LOCATION (clause),
3638 				OMP_CLAUSE_REDUCTION_CODE (clause), type);
3639 }
3640 
3641 /* Return alignment to be assumed for var in CLAUSE, which should be
3642    OMP_CLAUSE_ALIGNED.  */
3643 
3644 static tree
omp_clause_aligned_alignment(tree clause)3645 omp_clause_aligned_alignment (tree clause)
3646 {
3647   if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
3648     return OMP_CLAUSE_ALIGNED_ALIGNMENT (clause);
3649 
3650   /* Otherwise return implementation defined alignment.  */
3651   unsigned int al = 1;
3652   opt_scalar_mode mode_iter;
3653   auto_vector_sizes sizes;
3654   targetm.vectorize.autovectorize_vector_sizes (&sizes);
3655   poly_uint64 vs = 0;
3656   for (unsigned int i = 0; i < sizes.length (); ++i)
3657     vs = ordered_max (vs, sizes[i]);
3658   static enum mode_class classes[]
3659     = { MODE_INT, MODE_VECTOR_INT, MODE_FLOAT, MODE_VECTOR_FLOAT };
3660   for (int i = 0; i < 4; i += 2)
3661     /* The for loop above dictates that we only walk through scalar classes.  */
3662     FOR_EACH_MODE_IN_CLASS (mode_iter, classes[i])
3663       {
3664 	scalar_mode mode = mode_iter.require ();
3665 	machine_mode vmode = targetm.vectorize.preferred_simd_mode (mode);
3666 	if (GET_MODE_CLASS (vmode) != classes[i + 1])
3667 	  continue;
3668 	while (maybe_ne (vs, 0U)
3669 	       && known_lt (GET_MODE_SIZE (vmode), vs)
3670 	       && GET_MODE_2XWIDER_MODE (vmode).exists ())
3671 	  vmode = GET_MODE_2XWIDER_MODE (vmode).require ();
3672 
3673 	tree type = lang_hooks.types.type_for_mode (mode, 1);
3674 	if (type == NULL_TREE || TYPE_MODE (type) != mode)
3675 	  continue;
3676 	poly_uint64 nelts = exact_div (GET_MODE_SIZE (vmode),
3677 				       GET_MODE_SIZE (mode));
3678 	type = build_vector_type (type, nelts);
3679 	if (TYPE_MODE (type) != vmode)
3680 	  continue;
3681 	if (TYPE_ALIGN_UNIT (type) > al)
3682 	  al = TYPE_ALIGN_UNIT (type);
3683       }
3684   return build_int_cst (integer_type_node, al);
3685 }
3686 
3687 
3688 /* This structure is part of the interface between lower_rec_simd_input_clauses
3689    and lower_rec_input_clauses.  */
3690 
3691 struct omplow_simd_context {
omplow_simd_contextomplow_simd_context3692   omplow_simd_context () { memset (this, 0, sizeof (*this)); }
3693   tree idx;
3694   tree lane;
3695   vec<tree, va_heap> simt_eargs;
3696   gimple_seq simt_dlist;
3697   poly_uint64_pod max_vf;
3698   bool is_simt;
3699 };
3700 
3701 /* Helper function of lower_rec_input_clauses, used for #pragma omp simd
3702    privatization.  */
3703 
3704 static bool
lower_rec_simd_input_clauses(tree new_var,omp_context * ctx,omplow_simd_context * sctx,tree & ivar,tree & lvar)3705 lower_rec_simd_input_clauses (tree new_var, omp_context *ctx,
3706 			      omplow_simd_context *sctx, tree &ivar, tree &lvar)
3707 {
3708   if (known_eq (sctx->max_vf, 0U))
3709     {
3710       sctx->max_vf = sctx->is_simt ? omp_max_simt_vf () : omp_max_vf ();
3711       if (maybe_gt (sctx->max_vf, 1U))
3712 	{
3713 	  tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3714 				    OMP_CLAUSE_SAFELEN);
3715 	  if (c)
3716 	    {
3717 	      poly_uint64 safe_len;
3718 	      if (!poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
3719 		  || maybe_lt (safe_len, 1U))
3720 		sctx->max_vf = 1;
3721 	      else
3722 		sctx->max_vf = lower_bound (sctx->max_vf, safe_len);
3723 	    }
3724 	}
3725       if (maybe_gt (sctx->max_vf, 1U))
3726 	{
3727 	  sctx->idx = create_tmp_var (unsigned_type_node);
3728 	  sctx->lane = create_tmp_var (unsigned_type_node);
3729 	}
3730     }
3731   if (known_eq (sctx->max_vf, 1U))
3732     return false;
3733 
3734   if (sctx->is_simt)
3735     {
3736       if (is_gimple_reg (new_var))
3737 	{
3738 	  ivar = lvar = new_var;
3739 	  return true;
3740 	}
3741       tree type = TREE_TYPE (new_var), ptype = build_pointer_type (type);
3742       ivar = lvar = create_tmp_var (type);
3743       TREE_ADDRESSABLE (ivar) = 1;
3744       DECL_ATTRIBUTES (ivar) = tree_cons (get_identifier ("omp simt private"),
3745 					  NULL, DECL_ATTRIBUTES (ivar));
3746       sctx->simt_eargs.safe_push (build1 (ADDR_EXPR, ptype, ivar));
3747       tree clobber = build_constructor (type, NULL);
3748       TREE_THIS_VOLATILE (clobber) = 1;
3749       gimple *g = gimple_build_assign (ivar, clobber);
3750       gimple_seq_add_stmt (&sctx->simt_dlist, g);
3751     }
3752   else
3753     {
3754       tree atype = build_array_type_nelts (TREE_TYPE (new_var), sctx->max_vf);
3755       tree avar = create_tmp_var_raw (atype);
3756       if (TREE_ADDRESSABLE (new_var))
3757 	TREE_ADDRESSABLE (avar) = 1;
3758       DECL_ATTRIBUTES (avar)
3759 	= tree_cons (get_identifier ("omp simd array"), NULL,
3760 		     DECL_ATTRIBUTES (avar));
3761       gimple_add_tmp_var (avar);
3762       ivar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, sctx->idx,
3763 		     NULL_TREE, NULL_TREE);
3764       lvar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, sctx->lane,
3765 		     NULL_TREE, NULL_TREE);
3766     }
3767   if (DECL_P (new_var))
3768     {
3769       SET_DECL_VALUE_EXPR (new_var, lvar);
3770       DECL_HAS_VALUE_EXPR_P (new_var) = 1;
3771     }
3772   return true;
3773 }
3774 
3775 /* Helper function of lower_rec_input_clauses.  For a reference
3776    in simd reduction, add an underlying variable it will reference.  */
3777 
3778 static void
handle_simd_reference(location_t loc,tree new_vard,gimple_seq * ilist)3779 handle_simd_reference (location_t loc, tree new_vard, gimple_seq *ilist)
3780 {
3781   tree z = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_vard)));
3782   if (TREE_CONSTANT (z))
3783     {
3784       z = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_vard)),
3785 			      get_name (new_vard));
3786       gimple_add_tmp_var (z);
3787       TREE_ADDRESSABLE (z) = 1;
3788       z = build_fold_addr_expr_loc (loc, z);
3789       gimplify_assign (new_vard, z, ilist);
3790     }
3791 }
3792 
3793 /* Helper function for lower_rec_input_clauses.  Emit into ilist sequence
3794    code to emit (type) (tskred_temp[idx]).  */
3795 
3796 static tree
task_reduction_read(gimple_seq * ilist,tree tskred_temp,tree type,unsigned idx)3797 task_reduction_read (gimple_seq *ilist, tree tskred_temp, tree type,
3798 		     unsigned idx)
3799 {
3800   unsigned HOST_WIDE_INT sz
3801     = tree_to_uhwi (TYPE_SIZE_UNIT (pointer_sized_int_node));
3802   tree r = build2 (MEM_REF, pointer_sized_int_node,
3803 		   tskred_temp, build_int_cst (TREE_TYPE (tskred_temp),
3804 					       idx * sz));
3805   tree v = create_tmp_var (pointer_sized_int_node);
3806   gimple *g = gimple_build_assign (v, r);
3807   gimple_seq_add_stmt (ilist, g);
3808   if (!useless_type_conversion_p (type, pointer_sized_int_node))
3809     {
3810       v = create_tmp_var (type);
3811       g = gimple_build_assign (v, NOP_EXPR, gimple_assign_lhs (g));
3812       gimple_seq_add_stmt (ilist, g);
3813     }
3814   return v;
3815 }
3816 
3817 /* Generate code to implement the input clauses, FIRSTPRIVATE and COPYIN,
3818    from the receiver (aka child) side and initializers for REFERENCE_TYPE
3819    private variables.  Initialization statements go in ILIST, while calls
3820    to destructors go in DLIST.  */
3821 
3822 static void
lower_rec_input_clauses(tree clauses,gimple_seq * ilist,gimple_seq * dlist,omp_context * ctx,struct omp_for_data * fd)3823 lower_rec_input_clauses (tree clauses, gimple_seq *ilist, gimple_seq *dlist,
3824 			 omp_context *ctx, struct omp_for_data *fd)
3825 {
3826   tree c, dtor, copyin_seq, x, ptr;
3827   bool copyin_by_ref = false;
3828   bool lastprivate_firstprivate = false;
3829   bool reduction_omp_orig_ref = false;
3830   int pass;
3831   bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
3832 		  && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD);
3833   omplow_simd_context sctx = omplow_simd_context ();
3834   tree simt_lane = NULL_TREE, simtrec = NULL_TREE;
3835   tree ivar = NULL_TREE, lvar = NULL_TREE, uid = NULL_TREE;
3836   gimple_seq llist[3] = { };
3837   tree nonconst_simd_if = NULL_TREE;
3838 
3839   copyin_seq = NULL;
3840   sctx.is_simt = is_simd && omp_find_clause (clauses, OMP_CLAUSE__SIMT_);
3841 
3842   /* Set max_vf=1 (which will later enforce safelen=1) in simd loops
3843      with data sharing clauses referencing variable sized vars.  That
3844      is unnecessarily hard to support and very unlikely to result in
3845      vectorized code anyway.  */
3846   if (is_simd)
3847     for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
3848       switch (OMP_CLAUSE_CODE (c))
3849 	{
3850 	case OMP_CLAUSE_LINEAR:
3851 	  if (OMP_CLAUSE_LINEAR_ARRAY (c))
3852 	    sctx.max_vf = 1;
3853 	  /* FALLTHRU */
3854 	case OMP_CLAUSE_PRIVATE:
3855 	case OMP_CLAUSE_FIRSTPRIVATE:
3856 	case OMP_CLAUSE_LASTPRIVATE:
3857 	  if (is_variable_sized (OMP_CLAUSE_DECL (c)))
3858 	    sctx.max_vf = 1;
3859 	  break;
3860 	case OMP_CLAUSE_REDUCTION:
3861 	case OMP_CLAUSE_IN_REDUCTION:
3862 	  if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
3863 	      || is_variable_sized (OMP_CLAUSE_DECL (c)))
3864 	    sctx.max_vf = 1;
3865 	  break;
3866 	case OMP_CLAUSE_IF:
3867 	  if (integer_zerop (OMP_CLAUSE_IF_EXPR (c)))
3868 	    sctx.max_vf = 1;
3869 	  else if (TREE_CODE (OMP_CLAUSE_IF_EXPR (c)) != INTEGER_CST)
3870 	    nonconst_simd_if = OMP_CLAUSE_IF_EXPR (c);
3871 	  break;
3872         case OMP_CLAUSE_SIMDLEN:
3873 	  if (integer_onep (OMP_CLAUSE_SIMDLEN_EXPR (c)))
3874 	    sctx.max_vf = 1;
3875 	  break;
3876 	default:
3877 	  continue;
3878 	}
3879 
3880   /* Add a placeholder for simduid.  */
3881   if (sctx.is_simt && maybe_ne (sctx.max_vf, 1U))
3882     sctx.simt_eargs.safe_push (NULL_TREE);
3883 
3884   unsigned task_reduction_cnt = 0;
3885   unsigned task_reduction_cntorig = 0;
3886   unsigned task_reduction_cnt_full = 0;
3887   unsigned task_reduction_cntorig_full = 0;
3888   unsigned task_reduction_other_cnt = 0;
3889   tree tskred_atype = NULL_TREE, tskred_avar = NULL_TREE;
3890   tree tskred_base = NULL_TREE, tskred_temp = NULL_TREE;
3891   /* Do all the fixed sized types in the first pass, and the variable sized
3892      types in the second pass.  This makes sure that the scalar arguments to
3893      the variable sized types are processed before we use them in the
3894      variable sized operations.  For task reductions we use 4 passes, in the
3895      first two we ignore them, in the third one gather arguments for
3896      GOMP_task_reduction_remap call and in the last pass actually handle
3897      the task reductions.  */
3898   for (pass = 0; pass < ((task_reduction_cnt || task_reduction_other_cnt)
3899 			 ? 4 : 2); ++pass)
3900     {
3901       if (pass == 2 && task_reduction_cnt)
3902 	{
3903 	  tskred_atype
3904 	    = build_array_type_nelts (ptr_type_node, task_reduction_cnt
3905 						     + task_reduction_cntorig);
3906 	  tskred_avar = create_tmp_var_raw (tskred_atype);
3907 	  gimple_add_tmp_var (tskred_avar);
3908 	  TREE_ADDRESSABLE (tskred_avar) = 1;
3909 	  task_reduction_cnt_full = task_reduction_cnt;
3910 	  task_reduction_cntorig_full = task_reduction_cntorig;
3911 	}
3912       else if (pass == 3 && task_reduction_cnt)
3913 	{
3914 	  x = builtin_decl_explicit (BUILT_IN_GOMP_TASK_REDUCTION_REMAP);
3915 	  gimple *g
3916 	    = gimple_build_call (x, 3, size_int (task_reduction_cnt),
3917 				 size_int (task_reduction_cntorig),
3918 				 build_fold_addr_expr (tskred_avar));
3919 	  gimple_seq_add_stmt (ilist, g);
3920 	}
3921       if (pass == 3 && task_reduction_other_cnt)
3922 	{
3923 	  /* For reduction clauses, build
3924 	     tskred_base = (void *) tskred_temp[2]
3925 			   + omp_get_thread_num () * tskred_temp[1]
3926 	     or if tskred_temp[1] is known to be constant, that constant
3927 	     directly.  This is the start of the private reduction copy block
3928 	     for the current thread.  */
3929 	  tree v = create_tmp_var (integer_type_node);
3930 	  x = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
3931 	  gimple *g = gimple_build_call (x, 0);
3932 	  gimple_call_set_lhs (g, v);
3933 	  gimple_seq_add_stmt (ilist, g);
3934 	  c = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
3935 	  tskred_temp = OMP_CLAUSE_DECL (c);
3936 	  if (is_taskreg_ctx (ctx))
3937 	    tskred_temp = lookup_decl (tskred_temp, ctx);
3938 	  tree v2 = create_tmp_var (sizetype);
3939 	  g = gimple_build_assign (v2, NOP_EXPR, v);
3940 	  gimple_seq_add_stmt (ilist, g);
3941 	  if (ctx->task_reductions[0])
3942 	    v = fold_convert (sizetype, ctx->task_reductions[0]);
3943 	  else
3944 	    v = task_reduction_read (ilist, tskred_temp, sizetype, 1);
3945 	  tree v3 = create_tmp_var (sizetype);
3946 	  g = gimple_build_assign (v3, MULT_EXPR, v2, v);
3947 	  gimple_seq_add_stmt (ilist, g);
3948 	  v = task_reduction_read (ilist, tskred_temp, ptr_type_node, 2);
3949 	  tskred_base = create_tmp_var (ptr_type_node);
3950 	  g = gimple_build_assign (tskred_base, POINTER_PLUS_EXPR, v, v3);
3951 	  gimple_seq_add_stmt (ilist, g);
3952 	}
3953       task_reduction_cnt = 0;
3954       task_reduction_cntorig = 0;
3955       task_reduction_other_cnt = 0;
3956       for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
3957 	{
3958 	  enum omp_clause_code c_kind = OMP_CLAUSE_CODE (c);
3959 	  tree var, new_var;
3960 	  bool by_ref;
3961 	  location_t clause_loc = OMP_CLAUSE_LOCATION (c);
3962 	  bool task_reduction_p = false;
3963 	  bool task_reduction_needs_orig_p = false;
3964 	  tree cond = NULL_TREE;
3965 
3966 	  switch (c_kind)
3967 	    {
3968 	    case OMP_CLAUSE_PRIVATE:
3969 	      if (OMP_CLAUSE_PRIVATE_DEBUG (c))
3970 		continue;
3971 	      break;
3972 	    case OMP_CLAUSE_SHARED:
3973 	      /* Ignore shared directives in teams construct inside
3974 		 of target construct.  */
3975 	      if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
3976 		  && !is_host_teams_ctx (ctx))
3977 		continue;
3978 	      if (maybe_lookup_decl (OMP_CLAUSE_DECL (c), ctx) == NULL)
3979 		{
3980 		  gcc_assert (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c)
3981 			      || is_global_var (OMP_CLAUSE_DECL (c)));
3982 		  continue;
3983 		}
3984 	    case OMP_CLAUSE_FIRSTPRIVATE:
3985 	    case OMP_CLAUSE_COPYIN:
3986 	      break;
3987 	    case OMP_CLAUSE_LINEAR:
3988 	      if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
3989 		  && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
3990 		lastprivate_firstprivate = true;
3991 	      break;
3992 	    case OMP_CLAUSE_REDUCTION:
3993 	    case OMP_CLAUSE_IN_REDUCTION:
3994 	      if (is_task_ctx (ctx) || OMP_CLAUSE_REDUCTION_TASK (c))
3995 		{
3996 		  task_reduction_p = true;
3997 		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
3998 		    {
3999 		      task_reduction_other_cnt++;
4000 		      if (pass == 2)
4001 			continue;
4002 		    }
4003 		  else
4004 		    task_reduction_cnt++;
4005 		  if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
4006 		    {
4007 		      var = OMP_CLAUSE_DECL (c);
4008 		      /* If var is a global variable that isn't privatized
4009 			 in outer contexts, we don't need to look up the
4010 			 original address, it is always the address of the
4011 			 global variable itself.  */
4012 		      if (!DECL_P (var)
4013 			  || omp_is_reference (var)
4014 			  || !is_global_var
4015 				(maybe_lookup_decl_in_outer_ctx (var, ctx)))
4016 			{
4017 			  task_reduction_needs_orig_p = true;
4018 			  if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
4019 			    task_reduction_cntorig++;
4020 			}
4021 		    }
4022 		}
4023 	      else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
4024 		reduction_omp_orig_ref = true;
4025 	      break;
4026 	    case OMP_CLAUSE__REDUCTEMP_:
4027 	      if (!is_taskreg_ctx (ctx))
4028 		continue;
4029 	      /* FALLTHRU */
4030 	    case OMP_CLAUSE__LOOPTEMP_:
4031 	      /* Handle _looptemp_/_reductemp_ clauses only on
4032 		 parallel/task.  */
4033 	      if (fd)
4034 		continue;
4035 	      break;
4036 	    case OMP_CLAUSE_LASTPRIVATE:
4037 	      if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
4038 		{
4039 		  lastprivate_firstprivate = true;
4040 		  if (pass != 0 || is_taskloop_ctx (ctx))
4041 		    continue;
4042 		}
4043 	      /* Even without corresponding firstprivate, if
4044 		 decl is Fortran allocatable, it needs outer var
4045 		 reference.  */
4046 	      else if (pass == 0
4047 		       && lang_hooks.decls.omp_private_outer_ref
4048 							(OMP_CLAUSE_DECL (c)))
4049 		lastprivate_firstprivate = true;
4050 	      break;
4051 	    case OMP_CLAUSE_ALIGNED:
4052 	      if (pass != 1)
4053 		continue;
4054 	      var = OMP_CLAUSE_DECL (c);
4055 	      if (TREE_CODE (TREE_TYPE (var)) == POINTER_TYPE
4056 		  && !is_global_var (var))
4057 		{
4058 		  new_var = maybe_lookup_decl (var, ctx);
4059 		  if (new_var == NULL_TREE)
4060 		    new_var = maybe_lookup_decl_in_outer_ctx (var, ctx);
4061 		  x = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
4062 		  tree alarg = omp_clause_aligned_alignment (c);
4063 		  alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
4064 		  x = build_call_expr_loc (clause_loc, x, 2, new_var, alarg);
4065 		  x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
4066 		  x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
4067 		  gimplify_and_add (x, ilist);
4068 		}
4069 	      else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
4070 		       && is_global_var (var))
4071 		{
4072 		  tree ptype = build_pointer_type (TREE_TYPE (var)), t, t2;
4073 		  new_var = lookup_decl (var, ctx);
4074 		  t = maybe_lookup_decl_in_outer_ctx (var, ctx);
4075 		  t = build_fold_addr_expr_loc (clause_loc, t);
4076 		  t2 = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
4077 		  tree alarg = omp_clause_aligned_alignment (c);
4078 		  alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
4079 		  t = build_call_expr_loc (clause_loc, t2, 2, t, alarg);
4080 		  t = fold_convert_loc (clause_loc, ptype, t);
4081 		  x = create_tmp_var (ptype);
4082 		  t = build2 (MODIFY_EXPR, ptype, x, t);
4083 		  gimplify_and_add (t, ilist);
4084 		  t = build_simple_mem_ref_loc (clause_loc, x);
4085 		  SET_DECL_VALUE_EXPR (new_var, t);
4086 		  DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4087 		}
4088 	      continue;
4089 	    default:
4090 	      continue;
4091 	    }
4092 
4093 	  if (task_reduction_p != (pass >= 2))
4094 	    continue;
4095 
4096 	  new_var = var = OMP_CLAUSE_DECL (c);
4097 	  if ((c_kind == OMP_CLAUSE_REDUCTION
4098 	       || c_kind == OMP_CLAUSE_IN_REDUCTION)
4099 	      && TREE_CODE (var) == MEM_REF)
4100 	    {
4101 	      var = TREE_OPERAND (var, 0);
4102 	      if (TREE_CODE (var) == POINTER_PLUS_EXPR)
4103 		var = TREE_OPERAND (var, 0);
4104 	      if (TREE_CODE (var) == INDIRECT_REF
4105 		  || TREE_CODE (var) == ADDR_EXPR)
4106 		var = TREE_OPERAND (var, 0);
4107 	      if (is_variable_sized (var))
4108 		{
4109 		  gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
4110 		  var = DECL_VALUE_EXPR (var);
4111 		  gcc_assert (TREE_CODE (var) == INDIRECT_REF);
4112 		  var = TREE_OPERAND (var, 0);
4113 		  gcc_assert (DECL_P (var));
4114 		}
4115 	      new_var = var;
4116 	    }
4117 	  if (c_kind != OMP_CLAUSE_COPYIN)
4118 	    new_var = lookup_decl (var, ctx);
4119 
4120 	  if (c_kind == OMP_CLAUSE_SHARED || c_kind == OMP_CLAUSE_COPYIN)
4121 	    {
4122 	      if (pass != 0)
4123 		continue;
4124 	    }
4125 	  /* C/C++ array section reductions.  */
4126 	  else if ((c_kind == OMP_CLAUSE_REDUCTION
4127 		    || c_kind == OMP_CLAUSE_IN_REDUCTION)
4128 		   && var != OMP_CLAUSE_DECL (c))
4129 	    {
4130 	      if (pass == 0)
4131 		continue;
4132 
4133 	      tree bias = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
4134 	      tree orig_var = TREE_OPERAND (OMP_CLAUSE_DECL (c), 0);
4135 
4136 	      if (TREE_CODE (orig_var) == POINTER_PLUS_EXPR)
4137 		{
4138 		  tree b = TREE_OPERAND (orig_var, 1);
4139 		  b = maybe_lookup_decl (b, ctx);
4140 		  if (b == NULL)
4141 		    {
4142 		      b = TREE_OPERAND (orig_var, 1);
4143 		      b = maybe_lookup_decl_in_outer_ctx (b, ctx);
4144 		    }
4145 		  if (integer_zerop (bias))
4146 		    bias = b;
4147 		  else
4148 		    {
4149 		      bias = fold_convert_loc (clause_loc,
4150 					       TREE_TYPE (b), bias);
4151 		      bias = fold_build2_loc (clause_loc, PLUS_EXPR,
4152 					      TREE_TYPE (b), b, bias);
4153 		    }
4154 		  orig_var = TREE_OPERAND (orig_var, 0);
4155 		}
4156 	      if (pass == 2)
4157 		{
4158 		  tree out = maybe_lookup_decl_in_outer_ctx (var, ctx);
4159 		  if (is_global_var (out)
4160 		      && TREE_CODE (TREE_TYPE (out)) != POINTER_TYPE
4161 		      && (TREE_CODE (TREE_TYPE (out)) != REFERENCE_TYPE
4162 			  || (TREE_CODE (TREE_TYPE (TREE_TYPE (out)))
4163 			      != POINTER_TYPE)))
4164 		    x = var;
4165 		  else
4166 		    {
4167 		      bool by_ref = use_pointer_for_field (var, NULL);
4168 		      x = build_receiver_ref (var, by_ref, ctx);
4169 		      if (TREE_CODE (TREE_TYPE (var)) == REFERENCE_TYPE
4170 			  && (TREE_CODE (TREE_TYPE (TREE_TYPE (var)))
4171 			      == POINTER_TYPE))
4172 			x = build_fold_addr_expr (x);
4173 		    }
4174 		  if (TREE_CODE (orig_var) == INDIRECT_REF)
4175 		    x = build_simple_mem_ref (x);
4176 		  else if (TREE_CODE (orig_var) == ADDR_EXPR)
4177 		    {
4178 		      if (var == TREE_OPERAND (orig_var, 0))
4179 			x = build_fold_addr_expr (x);
4180 		    }
4181 		  bias = fold_convert (sizetype, bias);
4182 		  x = fold_convert (ptr_type_node, x);
4183 		  x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
4184 				       TREE_TYPE (x), x, bias);
4185 		  unsigned cnt = task_reduction_cnt - 1;
4186 		  if (!task_reduction_needs_orig_p)
4187 		    cnt += (task_reduction_cntorig_full
4188 			    - task_reduction_cntorig);
4189 		  else
4190 		    cnt = task_reduction_cntorig - 1;
4191 		  tree r = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
4192 				   size_int (cnt), NULL_TREE, NULL_TREE);
4193 		  gimplify_assign (r, x, ilist);
4194 		  continue;
4195 		}
4196 
4197 	      if (TREE_CODE (orig_var) == INDIRECT_REF
4198 		  || TREE_CODE (orig_var) == ADDR_EXPR)
4199 		orig_var = TREE_OPERAND (orig_var, 0);
4200 	      tree d = OMP_CLAUSE_DECL (c);
4201 	      tree type = TREE_TYPE (d);
4202 	      gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
4203 	      tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
4204 	      const char *name = get_name (orig_var);
4205 	      if (pass == 3)
4206 		{
4207 		  tree xv = create_tmp_var (ptr_type_node);
4208 		  if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
4209 		    {
4210 		      unsigned cnt = task_reduction_cnt - 1;
4211 		      if (!task_reduction_needs_orig_p)
4212 			cnt += (task_reduction_cntorig_full
4213 				- task_reduction_cntorig);
4214 		      else
4215 			cnt = task_reduction_cntorig - 1;
4216 		      x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
4217 				  size_int (cnt), NULL_TREE, NULL_TREE);
4218 
4219 		      gimple *g = gimple_build_assign (xv, x);
4220 		      gimple_seq_add_stmt (ilist, g);
4221 		    }
4222 		  else
4223 		    {
4224 		      unsigned int idx = *ctx->task_reduction_map->get (c);
4225 		      tree off;
4226 		      if (ctx->task_reductions[1 + idx])
4227 			off = fold_convert (sizetype,
4228 					    ctx->task_reductions[1 + idx]);
4229 		      else
4230 			off = task_reduction_read (ilist, tskred_temp, sizetype,
4231 						   7 + 3 * idx + 1);
4232 		      gimple *g = gimple_build_assign (xv, POINTER_PLUS_EXPR,
4233 						       tskred_base, off);
4234 		      gimple_seq_add_stmt (ilist, g);
4235 		    }
4236 		  x = fold_convert (build_pointer_type (boolean_type_node),
4237 				    xv);
4238 		  if (TREE_CONSTANT (v))
4239 		    x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (x), x,
4240 				     TYPE_SIZE_UNIT (type));
4241 		  else
4242 		    {
4243 		      tree t = maybe_lookup_decl (v, ctx);
4244 		      if (t)
4245 			v = t;
4246 		      else
4247 			v = maybe_lookup_decl_in_outer_ctx (v, ctx);
4248 		      gimplify_expr (&v, ilist, NULL, is_gimple_val,
4249 				     fb_rvalue);
4250 		      t = fold_build2_loc (clause_loc, PLUS_EXPR,
4251 					   TREE_TYPE (v), v,
4252 					   build_int_cst (TREE_TYPE (v), 1));
4253 		      t = fold_build2_loc (clause_loc, MULT_EXPR,
4254 					   TREE_TYPE (v), t,
4255 					   TYPE_SIZE_UNIT (TREE_TYPE (type)));
4256 		      x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (x), x, t);
4257 		    }
4258 		  cond = create_tmp_var (TREE_TYPE (x));
4259 		  gimplify_assign (cond, x, ilist);
4260 		  x = xv;
4261 		}
4262 	      else if (TREE_CONSTANT (v))
4263 		{
4264 		  x = create_tmp_var_raw (type, name);
4265 		  gimple_add_tmp_var (x);
4266 		  TREE_ADDRESSABLE (x) = 1;
4267 		  x = build_fold_addr_expr_loc (clause_loc, x);
4268 		}
4269 	      else
4270 		{
4271 		  tree atmp
4272 		    = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
4273 		  tree t = maybe_lookup_decl (v, ctx);
4274 		  if (t)
4275 		    v = t;
4276 		  else
4277 		    v = maybe_lookup_decl_in_outer_ctx (v, ctx);
4278 		  gimplify_expr (&v, ilist, NULL, is_gimple_val, fb_rvalue);
4279 		  t = fold_build2_loc (clause_loc, PLUS_EXPR,
4280 				       TREE_TYPE (v), v,
4281 				       build_int_cst (TREE_TYPE (v), 1));
4282 		  t = fold_build2_loc (clause_loc, MULT_EXPR,
4283 				       TREE_TYPE (v), t,
4284 				       TYPE_SIZE_UNIT (TREE_TYPE (type)));
4285 		  tree al = size_int (TYPE_ALIGN (TREE_TYPE (type)));
4286 		  x = build_call_expr_loc (clause_loc, atmp, 2, t, al);
4287 		}
4288 
4289 	      tree ptype = build_pointer_type (TREE_TYPE (type));
4290 	      x = fold_convert_loc (clause_loc, ptype, x);
4291 	      tree y = create_tmp_var (ptype, name);
4292 	      gimplify_assign (y, x, ilist);
4293 	      x = y;
4294 	      tree yb = y;
4295 
4296 	      if (!integer_zerop (bias))
4297 		{
4298 		  bias = fold_convert_loc (clause_loc, pointer_sized_int_node,
4299 					   bias);
4300 		  yb = fold_convert_loc (clause_loc, pointer_sized_int_node,
4301 					 x);
4302 		  yb = fold_build2_loc (clause_loc, MINUS_EXPR,
4303 					pointer_sized_int_node, yb, bias);
4304 		  x = fold_convert_loc (clause_loc, TREE_TYPE (x), yb);
4305 		  yb = create_tmp_var (ptype, name);
4306 		  gimplify_assign (yb, x, ilist);
4307 		  x = yb;
4308 		}
4309 
4310 	      d = TREE_OPERAND (d, 0);
4311 	      if (TREE_CODE (d) == POINTER_PLUS_EXPR)
4312 		d = TREE_OPERAND (d, 0);
4313 	      if (TREE_CODE (d) == ADDR_EXPR)
4314 		{
4315 		  if (orig_var != var)
4316 		    {
4317 		      gcc_assert (is_variable_sized (orig_var));
4318 		      x = fold_convert_loc (clause_loc, TREE_TYPE (new_var),
4319 					    x);
4320 		      gimplify_assign (new_var, x, ilist);
4321 		      tree new_orig_var = lookup_decl (orig_var, ctx);
4322 		      tree t = build_fold_indirect_ref (new_var);
4323 		      DECL_IGNORED_P (new_var) = 0;
4324 		      TREE_THIS_NOTRAP (t) = 1;
4325 		      SET_DECL_VALUE_EXPR (new_orig_var, t);
4326 		      DECL_HAS_VALUE_EXPR_P (new_orig_var) = 1;
4327 		    }
4328 		  else
4329 		    {
4330 		      x = build2 (MEM_REF, TREE_TYPE (new_var), x,
4331 				  build_int_cst (ptype, 0));
4332 		      SET_DECL_VALUE_EXPR (new_var, x);
4333 		      DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4334 		    }
4335 		}
4336 	      else
4337 		{
4338 		  gcc_assert (orig_var == var);
4339 		  if (TREE_CODE (d) == INDIRECT_REF)
4340 		    {
4341 		      x = create_tmp_var (ptype, name);
4342 		      TREE_ADDRESSABLE (x) = 1;
4343 		      gimplify_assign (x, yb, ilist);
4344 		      x = build_fold_addr_expr_loc (clause_loc, x);
4345 		    }
4346 		  x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
4347 		  gimplify_assign (new_var, x, ilist);
4348 		}
4349 	      /* GOMP_taskgroup_reduction_register memsets the whole
4350 		 array to zero.  If the initializer is zero, we don't
4351 		 need to initialize it again, just mark it as ever
4352 		 used unconditionally, i.e. cond = true.  */
4353 	      if (cond
4354 		  && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) == NULL_TREE
4355 		  && initializer_zerop (omp_reduction_init (c,
4356 							    TREE_TYPE (type))))
4357 		{
4358 		  gimple *g = gimple_build_assign (build_simple_mem_ref (cond),
4359 						   boolean_true_node);
4360 		  gimple_seq_add_stmt (ilist, g);
4361 		  continue;
4362 		}
4363 	      tree end = create_artificial_label (UNKNOWN_LOCATION);
4364 	      if (cond)
4365 		{
4366 		  gimple *g;
4367 		  if (!is_parallel_ctx (ctx))
4368 		    {
4369 		      tree condv = create_tmp_var (boolean_type_node);
4370 		      g = gimple_build_assign (condv,
4371 					       build_simple_mem_ref (cond));
4372 		      gimple_seq_add_stmt (ilist, g);
4373 		      tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
4374 		      g = gimple_build_cond (NE_EXPR, condv,
4375 					     boolean_false_node, end, lab1);
4376 		      gimple_seq_add_stmt (ilist, g);
4377 		      gimple_seq_add_stmt (ilist, gimple_build_label (lab1));
4378 		    }
4379 		  g = gimple_build_assign (build_simple_mem_ref (cond),
4380 					   boolean_true_node);
4381 		  gimple_seq_add_stmt (ilist, g);
4382 		}
4383 
4384 	      tree y1 = create_tmp_var (ptype);
4385 	      gimplify_assign (y1, y, ilist);
4386 	      tree i2 = NULL_TREE, y2 = NULL_TREE;
4387 	      tree body2 = NULL_TREE, end2 = NULL_TREE;
4388 	      tree y3 = NULL_TREE, y4 = NULL_TREE;
4389 	      if (task_reduction_needs_orig_p)
4390 		{
4391 		  y3 = create_tmp_var (ptype);
4392 		  tree ref;
4393 		  if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
4394 		    ref = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
4395 				  size_int (task_reduction_cnt_full
4396 					    + task_reduction_cntorig - 1),
4397 				  NULL_TREE, NULL_TREE);
4398 		  else
4399 		    {
4400 		      unsigned int idx = *ctx->task_reduction_map->get (c);
4401 		      ref = task_reduction_read (ilist, tskred_temp, ptype,
4402 						 7 + 3 * idx);
4403 		    }
4404 		  gimplify_assign (y3, ref, ilist);
4405 		}
4406 	      else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) || is_simd)
4407 		{
4408 		  if (pass != 3)
4409 		    {
4410 		      y2 = create_tmp_var (ptype);
4411 		      gimplify_assign (y2, y, ilist);
4412 		    }
4413 		  if (is_simd || OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
4414 		    {
4415 		      tree ref = build_outer_var_ref (var, ctx);
4416 		      /* For ref build_outer_var_ref already performs this.  */
4417 		      if (TREE_CODE (d) == INDIRECT_REF)
4418 			gcc_assert (omp_is_reference (var));
4419 		      else if (TREE_CODE (d) == ADDR_EXPR)
4420 			ref = build_fold_addr_expr (ref);
4421 		      else if (omp_is_reference (var))
4422 			ref = build_fold_addr_expr (ref);
4423 		      ref = fold_convert_loc (clause_loc, ptype, ref);
4424 		      if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
4425 			  && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
4426 			{
4427 			  y3 = create_tmp_var (ptype);
4428 			  gimplify_assign (y3, unshare_expr (ref), ilist);
4429 			}
4430 		      if (is_simd)
4431 			{
4432 			  y4 = create_tmp_var (ptype);
4433 			  gimplify_assign (y4, ref, dlist);
4434 			}
4435 		    }
4436 		}
4437 	      tree i = create_tmp_var (TREE_TYPE (v));
4438 	      gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), ilist);
4439 	      tree body = create_artificial_label (UNKNOWN_LOCATION);
4440 	      gimple_seq_add_stmt (ilist, gimple_build_label (body));
4441 	      if (y2)
4442 		{
4443 		  i2 = create_tmp_var (TREE_TYPE (v));
4444 		  gimplify_assign (i2, build_int_cst (TREE_TYPE (v), 0), dlist);
4445 		  body2 = create_artificial_label (UNKNOWN_LOCATION);
4446 		  end2 = create_artificial_label (UNKNOWN_LOCATION);
4447 		  gimple_seq_add_stmt (dlist, gimple_build_label (body2));
4448 		}
4449 	      if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
4450 		{
4451 		  tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
4452 		  tree decl_placeholder
4453 		    = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
4454 		  SET_DECL_VALUE_EXPR (decl_placeholder,
4455 				       build_simple_mem_ref (y1));
4456 		  DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
4457 		  SET_DECL_VALUE_EXPR (placeholder,
4458 				       y3 ? build_simple_mem_ref (y3)
4459 				       : error_mark_node);
4460 		  DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
4461 		  x = lang_hooks.decls.omp_clause_default_ctor
4462 				(c, build_simple_mem_ref (y1),
4463 				 y3 ? build_simple_mem_ref (y3) : NULL_TREE);
4464 		  if (x)
4465 		    gimplify_and_add (x, ilist);
4466 		  if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
4467 		    {
4468 		      gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
4469 		      lower_omp (&tseq, ctx);
4470 		      gimple_seq_add_seq (ilist, tseq);
4471 		    }
4472 		  OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
4473 		  if (is_simd)
4474 		    {
4475 		      SET_DECL_VALUE_EXPR (decl_placeholder,
4476 					   build_simple_mem_ref (y2));
4477 		      SET_DECL_VALUE_EXPR (placeholder,
4478 					   build_simple_mem_ref (y4));
4479 		      gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
4480 		      lower_omp (&tseq, ctx);
4481 		      gimple_seq_add_seq (dlist, tseq);
4482 		      OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
4483 		    }
4484 		  DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
4485 		  DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 0;
4486 		  if (y2)
4487 		    {
4488 		      x = lang_hooks.decls.omp_clause_dtor
4489 						(c, build_simple_mem_ref (y2));
4490 		      if (x)
4491 			{
4492 			  gimple_seq tseq = NULL;
4493 			  dtor = x;
4494 			  gimplify_stmt (&dtor, &tseq);
4495 			  gimple_seq_add_seq (dlist, tseq);
4496 			}
4497 		    }
4498 		}
4499 	      else
4500 		{
4501 		  x = omp_reduction_init (c, TREE_TYPE (type));
4502 		  enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
4503 
4504 		  /* reduction(-:var) sums up the partial results, so it
4505 		     acts identically to reduction(+:var).  */
4506 		  if (code == MINUS_EXPR)
4507 		    code = PLUS_EXPR;
4508 
4509 		  gimplify_assign (build_simple_mem_ref (y1), x, ilist);
4510 		  if (is_simd)
4511 		    {
4512 		      x = build2 (code, TREE_TYPE (type),
4513 				  build_simple_mem_ref (y4),
4514 				  build_simple_mem_ref (y2));
4515 		      gimplify_assign (build_simple_mem_ref (y4), x, dlist);
4516 		    }
4517 		}
4518 	      gimple *g
4519 		= gimple_build_assign (y1, POINTER_PLUS_EXPR, y1,
4520 				       TYPE_SIZE_UNIT (TREE_TYPE (type)));
4521 	      gimple_seq_add_stmt (ilist, g);
4522 	      if (y3)
4523 		{
4524 		  g = gimple_build_assign (y3, POINTER_PLUS_EXPR, y3,
4525 					   TYPE_SIZE_UNIT (TREE_TYPE (type)));
4526 		  gimple_seq_add_stmt (ilist, g);
4527 		}
4528 	      g = gimple_build_assign (i, PLUS_EXPR, i,
4529 				       build_int_cst (TREE_TYPE (i), 1));
4530 	      gimple_seq_add_stmt (ilist, g);
4531 	      g = gimple_build_cond (LE_EXPR, i, v, body, end);
4532 	      gimple_seq_add_stmt (ilist, g);
4533 	      gimple_seq_add_stmt (ilist, gimple_build_label (end));
4534 	      if (y2)
4535 		{
4536 		  g = gimple_build_assign (y2, POINTER_PLUS_EXPR, y2,
4537 					   TYPE_SIZE_UNIT (TREE_TYPE (type)));
4538 		  gimple_seq_add_stmt (dlist, g);
4539 		  if (y4)
4540 		    {
4541 		      g = gimple_build_assign
4542 					(y4, POINTER_PLUS_EXPR, y4,
4543 					 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4544 		      gimple_seq_add_stmt (dlist, g);
4545 		    }
4546 		  g = gimple_build_assign (i2, PLUS_EXPR, i2,
4547 					   build_int_cst (TREE_TYPE (i2), 1));
4548 		  gimple_seq_add_stmt (dlist, g);
4549 		  g = gimple_build_cond (LE_EXPR, i2, v, body2, end2);
4550 		  gimple_seq_add_stmt (dlist, g);
4551 		  gimple_seq_add_stmt (dlist, gimple_build_label (end2));
4552 		}
4553 	      continue;
4554 	    }
4555 	  else if (pass == 2)
4556 	    {
4557 	      if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
4558 		x = var;
4559 	      else
4560 		{
4561 		  bool by_ref = use_pointer_for_field (var, ctx);
4562 		  x = build_receiver_ref (var, by_ref, ctx);
4563 		}
4564 	      if (!omp_is_reference (var))
4565 		x = build_fold_addr_expr (x);
4566 	      x = fold_convert (ptr_type_node, x);
4567 	      unsigned cnt = task_reduction_cnt - 1;
4568 	      if (!task_reduction_needs_orig_p)
4569 		cnt += task_reduction_cntorig_full - task_reduction_cntorig;
4570 	      else
4571 		cnt = task_reduction_cntorig - 1;
4572 	      tree r = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
4573 			       size_int (cnt), NULL_TREE, NULL_TREE);
4574 	      gimplify_assign (r, x, ilist);
4575 	      continue;
4576 	    }
4577 	  else if (pass == 3)
4578 	    {
4579 	      tree type = TREE_TYPE (new_var);
4580 	      if (!omp_is_reference (var))
4581 		type = build_pointer_type (type);
4582 	      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
4583 		{
4584 		  unsigned cnt = task_reduction_cnt - 1;
4585 		  if (!task_reduction_needs_orig_p)
4586 		    cnt += (task_reduction_cntorig_full
4587 			    - task_reduction_cntorig);
4588 		  else
4589 		    cnt = task_reduction_cntorig - 1;
4590 		  x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
4591 			      size_int (cnt), NULL_TREE, NULL_TREE);
4592 		}
4593 	      else
4594 		{
4595 		  unsigned int idx = *ctx->task_reduction_map->get (c);
4596 		  tree off;
4597 		  if (ctx->task_reductions[1 + idx])
4598 		    off = fold_convert (sizetype,
4599 					ctx->task_reductions[1 + idx]);
4600 		  else
4601 		    off = task_reduction_read (ilist, tskred_temp, sizetype,
4602 					       7 + 3 * idx + 1);
4603 		  x = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node,
4604 				   tskred_base, off);
4605 		}
4606 	      x = fold_convert (type, x);
4607 	      tree t;
4608 	      if (omp_is_reference (var))
4609 		{
4610 		  gimplify_assign (new_var, x, ilist);
4611 		  t = new_var;
4612 		  new_var = build_simple_mem_ref (new_var);
4613 		}
4614 	      else
4615 		{
4616 		  t = create_tmp_var (type);
4617 		  gimplify_assign (t, x, ilist);
4618 		  SET_DECL_VALUE_EXPR (new_var, build_simple_mem_ref (t));
4619 		  DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4620 		}
4621 	      t = fold_convert (build_pointer_type (boolean_type_node), t);
4622 	      t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t,
4623 			       TYPE_SIZE_UNIT (TREE_TYPE (type)));
4624 	      cond = create_tmp_var (TREE_TYPE (t));
4625 	      gimplify_assign (cond, t, ilist);
4626 	    }
4627 	  else if (is_variable_sized (var))
4628 	    {
4629 	      /* For variable sized types, we need to allocate the
4630 		 actual storage here.  Call alloca and store the
4631 		 result in the pointer decl that we created elsewhere.  */
4632 	      if (pass == 0)
4633 		continue;
4634 
4635 	      if (c_kind != OMP_CLAUSE_FIRSTPRIVATE || !is_task_ctx (ctx))
4636 		{
4637 		  gcall *stmt;
4638 		  tree tmp, atmp;
4639 
4640 		  ptr = DECL_VALUE_EXPR (new_var);
4641 		  gcc_assert (TREE_CODE (ptr) == INDIRECT_REF);
4642 		  ptr = TREE_OPERAND (ptr, 0);
4643 		  gcc_assert (DECL_P (ptr));
4644 		  x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
4645 
4646 		  /* void *tmp = __builtin_alloca */
4647 		  atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
4648 		  stmt = gimple_build_call (atmp, 2, x,
4649 					    size_int (DECL_ALIGN (var)));
4650 		  cfun->calls_alloca = 1;
4651 		  tmp = create_tmp_var_raw (ptr_type_node);
4652 		  gimple_add_tmp_var (tmp);
4653 		  gimple_call_set_lhs (stmt, tmp);
4654 
4655 		  gimple_seq_add_stmt (ilist, stmt);
4656 
4657 		  x = fold_convert_loc (clause_loc, TREE_TYPE (ptr), tmp);
4658 		  gimplify_assign (ptr, x, ilist);
4659 		}
4660 	    }
4661 	  else if (omp_is_reference (var)
4662 		   && (c_kind != OMP_CLAUSE_FIRSTPRIVATE
4663 		       || !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c)))
4664 	    {
4665 	      /* For references that are being privatized for Fortran,
4666 		 allocate new backing storage for the new pointer
4667 		 variable.  This allows us to avoid changing all the
4668 		 code that expects a pointer to something that expects
4669 		 a direct variable.  */
4670 	      if (pass == 0)
4671 		continue;
4672 
4673 	      x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
4674 	      if (c_kind == OMP_CLAUSE_FIRSTPRIVATE && is_task_ctx (ctx))
4675 		{
4676 		  x = build_receiver_ref (var, false, ctx);
4677 		  x = build_fold_addr_expr_loc (clause_loc, x);
4678 		}
4679 	      else if (TREE_CONSTANT (x))
4680 		{
4681 		  /* For reduction in SIMD loop, defer adding the
4682 		     initialization of the reference, because if we decide
4683 		     to use SIMD array for it, the initilization could cause
4684 		     expansion ICE.  */
4685 		  if (c_kind == OMP_CLAUSE_REDUCTION && is_simd)
4686 		    x = NULL_TREE;
4687 		  else
4688 		    {
4689 		      x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
4690 					      get_name (var));
4691 		      gimple_add_tmp_var (x);
4692 		      TREE_ADDRESSABLE (x) = 1;
4693 		      x = build_fold_addr_expr_loc (clause_loc, x);
4694 		    }
4695 		}
4696 	      else
4697 		{
4698 		  tree atmp
4699 		    = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
4700 		  tree rtype = TREE_TYPE (TREE_TYPE (new_var));
4701 		  tree al = size_int (TYPE_ALIGN (rtype));
4702 		  x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
4703 		}
4704 
4705 	      if (x)
4706 		{
4707 		  x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
4708 		  gimplify_assign (new_var, x, ilist);
4709 		}
4710 
4711 	      new_var = build_simple_mem_ref_loc (clause_loc, new_var);
4712 	    }
4713 	  else if ((c_kind == OMP_CLAUSE_REDUCTION
4714 		    || c_kind == OMP_CLAUSE_IN_REDUCTION)
4715 		   && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
4716 	    {
4717 	      if (pass == 0)
4718 		continue;
4719 	    }
4720 	  else if (pass != 0)
4721 	    continue;
4722 
4723 	  switch (OMP_CLAUSE_CODE (c))
4724 	    {
4725 	    case OMP_CLAUSE_SHARED:
4726 	      /* Ignore shared directives in teams construct inside
4727 		 target construct.  */
4728 	      if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
4729 		  && !is_host_teams_ctx (ctx))
4730 		continue;
4731 	      /* Shared global vars are just accessed directly.  */
4732 	      if (is_global_var (new_var))
4733 		break;
4734 	      /* For taskloop firstprivate/lastprivate, represented
4735 		 as firstprivate and shared clause on the task, new_var
4736 		 is the firstprivate var.  */
4737 	      if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
4738 		break;
4739 	      /* Set up the DECL_VALUE_EXPR for shared variables now.  This
4740 		 needs to be delayed until after fixup_child_record_type so
4741 		 that we get the correct type during the dereference.  */
4742 	      by_ref = use_pointer_for_field (var, ctx);
4743 	      x = build_receiver_ref (var, by_ref, ctx);
4744 	      SET_DECL_VALUE_EXPR (new_var, x);
4745 	      DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4746 
4747 	      /* ??? If VAR is not passed by reference, and the variable
4748 		 hasn't been initialized yet, then we'll get a warning for
4749 		 the store into the omp_data_s structure.  Ideally, we'd be
4750 		 able to notice this and not store anything at all, but
4751 		 we're generating code too early.  Suppress the warning.  */
4752 	      if (!by_ref)
4753 		TREE_NO_WARNING (var) = 1;
4754 	      break;
4755 
4756 	    case OMP_CLAUSE_LASTPRIVATE:
4757 	      if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
4758 		break;
4759 	      /* FALLTHRU */
4760 
4761 	    case OMP_CLAUSE_PRIVATE:
4762 	      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE)
4763 		x = build_outer_var_ref (var, ctx);
4764 	      else if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
4765 		{
4766 		  if (is_task_ctx (ctx))
4767 		    x = build_receiver_ref (var, false, ctx);
4768 		  else
4769 		    x = build_outer_var_ref (var, ctx, OMP_CLAUSE_PRIVATE);
4770 		}
4771 	      else
4772 		x = NULL;
4773 	    do_private:
4774 	      tree nx;
4775 	      nx = lang_hooks.decls.omp_clause_default_ctor
4776 						(c, unshare_expr (new_var), x);
4777 	      if (is_simd)
4778 		{
4779 		  tree y = lang_hooks.decls.omp_clause_dtor (c, new_var);
4780 		  if ((TREE_ADDRESSABLE (new_var) || nx || y
4781 		       || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
4782 		      && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4783 						       ivar, lvar))
4784 		    {
4785 		      if (nx)
4786 			x = lang_hooks.decls.omp_clause_default_ctor
4787 						(c, unshare_expr (ivar), x);
4788 		      if (nx && x)
4789 			gimplify_and_add (x, &llist[0]);
4790 		      if (y)
4791 			{
4792 			  y = lang_hooks.decls.omp_clause_dtor (c, ivar);
4793 			  if (y)
4794 			    {
4795 			      gimple_seq tseq = NULL;
4796 
4797 			      dtor = y;
4798 			      gimplify_stmt (&dtor, &tseq);
4799 			      gimple_seq_add_seq (&llist[1], tseq);
4800 			    }
4801 			}
4802 		      break;
4803 		    }
4804 		}
4805 	      if (nx)
4806 		gimplify_and_add (nx, ilist);
4807 	      /* FALLTHRU */
4808 
4809 	    do_dtor:
4810 	      x = lang_hooks.decls.omp_clause_dtor (c, new_var);
4811 	      if (x)
4812 		{
4813 		  gimple_seq tseq = NULL;
4814 
4815 		  dtor = x;
4816 		  gimplify_stmt (&dtor, &tseq);
4817 		  gimple_seq_add_seq (dlist, tseq);
4818 		}
4819 	      break;
4820 
4821 	    case OMP_CLAUSE_LINEAR:
4822 	      if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
4823 		goto do_firstprivate;
4824 	      if (OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
4825 		x = NULL;
4826 	      else
4827 		x = build_outer_var_ref (var, ctx);
4828 	      goto do_private;
4829 
4830 	    case OMP_CLAUSE_FIRSTPRIVATE:
4831 	      if (is_task_ctx (ctx))
4832 		{
4833 		  if ((omp_is_reference (var)
4834 		       && !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c))
4835 		      || is_variable_sized (var))
4836 		    goto do_dtor;
4837 		  else if (is_global_var (maybe_lookup_decl_in_outer_ctx (var,
4838 									  ctx))
4839 			   || use_pointer_for_field (var, NULL))
4840 		    {
4841 		      x = build_receiver_ref (var, false, ctx);
4842 		      SET_DECL_VALUE_EXPR (new_var, x);
4843 		      DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4844 		      goto do_dtor;
4845 		    }
4846 		}
4847 	      if (OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c)
4848 		  && omp_is_reference (var))
4849 		{
4850 		  x = build_outer_var_ref (var, ctx);
4851 		  gcc_assert (TREE_CODE (x) == MEM_REF
4852 			      && integer_zerop (TREE_OPERAND (x, 1)));
4853 		  x = TREE_OPERAND (x, 0);
4854 		  x = lang_hooks.decls.omp_clause_copy_ctor
4855 						(c, unshare_expr (new_var), x);
4856 		  gimplify_and_add (x, ilist);
4857 		  goto do_dtor;
4858 		}
4859 	    do_firstprivate:
4860 	      x = build_outer_var_ref (var, ctx);
4861 	      if (is_simd)
4862 		{
4863 		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
4864 		      && gimple_omp_for_combined_into_p (ctx->stmt))
4865 		    {
4866 		      tree t = OMP_CLAUSE_LINEAR_STEP (c);
4867 		      tree stept = TREE_TYPE (t);
4868 		      tree ct = omp_find_clause (clauses,
4869 						 OMP_CLAUSE__LOOPTEMP_);
4870 		      gcc_assert (ct);
4871 		      tree l = OMP_CLAUSE_DECL (ct);
4872 		      tree n1 = fd->loop.n1;
4873 		      tree step = fd->loop.step;
4874 		      tree itype = TREE_TYPE (l);
4875 		      if (POINTER_TYPE_P (itype))
4876 			itype = signed_type_for (itype);
4877 		      l = fold_build2 (MINUS_EXPR, itype, l, n1);
4878 		      if (TYPE_UNSIGNED (itype)
4879 			  && fd->loop.cond_code == GT_EXPR)
4880 			l = fold_build2 (TRUNC_DIV_EXPR, itype,
4881 					 fold_build1 (NEGATE_EXPR, itype, l),
4882 					 fold_build1 (NEGATE_EXPR,
4883 						      itype, step));
4884 		      else
4885 			l = fold_build2 (TRUNC_DIV_EXPR, itype, l, step);
4886 		      t = fold_build2 (MULT_EXPR, stept,
4887 				       fold_convert (stept, l), t);
4888 
4889 		      if (OMP_CLAUSE_LINEAR_ARRAY (c))
4890 			{
4891 			  x = lang_hooks.decls.omp_clause_linear_ctor
4892 							(c, new_var, x, t);
4893 			  gimplify_and_add (x, ilist);
4894 			  goto do_dtor;
4895 			}
4896 
4897 		      if (POINTER_TYPE_P (TREE_TYPE (x)))
4898 			x = fold_build2 (POINTER_PLUS_EXPR,
4899 					 TREE_TYPE (x), x, t);
4900 		      else
4901 			x = fold_build2 (PLUS_EXPR, TREE_TYPE (x), x, t);
4902 		    }
4903 
4904 		  if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_LINEAR
4905 		       || TREE_ADDRESSABLE (new_var))
4906 		      && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4907 						       ivar, lvar))
4908 		    {
4909 		      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR)
4910 			{
4911 			  tree iv = create_tmp_var (TREE_TYPE (new_var));
4912 			  x = lang_hooks.decls.omp_clause_copy_ctor (c, iv, x);
4913 			  gimplify_and_add (x, ilist);
4914 			  gimple_stmt_iterator gsi
4915 			    = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
4916 			  gassign *g
4917 			    = gimple_build_assign (unshare_expr (lvar), iv);
4918 			  gsi_insert_before_without_update (&gsi, g,
4919 							    GSI_SAME_STMT);
4920 			  tree t = OMP_CLAUSE_LINEAR_STEP (c);
4921 			  enum tree_code code = PLUS_EXPR;
4922 			  if (POINTER_TYPE_P (TREE_TYPE (new_var)))
4923 			    code = POINTER_PLUS_EXPR;
4924 			  g = gimple_build_assign (iv, code, iv, t);
4925 			  gsi_insert_before_without_update (&gsi, g,
4926 							    GSI_SAME_STMT);
4927 			  break;
4928 			}
4929 		      x = lang_hooks.decls.omp_clause_copy_ctor
4930 						(c, unshare_expr (ivar), x);
4931 		      gimplify_and_add (x, &llist[0]);
4932 		      x = lang_hooks.decls.omp_clause_dtor (c, ivar);
4933 		      if (x)
4934 			{
4935 			  gimple_seq tseq = NULL;
4936 
4937 			  dtor = x;
4938 			  gimplify_stmt (&dtor, &tseq);
4939 			  gimple_seq_add_seq (&llist[1], tseq);
4940 			}
4941 		      break;
4942 		    }
4943 		}
4944 	      x = lang_hooks.decls.omp_clause_copy_ctor
4945 						(c, unshare_expr (new_var), x);
4946 	      gimplify_and_add (x, ilist);
4947 	      goto do_dtor;
4948 
4949 	    case OMP_CLAUSE__LOOPTEMP_:
4950 	    case OMP_CLAUSE__REDUCTEMP_:
4951 	      gcc_assert (is_taskreg_ctx (ctx));
4952 	      x = build_outer_var_ref (var, ctx);
4953 	      x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
4954 	      gimplify_and_add (x, ilist);
4955 	      break;
4956 
4957 	    case OMP_CLAUSE_COPYIN:
4958 	      by_ref = use_pointer_for_field (var, NULL);
4959 	      x = build_receiver_ref (var, by_ref, ctx);
4960 	      x = lang_hooks.decls.omp_clause_assign_op (c, new_var, x);
4961 	      append_to_statement_list (x, &copyin_seq);
4962 	      copyin_by_ref |= by_ref;
4963 	      break;
4964 
4965 	    case OMP_CLAUSE_REDUCTION:
4966 	    case OMP_CLAUSE_IN_REDUCTION:
4967 	      /* OpenACC reductions are initialized using the
4968 		 GOACC_REDUCTION internal function.  */
4969 	      if (is_gimple_omp_oacc (ctx->stmt))
4970 		break;
4971 	      if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
4972 		{
4973 		  tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
4974 		  gimple *tseq;
4975 		  tree ptype = TREE_TYPE (placeholder);
4976 		  if (cond)
4977 		    {
4978 		      x = error_mark_node;
4979 		      if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c)
4980 			  && !task_reduction_needs_orig_p)
4981 			x = var;
4982 		      else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
4983 			{
4984 			  tree pptype = build_pointer_type (ptype);
4985 			  if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
4986 			    x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
4987 					size_int (task_reduction_cnt_full
4988 						  + task_reduction_cntorig - 1),
4989 					NULL_TREE, NULL_TREE);
4990 			  else
4991 			    {
4992 			      unsigned int idx
4993 				= *ctx->task_reduction_map->get (c);
4994 			      x = task_reduction_read (ilist, tskred_temp,
4995 						       pptype, 7 + 3 * idx);
4996 			    }
4997 			  x = fold_convert (pptype, x);
4998 			  x = build_simple_mem_ref (x);
4999 			}
5000 		    }
5001 		  else
5002 		    {
5003 		      x = build_outer_var_ref (var, ctx);
5004 
5005 		      if (omp_is_reference (var)
5006 			  && !useless_type_conversion_p (ptype, TREE_TYPE (x)))
5007 			x = build_fold_addr_expr_loc (clause_loc, x);
5008 		    }
5009 		  SET_DECL_VALUE_EXPR (placeholder, x);
5010 		  DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
5011 		  tree new_vard = new_var;
5012 		  if (omp_is_reference (var))
5013 		    {
5014 		      gcc_assert (TREE_CODE (new_var) == MEM_REF);
5015 		      new_vard = TREE_OPERAND (new_var, 0);
5016 		      gcc_assert (DECL_P (new_vard));
5017 		    }
5018 		  if (is_simd
5019 		      && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
5020 						       ivar, lvar))
5021 		    {
5022 		      if (new_vard == new_var)
5023 			{
5024 			  gcc_assert (DECL_VALUE_EXPR (new_var) == lvar);
5025 			  SET_DECL_VALUE_EXPR (new_var, ivar);
5026 			}
5027 		      else
5028 			{
5029 			  SET_DECL_VALUE_EXPR (new_vard,
5030 					       build_fold_addr_expr (ivar));
5031 			  DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
5032 			}
5033 		      x = lang_hooks.decls.omp_clause_default_ctor
5034 				(c, unshare_expr (ivar),
5035 				 build_outer_var_ref (var, ctx));
5036 		      if (x)
5037 			gimplify_and_add (x, &llist[0]);
5038 		      if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
5039 			{
5040 			  tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
5041 			  lower_omp (&tseq, ctx);
5042 			  gimple_seq_add_seq (&llist[0], tseq);
5043 			}
5044 		      OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
5045 		      tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
5046 		      lower_omp (&tseq, ctx);
5047 		      gimple_seq_add_seq (&llist[1], tseq);
5048 		      OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5049 		      DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
5050 		      if (new_vard == new_var)
5051 			SET_DECL_VALUE_EXPR (new_var, lvar);
5052 		      else
5053 			SET_DECL_VALUE_EXPR (new_vard,
5054 					     build_fold_addr_expr (lvar));
5055 		      x = lang_hooks.decls.omp_clause_dtor (c, ivar);
5056 		      if (x)
5057 			{
5058 			  tseq = NULL;
5059 			  dtor = x;
5060 			  gimplify_stmt (&dtor, &tseq);
5061 			  gimple_seq_add_seq (&llist[1], tseq);
5062 			}
5063 		      break;
5064 		    }
5065 		  /* If this is a reference to constant size reduction var
5066 		     with placeholder, we haven't emitted the initializer
5067 		     for it because it is undesirable if SIMD arrays are used.
5068 		     But if they aren't used, we need to emit the deferred
5069 		     initialization now.  */
5070 		  else if (omp_is_reference (var) && is_simd)
5071 		    handle_simd_reference (clause_loc, new_vard, ilist);
5072 
5073 		  tree lab2 = NULL_TREE;
5074 		  if (cond)
5075 		    {
5076 		      gimple *g;
5077 		      if (!is_parallel_ctx (ctx))
5078 			{
5079 			  tree condv = create_tmp_var (boolean_type_node);
5080 			  tree m = build_simple_mem_ref (cond);
5081 			  g = gimple_build_assign (condv, m);
5082 			  gimple_seq_add_stmt (ilist, g);
5083 			  tree lab1
5084 			    = create_artificial_label (UNKNOWN_LOCATION);
5085 			  lab2 = create_artificial_label (UNKNOWN_LOCATION);
5086 			  g = gimple_build_cond (NE_EXPR, condv,
5087 						 boolean_false_node,
5088 						 lab2, lab1);
5089 			  gimple_seq_add_stmt (ilist, g);
5090 			  gimple_seq_add_stmt (ilist,
5091 					       gimple_build_label (lab1));
5092 			}
5093 		      g = gimple_build_assign (build_simple_mem_ref (cond),
5094 					       boolean_true_node);
5095 		      gimple_seq_add_stmt (ilist, g);
5096 		    }
5097 		  x = lang_hooks.decls.omp_clause_default_ctor
5098 				(c, unshare_expr (new_var),
5099 				 cond ? NULL_TREE
5100 				 : build_outer_var_ref (var, ctx));
5101 		  if (x)
5102 		    gimplify_and_add (x, ilist);
5103 		  if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
5104 		    {
5105 		      tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
5106 		      lower_omp (&tseq, ctx);
5107 		      gimple_seq_add_seq (ilist, tseq);
5108 		    }
5109 		  OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
5110 		  if (is_simd)
5111 		    {
5112 		      tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
5113 		      lower_omp (&tseq, ctx);
5114 		      gimple_seq_add_seq (dlist, tseq);
5115 		      OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5116 		    }
5117 		  DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
5118 		  if (cond)
5119 		    {
5120 		      if (lab2)
5121 			gimple_seq_add_stmt (ilist, gimple_build_label (lab2));
5122 		      break;
5123 		    }
5124 		  goto do_dtor;
5125 		}
5126 	      else
5127 		{
5128 		  x = omp_reduction_init (c, TREE_TYPE (new_var));
5129 		  gcc_assert (TREE_CODE (TREE_TYPE (new_var)) != ARRAY_TYPE);
5130 		  enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
5131 
5132 		  if (cond)
5133 		    {
5134 		      gimple *g;
5135 		      tree lab2 = NULL_TREE;
5136 		      /* GOMP_taskgroup_reduction_register memsets the whole
5137 			 array to zero.  If the initializer is zero, we don't
5138 			 need to initialize it again, just mark it as ever
5139 			 used unconditionally, i.e. cond = true.  */
5140 		      if (initializer_zerop (x))
5141 			{
5142 			  g = gimple_build_assign (build_simple_mem_ref (cond),
5143 						   boolean_true_node);
5144 			  gimple_seq_add_stmt (ilist, g);
5145 			  break;
5146 			}
5147 
5148 		      /* Otherwise, emit
5149 			 if (!cond) { cond = true; new_var = x; }  */
5150 		      if (!is_parallel_ctx (ctx))
5151 			{
5152 			  tree condv = create_tmp_var (boolean_type_node);
5153 			  tree m = build_simple_mem_ref (cond);
5154 			  g = gimple_build_assign (condv, m);
5155 			  gimple_seq_add_stmt (ilist, g);
5156 			  tree lab1
5157 			    = create_artificial_label (UNKNOWN_LOCATION);
5158 			  lab2 = create_artificial_label (UNKNOWN_LOCATION);
5159 			  g = gimple_build_cond (NE_EXPR, condv,
5160 						 boolean_false_node,
5161 						 lab2, lab1);
5162 			  gimple_seq_add_stmt (ilist, g);
5163 			  gimple_seq_add_stmt (ilist,
5164 					       gimple_build_label (lab1));
5165 			}
5166 		      g = gimple_build_assign (build_simple_mem_ref (cond),
5167 					       boolean_true_node);
5168 		      gimple_seq_add_stmt (ilist, g);
5169 		      gimplify_assign (new_var, x, ilist);
5170 		      if (lab2)
5171 			gimple_seq_add_stmt (ilist, gimple_build_label (lab2));
5172 		      break;
5173 		    }
5174 
5175 		  /* reduction(-:var) sums up the partial results, so it
5176 		     acts identically to reduction(+:var).  */
5177 		  if (code == MINUS_EXPR)
5178 		    code = PLUS_EXPR;
5179 
5180 		  tree new_vard = new_var;
5181 		  if (is_simd && omp_is_reference (var))
5182 		    {
5183 		      gcc_assert (TREE_CODE (new_var) == MEM_REF);
5184 		      new_vard = TREE_OPERAND (new_var, 0);
5185 		      gcc_assert (DECL_P (new_vard));
5186 		    }
5187 		  if (is_simd
5188 		      && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
5189 						       ivar, lvar))
5190 		    {
5191 		      tree ref = build_outer_var_ref (var, ctx);
5192 
5193 		      gimplify_assign (unshare_expr (ivar), x, &llist[0]);
5194 
5195 		      if (sctx.is_simt)
5196 			{
5197 			  if (!simt_lane)
5198 			    simt_lane = create_tmp_var (unsigned_type_node);
5199 			  x = build_call_expr_internal_loc
5200 			    (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_BFLY,
5201 			     TREE_TYPE (ivar), 2, ivar, simt_lane);
5202 			  x = build2 (code, TREE_TYPE (ivar), ivar, x);
5203 			  gimplify_assign (ivar, x, &llist[2]);
5204 			}
5205 		      x = build2 (code, TREE_TYPE (ref), ref, ivar);
5206 		      ref = build_outer_var_ref (var, ctx);
5207 		      gimplify_assign (ref, x, &llist[1]);
5208 
5209 		      if (new_vard != new_var)
5210 			{
5211 			  SET_DECL_VALUE_EXPR (new_vard,
5212 					       build_fold_addr_expr (lvar));
5213 			  DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
5214 			}
5215 		    }
5216 		  else
5217 		    {
5218 		      if (omp_is_reference (var) && is_simd)
5219 			handle_simd_reference (clause_loc, new_vard, ilist);
5220 		      gimplify_assign (new_var, x, ilist);
5221 		      if (is_simd)
5222 			{
5223 			  tree ref = build_outer_var_ref (var, ctx);
5224 
5225 			  x = build2 (code, TREE_TYPE (ref), ref, new_var);
5226 			  ref = build_outer_var_ref (var, ctx);
5227 			  gimplify_assign (ref, x, dlist);
5228 			}
5229 		    }
5230 		}
5231 	      break;
5232 
5233 	    default:
5234 	      gcc_unreachable ();
5235 	    }
5236 	}
5237     }
5238   if (tskred_avar)
5239     {
5240       tree clobber = build_constructor (TREE_TYPE (tskred_avar), NULL);
5241       TREE_THIS_VOLATILE (clobber) = 1;
5242       gimple_seq_add_stmt (ilist, gimple_build_assign (tskred_avar, clobber));
5243     }
5244 
5245   if (known_eq (sctx.max_vf, 1U))
5246     sctx.is_simt = false;
5247 
5248   if (nonconst_simd_if)
5249     {
5250       if (sctx.lane == NULL_TREE)
5251 	{
5252 	  sctx.idx = create_tmp_var (unsigned_type_node);
5253 	  sctx.lane = create_tmp_var (unsigned_type_node);
5254 	}
5255       /* FIXME: For now.  */
5256       sctx.is_simt = false;
5257     }
5258 
5259   if (sctx.lane || sctx.is_simt)
5260     {
5261       uid = create_tmp_var (ptr_type_node, "simduid");
5262       /* Don't want uninit warnings on simduid, it is always uninitialized,
5263 	 but we use it not for the value, but for the DECL_UID only.  */
5264       TREE_NO_WARNING (uid) = 1;
5265       c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SIMDUID_);
5266       OMP_CLAUSE__SIMDUID__DECL (c) = uid;
5267       OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
5268       gimple_omp_for_set_clauses (ctx->stmt, c);
5269     }
5270   /* Emit calls denoting privatized variables and initializing a pointer to
5271      structure that holds private variables as fields after ompdevlow pass.  */
5272   if (sctx.is_simt)
5273     {
5274       sctx.simt_eargs[0] = uid;
5275       gimple *g
5276 	= gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, sctx.simt_eargs);
5277       gimple_call_set_lhs (g, uid);
5278       gimple_seq_add_stmt (ilist, g);
5279       sctx.simt_eargs.release ();
5280 
5281       simtrec = create_tmp_var (ptr_type_node, ".omp_simt");
5282       g = gimple_build_call_internal (IFN_GOMP_SIMT_ENTER_ALLOC, 1, uid);
5283       gimple_call_set_lhs (g, simtrec);
5284       gimple_seq_add_stmt (ilist, g);
5285     }
5286   if (sctx.lane)
5287     {
5288       gimple *g = gimple_build_call_internal (IFN_GOMP_SIMD_LANE,
5289 					      1 + (nonconst_simd_if != NULL),
5290 					      uid, nonconst_simd_if);
5291       gimple_call_set_lhs (g, sctx.lane);
5292       gimple_stmt_iterator gsi = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
5293       gsi_insert_before_without_update (&gsi, g, GSI_SAME_STMT);
5294       g = gimple_build_assign (sctx.lane, INTEGER_CST,
5295 			       build_int_cst (unsigned_type_node, 0));
5296       gimple_seq_add_stmt (ilist, g);
5297       /* Emit reductions across SIMT lanes in log_2(simt_vf) steps.  */
5298       if (llist[2])
5299 	{
5300 	  tree simt_vf = create_tmp_var (unsigned_type_node);
5301 	  g = gimple_build_call_internal (IFN_GOMP_SIMT_VF, 0);
5302 	  gimple_call_set_lhs (g, simt_vf);
5303 	  gimple_seq_add_stmt (dlist, g);
5304 
5305 	  tree t = build_int_cst (unsigned_type_node, 1);
5306 	  g = gimple_build_assign (simt_lane, INTEGER_CST, t);
5307 	  gimple_seq_add_stmt (dlist, g);
5308 
5309 	  t = build_int_cst (unsigned_type_node, 0);
5310 	  g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
5311 	  gimple_seq_add_stmt (dlist, g);
5312 
5313 	  tree body = create_artificial_label (UNKNOWN_LOCATION);
5314 	  tree header = create_artificial_label (UNKNOWN_LOCATION);
5315 	  tree end = create_artificial_label (UNKNOWN_LOCATION);
5316 	  gimple_seq_add_stmt (dlist, gimple_build_goto (header));
5317 	  gimple_seq_add_stmt (dlist, gimple_build_label (body));
5318 
5319 	  gimple_seq_add_seq (dlist, llist[2]);
5320 
5321 	  g = gimple_build_assign (simt_lane, LSHIFT_EXPR, simt_lane, integer_one_node);
5322 	  gimple_seq_add_stmt (dlist, g);
5323 
5324 	  gimple_seq_add_stmt (dlist, gimple_build_label (header));
5325 	  g = gimple_build_cond (LT_EXPR, simt_lane, simt_vf, body, end);
5326 	  gimple_seq_add_stmt (dlist, g);
5327 
5328 	  gimple_seq_add_stmt (dlist, gimple_build_label (end));
5329 	}
5330       for (int i = 0; i < 2; i++)
5331 	if (llist[i])
5332 	  {
5333 	    tree vf = create_tmp_var (unsigned_type_node);
5334 	    g = gimple_build_call_internal (IFN_GOMP_SIMD_VF, 1, uid);
5335 	    gimple_call_set_lhs (g, vf);
5336 	    gimple_seq *seq = i == 0 ? ilist : dlist;
5337 	    gimple_seq_add_stmt (seq, g);
5338 	    tree t = build_int_cst (unsigned_type_node, 0);
5339 	    g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
5340 	    gimple_seq_add_stmt (seq, g);
5341 	    tree body = create_artificial_label (UNKNOWN_LOCATION);
5342 	    tree header = create_artificial_label (UNKNOWN_LOCATION);
5343 	    tree end = create_artificial_label (UNKNOWN_LOCATION);
5344 	    gimple_seq_add_stmt (seq, gimple_build_goto (header));
5345 	    gimple_seq_add_stmt (seq, gimple_build_label (body));
5346 	    gimple_seq_add_seq (seq, llist[i]);
5347 	    t = build_int_cst (unsigned_type_node, 1);
5348 	    g = gimple_build_assign (sctx.idx, PLUS_EXPR, sctx.idx, t);
5349 	    gimple_seq_add_stmt (seq, g);
5350 	    gimple_seq_add_stmt (seq, gimple_build_label (header));
5351 	    g = gimple_build_cond (LT_EXPR, sctx.idx, vf, body, end);
5352 	    gimple_seq_add_stmt (seq, g);
5353 	    gimple_seq_add_stmt (seq, gimple_build_label (end));
5354 	  }
5355     }
5356   if (sctx.is_simt)
5357     {
5358       gimple_seq_add_seq (dlist, sctx.simt_dlist);
5359       gimple *g
5360 	= gimple_build_call_internal (IFN_GOMP_SIMT_EXIT, 1, simtrec);
5361       gimple_seq_add_stmt (dlist, g);
5362     }
5363 
5364   /* The copyin sequence is not to be executed by the main thread, since
5365      that would result in self-copies.  Perhaps not visible to scalars,
5366      but it certainly is to C++ operator=.  */
5367   if (copyin_seq)
5368     {
5369       x = build_call_expr (builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM),
5370 			   0);
5371       x = build2 (NE_EXPR, boolean_type_node, x,
5372 		  build_int_cst (TREE_TYPE (x), 0));
5373       x = build3 (COND_EXPR, void_type_node, x, copyin_seq, NULL);
5374       gimplify_and_add (x, ilist);
5375     }
5376 
5377   /* If any copyin variable is passed by reference, we must ensure the
5378      master thread doesn't modify it before it is copied over in all
5379      threads.  Similarly for variables in both firstprivate and
5380      lastprivate clauses we need to ensure the lastprivate copying
5381      happens after firstprivate copying in all threads.  And similarly
5382      for UDRs if initializer expression refers to omp_orig.  */
5383   if (copyin_by_ref || lastprivate_firstprivate || reduction_omp_orig_ref)
5384     {
5385       /* Don't add any barrier for #pragma omp simd or
5386 	 #pragma omp distribute.  */
5387       if (!is_task_ctx (ctx)
5388 	  && (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
5389 	      || gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_FOR))
5390 	gimple_seq_add_stmt (ilist, omp_build_barrier (NULL_TREE));
5391     }
5392 
5393   /* If max_vf is non-zero, then we can use only a vectorization factor
5394      up to the max_vf we chose.  So stick it into the safelen clause.  */
5395   if (maybe_ne (sctx.max_vf, 0U))
5396     {
5397       tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
5398 				OMP_CLAUSE_SAFELEN);
5399       poly_uint64 safe_len;
5400       if (c == NULL_TREE
5401 	  || (poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
5402 	      && maybe_gt (safe_len, sctx.max_vf)))
5403 	{
5404 	  c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_SAFELEN);
5405 	  OMP_CLAUSE_SAFELEN_EXPR (c) = build_int_cst (integer_type_node,
5406 						       sctx.max_vf);
5407 	  OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
5408 	  gimple_omp_for_set_clauses (ctx->stmt, c);
5409 	}
5410     }
5411 }
5412 
5413 
5414 /* Generate code to implement the LASTPRIVATE clauses.  This is used for
5415    both parallel and workshare constructs.  PREDICATE may be NULL if it's
5416    always true.   */
5417 
5418 static void
lower_lastprivate_clauses(tree clauses,tree predicate,gimple_seq * stmt_list,omp_context * ctx)5419 lower_lastprivate_clauses (tree clauses, tree predicate, gimple_seq *stmt_list,
5420 			   omp_context *ctx)
5421 {
5422   tree x, c, label = NULL, orig_clauses = clauses;
5423   bool par_clauses = false;
5424   tree simduid = NULL, lastlane = NULL, simtcond = NULL, simtlast = NULL;
5425 
5426   /* Early exit if there are no lastprivate or linear clauses.  */
5427   for (; clauses ; clauses = OMP_CLAUSE_CHAIN (clauses))
5428     if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LASTPRIVATE
5429 	|| (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LINEAR
5430 	    && !OMP_CLAUSE_LINEAR_NO_COPYOUT (clauses)))
5431       break;
5432   if (clauses == NULL)
5433     {
5434       /* If this was a workshare clause, see if it had been combined
5435 	 with its parallel.  In that case, look for the clauses on the
5436 	 parallel statement itself.  */
5437       if (is_parallel_ctx (ctx))
5438 	return;
5439 
5440       ctx = ctx->outer;
5441       if (ctx == NULL || !is_parallel_ctx (ctx))
5442 	return;
5443 
5444       clauses = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
5445 				 OMP_CLAUSE_LASTPRIVATE);
5446       if (clauses == NULL)
5447 	return;
5448       par_clauses = true;
5449     }
5450 
5451   bool maybe_simt = false;
5452   if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
5453       && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
5454     {
5455       maybe_simt = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMT_);
5456       simduid = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMDUID_);
5457       if (simduid)
5458 	simduid = OMP_CLAUSE__SIMDUID__DECL (simduid);
5459     }
5460 
5461   if (predicate)
5462     {
5463       gcond *stmt;
5464       tree label_true, arm1, arm2;
5465       enum tree_code pred_code = TREE_CODE (predicate);
5466 
5467       label = create_artificial_label (UNKNOWN_LOCATION);
5468       label_true = create_artificial_label (UNKNOWN_LOCATION);
5469       if (TREE_CODE_CLASS (pred_code) == tcc_comparison)
5470 	{
5471 	  arm1 = TREE_OPERAND (predicate, 0);
5472 	  arm2 = TREE_OPERAND (predicate, 1);
5473 	  gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
5474 	  gimplify_expr (&arm2, stmt_list, NULL, is_gimple_val, fb_rvalue);
5475 	}
5476       else
5477 	{
5478 	  arm1 = predicate;
5479 	  gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
5480 	  arm2 = boolean_false_node;
5481 	  pred_code = NE_EXPR;
5482 	}
5483       if (maybe_simt)
5484 	{
5485 	  c = build2 (pred_code, boolean_type_node, arm1, arm2);
5486 	  c = fold_convert (integer_type_node, c);
5487 	  simtcond = create_tmp_var (integer_type_node);
5488 	  gimplify_assign (simtcond, c, stmt_list);
5489 	  gcall *g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY,
5490 						 1, simtcond);
5491 	  c = create_tmp_var (integer_type_node);
5492 	  gimple_call_set_lhs (g, c);
5493 	  gimple_seq_add_stmt (stmt_list, g);
5494 	  stmt = gimple_build_cond (NE_EXPR, c, integer_zero_node,
5495 				    label_true, label);
5496 	}
5497       else
5498 	stmt = gimple_build_cond (pred_code, arm1, arm2, label_true, label);
5499       gimple_seq_add_stmt (stmt_list, stmt);
5500       gimple_seq_add_stmt (stmt_list, gimple_build_label (label_true));
5501     }
5502 
5503   for (c = clauses; c ;)
5504     {
5505       tree var, new_var;
5506       location_t clause_loc = OMP_CLAUSE_LOCATION (c);
5507 
5508       if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5509 	  || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
5510 	      && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
5511 	{
5512 	  var = OMP_CLAUSE_DECL (c);
5513 	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5514 	      && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
5515 	      && is_taskloop_ctx (ctx))
5516 	    {
5517 	      gcc_checking_assert (ctx->outer && is_task_ctx (ctx->outer));
5518 	      new_var = lookup_decl (var, ctx->outer);
5519 	    }
5520 	  else
5521 	    {
5522 	      new_var = lookup_decl (var, ctx);
5523 	      /* Avoid uninitialized warnings for lastprivate and
5524 		 for linear iterators.  */
5525 	      if (predicate
5526 		  && (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5527 		      || OMP_CLAUSE_LINEAR_NO_COPYIN (c)))
5528 		TREE_NO_WARNING (new_var) = 1;
5529 	    }
5530 
5531 	  if (!maybe_simt && simduid && DECL_HAS_VALUE_EXPR_P (new_var))
5532 	    {
5533 	      tree val = DECL_VALUE_EXPR (new_var);
5534 	      if (TREE_CODE (val) == ARRAY_REF
5535 		  && VAR_P (TREE_OPERAND (val, 0))
5536 		  && lookup_attribute ("omp simd array",
5537 				       DECL_ATTRIBUTES (TREE_OPERAND (val,
5538 								      0))))
5539 		{
5540 		  if (lastlane == NULL)
5541 		    {
5542 		      lastlane = create_tmp_var (unsigned_type_node);
5543 		      gcall *g
5544 			= gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
5545 						      2, simduid,
5546 						      TREE_OPERAND (val, 1));
5547 		      gimple_call_set_lhs (g, lastlane);
5548 		      gimple_seq_add_stmt (stmt_list, g);
5549 		    }
5550 		  new_var = build4 (ARRAY_REF, TREE_TYPE (val),
5551 				    TREE_OPERAND (val, 0), lastlane,
5552 				    NULL_TREE, NULL_TREE);
5553 		}
5554 	    }
5555 	  else if (maybe_simt)
5556 	    {
5557 	      tree val = (DECL_HAS_VALUE_EXPR_P (new_var)
5558 			  ? DECL_VALUE_EXPR (new_var)
5559 			  : new_var);
5560 	      if (simtlast == NULL)
5561 		{
5562 		  simtlast = create_tmp_var (unsigned_type_node);
5563 		  gcall *g = gimple_build_call_internal
5564 		    (IFN_GOMP_SIMT_LAST_LANE, 1, simtcond);
5565 		  gimple_call_set_lhs (g, simtlast);
5566 		  gimple_seq_add_stmt (stmt_list, g);
5567 		}
5568 	      x = build_call_expr_internal_loc
5569 		(UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_IDX,
5570 		 TREE_TYPE (val), 2, val, simtlast);
5571 	      new_var = unshare_expr (new_var);
5572 	      gimplify_assign (new_var, x, stmt_list);
5573 	      new_var = unshare_expr (new_var);
5574 	    }
5575 
5576 	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5577 	      && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
5578 	    {
5579 	      lower_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
5580 	      gimple_seq_add_seq (stmt_list,
5581 				  OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
5582 	      OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) = NULL;
5583 	    }
5584 	  else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
5585 		   && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
5586 	    {
5587 	      lower_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
5588 	      gimple_seq_add_seq (stmt_list,
5589 				  OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
5590 	      OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) = NULL;
5591 	    }
5592 
5593 	  x = NULL_TREE;
5594 	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5595 	      && OMP_CLAUSE_LASTPRIVATE_TASKLOOP_IV (c))
5596 	    {
5597 	      gcc_checking_assert (is_taskloop_ctx (ctx));
5598 	      tree ovar = maybe_lookup_decl_in_outer_ctx (var,
5599 							  ctx->outer->outer);
5600 	      if (is_global_var (ovar))
5601 		x = ovar;
5602 	    }
5603 	  if (!x)
5604 	    x = build_outer_var_ref (var, ctx, OMP_CLAUSE_LASTPRIVATE);
5605 	  if (omp_is_reference (var))
5606 	    new_var = build_simple_mem_ref_loc (clause_loc, new_var);
5607 	  x = lang_hooks.decls.omp_clause_assign_op (c, x, new_var);
5608 	  gimplify_and_add (x, stmt_list);
5609 	}
5610       c = OMP_CLAUSE_CHAIN (c);
5611       if (c == NULL && !par_clauses)
5612 	{
5613 	  /* If this was a workshare clause, see if it had been combined
5614 	     with its parallel.  In that case, continue looking for the
5615 	     clauses also on the parallel statement itself.  */
5616 	  if (is_parallel_ctx (ctx))
5617 	    break;
5618 
5619 	  ctx = ctx->outer;
5620 	  if (ctx == NULL || !is_parallel_ctx (ctx))
5621 	    break;
5622 
5623 	  c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
5624 			       OMP_CLAUSE_LASTPRIVATE);
5625 	  par_clauses = true;
5626 	}
5627     }
5628 
5629   if (label)
5630     gimple_seq_add_stmt (stmt_list, gimple_build_label (label));
5631 }
5632 
5633 /* Lower the OpenACC reductions of CLAUSES for compute axis LEVEL
5634    (which might be a placeholder).  INNER is true if this is an inner
5635    axis of a multi-axis loop.  FORK and JOIN are (optional) fork and
5636    join markers.  Generate the before-loop forking sequence in
5637    FORK_SEQ and the after-loop joining sequence to JOIN_SEQ.  The
5638    general form of these sequences is
5639 
5640      GOACC_REDUCTION_SETUP
5641      GOACC_FORK
5642      GOACC_REDUCTION_INIT
5643      ...
5644      GOACC_REDUCTION_FINI
5645      GOACC_JOIN
5646      GOACC_REDUCTION_TEARDOWN.  */
5647 
5648 static void
lower_oacc_reductions(location_t loc,tree clauses,tree level,bool inner,gcall * fork,gcall * join,gimple_seq * fork_seq,gimple_seq * join_seq,omp_context * ctx)5649 lower_oacc_reductions (location_t loc, tree clauses, tree level, bool inner,
5650 		       gcall *fork, gcall *join, gimple_seq *fork_seq,
5651 		       gimple_seq *join_seq, omp_context *ctx)
5652 {
5653   gimple_seq before_fork = NULL;
5654   gimple_seq after_fork = NULL;
5655   gimple_seq before_join = NULL;
5656   gimple_seq after_join = NULL;
5657   tree init_code = NULL_TREE, fini_code = NULL_TREE,
5658     setup_code = NULL_TREE, teardown_code = NULL_TREE;
5659   unsigned offset = 0;
5660 
5661   for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
5662     if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
5663       {
5664 	tree orig = OMP_CLAUSE_DECL (c);
5665 	tree var = maybe_lookup_decl (orig, ctx);
5666 	tree ref_to_res = NULL_TREE;
5667 	tree incoming, outgoing, v1, v2, v3;
5668 	bool is_private = false;
5669 
5670 	enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
5671 	if (rcode == MINUS_EXPR)
5672 	  rcode = PLUS_EXPR;
5673 	else if (rcode == TRUTH_ANDIF_EXPR)
5674 	  rcode = BIT_AND_EXPR;
5675 	else if (rcode == TRUTH_ORIF_EXPR)
5676 	  rcode = BIT_IOR_EXPR;
5677 	tree op = build_int_cst (unsigned_type_node, rcode);
5678 
5679 	if (!var)
5680 	  var = orig;
5681 
5682 	incoming = outgoing = var;
5683 
5684 	if (!inner)
5685 	  {
5686 	    /* See if an outer construct also reduces this variable.  */
5687 	    omp_context *outer = ctx;
5688 
5689 	    while (omp_context *probe = outer->outer)
5690 	      {
5691 		enum gimple_code type = gimple_code (probe->stmt);
5692 		tree cls;
5693 
5694 		switch (type)
5695 		  {
5696 		  case GIMPLE_OMP_FOR:
5697 		    cls = gimple_omp_for_clauses (probe->stmt);
5698 		    break;
5699 
5700 		  case GIMPLE_OMP_TARGET:
5701 		    if (gimple_omp_target_kind (probe->stmt)
5702 			!= GF_OMP_TARGET_KIND_OACC_PARALLEL)
5703 		      goto do_lookup;
5704 
5705 		    cls = gimple_omp_target_clauses (probe->stmt);
5706 		    break;
5707 
5708 		  default:
5709 		    goto do_lookup;
5710 		  }
5711 
5712 		outer = probe;
5713 		for (; cls;  cls = OMP_CLAUSE_CHAIN (cls))
5714 		  if (OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_REDUCTION
5715 		      && orig == OMP_CLAUSE_DECL (cls))
5716 		    {
5717 		      incoming = outgoing = lookup_decl (orig, probe);
5718 		      goto has_outer_reduction;
5719 		    }
5720 		  else if ((OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_FIRSTPRIVATE
5721 			    || OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_PRIVATE)
5722 			   && orig == OMP_CLAUSE_DECL (cls))
5723 		    {
5724 		      is_private = true;
5725 		      goto do_lookup;
5726 		    }
5727 	      }
5728 
5729 	  do_lookup:
5730 	    /* This is the outermost construct with this reduction,
5731 	       see if there's a mapping for it.  */
5732 	    if (gimple_code (outer->stmt) == GIMPLE_OMP_TARGET
5733 		&& maybe_lookup_field (orig, outer) && !is_private)
5734 	      {
5735 		ref_to_res = build_receiver_ref (orig, false, outer);
5736 		if (omp_is_reference (orig))
5737 		  ref_to_res = build_simple_mem_ref (ref_to_res);
5738 
5739 		tree type = TREE_TYPE (var);
5740 		if (POINTER_TYPE_P (type))
5741 		  type = TREE_TYPE (type);
5742 
5743 		outgoing = var;
5744 		incoming = omp_reduction_init_op (loc, rcode, type);
5745 	      }
5746 	    else
5747 	      {
5748 		/* Try to look at enclosing contexts for reduction var,
5749 		   use original if no mapping found.  */
5750 		tree t = NULL_TREE;
5751 		omp_context *c = ctx->outer;
5752 		while (c && !t)
5753 		  {
5754 		    t = maybe_lookup_decl (orig, c);
5755 		    c = c->outer;
5756 		  }
5757 		incoming = outgoing = (t ? t : orig);
5758 	      }
5759 
5760 	  has_outer_reduction:;
5761 	  }
5762 
5763 	if (!ref_to_res)
5764 	  ref_to_res = integer_zero_node;
5765 
5766 	if (omp_is_reference (orig))
5767 	  {
5768 	    tree type = TREE_TYPE (var);
5769 	    const char *id = IDENTIFIER_POINTER (DECL_NAME (var));
5770 
5771 	    if (!inner)
5772 	      {
5773 		tree x = create_tmp_var (TREE_TYPE (type), id);
5774 		gimplify_assign (var, build_fold_addr_expr (x), fork_seq);
5775 	      }
5776 
5777 	    v1 = create_tmp_var (type, id);
5778 	    v2 = create_tmp_var (type, id);
5779 	    v3 = create_tmp_var (type, id);
5780 
5781 	    gimplify_assign (v1, var, fork_seq);
5782 	    gimplify_assign (v2, var, fork_seq);
5783 	    gimplify_assign (v3, var, fork_seq);
5784 
5785 	    var = build_simple_mem_ref (var);
5786 	    v1 = build_simple_mem_ref (v1);
5787 	    v2 = build_simple_mem_ref (v2);
5788 	    v3 = build_simple_mem_ref (v3);
5789 	    outgoing = build_simple_mem_ref (outgoing);
5790 
5791 	    if (!TREE_CONSTANT (incoming))
5792 	      incoming = build_simple_mem_ref (incoming);
5793 	  }
5794 	else
5795 	  v1 = v2 = v3 = var;
5796 
5797 	/* Determine position in reduction buffer, which may be used
5798 	   by target.  The parser has ensured that this is not a
5799 	   variable-sized type.  */
5800 	fixed_size_mode mode
5801 	  = as_a <fixed_size_mode> (TYPE_MODE (TREE_TYPE (var)));
5802 	unsigned align = GET_MODE_ALIGNMENT (mode) /  BITS_PER_UNIT;
5803 	offset = (offset + align - 1) & ~(align - 1);
5804 	tree off = build_int_cst (sizetype, offset);
5805 	offset += GET_MODE_SIZE (mode);
5806 
5807 	if (!init_code)
5808 	  {
5809 	    init_code = build_int_cst (integer_type_node,
5810 				       IFN_GOACC_REDUCTION_INIT);
5811 	    fini_code = build_int_cst (integer_type_node,
5812 				       IFN_GOACC_REDUCTION_FINI);
5813 	    setup_code = build_int_cst (integer_type_node,
5814 					IFN_GOACC_REDUCTION_SETUP);
5815 	    teardown_code = build_int_cst (integer_type_node,
5816 					   IFN_GOACC_REDUCTION_TEARDOWN);
5817 	  }
5818 
5819 	tree setup_call
5820 	  = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5821 					  TREE_TYPE (var), 6, setup_code,
5822 					  unshare_expr (ref_to_res),
5823 					  incoming, level, op, off);
5824 	tree init_call
5825 	  = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5826 					  TREE_TYPE (var), 6, init_code,
5827 					  unshare_expr (ref_to_res),
5828 					  v1, level, op, off);
5829 	tree fini_call
5830 	  = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5831 					  TREE_TYPE (var), 6, fini_code,
5832 					  unshare_expr (ref_to_res),
5833 					  v2, level, op, off);
5834 	tree teardown_call
5835 	  = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5836 					  TREE_TYPE (var), 6, teardown_code,
5837 					  ref_to_res, v3, level, op, off);
5838 
5839 	gimplify_assign (v1, setup_call, &before_fork);
5840 	gimplify_assign (v2, init_call, &after_fork);
5841 	gimplify_assign (v3, fini_call, &before_join);
5842 	gimplify_assign (outgoing, teardown_call, &after_join);
5843       }
5844 
5845   /* Now stitch things together.  */
5846   gimple_seq_add_seq (fork_seq, before_fork);
5847   if (fork)
5848     gimple_seq_add_stmt (fork_seq, fork);
5849   gimple_seq_add_seq (fork_seq, after_fork);
5850 
5851   gimple_seq_add_seq (join_seq, before_join);
5852   if (join)
5853     gimple_seq_add_stmt (join_seq, join);
5854   gimple_seq_add_seq (join_seq, after_join);
5855 }
5856 
5857 /* Generate code to implement the REDUCTION clauses.  */
5858 
5859 static void
lower_reduction_clauses(tree clauses,gimple_seq * stmt_seqp,omp_context * ctx)5860 lower_reduction_clauses (tree clauses, gimple_seq *stmt_seqp, omp_context *ctx)
5861 {
5862   gimple_seq sub_seq = NULL;
5863   gimple *stmt;
5864   tree x, c;
5865   int count = 0;
5866 
5867   /* OpenACC loop reductions are handled elsewhere.  */
5868   if (is_gimple_omp_oacc (ctx->stmt))
5869     return;
5870 
5871   /* SIMD reductions are handled in lower_rec_input_clauses.  */
5872   if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
5873       && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
5874     return;
5875 
5876   /* First see if there is exactly one reduction clause.  Use OMP_ATOMIC
5877      update in that case, otherwise use a lock.  */
5878   for (c = clauses; c && count < 2; c = OMP_CLAUSE_CHAIN (c))
5879     if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
5880 	&& !OMP_CLAUSE_REDUCTION_TASK (c))
5881       {
5882 	if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
5883 	    || TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
5884 	  {
5885 	    /* Never use OMP_ATOMIC for array reductions or UDRs.  */
5886 	    count = -1;
5887 	    break;
5888 	  }
5889 	count++;
5890       }
5891 
5892   if (count == 0)
5893     return;
5894 
5895   for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
5896     {
5897       tree var, ref, new_var, orig_var;
5898       enum tree_code code;
5899       location_t clause_loc = OMP_CLAUSE_LOCATION (c);
5900 
5901       if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
5902 	  || OMP_CLAUSE_REDUCTION_TASK (c))
5903 	continue;
5904 
5905       enum omp_clause_code ccode = OMP_CLAUSE_REDUCTION;
5906       orig_var = var = OMP_CLAUSE_DECL (c);
5907       if (TREE_CODE (var) == MEM_REF)
5908 	{
5909 	  var = TREE_OPERAND (var, 0);
5910 	  if (TREE_CODE (var) == POINTER_PLUS_EXPR)
5911 	    var = TREE_OPERAND (var, 0);
5912 	  if (TREE_CODE (var) == ADDR_EXPR)
5913 	    var = TREE_OPERAND (var, 0);
5914 	  else
5915 	    {
5916 	      /* If this is a pointer or referenced based array
5917 		 section, the var could be private in the outer
5918 		 context e.g. on orphaned loop construct.  Pretend this
5919 		 is private variable's outer reference.  */
5920 	      ccode = OMP_CLAUSE_PRIVATE;
5921 	      if (TREE_CODE (var) == INDIRECT_REF)
5922 		var = TREE_OPERAND (var, 0);
5923 	    }
5924 	  orig_var = var;
5925 	  if (is_variable_sized (var))
5926 	    {
5927 	      gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
5928 	      var = DECL_VALUE_EXPR (var);
5929 	      gcc_assert (TREE_CODE (var) == INDIRECT_REF);
5930 	      var = TREE_OPERAND (var, 0);
5931 	      gcc_assert (DECL_P (var));
5932 	    }
5933 	}
5934       new_var = lookup_decl (var, ctx);
5935       if (var == OMP_CLAUSE_DECL (c) && omp_is_reference (var))
5936 	new_var = build_simple_mem_ref_loc (clause_loc, new_var);
5937       ref = build_outer_var_ref (var, ctx, ccode);
5938       code = OMP_CLAUSE_REDUCTION_CODE (c);
5939 
5940       /* reduction(-:var) sums up the partial results, so it acts
5941 	 identically to reduction(+:var).  */
5942       if (code == MINUS_EXPR)
5943         code = PLUS_EXPR;
5944 
5945       if (count == 1)
5946 	{
5947 	  tree addr = build_fold_addr_expr_loc (clause_loc, ref);
5948 
5949 	  addr = save_expr (addr);
5950 	  ref = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (addr)), addr);
5951 	  x = fold_build2_loc (clause_loc, code, TREE_TYPE (ref), ref, new_var);
5952 	  x = build2 (OMP_ATOMIC, void_type_node, addr, x);
5953 	  OMP_ATOMIC_MEMORY_ORDER (x) = OMP_MEMORY_ORDER_RELAXED;
5954 	  gimplify_and_add (x, stmt_seqp);
5955 	  return;
5956 	}
5957       else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
5958 	{
5959 	  tree d = OMP_CLAUSE_DECL (c);
5960 	  tree type = TREE_TYPE (d);
5961 	  tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
5962 	  tree i = create_tmp_var (TREE_TYPE (v));
5963 	  tree ptype = build_pointer_type (TREE_TYPE (type));
5964 	  tree bias = TREE_OPERAND (d, 1);
5965 	  d = TREE_OPERAND (d, 0);
5966 	  if (TREE_CODE (d) == POINTER_PLUS_EXPR)
5967 	    {
5968 	      tree b = TREE_OPERAND (d, 1);
5969 	      b = maybe_lookup_decl (b, ctx);
5970 	      if (b == NULL)
5971 		{
5972 		  b = TREE_OPERAND (d, 1);
5973 		  b = maybe_lookup_decl_in_outer_ctx (b, ctx);
5974 		}
5975 	      if (integer_zerop (bias))
5976 		bias = b;
5977 	      else
5978 		{
5979 		  bias = fold_convert_loc (clause_loc, TREE_TYPE (b), bias);
5980 		  bias = fold_build2_loc (clause_loc, PLUS_EXPR,
5981 					  TREE_TYPE (b), b, bias);
5982 		}
5983 	      d = TREE_OPERAND (d, 0);
5984 	    }
5985 	  /* For ref build_outer_var_ref already performs this, so
5986 	     only new_var needs a dereference.  */
5987 	  if (TREE_CODE (d) == INDIRECT_REF)
5988 	    {
5989 	      new_var = build_simple_mem_ref_loc (clause_loc, new_var);
5990 	      gcc_assert (omp_is_reference (var) && var == orig_var);
5991 	    }
5992 	  else if (TREE_CODE (d) == ADDR_EXPR)
5993 	    {
5994 	      if (orig_var == var)
5995 		{
5996 		  new_var = build_fold_addr_expr (new_var);
5997 		  ref = build_fold_addr_expr (ref);
5998 		}
5999 	    }
6000 	  else
6001 	    {
6002 	      gcc_assert (orig_var == var);
6003 	      if (omp_is_reference (var))
6004 		ref = build_fold_addr_expr (ref);
6005 	    }
6006 	  if (DECL_P (v))
6007 	    {
6008 	      tree t = maybe_lookup_decl (v, ctx);
6009 	      if (t)
6010 		v = t;
6011 	      else
6012 		v = maybe_lookup_decl_in_outer_ctx (v, ctx);
6013 	      gimplify_expr (&v, stmt_seqp, NULL, is_gimple_val, fb_rvalue);
6014 	    }
6015 	  if (!integer_zerop (bias))
6016 	    {
6017 	      bias = fold_convert_loc (clause_loc, sizetype, bias);
6018 	      new_var = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
6019 					 TREE_TYPE (new_var), new_var,
6020 					 unshare_expr (bias));
6021 	      ref = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
6022 					 TREE_TYPE (ref), ref, bias);
6023 	    }
6024 	  new_var = fold_convert_loc (clause_loc, ptype, new_var);
6025 	  ref = fold_convert_loc (clause_loc, ptype, ref);
6026 	  tree m = create_tmp_var (ptype);
6027 	  gimplify_assign (m, new_var, stmt_seqp);
6028 	  new_var = m;
6029 	  m = create_tmp_var (ptype);
6030 	  gimplify_assign (m, ref, stmt_seqp);
6031 	  ref = m;
6032 	  gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), stmt_seqp);
6033 	  tree body = create_artificial_label (UNKNOWN_LOCATION);
6034 	  tree end = create_artificial_label (UNKNOWN_LOCATION);
6035 	  gimple_seq_add_stmt (&sub_seq, gimple_build_label (body));
6036 	  tree priv = build_simple_mem_ref_loc (clause_loc, new_var);
6037 	  tree out = build_simple_mem_ref_loc (clause_loc, ref);
6038 	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
6039 	    {
6040 	      tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
6041 	      tree decl_placeholder
6042 		= OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
6043 	      SET_DECL_VALUE_EXPR (placeholder, out);
6044 	      DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
6045 	      SET_DECL_VALUE_EXPR (decl_placeholder, priv);
6046 	      DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
6047 	      lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
6048 	      gimple_seq_add_seq (&sub_seq,
6049 				  OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
6050 	      OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
6051 	      OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
6052 	      OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
6053 	    }
6054 	  else
6055 	    {
6056 	      x = build2 (code, TREE_TYPE (out), out, priv);
6057 	      out = unshare_expr (out);
6058 	      gimplify_assign (out, x, &sub_seq);
6059 	    }
6060 	  gimple *g = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
6061 					   TYPE_SIZE_UNIT (TREE_TYPE (type)));
6062 	  gimple_seq_add_stmt (&sub_seq, g);
6063 	  g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
6064 				   TYPE_SIZE_UNIT (TREE_TYPE (type)));
6065 	  gimple_seq_add_stmt (&sub_seq, g);
6066 	  g = gimple_build_assign (i, PLUS_EXPR, i,
6067 				   build_int_cst (TREE_TYPE (i), 1));
6068 	  gimple_seq_add_stmt (&sub_seq, g);
6069 	  g = gimple_build_cond (LE_EXPR, i, v, body, end);
6070 	  gimple_seq_add_stmt (&sub_seq, g);
6071 	  gimple_seq_add_stmt (&sub_seq, gimple_build_label (end));
6072 	}
6073       else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
6074 	{
6075 	  tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
6076 
6077 	  if (omp_is_reference (var)
6078 	      && !useless_type_conversion_p (TREE_TYPE (placeholder),
6079 					     TREE_TYPE (ref)))
6080 	    ref = build_fold_addr_expr_loc (clause_loc, ref);
6081 	  SET_DECL_VALUE_EXPR (placeholder, ref);
6082 	  DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
6083 	  lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
6084 	  gimple_seq_add_seq (&sub_seq, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
6085 	  OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
6086 	  OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
6087 	}
6088       else
6089 	{
6090 	  x = build2 (code, TREE_TYPE (ref), ref, new_var);
6091 	  ref = build_outer_var_ref (var, ctx);
6092 	  gimplify_assign (ref, x, &sub_seq);
6093 	}
6094     }
6095 
6096   stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START),
6097 			    0);
6098   gimple_seq_add_stmt (stmt_seqp, stmt);
6099 
6100   gimple_seq_add_seq (stmt_seqp, sub_seq);
6101 
6102   stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END),
6103 			    0);
6104   gimple_seq_add_stmt (stmt_seqp, stmt);
6105 }
6106 
6107 
6108 /* Generate code to implement the COPYPRIVATE clauses.  */
6109 
6110 static void
lower_copyprivate_clauses(tree clauses,gimple_seq * slist,gimple_seq * rlist,omp_context * ctx)6111 lower_copyprivate_clauses (tree clauses, gimple_seq *slist, gimple_seq *rlist,
6112 			    omp_context *ctx)
6113 {
6114   tree c;
6115 
6116   for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
6117     {
6118       tree var, new_var, ref, x;
6119       bool by_ref;
6120       location_t clause_loc = OMP_CLAUSE_LOCATION (c);
6121 
6122       if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYPRIVATE)
6123 	continue;
6124 
6125       var = OMP_CLAUSE_DECL (c);
6126       by_ref = use_pointer_for_field (var, NULL);
6127 
6128       ref = build_sender_ref (var, ctx);
6129       x = new_var = lookup_decl_in_outer_ctx (var, ctx);
6130       if (by_ref)
6131 	{
6132 	  x = build_fold_addr_expr_loc (clause_loc, new_var);
6133 	  x = fold_convert_loc (clause_loc, TREE_TYPE (ref), x);
6134 	}
6135       gimplify_assign (ref, x, slist);
6136 
6137       ref = build_receiver_ref (var, false, ctx);
6138       if (by_ref)
6139 	{
6140 	  ref = fold_convert_loc (clause_loc,
6141 				  build_pointer_type (TREE_TYPE (new_var)),
6142 				  ref);
6143 	  ref = build_fold_indirect_ref_loc (clause_loc, ref);
6144 	}
6145       if (omp_is_reference (var))
6146 	{
6147 	  ref = fold_convert_loc (clause_loc, TREE_TYPE (new_var), ref);
6148 	  ref = build_simple_mem_ref_loc (clause_loc, ref);
6149 	  new_var = build_simple_mem_ref_loc (clause_loc, new_var);
6150 	}
6151       x = lang_hooks.decls.omp_clause_assign_op (c, new_var, ref);
6152       gimplify_and_add (x, rlist);
6153     }
6154 }
6155 
6156 
6157 /* Generate code to implement the clauses, FIRSTPRIVATE, COPYIN, LASTPRIVATE,
6158    and REDUCTION from the sender (aka parent) side.  */
6159 
6160 static void
lower_send_clauses(tree clauses,gimple_seq * ilist,gimple_seq * olist,omp_context * ctx)6161 lower_send_clauses (tree clauses, gimple_seq *ilist, gimple_seq *olist,
6162     		    omp_context *ctx)
6163 {
6164   tree c, t;
6165   int ignored_looptemp = 0;
6166   bool is_taskloop = false;
6167 
6168   /* For taskloop, ignore first two _looptemp_ clauses, those are initialized
6169      by GOMP_taskloop.  */
6170   if (is_task_ctx (ctx) && gimple_omp_task_taskloop_p (ctx->stmt))
6171     {
6172       ignored_looptemp = 2;
6173       is_taskloop = true;
6174     }
6175 
6176   for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
6177     {
6178       tree val, ref, x, var;
6179       bool by_ref, do_in = false, do_out = false;
6180       location_t clause_loc = OMP_CLAUSE_LOCATION (c);
6181 
6182       switch (OMP_CLAUSE_CODE (c))
6183 	{
6184 	case OMP_CLAUSE_PRIVATE:
6185 	  if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
6186 	    break;
6187 	  continue;
6188 	case OMP_CLAUSE_FIRSTPRIVATE:
6189 	case OMP_CLAUSE_COPYIN:
6190 	case OMP_CLAUSE_LASTPRIVATE:
6191 	case OMP_CLAUSE_IN_REDUCTION:
6192 	case OMP_CLAUSE__REDUCTEMP_:
6193 	  break;
6194 	case OMP_CLAUSE_REDUCTION:
6195 	  if (is_task_ctx (ctx) || OMP_CLAUSE_REDUCTION_TASK (c))
6196 	    continue;
6197 	  break;
6198 	case OMP_CLAUSE_SHARED:
6199 	  if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
6200 	    break;
6201 	  continue;
6202 	case OMP_CLAUSE__LOOPTEMP_:
6203 	  if (ignored_looptemp)
6204 	    {
6205 	      ignored_looptemp--;
6206 	      continue;
6207 	    }
6208 	  break;
6209 	default:
6210 	  continue;
6211 	}
6212 
6213       val = OMP_CLAUSE_DECL (c);
6214       if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6215 	   || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION)
6216 	  && TREE_CODE (val) == MEM_REF)
6217 	{
6218 	  val = TREE_OPERAND (val, 0);
6219 	  if (TREE_CODE (val) == POINTER_PLUS_EXPR)
6220 	    val = TREE_OPERAND (val, 0);
6221 	  if (TREE_CODE (val) == INDIRECT_REF
6222 	      || TREE_CODE (val) == ADDR_EXPR)
6223 	    val = TREE_OPERAND (val, 0);
6224 	  if (is_variable_sized (val))
6225 	    continue;
6226 	}
6227 
6228       /* For OMP_CLAUSE_SHARED_FIRSTPRIVATE, look beyond the
6229 	 outer taskloop region.  */
6230       omp_context *ctx_for_o = ctx;
6231       if (is_taskloop
6232 	  && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
6233 	  && OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
6234 	ctx_for_o = ctx->outer;
6235 
6236       var = lookup_decl_in_outer_ctx (val, ctx_for_o);
6237 
6238       if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYIN
6239 	  && is_global_var (var)
6240 	  && (val == OMP_CLAUSE_DECL (c)
6241 	      || !is_task_ctx (ctx)
6242 	      || (TREE_CODE (TREE_TYPE (val)) != POINTER_TYPE
6243 		  && (TREE_CODE (TREE_TYPE (val)) != REFERENCE_TYPE
6244 		      || (TREE_CODE (TREE_TYPE (TREE_TYPE (val)))
6245 			  != POINTER_TYPE)))))
6246 	continue;
6247 
6248       t = omp_member_access_dummy_var (var);
6249       if (t)
6250 	{
6251 	  var = DECL_VALUE_EXPR (var);
6252 	  tree o = maybe_lookup_decl_in_outer_ctx (t, ctx_for_o);
6253 	  if (o != t)
6254 	    var = unshare_and_remap (var, t, o);
6255 	  else
6256 	    var = unshare_expr (var);
6257 	}
6258 
6259       if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
6260 	{
6261 	  /* Handle taskloop firstprivate/lastprivate, where the
6262 	     lastprivate on GIMPLE_OMP_TASK is represented as
6263 	     OMP_CLAUSE_SHARED_FIRSTPRIVATE.  */
6264 	  tree f = lookup_sfield ((splay_tree_key) &DECL_UID (val), ctx);
6265 	  x = omp_build_component_ref (ctx->sender_decl, f);
6266 	  if (use_pointer_for_field (val, ctx))
6267 	    var = build_fold_addr_expr (var);
6268 	  gimplify_assign (x, var, ilist);
6269 	  DECL_ABSTRACT_ORIGIN (f) = NULL;
6270 	  continue;
6271 	}
6272 
6273       if (((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
6274 	    && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IN_REDUCTION)
6275 	   || val == OMP_CLAUSE_DECL (c))
6276 	  && is_variable_sized (val))
6277 	continue;
6278       by_ref = use_pointer_for_field (val, NULL);
6279 
6280       switch (OMP_CLAUSE_CODE (c))
6281 	{
6282 	case OMP_CLAUSE_FIRSTPRIVATE:
6283 	  if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c)
6284 	      && !by_ref
6285 	      && is_task_ctx (ctx))
6286 	    TREE_NO_WARNING (var) = 1;
6287 	  do_in = true;
6288 	  break;
6289 
6290 	case OMP_CLAUSE_PRIVATE:
6291 	case OMP_CLAUSE_COPYIN:
6292 	case OMP_CLAUSE__LOOPTEMP_:
6293 	case OMP_CLAUSE__REDUCTEMP_:
6294 	  do_in = true;
6295 	  break;
6296 
6297 	case OMP_CLAUSE_LASTPRIVATE:
6298 	  if (by_ref || omp_is_reference (val))
6299 	    {
6300 	      if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
6301 		continue;
6302 	      do_in = true;
6303 	    }
6304 	  else
6305 	    {
6306 	      do_out = true;
6307 	      if (lang_hooks.decls.omp_private_outer_ref (val))
6308 		do_in = true;
6309 	    }
6310 	  break;
6311 
6312 	case OMP_CLAUSE_REDUCTION:
6313 	case OMP_CLAUSE_IN_REDUCTION:
6314 	  do_in = true;
6315 	  if (val == OMP_CLAUSE_DECL (c))
6316 	    {
6317 	      if (is_task_ctx (ctx))
6318 		by_ref = use_pointer_for_field (val, ctx);
6319 	      else
6320 		do_out = !(by_ref || omp_is_reference (val));
6321 	    }
6322 	  else
6323 	    by_ref = TREE_CODE (TREE_TYPE (val)) == ARRAY_TYPE;
6324 	  break;
6325 
6326 	default:
6327 	  gcc_unreachable ();
6328 	}
6329 
6330       if (do_in)
6331 	{
6332 	  ref = build_sender_ref (val, ctx);
6333 	  x = by_ref ? build_fold_addr_expr_loc (clause_loc, var) : var;
6334 	  gimplify_assign (ref, x, ilist);
6335 	  if (is_task_ctx (ctx))
6336 	    DECL_ABSTRACT_ORIGIN (TREE_OPERAND (ref, 1)) = NULL;
6337 	}
6338 
6339       if (do_out)
6340 	{
6341 	  ref = build_sender_ref (val, ctx);
6342 	  gimplify_assign (var, ref, olist);
6343 	}
6344     }
6345 }
6346 
6347 /* Generate code to implement SHARED from the sender (aka parent)
6348    side.  This is trickier, since GIMPLE_OMP_PARALLEL_CLAUSES doesn't
6349    list things that got automatically shared.  */
6350 
6351 static void
lower_send_shared_vars(gimple_seq * ilist,gimple_seq * olist,omp_context * ctx)6352 lower_send_shared_vars (gimple_seq *ilist, gimple_seq *olist, omp_context *ctx)
6353 {
6354   tree var, ovar, nvar, t, f, x, record_type;
6355 
6356   if (ctx->record_type == NULL)
6357     return;
6358 
6359   record_type = ctx->srecord_type ? ctx->srecord_type : ctx->record_type;
6360   for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
6361     {
6362       ovar = DECL_ABSTRACT_ORIGIN (f);
6363       if (!ovar || TREE_CODE (ovar) == FIELD_DECL)
6364 	continue;
6365 
6366       nvar = maybe_lookup_decl (ovar, ctx);
6367       if (!nvar || !DECL_HAS_VALUE_EXPR_P (nvar))
6368 	continue;
6369 
6370       /* If CTX is a nested parallel directive.  Find the immediately
6371 	 enclosing parallel or workshare construct that contains a
6372 	 mapping for OVAR.  */
6373       var = lookup_decl_in_outer_ctx (ovar, ctx);
6374 
6375       t = omp_member_access_dummy_var (var);
6376       if (t)
6377 	{
6378 	  var = DECL_VALUE_EXPR (var);
6379 	  tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
6380 	  if (o != t)
6381 	    var = unshare_and_remap (var, t, o);
6382 	  else
6383 	    var = unshare_expr (var);
6384 	}
6385 
6386       if (use_pointer_for_field (ovar, ctx))
6387 	{
6388 	  x = build_sender_ref (ovar, ctx);
6389 	  var = build_fold_addr_expr (var);
6390 	  gimplify_assign (x, var, ilist);
6391 	}
6392       else
6393 	{
6394 	  x = build_sender_ref (ovar, ctx);
6395 	  gimplify_assign (x, var, ilist);
6396 
6397 	  if (!TREE_READONLY (var)
6398 	      /* We don't need to receive a new reference to a result
6399 	         or parm decl.  In fact we may not store to it as we will
6400 		 invalidate any pending RSO and generate wrong gimple
6401 		 during inlining.  */
6402 	      && !((TREE_CODE (var) == RESULT_DECL
6403 		    || TREE_CODE (var) == PARM_DECL)
6404 		   && DECL_BY_REFERENCE (var)))
6405 	    {
6406 	      x = build_sender_ref (ovar, ctx);
6407 	      gimplify_assign (var, x, olist);
6408 	    }
6409 	}
6410     }
6411 }
6412 
6413 /* Emit an OpenACC head marker call, encapulating the partitioning and
6414    other information that must be processed by the target compiler.
6415    Return the maximum number of dimensions the associated loop might
6416    be partitioned over.  */
6417 
6418 static unsigned
lower_oacc_head_mark(location_t loc,tree ddvar,tree clauses,gimple_seq * seq,omp_context * ctx)6419 lower_oacc_head_mark (location_t loc, tree ddvar, tree clauses,
6420 		      gimple_seq *seq, omp_context *ctx)
6421 {
6422   unsigned levels = 0;
6423   unsigned tag = 0;
6424   tree gang_static = NULL_TREE;
6425   auto_vec<tree, 5> args;
6426 
6427   args.quick_push (build_int_cst
6428 		   (integer_type_node, IFN_UNIQUE_OACC_HEAD_MARK));
6429   args.quick_push (ddvar);
6430   for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
6431     {
6432       switch (OMP_CLAUSE_CODE (c))
6433 	{
6434 	case OMP_CLAUSE_GANG:
6435 	  tag |= OLF_DIM_GANG;
6436 	  gang_static = OMP_CLAUSE_GANG_STATIC_EXPR (c);
6437 	  /* static:* is represented by -1, and we can ignore it, as
6438 	     scheduling is always static.  */
6439 	  if (gang_static && integer_minus_onep (gang_static))
6440 	    gang_static = NULL_TREE;
6441 	  levels++;
6442 	  break;
6443 
6444 	case OMP_CLAUSE_WORKER:
6445 	  tag |= OLF_DIM_WORKER;
6446 	  levels++;
6447 	  break;
6448 
6449 	case OMP_CLAUSE_VECTOR:
6450 	  tag |= OLF_DIM_VECTOR;
6451 	  levels++;
6452 	  break;
6453 
6454 	case OMP_CLAUSE_SEQ:
6455 	  tag |= OLF_SEQ;
6456 	  break;
6457 
6458 	case OMP_CLAUSE_AUTO:
6459 	  tag |= OLF_AUTO;
6460 	  break;
6461 
6462 	case OMP_CLAUSE_INDEPENDENT:
6463 	  tag |= OLF_INDEPENDENT;
6464 	  break;
6465 
6466 	case OMP_CLAUSE_TILE:
6467 	  tag |= OLF_TILE;
6468 	  break;
6469 
6470 	default:
6471 	  continue;
6472 	}
6473     }
6474 
6475   if (gang_static)
6476     {
6477       if (DECL_P (gang_static))
6478 	gang_static = build_outer_var_ref (gang_static, ctx);
6479       tag |= OLF_GANG_STATIC;
6480     }
6481 
6482   /* In a parallel region, loops are implicitly INDEPENDENT.  */
6483   omp_context *tgt = enclosing_target_ctx (ctx);
6484   if (!tgt || is_oacc_parallel (tgt))
6485     tag |= OLF_INDEPENDENT;
6486 
6487   if (tag & OLF_TILE)
6488     /* Tiling could use all 3 levels.  */
6489     levels = 3;
6490   else
6491     {
6492       /* A loop lacking SEQ, GANG, WORKER and/or VECTOR could be AUTO.
6493 	 Ensure at least one level, or 2 for possible auto
6494 	 partitioning */
6495       bool maybe_auto = !(tag & (((GOMP_DIM_MASK (GOMP_DIM_MAX) - 1)
6496 				  << OLF_DIM_BASE) | OLF_SEQ));
6497 
6498       if (levels < 1u + maybe_auto)
6499 	levels = 1u + maybe_auto;
6500     }
6501 
6502   args.quick_push (build_int_cst (integer_type_node, levels));
6503   args.quick_push (build_int_cst (integer_type_node, tag));
6504   if (gang_static)
6505     args.quick_push (gang_static);
6506 
6507   gcall *call = gimple_build_call_internal_vec (IFN_UNIQUE, args);
6508   gimple_set_location (call, loc);
6509   gimple_set_lhs (call, ddvar);
6510   gimple_seq_add_stmt (seq, call);
6511 
6512   return levels;
6513 }
6514 
6515 /* Emit an OpenACC lopp head or tail marker to SEQ.  LEVEL is the
6516    partitioning level of the enclosed region.  */
6517 
6518 static void
lower_oacc_loop_marker(location_t loc,tree ddvar,bool head,tree tofollow,gimple_seq * seq)6519 lower_oacc_loop_marker (location_t loc, tree ddvar, bool head,
6520 			tree tofollow, gimple_seq *seq)
6521 {
6522   int marker_kind = (head ? IFN_UNIQUE_OACC_HEAD_MARK
6523 		     : IFN_UNIQUE_OACC_TAIL_MARK);
6524   tree marker = build_int_cst (integer_type_node, marker_kind);
6525   int nargs = 2 + (tofollow != NULL_TREE);
6526   gcall *call = gimple_build_call_internal (IFN_UNIQUE, nargs,
6527 					    marker, ddvar, tofollow);
6528   gimple_set_location (call, loc);
6529   gimple_set_lhs (call, ddvar);
6530   gimple_seq_add_stmt (seq, call);
6531 }
6532 
6533 /* Generate the before and after OpenACC loop sequences.  CLAUSES are
6534    the loop clauses, from which we extract reductions.  Initialize
6535    HEAD and TAIL.  */
6536 
6537 static void
lower_oacc_head_tail(location_t loc,tree clauses,gimple_seq * head,gimple_seq * tail,omp_context * ctx)6538 lower_oacc_head_tail (location_t loc, tree clauses,
6539 		      gimple_seq *head, gimple_seq *tail, omp_context *ctx)
6540 {
6541   bool inner = false;
6542   tree ddvar = create_tmp_var (integer_type_node, ".data_dep");
6543   gimple_seq_add_stmt (head, gimple_build_assign (ddvar, integer_zero_node));
6544 
6545   unsigned count = lower_oacc_head_mark (loc, ddvar, clauses, head, ctx);
6546   tree fork_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_FORK);
6547   tree join_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_JOIN);
6548 
6549   gcc_assert (count);
6550   for (unsigned done = 1; count; count--, done++)
6551     {
6552       gimple_seq fork_seq = NULL;
6553       gimple_seq join_seq = NULL;
6554 
6555       tree place = build_int_cst (integer_type_node, -1);
6556       gcall *fork = gimple_build_call_internal (IFN_UNIQUE, 3,
6557 						fork_kind, ddvar, place);
6558       gimple_set_location (fork, loc);
6559       gimple_set_lhs (fork, ddvar);
6560 
6561       gcall *join = gimple_build_call_internal (IFN_UNIQUE, 3,
6562 						join_kind, ddvar, place);
6563       gimple_set_location (join, loc);
6564       gimple_set_lhs (join, ddvar);
6565 
6566       /* Mark the beginning of this level sequence.  */
6567       if (inner)
6568 	lower_oacc_loop_marker (loc, ddvar, true,
6569 				build_int_cst (integer_type_node, count),
6570 				&fork_seq);
6571       lower_oacc_loop_marker (loc, ddvar, false,
6572 			      build_int_cst (integer_type_node, done),
6573 			      &join_seq);
6574 
6575       lower_oacc_reductions (loc, clauses, place, inner,
6576 			     fork, join, &fork_seq, &join_seq,  ctx);
6577 
6578       /* Append this level to head. */
6579       gimple_seq_add_seq (head, fork_seq);
6580       /* Prepend it to tail.  */
6581       gimple_seq_add_seq (&join_seq, *tail);
6582       *tail = join_seq;
6583 
6584       inner = true;
6585     }
6586 
6587   /* Mark the end of the sequence.  */
6588   lower_oacc_loop_marker (loc, ddvar, true, NULL_TREE, head);
6589   lower_oacc_loop_marker (loc, ddvar, false, NULL_TREE, tail);
6590 }
6591 
6592 /* If exceptions are enabled, wrap the statements in BODY in a MUST_NOT_THROW
6593    catch handler and return it.  This prevents programs from violating the
6594    structured block semantics with throws.  */
6595 
6596 static gimple_seq
maybe_catch_exception(gimple_seq body)6597 maybe_catch_exception (gimple_seq body)
6598 {
6599   gimple *g;
6600   tree decl;
6601 
6602   if (!flag_exceptions)
6603     return body;
6604 
6605   if (lang_hooks.eh_protect_cleanup_actions != NULL)
6606     decl = lang_hooks.eh_protect_cleanup_actions ();
6607   else
6608     decl = builtin_decl_explicit (BUILT_IN_TRAP);
6609 
6610   g = gimple_build_eh_must_not_throw (decl);
6611   g = gimple_build_try (body, gimple_seq_alloc_with_stmt (g),
6612       			GIMPLE_TRY_CATCH);
6613 
6614  return gimple_seq_alloc_with_stmt (g);
6615 }
6616 
6617 
6618 /* Routines to lower OMP directives into OMP-GIMPLE.  */
6619 
6620 /* If ctx is a worksharing context inside of a cancellable parallel
6621    region and it isn't nowait, add lhs to its GIMPLE_OMP_RETURN
6622    and conditional branch to parallel's cancel_label to handle
6623    cancellation in the implicit barrier.  */
6624 
6625 static void
maybe_add_implicit_barrier_cancel(omp_context * ctx,gimple * omp_return,gimple_seq * body)6626 maybe_add_implicit_barrier_cancel (omp_context *ctx, gimple *omp_return,
6627 				   gimple_seq *body)
6628 {
6629   gcc_assert (gimple_code (omp_return) == GIMPLE_OMP_RETURN);
6630   if (gimple_omp_return_nowait_p (omp_return))
6631     return;
6632   for (omp_context *outer = ctx->outer; outer; outer = outer->outer)
6633     if (gimple_code (outer->stmt) == GIMPLE_OMP_PARALLEL
6634 	&& outer->cancellable)
6635       {
6636 	tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_CANCEL);
6637 	tree c_bool_type = TREE_TYPE (TREE_TYPE (fndecl));
6638 	tree lhs = create_tmp_var (c_bool_type);
6639 	gimple_omp_return_set_lhs (omp_return, lhs);
6640 	tree fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
6641 	gimple *g = gimple_build_cond (NE_EXPR, lhs,
6642 				       fold_convert (c_bool_type,
6643 						     boolean_false_node),
6644 				       outer->cancel_label, fallthru_label);
6645 	gimple_seq_add_stmt (body, g);
6646 	gimple_seq_add_stmt (body, gimple_build_label (fallthru_label));
6647       }
6648     else if (gimple_code (outer->stmt) != GIMPLE_OMP_TASKGROUP)
6649       return;
6650 }
6651 
6652 /* Find the first task_reduction or reduction clause or return NULL
6653    if there are none.  */
6654 
6655 static inline tree
omp_task_reductions_find_first(tree clauses,enum tree_code code,enum omp_clause_code ccode)6656 omp_task_reductions_find_first (tree clauses, enum tree_code code,
6657 				enum omp_clause_code ccode)
6658 {
6659   while (1)
6660     {
6661       clauses = omp_find_clause (clauses, ccode);
6662       if (clauses == NULL_TREE)
6663 	return NULL_TREE;
6664       if (ccode != OMP_CLAUSE_REDUCTION
6665 	  || code == OMP_TASKLOOP
6666 	  || OMP_CLAUSE_REDUCTION_TASK (clauses))
6667 	return clauses;
6668       clauses = OMP_CLAUSE_CHAIN (clauses);
6669     }
6670 }
6671 
6672 static void lower_omp_task_reductions (omp_context *, enum tree_code, tree,
6673 				       gimple_seq *, gimple_seq *);
6674 
6675 /* Lower the OpenMP sections directive in the current statement in GSI_P.
6676    CTX is the enclosing OMP context for the current statement.  */
6677 
6678 static void
lower_omp_sections(gimple_stmt_iterator * gsi_p,omp_context * ctx)6679 lower_omp_sections (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6680 {
6681   tree block, control;
6682   gimple_stmt_iterator tgsi;
6683   gomp_sections *stmt;
6684   gimple *t;
6685   gbind *new_stmt, *bind;
6686   gimple_seq ilist, dlist, olist, tred_dlist = NULL, new_body;
6687 
6688   stmt = as_a <gomp_sections *> (gsi_stmt (*gsi_p));
6689 
6690   push_gimplify_context ();
6691 
6692   dlist = NULL;
6693   ilist = NULL;
6694 
6695   tree rclauses
6696     = omp_task_reductions_find_first (gimple_omp_sections_clauses (stmt),
6697 				      OMP_SECTIONS, OMP_CLAUSE_REDUCTION);
6698   tree rtmp = NULL_TREE;
6699   if (rclauses)
6700     {
6701       tree type = build_pointer_type (pointer_sized_int_node);
6702       tree temp = create_tmp_var (type);
6703       tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
6704       OMP_CLAUSE_DECL (c) = temp;
6705       OMP_CLAUSE_CHAIN (c) = gimple_omp_sections_clauses (stmt);
6706       gimple_omp_sections_set_clauses (stmt, c);
6707       lower_omp_task_reductions (ctx, OMP_SECTIONS,
6708 				 gimple_omp_sections_clauses (stmt),
6709 				 &ilist, &tred_dlist);
6710       rclauses = c;
6711       rtmp = make_ssa_name (type);
6712       gimple_seq_add_stmt (&ilist, gimple_build_assign (rtmp, temp));
6713     }
6714 
6715   lower_rec_input_clauses (gimple_omp_sections_clauses (stmt),
6716       			   &ilist, &dlist, ctx, NULL);
6717 
6718   new_body = gimple_omp_body (stmt);
6719   gimple_omp_set_body (stmt, NULL);
6720   tgsi = gsi_start (new_body);
6721   for (; !gsi_end_p (tgsi); gsi_next (&tgsi))
6722     {
6723       omp_context *sctx;
6724       gimple *sec_start;
6725 
6726       sec_start = gsi_stmt (tgsi);
6727       sctx = maybe_lookup_ctx (sec_start);
6728       gcc_assert (sctx);
6729 
6730       lower_omp (gimple_omp_body_ptr (sec_start), sctx);
6731       gsi_insert_seq_after (&tgsi, gimple_omp_body (sec_start),
6732 			    GSI_CONTINUE_LINKING);
6733       gimple_omp_set_body (sec_start, NULL);
6734 
6735       if (gsi_one_before_end_p (tgsi))
6736 	{
6737 	  gimple_seq l = NULL;
6738 	  lower_lastprivate_clauses (gimple_omp_sections_clauses (stmt), NULL,
6739 				     &l, ctx);
6740 	  gsi_insert_seq_after (&tgsi, l, GSI_CONTINUE_LINKING);
6741 	  gimple_omp_section_set_last (sec_start);
6742 	}
6743 
6744       gsi_insert_after (&tgsi, gimple_build_omp_return (false),
6745 			GSI_CONTINUE_LINKING);
6746     }
6747 
6748   block = make_node (BLOCK);
6749   bind = gimple_build_bind (NULL, new_body, block);
6750 
6751   olist = NULL;
6752   lower_reduction_clauses (gimple_omp_sections_clauses (stmt), &olist, ctx);
6753 
6754   block = make_node (BLOCK);
6755   new_stmt = gimple_build_bind (NULL, NULL, block);
6756   gsi_replace (gsi_p, new_stmt, true);
6757 
6758   pop_gimplify_context (new_stmt);
6759   gimple_bind_append_vars (new_stmt, ctx->block_vars);
6760   BLOCK_VARS (block) = gimple_bind_vars (bind);
6761   if (BLOCK_VARS (block))
6762     TREE_USED (block) = 1;
6763 
6764   new_body = NULL;
6765   gimple_seq_add_seq (&new_body, ilist);
6766   gimple_seq_add_stmt (&new_body, stmt);
6767   gimple_seq_add_stmt (&new_body, gimple_build_omp_sections_switch ());
6768   gimple_seq_add_stmt (&new_body, bind);
6769 
6770   control = create_tmp_var (unsigned_type_node, ".section");
6771   t = gimple_build_omp_continue (control, control);
6772   gimple_omp_sections_set_control (stmt, control);
6773   gimple_seq_add_stmt (&new_body, t);
6774 
6775   gimple_seq_add_seq (&new_body, olist);
6776   if (ctx->cancellable)
6777     gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
6778   gimple_seq_add_seq (&new_body, dlist);
6779 
6780   new_body = maybe_catch_exception (new_body);
6781 
6782   bool nowait = omp_find_clause (gimple_omp_sections_clauses (stmt),
6783 				 OMP_CLAUSE_NOWAIT) != NULL_TREE;
6784   t = gimple_build_omp_return (nowait);
6785   gimple_seq_add_stmt (&new_body, t);
6786   gimple_seq_add_seq (&new_body, tred_dlist);
6787   maybe_add_implicit_barrier_cancel (ctx, t, &new_body);
6788 
6789   if (rclauses)
6790     OMP_CLAUSE_DECL (rclauses) = rtmp;
6791 
6792   gimple_bind_set_body (new_stmt, new_body);
6793 }
6794 
6795 
6796 /* A subroutine of lower_omp_single.  Expand the simple form of
6797    a GIMPLE_OMP_SINGLE, without a copyprivate clause:
6798 
6799      	if (GOMP_single_start ())
6800 	  BODY;
6801 	[ GOMP_barrier (); ]	-> unless 'nowait' is present.
6802 
6803   FIXME.  It may be better to delay expanding the logic of this until
6804   pass_expand_omp.  The expanded logic may make the job more difficult
6805   to a synchronization analysis pass.  */
6806 
6807 static void
lower_omp_single_simple(gomp_single * single_stmt,gimple_seq * pre_p)6808 lower_omp_single_simple (gomp_single *single_stmt, gimple_seq *pre_p)
6809 {
6810   location_t loc = gimple_location (single_stmt);
6811   tree tlabel = create_artificial_label (loc);
6812   tree flabel = create_artificial_label (loc);
6813   gimple *call, *cond;
6814   tree lhs, decl;
6815 
6816   decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_START);
6817   lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (decl)));
6818   call = gimple_build_call (decl, 0);
6819   gimple_call_set_lhs (call, lhs);
6820   gimple_seq_add_stmt (pre_p, call);
6821 
6822   cond = gimple_build_cond (EQ_EXPR, lhs,
6823 			    fold_convert_loc (loc, TREE_TYPE (lhs),
6824 					      boolean_true_node),
6825 			    tlabel, flabel);
6826   gimple_seq_add_stmt (pre_p, cond);
6827   gimple_seq_add_stmt (pre_p, gimple_build_label (tlabel));
6828   gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
6829   gimple_seq_add_stmt (pre_p, gimple_build_label (flabel));
6830 }
6831 
6832 
6833 /* A subroutine of lower_omp_single.  Expand the simple form of
6834    a GIMPLE_OMP_SINGLE, with a copyprivate clause:
6835 
6836 	#pragma omp single copyprivate (a, b, c)
6837 
6838    Create a new structure to hold copies of 'a', 'b' and 'c' and emit:
6839 
6840       {
6841 	if ((copyout_p = GOMP_single_copy_start ()) == NULL)
6842 	  {
6843 	    BODY;
6844 	    copyout.a = a;
6845 	    copyout.b = b;
6846 	    copyout.c = c;
6847 	    GOMP_single_copy_end (&copyout);
6848 	  }
6849 	else
6850 	  {
6851 	    a = copyout_p->a;
6852 	    b = copyout_p->b;
6853 	    c = copyout_p->c;
6854 	  }
6855 	GOMP_barrier ();
6856       }
6857 
6858   FIXME.  It may be better to delay expanding the logic of this until
6859   pass_expand_omp.  The expanded logic may make the job more difficult
6860   to a synchronization analysis pass.  */
6861 
6862 static void
lower_omp_single_copy(gomp_single * single_stmt,gimple_seq * pre_p,omp_context * ctx)6863 lower_omp_single_copy (gomp_single *single_stmt, gimple_seq *pre_p,
6864 		       omp_context *ctx)
6865 {
6866   tree ptr_type, t, l0, l1, l2, bfn_decl;
6867   gimple_seq copyin_seq;
6868   location_t loc = gimple_location (single_stmt);
6869 
6870   ctx->sender_decl = create_tmp_var (ctx->record_type, ".omp_copy_o");
6871 
6872   ptr_type = build_pointer_type (ctx->record_type);
6873   ctx->receiver_decl = create_tmp_var (ptr_type, ".omp_copy_i");
6874 
6875   l0 = create_artificial_label (loc);
6876   l1 = create_artificial_label (loc);
6877   l2 = create_artificial_label (loc);
6878 
6879   bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_START);
6880   t = build_call_expr_loc (loc, bfn_decl, 0);
6881   t = fold_convert_loc (loc, ptr_type, t);
6882   gimplify_assign (ctx->receiver_decl, t, pre_p);
6883 
6884   t = build2 (EQ_EXPR, boolean_type_node, ctx->receiver_decl,
6885 	      build_int_cst (ptr_type, 0));
6886   t = build3 (COND_EXPR, void_type_node, t,
6887 	      build_and_jump (&l0), build_and_jump (&l1));
6888   gimplify_and_add (t, pre_p);
6889 
6890   gimple_seq_add_stmt (pre_p, gimple_build_label (l0));
6891 
6892   gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
6893 
6894   copyin_seq = NULL;
6895   lower_copyprivate_clauses (gimple_omp_single_clauses (single_stmt), pre_p,
6896 			      &copyin_seq, ctx);
6897 
6898   t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
6899   bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_END);
6900   t = build_call_expr_loc (loc, bfn_decl, 1, t);
6901   gimplify_and_add (t, pre_p);
6902 
6903   t = build_and_jump (&l2);
6904   gimplify_and_add (t, pre_p);
6905 
6906   gimple_seq_add_stmt (pre_p, gimple_build_label (l1));
6907 
6908   gimple_seq_add_seq (pre_p, copyin_seq);
6909 
6910   gimple_seq_add_stmt (pre_p, gimple_build_label (l2));
6911 }
6912 
6913 
6914 /* Expand code for an OpenMP single directive.  */
6915 
6916 static void
lower_omp_single(gimple_stmt_iterator * gsi_p,omp_context * ctx)6917 lower_omp_single (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6918 {
6919   tree block;
6920   gomp_single *single_stmt = as_a <gomp_single *> (gsi_stmt (*gsi_p));
6921   gbind *bind;
6922   gimple_seq bind_body, bind_body_tail = NULL, dlist;
6923 
6924   push_gimplify_context ();
6925 
6926   block = make_node (BLOCK);
6927   bind = gimple_build_bind (NULL, NULL, block);
6928   gsi_replace (gsi_p, bind, true);
6929   bind_body = NULL;
6930   dlist = NULL;
6931   lower_rec_input_clauses (gimple_omp_single_clauses (single_stmt),
6932 			   &bind_body, &dlist, ctx, NULL);
6933   lower_omp (gimple_omp_body_ptr (single_stmt), ctx);
6934 
6935   gimple_seq_add_stmt (&bind_body, single_stmt);
6936 
6937   if (ctx->record_type)
6938     lower_omp_single_copy (single_stmt, &bind_body, ctx);
6939   else
6940     lower_omp_single_simple (single_stmt, &bind_body);
6941 
6942   gimple_omp_set_body (single_stmt, NULL);
6943 
6944   gimple_seq_add_seq (&bind_body, dlist);
6945 
6946   bind_body = maybe_catch_exception (bind_body);
6947 
6948   bool nowait = omp_find_clause (gimple_omp_single_clauses (single_stmt),
6949 				 OMP_CLAUSE_NOWAIT) != NULL_TREE;
6950   gimple *g = gimple_build_omp_return (nowait);
6951   gimple_seq_add_stmt (&bind_body_tail, g);
6952   maybe_add_implicit_barrier_cancel (ctx, g, &bind_body_tail);
6953   if (ctx->record_type)
6954     {
6955       gimple_stmt_iterator gsi = gsi_start (bind_body_tail);
6956       tree clobber = build_constructor (ctx->record_type, NULL);
6957       TREE_THIS_VOLATILE (clobber) = 1;
6958       gsi_insert_after (&gsi, gimple_build_assign (ctx->sender_decl,
6959 						   clobber), GSI_SAME_STMT);
6960     }
6961   gimple_seq_add_seq (&bind_body, bind_body_tail);
6962   gimple_bind_set_body (bind, bind_body);
6963 
6964   pop_gimplify_context (bind);
6965 
6966   gimple_bind_append_vars (bind, ctx->block_vars);
6967   BLOCK_VARS (block) = ctx->block_vars;
6968   if (BLOCK_VARS (block))
6969     TREE_USED (block) = 1;
6970 }
6971 
6972 
6973 /* Expand code for an OpenMP master directive.  */
6974 
6975 static void
lower_omp_master(gimple_stmt_iterator * gsi_p,omp_context * ctx)6976 lower_omp_master (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6977 {
6978   tree block, lab = NULL, x, bfn_decl;
6979   gimple *stmt = gsi_stmt (*gsi_p);
6980   gbind *bind;
6981   location_t loc = gimple_location (stmt);
6982   gimple_seq tseq;
6983 
6984   push_gimplify_context ();
6985 
6986   block = make_node (BLOCK);
6987   bind = gimple_build_bind (NULL, NULL, block);
6988   gsi_replace (gsi_p, bind, true);
6989   gimple_bind_add_stmt (bind, stmt);
6990 
6991   bfn_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
6992   x = build_call_expr_loc (loc, bfn_decl, 0);
6993   x = build2 (EQ_EXPR, boolean_type_node, x, integer_zero_node);
6994   x = build3 (COND_EXPR, void_type_node, x, NULL, build_and_jump (&lab));
6995   tseq = NULL;
6996   gimplify_and_add (x, &tseq);
6997   gimple_bind_add_seq (bind, tseq);
6998 
6999   lower_omp (gimple_omp_body_ptr (stmt), ctx);
7000   gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
7001   gimple_bind_add_seq (bind, gimple_omp_body (stmt));
7002   gimple_omp_set_body (stmt, NULL);
7003 
7004   gimple_bind_add_stmt (bind, gimple_build_label (lab));
7005 
7006   gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
7007 
7008   pop_gimplify_context (bind);
7009 
7010   gimple_bind_append_vars (bind, ctx->block_vars);
7011   BLOCK_VARS (block) = ctx->block_vars;
7012 }
7013 
7014 /* Helper function for lower_omp_task_reductions.  For a specific PASS
7015    find out the current clause it should be processed, or return false
7016    if all have been processed already.  */
7017 
7018 static inline bool
omp_task_reduction_iterate(int pass,enum tree_code code,enum omp_clause_code ccode,tree * c,tree * decl,tree * type,tree * next)7019 omp_task_reduction_iterate (int pass, enum tree_code code,
7020 			    enum omp_clause_code ccode, tree *c, tree *decl,
7021 			    tree *type, tree *next)
7022 {
7023   for (; *c; *c = omp_find_clause (OMP_CLAUSE_CHAIN (*c), ccode))
7024     {
7025       if (ccode == OMP_CLAUSE_REDUCTION
7026 	  && code != OMP_TASKLOOP
7027 	  && !OMP_CLAUSE_REDUCTION_TASK (*c))
7028 	continue;
7029       *decl = OMP_CLAUSE_DECL (*c);
7030       *type = TREE_TYPE (*decl);
7031       if (TREE_CODE (*decl) == MEM_REF)
7032 	{
7033 	  if (pass != 1)
7034 	    continue;
7035 	}
7036       else
7037 	{
7038 	  if (omp_is_reference (*decl))
7039 	    *type = TREE_TYPE (*type);
7040 	  if (pass != (!TREE_CONSTANT (TYPE_SIZE_UNIT (*type))))
7041 	    continue;
7042 	}
7043       *next = omp_find_clause (OMP_CLAUSE_CHAIN (*c), ccode);
7044       return true;
7045     }
7046   *decl = NULL_TREE;
7047   *type = NULL_TREE;
7048   *next = NULL_TREE;
7049   return false;
7050 }
7051 
7052 /* Lower task_reduction and reduction clauses (the latter unless CODE is
7053    OMP_TASKGROUP only with task modifier).  Register mapping of those in
7054    START sequence and reducing them and unregister them in the END sequence.  */
7055 
7056 static void
lower_omp_task_reductions(omp_context * ctx,enum tree_code code,tree clauses,gimple_seq * start,gimple_seq * end)7057 lower_omp_task_reductions (omp_context *ctx, enum tree_code code, tree clauses,
7058 			   gimple_seq *start, gimple_seq *end)
7059 {
7060   enum omp_clause_code ccode
7061     = (code == OMP_TASKGROUP
7062        ? OMP_CLAUSE_TASK_REDUCTION : OMP_CLAUSE_REDUCTION);
7063   tree cancellable = NULL_TREE;
7064   clauses = omp_task_reductions_find_first (clauses, code, ccode);
7065   if (clauses == NULL_TREE)
7066     return;
7067   if (code == OMP_FOR || code == OMP_SECTIONS)
7068     {
7069       for (omp_context *outer = ctx->outer; outer; outer = outer->outer)
7070 	if (gimple_code (outer->stmt) == GIMPLE_OMP_PARALLEL
7071 	    && outer->cancellable)
7072 	  {
7073 	    cancellable = error_mark_node;
7074 	    break;
7075 	  }
7076 	else if (gimple_code (outer->stmt) != GIMPLE_OMP_TASKGROUP)
7077 	  break;
7078     }
7079   tree record_type = lang_hooks.types.make_type (RECORD_TYPE);
7080   tree *last = &TYPE_FIELDS (record_type);
7081   unsigned cnt = 0;
7082   if (cancellable)
7083     {
7084       tree field = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
7085 			       ptr_type_node);
7086       tree ifield = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
7087 				integer_type_node);
7088       *last = field;
7089       DECL_CHAIN (field) = ifield;
7090       last = &DECL_CHAIN (ifield);
7091       DECL_CONTEXT (field) = record_type;
7092       if (TYPE_ALIGN (record_type) < DECL_ALIGN (field))
7093 	SET_TYPE_ALIGN (record_type, DECL_ALIGN (field));
7094       DECL_CONTEXT (ifield) = record_type;
7095       if (TYPE_ALIGN (record_type) < DECL_ALIGN (ifield))
7096 	SET_TYPE_ALIGN (record_type, DECL_ALIGN (ifield));
7097     }
7098   for (int pass = 0; pass < 2; pass++)
7099     {
7100       tree decl, type, next;
7101       for (tree c = clauses;
7102 	   omp_task_reduction_iterate (pass, code, ccode,
7103 				       &c, &decl, &type, &next); c = next)
7104 	{
7105 	  ++cnt;
7106 	  tree new_type = type;
7107 	  if (ctx->outer)
7108 	    new_type = remap_type (type, &ctx->outer->cb);
7109 	  tree field
7110 	    = build_decl (OMP_CLAUSE_LOCATION (c), FIELD_DECL,
7111 			  DECL_P (decl) ? DECL_NAME (decl) : NULL_TREE,
7112 			  new_type);
7113 	  if (DECL_P (decl) && type == TREE_TYPE (decl))
7114 	    {
7115 	      SET_DECL_ALIGN (field, DECL_ALIGN (decl));
7116 	      DECL_USER_ALIGN (field) = DECL_USER_ALIGN (decl);
7117 	      TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (decl);
7118 	    }
7119 	  else
7120 	    SET_DECL_ALIGN (field, TYPE_ALIGN (type));
7121 	  DECL_CONTEXT (field) = record_type;
7122 	  if (TYPE_ALIGN (record_type) < DECL_ALIGN (field))
7123 	    SET_TYPE_ALIGN (record_type, DECL_ALIGN (field));
7124 	  *last = field;
7125 	  last = &DECL_CHAIN (field);
7126 	  tree bfield
7127 	    = build_decl (OMP_CLAUSE_LOCATION (c), FIELD_DECL, NULL_TREE,
7128 			  boolean_type_node);
7129 	  DECL_CONTEXT (bfield) = record_type;
7130 	  if (TYPE_ALIGN (record_type) < DECL_ALIGN (bfield))
7131 	    SET_TYPE_ALIGN (record_type, DECL_ALIGN (bfield));
7132 	  *last = bfield;
7133 	  last = &DECL_CHAIN (bfield);
7134 	}
7135     }
7136   *last = NULL_TREE;
7137   layout_type (record_type);
7138 
7139   /* Build up an array which registers with the runtime all the reductions
7140      and deregisters them at the end.  Format documented in libgomp/task.c.  */
7141   tree atype = build_array_type_nelts (pointer_sized_int_node, 7 + cnt * 3);
7142   tree avar = create_tmp_var_raw (atype);
7143   gimple_add_tmp_var (avar);
7144   TREE_ADDRESSABLE (avar) = 1;
7145   tree r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_zero_node,
7146 		   NULL_TREE, NULL_TREE);
7147   tree t = build_int_cst (pointer_sized_int_node, cnt);
7148   gimple_seq_add_stmt (start, gimple_build_assign (r, t));
7149   gimple_seq seq = NULL;
7150   tree sz = fold_convert (pointer_sized_int_node,
7151 			  TYPE_SIZE_UNIT (record_type));
7152   int cachesz = 64;
7153   sz = fold_build2 (PLUS_EXPR, pointer_sized_int_node, sz,
7154 		    build_int_cst (pointer_sized_int_node, cachesz - 1));
7155   sz = fold_build2 (BIT_AND_EXPR, pointer_sized_int_node, sz,
7156 		    build_int_cst (pointer_sized_int_node, ~(cachesz - 1)));
7157   ctx->task_reductions.create (1 + cnt);
7158   ctx->task_reduction_map = new hash_map<tree, unsigned>;
7159   ctx->task_reductions.quick_push (TREE_CODE (sz) == INTEGER_CST
7160 				   ? sz : NULL_TREE);
7161   sz = force_gimple_operand (sz, &seq, true, NULL_TREE);
7162   gimple_seq_add_seq (start, seq);
7163   r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_one_node,
7164 	      NULL_TREE, NULL_TREE);
7165   gimple_seq_add_stmt (start, gimple_build_assign (r, sz));
7166   r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (2),
7167 	      NULL_TREE, NULL_TREE);
7168   t = build_int_cst (pointer_sized_int_node,
7169 		     MAX (TYPE_ALIGN_UNIT (record_type), (unsigned) cachesz));
7170   gimple_seq_add_stmt (start, gimple_build_assign (r, t));
7171   r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (3),
7172 	      NULL_TREE, NULL_TREE);
7173   t = build_int_cst (pointer_sized_int_node, -1);
7174   gimple_seq_add_stmt (start, gimple_build_assign (r, t));
7175   r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (4),
7176 	      NULL_TREE, NULL_TREE);
7177   t = build_int_cst (pointer_sized_int_node, 0);
7178   gimple_seq_add_stmt (start, gimple_build_assign (r, t));
7179 
7180   /* In end, build a loop that iterates from 0 to < omp_get_num_threads ()
7181      and for each task reduction checks a bool right after the private variable
7182      within that thread's chunk; if the bool is clear, it hasn't been
7183      initialized and thus isn't going to be reduced nor destructed, otherwise
7184      reduce and destruct it.  */
7185   tree idx = create_tmp_var (size_type_node);
7186   gimple_seq_add_stmt (end, gimple_build_assign (idx, size_zero_node));
7187   tree num_thr_sz = create_tmp_var (size_type_node);
7188   tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
7189   tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
7190   tree lab3 = NULL_TREE;
7191   gimple *g;
7192   if (code == OMP_FOR || code == OMP_SECTIONS)
7193     {
7194       /* For worksharing constructs, only perform it in the master thread,
7195 	 with the exception of cancelled implicit barriers - then only handle
7196 	 the current thread.  */
7197       tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
7198       t = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
7199       tree thr_num = create_tmp_var (integer_type_node);
7200       g = gimple_build_call (t, 0);
7201       gimple_call_set_lhs (g, thr_num);
7202       gimple_seq_add_stmt (end, g);
7203       if (cancellable)
7204 	{
7205 	  tree c;
7206 	  tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
7207 	  tree lab6 = create_artificial_label (UNKNOWN_LOCATION);
7208 	  lab3 = create_artificial_label (UNKNOWN_LOCATION);
7209 	  if (code == OMP_FOR)
7210 	    c = gimple_omp_for_clauses (ctx->stmt);
7211 	  else /* if (code == OMP_SECTIONS) */
7212 	    c = gimple_omp_sections_clauses (ctx->stmt);
7213 	  c = OMP_CLAUSE_DECL (omp_find_clause (c, OMP_CLAUSE__REDUCTEMP_));
7214 	  cancellable = c;
7215 	  g = gimple_build_cond (NE_EXPR, c, build_zero_cst (TREE_TYPE (c)),
7216 				 lab5, lab6);
7217 	  gimple_seq_add_stmt (end, g);
7218 	  gimple_seq_add_stmt (end, gimple_build_label (lab5));
7219 	  g = gimple_build_assign (idx, NOP_EXPR, thr_num);
7220 	  gimple_seq_add_stmt (end, g);
7221 	  g = gimple_build_assign (num_thr_sz, PLUS_EXPR, idx,
7222 				   build_one_cst (TREE_TYPE (idx)));
7223 	  gimple_seq_add_stmt (end, g);
7224 	  gimple_seq_add_stmt (end, gimple_build_goto (lab3));
7225 	  gimple_seq_add_stmt (end, gimple_build_label (lab6));
7226 	}
7227       g = gimple_build_cond (NE_EXPR, thr_num, integer_zero_node, lab2, lab4);
7228       gimple_seq_add_stmt (end, g);
7229       gimple_seq_add_stmt (end, gimple_build_label (lab4));
7230     }
7231   if (code != OMP_PARALLEL)
7232     {
7233       t = builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS);
7234       tree num_thr = create_tmp_var (integer_type_node);
7235       g = gimple_build_call (t, 0);
7236       gimple_call_set_lhs (g, num_thr);
7237       gimple_seq_add_stmt (end, g);
7238       g = gimple_build_assign (num_thr_sz, NOP_EXPR, num_thr);
7239       gimple_seq_add_stmt (end, g);
7240       if (cancellable)
7241 	gimple_seq_add_stmt (end, gimple_build_label (lab3));
7242     }
7243   else
7244     {
7245       tree c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
7246 				OMP_CLAUSE__REDUCTEMP_);
7247       t = fold_convert (pointer_sized_int_node, OMP_CLAUSE_DECL (c));
7248       t = fold_convert (size_type_node, t);
7249       gimplify_assign (num_thr_sz, t, end);
7250     }
7251   t = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (2),
7252 	      NULL_TREE, NULL_TREE);
7253   tree data = create_tmp_var (pointer_sized_int_node);
7254   gimple_seq_add_stmt (end, gimple_build_assign (data, t));
7255   gimple_seq_add_stmt (end, gimple_build_label (lab1));
7256   tree ptr;
7257   if (TREE_CODE (TYPE_SIZE_UNIT (record_type)) == INTEGER_CST)
7258     ptr = create_tmp_var (build_pointer_type (record_type));
7259   else
7260     ptr = create_tmp_var (ptr_type_node);
7261   gimple_seq_add_stmt (end, gimple_build_assign (ptr, NOP_EXPR, data));
7262 
7263   tree field = TYPE_FIELDS (record_type);
7264   cnt = 0;
7265   if (cancellable)
7266     field = DECL_CHAIN (DECL_CHAIN (field));
7267   for (int pass = 0; pass < 2; pass++)
7268     {
7269       tree decl, type, next;
7270       for (tree c = clauses;
7271 	   omp_task_reduction_iterate (pass, code, ccode,
7272 				       &c, &decl, &type, &next); c = next)
7273 	{
7274 	  tree var = decl, ref;
7275 	  if (TREE_CODE (decl) == MEM_REF)
7276 	    {
7277 	      var = TREE_OPERAND (var, 0);
7278 	      if (TREE_CODE (var) == POINTER_PLUS_EXPR)
7279 		var = TREE_OPERAND (var, 0);
7280 	      tree v = var;
7281 	      if (TREE_CODE (var) == ADDR_EXPR)
7282 		var = TREE_OPERAND (var, 0);
7283 	      else if (TREE_CODE (var) == INDIRECT_REF)
7284 		var = TREE_OPERAND (var, 0);
7285 	      tree orig_var = var;
7286 	      if (is_variable_sized (var))
7287 		{
7288 		  gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
7289 		  var = DECL_VALUE_EXPR (var);
7290 		  gcc_assert (TREE_CODE (var) == INDIRECT_REF);
7291 		  var = TREE_OPERAND (var, 0);
7292 		  gcc_assert (DECL_P (var));
7293 		}
7294 	      t = ref = maybe_lookup_decl_in_outer_ctx (var, ctx);
7295 	      if (orig_var != var)
7296 		gcc_assert (TREE_CODE (v) == ADDR_EXPR);
7297 	      else if (TREE_CODE (v) == ADDR_EXPR)
7298 		t = build_fold_addr_expr (t);
7299 	      else if (TREE_CODE (v) == INDIRECT_REF)
7300 		t = build_fold_indirect_ref (t);
7301 	      if (TREE_CODE (TREE_OPERAND (decl, 0)) == POINTER_PLUS_EXPR)
7302 		{
7303 		  tree b = TREE_OPERAND (TREE_OPERAND (decl, 0), 1);
7304 		  b = maybe_lookup_decl_in_outer_ctx (b, ctx);
7305 		  t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, b);
7306 		}
7307 	      if (!integer_zerop (TREE_OPERAND (decl, 1)))
7308 		t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t,
7309 				 fold_convert (size_type_node,
7310 					       TREE_OPERAND (decl, 1)));
7311 	    }
7312 	  else
7313 	    {
7314 	      t = ref = maybe_lookup_decl_in_outer_ctx (var, ctx);
7315 	      if (!omp_is_reference (decl))
7316 		t = build_fold_addr_expr (t);
7317 	    }
7318 	  t = fold_convert (pointer_sized_int_node, t);
7319 	  seq = NULL;
7320 	  t = force_gimple_operand (t, &seq, true, NULL_TREE);
7321 	  gimple_seq_add_seq (start, seq);
7322 	  r = build4 (ARRAY_REF, pointer_sized_int_node, avar,
7323 		      size_int (7 + cnt * 3), NULL_TREE, NULL_TREE);
7324 	  gimple_seq_add_stmt (start, gimple_build_assign (r, t));
7325 	  t = unshare_expr (byte_position (field));
7326 	  t = fold_convert (pointer_sized_int_node, t);
7327 	  ctx->task_reduction_map->put (c, cnt);
7328 	  ctx->task_reductions.quick_push (TREE_CODE (t) == INTEGER_CST
7329 					   ? t : NULL_TREE);
7330 	  seq = NULL;
7331 	  t = force_gimple_operand (t, &seq, true, NULL_TREE);
7332 	  gimple_seq_add_seq (start, seq);
7333 	  r = build4 (ARRAY_REF, pointer_sized_int_node, avar,
7334 		      size_int (7 + cnt * 3 + 1), NULL_TREE, NULL_TREE);
7335 	  gimple_seq_add_stmt (start, gimple_build_assign (r, t));
7336 
7337 	  tree bfield = DECL_CHAIN (field);
7338 	  tree cond;
7339 	  if (code == OMP_PARALLEL || code == OMP_FOR || code == OMP_SECTIONS)
7340 	    /* In parallel or worksharing all threads unconditionally
7341 	       initialize all their task reduction private variables.  */
7342 	    cond = boolean_true_node;
7343 	  else if (TREE_TYPE (ptr) == ptr_type_node)
7344 	    {
7345 	      cond = build2 (POINTER_PLUS_EXPR, ptr_type_node, ptr,
7346 			     unshare_expr (byte_position (bfield)));
7347 	      seq = NULL;
7348 	      cond = force_gimple_operand (cond, &seq, true, NULL_TREE);
7349 	      gimple_seq_add_seq (end, seq);
7350 	      tree pbool = build_pointer_type (TREE_TYPE (bfield));
7351 	      cond = build2 (MEM_REF, TREE_TYPE (bfield), cond,
7352 			     build_int_cst (pbool, 0));
7353 	    }
7354 	  else
7355 	    cond = build3 (COMPONENT_REF, TREE_TYPE (bfield),
7356 			   build_simple_mem_ref (ptr), bfield, NULL_TREE);
7357 	  tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
7358 	  tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
7359 	  tree condv = create_tmp_var (boolean_type_node);
7360 	  gimple_seq_add_stmt (end, gimple_build_assign (condv, cond));
7361 	  g = gimple_build_cond (NE_EXPR, condv, boolean_false_node,
7362 				 lab3, lab4);
7363 	  gimple_seq_add_stmt (end, g);
7364 	  gimple_seq_add_stmt (end, gimple_build_label (lab3));
7365 	  if (cancellable && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) == NULL_TREE)
7366 	    {
7367 	      /* If this reduction doesn't need destruction and parallel
7368 		 has been cancelled, there is nothing to do for this
7369 		 reduction, so jump around the merge operation.  */
7370 	      tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
7371 	      g = gimple_build_cond (NE_EXPR, cancellable,
7372 				     build_zero_cst (TREE_TYPE (cancellable)),
7373 				     lab4, lab5);
7374 	      gimple_seq_add_stmt (end, g);
7375 	      gimple_seq_add_stmt (end, gimple_build_label (lab5));
7376 	    }
7377 
7378 	  tree new_var;
7379 	  if (TREE_TYPE (ptr) == ptr_type_node)
7380 	    {
7381 	      new_var = build2 (POINTER_PLUS_EXPR, ptr_type_node, ptr,
7382 				unshare_expr (byte_position (field)));
7383 	      seq = NULL;
7384 	      new_var = force_gimple_operand (new_var, &seq, true, NULL_TREE);
7385 	      gimple_seq_add_seq (end, seq);
7386 	      tree pbool = build_pointer_type (TREE_TYPE (field));
7387 	      new_var = build2 (MEM_REF, TREE_TYPE (field), new_var,
7388 				build_int_cst (pbool, 0));
7389 	    }
7390 	  else
7391 	    new_var = build3 (COMPONENT_REF, TREE_TYPE (field),
7392 			      build_simple_mem_ref (ptr), field, NULL_TREE);
7393 
7394 	  enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
7395 	  if (TREE_CODE (decl) != MEM_REF && omp_is_reference (decl))
7396 	    ref = build_simple_mem_ref (ref);
7397 	  /* reduction(-:var) sums up the partial results, so it acts
7398 	     identically to reduction(+:var).  */
7399 	  if (rcode == MINUS_EXPR)
7400 	    rcode = PLUS_EXPR;
7401 	  if (TREE_CODE (decl) == MEM_REF)
7402 	    {
7403 	      tree type = TREE_TYPE (new_var);
7404 	      tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
7405 	      tree i = create_tmp_var (TREE_TYPE (v));
7406 	      tree ptype = build_pointer_type (TREE_TYPE (type));
7407 	      if (DECL_P (v))
7408 		{
7409 		  v = maybe_lookup_decl_in_outer_ctx (v, ctx);
7410 		  tree vv = create_tmp_var (TREE_TYPE (v));
7411 		  gimplify_assign (vv, v, start);
7412 		  v = vv;
7413 		}
7414 	      ref = build4 (ARRAY_REF, pointer_sized_int_node, avar,
7415 			    size_int (7 + cnt * 3), NULL_TREE, NULL_TREE);
7416 	      new_var = build_fold_addr_expr (new_var);
7417 	      new_var = fold_convert (ptype, new_var);
7418 	      ref = fold_convert (ptype, ref);
7419 	      tree m = create_tmp_var (ptype);
7420 	      gimplify_assign (m, new_var, end);
7421 	      new_var = m;
7422 	      m = create_tmp_var (ptype);
7423 	      gimplify_assign (m, ref, end);
7424 	      ref = m;
7425 	      gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), end);
7426 	      tree body = create_artificial_label (UNKNOWN_LOCATION);
7427 	      tree endl = create_artificial_label (UNKNOWN_LOCATION);
7428 	      gimple_seq_add_stmt (end, gimple_build_label (body));
7429 	      tree priv = build_simple_mem_ref (new_var);
7430 	      tree out = build_simple_mem_ref (ref);
7431 	      if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
7432 		{
7433 		  tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
7434 		  tree decl_placeholder
7435 		    = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
7436 		  tree lab6 = NULL_TREE;
7437 		  if (cancellable)
7438 		    {
7439 		      /* If this reduction needs destruction and parallel
7440 			 has been cancelled, jump around the merge operation
7441 			 to the destruction.  */
7442 		      tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
7443 		      lab6 = create_artificial_label (UNKNOWN_LOCATION);
7444 		      tree zero = build_zero_cst (TREE_TYPE (cancellable));
7445 		      g = gimple_build_cond (NE_EXPR, cancellable, zero,
7446 					     lab6, lab5);
7447 		      gimple_seq_add_stmt (end, g);
7448 		      gimple_seq_add_stmt (end, gimple_build_label (lab5));
7449 		    }
7450 		  SET_DECL_VALUE_EXPR (placeholder, out);
7451 		  DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
7452 		  SET_DECL_VALUE_EXPR (decl_placeholder, priv);
7453 		  DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
7454 		  lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
7455 		  gimple_seq_add_seq (end,
7456 				      OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
7457 		  OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
7458 		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
7459 		    {
7460 		      OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
7461 		      OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
7462 		    }
7463 		  if (cancellable)
7464 		    gimple_seq_add_stmt (end, gimple_build_label (lab6));
7465 		  tree x = lang_hooks.decls.omp_clause_dtor (c, priv);
7466 		  if (x)
7467 		    {
7468 		      gimple_seq tseq = NULL;
7469 		      gimplify_stmt (&x, &tseq);
7470 		      gimple_seq_add_seq (end, tseq);
7471 		    }
7472 		}
7473 	      else
7474 		{
7475 		  tree x = build2 (rcode, TREE_TYPE (out), out, priv);
7476 		  out = unshare_expr (out);
7477 		  gimplify_assign (out, x, end);
7478 		}
7479 	      gimple *g
7480 		= gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
7481 				       TYPE_SIZE_UNIT (TREE_TYPE (type)));
7482 	      gimple_seq_add_stmt (end, g);
7483 	      g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
7484 				       TYPE_SIZE_UNIT (TREE_TYPE (type)));
7485 	      gimple_seq_add_stmt (end, g);
7486 	      g = gimple_build_assign (i, PLUS_EXPR, i,
7487 				       build_int_cst (TREE_TYPE (i), 1));
7488 	      gimple_seq_add_stmt (end, g);
7489 	      g = gimple_build_cond (LE_EXPR, i, v, body, endl);
7490 	      gimple_seq_add_stmt (end, g);
7491 	      gimple_seq_add_stmt (end, gimple_build_label (endl));
7492 	    }
7493 	  else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
7494 	    {
7495 	      tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
7496 	      tree oldv = NULL_TREE;
7497 	      tree lab6 = NULL_TREE;
7498 	      if (cancellable)
7499 		{
7500 		  /* If this reduction needs destruction and parallel
7501 		     has been cancelled, jump around the merge operation
7502 		     to the destruction.  */
7503 		  tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
7504 		  lab6 = create_artificial_label (UNKNOWN_LOCATION);
7505 		  tree zero = build_zero_cst (TREE_TYPE (cancellable));
7506 		  g = gimple_build_cond (NE_EXPR, cancellable, zero,
7507 					 lab6, lab5);
7508 		  gimple_seq_add_stmt (end, g);
7509 		  gimple_seq_add_stmt (end, gimple_build_label (lab5));
7510 		}
7511 	      if (omp_is_reference (decl)
7512 		  && !useless_type_conversion_p (TREE_TYPE (placeholder),
7513 						 TREE_TYPE (ref)))
7514 		ref = build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c), ref);
7515 	      ref = build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c), ref);
7516 	      tree refv = create_tmp_var (TREE_TYPE (ref));
7517 	      gimplify_assign (refv, ref, end);
7518 	      ref = build_simple_mem_ref_loc (OMP_CLAUSE_LOCATION (c), refv);
7519 	      SET_DECL_VALUE_EXPR (placeholder, ref);
7520 	      DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
7521 	      tree d = maybe_lookup_decl (decl, ctx);
7522 	      gcc_assert (d);
7523 	      if (DECL_HAS_VALUE_EXPR_P (d))
7524 		oldv = DECL_VALUE_EXPR (d);
7525 	      if (omp_is_reference (var))
7526 		{
7527 		  tree v = fold_convert (TREE_TYPE (d),
7528 					 build_fold_addr_expr (new_var));
7529 		  SET_DECL_VALUE_EXPR (d, v);
7530 		}
7531 	      else
7532 		SET_DECL_VALUE_EXPR (d, new_var);
7533 	      DECL_HAS_VALUE_EXPR_P (d) = 1;
7534 	      lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
7535 	      if (oldv)
7536 		SET_DECL_VALUE_EXPR (d, oldv);
7537 	      else
7538 		{
7539 		  SET_DECL_VALUE_EXPR (d, NULL_TREE);
7540 		  DECL_HAS_VALUE_EXPR_P (d) = 0;
7541 		}
7542 	      gimple_seq_add_seq (end, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
7543 	      OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
7544 	      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
7545 		OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
7546 	      if (cancellable)
7547 		gimple_seq_add_stmt (end, gimple_build_label (lab6));
7548 	      tree x = lang_hooks.decls.omp_clause_dtor (c, new_var);
7549 	      if (x)
7550 		{
7551 		  gimple_seq tseq = NULL;
7552 		  gimplify_stmt (&x, &tseq);
7553 		  gimple_seq_add_seq (end, tseq);
7554 		}
7555 	    }
7556 	  else
7557 	    {
7558 	      tree x = build2 (rcode, TREE_TYPE (ref), ref, new_var);
7559 	      ref = unshare_expr (ref);
7560 	      gimplify_assign (ref, x, end);
7561 	    }
7562 	  gimple_seq_add_stmt (end, gimple_build_label (lab4));
7563 	  ++cnt;
7564 	  field = DECL_CHAIN (bfield);
7565 	}
7566     }
7567 
7568   if (code == OMP_TASKGROUP)
7569     {
7570       t = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_REGISTER);
7571       g = gimple_build_call (t, 1, build_fold_addr_expr (avar));
7572       gimple_seq_add_stmt (start, g);
7573     }
7574   else
7575     {
7576       tree c;
7577       if (code == OMP_FOR)
7578 	c = gimple_omp_for_clauses (ctx->stmt);
7579       else if (code == OMP_SECTIONS)
7580 	c = gimple_omp_sections_clauses (ctx->stmt);
7581       else
7582 	c = gimple_omp_taskreg_clauses (ctx->stmt);
7583       c = omp_find_clause (c, OMP_CLAUSE__REDUCTEMP_);
7584       t = fold_convert (TREE_TYPE (OMP_CLAUSE_DECL (c)),
7585 			build_fold_addr_expr (avar));
7586       gimplify_assign (OMP_CLAUSE_DECL (c), t, start);
7587     }
7588 
7589   gimple_seq_add_stmt (end, gimple_build_assign (data, PLUS_EXPR, data, sz));
7590   gimple_seq_add_stmt (end, gimple_build_assign (idx, PLUS_EXPR, idx,
7591 						 size_one_node));
7592   g = gimple_build_cond (NE_EXPR, idx, num_thr_sz, lab1, lab2);
7593   gimple_seq_add_stmt (end, g);
7594   gimple_seq_add_stmt (end, gimple_build_label (lab2));
7595   if (code == OMP_FOR || code == OMP_SECTIONS)
7596     {
7597       enum built_in_function bfn
7598 	= BUILT_IN_GOMP_WORKSHARE_TASK_REDUCTION_UNREGISTER;
7599       t = builtin_decl_explicit (bfn);
7600       tree c_bool_type = TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (t)));
7601       tree arg;
7602       if (cancellable)
7603 	{
7604 	  arg = create_tmp_var (c_bool_type);
7605 	  gimple_seq_add_stmt (end, gimple_build_assign (arg, NOP_EXPR,
7606 							 cancellable));
7607 	}
7608       else
7609 	arg = build_int_cst (c_bool_type, 0);
7610       g = gimple_build_call (t, 1, arg);
7611     }
7612   else
7613     {
7614       t = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_UNREGISTER);
7615       g = gimple_build_call (t, 1, build_fold_addr_expr (avar));
7616     }
7617   gimple_seq_add_stmt (end, g);
7618   t = build_constructor (atype, NULL);
7619   TREE_THIS_VOLATILE (t) = 1;
7620   gimple_seq_add_stmt (end, gimple_build_assign (avar, t));
7621 }
7622 
7623 /* Expand code for an OpenMP taskgroup directive.  */
7624 
7625 static void
lower_omp_taskgroup(gimple_stmt_iterator * gsi_p,omp_context * ctx)7626 lower_omp_taskgroup (gimple_stmt_iterator *gsi_p, omp_context *ctx)
7627 {
7628   gimple *stmt = gsi_stmt (*gsi_p);
7629   gcall *x;
7630   gbind *bind;
7631   gimple_seq dseq = NULL;
7632   tree block = make_node (BLOCK);
7633 
7634   bind = gimple_build_bind (NULL, NULL, block);
7635   gsi_replace (gsi_p, bind, true);
7636   gimple_bind_add_stmt (bind, stmt);
7637 
7638   push_gimplify_context ();
7639 
7640   x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_START),
7641 			 0);
7642   gimple_bind_add_stmt (bind, x);
7643 
7644   lower_omp_task_reductions (ctx, OMP_TASKGROUP,
7645 			     gimple_omp_taskgroup_clauses (stmt),
7646 			     gimple_bind_body_ptr (bind), &dseq);
7647 
7648   lower_omp (gimple_omp_body_ptr (stmt), ctx);
7649   gimple_bind_add_seq (bind, gimple_omp_body (stmt));
7650   gimple_omp_set_body (stmt, NULL);
7651 
7652   gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
7653   gimple_bind_add_seq (bind, dseq);
7654 
7655   pop_gimplify_context (bind);
7656 
7657   gimple_bind_append_vars (bind, ctx->block_vars);
7658   BLOCK_VARS (block) = ctx->block_vars;
7659 }
7660 
7661 
7662 /* Fold the OMP_ORDERED_CLAUSES for the OMP_ORDERED in STMT if possible.  */
7663 
7664 static void
lower_omp_ordered_clauses(gimple_stmt_iterator * gsi_p,gomp_ordered * ord_stmt,omp_context * ctx)7665 lower_omp_ordered_clauses (gimple_stmt_iterator *gsi_p, gomp_ordered *ord_stmt,
7666 			   omp_context *ctx)
7667 {
7668   struct omp_for_data fd;
7669   if (!ctx->outer || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR)
7670     return;
7671 
7672   unsigned int len = gimple_omp_for_collapse (ctx->outer->stmt);
7673   struct omp_for_data_loop *loops = XALLOCAVEC (struct omp_for_data_loop, len);
7674   omp_extract_for_data (as_a <gomp_for *> (ctx->outer->stmt), &fd, loops);
7675   if (!fd.ordered)
7676     return;
7677 
7678   tree *list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
7679   tree c = gimple_omp_ordered_clauses (ord_stmt);
7680   if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
7681       && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
7682     {
7683       /* Merge depend clauses from multiple adjacent
7684 	 #pragma omp ordered depend(sink:...) constructs
7685 	 into one #pragma omp ordered depend(sink:...), so that
7686 	 we can optimize them together.  */
7687       gimple_stmt_iterator gsi = *gsi_p;
7688       gsi_next (&gsi);
7689       while (!gsi_end_p (gsi))
7690 	{
7691 	  gimple *stmt = gsi_stmt (gsi);
7692 	  if (is_gimple_debug (stmt)
7693 	      || gimple_code (stmt) == GIMPLE_NOP)
7694 	    {
7695 	      gsi_next (&gsi);
7696 	      continue;
7697 	    }
7698 	  if (gimple_code (stmt) != GIMPLE_OMP_ORDERED)
7699 	    break;
7700 	  gomp_ordered *ord_stmt2 = as_a <gomp_ordered *> (stmt);
7701 	  c = gimple_omp_ordered_clauses (ord_stmt2);
7702 	  if (c == NULL_TREE
7703 	      || OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND
7704 	      || OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
7705 	    break;
7706 	  while (*list_p)
7707 	    list_p = &OMP_CLAUSE_CHAIN (*list_p);
7708 	  *list_p = c;
7709 	  gsi_remove (&gsi, true);
7710 	}
7711     }
7712 
7713   /* Canonicalize sink dependence clauses into one folded clause if
7714      possible.
7715 
7716      The basic algorithm is to create a sink vector whose first
7717      element is the GCD of all the first elements, and whose remaining
7718      elements are the minimum of the subsequent columns.
7719 
7720      We ignore dependence vectors whose first element is zero because
7721      such dependencies are known to be executed by the same thread.
7722 
7723      We take into account the direction of the loop, so a minimum
7724      becomes a maximum if the loop is iterating forwards.  We also
7725      ignore sink clauses where the loop direction is unknown, or where
7726      the offsets are clearly invalid because they are not a multiple
7727      of the loop increment.
7728 
7729      For example:
7730 
7731 	#pragma omp for ordered(2)
7732 	for (i=0; i < N; ++i)
7733 	  for (j=0; j < M; ++j)
7734 	    {
7735 	      #pragma omp ordered \
7736 		depend(sink:i-8,j-2) \
7737 		depend(sink:i,j-1) \	// Completely ignored because i+0.
7738 		depend(sink:i-4,j-3) \
7739 		depend(sink:i-6,j-4)
7740 	      #pragma omp ordered depend(source)
7741 	    }
7742 
7743      Folded clause is:
7744 
7745 	depend(sink:-gcd(8,4,6),-min(2,3,4))
7746 	  -or-
7747 	depend(sink:-2,-2)
7748   */
7749 
7750   /* FIXME: Computing GCD's where the first element is zero is
7751      non-trivial in the presence of collapsed loops.  Do this later.  */
7752   if (fd.collapse > 1)
7753     return;
7754 
7755   wide_int *folded_deps = XALLOCAVEC (wide_int, 2 * len - 1);
7756 
7757   /* wide_int is not a POD so it must be default-constructed.  */
7758   for (unsigned i = 0; i != 2 * len - 1; ++i)
7759     new (static_cast<void*>(folded_deps + i)) wide_int ();
7760 
7761   tree folded_dep = NULL_TREE;
7762   /* TRUE if the first dimension's offset is negative.  */
7763   bool neg_offset_p = false;
7764 
7765   list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
7766   unsigned int i;
7767   while ((c = *list_p) != NULL)
7768     {
7769       bool remove = false;
7770 
7771       gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND);
7772       if (OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
7773 	goto next_ordered_clause;
7774 
7775       tree vec;
7776       for (vec = OMP_CLAUSE_DECL (c), i = 0;
7777 	   vec && TREE_CODE (vec) == TREE_LIST;
7778 	   vec = TREE_CHAIN (vec), ++i)
7779 	{
7780 	  gcc_assert (i < len);
7781 
7782 	  /* omp_extract_for_data has canonicalized the condition.  */
7783 	  gcc_assert (fd.loops[i].cond_code == LT_EXPR
7784 		      || fd.loops[i].cond_code == GT_EXPR);
7785 	  bool forward = fd.loops[i].cond_code == LT_EXPR;
7786 	  bool maybe_lexically_later = true;
7787 
7788 	  /* While the committee makes up its mind, bail if we have any
7789 	     non-constant steps.  */
7790 	  if (TREE_CODE (fd.loops[i].step) != INTEGER_CST)
7791 	    goto lower_omp_ordered_ret;
7792 
7793 	  tree itype = TREE_TYPE (TREE_VALUE (vec));
7794 	  if (POINTER_TYPE_P (itype))
7795 	    itype = sizetype;
7796 	  wide_int offset = wide_int::from (wi::to_wide (TREE_PURPOSE (vec)),
7797 					    TYPE_PRECISION (itype),
7798 					    TYPE_SIGN (itype));
7799 
7800 	  /* Ignore invalid offsets that are not multiples of the step.  */
7801 	  if (!wi::multiple_of_p (wi::abs (offset),
7802 				  wi::abs (wi::to_wide (fd.loops[i].step)),
7803 				  UNSIGNED))
7804 	    {
7805 	      warning_at (OMP_CLAUSE_LOCATION (c), 0,
7806 			  "ignoring sink clause with offset that is not "
7807 			  "a multiple of the loop step");
7808 	      remove = true;
7809 	      goto next_ordered_clause;
7810 	    }
7811 
7812 	  /* Calculate the first dimension.  The first dimension of
7813 	     the folded dependency vector is the GCD of the first
7814 	     elements, while ignoring any first elements whose offset
7815 	     is 0.  */
7816 	  if (i == 0)
7817 	    {
7818 	      /* Ignore dependence vectors whose first dimension is 0.  */
7819 	      if (offset == 0)
7820 		{
7821 		  remove = true;
7822 		  goto next_ordered_clause;
7823 		}
7824 	      else
7825 		{
7826 		  if (!TYPE_UNSIGNED (itype) && (forward ^ wi::neg_p (offset)))
7827 		    {
7828 		      error_at (OMP_CLAUSE_LOCATION (c),
7829 				"first offset must be in opposite direction "
7830 				"of loop iterations");
7831 		      goto lower_omp_ordered_ret;
7832 		    }
7833 		  if (forward)
7834 		    offset = -offset;
7835 		  neg_offset_p = forward;
7836 		  /* Initialize the first time around.  */
7837 		  if (folded_dep == NULL_TREE)
7838 		    {
7839 		      folded_dep = c;
7840 		      folded_deps[0] = offset;
7841 		    }
7842 		  else
7843 		    folded_deps[0] = wi::gcd (folded_deps[0],
7844 					      offset, UNSIGNED);
7845 		}
7846 	    }
7847 	  /* Calculate minimum for the remaining dimensions.  */
7848 	  else
7849 	    {
7850 	      folded_deps[len + i - 1] = offset;
7851 	      if (folded_dep == c)
7852 		folded_deps[i] = offset;
7853 	      else if (maybe_lexically_later
7854 		       && !wi::eq_p (folded_deps[i], offset))
7855 		{
7856 		  if (forward ^ wi::gts_p (folded_deps[i], offset))
7857 		    {
7858 		      unsigned int j;
7859 		      folded_dep = c;
7860 		      for (j = 1; j <= i; j++)
7861 			folded_deps[j] = folded_deps[len + j - 1];
7862 		    }
7863 		  else
7864 		    maybe_lexically_later = false;
7865 		}
7866 	    }
7867 	}
7868       gcc_assert (i == len);
7869 
7870       remove = true;
7871 
7872     next_ordered_clause:
7873       if (remove)
7874 	*list_p = OMP_CLAUSE_CHAIN (c);
7875       else
7876 	list_p = &OMP_CLAUSE_CHAIN (c);
7877     }
7878 
7879   if (folded_dep)
7880     {
7881       if (neg_offset_p)
7882 	folded_deps[0] = -folded_deps[0];
7883 
7884       tree itype = TREE_TYPE (TREE_VALUE (OMP_CLAUSE_DECL (folded_dep)));
7885       if (POINTER_TYPE_P (itype))
7886 	itype = sizetype;
7887 
7888       TREE_PURPOSE (OMP_CLAUSE_DECL (folded_dep))
7889 	= wide_int_to_tree (itype, folded_deps[0]);
7890       OMP_CLAUSE_CHAIN (folded_dep) = gimple_omp_ordered_clauses (ord_stmt);
7891       *gimple_omp_ordered_clauses_ptr (ord_stmt) = folded_dep;
7892     }
7893 
7894  lower_omp_ordered_ret:
7895 
7896   /* Ordered without clauses is #pragma omp threads, while we want
7897      a nop instead if we remove all clauses.  */
7898   if (gimple_omp_ordered_clauses (ord_stmt) == NULL_TREE)
7899     gsi_replace (gsi_p, gimple_build_nop (), true);
7900 }
7901 
7902 
7903 /* Expand code for an OpenMP ordered directive.  */
7904 
7905 static void
lower_omp_ordered(gimple_stmt_iterator * gsi_p,omp_context * ctx)7906 lower_omp_ordered (gimple_stmt_iterator *gsi_p, omp_context *ctx)
7907 {
7908   tree block;
7909   gimple *stmt = gsi_stmt (*gsi_p), *g;
7910   gomp_ordered *ord_stmt = as_a <gomp_ordered *> (stmt);
7911   gcall *x;
7912   gbind *bind;
7913   bool simd = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
7914 			       OMP_CLAUSE_SIMD);
7915   /* FIXME: this should check presence of OMP_CLAUSE__SIMT_ on the enclosing
7916      loop.  */
7917   bool maybe_simt
7918     = simd && omp_maybe_offloaded_ctx (ctx) && omp_max_simt_vf () > 1;
7919   bool threads = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
7920 				  OMP_CLAUSE_THREADS);
7921 
7922   if (omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
7923 		       OMP_CLAUSE_DEPEND))
7924     {
7925       /* FIXME: This is needs to be moved to the expansion to verify various
7926 	 conditions only testable on cfg with dominators computed, and also
7927 	 all the depend clauses to be merged still might need to be available
7928 	 for the runtime checks.  */
7929       if (0)
7930 	lower_omp_ordered_clauses (gsi_p, ord_stmt, ctx);
7931       return;
7932     }
7933 
7934   push_gimplify_context ();
7935 
7936   block = make_node (BLOCK);
7937   bind = gimple_build_bind (NULL, NULL, block);
7938   gsi_replace (gsi_p, bind, true);
7939   gimple_bind_add_stmt (bind, stmt);
7940 
7941   if (simd)
7942     {
7943       x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_START, 1,
7944 				      build_int_cst (NULL_TREE, threads));
7945       cfun->has_simduid_loops = true;
7946     }
7947   else
7948     x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_START),
7949 			   0);
7950   gimple_bind_add_stmt (bind, x);
7951 
7952   tree counter = NULL_TREE, test = NULL_TREE, body = NULL_TREE;
7953   if (maybe_simt)
7954     {
7955       counter = create_tmp_var (integer_type_node);
7956       g = gimple_build_call_internal (IFN_GOMP_SIMT_LANE, 0);
7957       gimple_call_set_lhs (g, counter);
7958       gimple_bind_add_stmt (bind, g);
7959 
7960       body = create_artificial_label (UNKNOWN_LOCATION);
7961       test = create_artificial_label (UNKNOWN_LOCATION);
7962       gimple_bind_add_stmt (bind, gimple_build_label (body));
7963 
7964       tree simt_pred = create_tmp_var (integer_type_node);
7965       g = gimple_build_call_internal (IFN_GOMP_SIMT_ORDERED_PRED, 1, counter);
7966       gimple_call_set_lhs (g, simt_pred);
7967       gimple_bind_add_stmt (bind, g);
7968 
7969       tree t = create_artificial_label (UNKNOWN_LOCATION);
7970       g = gimple_build_cond (EQ_EXPR, simt_pred, integer_zero_node, t, test);
7971       gimple_bind_add_stmt (bind, g);
7972 
7973       gimple_bind_add_stmt (bind, gimple_build_label (t));
7974     }
7975   lower_omp (gimple_omp_body_ptr (stmt), ctx);
7976   gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
7977   gimple_bind_add_seq (bind, gimple_omp_body (stmt));
7978   gimple_omp_set_body (stmt, NULL);
7979 
7980   if (maybe_simt)
7981     {
7982       gimple_bind_add_stmt (bind, gimple_build_label (test));
7983       g = gimple_build_assign (counter, MINUS_EXPR, counter, integer_one_node);
7984       gimple_bind_add_stmt (bind, g);
7985 
7986       tree c = build2 (GE_EXPR, boolean_type_node, counter, integer_zero_node);
7987       tree nonneg = create_tmp_var (integer_type_node);
7988       gimple_seq tseq = NULL;
7989       gimplify_assign (nonneg, fold_convert (integer_type_node, c), &tseq);
7990       gimple_bind_add_seq (bind, tseq);
7991 
7992       g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY, 1, nonneg);
7993       gimple_call_set_lhs (g, nonneg);
7994       gimple_bind_add_stmt (bind, g);
7995 
7996       tree end = create_artificial_label (UNKNOWN_LOCATION);
7997       g = gimple_build_cond (NE_EXPR, nonneg, integer_zero_node, body, end);
7998       gimple_bind_add_stmt (bind, g);
7999 
8000       gimple_bind_add_stmt (bind, gimple_build_label (end));
8001     }
8002   if (simd)
8003     x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_END, 1,
8004 				    build_int_cst (NULL_TREE, threads));
8005   else
8006     x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_END),
8007 			   0);
8008   gimple_bind_add_stmt (bind, x);
8009 
8010   gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
8011 
8012   pop_gimplify_context (bind);
8013 
8014   gimple_bind_append_vars (bind, ctx->block_vars);
8015   BLOCK_VARS (block) = gimple_bind_vars (bind);
8016 }
8017 
8018 
8019 /* Gimplify a GIMPLE_OMP_CRITICAL statement.  This is a relatively simple
8020    substitution of a couple of function calls.  But in the NAMED case,
8021    requires that languages coordinate a symbol name.  It is therefore
8022    best put here in common code.  */
8023 
8024 static GTY(()) hash_map<tree, tree> *critical_name_mutexes;
8025 
8026 static void
lower_omp_critical(gimple_stmt_iterator * gsi_p,omp_context * ctx)8027 lower_omp_critical (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8028 {
8029   tree block;
8030   tree name, lock, unlock;
8031   gomp_critical *stmt = as_a <gomp_critical *> (gsi_stmt (*gsi_p));
8032   gbind *bind;
8033   location_t loc = gimple_location (stmt);
8034   gimple_seq tbody;
8035 
8036   name = gimple_omp_critical_name (stmt);
8037   if (name)
8038     {
8039       tree decl;
8040 
8041       if (!critical_name_mutexes)
8042 	critical_name_mutexes = hash_map<tree, tree>::create_ggc (10);
8043 
8044       tree *n = critical_name_mutexes->get (name);
8045       if (n == NULL)
8046 	{
8047 	  char *new_str;
8048 
8049 	  decl = create_tmp_var_raw (ptr_type_node);
8050 
8051 	  new_str = ACONCAT ((".gomp_critical_user_",
8052 			      IDENTIFIER_POINTER (name), NULL));
8053 	  DECL_NAME (decl) = get_identifier (new_str);
8054 	  TREE_PUBLIC (decl) = 1;
8055 	  TREE_STATIC (decl) = 1;
8056 	  DECL_COMMON (decl) = 1;
8057 	  DECL_ARTIFICIAL (decl) = 1;
8058 	  DECL_IGNORED_P (decl) = 1;
8059 
8060 	  varpool_node::finalize_decl (decl);
8061 
8062 	  critical_name_mutexes->put (name, decl);
8063 	}
8064       else
8065 	decl = *n;
8066 
8067       /* If '#pragma omp critical' is inside offloaded region or
8068 	 inside function marked as offloadable, the symbol must be
8069 	 marked as offloadable too.  */
8070       omp_context *octx;
8071       if (cgraph_node::get (current_function_decl)->offloadable)
8072 	varpool_node::get_create (decl)->offloadable = 1;
8073       else
8074 	for (octx = ctx->outer; octx; octx = octx->outer)
8075 	  if (is_gimple_omp_offloaded (octx->stmt))
8076 	    {
8077 	      varpool_node::get_create (decl)->offloadable = 1;
8078 	      break;
8079 	    }
8080 
8081       lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_START);
8082       lock = build_call_expr_loc (loc, lock, 1,
8083 				  build_fold_addr_expr_loc (loc, decl));
8084 
8085       unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_END);
8086       unlock = build_call_expr_loc (loc, unlock, 1,
8087 				build_fold_addr_expr_loc (loc, decl));
8088     }
8089   else
8090     {
8091       lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_START);
8092       lock = build_call_expr_loc (loc, lock, 0);
8093 
8094       unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_END);
8095       unlock = build_call_expr_loc (loc, unlock, 0);
8096     }
8097 
8098   push_gimplify_context ();
8099 
8100   block = make_node (BLOCK);
8101   bind = gimple_build_bind (NULL, NULL, block);
8102   gsi_replace (gsi_p, bind, true);
8103   gimple_bind_add_stmt (bind, stmt);
8104 
8105   tbody = gimple_bind_body (bind);
8106   gimplify_and_add (lock, &tbody);
8107   gimple_bind_set_body (bind, tbody);
8108 
8109   lower_omp (gimple_omp_body_ptr (stmt), ctx);
8110   gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
8111   gimple_bind_add_seq (bind, gimple_omp_body (stmt));
8112   gimple_omp_set_body (stmt, NULL);
8113 
8114   tbody = gimple_bind_body (bind);
8115   gimplify_and_add (unlock, &tbody);
8116   gimple_bind_set_body (bind, tbody);
8117 
8118   gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
8119 
8120   pop_gimplify_context (bind);
8121   gimple_bind_append_vars (bind, ctx->block_vars);
8122   BLOCK_VARS (block) = gimple_bind_vars (bind);
8123 }
8124 
8125 /* A subroutine of lower_omp_for.  Generate code to emit the predicate
8126    for a lastprivate clause.  Given a loop control predicate of (V
8127    cond N2), we gate the clause on (!(V cond N2)).  The lowered form
8128    is appended to *DLIST, iterator initialization is appended to
8129    *BODY_P.  */
8130 
8131 static void
lower_omp_for_lastprivate(struct omp_for_data * fd,gimple_seq * body_p,gimple_seq * dlist,struct omp_context * ctx)8132 lower_omp_for_lastprivate (struct omp_for_data *fd, gimple_seq *body_p,
8133 			   gimple_seq *dlist, struct omp_context *ctx)
8134 {
8135   tree clauses, cond, vinit;
8136   enum tree_code cond_code;
8137   gimple_seq stmts;
8138 
8139   cond_code = fd->loop.cond_code;
8140   cond_code = cond_code == LT_EXPR ? GE_EXPR : LE_EXPR;
8141 
8142   /* When possible, use a strict equality expression.  This can let VRP
8143      type optimizations deduce the value and remove a copy.  */
8144   if (tree_fits_shwi_p (fd->loop.step))
8145     {
8146       HOST_WIDE_INT step = tree_to_shwi (fd->loop.step);
8147       if (step == 1 || step == -1)
8148 	cond_code = EQ_EXPR;
8149     }
8150 
8151   if (gimple_omp_for_kind (fd->for_stmt) == GF_OMP_FOR_KIND_GRID_LOOP
8152       || gimple_omp_for_grid_phony (fd->for_stmt))
8153     cond = omp_grid_lastprivate_predicate (fd);
8154   else
8155     {
8156       tree n2 = fd->loop.n2;
8157       if (fd->collapse > 1
8158 	  && TREE_CODE (n2) != INTEGER_CST
8159 	  && gimple_omp_for_combined_into_p (fd->for_stmt))
8160 	{
8161 	  struct omp_context *taskreg_ctx = NULL;
8162 	  if (gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
8163 	    {
8164 	      gomp_for *gfor = as_a <gomp_for *> (ctx->outer->stmt);
8165 	      if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_FOR
8166 		  || gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_DISTRIBUTE)
8167 		{
8168 		  if (gimple_omp_for_combined_into_p (gfor))
8169 		    {
8170 		      gcc_assert (ctx->outer->outer
8171 				  && is_parallel_ctx (ctx->outer->outer));
8172 		      taskreg_ctx = ctx->outer->outer;
8173 		    }
8174 		  else
8175 		    {
8176 		      struct omp_for_data outer_fd;
8177 		      omp_extract_for_data (gfor, &outer_fd, NULL);
8178 		      n2 = fold_convert (TREE_TYPE (n2), outer_fd.loop.n2);
8179 		    }
8180 		}
8181 	      else if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_TASKLOOP)
8182 		taskreg_ctx = ctx->outer->outer;
8183 	    }
8184 	  else if (is_taskreg_ctx (ctx->outer))
8185 	    taskreg_ctx = ctx->outer;
8186 	  if (taskreg_ctx)
8187 	    {
8188 	      int i;
8189 	      tree taskreg_clauses
8190 		= gimple_omp_taskreg_clauses (taskreg_ctx->stmt);
8191 	      tree innerc = omp_find_clause (taskreg_clauses,
8192 					     OMP_CLAUSE__LOOPTEMP_);
8193 	      gcc_assert (innerc);
8194 	      for (i = 0; i < fd->collapse; i++)
8195 		{
8196 		  innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
8197 					    OMP_CLAUSE__LOOPTEMP_);
8198 		  gcc_assert (innerc);
8199 		}
8200 	      innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
8201 					OMP_CLAUSE__LOOPTEMP_);
8202 	      if (innerc)
8203 		n2 = fold_convert (TREE_TYPE (n2),
8204 				   lookup_decl (OMP_CLAUSE_DECL (innerc),
8205 						taskreg_ctx));
8206 	    }
8207 	}
8208       cond = build2 (cond_code, boolean_type_node, fd->loop.v, n2);
8209     }
8210 
8211   clauses = gimple_omp_for_clauses (fd->for_stmt);
8212   stmts = NULL;
8213   lower_lastprivate_clauses (clauses, cond, &stmts, ctx);
8214   if (!gimple_seq_empty_p (stmts))
8215     {
8216       gimple_seq_add_seq (&stmts, *dlist);
8217       *dlist = stmts;
8218 
8219       /* Optimize: v = 0; is usually cheaper than v = some_other_constant.  */
8220       vinit = fd->loop.n1;
8221       if (cond_code == EQ_EXPR
8222 	  && tree_fits_shwi_p (fd->loop.n2)
8223 	  && ! integer_zerop (fd->loop.n2))
8224 	vinit = build_int_cst (TREE_TYPE (fd->loop.v), 0);
8225       else
8226 	vinit = unshare_expr (vinit);
8227 
8228       /* Initialize the iterator variable, so that threads that don't execute
8229 	 any iterations don't execute the lastprivate clauses by accident.  */
8230       gimplify_assign (fd->loop.v, vinit, body_p);
8231     }
8232 }
8233 
8234 
8235 /* Lower code for an OMP loop directive.  */
8236 
8237 static void
lower_omp_for(gimple_stmt_iterator * gsi_p,omp_context * ctx)8238 lower_omp_for (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8239 {
8240   tree *rhs_p, block;
8241   struct omp_for_data fd, *fdp = NULL;
8242   gomp_for *stmt = as_a <gomp_for *> (gsi_stmt (*gsi_p));
8243   gbind *new_stmt;
8244   gimple_seq omp_for_body, body, dlist, tred_ilist = NULL, tred_dlist = NULL;
8245   gimple_seq cnt_list = NULL;
8246   gimple_seq oacc_head = NULL, oacc_tail = NULL;
8247   size_t i;
8248 
8249   push_gimplify_context ();
8250 
8251   lower_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
8252 
8253   block = make_node (BLOCK);
8254   new_stmt = gimple_build_bind (NULL, NULL, block);
8255   /* Replace at gsi right away, so that 'stmt' is no member
8256      of a sequence anymore as we're going to add to a different
8257      one below.  */
8258   gsi_replace (gsi_p, new_stmt, true);
8259 
8260   /* Move declaration of temporaries in the loop body before we make
8261      it go away.  */
8262   omp_for_body = gimple_omp_body (stmt);
8263   if (!gimple_seq_empty_p (omp_for_body)
8264       && gimple_code (gimple_seq_first_stmt (omp_for_body)) == GIMPLE_BIND)
8265     {
8266       gbind *inner_bind
8267 	= as_a <gbind *> (gimple_seq_first_stmt (omp_for_body));
8268       tree vars = gimple_bind_vars (inner_bind);
8269       gimple_bind_append_vars (new_stmt, vars);
8270       /* bind_vars/BLOCK_VARS are being moved to new_stmt/block, don't
8271 	 keep them on the inner_bind and it's block.  */
8272       gimple_bind_set_vars (inner_bind, NULL_TREE);
8273       if (gimple_bind_block (inner_bind))
8274 	BLOCK_VARS (gimple_bind_block (inner_bind)) = NULL_TREE;
8275     }
8276 
8277   if (gimple_omp_for_combined_into_p (stmt))
8278     {
8279       omp_extract_for_data (stmt, &fd, NULL);
8280       fdp = &fd;
8281 
8282       /* We need two temporaries with fd.loop.v type (istart/iend)
8283 	 and then (fd.collapse - 1) temporaries with the same
8284 	 type for count2 ... countN-1 vars if not constant.  */
8285       size_t count = 2;
8286       tree type = fd.iter_type;
8287       if (fd.collapse > 1
8288 	  && TREE_CODE (fd.loop.n2) != INTEGER_CST)
8289 	count += fd.collapse - 1;
8290       bool taskreg_for
8291 	= (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR
8292 	   || gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP);
8293       tree outerc = NULL, *pc = gimple_omp_for_clauses_ptr (stmt);
8294       tree simtc = NULL;
8295       tree clauses = *pc;
8296       if (taskreg_for)
8297 	outerc
8298 	  = omp_find_clause (gimple_omp_taskreg_clauses (ctx->outer->stmt),
8299 			     OMP_CLAUSE__LOOPTEMP_);
8300       if (ctx->simt_stmt)
8301 	simtc = omp_find_clause (gimple_omp_for_clauses (ctx->simt_stmt),
8302 				 OMP_CLAUSE__LOOPTEMP_);
8303       for (i = 0; i < count; i++)
8304 	{
8305 	  tree temp;
8306 	  if (taskreg_for)
8307 	    {
8308 	      gcc_assert (outerc);
8309 	      temp = lookup_decl (OMP_CLAUSE_DECL (outerc), ctx->outer);
8310 	      outerc = omp_find_clause (OMP_CLAUSE_CHAIN (outerc),
8311 					OMP_CLAUSE__LOOPTEMP_);
8312 	    }
8313 	  else
8314 	    {
8315 	      /* If there are 2 adjacent SIMD stmts, one with _simt_
8316 		 clause, another without, make sure they have the same
8317 		 decls in _looptemp_ clauses, because the outer stmt
8318 		 they are combined into will look up just one inner_stmt.  */
8319 	      if (ctx->simt_stmt)
8320 		temp = OMP_CLAUSE_DECL (simtc);
8321 	      else
8322 		temp = create_tmp_var (type);
8323 	      insert_decl_map (&ctx->outer->cb, temp, temp);
8324 	    }
8325 	  *pc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
8326 	  OMP_CLAUSE_DECL (*pc) = temp;
8327 	  pc = &OMP_CLAUSE_CHAIN (*pc);
8328 	  if (ctx->simt_stmt)
8329 	    simtc = omp_find_clause (OMP_CLAUSE_CHAIN (simtc),
8330 				     OMP_CLAUSE__LOOPTEMP_);
8331 	}
8332       *pc = clauses;
8333     }
8334 
8335   /* The pre-body and input clauses go before the lowered GIMPLE_OMP_FOR.  */
8336   dlist = NULL;
8337   body = NULL;
8338   tree rclauses
8339     = omp_task_reductions_find_first (gimple_omp_for_clauses (stmt), OMP_FOR,
8340 				      OMP_CLAUSE_REDUCTION);
8341   tree rtmp = NULL_TREE;
8342   if (rclauses)
8343     {
8344       tree type = build_pointer_type (pointer_sized_int_node);
8345       tree temp = create_tmp_var (type);
8346       tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
8347       OMP_CLAUSE_DECL (c) = temp;
8348       OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (stmt);
8349       gimple_omp_for_set_clauses (stmt, c);
8350       lower_omp_task_reductions (ctx, OMP_FOR,
8351 				 gimple_omp_for_clauses (stmt),
8352 				 &tred_ilist, &tred_dlist);
8353       rclauses = c;
8354       rtmp = make_ssa_name (type);
8355       gimple_seq_add_stmt (&body, gimple_build_assign (rtmp, temp));
8356     }
8357 
8358   lower_rec_input_clauses (gimple_omp_for_clauses (stmt), &body, &dlist, ctx,
8359 			   fdp);
8360   gimple_seq_add_seq (rclauses ? &tred_ilist : &body,
8361 		      gimple_omp_for_pre_body (stmt));
8362 
8363   lower_omp (gimple_omp_body_ptr (stmt), ctx);
8364 
8365   /* Lower the header expressions.  At this point, we can assume that
8366      the header is of the form:
8367 
8368      	#pragma omp for (V = VAL1; V {<|>|<=|>=} VAL2; V = V [+-] VAL3)
8369 
8370      We just need to make sure that VAL1, VAL2 and VAL3 are lowered
8371      using the .omp_data_s mapping, if needed.  */
8372   for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
8373     {
8374       rhs_p = gimple_omp_for_initial_ptr (stmt, i);
8375       if (!is_gimple_min_invariant (*rhs_p))
8376 	*rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
8377       else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
8378 	recompute_tree_invariant_for_addr_expr (*rhs_p);
8379 
8380       rhs_p = gimple_omp_for_final_ptr (stmt, i);
8381       if (!is_gimple_min_invariant (*rhs_p))
8382 	*rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
8383       else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
8384 	recompute_tree_invariant_for_addr_expr (*rhs_p);
8385 
8386       rhs_p = &TREE_OPERAND (gimple_omp_for_incr (stmt, i), 1);
8387       if (!is_gimple_min_invariant (*rhs_p))
8388 	*rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
8389     }
8390   if (rclauses)
8391     gimple_seq_add_seq (&tred_ilist, cnt_list);
8392   else
8393     gimple_seq_add_seq (&body, cnt_list);
8394 
8395   /* Once lowered, extract the bounds and clauses.  */
8396   omp_extract_for_data (stmt, &fd, NULL);
8397 
8398   if (is_gimple_omp_oacc (ctx->stmt)
8399       && !ctx_in_oacc_kernels_region (ctx))
8400     lower_oacc_head_tail (gimple_location (stmt),
8401 			  gimple_omp_for_clauses (stmt),
8402 			  &oacc_head, &oacc_tail, ctx);
8403 
8404   /* Add OpenACC partitioning and reduction markers just before the loop.  */
8405   if (oacc_head)
8406     gimple_seq_add_seq (&body, oacc_head);
8407 
8408   lower_omp_for_lastprivate (&fd, &body, &dlist, ctx);
8409 
8410   if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
8411     for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
8412       if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
8413 	  && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
8414 	{
8415 	  OMP_CLAUSE_DECL (c) = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
8416 	  if (DECL_P (OMP_CLAUSE_LINEAR_STEP (c)))
8417 	    OMP_CLAUSE_LINEAR_STEP (c)
8418 	      = maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_LINEAR_STEP (c),
8419 						ctx);
8420 	}
8421 
8422   bool phony_loop = (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_GRID_LOOP
8423 		     && gimple_omp_for_grid_phony (stmt));
8424   if (!phony_loop)
8425     gimple_seq_add_stmt (&body, stmt);
8426   gimple_seq_add_seq (&body, gimple_omp_body (stmt));
8427 
8428   if (!phony_loop)
8429     gimple_seq_add_stmt (&body, gimple_build_omp_continue (fd.loop.v,
8430 							   fd.loop.v));
8431 
8432   /* After the loop, add exit clauses.  */
8433   lower_reduction_clauses (gimple_omp_for_clauses (stmt), &body, ctx);
8434 
8435   if (ctx->cancellable)
8436     gimple_seq_add_stmt (&body, gimple_build_label (ctx->cancel_label));
8437 
8438   gimple_seq_add_seq (&body, dlist);
8439 
8440   if (rclauses)
8441     {
8442       gimple_seq_add_seq (&tred_ilist, body);
8443       body = tred_ilist;
8444     }
8445 
8446   body = maybe_catch_exception (body);
8447 
8448   if (!phony_loop)
8449     {
8450       /* Region exit marker goes at the end of the loop body.  */
8451       gimple *g = gimple_build_omp_return (fd.have_nowait);
8452       gimple_seq_add_stmt (&body, g);
8453 
8454       gimple_seq_add_seq (&body, tred_dlist);
8455 
8456       maybe_add_implicit_barrier_cancel (ctx, g, &body);
8457 
8458       if (rclauses)
8459 	OMP_CLAUSE_DECL (rclauses) = rtmp;
8460     }
8461 
8462   /* Add OpenACC joining and reduction markers just after the loop.  */
8463   if (oacc_tail)
8464     gimple_seq_add_seq (&body, oacc_tail);
8465 
8466   pop_gimplify_context (new_stmt);
8467 
8468   gimple_bind_append_vars (new_stmt, ctx->block_vars);
8469   maybe_remove_omp_member_access_dummy_vars (new_stmt);
8470   BLOCK_VARS (block) = gimple_bind_vars (new_stmt);
8471   if (BLOCK_VARS (block))
8472     TREE_USED (block) = 1;
8473 
8474   gimple_bind_set_body (new_stmt, body);
8475   gimple_omp_set_body (stmt, NULL);
8476   gimple_omp_for_set_pre_body (stmt, NULL);
8477 }
8478 
8479 /* Callback for walk_stmts.  Check if the current statement only contains
8480    GIMPLE_OMP_FOR or GIMPLE_OMP_SECTIONS.  */
8481 
8482 static tree
check_combined_parallel(gimple_stmt_iterator * gsi_p,bool * handled_ops_p,struct walk_stmt_info * wi)8483 check_combined_parallel (gimple_stmt_iterator *gsi_p,
8484     			 bool *handled_ops_p,
8485     			 struct walk_stmt_info *wi)
8486 {
8487   int *info = (int *) wi->info;
8488   gimple *stmt = gsi_stmt (*gsi_p);
8489 
8490   *handled_ops_p = true;
8491   switch (gimple_code (stmt))
8492     {
8493     WALK_SUBSTMTS;
8494 
8495     case GIMPLE_DEBUG:
8496       break;
8497     case GIMPLE_OMP_FOR:
8498     case GIMPLE_OMP_SECTIONS:
8499       *info = *info == 0 ? 1 : -1;
8500       break;
8501     default:
8502       *info = -1;
8503       break;
8504     }
8505   return NULL;
8506 }
8507 
8508 struct omp_taskcopy_context
8509 {
8510   /* This field must be at the beginning, as we do "inheritance": Some
8511      callback functions for tree-inline.c (e.g., omp_copy_decl)
8512      receive a copy_body_data pointer that is up-casted to an
8513      omp_context pointer.  */
8514   copy_body_data cb;
8515   omp_context *ctx;
8516 };
8517 
8518 static tree
task_copyfn_copy_decl(tree var,copy_body_data * cb)8519 task_copyfn_copy_decl (tree var, copy_body_data *cb)
8520 {
8521   struct omp_taskcopy_context *tcctx = (struct omp_taskcopy_context *) cb;
8522 
8523   if (splay_tree_lookup (tcctx->ctx->sfield_map, (splay_tree_key) var))
8524     return create_tmp_var (TREE_TYPE (var));
8525 
8526   return var;
8527 }
8528 
8529 static tree
task_copyfn_remap_type(struct omp_taskcopy_context * tcctx,tree orig_type)8530 task_copyfn_remap_type (struct omp_taskcopy_context *tcctx, tree orig_type)
8531 {
8532   tree name, new_fields = NULL, type, f;
8533 
8534   type = lang_hooks.types.make_type (RECORD_TYPE);
8535   name = DECL_NAME (TYPE_NAME (orig_type));
8536   name = build_decl (gimple_location (tcctx->ctx->stmt),
8537 		     TYPE_DECL, name, type);
8538   TYPE_NAME (type) = name;
8539 
8540   for (f = TYPE_FIELDS (orig_type); f ; f = TREE_CHAIN (f))
8541     {
8542       tree new_f = copy_node (f);
8543       DECL_CONTEXT (new_f) = type;
8544       TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &tcctx->cb);
8545       TREE_CHAIN (new_f) = new_fields;
8546       walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &tcctx->cb, NULL);
8547       walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r, &tcctx->cb, NULL);
8548       walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
8549 		 &tcctx->cb, NULL);
8550       new_fields = new_f;
8551       tcctx->cb.decl_map->put (f, new_f);
8552     }
8553   TYPE_FIELDS (type) = nreverse (new_fields);
8554   layout_type (type);
8555   return type;
8556 }
8557 
8558 /* Create task copyfn.  */
8559 
8560 static void
create_task_copyfn(gomp_task * task_stmt,omp_context * ctx)8561 create_task_copyfn (gomp_task *task_stmt, omp_context *ctx)
8562 {
8563   struct function *child_cfun;
8564   tree child_fn, t, c, src, dst, f, sf, arg, sarg, decl;
8565   tree record_type, srecord_type, bind, list;
8566   bool record_needs_remap = false, srecord_needs_remap = false;
8567   splay_tree_node n;
8568   struct omp_taskcopy_context tcctx;
8569   location_t loc = gimple_location (task_stmt);
8570   size_t looptempno = 0;
8571 
8572   child_fn = gimple_omp_task_copy_fn (task_stmt);
8573   child_cfun = DECL_STRUCT_FUNCTION (child_fn);
8574   gcc_assert (child_cfun->cfg == NULL);
8575   DECL_SAVED_TREE (child_fn) = alloc_stmt_list ();
8576 
8577   /* Reset DECL_CONTEXT on function arguments.  */
8578   for (t = DECL_ARGUMENTS (child_fn); t; t = DECL_CHAIN (t))
8579     DECL_CONTEXT (t) = child_fn;
8580 
8581   /* Populate the function.  */
8582   push_gimplify_context ();
8583   push_cfun (child_cfun);
8584 
8585   bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
8586   TREE_SIDE_EFFECTS (bind) = 1;
8587   list = NULL;
8588   DECL_SAVED_TREE (child_fn) = bind;
8589   DECL_SOURCE_LOCATION (child_fn) = gimple_location (task_stmt);
8590 
8591   /* Remap src and dst argument types if needed.  */
8592   record_type = ctx->record_type;
8593   srecord_type = ctx->srecord_type;
8594   for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
8595     if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
8596       {
8597 	record_needs_remap = true;
8598 	break;
8599       }
8600   for (f = TYPE_FIELDS (srecord_type); f ; f = DECL_CHAIN (f))
8601     if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
8602       {
8603 	srecord_needs_remap = true;
8604 	break;
8605       }
8606 
8607   if (record_needs_remap || srecord_needs_remap)
8608     {
8609       memset (&tcctx, '\0', sizeof (tcctx));
8610       tcctx.cb.src_fn = ctx->cb.src_fn;
8611       tcctx.cb.dst_fn = child_fn;
8612       tcctx.cb.src_node = cgraph_node::get (tcctx.cb.src_fn);
8613       gcc_checking_assert (tcctx.cb.src_node);
8614       tcctx.cb.dst_node = tcctx.cb.src_node;
8615       tcctx.cb.src_cfun = ctx->cb.src_cfun;
8616       tcctx.cb.copy_decl = task_copyfn_copy_decl;
8617       tcctx.cb.eh_lp_nr = 0;
8618       tcctx.cb.transform_call_graph_edges = CB_CGE_MOVE;
8619       tcctx.cb.decl_map = new hash_map<tree, tree>;
8620       tcctx.ctx = ctx;
8621 
8622       if (record_needs_remap)
8623 	record_type = task_copyfn_remap_type (&tcctx, record_type);
8624       if (srecord_needs_remap)
8625 	srecord_type = task_copyfn_remap_type (&tcctx, srecord_type);
8626     }
8627   else
8628     tcctx.cb.decl_map = NULL;
8629 
8630   arg = DECL_ARGUMENTS (child_fn);
8631   TREE_TYPE (arg) = build_pointer_type (record_type);
8632   sarg = DECL_CHAIN (arg);
8633   TREE_TYPE (sarg) = build_pointer_type (srecord_type);
8634 
8635   /* First pass: initialize temporaries used in record_type and srecord_type
8636      sizes and field offsets.  */
8637   if (tcctx.cb.decl_map)
8638     for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
8639       if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
8640 	{
8641 	  tree *p;
8642 
8643 	  decl = OMP_CLAUSE_DECL (c);
8644 	  p = tcctx.cb.decl_map->get (decl);
8645 	  if (p == NULL)
8646 	    continue;
8647 	  n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
8648 	  sf = (tree) n->value;
8649 	  sf = *tcctx.cb.decl_map->get (sf);
8650 	  src = build_simple_mem_ref_loc (loc, sarg);
8651 	  src = omp_build_component_ref (src, sf);
8652 	  t = build2 (MODIFY_EXPR, TREE_TYPE (*p), *p, src);
8653 	  append_to_statement_list (t, &list);
8654 	}
8655 
8656   /* Second pass: copy shared var pointers and copy construct non-VLA
8657      firstprivate vars.  */
8658   for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
8659     switch (OMP_CLAUSE_CODE (c))
8660       {
8661 	splay_tree_key key;
8662       case OMP_CLAUSE_SHARED:
8663 	decl = OMP_CLAUSE_DECL (c);
8664 	key = (splay_tree_key) decl;
8665 	if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
8666 	  key = (splay_tree_key) &DECL_UID (decl);
8667 	n = splay_tree_lookup (ctx->field_map, key);
8668 	if (n == NULL)
8669 	  break;
8670 	f = (tree) n->value;
8671 	if (tcctx.cb.decl_map)
8672 	  f = *tcctx.cb.decl_map->get (f);
8673 	n = splay_tree_lookup (ctx->sfield_map, key);
8674 	sf = (tree) n->value;
8675 	if (tcctx.cb.decl_map)
8676 	  sf = *tcctx.cb.decl_map->get (sf);
8677 	src = build_simple_mem_ref_loc (loc, sarg);
8678 	src = omp_build_component_ref (src, sf);
8679 	dst = build_simple_mem_ref_loc (loc, arg);
8680 	dst = omp_build_component_ref (dst, f);
8681 	t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
8682 	append_to_statement_list (t, &list);
8683 	break;
8684       case OMP_CLAUSE_REDUCTION:
8685       case OMP_CLAUSE_IN_REDUCTION:
8686 	decl = OMP_CLAUSE_DECL (c);
8687 	if (TREE_CODE (decl) == MEM_REF)
8688 	  {
8689 	    decl = TREE_OPERAND (decl, 0);
8690 	    if (TREE_CODE (decl) == POINTER_PLUS_EXPR)
8691 	      decl = TREE_OPERAND (decl, 0);
8692 	    if (TREE_CODE (decl) == INDIRECT_REF
8693 		|| TREE_CODE (decl) == ADDR_EXPR)
8694 	      decl = TREE_OPERAND (decl, 0);
8695 	  }
8696 	key = (splay_tree_key) decl;
8697 	n = splay_tree_lookup (ctx->field_map, key);
8698 	if (n == NULL)
8699 	  break;
8700 	f = (tree) n->value;
8701 	if (tcctx.cb.decl_map)
8702 	  f = *tcctx.cb.decl_map->get (f);
8703 	n = splay_tree_lookup (ctx->sfield_map, key);
8704 	sf = (tree) n->value;
8705 	if (tcctx.cb.decl_map)
8706 	  sf = *tcctx.cb.decl_map->get (sf);
8707 	src = build_simple_mem_ref_loc (loc, sarg);
8708 	src = omp_build_component_ref (src, sf);
8709 	if (decl != OMP_CLAUSE_DECL (c)
8710 	    && TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
8711 	    && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE)
8712 	  src = build_simple_mem_ref_loc (loc, src);
8713 	dst = build_simple_mem_ref_loc (loc, arg);
8714 	dst = omp_build_component_ref (dst, f);
8715 	t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
8716 	append_to_statement_list (t, &list);
8717 	break;
8718       case OMP_CLAUSE__LOOPTEMP_:
8719 	/* Fields for first two _looptemp_ clauses are initialized by
8720 	   GOMP_taskloop*, the rest are handled like firstprivate.  */
8721         if (looptempno < 2)
8722 	  {
8723 	    looptempno++;
8724 	    break;
8725 	  }
8726 	/* FALLTHRU */
8727       case OMP_CLAUSE__REDUCTEMP_:
8728       case OMP_CLAUSE_FIRSTPRIVATE:
8729 	decl = OMP_CLAUSE_DECL (c);
8730 	if (is_variable_sized (decl))
8731 	  break;
8732 	n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
8733 	if (n == NULL)
8734 	  break;
8735 	f = (tree) n->value;
8736 	if (tcctx.cb.decl_map)
8737 	  f = *tcctx.cb.decl_map->get (f);
8738 	n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
8739 	if (n != NULL)
8740 	  {
8741 	    sf = (tree) n->value;
8742 	    if (tcctx.cb.decl_map)
8743 	      sf = *tcctx.cb.decl_map->get (sf);
8744 	    src = build_simple_mem_ref_loc (loc, sarg);
8745 	    src = omp_build_component_ref (src, sf);
8746 	    if (use_pointer_for_field (decl, NULL) || omp_is_reference (decl))
8747 	      src = build_simple_mem_ref_loc (loc, src);
8748 	  }
8749 	else
8750 	  src = decl;
8751 	dst = build_simple_mem_ref_loc (loc, arg);
8752 	dst = omp_build_component_ref (dst, f);
8753 	if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
8754 	  t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
8755 	else
8756 	  t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
8757 	append_to_statement_list (t, &list);
8758 	break;
8759       case OMP_CLAUSE_PRIVATE:
8760 	if (! OMP_CLAUSE_PRIVATE_OUTER_REF (c))
8761 	  break;
8762 	decl = OMP_CLAUSE_DECL (c);
8763 	n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
8764 	f = (tree) n->value;
8765 	if (tcctx.cb.decl_map)
8766 	  f = *tcctx.cb.decl_map->get (f);
8767 	n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
8768 	if (n != NULL)
8769 	  {
8770 	    sf = (tree) n->value;
8771 	    if (tcctx.cb.decl_map)
8772 	      sf = *tcctx.cb.decl_map->get (sf);
8773 	    src = build_simple_mem_ref_loc (loc, sarg);
8774 	    src = omp_build_component_ref (src, sf);
8775 	    if (use_pointer_for_field (decl, NULL))
8776 	      src = build_simple_mem_ref_loc (loc, src);
8777 	  }
8778 	else
8779 	  src = decl;
8780 	dst = build_simple_mem_ref_loc (loc, arg);
8781 	dst = omp_build_component_ref (dst, f);
8782 	t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
8783 	append_to_statement_list (t, &list);
8784 	break;
8785       default:
8786 	break;
8787       }
8788 
8789   /* Last pass: handle VLA firstprivates.  */
8790   if (tcctx.cb.decl_map)
8791     for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
8792       if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
8793 	{
8794 	  tree ind, ptr, df;
8795 
8796 	  decl = OMP_CLAUSE_DECL (c);
8797 	  if (!is_variable_sized (decl))
8798 	    continue;
8799 	  n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
8800 	  if (n == NULL)
8801 	    continue;
8802 	  f = (tree) n->value;
8803 	  f = *tcctx.cb.decl_map->get (f);
8804 	  gcc_assert (DECL_HAS_VALUE_EXPR_P (decl));
8805 	  ind = DECL_VALUE_EXPR (decl);
8806 	  gcc_assert (TREE_CODE (ind) == INDIRECT_REF);
8807 	  gcc_assert (DECL_P (TREE_OPERAND (ind, 0)));
8808 	  n = splay_tree_lookup (ctx->sfield_map,
8809 				 (splay_tree_key) TREE_OPERAND (ind, 0));
8810 	  sf = (tree) n->value;
8811 	  sf = *tcctx.cb.decl_map->get (sf);
8812 	  src = build_simple_mem_ref_loc (loc, sarg);
8813 	  src = omp_build_component_ref (src, sf);
8814 	  src = build_simple_mem_ref_loc (loc, src);
8815 	  dst = build_simple_mem_ref_loc (loc, arg);
8816 	  dst = omp_build_component_ref (dst, f);
8817 	  t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
8818 	  append_to_statement_list (t, &list);
8819 	  n = splay_tree_lookup (ctx->field_map,
8820 				 (splay_tree_key) TREE_OPERAND (ind, 0));
8821 	  df = (tree) n->value;
8822 	  df = *tcctx.cb.decl_map->get (df);
8823 	  ptr = build_simple_mem_ref_loc (loc, arg);
8824 	  ptr = omp_build_component_ref (ptr, df);
8825 	  t = build2 (MODIFY_EXPR, TREE_TYPE (ptr), ptr,
8826 		      build_fold_addr_expr_loc (loc, dst));
8827 	  append_to_statement_list (t, &list);
8828 	}
8829 
8830   t = build1 (RETURN_EXPR, void_type_node, NULL);
8831   append_to_statement_list (t, &list);
8832 
8833   if (tcctx.cb.decl_map)
8834     delete tcctx.cb.decl_map;
8835   pop_gimplify_context (NULL);
8836   BIND_EXPR_BODY (bind) = list;
8837   pop_cfun ();
8838 }
8839 
8840 static void
lower_depend_clauses(tree * pclauses,gimple_seq * iseq,gimple_seq * oseq)8841 lower_depend_clauses (tree *pclauses, gimple_seq *iseq, gimple_seq *oseq)
8842 {
8843   tree c, clauses;
8844   gimple *g;
8845   size_t cnt[4] = { 0, 0, 0, 0 }, idx = 2, i;
8846 
8847   clauses = omp_find_clause (*pclauses, OMP_CLAUSE_DEPEND);
8848   gcc_assert (clauses);
8849   for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
8850     if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
8851       switch (OMP_CLAUSE_DEPEND_KIND (c))
8852 	{
8853 	case OMP_CLAUSE_DEPEND_LAST:
8854 	  /* Lowering already done at gimplification.  */
8855 	  return;
8856 	case OMP_CLAUSE_DEPEND_IN:
8857 	  cnt[2]++;
8858 	  break;
8859 	case OMP_CLAUSE_DEPEND_OUT:
8860 	case OMP_CLAUSE_DEPEND_INOUT:
8861 	  cnt[0]++;
8862 	  break;
8863 	case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
8864 	  cnt[1]++;
8865 	  break;
8866 	case OMP_CLAUSE_DEPEND_DEPOBJ:
8867 	  cnt[3]++;
8868 	  break;
8869 	case OMP_CLAUSE_DEPEND_SOURCE:
8870 	case OMP_CLAUSE_DEPEND_SINK:
8871 	  /* FALLTHRU */
8872 	default:
8873 	  gcc_unreachable ();
8874 	}
8875   if (cnt[1] || cnt[3])
8876     idx = 5;
8877   size_t total = cnt[0] + cnt[1] + cnt[2] + cnt[3];
8878   tree type = build_array_type_nelts (ptr_type_node, total + idx);
8879   tree array = create_tmp_var (type);
8880   TREE_ADDRESSABLE (array) = 1;
8881   tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
8882 		   NULL_TREE);
8883   if (idx == 5)
8884     {
8885       g = gimple_build_assign (r, build_int_cst (ptr_type_node, 0));
8886       gimple_seq_add_stmt (iseq, g);
8887       r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
8888 		  NULL_TREE);
8889     }
8890   g = gimple_build_assign (r, build_int_cst (ptr_type_node, total));
8891   gimple_seq_add_stmt (iseq, g);
8892   for (i = 0; i < (idx == 5 ? 3 : 1); i++)
8893     {
8894       r = build4 (ARRAY_REF, ptr_type_node, array,
8895 		  size_int (i + 1 + (idx == 5)), NULL_TREE, NULL_TREE);
8896       g = gimple_build_assign (r, build_int_cst (ptr_type_node, cnt[i]));
8897       gimple_seq_add_stmt (iseq, g);
8898     }
8899   for (i = 0; i < 4; i++)
8900     {
8901       if (cnt[i] == 0)
8902 	continue;
8903       for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
8904 	if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
8905 	  continue;
8906 	else
8907 	  {
8908 	    switch (OMP_CLAUSE_DEPEND_KIND (c))
8909 	      {
8910 	      case OMP_CLAUSE_DEPEND_IN:
8911 		if (i != 2)
8912 		  continue;
8913 		break;
8914 	      case OMP_CLAUSE_DEPEND_OUT:
8915 	      case OMP_CLAUSE_DEPEND_INOUT:
8916 		if (i != 0)
8917 		  continue;
8918 		break;
8919 	      case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
8920 		if (i != 1)
8921 		  continue;
8922 		break;
8923 	      case OMP_CLAUSE_DEPEND_DEPOBJ:
8924 		if (i != 3)
8925 		  continue;
8926 		break;
8927 	      default:
8928 		gcc_unreachable ();
8929 	      }
8930 	    tree t = OMP_CLAUSE_DECL (c);
8931 	    t = fold_convert (ptr_type_node, t);
8932 	    gimplify_expr (&t, iseq, NULL, is_gimple_val, fb_rvalue);
8933 	    r = build4 (ARRAY_REF, ptr_type_node, array, size_int (idx++),
8934 			NULL_TREE, NULL_TREE);
8935 	    g = gimple_build_assign (r, t);
8936 	    gimple_seq_add_stmt (iseq, g);
8937 	  }
8938     }
8939   c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
8940   OMP_CLAUSE_DEPEND_KIND (c) = OMP_CLAUSE_DEPEND_LAST;
8941   OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
8942   OMP_CLAUSE_CHAIN (c) = *pclauses;
8943   *pclauses = c;
8944   tree clobber = build_constructor (type, NULL);
8945   TREE_THIS_VOLATILE (clobber) = 1;
8946   g = gimple_build_assign (array, clobber);
8947   gimple_seq_add_stmt (oseq, g);
8948 }
8949 
8950 /* Lower the OpenMP parallel or task directive in the current statement
8951    in GSI_P.  CTX holds context information for the directive.  */
8952 
8953 static void
lower_omp_taskreg(gimple_stmt_iterator * gsi_p,omp_context * ctx)8954 lower_omp_taskreg (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8955 {
8956   tree clauses;
8957   tree child_fn, t;
8958   gimple *stmt = gsi_stmt (*gsi_p);
8959   gbind *par_bind, *bind, *dep_bind = NULL;
8960   gimple_seq par_body;
8961   location_t loc = gimple_location (stmt);
8962 
8963   clauses = gimple_omp_taskreg_clauses (stmt);
8964   if (gimple_code (stmt) == GIMPLE_OMP_TASK
8965       && gimple_omp_task_taskwait_p (stmt))
8966     {
8967       par_bind = NULL;
8968       par_body = NULL;
8969     }
8970   else
8971     {
8972       par_bind
8973 	= as_a <gbind *> (gimple_seq_first_stmt (gimple_omp_body (stmt)));
8974       par_body = gimple_bind_body (par_bind);
8975     }
8976   child_fn = ctx->cb.dst_fn;
8977   if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL
8978       && !gimple_omp_parallel_combined_p (stmt))
8979     {
8980       struct walk_stmt_info wi;
8981       int ws_num = 0;
8982 
8983       memset (&wi, 0, sizeof (wi));
8984       wi.info = &ws_num;
8985       wi.val_only = true;
8986       walk_gimple_seq (par_body, check_combined_parallel, NULL, &wi);
8987       if (ws_num == 1)
8988 	gimple_omp_parallel_set_combined_p (stmt, true);
8989     }
8990   gimple_seq dep_ilist = NULL;
8991   gimple_seq dep_olist = NULL;
8992   if (gimple_code (stmt) == GIMPLE_OMP_TASK
8993       && omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
8994     {
8995       push_gimplify_context ();
8996       dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
8997       lower_depend_clauses (gimple_omp_task_clauses_ptr (stmt),
8998 			    &dep_ilist, &dep_olist);
8999     }
9000 
9001   if (gimple_code (stmt) == GIMPLE_OMP_TASK
9002       && gimple_omp_task_taskwait_p (stmt))
9003     {
9004       if (dep_bind)
9005 	{
9006 	  gsi_replace (gsi_p, dep_bind, true);
9007 	  gimple_bind_add_seq (dep_bind, dep_ilist);
9008 	  gimple_bind_add_stmt (dep_bind, stmt);
9009 	  gimple_bind_add_seq (dep_bind, dep_olist);
9010 	  pop_gimplify_context (dep_bind);
9011 	}
9012       return;
9013     }
9014 
9015   if (ctx->srecord_type)
9016     create_task_copyfn (as_a <gomp_task *> (stmt), ctx);
9017 
9018   gimple_seq tskred_ilist = NULL;
9019   gimple_seq tskred_olist = NULL;
9020   if ((is_task_ctx (ctx)
9021        && gimple_omp_task_taskloop_p (ctx->stmt)
9022        && omp_find_clause (gimple_omp_task_clauses (ctx->stmt),
9023 			   OMP_CLAUSE_REDUCTION))
9024       || (is_parallel_ctx (ctx)
9025 	  && omp_find_clause (gimple_omp_parallel_clauses (stmt),
9026 			      OMP_CLAUSE__REDUCTEMP_)))
9027     {
9028       if (dep_bind == NULL)
9029 	{
9030 	  push_gimplify_context ();
9031 	  dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
9032 	}
9033       lower_omp_task_reductions (ctx, is_task_ctx (ctx) ? OMP_TASKLOOP
9034 							: OMP_PARALLEL,
9035 				 gimple_omp_taskreg_clauses (ctx->stmt),
9036 				 &tskred_ilist, &tskred_olist);
9037     }
9038 
9039   push_gimplify_context ();
9040 
9041   gimple_seq par_olist = NULL;
9042   gimple_seq par_ilist = NULL;
9043   gimple_seq par_rlist = NULL;
9044   bool phony_construct = gimple_code (stmt) == GIMPLE_OMP_PARALLEL
9045     && gimple_omp_parallel_grid_phony (as_a <gomp_parallel *> (stmt));
9046   if (phony_construct && ctx->record_type)
9047     {
9048       gcc_checking_assert (!ctx->receiver_decl);
9049       ctx->receiver_decl = create_tmp_var
9050 	(build_reference_type (ctx->record_type), ".omp_rec");
9051     }
9052   lower_rec_input_clauses (clauses, &par_ilist, &par_olist, ctx, NULL);
9053   lower_omp (&par_body, ctx);
9054   if (gimple_code (stmt) != GIMPLE_OMP_TASK)
9055     lower_reduction_clauses (clauses, &par_rlist, ctx);
9056 
9057   /* Declare all the variables created by mapping and the variables
9058      declared in the scope of the parallel body.  */
9059   record_vars_into (ctx->block_vars, child_fn);
9060   maybe_remove_omp_member_access_dummy_vars (par_bind);
9061   record_vars_into (gimple_bind_vars (par_bind), child_fn);
9062 
9063   if (ctx->record_type)
9064     {
9065       ctx->sender_decl
9066 	= create_tmp_var (ctx->srecord_type ? ctx->srecord_type
9067 			  : ctx->record_type, ".omp_data_o");
9068       DECL_NAMELESS (ctx->sender_decl) = 1;
9069       TREE_ADDRESSABLE (ctx->sender_decl) = 1;
9070       gimple_omp_taskreg_set_data_arg (stmt, ctx->sender_decl);
9071     }
9072 
9073   gimple_seq olist = NULL;
9074   gimple_seq ilist = NULL;
9075   lower_send_clauses (clauses, &ilist, &olist, ctx);
9076   lower_send_shared_vars (&ilist, &olist, ctx);
9077 
9078   if (ctx->record_type)
9079     {
9080       tree clobber = build_constructor (TREE_TYPE (ctx->sender_decl), NULL);
9081       TREE_THIS_VOLATILE (clobber) = 1;
9082       gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
9083 							clobber));
9084     }
9085 
9086   /* Once all the expansions are done, sequence all the different
9087      fragments inside gimple_omp_body.  */
9088 
9089   gimple_seq new_body = NULL;
9090 
9091   if (ctx->record_type)
9092     {
9093       t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
9094       /* fixup_child_record_type might have changed receiver_decl's type.  */
9095       t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
9096       gimple_seq_add_stmt (&new_body,
9097 	  		   gimple_build_assign (ctx->receiver_decl, t));
9098     }
9099 
9100   gimple_seq_add_seq (&new_body, par_ilist);
9101   gimple_seq_add_seq (&new_body, par_body);
9102   gimple_seq_add_seq (&new_body, par_rlist);
9103   if (ctx->cancellable)
9104     gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
9105   gimple_seq_add_seq (&new_body, par_olist);
9106   new_body = maybe_catch_exception (new_body);
9107   if (gimple_code (stmt) == GIMPLE_OMP_TASK)
9108     gimple_seq_add_stmt (&new_body,
9109 			 gimple_build_omp_continue (integer_zero_node,
9110 						    integer_zero_node));
9111   if (!phony_construct)
9112     {
9113       gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
9114       gimple_omp_set_body (stmt, new_body);
9115     }
9116 
9117   if (dep_bind && gimple_bind_block (par_bind) == NULL_TREE)
9118     bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
9119   else
9120     bind = gimple_build_bind (NULL, NULL, gimple_bind_block (par_bind));
9121   gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
9122   gimple_bind_add_seq (bind, ilist);
9123   if (!phony_construct)
9124     gimple_bind_add_stmt (bind, stmt);
9125   else
9126     gimple_bind_add_seq (bind, new_body);
9127   gimple_bind_add_seq (bind, olist);
9128 
9129   pop_gimplify_context (NULL);
9130 
9131   if (dep_bind)
9132     {
9133       gimple_bind_add_seq (dep_bind, dep_ilist);
9134       gimple_bind_add_seq (dep_bind, tskred_ilist);
9135       gimple_bind_add_stmt (dep_bind, bind);
9136       gimple_bind_add_seq (dep_bind, tskred_olist);
9137       gimple_bind_add_seq (dep_bind, dep_olist);
9138       pop_gimplify_context (dep_bind);
9139     }
9140 }
9141 
9142 /* Lower the GIMPLE_OMP_TARGET in the current statement
9143    in GSI_P.  CTX holds context information for the directive.  */
9144 
9145 static void
lower_omp_target(gimple_stmt_iterator * gsi_p,omp_context * ctx)9146 lower_omp_target (gimple_stmt_iterator *gsi_p, omp_context *ctx)
9147 {
9148   tree clauses;
9149   tree child_fn, t, c;
9150   gomp_target *stmt = as_a <gomp_target *> (gsi_stmt (*gsi_p));
9151   gbind *tgt_bind, *bind, *dep_bind = NULL;
9152   gimple_seq tgt_body, olist, ilist, fplist, new_body;
9153   location_t loc = gimple_location (stmt);
9154   bool offloaded, data_region;
9155   unsigned int map_cnt = 0;
9156 
9157   offloaded = is_gimple_omp_offloaded (stmt);
9158   switch (gimple_omp_target_kind (stmt))
9159     {
9160     case GF_OMP_TARGET_KIND_REGION:
9161     case GF_OMP_TARGET_KIND_UPDATE:
9162     case GF_OMP_TARGET_KIND_ENTER_DATA:
9163     case GF_OMP_TARGET_KIND_EXIT_DATA:
9164     case GF_OMP_TARGET_KIND_OACC_PARALLEL:
9165     case GF_OMP_TARGET_KIND_OACC_KERNELS:
9166     case GF_OMP_TARGET_KIND_OACC_UPDATE:
9167     case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA:
9168     case GF_OMP_TARGET_KIND_OACC_DECLARE:
9169       data_region = false;
9170       break;
9171     case GF_OMP_TARGET_KIND_DATA:
9172     case GF_OMP_TARGET_KIND_OACC_DATA:
9173     case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
9174       data_region = true;
9175       break;
9176     default:
9177       gcc_unreachable ();
9178     }
9179 
9180   clauses = gimple_omp_target_clauses (stmt);
9181 
9182   gimple_seq dep_ilist = NULL;
9183   gimple_seq dep_olist = NULL;
9184   if (omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
9185     {
9186       push_gimplify_context ();
9187       dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
9188       lower_depend_clauses (gimple_omp_target_clauses_ptr (stmt),
9189 			    &dep_ilist, &dep_olist);
9190     }
9191 
9192   tgt_bind = NULL;
9193   tgt_body = NULL;
9194   if (offloaded)
9195     {
9196       tgt_bind = gimple_seq_first_stmt_as_a_bind (gimple_omp_body (stmt));
9197       tgt_body = gimple_bind_body (tgt_bind);
9198     }
9199   else if (data_region)
9200     tgt_body = gimple_omp_body (stmt);
9201   child_fn = ctx->cb.dst_fn;
9202 
9203   push_gimplify_context ();
9204   fplist = NULL;
9205 
9206   for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
9207     switch (OMP_CLAUSE_CODE (c))
9208       {
9209 	tree var, x;
9210 
9211       default:
9212 	break;
9213       case OMP_CLAUSE_MAP:
9214 #if CHECKING_P
9215 	/* First check what we're prepared to handle in the following.  */
9216 	switch (OMP_CLAUSE_MAP_KIND (c))
9217 	  {
9218 	  case GOMP_MAP_ALLOC:
9219 	  case GOMP_MAP_TO:
9220 	  case GOMP_MAP_FROM:
9221 	  case GOMP_MAP_TOFROM:
9222 	  case GOMP_MAP_POINTER:
9223 	  case GOMP_MAP_TO_PSET:
9224 	  case GOMP_MAP_DELETE:
9225 	  case GOMP_MAP_RELEASE:
9226 	  case GOMP_MAP_ALWAYS_TO:
9227 	  case GOMP_MAP_ALWAYS_FROM:
9228 	  case GOMP_MAP_ALWAYS_TOFROM:
9229 	  case GOMP_MAP_FIRSTPRIVATE_POINTER:
9230 	  case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
9231 	  case GOMP_MAP_STRUCT:
9232 	  case GOMP_MAP_ALWAYS_POINTER:
9233 	    break;
9234 	  case GOMP_MAP_FORCE_ALLOC:
9235 	  case GOMP_MAP_FORCE_TO:
9236 	  case GOMP_MAP_FORCE_FROM:
9237 	  case GOMP_MAP_FORCE_TOFROM:
9238 	  case GOMP_MAP_FORCE_PRESENT:
9239 	  case GOMP_MAP_FORCE_DEVICEPTR:
9240 	  case GOMP_MAP_DEVICE_RESIDENT:
9241 	  case GOMP_MAP_LINK:
9242 	    gcc_assert (is_gimple_omp_oacc (stmt));
9243 	    break;
9244 	  default:
9245 	    gcc_unreachable ();
9246 	  }
9247 #endif
9248 	  /* FALLTHRU */
9249       case OMP_CLAUSE_TO:
9250       case OMP_CLAUSE_FROM:
9251       oacc_firstprivate:
9252 	var = OMP_CLAUSE_DECL (c);
9253 	if (!DECL_P (var))
9254 	  {
9255 	    if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP
9256 		|| (!OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
9257 		    && (OMP_CLAUSE_MAP_KIND (c)
9258 			!= GOMP_MAP_FIRSTPRIVATE_POINTER)))
9259 	      map_cnt++;
9260 	    continue;
9261 	  }
9262 
9263 	if (DECL_SIZE (var)
9264 	    && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
9265 	  {
9266 	    tree var2 = DECL_VALUE_EXPR (var);
9267 	    gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
9268 	    var2 = TREE_OPERAND (var2, 0);
9269 	    gcc_assert (DECL_P (var2));
9270 	    var = var2;
9271 	  }
9272 
9273 	if (offloaded
9274 	    && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
9275 	    && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
9276 		|| OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
9277 	  {
9278 	    if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
9279 	      {
9280 		if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx))
9281 		    && varpool_node::get_create (var)->offloadable)
9282 		  continue;
9283 
9284 		tree type = build_pointer_type (TREE_TYPE (var));
9285 		tree new_var = lookup_decl (var, ctx);
9286 		x = create_tmp_var_raw (type, get_name (new_var));
9287 		gimple_add_tmp_var (x);
9288 		x = build_simple_mem_ref (x);
9289 		SET_DECL_VALUE_EXPR (new_var, x);
9290 		DECL_HAS_VALUE_EXPR_P (new_var) = 1;
9291 	      }
9292 	    continue;
9293 	  }
9294 
9295 	if (!maybe_lookup_field (var, ctx))
9296 	  continue;
9297 
9298 	/* Don't remap oacc parallel reduction variables, because the
9299 	   intermediate result must be local to each gang.  */
9300 	if (offloaded && !(OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
9301 			   && OMP_CLAUSE_MAP_IN_REDUCTION (c)))
9302 	  {
9303 	    x = build_receiver_ref (var, true, ctx);
9304 	    tree new_var = lookup_decl (var, ctx);
9305 
9306 	    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
9307 		&& OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
9308 		&& !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
9309 		&& TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
9310 	      x = build_simple_mem_ref (x);
9311 	    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
9312 	      {
9313 		gcc_assert (is_gimple_omp_oacc (ctx->stmt));
9314 		if (omp_is_reference (new_var)
9315 		    && TREE_CODE (TREE_TYPE (new_var)) != POINTER_TYPE)
9316 		  {
9317 		    /* Create a local object to hold the instance
9318 		       value.  */
9319 		    tree type = TREE_TYPE (TREE_TYPE (new_var));
9320 		    const char *id = IDENTIFIER_POINTER (DECL_NAME (new_var));
9321 		    tree inst = create_tmp_var (type, id);
9322 		    gimplify_assign (inst, fold_indirect_ref (x), &fplist);
9323 		    x = build_fold_addr_expr (inst);
9324 		  }
9325 		gimplify_assign (new_var, x, &fplist);
9326 	      }
9327 	    else if (DECL_P (new_var))
9328 	      {
9329 		SET_DECL_VALUE_EXPR (new_var, x);
9330 		DECL_HAS_VALUE_EXPR_P (new_var) = 1;
9331 	      }
9332 	    else
9333 	      gcc_unreachable ();
9334 	  }
9335 	map_cnt++;
9336 	break;
9337 
9338       case OMP_CLAUSE_FIRSTPRIVATE:
9339 	if (is_oacc_parallel (ctx))
9340 	  goto oacc_firstprivate;
9341 	map_cnt++;
9342 	var = OMP_CLAUSE_DECL (c);
9343 	if (!omp_is_reference (var)
9344 	    && !is_gimple_reg_type (TREE_TYPE (var)))
9345 	  {
9346 	    tree new_var = lookup_decl (var, ctx);
9347 	    if (is_variable_sized (var))
9348 	      {
9349 		tree pvar = DECL_VALUE_EXPR (var);
9350 		gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
9351 		pvar = TREE_OPERAND (pvar, 0);
9352 		gcc_assert (DECL_P (pvar));
9353 		tree new_pvar = lookup_decl (pvar, ctx);
9354 		x = build_fold_indirect_ref (new_pvar);
9355 		TREE_THIS_NOTRAP (x) = 1;
9356 	      }
9357 	    else
9358 	      x = build_receiver_ref (var, true, ctx);
9359 	    SET_DECL_VALUE_EXPR (new_var, x);
9360 	    DECL_HAS_VALUE_EXPR_P (new_var) = 1;
9361 	  }
9362 	break;
9363 
9364       case OMP_CLAUSE_PRIVATE:
9365 	if (is_gimple_omp_oacc (ctx->stmt))
9366 	  break;
9367 	var = OMP_CLAUSE_DECL (c);
9368 	if (is_variable_sized (var))
9369 	  {
9370 	    tree new_var = lookup_decl (var, ctx);
9371 	    tree pvar = DECL_VALUE_EXPR (var);
9372 	    gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
9373 	    pvar = TREE_OPERAND (pvar, 0);
9374 	    gcc_assert (DECL_P (pvar));
9375 	    tree new_pvar = lookup_decl (pvar, ctx);
9376 	    x = build_fold_indirect_ref (new_pvar);
9377 	    TREE_THIS_NOTRAP (x) = 1;
9378 	    SET_DECL_VALUE_EXPR (new_var, x);
9379 	    DECL_HAS_VALUE_EXPR_P (new_var) = 1;
9380 	  }
9381 	break;
9382 
9383       case OMP_CLAUSE_USE_DEVICE_PTR:
9384       case OMP_CLAUSE_IS_DEVICE_PTR:
9385 	var = OMP_CLAUSE_DECL (c);
9386 	map_cnt++;
9387 	if (is_variable_sized (var))
9388 	  {
9389 	    tree new_var = lookup_decl (var, ctx);
9390 	    tree pvar = DECL_VALUE_EXPR (var);
9391 	    gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
9392 	    pvar = TREE_OPERAND (pvar, 0);
9393 	    gcc_assert (DECL_P (pvar));
9394 	    tree new_pvar = lookup_decl (pvar, ctx);
9395 	    x = build_fold_indirect_ref (new_pvar);
9396 	    TREE_THIS_NOTRAP (x) = 1;
9397 	    SET_DECL_VALUE_EXPR (new_var, x);
9398 	    DECL_HAS_VALUE_EXPR_P (new_var) = 1;
9399 	  }
9400 	else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
9401 	  {
9402 	    tree new_var = lookup_decl (var, ctx);
9403 	    tree type = build_pointer_type (TREE_TYPE (var));
9404 	    x = create_tmp_var_raw (type, get_name (new_var));
9405 	    gimple_add_tmp_var (x);
9406 	    x = build_simple_mem_ref (x);
9407 	    SET_DECL_VALUE_EXPR (new_var, x);
9408 	    DECL_HAS_VALUE_EXPR_P (new_var) = 1;
9409 	  }
9410 	else
9411 	  {
9412 	    tree new_var = lookup_decl (var, ctx);
9413 	    x = create_tmp_var_raw (TREE_TYPE (new_var), get_name (new_var));
9414 	    gimple_add_tmp_var (x);
9415 	    SET_DECL_VALUE_EXPR (new_var, x);
9416 	    DECL_HAS_VALUE_EXPR_P (new_var) = 1;
9417 	  }
9418 	break;
9419       }
9420 
9421   if (offloaded)
9422     {
9423       target_nesting_level++;
9424       lower_omp (&tgt_body, ctx);
9425       target_nesting_level--;
9426     }
9427   else if (data_region)
9428     lower_omp (&tgt_body, ctx);
9429 
9430   if (offloaded)
9431     {
9432       /* Declare all the variables created by mapping and the variables
9433 	 declared in the scope of the target body.  */
9434       record_vars_into (ctx->block_vars, child_fn);
9435       maybe_remove_omp_member_access_dummy_vars (tgt_bind);
9436       record_vars_into (gimple_bind_vars (tgt_bind), child_fn);
9437     }
9438 
9439   olist = NULL;
9440   ilist = NULL;
9441   if (ctx->record_type)
9442     {
9443       ctx->sender_decl
9444 	= create_tmp_var (ctx->record_type, ".omp_data_arr");
9445       DECL_NAMELESS (ctx->sender_decl) = 1;
9446       TREE_ADDRESSABLE (ctx->sender_decl) = 1;
9447       t = make_tree_vec (3);
9448       TREE_VEC_ELT (t, 0) = ctx->sender_decl;
9449       TREE_VEC_ELT (t, 1)
9450 	= create_tmp_var (build_array_type_nelts (size_type_node, map_cnt),
9451 			  ".omp_data_sizes");
9452       DECL_NAMELESS (TREE_VEC_ELT (t, 1)) = 1;
9453       TREE_ADDRESSABLE (TREE_VEC_ELT (t, 1)) = 1;
9454       TREE_STATIC (TREE_VEC_ELT (t, 1)) = 1;
9455       tree tkind_type = short_unsigned_type_node;
9456       int talign_shift = 8;
9457       TREE_VEC_ELT (t, 2)
9458 	= create_tmp_var (build_array_type_nelts (tkind_type, map_cnt),
9459 			  ".omp_data_kinds");
9460       DECL_NAMELESS (TREE_VEC_ELT (t, 2)) = 1;
9461       TREE_ADDRESSABLE (TREE_VEC_ELT (t, 2)) = 1;
9462       TREE_STATIC (TREE_VEC_ELT (t, 2)) = 1;
9463       gimple_omp_target_set_data_arg (stmt, t);
9464 
9465       vec<constructor_elt, va_gc> *vsize;
9466       vec<constructor_elt, va_gc> *vkind;
9467       vec_alloc (vsize, map_cnt);
9468       vec_alloc (vkind, map_cnt);
9469       unsigned int map_idx = 0;
9470 
9471       for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
9472 	switch (OMP_CLAUSE_CODE (c))
9473 	  {
9474 	    tree ovar, nc, s, purpose, var, x, type;
9475 	    unsigned int talign;
9476 
9477 	  default:
9478 	    break;
9479 
9480 	  case OMP_CLAUSE_MAP:
9481 	  case OMP_CLAUSE_TO:
9482 	  case OMP_CLAUSE_FROM:
9483 	  oacc_firstprivate_map:
9484 	    nc = c;
9485 	    ovar = OMP_CLAUSE_DECL (c);
9486 	    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
9487 		&& (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
9488 		    || (OMP_CLAUSE_MAP_KIND (c)
9489 			== GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
9490 	      break;
9491 	    if (!DECL_P (ovar))
9492 	      {
9493 		if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
9494 		    && OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
9495 		  {
9496 		    gcc_checking_assert (OMP_CLAUSE_DECL (OMP_CLAUSE_CHAIN (c))
9497 					 == get_base_address (ovar));
9498 		    nc = OMP_CLAUSE_CHAIN (c);
9499 		    ovar = OMP_CLAUSE_DECL (nc);
9500 		  }
9501 		else
9502 		  {
9503 		    tree x = build_sender_ref (ovar, ctx);
9504 		    tree v
9505 		      = build_fold_addr_expr_with_type (ovar, ptr_type_node);
9506 		    gimplify_assign (x, v, &ilist);
9507 		    nc = NULL_TREE;
9508 		  }
9509 	      }
9510 	    else
9511 	      {
9512 		if (DECL_SIZE (ovar)
9513 		    && TREE_CODE (DECL_SIZE (ovar)) != INTEGER_CST)
9514 		  {
9515 		    tree ovar2 = DECL_VALUE_EXPR (ovar);
9516 		    gcc_assert (TREE_CODE (ovar2) == INDIRECT_REF);
9517 		    ovar2 = TREE_OPERAND (ovar2, 0);
9518 		    gcc_assert (DECL_P (ovar2));
9519 		    ovar = ovar2;
9520 		  }
9521 		if (!maybe_lookup_field (ovar, ctx))
9522 		  continue;
9523 	      }
9524 
9525 	    talign = TYPE_ALIGN_UNIT (TREE_TYPE (ovar));
9526 	    if (DECL_P (ovar) && DECL_ALIGN_UNIT (ovar) > talign)
9527 	      talign = DECL_ALIGN_UNIT (ovar);
9528 	    if (nc)
9529 	      {
9530 		var = lookup_decl_in_outer_ctx (ovar, ctx);
9531 		x = build_sender_ref (ovar, ctx);
9532 
9533 		if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
9534 		    && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
9535 		    && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
9536 		    && TREE_CODE (TREE_TYPE (ovar)) == ARRAY_TYPE)
9537 		  {
9538 		    gcc_assert (offloaded);
9539 		    tree avar
9540 		      = create_tmp_var (TREE_TYPE (TREE_TYPE (x)));
9541 		    mark_addressable (avar);
9542 		    gimplify_assign (avar, build_fold_addr_expr (var), &ilist);
9543 		    talign = DECL_ALIGN_UNIT (avar);
9544 		    avar = build_fold_addr_expr (avar);
9545 		    gimplify_assign (x, avar, &ilist);
9546 		  }
9547 		else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
9548 		  {
9549 		    gcc_assert (is_gimple_omp_oacc (ctx->stmt));
9550 		    if (!omp_is_reference (var))
9551 		      {
9552 			if (is_gimple_reg (var)
9553 			    && OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
9554 			  TREE_NO_WARNING (var) = 1;
9555 			var = build_fold_addr_expr (var);
9556 		      }
9557 		    else
9558 		      talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
9559 		    gimplify_assign (x, var, &ilist);
9560 		  }
9561 		else if (is_gimple_reg (var))
9562 		  {
9563 		    gcc_assert (offloaded);
9564 		    tree avar = create_tmp_var (TREE_TYPE (var));
9565 		    mark_addressable (avar);
9566 		    enum gomp_map_kind map_kind = OMP_CLAUSE_MAP_KIND (c);
9567 		    if (GOMP_MAP_COPY_TO_P (map_kind)
9568 			|| map_kind == GOMP_MAP_POINTER
9569 			|| map_kind == GOMP_MAP_TO_PSET
9570 			|| map_kind == GOMP_MAP_FORCE_DEVICEPTR)
9571 		      {
9572 			/* If we need to initialize a temporary
9573 			   with VAR because it is not addressable, and
9574 			   the variable hasn't been initialized yet, then
9575 			   we'll get a warning for the store to avar.
9576 			   Don't warn in that case, the mapping might
9577 			   be implicit.  */
9578 			TREE_NO_WARNING (var) = 1;
9579 			gimplify_assign (avar, var, &ilist);
9580 		      }
9581 		    avar = build_fold_addr_expr (avar);
9582 		    gimplify_assign (x, avar, &ilist);
9583 		    if ((GOMP_MAP_COPY_FROM_P (map_kind)
9584 			 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
9585 			&& !TYPE_READONLY (TREE_TYPE (var)))
9586 		      {
9587 			x = unshare_expr (x);
9588 			x = build_simple_mem_ref (x);
9589 			gimplify_assign (var, x, &olist);
9590 		      }
9591 		  }
9592 		else
9593 		  {
9594 		    var = build_fold_addr_expr (var);
9595 		    gimplify_assign (x, var, &ilist);
9596 		  }
9597 	      }
9598 	    s = NULL_TREE;
9599 	    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
9600 	      {
9601 		gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
9602 		s = TREE_TYPE (ovar);
9603 		if (TREE_CODE (s) == REFERENCE_TYPE)
9604 		  s = TREE_TYPE (s);
9605 		s = TYPE_SIZE_UNIT (s);
9606 	      }
9607 	    else
9608 	      s = OMP_CLAUSE_SIZE (c);
9609 	    if (s == NULL_TREE)
9610 	      s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
9611 	    s = fold_convert (size_type_node, s);
9612 	    purpose = size_int (map_idx++);
9613 	    CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
9614 	    if (TREE_CODE (s) != INTEGER_CST)
9615 	      TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
9616 
9617 	    unsigned HOST_WIDE_INT tkind, tkind_zero;
9618 	    switch (OMP_CLAUSE_CODE (c))
9619 	      {
9620 	      case OMP_CLAUSE_MAP:
9621 		tkind = OMP_CLAUSE_MAP_KIND (c);
9622 		tkind_zero = tkind;
9623 		if (OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c))
9624 		  switch (tkind)
9625 		    {
9626 		    case GOMP_MAP_ALLOC:
9627 		    case GOMP_MAP_TO:
9628 		    case GOMP_MAP_FROM:
9629 		    case GOMP_MAP_TOFROM:
9630 		    case GOMP_MAP_ALWAYS_TO:
9631 		    case GOMP_MAP_ALWAYS_FROM:
9632 		    case GOMP_MAP_ALWAYS_TOFROM:
9633 		    case GOMP_MAP_RELEASE:
9634 		    case GOMP_MAP_FORCE_TO:
9635 		    case GOMP_MAP_FORCE_FROM:
9636 		    case GOMP_MAP_FORCE_TOFROM:
9637 		    case GOMP_MAP_FORCE_PRESENT:
9638 		      tkind_zero = GOMP_MAP_ZERO_LEN_ARRAY_SECTION;
9639 		      break;
9640 		    case GOMP_MAP_DELETE:
9641 		      tkind_zero = GOMP_MAP_DELETE_ZERO_LEN_ARRAY_SECTION;
9642 		    default:
9643 		      break;
9644 		    }
9645 		if (tkind_zero != tkind)
9646 		  {
9647 		    if (integer_zerop (s))
9648 		      tkind = tkind_zero;
9649 		    else if (integer_nonzerop (s))
9650 		      tkind_zero = tkind;
9651 		  }
9652 		break;
9653 	      case OMP_CLAUSE_FIRSTPRIVATE:
9654 		gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
9655 		tkind = GOMP_MAP_TO;
9656 		tkind_zero = tkind;
9657 		break;
9658 	      case OMP_CLAUSE_TO:
9659 		tkind = GOMP_MAP_TO;
9660 		tkind_zero = tkind;
9661 		break;
9662 	      case OMP_CLAUSE_FROM:
9663 		tkind = GOMP_MAP_FROM;
9664 		tkind_zero = tkind;
9665 		break;
9666 	      default:
9667 		gcc_unreachable ();
9668 	      }
9669 	    gcc_checking_assert (tkind
9670 				 < (HOST_WIDE_INT_C (1U) << talign_shift));
9671 	    gcc_checking_assert (tkind_zero
9672 				 < (HOST_WIDE_INT_C (1U) << talign_shift));
9673 	    talign = ceil_log2 (talign);
9674 	    tkind |= talign << talign_shift;
9675 	    tkind_zero |= talign << talign_shift;
9676 	    gcc_checking_assert (tkind
9677 				 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
9678 	    gcc_checking_assert (tkind_zero
9679 				 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
9680 	    if (tkind == tkind_zero)
9681 	      x = build_int_cstu (tkind_type, tkind);
9682 	    else
9683 	      {
9684 		TREE_STATIC (TREE_VEC_ELT (t, 2)) = 0;
9685 		x = build3 (COND_EXPR, tkind_type,
9686 			    fold_build2 (EQ_EXPR, boolean_type_node,
9687 					 unshare_expr (s), size_zero_node),
9688 			    build_int_cstu (tkind_type, tkind_zero),
9689 			    build_int_cstu (tkind_type, tkind));
9690 	      }
9691 	    CONSTRUCTOR_APPEND_ELT (vkind, purpose, x);
9692 	    if (nc && nc != c)
9693 	      c = nc;
9694 	    break;
9695 
9696 	  case OMP_CLAUSE_FIRSTPRIVATE:
9697 	    if (is_oacc_parallel (ctx))
9698 	      goto oacc_firstprivate_map;
9699 	    ovar = OMP_CLAUSE_DECL (c);
9700 	    if (omp_is_reference (ovar))
9701 	      talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
9702 	    else
9703 	      talign = DECL_ALIGN_UNIT (ovar);
9704 	    var = lookup_decl_in_outer_ctx (ovar, ctx);
9705 	    x = build_sender_ref (ovar, ctx);
9706 	    tkind = GOMP_MAP_FIRSTPRIVATE;
9707 	    type = TREE_TYPE (ovar);
9708 	    if (omp_is_reference (ovar))
9709 	      type = TREE_TYPE (type);
9710 	    if ((INTEGRAL_TYPE_P (type)
9711 		 && TYPE_PRECISION (type) <= POINTER_SIZE)
9712 		|| TREE_CODE (type) == POINTER_TYPE)
9713 	      {
9714 		tkind = GOMP_MAP_FIRSTPRIVATE_INT;
9715 		tree t = var;
9716 		if (omp_is_reference (var))
9717 		  t = build_simple_mem_ref (var);
9718 		else if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
9719 		  TREE_NO_WARNING (var) = 1;
9720 		if (TREE_CODE (type) != POINTER_TYPE)
9721 		  t = fold_convert (pointer_sized_int_node, t);
9722 		t = fold_convert (TREE_TYPE (x), t);
9723 		gimplify_assign (x, t, &ilist);
9724 	      }
9725 	    else if (omp_is_reference (var))
9726 	      gimplify_assign (x, var, &ilist);
9727 	    else if (is_gimple_reg (var))
9728 	      {
9729 		tree avar = create_tmp_var (TREE_TYPE (var));
9730 		mark_addressable (avar);
9731 		if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
9732 		  TREE_NO_WARNING (var) = 1;
9733 		gimplify_assign (avar, var, &ilist);
9734 		avar = build_fold_addr_expr (avar);
9735 		gimplify_assign (x, avar, &ilist);
9736 	      }
9737 	    else
9738 	      {
9739 		var = build_fold_addr_expr (var);
9740 		gimplify_assign (x, var, &ilist);
9741 	      }
9742 	    if (tkind == GOMP_MAP_FIRSTPRIVATE_INT)
9743 	      s = size_int (0);
9744 	    else if (omp_is_reference (ovar))
9745 	      s = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
9746 	    else
9747 	      s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
9748 	    s = fold_convert (size_type_node, s);
9749 	    purpose = size_int (map_idx++);
9750 	    CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
9751 	    if (TREE_CODE (s) != INTEGER_CST)
9752 	      TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
9753 
9754 	    gcc_checking_assert (tkind
9755 				 < (HOST_WIDE_INT_C (1U) << talign_shift));
9756 	    talign = ceil_log2 (talign);
9757 	    tkind |= talign << talign_shift;
9758 	    gcc_checking_assert (tkind
9759 				 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
9760 	    CONSTRUCTOR_APPEND_ELT (vkind, purpose,
9761 				    build_int_cstu (tkind_type, tkind));
9762 	    break;
9763 
9764 	  case OMP_CLAUSE_USE_DEVICE_PTR:
9765 	  case OMP_CLAUSE_IS_DEVICE_PTR:
9766 	    ovar = OMP_CLAUSE_DECL (c);
9767 	    var = lookup_decl_in_outer_ctx (ovar, ctx);
9768 	    x = build_sender_ref (ovar, ctx);
9769 	    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR)
9770 	      tkind = GOMP_MAP_USE_DEVICE_PTR;
9771 	    else
9772 	      tkind = GOMP_MAP_FIRSTPRIVATE_INT;
9773 	    type = TREE_TYPE (ovar);
9774 	    if (TREE_CODE (type) == ARRAY_TYPE)
9775 	      var = build_fold_addr_expr (var);
9776 	    else
9777 	      {
9778 		if (omp_is_reference (ovar))
9779 		  {
9780 		    type = TREE_TYPE (type);
9781 		    if (TREE_CODE (type) != ARRAY_TYPE)
9782 		      var = build_simple_mem_ref (var);
9783 		    var = fold_convert (TREE_TYPE (x), var);
9784 		  }
9785 	      }
9786 	    gimplify_assign (x, var, &ilist);
9787 	    s = size_int (0);
9788 	    purpose = size_int (map_idx++);
9789 	    CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
9790 	    gcc_checking_assert (tkind
9791 				 < (HOST_WIDE_INT_C (1U) << talign_shift));
9792 	    gcc_checking_assert (tkind
9793 				 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
9794 	    CONSTRUCTOR_APPEND_ELT (vkind, purpose,
9795 				    build_int_cstu (tkind_type, tkind));
9796 	    break;
9797 	  }
9798 
9799       gcc_assert (map_idx == map_cnt);
9800 
9801       DECL_INITIAL (TREE_VEC_ELT (t, 1))
9802 	= build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 1)), vsize);
9803       DECL_INITIAL (TREE_VEC_ELT (t, 2))
9804 	= build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 2)), vkind);
9805       for (int i = 1; i <= 2; i++)
9806 	if (!TREE_STATIC (TREE_VEC_ELT (t, i)))
9807 	  {
9808 	    gimple_seq initlist = NULL;
9809 	    force_gimple_operand (build1 (DECL_EXPR, void_type_node,
9810 					  TREE_VEC_ELT (t, i)),
9811 				  &initlist, true, NULL_TREE);
9812 	    gimple_seq_add_seq (&ilist, initlist);
9813 
9814 	    tree clobber = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, i)),
9815 					      NULL);
9816 	    TREE_THIS_VOLATILE (clobber) = 1;
9817 	    gimple_seq_add_stmt (&olist,
9818 				 gimple_build_assign (TREE_VEC_ELT (t, i),
9819 						      clobber));
9820 	  }
9821 
9822       tree clobber = build_constructor (ctx->record_type, NULL);
9823       TREE_THIS_VOLATILE (clobber) = 1;
9824       gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
9825 							clobber));
9826     }
9827 
9828   /* Once all the expansions are done, sequence all the different
9829      fragments inside gimple_omp_body.  */
9830 
9831   new_body = NULL;
9832 
9833   if (offloaded
9834       && ctx->record_type)
9835     {
9836       t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
9837       /* fixup_child_record_type might have changed receiver_decl's type.  */
9838       t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
9839       gimple_seq_add_stmt (&new_body,
9840 	  		   gimple_build_assign (ctx->receiver_decl, t));
9841     }
9842   gimple_seq_add_seq (&new_body, fplist);
9843 
9844   if (offloaded || data_region)
9845     {
9846       tree prev = NULL_TREE;
9847       for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
9848 	switch (OMP_CLAUSE_CODE (c))
9849 	  {
9850 	    tree var, x;
9851 	  default:
9852 	    break;
9853 	  case OMP_CLAUSE_FIRSTPRIVATE:
9854 	    if (is_gimple_omp_oacc (ctx->stmt))
9855 	      break;
9856 	    var = OMP_CLAUSE_DECL (c);
9857 	    if (omp_is_reference (var)
9858 		|| is_gimple_reg_type (TREE_TYPE (var)))
9859 	      {
9860 		tree new_var = lookup_decl (var, ctx);
9861 		tree type;
9862 		type = TREE_TYPE (var);
9863 		if (omp_is_reference (var))
9864 		  type = TREE_TYPE (type);
9865 		if ((INTEGRAL_TYPE_P (type)
9866 		     && TYPE_PRECISION (type) <= POINTER_SIZE)
9867 		    || TREE_CODE (type) == POINTER_TYPE)
9868 		  {
9869 		    x = build_receiver_ref (var, false, ctx);
9870 		    if (TREE_CODE (type) != POINTER_TYPE)
9871 		      x = fold_convert (pointer_sized_int_node, x);
9872 		    x = fold_convert (type, x);
9873 		    gimplify_expr (&x, &new_body, NULL, is_gimple_val,
9874 				   fb_rvalue);
9875 		    if (omp_is_reference (var))
9876 		      {
9877 			tree v = create_tmp_var_raw (type, get_name (var));
9878 			gimple_add_tmp_var (v);
9879 			TREE_ADDRESSABLE (v) = 1;
9880 			gimple_seq_add_stmt (&new_body,
9881 					     gimple_build_assign (v, x));
9882 			x = build_fold_addr_expr (v);
9883 		      }
9884 		    gimple_seq_add_stmt (&new_body,
9885 					 gimple_build_assign (new_var, x));
9886 		  }
9887 		else
9888 		  {
9889 		    x = build_receiver_ref (var, !omp_is_reference (var), ctx);
9890 		    gimplify_expr (&x, &new_body, NULL, is_gimple_val,
9891 				   fb_rvalue);
9892 		    gimple_seq_add_stmt (&new_body,
9893 					 gimple_build_assign (new_var, x));
9894 		  }
9895 	      }
9896 	    else if (is_variable_sized (var))
9897 	      {
9898 		tree pvar = DECL_VALUE_EXPR (var);
9899 		gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
9900 		pvar = TREE_OPERAND (pvar, 0);
9901 		gcc_assert (DECL_P (pvar));
9902 		tree new_var = lookup_decl (pvar, ctx);
9903 		x = build_receiver_ref (var, false, ctx);
9904 		gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
9905 		gimple_seq_add_stmt (&new_body,
9906 				     gimple_build_assign (new_var, x));
9907 	      }
9908 	    break;
9909 	  case OMP_CLAUSE_PRIVATE:
9910 	    if (is_gimple_omp_oacc (ctx->stmt))
9911 	      break;
9912 	    var = OMP_CLAUSE_DECL (c);
9913 	    if (omp_is_reference (var))
9914 	      {
9915 		location_t clause_loc = OMP_CLAUSE_LOCATION (c);
9916 		tree new_var = lookup_decl (var, ctx);
9917 		x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
9918 		if (TREE_CONSTANT (x))
9919 		  {
9920 		    x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
9921 					    get_name (var));
9922 		    gimple_add_tmp_var (x);
9923 		    TREE_ADDRESSABLE (x) = 1;
9924 		    x = build_fold_addr_expr_loc (clause_loc, x);
9925 		  }
9926 		else
9927 		  break;
9928 
9929 		x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
9930 		gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
9931 		gimple_seq_add_stmt (&new_body,
9932 				     gimple_build_assign (new_var, x));
9933 	      }
9934 	    break;
9935 	  case OMP_CLAUSE_USE_DEVICE_PTR:
9936 	  case OMP_CLAUSE_IS_DEVICE_PTR:
9937 	    var = OMP_CLAUSE_DECL (c);
9938 	    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR)
9939 	      x = build_sender_ref (var, ctx);
9940 	    else
9941 	      x = build_receiver_ref (var, false, ctx);
9942 	    if (is_variable_sized (var))
9943 	      {
9944 		tree pvar = DECL_VALUE_EXPR (var);
9945 		gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
9946 		pvar = TREE_OPERAND (pvar, 0);
9947 		gcc_assert (DECL_P (pvar));
9948 		tree new_var = lookup_decl (pvar, ctx);
9949 		gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
9950 		gimple_seq_add_stmt (&new_body,
9951 				     gimple_build_assign (new_var, x));
9952 	      }
9953 	    else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
9954 	      {
9955 		tree new_var = lookup_decl (var, ctx);
9956 		new_var = DECL_VALUE_EXPR (new_var);
9957 		gcc_assert (TREE_CODE (new_var) == MEM_REF);
9958 		new_var = TREE_OPERAND (new_var, 0);
9959 		gcc_assert (DECL_P (new_var));
9960 		gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
9961 		gimple_seq_add_stmt (&new_body,
9962 				     gimple_build_assign (new_var, x));
9963 	      }
9964 	    else
9965 	      {
9966 		tree type = TREE_TYPE (var);
9967 		tree new_var = lookup_decl (var, ctx);
9968 		if (omp_is_reference (var))
9969 		  {
9970 		    type = TREE_TYPE (type);
9971 		    if (TREE_CODE (type) != ARRAY_TYPE)
9972 		      {
9973 			tree v = create_tmp_var_raw (type, get_name (var));
9974 			gimple_add_tmp_var (v);
9975 			TREE_ADDRESSABLE (v) = 1;
9976 			x = fold_convert (type, x);
9977 			gimplify_expr (&x, &new_body, NULL, is_gimple_val,
9978 				       fb_rvalue);
9979 			gimple_seq_add_stmt (&new_body,
9980 					     gimple_build_assign (v, x));
9981 			x = build_fold_addr_expr (v);
9982 		      }
9983 		  }
9984 		new_var = DECL_VALUE_EXPR (new_var);
9985 		x = fold_convert (TREE_TYPE (new_var), x);
9986 		gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
9987 		gimple_seq_add_stmt (&new_body,
9988 				     gimple_build_assign (new_var, x));
9989 	      }
9990 	    break;
9991 	  }
9992       /* Handle GOMP_MAP_FIRSTPRIVATE_{POINTER,REFERENCE} in second pass,
9993 	 so that firstprivate vars holding OMP_CLAUSE_SIZE if needed
9994 	 are already handled.  Similarly OMP_CLAUSE_PRIVATE for VLAs
9995 	 or references to VLAs.  */
9996       for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
9997 	switch (OMP_CLAUSE_CODE (c))
9998 	  {
9999 	    tree var;
10000 	  default:
10001 	    break;
10002 	  case OMP_CLAUSE_MAP:
10003 	    if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
10004 		|| OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
10005 	      {
10006 		location_t clause_loc = OMP_CLAUSE_LOCATION (c);
10007 		poly_int64 offset = 0;
10008 		gcc_assert (prev);
10009 		var = OMP_CLAUSE_DECL (c);
10010 		if (DECL_P (var)
10011 		    && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
10012 		    && is_global_var (maybe_lookup_decl_in_outer_ctx (var,
10013 								      ctx))
10014 		    && varpool_node::get_create (var)->offloadable)
10015 		  break;
10016 		if (TREE_CODE (var) == INDIRECT_REF
10017 		    && TREE_CODE (TREE_OPERAND (var, 0)) == COMPONENT_REF)
10018 		  var = TREE_OPERAND (var, 0);
10019 		if (TREE_CODE (var) == COMPONENT_REF)
10020 		  {
10021 		    var = get_addr_base_and_unit_offset (var, &offset);
10022 		    gcc_assert (var != NULL_TREE && DECL_P (var));
10023 		  }
10024 		else if (DECL_SIZE (var)
10025 			 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
10026 		  {
10027 		    tree var2 = DECL_VALUE_EXPR (var);
10028 		    gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
10029 		    var2 = TREE_OPERAND (var2, 0);
10030 		    gcc_assert (DECL_P (var2));
10031 		    var = var2;
10032 		  }
10033 		tree new_var = lookup_decl (var, ctx), x;
10034 		tree type = TREE_TYPE (new_var);
10035 		bool is_ref;
10036 		if (TREE_CODE (OMP_CLAUSE_DECL (c)) == INDIRECT_REF
10037 		    && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
10038 			== COMPONENT_REF))
10039 		  {
10040 		    type = TREE_TYPE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0));
10041 		    is_ref = true;
10042 		    new_var = build2 (MEM_REF, type,
10043 				      build_fold_addr_expr (new_var),
10044 				      build_int_cst (build_pointer_type (type),
10045 						     offset));
10046 		  }
10047 		else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPONENT_REF)
10048 		  {
10049 		    type = TREE_TYPE (OMP_CLAUSE_DECL (c));
10050 		    is_ref = TREE_CODE (type) == REFERENCE_TYPE;
10051 		    new_var = build2 (MEM_REF, type,
10052 				      build_fold_addr_expr (new_var),
10053 				      build_int_cst (build_pointer_type (type),
10054 						     offset));
10055 		  }
10056 		else
10057 		  is_ref = omp_is_reference (var);
10058 		if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
10059 		  is_ref = false;
10060 		bool ref_to_array = false;
10061 		if (is_ref)
10062 		  {
10063 		    type = TREE_TYPE (type);
10064 		    if (TREE_CODE (type) == ARRAY_TYPE)
10065 		      {
10066 			type = build_pointer_type (type);
10067 			ref_to_array = true;
10068 		      }
10069 		  }
10070 		else if (TREE_CODE (type) == ARRAY_TYPE)
10071 		  {
10072 		    tree decl2 = DECL_VALUE_EXPR (new_var);
10073 		    gcc_assert (TREE_CODE (decl2) == MEM_REF);
10074 		    decl2 = TREE_OPERAND (decl2, 0);
10075 		    gcc_assert (DECL_P (decl2));
10076 		    new_var = decl2;
10077 		    type = TREE_TYPE (new_var);
10078 		  }
10079 		x = build_receiver_ref (OMP_CLAUSE_DECL (prev), false, ctx);
10080 		x = fold_convert_loc (clause_loc, type, x);
10081 		if (!integer_zerop (OMP_CLAUSE_SIZE (c)))
10082 		  {
10083 		    tree bias = OMP_CLAUSE_SIZE (c);
10084 		    if (DECL_P (bias))
10085 		      bias = lookup_decl (bias, ctx);
10086 		    bias = fold_convert_loc (clause_loc, sizetype, bias);
10087 		    bias = fold_build1_loc (clause_loc, NEGATE_EXPR, sizetype,
10088 					    bias);
10089 		    x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
10090 					 TREE_TYPE (x), x, bias);
10091 		  }
10092 		if (ref_to_array)
10093 		  x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
10094 		gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
10095 		if (is_ref && !ref_to_array)
10096 		  {
10097 		    tree t = create_tmp_var_raw (type, get_name (var));
10098 		    gimple_add_tmp_var (t);
10099 		    TREE_ADDRESSABLE (t) = 1;
10100 		    gimple_seq_add_stmt (&new_body,
10101 					 gimple_build_assign (t, x));
10102 		    x = build_fold_addr_expr_loc (clause_loc, t);
10103 		  }
10104 		gimple_seq_add_stmt (&new_body,
10105 				     gimple_build_assign (new_var, x));
10106 		prev = NULL_TREE;
10107 	      }
10108 	    else if (OMP_CLAUSE_CHAIN (c)
10109 		     && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c))
10110 			== OMP_CLAUSE_MAP
10111 		     && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
10112 			 == GOMP_MAP_FIRSTPRIVATE_POINTER
10113 			 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
10114 			     == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
10115 	      prev = c;
10116 	    break;
10117 	  case OMP_CLAUSE_PRIVATE:
10118 	    var = OMP_CLAUSE_DECL (c);
10119 	    if (is_variable_sized (var))
10120 	      {
10121 		location_t clause_loc = OMP_CLAUSE_LOCATION (c);
10122 		tree new_var = lookup_decl (var, ctx);
10123 		tree pvar = DECL_VALUE_EXPR (var);
10124 		gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
10125 		pvar = TREE_OPERAND (pvar, 0);
10126 		gcc_assert (DECL_P (pvar));
10127 		tree new_pvar = lookup_decl (pvar, ctx);
10128 		tree atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
10129 		tree al = size_int (DECL_ALIGN (var));
10130 		tree x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
10131 		x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
10132 		x = fold_convert_loc (clause_loc, TREE_TYPE (new_pvar), x);
10133 		gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
10134 		gimple_seq_add_stmt (&new_body,
10135 				     gimple_build_assign (new_pvar, x));
10136 	      }
10137 	    else if (omp_is_reference (var) && !is_gimple_omp_oacc (ctx->stmt))
10138 	      {
10139 		location_t clause_loc = OMP_CLAUSE_LOCATION (c);
10140 		tree new_var = lookup_decl (var, ctx);
10141 		tree x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
10142 		if (TREE_CONSTANT (x))
10143 		  break;
10144 		else
10145 		  {
10146 		    tree atmp
10147 		      = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
10148 		    tree rtype = TREE_TYPE (TREE_TYPE (new_var));
10149 		    tree al = size_int (TYPE_ALIGN (rtype));
10150 		    x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
10151 		  }
10152 
10153 		x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
10154 		gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
10155 		gimple_seq_add_stmt (&new_body,
10156 				     gimple_build_assign (new_var, x));
10157 	      }
10158 	    break;
10159 	  }
10160 
10161       gimple_seq fork_seq = NULL;
10162       gimple_seq join_seq = NULL;
10163 
10164       if (is_oacc_parallel (ctx))
10165 	{
10166 	  /* If there are reductions on the offloaded region itself, treat
10167 	     them as a dummy GANG loop.  */
10168 	  tree level = build_int_cst (integer_type_node, GOMP_DIM_GANG);
10169 
10170 	  lower_oacc_reductions (gimple_location (ctx->stmt), clauses, level,
10171 				 false, NULL, NULL, &fork_seq, &join_seq, ctx);
10172 	}
10173 
10174       gimple_seq_add_seq (&new_body, fork_seq);
10175       gimple_seq_add_seq (&new_body, tgt_body);
10176       gimple_seq_add_seq (&new_body, join_seq);
10177 
10178       if (offloaded)
10179 	{
10180 	  new_body = maybe_catch_exception (new_body);
10181 	  gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
10182 	}
10183       gimple_omp_set_body (stmt, new_body);
10184     }
10185 
10186   bind = gimple_build_bind (NULL, NULL,
10187 			    tgt_bind ? gimple_bind_block (tgt_bind)
10188 				     : NULL_TREE);
10189   gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
10190   gimple_bind_add_seq (bind, ilist);
10191   gimple_bind_add_stmt (bind, stmt);
10192   gimple_bind_add_seq (bind, olist);
10193 
10194   pop_gimplify_context (NULL);
10195 
10196   if (dep_bind)
10197     {
10198       gimple_bind_add_seq (dep_bind, dep_ilist);
10199       gimple_bind_add_stmt (dep_bind, bind);
10200       gimple_bind_add_seq (dep_bind, dep_olist);
10201       pop_gimplify_context (dep_bind);
10202     }
10203 }
10204 
10205 /* Expand code for an OpenMP teams directive.  */
10206 
10207 static void
lower_omp_teams(gimple_stmt_iterator * gsi_p,omp_context * ctx)10208 lower_omp_teams (gimple_stmt_iterator *gsi_p, omp_context *ctx)
10209 {
10210   gomp_teams *teams_stmt = as_a <gomp_teams *> (gsi_stmt (*gsi_p));
10211   push_gimplify_context ();
10212 
10213   tree block = make_node (BLOCK);
10214   gbind *bind = gimple_build_bind (NULL, NULL, block);
10215   gsi_replace (gsi_p, bind, true);
10216   gimple_seq bind_body = NULL;
10217   gimple_seq dlist = NULL;
10218   gimple_seq olist = NULL;
10219 
10220   tree num_teams = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
10221 				    OMP_CLAUSE_NUM_TEAMS);
10222   if (num_teams == NULL_TREE)
10223     num_teams = build_int_cst (unsigned_type_node, 0);
10224   else
10225     {
10226       num_teams = OMP_CLAUSE_NUM_TEAMS_EXPR (num_teams);
10227       num_teams = fold_convert (unsigned_type_node, num_teams);
10228       gimplify_expr (&num_teams, &bind_body, NULL, is_gimple_val, fb_rvalue);
10229     }
10230   tree thread_limit = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
10231 				       OMP_CLAUSE_THREAD_LIMIT);
10232   if (thread_limit == NULL_TREE)
10233     thread_limit = build_int_cst (unsigned_type_node, 0);
10234   else
10235     {
10236       thread_limit = OMP_CLAUSE_THREAD_LIMIT_EXPR (thread_limit);
10237       thread_limit = fold_convert (unsigned_type_node, thread_limit);
10238       gimplify_expr (&thread_limit, &bind_body, NULL, is_gimple_val,
10239 		     fb_rvalue);
10240     }
10241 
10242   lower_rec_input_clauses (gimple_omp_teams_clauses (teams_stmt),
10243 			   &bind_body, &dlist, ctx, NULL);
10244   lower_omp (gimple_omp_body_ptr (teams_stmt), ctx);
10245   lower_reduction_clauses (gimple_omp_teams_clauses (teams_stmt), &olist, ctx);
10246   if (!gimple_omp_teams_grid_phony (teams_stmt))
10247     {
10248       gimple_seq_add_stmt (&bind_body, teams_stmt);
10249       location_t loc = gimple_location (teams_stmt);
10250       tree decl = builtin_decl_explicit (BUILT_IN_GOMP_TEAMS);
10251       gimple *call = gimple_build_call (decl, 2, num_teams, thread_limit);
10252       gimple_set_location (call, loc);
10253       gimple_seq_add_stmt (&bind_body, call);
10254     }
10255 
10256   gimple_seq_add_seq (&bind_body, gimple_omp_body (teams_stmt));
10257   gimple_omp_set_body (teams_stmt, NULL);
10258   gimple_seq_add_seq (&bind_body, olist);
10259   gimple_seq_add_seq (&bind_body, dlist);
10260   if (!gimple_omp_teams_grid_phony (teams_stmt))
10261     gimple_seq_add_stmt (&bind_body, gimple_build_omp_return (true));
10262   gimple_bind_set_body (bind, bind_body);
10263 
10264   pop_gimplify_context (bind);
10265 
10266   gimple_bind_append_vars (bind, ctx->block_vars);
10267   BLOCK_VARS (block) = ctx->block_vars;
10268   if (BLOCK_VARS (block))
10269     TREE_USED (block) = 1;
10270 }
10271 
10272 /* Expand code within an artificial GIMPLE_OMP_GRID_BODY OMP construct.  */
10273 
10274 static void
lower_omp_grid_body(gimple_stmt_iterator * gsi_p,omp_context * ctx)10275 lower_omp_grid_body (gimple_stmt_iterator *gsi_p, omp_context *ctx)
10276 {
10277   gimple *stmt = gsi_stmt (*gsi_p);
10278   lower_omp (gimple_omp_body_ptr (stmt), ctx);
10279   gimple_seq_add_stmt (gimple_omp_body_ptr (stmt),
10280 		       gimple_build_omp_return (false));
10281 }
10282 
10283 
10284 /* Callback for lower_omp_1.  Return non-NULL if *tp needs to be
10285    regimplified.  If DATA is non-NULL, lower_omp_1 is outside
10286    of OMP context, but with task_shared_vars set.  */
10287 
10288 static tree
lower_omp_regimplify_p(tree * tp,int * walk_subtrees,void * data)10289 lower_omp_regimplify_p (tree *tp, int *walk_subtrees,
10290     			void *data)
10291 {
10292   tree t = *tp;
10293 
10294   /* Any variable with DECL_VALUE_EXPR needs to be regimplified.  */
10295   if (VAR_P (t) && data == NULL && DECL_HAS_VALUE_EXPR_P (t))
10296     return t;
10297 
10298   if (task_shared_vars
10299       && DECL_P (t)
10300       && bitmap_bit_p (task_shared_vars, DECL_UID (t)))
10301     return t;
10302 
10303   /* If a global variable has been privatized, TREE_CONSTANT on
10304      ADDR_EXPR might be wrong.  */
10305   if (data == NULL && TREE_CODE (t) == ADDR_EXPR)
10306     recompute_tree_invariant_for_addr_expr (t);
10307 
10308   *walk_subtrees = !IS_TYPE_OR_DECL_P (t);
10309   return NULL_TREE;
10310 }
10311 
10312 /* Data to be communicated between lower_omp_regimplify_operands and
10313    lower_omp_regimplify_operands_p.  */
10314 
10315 struct lower_omp_regimplify_operands_data
10316 {
10317   omp_context *ctx;
10318   vec<tree> *decls;
10319 };
10320 
10321 /* Helper function for lower_omp_regimplify_operands.  Find
10322    omp_member_access_dummy_var vars and adjust temporarily their
10323    DECL_VALUE_EXPRs if needed.  */
10324 
10325 static tree
lower_omp_regimplify_operands_p(tree * tp,int * walk_subtrees,void * data)10326 lower_omp_regimplify_operands_p (tree *tp, int *walk_subtrees,
10327 				 void *data)
10328 {
10329   tree t = omp_member_access_dummy_var (*tp);
10330   if (t)
10331     {
10332       struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
10333       lower_omp_regimplify_operands_data *ldata
10334 	= (lower_omp_regimplify_operands_data *) wi->info;
10335       tree o = maybe_lookup_decl (t, ldata->ctx);
10336       if (o != t)
10337 	{
10338 	  ldata->decls->safe_push (DECL_VALUE_EXPR (*tp));
10339 	  ldata->decls->safe_push (*tp);
10340 	  tree v = unshare_and_remap (DECL_VALUE_EXPR (*tp), t, o);
10341 	  SET_DECL_VALUE_EXPR (*tp, v);
10342 	}
10343     }
10344   *walk_subtrees = !IS_TYPE_OR_DECL_P (*tp);
10345   return NULL_TREE;
10346 }
10347 
10348 /* Wrapper around gimple_regimplify_operands that adjusts DECL_VALUE_EXPRs
10349    of omp_member_access_dummy_var vars during regimplification.  */
10350 
10351 static void
lower_omp_regimplify_operands(omp_context * ctx,gimple * stmt,gimple_stmt_iterator * gsi_p)10352 lower_omp_regimplify_operands (omp_context *ctx, gimple *stmt,
10353 			       gimple_stmt_iterator *gsi_p)
10354 {
10355   auto_vec<tree, 10> decls;
10356   if (ctx)
10357     {
10358       struct walk_stmt_info wi;
10359       memset (&wi, '\0', sizeof (wi));
10360       struct lower_omp_regimplify_operands_data data;
10361       data.ctx = ctx;
10362       data.decls = &decls;
10363       wi.info = &data;
10364       walk_gimple_op (stmt, lower_omp_regimplify_operands_p, &wi);
10365     }
10366   gimple_regimplify_operands (stmt, gsi_p);
10367   while (!decls.is_empty ())
10368     {
10369       tree t = decls.pop ();
10370       tree v = decls.pop ();
10371       SET_DECL_VALUE_EXPR (t, v);
10372     }
10373 }
10374 
10375 static void
lower_omp_1(gimple_stmt_iterator * gsi_p,omp_context * ctx)10376 lower_omp_1 (gimple_stmt_iterator *gsi_p, omp_context *ctx)
10377 {
10378   gimple *stmt = gsi_stmt (*gsi_p);
10379   struct walk_stmt_info wi;
10380   gcall *call_stmt;
10381 
10382   if (gimple_has_location (stmt))
10383     input_location = gimple_location (stmt);
10384 
10385   if (task_shared_vars)
10386     memset (&wi, '\0', sizeof (wi));
10387 
10388   /* If we have issued syntax errors, avoid doing any heavy lifting.
10389      Just replace the OMP directives with a NOP to avoid
10390      confusing RTL expansion.  */
10391   if (seen_error () && is_gimple_omp (stmt))
10392     {
10393       gsi_replace (gsi_p, gimple_build_nop (), true);
10394       return;
10395     }
10396 
10397   switch (gimple_code (stmt))
10398     {
10399     case GIMPLE_COND:
10400       {
10401 	gcond *cond_stmt = as_a <gcond *> (stmt);
10402 	if ((ctx || task_shared_vars)
10403 	    && (walk_tree (gimple_cond_lhs_ptr (cond_stmt),
10404 			   lower_omp_regimplify_p,
10405 			   ctx ? NULL : &wi, NULL)
10406 		|| walk_tree (gimple_cond_rhs_ptr (cond_stmt),
10407 			      lower_omp_regimplify_p,
10408 			      ctx ? NULL : &wi, NULL)))
10409 	  lower_omp_regimplify_operands (ctx, cond_stmt, gsi_p);
10410       }
10411       break;
10412     case GIMPLE_CATCH:
10413       lower_omp (gimple_catch_handler_ptr (as_a <gcatch *> (stmt)), ctx);
10414       break;
10415     case GIMPLE_EH_FILTER:
10416       lower_omp (gimple_eh_filter_failure_ptr (stmt), ctx);
10417       break;
10418     case GIMPLE_TRY:
10419       lower_omp (gimple_try_eval_ptr (stmt), ctx);
10420       lower_omp (gimple_try_cleanup_ptr (stmt), ctx);
10421       break;
10422     case GIMPLE_TRANSACTION:
10423       lower_omp (gimple_transaction_body_ptr (as_a <gtransaction *> (stmt)),
10424 		 ctx);
10425       break;
10426     case GIMPLE_BIND:
10427       lower_omp (gimple_bind_body_ptr (as_a <gbind *> (stmt)), ctx);
10428       maybe_remove_omp_member_access_dummy_vars (as_a <gbind *> (stmt));
10429       break;
10430     case GIMPLE_OMP_PARALLEL:
10431     case GIMPLE_OMP_TASK:
10432       ctx = maybe_lookup_ctx (stmt);
10433       gcc_assert (ctx);
10434       if (ctx->cancellable)
10435 	ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
10436       lower_omp_taskreg (gsi_p, ctx);
10437       break;
10438     case GIMPLE_OMP_FOR:
10439       ctx = maybe_lookup_ctx (stmt);
10440       gcc_assert (ctx);
10441       if (ctx->cancellable)
10442 	ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
10443       lower_omp_for (gsi_p, ctx);
10444       break;
10445     case GIMPLE_OMP_SECTIONS:
10446       ctx = maybe_lookup_ctx (stmt);
10447       gcc_assert (ctx);
10448       if (ctx->cancellable)
10449 	ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
10450       lower_omp_sections (gsi_p, ctx);
10451       break;
10452     case GIMPLE_OMP_SINGLE:
10453       ctx = maybe_lookup_ctx (stmt);
10454       gcc_assert (ctx);
10455       lower_omp_single (gsi_p, ctx);
10456       break;
10457     case GIMPLE_OMP_MASTER:
10458       ctx = maybe_lookup_ctx (stmt);
10459       gcc_assert (ctx);
10460       lower_omp_master (gsi_p, ctx);
10461       break;
10462     case GIMPLE_OMP_TASKGROUP:
10463       ctx = maybe_lookup_ctx (stmt);
10464       gcc_assert (ctx);
10465       lower_omp_taskgroup (gsi_p, ctx);
10466       break;
10467     case GIMPLE_OMP_ORDERED:
10468       ctx = maybe_lookup_ctx (stmt);
10469       gcc_assert (ctx);
10470       lower_omp_ordered (gsi_p, ctx);
10471       break;
10472     case GIMPLE_OMP_CRITICAL:
10473       ctx = maybe_lookup_ctx (stmt);
10474       gcc_assert (ctx);
10475       lower_omp_critical (gsi_p, ctx);
10476       break;
10477     case GIMPLE_OMP_ATOMIC_LOAD:
10478       if ((ctx || task_shared_vars)
10479 	  && walk_tree (gimple_omp_atomic_load_rhs_ptr (
10480 			  as_a <gomp_atomic_load *> (stmt)),
10481 			lower_omp_regimplify_p, ctx ? NULL : &wi, NULL))
10482 	lower_omp_regimplify_operands (ctx, stmt, gsi_p);
10483       break;
10484     case GIMPLE_OMP_TARGET:
10485       ctx = maybe_lookup_ctx (stmt);
10486       gcc_assert (ctx);
10487       lower_omp_target (gsi_p, ctx);
10488       break;
10489     case GIMPLE_OMP_TEAMS:
10490       ctx = maybe_lookup_ctx (stmt);
10491       gcc_assert (ctx);
10492       if (gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
10493 	lower_omp_taskreg (gsi_p, ctx);
10494       else
10495 	lower_omp_teams (gsi_p, ctx);
10496       break;
10497     case GIMPLE_OMP_GRID_BODY:
10498       ctx = maybe_lookup_ctx (stmt);
10499       gcc_assert (ctx);
10500       lower_omp_grid_body (gsi_p, ctx);
10501       break;
10502     case GIMPLE_CALL:
10503       tree fndecl;
10504       call_stmt = as_a <gcall *> (stmt);
10505       fndecl = gimple_call_fndecl (call_stmt);
10506       if (fndecl
10507 	  && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
10508 	switch (DECL_FUNCTION_CODE (fndecl))
10509 	  {
10510 	  case BUILT_IN_GOMP_BARRIER:
10511 	    if (ctx == NULL)
10512 	      break;
10513 	    /* FALLTHRU */
10514 	  case BUILT_IN_GOMP_CANCEL:
10515 	  case BUILT_IN_GOMP_CANCELLATION_POINT:
10516 	    omp_context *cctx;
10517 	    cctx = ctx;
10518 	    if (gimple_code (cctx->stmt) == GIMPLE_OMP_SECTION)
10519 	      cctx = cctx->outer;
10520 	    gcc_assert (gimple_call_lhs (call_stmt) == NULL_TREE);
10521 	    if (!cctx->cancellable)
10522 	      {
10523 		if (DECL_FUNCTION_CODE (fndecl)
10524 		    == BUILT_IN_GOMP_CANCELLATION_POINT)
10525 		  {
10526 		    stmt = gimple_build_nop ();
10527 		    gsi_replace (gsi_p, stmt, false);
10528 		  }
10529 		break;
10530 	      }
10531 	    if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_GOMP_BARRIER)
10532 	      {
10533 		fndecl = builtin_decl_explicit (BUILT_IN_GOMP_BARRIER_CANCEL);
10534 		gimple_call_set_fndecl (call_stmt, fndecl);
10535 		gimple_call_set_fntype (call_stmt, TREE_TYPE (fndecl));
10536 	      }
10537 	    tree lhs;
10538 	    lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (fndecl)));
10539 	    gimple_call_set_lhs (call_stmt, lhs);
10540 	    tree fallthru_label;
10541 	    fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
10542 	    gimple *g;
10543 	    g = gimple_build_label (fallthru_label);
10544 	    gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
10545 	    g = gimple_build_cond (NE_EXPR, lhs,
10546 				   fold_convert (TREE_TYPE (lhs),
10547 						 boolean_false_node),
10548 				   cctx->cancel_label, fallthru_label);
10549 	    gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
10550 	    break;
10551 	  default:
10552 	    break;
10553 	  }
10554       /* FALLTHRU */
10555     default:
10556       if ((ctx || task_shared_vars)
10557 	  && walk_gimple_op (stmt, lower_omp_regimplify_p,
10558 			     ctx ? NULL : &wi))
10559 	{
10560 	  /* Just remove clobbers, this should happen only if we have
10561 	     "privatized" local addressable variables in SIMD regions,
10562 	     the clobber isn't needed in that case and gimplifying address
10563 	     of the ARRAY_REF into a pointer and creating MEM_REF based
10564 	     clobber would create worse code than we get with the clobber
10565 	     dropped.  */
10566 	  if (gimple_clobber_p (stmt))
10567 	    {
10568 	      gsi_replace (gsi_p, gimple_build_nop (), true);
10569 	      break;
10570 	    }
10571 	  lower_omp_regimplify_operands (ctx, stmt, gsi_p);
10572 	}
10573       break;
10574     }
10575 }
10576 
10577 static void
lower_omp(gimple_seq * body,omp_context * ctx)10578 lower_omp (gimple_seq *body, omp_context *ctx)
10579 {
10580   location_t saved_location = input_location;
10581   gimple_stmt_iterator gsi;
10582   for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
10583     lower_omp_1 (&gsi, ctx);
10584   /* During gimplification, we haven't folded statments inside offloading
10585      or taskreg regions (gimplify.c:maybe_fold_stmt); do that now.  */
10586   if (target_nesting_level || taskreg_nesting_level)
10587     for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
10588       fold_stmt (&gsi);
10589   input_location = saved_location;
10590 }
10591 
10592 /* Main entry point.  */
10593 
10594 static unsigned int
execute_lower_omp(void)10595 execute_lower_omp (void)
10596 {
10597   gimple_seq body;
10598   int i;
10599   omp_context *ctx;
10600 
10601   /* This pass always runs, to provide PROP_gimple_lomp.
10602      But often, there is nothing to do.  */
10603   if (flag_openacc == 0 && flag_openmp == 0
10604       && flag_openmp_simd == 0)
10605     return 0;
10606 
10607   all_contexts = splay_tree_new (splay_tree_compare_pointers, 0,
10608 				 delete_omp_context);
10609 
10610   body = gimple_body (current_function_decl);
10611 
10612   if (hsa_gen_requested_p ())
10613     omp_grid_gridify_all_targets (&body);
10614 
10615   scan_omp (&body, NULL);
10616   gcc_assert (taskreg_nesting_level == 0);
10617   FOR_EACH_VEC_ELT (taskreg_contexts, i, ctx)
10618     finish_taskreg_scan (ctx);
10619   taskreg_contexts.release ();
10620 
10621   if (all_contexts->root)
10622     {
10623       if (task_shared_vars)
10624 	push_gimplify_context ();
10625       lower_omp (&body, NULL);
10626       if (task_shared_vars)
10627 	pop_gimplify_context (NULL);
10628     }
10629 
10630   if (all_contexts)
10631     {
10632       splay_tree_delete (all_contexts);
10633       all_contexts = NULL;
10634     }
10635   BITMAP_FREE (task_shared_vars);
10636   BITMAP_FREE (global_nonaddressable_vars);
10637 
10638   /* If current function is a method, remove artificial dummy VAR_DECL created
10639      for non-static data member privatization, they aren't needed for
10640      debuginfo nor anything else, have been already replaced everywhere in the
10641      IL and cause problems with LTO.  */
10642   if (DECL_ARGUMENTS (current_function_decl)
10643       && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
10644       && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
10645 	  == POINTER_TYPE))
10646     remove_member_access_dummy_vars (DECL_INITIAL (current_function_decl));
10647   return 0;
10648 }
10649 
10650 namespace {
10651 
10652 const pass_data pass_data_lower_omp =
10653 {
10654   GIMPLE_PASS, /* type */
10655   "omplower", /* name */
10656   OPTGROUP_OMP, /* optinfo_flags */
10657   TV_NONE, /* tv_id */
10658   PROP_gimple_any, /* properties_required */
10659   PROP_gimple_lomp | PROP_gimple_lomp_dev, /* properties_provided */
10660   0, /* properties_destroyed */
10661   0, /* todo_flags_start */
10662   0, /* todo_flags_finish */
10663 };
10664 
10665 class pass_lower_omp : public gimple_opt_pass
10666 {
10667 public:
pass_lower_omp(gcc::context * ctxt)10668   pass_lower_omp (gcc::context *ctxt)
10669     : gimple_opt_pass (pass_data_lower_omp, ctxt)
10670   {}
10671 
10672   /* opt_pass methods: */
execute(function *)10673   virtual unsigned int execute (function *) { return execute_lower_omp (); }
10674 
10675 }; // class pass_lower_omp
10676 
10677 } // anon namespace
10678 
10679 gimple_opt_pass *
make_pass_lower_omp(gcc::context * ctxt)10680 make_pass_lower_omp (gcc::context *ctxt)
10681 {
10682   return new pass_lower_omp (ctxt);
10683 }
10684 
10685 /* The following is a utility to diagnose structured block violations.
10686    It is not part of the "omplower" pass, as that's invoked too late.  It
10687    should be invoked by the respective front ends after gimplification.  */
10688 
10689 static splay_tree all_labels;
10690 
10691 /* Check for mismatched contexts and generate an error if needed.  Return
10692    true if an error is detected.  */
10693 
10694 static bool
diagnose_sb_0(gimple_stmt_iterator * gsi_p,gimple * branch_ctx,gimple * label_ctx)10695 diagnose_sb_0 (gimple_stmt_iterator *gsi_p,
10696 	       gimple *branch_ctx, gimple *label_ctx)
10697 {
10698   gcc_checking_assert (!branch_ctx || is_gimple_omp (branch_ctx));
10699   gcc_checking_assert (!label_ctx || is_gimple_omp (label_ctx));
10700 
10701   if (label_ctx == branch_ctx)
10702     return false;
10703 
10704   const char* kind = NULL;
10705 
10706   if (flag_openacc)
10707     {
10708       if ((branch_ctx && is_gimple_omp_oacc (branch_ctx))
10709 	  || (label_ctx && is_gimple_omp_oacc (label_ctx)))
10710 	{
10711 	  gcc_checking_assert (kind == NULL);
10712 	  kind = "OpenACC";
10713 	}
10714     }
10715   if (kind == NULL)
10716     {
10717       gcc_checking_assert (flag_openmp || flag_openmp_simd);
10718       kind = "OpenMP";
10719     }
10720 
10721   /* Previously we kept track of the label's entire context in diagnose_sb_[12]
10722      so we could traverse it and issue a correct "exit" or "enter" error
10723      message upon a structured block violation.
10724 
10725      We built the context by building a list with tree_cons'ing, but there is
10726      no easy counterpart in gimple tuples.  It seems like far too much work
10727      for issuing exit/enter error messages.  If someone really misses the
10728      distinct error message... patches welcome.  */
10729 
10730 #if 0
10731   /* Try to avoid confusing the user by producing and error message
10732      with correct "exit" or "enter" verbiage.  We prefer "exit"
10733      unless we can show that LABEL_CTX is nested within BRANCH_CTX.  */
10734   if (branch_ctx == NULL)
10735     exit_p = false;
10736   else
10737     {
10738       while (label_ctx)
10739 	{
10740 	  if (TREE_VALUE (label_ctx) == branch_ctx)
10741 	    {
10742 	      exit_p = false;
10743 	      break;
10744 	    }
10745 	  label_ctx = TREE_CHAIN (label_ctx);
10746 	}
10747     }
10748 
10749   if (exit_p)
10750     error ("invalid exit from %s structured block", kind);
10751   else
10752     error ("invalid entry to %s structured block", kind);
10753 #endif
10754 
10755   /* If it's obvious we have an invalid entry, be specific about the error.  */
10756   if (branch_ctx == NULL)
10757     error ("invalid entry to %s structured block", kind);
10758   else
10759     {
10760       /* Otherwise, be vague and lazy, but efficient.  */
10761       error ("invalid branch to/from %s structured block", kind);
10762     }
10763 
10764   gsi_replace (gsi_p, gimple_build_nop (), false);
10765   return true;
10766 }
10767 
10768 /* Pass 1: Create a minimal tree of structured blocks, and record
10769    where each label is found.  */
10770 
10771 static tree
diagnose_sb_1(gimple_stmt_iterator * gsi_p,bool * handled_ops_p,struct walk_stmt_info * wi)10772 diagnose_sb_1 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
10773     	       struct walk_stmt_info *wi)
10774 {
10775   gimple *context = (gimple *) wi->info;
10776   gimple *inner_context;
10777   gimple *stmt = gsi_stmt (*gsi_p);
10778 
10779   *handled_ops_p = true;
10780 
10781   switch (gimple_code (stmt))
10782     {
10783     WALK_SUBSTMTS;
10784 
10785     case GIMPLE_OMP_PARALLEL:
10786     case GIMPLE_OMP_TASK:
10787     case GIMPLE_OMP_SECTIONS:
10788     case GIMPLE_OMP_SINGLE:
10789     case GIMPLE_OMP_SECTION:
10790     case GIMPLE_OMP_MASTER:
10791     case GIMPLE_OMP_ORDERED:
10792     case GIMPLE_OMP_CRITICAL:
10793     case GIMPLE_OMP_TARGET:
10794     case GIMPLE_OMP_TEAMS:
10795     case GIMPLE_OMP_TASKGROUP:
10796       /* The minimal context here is just the current OMP construct.  */
10797       inner_context = stmt;
10798       wi->info = inner_context;
10799       walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
10800       wi->info = context;
10801       break;
10802 
10803     case GIMPLE_OMP_FOR:
10804       inner_context = stmt;
10805       wi->info = inner_context;
10806       /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
10807 	 walk them.  */
10808       walk_gimple_seq (gimple_omp_for_pre_body (stmt),
10809 	  	       diagnose_sb_1, NULL, wi);
10810       walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
10811       wi->info = context;
10812       break;
10813 
10814     case GIMPLE_LABEL:
10815       splay_tree_insert (all_labels,
10816 			 (splay_tree_key) gimple_label_label (
10817 					    as_a <glabel *> (stmt)),
10818 			 (splay_tree_value) context);
10819       break;
10820 
10821     default:
10822       break;
10823     }
10824 
10825   return NULL_TREE;
10826 }
10827 
10828 /* Pass 2: Check each branch and see if its context differs from that of
10829    the destination label's context.  */
10830 
10831 static tree
diagnose_sb_2(gimple_stmt_iterator * gsi_p,bool * handled_ops_p,struct walk_stmt_info * wi)10832 diagnose_sb_2 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
10833     	       struct walk_stmt_info *wi)
10834 {
10835   gimple *context = (gimple *) wi->info;
10836   splay_tree_node n;
10837   gimple *stmt = gsi_stmt (*gsi_p);
10838 
10839   *handled_ops_p = true;
10840 
10841   switch (gimple_code (stmt))
10842     {
10843     WALK_SUBSTMTS;
10844 
10845     case GIMPLE_OMP_PARALLEL:
10846     case GIMPLE_OMP_TASK:
10847     case GIMPLE_OMP_SECTIONS:
10848     case GIMPLE_OMP_SINGLE:
10849     case GIMPLE_OMP_SECTION:
10850     case GIMPLE_OMP_MASTER:
10851     case GIMPLE_OMP_ORDERED:
10852     case GIMPLE_OMP_CRITICAL:
10853     case GIMPLE_OMP_TARGET:
10854     case GIMPLE_OMP_TEAMS:
10855     case GIMPLE_OMP_TASKGROUP:
10856       wi->info = stmt;
10857       walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
10858       wi->info = context;
10859       break;
10860 
10861     case GIMPLE_OMP_FOR:
10862       wi->info = stmt;
10863       /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
10864 	 walk them.  */
10865       walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt),
10866 			   diagnose_sb_2, NULL, wi);
10867       walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
10868       wi->info = context;
10869       break;
10870 
10871     case GIMPLE_COND:
10872 	{
10873 	  gcond *cond_stmt = as_a <gcond *> (stmt);
10874 	  tree lab = gimple_cond_true_label (cond_stmt);
10875 	  if (lab)
10876 	    {
10877 	      n = splay_tree_lookup (all_labels,
10878 				     (splay_tree_key) lab);
10879 	      diagnose_sb_0 (gsi_p, context,
10880 			     n ? (gimple *) n->value : NULL);
10881 	    }
10882 	  lab = gimple_cond_false_label (cond_stmt);
10883 	  if (lab)
10884 	    {
10885 	      n = splay_tree_lookup (all_labels,
10886 				     (splay_tree_key) lab);
10887 	      diagnose_sb_0 (gsi_p, context,
10888 			     n ? (gimple *) n->value : NULL);
10889 	    }
10890 	}
10891       break;
10892 
10893     case GIMPLE_GOTO:
10894       {
10895 	tree lab = gimple_goto_dest (stmt);
10896 	if (TREE_CODE (lab) != LABEL_DECL)
10897 	  break;
10898 
10899 	n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
10900 	diagnose_sb_0 (gsi_p, context, n ? (gimple *) n->value : NULL);
10901       }
10902       break;
10903 
10904     case GIMPLE_SWITCH:
10905       {
10906 	gswitch *switch_stmt = as_a <gswitch *> (stmt);
10907 	unsigned int i;
10908 	for (i = 0; i < gimple_switch_num_labels (switch_stmt); ++i)
10909 	  {
10910 	    tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
10911 	    n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
10912 	    if (n && diagnose_sb_0 (gsi_p, context, (gimple *) n->value))
10913 	      break;
10914 	  }
10915       }
10916       break;
10917 
10918     case GIMPLE_RETURN:
10919       diagnose_sb_0 (gsi_p, context, NULL);
10920       break;
10921 
10922     default:
10923       break;
10924     }
10925 
10926   return NULL_TREE;
10927 }
10928 
10929 static unsigned int
diagnose_omp_structured_block_errors(void)10930 diagnose_omp_structured_block_errors (void)
10931 {
10932   struct walk_stmt_info wi;
10933   gimple_seq body = gimple_body (current_function_decl);
10934 
10935   all_labels = splay_tree_new (splay_tree_compare_pointers, 0, 0);
10936 
10937   memset (&wi, 0, sizeof (wi));
10938   walk_gimple_seq (body, diagnose_sb_1, NULL, &wi);
10939 
10940   memset (&wi, 0, sizeof (wi));
10941   wi.want_locations = true;
10942   walk_gimple_seq_mod (&body, diagnose_sb_2, NULL, &wi);
10943 
10944   gimple_set_body (current_function_decl, body);
10945 
10946   splay_tree_delete (all_labels);
10947   all_labels = NULL;
10948 
10949   return 0;
10950 }
10951 
10952 namespace {
10953 
10954 const pass_data pass_data_diagnose_omp_blocks =
10955 {
10956   GIMPLE_PASS, /* type */
10957   "*diagnose_omp_blocks", /* name */
10958   OPTGROUP_OMP, /* optinfo_flags */
10959   TV_NONE, /* tv_id */
10960   PROP_gimple_any, /* properties_required */
10961   0, /* properties_provided */
10962   0, /* properties_destroyed */
10963   0, /* todo_flags_start */
10964   0, /* todo_flags_finish */
10965 };
10966 
10967 class pass_diagnose_omp_blocks : public gimple_opt_pass
10968 {
10969 public:
pass_diagnose_omp_blocks(gcc::context * ctxt)10970   pass_diagnose_omp_blocks (gcc::context *ctxt)
10971     : gimple_opt_pass (pass_data_diagnose_omp_blocks, ctxt)
10972   {}
10973 
10974   /* opt_pass methods: */
gate(function *)10975   virtual bool gate (function *)
10976   {
10977     return flag_openacc || flag_openmp || flag_openmp_simd;
10978   }
execute(function *)10979   virtual unsigned int execute (function *)
10980     {
10981       return diagnose_omp_structured_block_errors ();
10982     }
10983 
10984 }; // class pass_diagnose_omp_blocks
10985 
10986 } // anon namespace
10987 
10988 gimple_opt_pass *
make_pass_diagnose_omp_blocks(gcc::context * ctxt)10989 make_pass_diagnose_omp_blocks (gcc::context *ctxt)
10990 {
10991   return new pass_diagnose_omp_blocks (ctxt);
10992 }
10993 
10994 
10995 #include "gt-omp-low.h"
10996