xref: /netbsd/external/gpl3/gcc.old/dist/gcc/omp-low.c (revision 0fc04c29)
1 /* Lowering pass for OMP directives.  Converts OMP directives into explicit
2    calls to the runtime library (libgomp), data marshalling to implement data
3    sharing and copying clauses, offloading to accelerators, and more.
4 
5    Contributed by Diego Novillo <dnovillo@redhat.com>
6 
7    Copyright (C) 2005-2019 Free Software Foundation, Inc.
8 
9 This file is part of GCC.
10 
11 GCC is free software; you can redistribute it and/or modify it under
12 the terms of the GNU General Public License as published by the Free
13 Software Foundation; either version 3, or (at your option) any later
14 version.
15 
16 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
17 WARRANTY; without even the implied warranty of MERCHANTABILITY or
18 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
19 for more details.
20 
21 You should have received a copy of the GNU General Public License
22 along with GCC; see the file COPYING3.  If not see
23 <http://www.gnu.org/licenses/>.  */
24 
25 #include "config.h"
26 #include "system.h"
27 #include "coretypes.h"
28 #include "backend.h"
29 #include "target.h"
30 #include "tree.h"
31 #include "gimple.h"
32 #include "tree-pass.h"
33 #include "ssa.h"
34 #include "cgraph.h"
35 #include "pretty-print.h"
36 #include "diagnostic-core.h"
37 #include "fold-const.h"
38 #include "stor-layout.h"
39 #include "internal-fn.h"
40 #include "gimple-fold.h"
41 #include "gimplify.h"
42 #include "gimple-iterator.h"
43 #include "gimplify-me.h"
44 #include "gimple-walk.h"
45 #include "tree-iterator.h"
46 #include "tree-inline.h"
47 #include "langhooks.h"
48 #include "tree-dfa.h"
49 #include "tree-ssa.h"
50 #include "splay-tree.h"
51 #include "omp-general.h"
52 #include "omp-low.h"
53 #include "omp-grid.h"
54 #include "gimple-low.h"
55 #include "symbol-summary.h"
56 #include "tree-nested.h"
57 #include "context.h"
58 #include "gomp-constants.h"
59 #include "gimple-pretty-print.h"
60 #include "hsa-common.h"
61 #include "stringpool.h"
62 #include "attribs.h"
63 
64 /* Lowering of OMP parallel and workshare constructs proceeds in two
65    phases.  The first phase scans the function looking for OMP statements
66    and then for variables that must be replaced to satisfy data sharing
67    clauses.  The second phase expands code for the constructs, as well as
68    re-gimplifying things when variables have been replaced with complex
69    expressions.
70 
71    Final code generation is done by pass_expand_omp.  The flowgraph is
72    scanned for regions which are then moved to a new
73    function, to be invoked by the thread library, or offloaded.  */
74 
/* Context structure.  Used to store information about each parallel
   directive in the code.  */

struct omp_context
{
  /* This field must be at the beginning, as we do "inheritance": Some
     callback functions for tree-inline.c (e.g., omp_copy_decl)
     receive a copy_body_data pointer that is up-casted to an
     omp_context pointer.  */
  copy_body_data cb;

  /* The tree of contexts corresponding to the encountered constructs.  */
  struct omp_context *outer;
  gimple *stmt;

  /* Map variables to fields in a structure that allows communication
     between sending and receiving threads.  */
  splay_tree field_map;
  tree record_type;
  tree sender_decl;
  tree receiver_decl;

  /* These are used just by task contexts, if task firstprivate fn is
     needed.  srecord_type is used to communicate from the thread
     that encountered the task construct to task firstprivate fn,
     record_type is allocated by GOMP_task, initialized by task firstprivate
     fn and passed to the task body fn.  */
  splay_tree sfield_map;
  tree srecord_type;

  /* A chain of variables to add to the top-level block surrounding the
     construct.  In the case of a parallel, this is in the child function.  */
  tree block_vars;

  /* Label to which GOMP_cancel{,lation_point} and explicit and implicit
     barriers should jump during the omplower pass.  */
  tree cancel_label;

  /* The sibling GIMPLE_OMP_FOR simd with _simt_ clause or NULL
     otherwise.  */
  gimple *simt_stmt;

  /* For task reductions registered in this context, a vector containing
     the length of the private copies block (if constant, otherwise NULL)
     and then offsets (if constant, otherwise NULL) for each entry.  */
  vec<tree> task_reductions;

  /* And a hash map from the reduction clauses to the registered array
     elts.  */
  hash_map<tree, unsigned> *task_reduction_map;

  /* Nesting depth of this context.  Used to beautify error messages re
     invalid gotos.  The outermost ctx is depth 1, with depth 0 being
     reserved for the main body of the function.  */
  int depth;

  /* True if this parallel directive is nested within another.  */
  bool is_nested;

  /* True if this construct can be cancelled.  */
  bool cancellable;
};
137 
/* Map from each scanned GIMPLE_OMP_* statement to its omp_context
   (filled in by new_omp_context, contexts freed via delete_omp_context).  */
static splay_tree all_contexts;
/* NOTE(review): nesting counters, presumably maintained while scanning
   parallel/task and target regions respectively — confirm against the
   scan_omp_* routines later in this file.  */
static int taskreg_nesting_level;
static int target_nesting_level;
/* DECL_UIDs of shared variables made addressable only because a task
   needs their address (see use_pointer_for_field).  */
static bitmap task_shared_vars;
/* DECL_UIDs of global variables that were non-addressable when first
   seen; the answer is kept stable for the whole pass even if they become
   addressable later, see PR91216 and use_pointer_for_field.  */
static bitmap global_nonaddressable_vars;
/* Contexts of task/parallel regions collected during scanning;
   presumably post-processed once scanning completes — verify callers.  */
static vec<omp_context *> taskreg_contexts;

static void scan_omp (gimple_seq *, omp_context *);
static tree scan_omp_1_op (tree *, int *, void *);
147 
/* Case labels for use inside a walk_gimple_stmt callback's switch on
   gimple_code: the container statements whose sub-statements must be
   walked.  Requires a variable *HANDLED_OPS_P to be in scope.  */
#define WALK_SUBSTMTS  \
    case GIMPLE_BIND: \
    case GIMPLE_TRY: \
    case GIMPLE_CATCH: \
    case GIMPLE_EH_FILTER: \
    case GIMPLE_TRANSACTION: \
      /* The sub-statements for these should be walked.  */ \
      *handled_ops_p = false; \
      break;
157 
158 /* Return true if CTX corresponds to an oacc parallel region.  */
159 
160 static bool
161 is_oacc_parallel (omp_context *ctx)
162 {
163   enum gimple_code outer_type = gimple_code (ctx->stmt);
164   return ((outer_type == GIMPLE_OMP_TARGET)
165 	  && (gimple_omp_target_kind (ctx->stmt)
166 	      == GF_OMP_TARGET_KIND_OACC_PARALLEL));
167 }
168 
169 /* Return true if CTX corresponds to an oacc kernels region.  */
170 
171 static bool
172 is_oacc_kernels (omp_context *ctx)
173 {
174   enum gimple_code outer_type = gimple_code (ctx->stmt);
175   return ((outer_type == GIMPLE_OMP_TARGET)
176 	  && (gimple_omp_target_kind (ctx->stmt)
177 	      == GF_OMP_TARGET_KIND_OACC_KERNELS));
178 }
179 
/* If DECL is the artificial dummy VAR_DECL created for non-static
   data member privatization, return the underlying "this" parameter,
   otherwise return NULL.  */

tree
omp_member_access_dummy_var (tree decl)
{
  /* The dummy var is an artificial, debug-ignored VAR_DECL whose
     DECL_VALUE_EXPR the language hook asks us to disregard.  */
  if (!VAR_P (decl)
      || !DECL_ARTIFICIAL (decl)
      || !DECL_IGNORED_P (decl)
      || !DECL_HAS_VALUE_EXPR_P (decl)
      || !lang_hooks.decls.omp_disregard_value_expr (decl, false))
    return NULL_TREE;

  tree v = DECL_VALUE_EXPR (decl);
  if (TREE_CODE (v) != COMPONENT_REF)
    return NULL_TREE;

  /* Strip the member access down to its base object; accept only an
     artificial pointer PARM_DECL of the current function (i.e. the
     "this" parameter).  */
  while (1)
    switch (TREE_CODE (v))
      {
      case COMPONENT_REF:
      case MEM_REF:
      case INDIRECT_REF:
      CASE_CONVERT:
      case POINTER_PLUS_EXPR:
	v = TREE_OPERAND (v, 0);
	continue;
      case PARM_DECL:
	if (DECL_CONTEXT (v) == current_function_decl
	    && DECL_ARTIFICIAL (v)
	    && TREE_CODE (TREE_TYPE (v)) == POINTER_TYPE)
	  return v;
	return NULL_TREE;
      default:
	return NULL_TREE;
      }
}
218 
219 /* Helper for unshare_and_remap, called through walk_tree.  */
220 
221 static tree
222 unshare_and_remap_1 (tree *tp, int *walk_subtrees, void *data)
223 {
224   tree *pair = (tree *) data;
225   if (*tp == pair[0])
226     {
227       *tp = unshare_expr (pair[1]);
228       *walk_subtrees = 0;
229     }
230   else if (IS_TYPE_OR_DECL_P (*tp))
231     *walk_subtrees = 0;
232   return NULL_TREE;
233 }
234 
235 /* Return unshare_expr (X) with all occurrences of FROM
236    replaced with TO.  */
237 
238 static tree
239 unshare_and_remap (tree x, tree from, tree to)
240 {
241   tree pair[2] = { from, to };
242   x = unshare_expr (x);
243   walk_tree (&x, unshare_and_remap_1, pair, NULL);
244   return x;
245 }
246 
247 /* Convenience function for calling scan_omp_1_op on tree operands.  */
248 
249 static inline tree
250 scan_omp_op (tree *tp, omp_context *ctx)
251 {
252   struct walk_stmt_info wi;
253 
254   memset (&wi, 0, sizeof (wi));
255   wi.info = ctx;
256   wi.want_locations = true;
257 
258   return walk_tree (tp, scan_omp_1_op, &wi, NULL);
259 }
260 
261 static void lower_omp (gimple_seq *, omp_context *);
262 static tree lookup_decl_in_outer_ctx (tree, omp_context *);
263 static tree maybe_lookup_decl_in_outer_ctx (tree, omp_context *);
264 
265 /* Return true if CTX is for an omp parallel.  */
266 
267 static inline bool
268 is_parallel_ctx (omp_context *ctx)
269 {
270   return gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL;
271 }
272 
273 
274 /* Return true if CTX is for an omp task.  */
275 
276 static inline bool
277 is_task_ctx (omp_context *ctx)
278 {
279   return gimple_code (ctx->stmt) == GIMPLE_OMP_TASK;
280 }
281 
282 
283 /* Return true if CTX is for an omp taskloop.  */
284 
285 static inline bool
286 is_taskloop_ctx (omp_context *ctx)
287 {
288   return gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
289 	 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP;
290 }
291 
292 
293 /* Return true if CTX is for a host omp teams.  */
294 
295 static inline bool
296 is_host_teams_ctx (omp_context *ctx)
297 {
298   return gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
299 	 && gimple_omp_teams_host (as_a <gomp_teams *> (ctx->stmt));
300 }
301 
302 /* Return true if CTX is for an omp parallel or omp task or host omp teams
303    (the last one is strictly not a task region in OpenMP speak, but we
304    need to treat it similarly).  */
305 
306 static inline bool
307 is_taskreg_ctx (omp_context *ctx)
308 {
309   return is_parallel_ctx (ctx) || is_task_ctx (ctx) || is_host_teams_ctx (ctx);
310 }
311 
312 /* Return true if EXPR is variable sized.  */
313 
314 static inline bool
315 is_variable_sized (const_tree expr)
316 {
317   return !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
318 }
319 
320 /* Lookup variables.  The "maybe" form
321    allows for the variable form to not have been entered, otherwise we
322    assert that the variable must have been entered.  */
323 
324 static inline tree
325 lookup_decl (tree var, omp_context *ctx)
326 {
327   tree *n = ctx->cb.decl_map->get (var);
328   return *n;
329 }
330 
331 static inline tree
332 maybe_lookup_decl (const_tree var, omp_context *ctx)
333 {
334   tree *n = ctx->cb.decl_map->get (const_cast<tree> (var));
335   return n ? *n : NULL_TREE;
336 }
337 
338 static inline tree
339 lookup_field (tree var, omp_context *ctx)
340 {
341   splay_tree_node n;
342   n = splay_tree_lookup (ctx->field_map, (splay_tree_key) var);
343   return (tree) n->value;
344 }
345 
346 static inline tree
347 lookup_sfield (splay_tree_key key, omp_context *ctx)
348 {
349   splay_tree_node n;
350   n = splay_tree_lookup (ctx->sfield_map
351 			 ? ctx->sfield_map : ctx->field_map, key);
352   return (tree) n->value;
353 }
354 
355 static inline tree
356 lookup_sfield (tree var, omp_context *ctx)
357 {
358   return lookup_sfield ((splay_tree_key) var, ctx);
359 }
360 
361 static inline tree
362 maybe_lookup_field (splay_tree_key key, omp_context *ctx)
363 {
364   splay_tree_node n;
365   n = splay_tree_lookup (ctx->field_map, key);
366   return n ? (tree) n->value : NULL_TREE;
367 }
368 
369 static inline tree
370 maybe_lookup_field (tree var, omp_context *ctx)
371 {
372   return maybe_lookup_field ((splay_tree_key) var, ctx);
373 }
374 
/* Return true if DECL should be copied by pointer.  SHARED_CTX is
   the parallel context if DECL is to be shared.  May set
   TREE_ADDRESSABLE on the outer decl (and record it in
   task_shared_vars) when a task must take its address.  Note the
   goto from the nested-parallel check into the task branch below.  */

static bool
use_pointer_for_field (tree decl, omp_context *shared_ctx)
{
  /* Aggregates and atomics can never be copied in/out by value.  */
  if (AGGREGATE_TYPE_P (TREE_TYPE (decl))
      || TYPE_ATOMIC (TREE_TYPE (decl)))
    return true;

  /* We can only use copy-in/copy-out semantics for shared variables
     when we know the value is not accessible from an outer scope.  */
  if (shared_ctx)
    {
      gcc_assert (!is_gimple_omp_oacc (shared_ctx->stmt));

      /* ??? Trivially accessible from anywhere.  But why would we even
	 be passing an address in this case?  Should we simply assert
	 this to be false, or should we have a cleanup pass that removes
	 these from the list of mappings?  */
      if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, shared_ctx)))
	return true;

      /* For variables with DECL_HAS_VALUE_EXPR_P set, we cannot tell
	 without analyzing the expression whether or not its location
	 is accessible to anyone else.  In the case of nested parallel
	 regions it certainly may be.  */
      if (TREE_CODE (decl) != RESULT_DECL && DECL_HAS_VALUE_EXPR_P (decl))
	return true;

      /* Do not use copy-in/copy-out for variables that have their
	 address taken.  */
      if (is_global_var (decl))
	{
	  /* For file scope vars, track whether we've seen them as
	     non-addressable initially and in that case, keep the same
	     answer for the duration of the pass, even when they are made
	     addressable later on e.g. through reduction expansion.  Global
	     variables which weren't addressable before the pass will not
	     have their privatized copies address taken.  See PR91216.  */
	  if (!TREE_ADDRESSABLE (decl))
	    {
	      if (!global_nonaddressable_vars)
		global_nonaddressable_vars = BITMAP_ALLOC (NULL);
	      bitmap_set_bit (global_nonaddressable_vars, DECL_UID (decl));
	    }
	  else if (!global_nonaddressable_vars
		   || !bitmap_bit_p (global_nonaddressable_vars,
				     DECL_UID (decl)))
	    return true;
	}
      else if (TREE_ADDRESSABLE (decl))
	return true;

      /* lower_send_shared_vars only uses copy-in, but not copy-out
	 for these.  */
      if (TREE_READONLY (decl)
	  || ((TREE_CODE (decl) == RESULT_DECL
	       || TREE_CODE (decl) == PARM_DECL)
	      && DECL_BY_REFERENCE (decl)))
	return false;

      /* Disallow copy-in/out in nested parallel if
	 decl is shared in outer parallel, otherwise
	 each thread could store the shared variable
	 in its own copy-in location, making the
	 variable no longer really shared.  */
      if (shared_ctx->is_nested)
	{
	  omp_context *up;

	  /* Find the innermost enclosing taskreg or offloaded target
	     region that also has a mapping for DECL.  */
	  for (up = shared_ctx->outer; up; up = up->outer)
	    if ((is_taskreg_ctx (up)
		 || (gimple_code (up->stmt) == GIMPLE_OMP_TARGET
		     && is_gimple_omp_offloaded (up->stmt)))
		&& maybe_lookup_decl (decl, up))
	      break;

	  if (up)
	    {
	      tree c;

	      /* Verify DECL is actually mapped (target) or shared
		 (parallel/task) by a clause on that construct.  */
	      if (gimple_code (up->stmt) == GIMPLE_OMP_TARGET)
		{
		  for (c = gimple_omp_target_clauses (up->stmt);
		       c; c = OMP_CLAUSE_CHAIN (c))
		    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
			&& OMP_CLAUSE_DECL (c) == decl)
		      break;
		}
	      else
		for (c = gimple_omp_taskreg_clauses (up->stmt);
		     c; c = OMP_CLAUSE_CHAIN (c))
		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
		      && OMP_CLAUSE_DECL (c) == decl)
		    break;

	      if (c)
		goto maybe_mark_addressable_and_ret;
	    }
	}

      /* For tasks avoid using copy-in/out.  As tasks can be
	 deferred or executed in different thread, when GOMP_task
	 returns, the task hasn't necessarily terminated.  */
      if (is_task_ctx (shared_ctx))
	{
	  tree outer;
	maybe_mark_addressable_and_ret:
	  outer = maybe_lookup_decl_in_outer_ctx (decl, shared_ctx);
	  if (is_gimple_reg (outer) && !omp_member_access_dummy_var (outer))
	    {
	      /* Taking address of OUTER in lower_send_shared_vars
		 might need regimplification of everything that uses the
		 variable.  */
	      if (!task_shared_vars)
		task_shared_vars = BITMAP_ALLOC (NULL);
	      bitmap_set_bit (task_shared_vars, DECL_UID (outer));
	      TREE_ADDRESSABLE (outer) = 1;
	    }
	  return true;
	}
    }

  return false;
}
501 
/* Construct a new automatic decl similar to VAR, named NAME with type
   TYPE, chained onto CTX->BLOCK_VARS.  */

static tree
omp_copy_decl_2 (tree var, tree name, tree type, omp_context *ctx)
{
  tree copy = copy_var_decl (var, name, type);

  DECL_CONTEXT (copy) = current_function_decl;
  DECL_CHAIN (copy) = ctx->block_vars;
  /* If VAR is listed in task_shared_vars, it means it wasn't
     originally addressable and is just that way because a task needs to
     take its address.  But we don't need to take the address of
     privatizations from that var.  Likewise for globals that were
     non-addressable when first seen (global_nonaddressable_vars,
     see PR91216).  */
  if (TREE_ADDRESSABLE (var)
      && ((task_shared_vars
	   && bitmap_bit_p (task_shared_vars, DECL_UID (var)))
	  || (global_nonaddressable_vars
	      && bitmap_bit_p (global_nonaddressable_vars, DECL_UID (var)))))
    TREE_ADDRESSABLE (copy) = 0;
  ctx->block_vars = copy;

  return copy;
}
525 
526 static tree
527 omp_copy_decl_1 (tree var, omp_context *ctx)
528 {
529   return omp_copy_decl_2 (var, DECL_NAME (var), TREE_TYPE (var), ctx);
530 }
531 
532 /* Build COMPONENT_REF and set TREE_THIS_VOLATILE and TREE_READONLY on it
533    as appropriate.  */
534 static tree
535 omp_build_component_ref (tree obj, tree field)
536 {
537   tree ret = build3 (COMPONENT_REF, TREE_TYPE (field), obj, field, NULL);
538   if (TREE_THIS_VOLATILE (field))
539     TREE_THIS_VOLATILE (ret) |= 1;
540   if (TREE_READONLY (field))
541     TREE_READONLY (ret) |= 1;
542   return ret;
543 }
544 
/* Build tree nodes to access the field for VAR on the receiver side,
   i.e. *CTX->RECEIVER_DECL.FIELD, with an extra dereference when the
   variable is passed BY_REF.  */

static tree
build_receiver_ref (tree var, bool by_ref, omp_context *ctx)
{
  tree x, field = lookup_field (var, ctx);

  /* If the receiver record type was remapped in the child function,
     remap the field into the new record type.  */
  x = maybe_lookup_field (field, ctx);
  if (x != NULL)
    field = x;

  /* The receiver pointer is known valid, so mark the loads NOTRAP.  */
  x = build_simple_mem_ref (ctx->receiver_decl);
  TREE_THIS_NOTRAP (x) = 1;
  x = omp_build_component_ref (x, field);
  if (by_ref)
    {
      x = build_simple_mem_ref (x);
      TREE_THIS_NOTRAP (x) = 1;
    }

  return x;
}
569 
/* Build tree nodes to access VAR in the scope outer to CTX.  In the case
   of a parallel, this is a component reference; for workshare constructs
   this is some variable.  CODE is the clause code on whose behalf the
   reference is built, or OMP_CLAUSE_ERROR when none.  */

static tree
build_outer_var_ref (tree var, omp_context *ctx,
		     enum omp_clause_code code = OMP_CLAUSE_ERROR)
{
  tree x;
  omp_context *outer = ctx->outer;
  /* Taskgroup contexts don't remap variables; look through them.  */
  while (outer && gimple_code (outer->stmt) == GIMPLE_OMP_TASKGROUP)
    outer = outer->outer;

  if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
    x = var;
  else if (is_variable_sized (var))
    {
      /* Variable-sized vars live behind a pointer in their
	 DECL_VALUE_EXPR; build the outer ref of that pointer and
	 dereference it.  */
      x = TREE_OPERAND (DECL_VALUE_EXPR (var), 0);
      x = build_outer_var_ref (x, ctx, code);
      x = build_simple_mem_ref (x);
    }
  else if (is_taskreg_ctx (ctx))
    {
      bool by_ref = use_pointer_for_field (var, NULL);
      x = build_receiver_ref (var, by_ref, ctx);
    }
  else if ((gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
	    && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
	   || (code == OMP_CLAUSE_PRIVATE
	       && (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
		   || gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS
		   || gimple_code (ctx->stmt) == GIMPLE_OMP_SINGLE)))
    {
      /* #pragma omp simd isn't a worksharing construct, and can reference
	 even private vars in its linear etc. clauses.
	 Similarly for OMP_CLAUSE_PRIVATE with outer ref, that can refer
	 to private vars in all worksharing constructs.  */
      x = NULL_TREE;
      if (outer && is_taskreg_ctx (outer))
	x = lookup_decl (var, outer);
      else if (outer)
	x = maybe_lookup_decl_in_outer_ctx (var, ctx);
      if (x == NULL_TREE)
	x = var;
    }
  else if (code == OMP_CLAUSE_LASTPRIVATE && is_taskloop_ctx (ctx))
    {
      gcc_assert (outer);
      /* Taskloop fields are keyed by &DECL_UID (see install_var_field
	 with mask bit 8).  */
      splay_tree_node n
	= splay_tree_lookup (outer->field_map,
			     (splay_tree_key) &DECL_UID (var));
      if (n == NULL)
	{
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, outer)))
	    x = var;
	  else
	    x = lookup_decl (var, outer);
	}
      else
	{
	  tree field = (tree) n->value;
	  /* If the receiver record type was remapped in the child function,
	     remap the field into the new record type.  */
	  x = maybe_lookup_field (field, outer);
	  if (x != NULL)
	    field = x;

	  x = build_simple_mem_ref (outer->receiver_decl);
	  x = omp_build_component_ref (x, field);
	  if (use_pointer_for_field (var, outer))
	    x = build_simple_mem_ref (x);
	}
    }
  else if (outer)
    {
      if (gimple_code (outer->stmt) == GIMPLE_OMP_GRID_BODY)
	{
	  outer = outer->outer;
	  gcc_assert (outer
		      && gimple_code (outer->stmt) != GIMPLE_OMP_GRID_BODY);
	}
      x = lookup_decl (var, outer);
    }
  else if (omp_is_reference (var))
    /* This can happen with orphaned constructs.  If var is reference, it is
       possible it is shared and as such valid.  */
    x = var;
  else if (omp_member_access_dummy_var (var))
    x = var;
  else
    gcc_unreachable ();

  if (x == var)
    {
      /* Member-access dummy vars must be rewritten via their value
	 expression, remapping the underlying "this" if needed.  */
      tree t = omp_member_access_dummy_var (var);
      if (t)
	{
	  x = DECL_VALUE_EXPR (var);
	  tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
	  if (o != t)
	    x = unshare_and_remap (x, t, o);
	  else
	    x = unshare_expr (x);
	}
    }

  if (omp_is_reference (var))
    x = build_simple_mem_ref (x);

  return x;
}
681 
682 /* Build tree nodes to access the field for VAR on the sender side.  */
683 
684 static tree
685 build_sender_ref (splay_tree_key key, omp_context *ctx)
686 {
687   tree field = lookup_sfield (key, ctx);
688   return omp_build_component_ref (ctx->sender_decl, field);
689 }
690 
691 static tree
692 build_sender_ref (tree var, omp_context *ctx)
693 {
694   return build_sender_ref ((splay_tree_key) var, ctx);
695 }
696 
/* Add a new field for VAR inside the structure CTX->SENDER_DECL.  MASK
   is a bit set controlling where the field goes and how it is keyed:
     bit 0 (1) - enter the field into CTX->FIELD_MAP (record_type side);
     bit 1 (2) - enter the field into CTX->SFIELD_MAP (srecord_type side);
     bit 2 (4) - VAR is an array; declare the field as pointer to pointer
		 to its type;
     bit 3 (8) - key the maps by &DECL_UID (VAR) rather than VAR itself.
   BY_REF declares the field as a pointer to VAR's type.  */

static void
install_var_field (tree var, bool by_ref, int mask, omp_context *ctx)
{
  tree field, type, sfield = NULL_TREE;
  splay_tree_key key = (splay_tree_key) var;

  if ((mask & 8) != 0)
    {
      key = (splay_tree_key) &DECL_UID (var);
      gcc_checking_assert (key != (splay_tree_key) var);
    }
  /* A field for this key must not have been installed yet.  */
  gcc_assert ((mask & 1) == 0
	      || !splay_tree_lookup (ctx->field_map, key));
  gcc_assert ((mask & 2) == 0 || !ctx->sfield_map
	      || !splay_tree_lookup (ctx->sfield_map, key));
  gcc_assert ((mask & 3) == 3
	      || !is_gimple_omp_oacc (ctx->stmt));

  type = TREE_TYPE (var);
  /* Prevent redeclaring the var in the split-off function with a restrict
     pointer type.  Note that we only clear type itself, restrict qualifiers in
     the pointed-to type will be ignored by points-to analysis.  */
  if (POINTER_TYPE_P (type)
      && TYPE_RESTRICT (type))
    type = build_qualified_type (type, TYPE_QUALS (type) & ~TYPE_QUAL_RESTRICT);

  if (mask & 4)
    {
      gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
      type = build_pointer_type (build_pointer_type (type));
    }
  else if (by_ref)
    type = build_pointer_type (type);
  else if ((mask & 3) == 1 && omp_is_reference (var))
    type = TREE_TYPE (type);

  field = build_decl (DECL_SOURCE_LOCATION (var),
		      FIELD_DECL, DECL_NAME (var), type);

  /* Remember what variable this field was created for.  This does have a
     side effect of making dwarf2out ignore this member, so for helpful
     debugging we clear it later in delete_omp_context.  */
  DECL_ABSTRACT_ORIGIN (field) = var;
  if (type == TREE_TYPE (var))
    {
      SET_DECL_ALIGN (field, DECL_ALIGN (var));
      DECL_USER_ALIGN (field) = DECL_USER_ALIGN (var);
      TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (var);
    }
  else
    SET_DECL_ALIGN (field, TYPE_ALIGN (type));

  if ((mask & 3) == 3)
    {
      insert_field_into_struct (ctx->record_type, field);
      if (ctx->srecord_type)
	{
	  sfield = build_decl (DECL_SOURCE_LOCATION (var),
			       FIELD_DECL, DECL_NAME (var), type);
	  DECL_ABSTRACT_ORIGIN (sfield) = var;
	  SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
	  DECL_USER_ALIGN (sfield) = DECL_USER_ALIGN (field);
	  TREE_THIS_VOLATILE (sfield) = TREE_THIS_VOLATILE (field);
	  insert_field_into_struct (ctx->srecord_type, sfield);
	}
    }
  else
    {
      if (ctx->srecord_type == NULL_TREE)
	{
	  tree t;

	  /* Create srecord_type on demand, mirroring every field already
	     accumulated in record_type.  */
	  ctx->srecord_type = lang_hooks.types.make_type (RECORD_TYPE);
	  ctx->sfield_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
	  for (t = TYPE_FIELDS (ctx->record_type); t ; t = TREE_CHAIN (t))
	    {
	      sfield = build_decl (DECL_SOURCE_LOCATION (t),
				   FIELD_DECL, DECL_NAME (t), TREE_TYPE (t));
	      DECL_ABSTRACT_ORIGIN (sfield) = DECL_ABSTRACT_ORIGIN (t);
	      insert_field_into_struct (ctx->srecord_type, sfield);
	      splay_tree_insert (ctx->sfield_map,
				 (splay_tree_key) DECL_ABSTRACT_ORIGIN (t),
				 (splay_tree_value) sfield);
	    }
	}
      sfield = field;
      insert_field_into_struct ((mask & 1) ? ctx->record_type
				: ctx->srecord_type, field);
    }

  if (mask & 1)
    splay_tree_insert (ctx->field_map, key, (splay_tree_value) field);
  if ((mask & 2) && ctx->sfield_map)
    splay_tree_insert (ctx->sfield_map, key, (splay_tree_value) sfield);
}
795 
796 static tree
797 install_var_local (tree var, omp_context *ctx)
798 {
799   tree new_var = omp_copy_decl_1 (var, ctx);
800   insert_decl_map (&ctx->cb, var, new_var);
801   return new_var;
802 }
803 
/* Adjust the replacement for DECL in CTX for the new context.  This means
   copying the DECL_VALUE_EXPR, and fixing up the type.  PRIVATE_DEBUG
   forces the value expr to be copied even for constant-sized decls.  */

static void
fixup_remapped_decl (tree decl, omp_context *ctx, bool private_debug)
{
  tree new_decl, size;

  new_decl = lookup_decl (decl, ctx);

  TREE_TYPE (new_decl) = remap_type (TREE_TYPE (decl), &ctx->cb);

  if ((!TREE_CONSTANT (DECL_SIZE (new_decl)) || private_debug)
      && DECL_HAS_VALUE_EXPR_P (decl))
    {
      tree ve = DECL_VALUE_EXPR (decl);
      /* Remap references inside the value expression into the new
	 context before installing it on the new decl.  */
      walk_tree (&ve, copy_tree_body_r, &ctx->cb, NULL);
      SET_DECL_VALUE_EXPR (new_decl, ve);
      DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
    }

  if (!TREE_CONSTANT (DECL_SIZE (new_decl)))
    {
      /* Variable-sized decl: remap its size trees too, falling back to
	 the (already remapped) type's sizes if remapping failed.  */
      size = remap_decl (DECL_SIZE (decl), &ctx->cb);
      if (size == error_mark_node)
	size = TYPE_SIZE (TREE_TYPE (new_decl));
      DECL_SIZE (new_decl) = size;

      size = remap_decl (DECL_SIZE_UNIT (decl), &ctx->cb);
      if (size == error_mark_node)
	size = TYPE_SIZE_UNIT (TREE_TYPE (new_decl));
      DECL_SIZE_UNIT (new_decl) = size;
    }
}
838 
/* The callback for remap_decl.  Search all containing contexts for a
   mapping of the variable; this avoids having to duplicate the splay
   tree ahead of time.  We know a mapping doesn't already exist in the
   given context.  Create new mappings to implement default semantics.  */

static tree
omp_copy_decl (tree var, copy_body_data *cb)
{
  /* CB is really the omp_context; see the comment on omp_context::cb.  */
  omp_context *ctx = (omp_context *) cb;
  tree new_var;

  if (TREE_CODE (var) == LABEL_DECL)
    {
      /* Forced/nonlocal labels must stay shared; ordinary labels get a
	 fresh copy mapped in this context.  */
      if (FORCED_LABEL (var) || DECL_NONLOCAL (var))
	return var;
      new_var = create_artificial_label (DECL_SOURCE_LOCATION (var));
      DECL_CONTEXT (new_var) = current_function_decl;
      insert_decl_map (&ctx->cb, var, new_var);
      return new_var;
    }

  /* Walk outwards until a taskreg context is reached, returning any
     mapping found on the way.  */
  while (!is_taskreg_ctx (ctx))
    {
      ctx = ctx->outer;
      if (ctx == NULL)
	return var;
      new_var = maybe_lookup_decl (var, ctx);
      if (new_var)
	return new_var;
    }

  if (is_global_var (var) || decl_function_context (var) != ctx->cb.src_fn)
    return var;

  /* A function-local var with no mapping in any enclosing taskreg
     context should have been entered during scanning.  */
  return error_mark_node;
}
875 
/* Create a new context, with OUTER_CTX being the surrounding context.
   The context is registered in ALL_CONTEXTS keyed by STMT.  */

static omp_context *
new_omp_context (gimple *stmt, omp_context *outer_ctx)
{
  omp_context *ctx = XCNEW (omp_context);

  splay_tree_insert (all_contexts, (splay_tree_key) stmt,
		     (splay_tree_value) ctx);
  ctx->stmt = stmt;

  if (outer_ctx)
    {
      /* Inherit the remapping state of the enclosing context.  */
      ctx->outer = outer_ctx;
      ctx->cb = outer_ctx->cb;
      ctx->cb.block = NULL;
      ctx->depth = outer_ctx->depth + 1;
    }
  else
    {
      /* Outermost context: initialize the copy_body_data used to remap
	 decls/types into the (eventually split-off) child function.  */
      ctx->cb.src_fn = current_function_decl;
      ctx->cb.dst_fn = current_function_decl;
      ctx->cb.src_node = cgraph_node::get (current_function_decl);
      gcc_checking_assert (ctx->cb.src_node);
      ctx->cb.dst_node = ctx->cb.src_node;
      ctx->cb.src_cfun = cfun;
      ctx->cb.copy_decl = omp_copy_decl;
      ctx->cb.eh_lp_nr = 0;
      ctx->cb.transform_call_graph_edges = CB_CGE_MOVE;
      ctx->cb.adjust_array_error_bounds = true;
      ctx->cb.dont_remap_vla_if_no_change = true;
      ctx->depth = 1;
    }

  /* Each context gets its own decl map; freed in delete_omp_context.  */
  ctx->cb.decl_map = new hash_map<tree, tree>;

  return ctx;
}
914 
915 static gimple_seq maybe_catch_exception (gimple_seq);
916 
/* Finalize task copyfn: gimplify the copy function of TASK_STMT (if
   any), wrap it in EH handling when needed, and register it with the
   callgraph.  */

static void
finalize_task_copyfn (gomp_task *task_stmt)
{
  struct function *child_cfun;
  tree child_fn;
  gimple_seq seq = NULL, new_seq;
  gbind *bind;

  child_fn = gimple_omp_task_copy_fn (task_stmt);
  if (child_fn == NULL_TREE)
    return;

  child_cfun = DECL_STRUCT_FUNCTION (child_fn);
  DECL_STRUCT_FUNCTION (child_fn)->curr_properties = cfun->curr_properties;

  push_cfun (child_cfun);
  bind = gimplify_body (child_fn, false);
  gimple_seq_add_stmt (&seq, bind);
  new_seq = maybe_catch_exception (seq);
  if (new_seq != seq)
    {
      /* EH wrapping added something; rebuild the body around it.  */
      bind = gimple_build_bind (NULL, new_seq, NULL);
      seq = NULL;
      gimple_seq_add_stmt (&seq, bind);
    }
  gimple_set_body (child_fn, seq);
  pop_cfun ();

  /* Inform the callgraph about the new function.  */
  cgraph_node *node = cgraph_node::get_create (child_fn);
  node->parallelized_function = 1;
  cgraph_node::add_new_function (child_fn, false);
}
952 
/* Destroy an omp_context data structure.  Called through the splay tree
   value delete callback.  */

static void
delete_omp_context (splay_tree_value value)
{
  omp_context *ctx = (omp_context *) value;

  delete ctx->cb.decl_map;

  if (ctx->field_map)
    splay_tree_delete (ctx->field_map);
  if (ctx->sfield_map)
    splay_tree_delete (ctx->sfield_map);

  /* We hijacked DECL_ABSTRACT_ORIGIN earlier.  We need to clear it before
     it produces corrupt debug information.  */
  if (ctx->record_type)
    {
      tree t;
      for (t = TYPE_FIELDS (ctx->record_type); t ; t = DECL_CHAIN (t))
	DECL_ABSTRACT_ORIGIN (t) = NULL;
    }
  if (ctx->srecord_type)
    {
      tree t;
      for (t = TYPE_FIELDS (ctx->srecord_type); t ; t = DECL_CHAIN (t))
	DECL_ABSTRACT_ORIGIN (t) = NULL;
    }

  /* Task contexts may still own an ungimplified copy function.  */
  if (is_task_ctx (ctx))
    finalize_task_copyfn (as_a <gomp_task *> (ctx->stmt));

  if (ctx->task_reduction_map)
    {
      ctx->task_reductions.release ();
      delete ctx->task_reduction_map;
    }

  XDELETE (ctx);
}
994 
/* Fix up RECEIVER_DECL with a type that has been remapped to the child
   context.  If any field of CTX's record type is variably modified, a
   fresh record is built with every field's type remapped into the child
   function; otherwise the record is used as-is.  */

static void
fixup_child_record_type (omp_context *ctx)
{
  tree f, type = ctx->record_type;

  if (!ctx->receiver_decl)
    return;
  /* ??? It isn't sufficient to just call remap_type here, because
     variably_modified_type_p doesn't work the way we expect for
     record types.  Testing each field for whether it needs remapping
     and creating a new record by hand works, however.  */
  for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
    if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
      break;
  if (f)
    {
      /* At least one field needs remapping: build a new record type
	 carrying the same name as the original.  */
      tree name, new_fields = NULL;

      type = lang_hooks.types.make_type (RECORD_TYPE);
      name = DECL_NAME (TYPE_NAME (ctx->record_type));
      name = build_decl (DECL_SOURCE_LOCATION (ctx->receiver_decl),
			 TYPE_DECL, name, type);
      TYPE_NAME (type) = name;

      for (f = TYPE_FIELDS (ctx->record_type); f ; f = DECL_CHAIN (f))
	{
	  tree new_f = copy_node (f);
	  DECL_CONTEXT (new_f) = type;
	  TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &ctx->cb);
	  DECL_CHAIN (new_f) = new_fields;
	  /* Size and offset expressions may reference decls of the parent
	     function; remap those references into the child.  */
	  walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &ctx->cb, NULL);
	  walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r,
		     &ctx->cb, NULL);
	  walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
		     &ctx->cb, NULL);
	  new_fields = new_f;

	  /* Arrange to be able to look up the receiver field
	     given the sender field.  */
	  splay_tree_insert (ctx->field_map, (splay_tree_key) f,
			     (splay_tree_value) new_f);
	}
      /* Fields were accumulated in reverse; restore source order.  */
      TYPE_FIELDS (type) = nreverse (new_fields);
      layout_type (type);
    }

  /* In a target region we never modify any of the pointers in *.omp_data_i,
     so attempt to help the optimizers.  */
  if (is_gimple_omp_offloaded (ctx->stmt))
    type = build_qualified_type (type, TYPE_QUAL_CONST);

  TREE_TYPE (ctx->receiver_decl)
    = build_qualified_type (build_reference_type (type), TYPE_QUAL_RESTRICT);
}
1052 
/* Instantiate decls as necessary in CTX to satisfy the data sharing
   specified by CLAUSES.  Works in two passes over the clause chain:
   the first installs record fields and local copies for each clause
   decl, the second fixes up remapped decls and notes clauses whose
   embedded GIMPLE sequences need a nested scan, which happens last.  */

static void
scan_sharing_clauses (tree clauses, omp_context *ctx)
{
  tree c, decl;
  /* Set when some clause carries a GIMPLE sequence (reduction
     init/merge, lastprivate or linear seq) that must be scanned too.  */
  bool scan_array_reductions = false;

  /* First pass: install fields in the sender/receiver records and
     local copies in the child context.  */
  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      bool by_ref;

      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_PRIVATE:
	  decl = OMP_CLAUSE_DECL (c);
	  if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
	    goto do_private;
	  else if (!is_variable_sized (decl))
	    install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_SHARED:
	  decl = OMP_CLAUSE_DECL (c);
	  /* Ignore shared directives in teams construct inside of
	     target construct.  */
	  if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
	      && !is_host_teams_ctx (ctx))
	    {
	      /* Global variables don't need to be copied,
		 the receiver side will use them directly.  */
	      tree odecl = maybe_lookup_decl_in_outer_ctx (decl, ctx);
	      if (is_global_var (odecl))
		break;
	      insert_decl_map (&ctx->cb, decl, odecl);
	      break;
	    }
	  gcc_assert (is_taskreg_ctx (ctx));
	  gcc_assert (!COMPLETE_TYPE_P (TREE_TYPE (decl))
		      || !is_variable_sized (decl));
	  /* Global variables don't need to be copied,
	     the receiver side will use them directly.  */
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
	    break;
	  if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	    {
	      use_pointer_for_field (decl, ctx);
	      break;
	    }
	  by_ref = use_pointer_for_field (decl, NULL);
	  if ((! TREE_READONLY (decl) && !OMP_CLAUSE_SHARED_READONLY (c))
	      || TREE_ADDRESSABLE (decl)
	      || by_ref
	      || omp_is_reference (decl))
	    {
	      by_ref = use_pointer_for_field (decl, ctx);
	      install_var_field (decl, by_ref, 3, ctx);
	      install_var_local (decl, ctx);
	      break;
	    }
	  /* We don't need to copy const scalar vars back.  */
	  OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_FIRSTPRIVATE);
	  goto do_private;

	case OMP_CLAUSE_REDUCTION:
	case OMP_CLAUSE_IN_REDUCTION:
	  decl = OMP_CLAUSE_DECL (c);
	  if (TREE_CODE (decl) == MEM_REF)
	    {
	      /* Array-section reduction: peel the address expression down
		 to the underlying base decl T.  */
	      tree t = TREE_OPERAND (decl, 0);
	      if (TREE_CODE (t) == POINTER_PLUS_EXPR)
		t = TREE_OPERAND (t, 0);
	      if (TREE_CODE (t) == INDIRECT_REF
		  || TREE_CODE (t) == ADDR_EXPR)
		t = TREE_OPERAND (t, 0);
	      install_var_local (t, ctx);
	      if (is_taskreg_ctx (ctx)
		  && (!is_global_var (maybe_lookup_decl_in_outer_ctx (t, ctx))
		      || (is_task_ctx (ctx)
			  && (TREE_CODE (TREE_TYPE (t)) == POINTER_TYPE
			      || (TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE
				  && (TREE_CODE (TREE_TYPE (TREE_TYPE (t)))
				      == POINTER_TYPE)))))
		  && !is_variable_sized (t)
		  && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
		      || (!OMP_CLAUSE_REDUCTION_TASK (c)
			  && !is_task_ctx (ctx))))
		{
		  by_ref = use_pointer_for_field (t, NULL);
		  if (is_task_ctx (ctx)
		      && TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE
		      && TREE_CODE (TREE_TYPE (TREE_TYPE (t))) == POINTER_TYPE)
		    {
		      install_var_field (t, false, 1, ctx);
		      install_var_field (t, by_ref, 2, ctx);
		    }
		  else
		    install_var_field (t, by_ref, 3, ctx);
		}
	      break;
	    }
	  if (is_task_ctx (ctx)
	      || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
		  && OMP_CLAUSE_REDUCTION_TASK (c)
		  && is_parallel_ctx (ctx)))
	    {
	      /* Global variables don't need to be copied,
		 the receiver side will use them directly.  */
	      if (!is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
		{
		  by_ref = use_pointer_for_field (decl, ctx);
		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION)
		    install_var_field (decl, by_ref, 3, ctx);
		}
	      install_var_local (decl, ctx);
	      break;
	    }
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	      && OMP_CLAUSE_REDUCTION_TASK (c))
	    {
	      install_var_local (decl, ctx);
	      break;
	    }
	  goto do_private;

	case OMP_CLAUSE_LASTPRIVATE:
	  /* Let the corresponding firstprivate clause create
	     the variable.  */
	  if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
	    break;
	  /* FALLTHRU */

	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_LINEAR:
	  decl = OMP_CLAUSE_DECL (c);
	/* Shared target for several privatization-like clauses above.  */
	do_private:
	  if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
	       || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
	      && is_gimple_omp_offloaded (ctx->stmt))
	    {
	      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
		install_var_field (decl, !omp_is_reference (decl), 3, ctx);
	      else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
		install_var_field (decl, true, 3, ctx);
	      else
		install_var_field (decl, false, 3, ctx);
	    }
	  if (is_variable_sized (decl))
	    {
	      if (is_task_ctx (ctx))
		install_var_field (decl, false, 1, ctx);
	      break;
	    }
	  else if (is_taskreg_ctx (ctx))
	    {
	      bool global
		= is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx));
	      by_ref = use_pointer_for_field (decl, NULL);

	      if (is_task_ctx (ctx)
		  && (global || by_ref || omp_is_reference (decl)))
		{
		  install_var_field (decl, false, 1, ctx);
		  if (!global)
		    install_var_field (decl, by_ref, 2, ctx);
		}
	      else if (!global)
		install_var_field (decl, by_ref, 3, ctx);
	    }
	  install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_USE_DEVICE_PTR:
	  decl = OMP_CLAUSE_DECL (c);
	  if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	    install_var_field (decl, true, 3, ctx);
	  else
	    install_var_field (decl, false, 3, ctx);
	  if (DECL_SIZE (decl)
	      && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
	    {
	      /* Variable-sized decl: also install the underlying pointer
		 found in its DECL_VALUE_EXPR.  */
	      tree decl2 = DECL_VALUE_EXPR (decl);
	      gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
	      decl2 = TREE_OPERAND (decl2, 0);
	      gcc_assert (DECL_P (decl2));
	      install_var_local (decl2, ctx);
	    }
	  install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_IS_DEVICE_PTR:
	  decl = OMP_CLAUSE_DECL (c);
	  goto do_private;

	case OMP_CLAUSE__LOOPTEMP_:
	case OMP_CLAUSE__REDUCTEMP_:
	  gcc_assert (is_taskreg_ctx (ctx));
	  decl = OMP_CLAUSE_DECL (c);
	  install_var_field (decl, false, 3, ctx);
	  install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_COPYIN:
	  decl = OMP_CLAUSE_DECL (c);
	  by_ref = use_pointer_for_field (decl, NULL);
	  install_var_field (decl, by_ref, 3, ctx);
	  break;

	/* Clauses whose sole operand is an expression evaluated in the
	   enclosing context; scan it there.  */
	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_DIST_SCHEDULE:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE_PRIORITY:
	case OMP_CLAUSE_GRAINSIZE:
	case OMP_CLAUSE_NUM_TASKS:
	case OMP_CLAUSE_NUM_GANGS:
	case OMP_CLAUSE_NUM_WORKERS:
	case OMP_CLAUSE_VECTOR_LENGTH:
	  if (ctx->outer)
	    scan_omp_op (&OMP_CLAUSE_OPERAND (c, 0), ctx->outer);
	  break;

	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	case OMP_CLAUSE_MAP:
	  if (ctx->outer)
	    scan_omp_op (&OMP_CLAUSE_SIZE (c), ctx->outer);
	  decl = OMP_CLAUSE_DECL (c);
	  /* Global variables with "omp declare target" attribute
	     don't need to be copied, the receiver side will use them
	     directly.  However, global variables with "omp declare target link"
	     attribute need to be copied.  Or when ALWAYS modifier is used.  */
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && DECL_P (decl)
	      && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
		   && (OMP_CLAUSE_MAP_KIND (c)
		       != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
		  || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	      && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_TO
	      && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_FROM
	      && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_TOFROM
	      && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
	      && varpool_node::get_create (decl)->offloadable
	      && !lookup_attribute ("omp declare target link",
				    DECL_ATTRIBUTES (decl)))
	    break;
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER)
	    {
	      /* Ignore GOMP_MAP_POINTER kind for arrays in regions that are
		 not offloaded; there is nothing to map for those.  */
	      if (!is_gimple_omp_offloaded (ctx->stmt)
		  && !POINTER_TYPE_P (TREE_TYPE (decl))
		  && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
		break;
	    }
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
		  || (OMP_CLAUSE_MAP_KIND (c)
		      == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
	    {
	      if (TREE_CODE (decl) == COMPONENT_REF
		  || (TREE_CODE (decl) == INDIRECT_REF
		      && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
		      && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
			  == REFERENCE_TYPE)))
		break;
	      if (DECL_SIZE (decl)
		  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  install_var_local (decl2, ctx);
		}
	      install_var_local (decl, ctx);
	      break;
	    }
	  if (DECL_P (decl))
	    {
	      if (DECL_SIZE (decl)
		  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  install_var_field (decl2, true, 3, ctx);
		  install_var_local (decl2, ctx);
		  install_var_local (decl, ctx);
		}
	      else
		{
		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
		      && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
		      && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
		      && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
		    install_var_field (decl, true, 7, ctx);
		  else
		    install_var_field (decl, true, 3, ctx);
		  if (is_gimple_omp_offloaded (ctx->stmt)
		      && !OMP_CLAUSE_MAP_IN_REDUCTION (c))
		    install_var_local (decl, ctx);
		}
	    }
	  else
	    {
	      /* Non-decl map operand, e.g. an array section.  */
	      tree base = get_base_address (decl);
	      tree nc = OMP_CLAUSE_CHAIN (c);
	      if (DECL_P (base)
		  && nc != NULL_TREE
		  && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
		  && OMP_CLAUSE_DECL (nc) == base
		  && OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_POINTER
		  && integer_zerop (OMP_CLAUSE_SIZE (nc)))
		{
		  OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c) = 1;
		  OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (nc) = 1;
		}
	      else
		{
		  if (ctx->outer)
		    {
		      scan_omp_op (&OMP_CLAUSE_DECL (c), ctx->outer);
		      decl = OMP_CLAUSE_DECL (c);
		    }
		  gcc_assert (!splay_tree_lookup (ctx->field_map,
						  (splay_tree_key) decl));
		  tree field
		    = build_decl (OMP_CLAUSE_LOCATION (c),
				  FIELD_DECL, NULL_TREE, ptr_type_node);
		  SET_DECL_ALIGN (field, TYPE_ALIGN (ptr_type_node));
		  insert_field_into_struct (ctx->record_type, field);
		  splay_tree_insert (ctx->field_map, (splay_tree_key) decl,
				     (splay_tree_value) field);
		}
	    }
	  break;

	case OMP_CLAUSE__GRIDDIM_:
	  if (ctx->outer)
	    {
	      scan_omp_op (&OMP_CLAUSE__GRIDDIM__SIZE (c), ctx->outer);
	      scan_omp_op (&OMP_CLAUSE__GRIDDIM__GROUP (c), ctx->outer);
	    }
	  break;

	/* Clauses with no decls or expressions to install here.  */
	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE_SIMDLEN:
	case OMP_CLAUSE_THREADS:
	case OMP_CLAUSE_SIMD:
	case OMP_CLAUSE_NOGROUP:
	case OMP_CLAUSE_DEFAULTMAP:
	case OMP_CLAUSE_ASYNC:
	case OMP_CLAUSE_WAIT:
	case OMP_CLAUSE_GANG:
	case OMP_CLAUSE_WORKER:
	case OMP_CLAUSE_VECTOR:
	case OMP_CLAUSE_INDEPENDENT:
	case OMP_CLAUSE_AUTO:
	case OMP_CLAUSE_SEQ:
	case OMP_CLAUSE_TILE:
	case OMP_CLAUSE__SIMT_:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_NONTEMPORAL:
	case OMP_CLAUSE_IF_PRESENT:
	case OMP_CLAUSE_FINALIZE:
	case OMP_CLAUSE_TASK_REDUCTION:
	  break;

	case OMP_CLAUSE_ALIGNED:
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_global_var (decl)
	      && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	    install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE__CACHE_:
	default:
	  gcc_unreachable ();
	}
    }

  /* Second pass: fix up the types of remapped decls and record which
     clauses carry GIMPLE sequences that still need scanning.  */
  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_LASTPRIVATE:
	  /* Let the corresponding firstprivate clause create
	     the variable.  */
	  if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
	    scan_array_reductions = true;
	  if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
	    break;
	  /* FALLTHRU */

	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_LINEAR:
	case OMP_CLAUSE_IS_DEVICE_PTR:
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_variable_sized (decl))
	    {
	      if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
		   || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
		  && is_gimple_omp_offloaded (ctx->stmt))
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  install_var_local (decl2, ctx);
		  fixup_remapped_decl (decl2, ctx, false);
		}
	      install_var_local (decl, ctx);
	    }
	  fixup_remapped_decl (decl, ctx,
			       OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
			       && OMP_CLAUSE_PRIVATE_DEBUG (c));
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
	      && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
	    scan_array_reductions = true;
	  break;

	case OMP_CLAUSE_REDUCTION:
	case OMP_CLAUSE_IN_REDUCTION:
	  decl = OMP_CLAUSE_DECL (c);
	  if (TREE_CODE (decl) != MEM_REF)
	    {
	      if (is_variable_sized (decl))
		install_var_local (decl, ctx);
	      fixup_remapped_decl (decl, ctx, false);
	    }
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	    scan_array_reductions = true;
	  break;

	case OMP_CLAUSE_TASK_REDUCTION:
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	    scan_array_reductions = true;
	  break;

	case OMP_CLAUSE_SHARED:
	  /* Ignore shared directives in teams construct inside of
	     target construct.  */
	  if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
	      && !is_host_teams_ctx (ctx))
	    break;
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
	    break;
	  if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	    {
	      if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl,
								 ctx->outer)))
		break;
	      bool by_ref = use_pointer_for_field (decl, ctx);
	      install_var_field (decl, by_ref, 11, ctx);
	      break;
	    }
	  fixup_remapped_decl (decl, ctx, false);
	  break;

	case OMP_CLAUSE_MAP:
	  if (!is_gimple_omp_offloaded (ctx->stmt))
	    break;
	  decl = OMP_CLAUSE_DECL (c);
	  if (DECL_P (decl)
	      && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
		   && (OMP_CLAUSE_MAP_KIND (c)
		       != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
		  || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	      && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
	      && varpool_node::get_create (decl)->offloadable)
	    break;
	  if (DECL_P (decl))
	    {
	      if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
		   || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
		  && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
		  && !COMPLETE_TYPE_P (TREE_TYPE (decl)))
		{
		  tree new_decl = lookup_decl (decl, ctx);
		  TREE_TYPE (new_decl)
		    = remap_type (TREE_TYPE (decl), &ctx->cb);
		}
	      else if (DECL_SIZE (decl)
		       && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  fixup_remapped_decl (decl2, ctx, false);
		  fixup_remapped_decl (decl, ctx, true);
		}
	      else
		fixup_remapped_decl (decl, ctx, false);
	    }
	  break;

	/* Nothing to fix up for these clauses.  */
	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_DIST_SCHEDULE:
	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE_SIMDLEN:
	case OMP_CLAUSE_ALIGNED:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE__LOOPTEMP_:
	case OMP_CLAUSE__REDUCTEMP_:
	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	case OMP_CLAUSE_PRIORITY:
	case OMP_CLAUSE_GRAINSIZE:
	case OMP_CLAUSE_NUM_TASKS:
	case OMP_CLAUSE_THREADS:
	case OMP_CLAUSE_SIMD:
	case OMP_CLAUSE_NOGROUP:
	case OMP_CLAUSE_DEFAULTMAP:
	case OMP_CLAUSE_USE_DEVICE_PTR:
	case OMP_CLAUSE_NONTEMPORAL:
	case OMP_CLAUSE_ASYNC:
	case OMP_CLAUSE_WAIT:
	case OMP_CLAUSE_NUM_GANGS:
	case OMP_CLAUSE_NUM_WORKERS:
	case OMP_CLAUSE_VECTOR_LENGTH:
	case OMP_CLAUSE_GANG:
	case OMP_CLAUSE_WORKER:
	case OMP_CLAUSE_VECTOR:
	case OMP_CLAUSE_INDEPENDENT:
	case OMP_CLAUSE_AUTO:
	case OMP_CLAUSE_SEQ:
	case OMP_CLAUSE_TILE:
	case OMP_CLAUSE__GRIDDIM_:
	case OMP_CLAUSE__SIMT_:
	case OMP_CLAUSE_IF_PRESENT:
	case OMP_CLAUSE_FINALIZE:
	  break;

	case OMP_CLAUSE__CACHE_:
	default:
	  gcc_unreachable ();
	}
    }

  /* Finally scan any GIMPLE sequences stashed inside the clauses
     themselves (reduction init/merge, lastprivate and linear seqs).  */
  gcc_checking_assert (!scan_array_reductions
		       || !is_gimple_omp_oacc (ctx->stmt));
  if (scan_array_reductions)
    {
      for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
	if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	     || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
	     || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
	    && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	  {
	    scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c), ctx);
	    scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
	  }
	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
		 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
	  scan_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
		 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
	  scan_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
    }
}
1647 
1648 /* Create a new name for omp child function.  Returns an identifier. */
1649 
1650 static tree
1651 create_omp_child_function_name (bool task_copy)
1652 {
1653   return clone_function_name_numbered (current_function_decl,
1654 				       task_copy ? "_omp_cpyfn" : "_omp_fn");
1655 }
1656 
1657 /* Return true if CTX may belong to offloaded code: either if current function
1658    is offloaded, or any enclosing context corresponds to a target region.  */
1659 
1660 static bool
1661 omp_maybe_offloaded_ctx (omp_context *ctx)
1662 {
1663   if (cgraph_node::get (current_function_decl)->offloadable)
1664     return true;
1665   for (; ctx; ctx = ctx->outer)
1666     if (is_gimple_omp_offloaded (ctx->stmt))
1667       return true;
1668   return false;
1669 }
1670 
/* Build a decl for the omp child function.  It'll not contain a body
   yet, just the bare decl.  For TASK_COPY the function takes two pointer
   arguments (.omp_data_o, .omp_data_i); otherwise a single .omp_data_i
   pointer which becomes CTX's receiver_decl.  */

static void
create_omp_child_function (omp_context *ctx, bool task_copy)
{
  tree decl, type, name, t;

  name = create_omp_child_function_name (task_copy);
  /* Task copy functions take (dest, src) pointers; ordinary child
     functions take a single data-environment pointer.  */
  if (task_copy)
    type = build_function_type_list (void_type_node, ptr_type_node,
				     ptr_type_node, NULL_TREE);
  else
    type = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);

  decl = build_decl (gimple_location (ctx->stmt), FUNCTION_DECL, name, type);

  gcc_checking_assert (!is_gimple_omp_oacc (ctx->stmt)
		       || !task_copy);
  if (!task_copy)
    ctx->cb.dst_fn = decl;
  else
    gimple_omp_task_set_copy_fn (ctx->stmt, decl);

  /* Mark the new function as a compiler-generated, non-inlinable,
     file-local function with a definition to come.  */
  TREE_STATIC (decl) = 1;
  TREE_USED (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  DECL_IGNORED_P (decl) = 0;
  TREE_PUBLIC (decl) = 0;
  DECL_UNINLINABLE (decl) = 1;
  DECL_EXTERNAL (decl) = 0;
  DECL_CONTEXT (decl) = NULL_TREE;
  DECL_INITIAL (decl) = make_node (BLOCK);
  BLOCK_SUPERCONTEXT (DECL_INITIAL (decl)) = decl;
  DECL_ATTRIBUTES (decl) = DECL_ATTRIBUTES (current_function_decl);
  /* Remove omp declare simd attribute from the new attributes.  */
  if (tree a = lookup_attribute ("omp declare simd", DECL_ATTRIBUTES (decl)))
    {
      /* Find the last "omp declare simd" attribute; everything after it
	 can be shared with the parent's attribute list unchanged.  */
      while (tree a2 = lookup_attribute ("omp declare simd", TREE_CHAIN (a)))
	a = a2;
      a = TREE_CHAIN (a);
      /* Walk the prefix of the list up to A, dropping the "omp declare
	 simd" entries and copying the rest, so the parent's attribute
	 list is left untouched.  */
      for (tree *p = &DECL_ATTRIBUTES (decl); *p != a;)
	if (is_attribute_p ("omp declare simd", get_attribute_name (*p)))
	  *p = TREE_CHAIN (*p);
	else
	  {
	    tree chain = TREE_CHAIN (*p);
	    *p = copy_node (*p);
	    p = &TREE_CHAIN (*p);
	    *p = chain;
	  }
    }
  /* Inherit optimization/target settings and versioning state from the
     function being outlined.  */
  DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)
    = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (current_function_decl);
  DECL_FUNCTION_SPECIFIC_TARGET (decl)
    = DECL_FUNCTION_SPECIFIC_TARGET (current_function_decl);
  DECL_FUNCTION_VERSIONED (decl)
    = DECL_FUNCTION_VERSIONED (current_function_decl);

  if (omp_maybe_offloaded_ctx (ctx))
    {
      cgraph_node::get_create (decl)->offloadable = 1;
      if (ENABLE_OFFLOADING)
	g->have_offload = true;
    }

  if (cgraph_node::get_create (decl)->offloadable
      && !lookup_attribute ("omp declare target",
                           DECL_ATTRIBUTES (current_function_decl)))
    {
      /* Offloaded regions get the entry-point attribute; other functions
	 reachable from offloaded code are marked declare target.  */
      const char *target_attr = (is_gimple_omp_offloaded (ctx->stmt)
				 ? "omp target entrypoint"
				 : "omp declare target");
      DECL_ATTRIBUTES (decl)
	= tree_cons (get_identifier (target_attr),
		     NULL_TREE, DECL_ATTRIBUTES (decl));
    }

  /* The child function returns void.  */
  t = build_decl (DECL_SOURCE_LOCATION (decl),
		  RESULT_DECL, NULL_TREE, void_type_node);
  DECL_ARTIFICIAL (t) = 1;
  DECL_IGNORED_P (t) = 1;
  DECL_CONTEXT (t) = decl;
  DECL_RESULT (decl) = t;

  /* Build the .omp_data_i parameter carrying the data environment.  */
  tree data_name = get_identifier (".omp_data_i");
  t = build_decl (DECL_SOURCE_LOCATION (decl), PARM_DECL, data_name,
		  ptr_type_node);
  DECL_ARTIFICIAL (t) = 1;
  DECL_NAMELESS (t) = 1;
  DECL_ARG_TYPE (t) = ptr_type_node;
  DECL_CONTEXT (t) = current_function_decl;
  TREE_USED (t) = 1;
  TREE_READONLY (t) = 1;
  DECL_ARGUMENTS (decl) = t;
  if (!task_copy)
    ctx->receiver_decl = t;
  else
    {
      /* Task copy functions additionally take the destination record
	 as their first (.omp_data_o) argument.  */
      t = build_decl (DECL_SOURCE_LOCATION (decl),
		      PARM_DECL, get_identifier (".omp_data_o"),
		      ptr_type_node);
      DECL_ARTIFICIAL (t) = 1;
      DECL_NAMELESS (t) = 1;
      DECL_ARG_TYPE (t) = ptr_type_node;
      DECL_CONTEXT (t) = current_function_decl;
      TREE_USED (t) = 1;
      TREE_ADDRESSABLE (t) = 1;
      DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
      DECL_ARGUMENTS (decl) = t;
    }

  /* Allocate memory for the function structure.  The call to
     allocate_struct_function clobbers CFUN, so we need to restore
     it afterward.  */
  push_struct_function (decl);
  cfun->function_end_locus = gimple_location (ctx->stmt);
  init_tree_ssa (cfun);
  pop_cfun ();
}
1791 
1792 /* Callback for walk_gimple_seq.  Check if combined parallel
1793    contains gimple_omp_for_combined_into_p OMP_FOR.  */
1794 
1795 tree
1796 omp_find_combined_for (gimple_stmt_iterator *gsi_p,
1797 		       bool *handled_ops_p,
1798 		       struct walk_stmt_info *wi)
1799 {
1800   gimple *stmt = gsi_stmt (*gsi_p);
1801 
1802   *handled_ops_p = true;
1803   switch (gimple_code (stmt))
1804     {
1805     WALK_SUBSTMTS;
1806 
1807     case GIMPLE_OMP_FOR:
1808       if (gimple_omp_for_combined_into_p (stmt)
1809 	  && gimple_omp_for_kind (stmt)
1810 	     == *(const enum gf_mask *) (wi->info))
1811 	{
1812 	  wi->info = stmt;
1813 	  return integer_zero_node;
1814 	}
1815       break;
1816     default:
1817       break;
1818     }
1819   return NULL;
1820 }
1821 
/* Add _LOOPTEMP_/_REDUCTEMP_ clauses on OpenMP parallel or task.  MSK is
   the loop kind the combined construct distributes over; STMT is the
   parallel/task statement and OUTER_CTX its enclosing context (used so
   the fresh temporaries map to themselves there).  */

static void
add_taskreg_looptemp_clauses (enum gf_mask msk, gimple *stmt,
			      omp_context *outer_ctx)
{
  struct walk_stmt_info wi;

  /* Locate the inner GIMPLE_OMP_FOR that was combined into STMT; the
     walk leaves it in wi.info when found.  */
  memset (&wi, 0, sizeof (wi));
  wi.val_only = true;
  wi.info = (void *) &msk;
  walk_gimple_seq (gimple_omp_body (stmt), omp_find_combined_for, NULL, &wi);
  if (wi.info != (void *) &msk)
    {
      gomp_for *for_stmt = as_a <gomp_for *> ((gimple *) wi.info);
      struct omp_for_data fd;
      omp_extract_for_data (for_stmt, &fd, NULL);
      /* We need two temporaries with fd.loop.v type (istart/iend)
	 and then (fd.collapse - 1) temporaries with the same
	 type for count2 ... countN-1 vars if not constant.  */
      size_t count = 2, i;
      tree type = fd.iter_type;
      if (fd.collapse > 1
	  && TREE_CODE (fd.loop.n2) != INTEGER_CST)
	{
	  count += fd.collapse - 1;
	  /* If there are lastprivate clauses on the inner
	     GIMPLE_OMP_FOR, add one more temporaries for the total number
	     of iterations (product of count1 ... countN-1).  */
	  if (omp_find_clause (gimple_omp_for_clauses (for_stmt),
			       OMP_CLAUSE_LASTPRIVATE))
	    count++;
	  else if (msk == GF_OMP_FOR_KIND_FOR
		   && omp_find_clause (gimple_omp_parallel_clauses (stmt),
				       OMP_CLAUSE_LASTPRIVATE))
	    count++;
	}
      /* Prepend one _LOOPTEMP_ clause per needed temporary.  */
      for (i = 0; i < count; i++)
	{
	  tree temp = create_tmp_var (type);
	  tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
	  /* Map the temp to itself in the outer context so remapping
	     leaves it alone.  */
	  insert_decl_map (&outer_ctx->cb, temp, temp);
	  OMP_CLAUSE_DECL (c) = temp;
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
	  gimple_omp_taskreg_set_clauses (stmt, c);
	}
    }
  /* Taskloops with reductions additionally need a _REDUCTEMP_ holding a
     pointer-sized integer pointer.  */
  if (msk == GF_OMP_FOR_KIND_TASKLOOP
      && omp_find_clause (gimple_omp_task_clauses (stmt),
			  OMP_CLAUSE_REDUCTION))
    {
      tree type = build_pointer_type (pointer_sized_int_node);
      tree temp = create_tmp_var (type);
      tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
      insert_decl_map (&outer_ctx->cb, temp, temp);
      OMP_CLAUSE_DECL (c) = temp;
      OMP_CLAUSE_CHAIN (c) = gimple_omp_task_clauses (stmt);
      gimple_omp_task_set_clauses (stmt, c);
    }
}
1882 
/* Scan an OpenMP parallel directive.  Builds the omp context, the
   .omp_data_s record describing the data shared with the child
   function, and (unless grid-phony) the child function itself.  */

static void
scan_omp_parallel (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
{
  omp_context *ctx;
  tree name;
  gomp_parallel *stmt = as_a <gomp_parallel *> (gsi_stmt (*gsi));

  /* Ignore parallel directives with empty bodies, unless there
     are copyin clauses.  */
  if (optimize > 0
      && empty_body_p (gimple_omp_body (stmt))
      && omp_find_clause (gimple_omp_parallel_clauses (stmt),
			  OMP_CLAUSE_COPYIN) == NULL)
    {
      gsi_replace (gsi, gimple_build_nop (), false);
      return;
    }

  /* For combined parallel-for constructs, add the _LOOPTEMP_ clauses
     the inner loop's expansion needs.  */
  if (gimple_omp_parallel_combined_p (stmt))
    add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_FOR, stmt, outer_ctx);
  /* If any reduction clause has the task modifier, prepend one
     _REDUCTEMP_ clause (GOMP_parallel_reductions expects it at the
     start of the data block, cf. finish_taskreg_scan).  */
  for (tree c = omp_find_clause (gimple_omp_parallel_clauses (stmt),
				 OMP_CLAUSE_REDUCTION);
       c; c = omp_find_clause (OMP_CLAUSE_CHAIN (c), OMP_CLAUSE_REDUCTION))
    if (OMP_CLAUSE_REDUCTION_TASK (c))
      {
	tree type = build_pointer_type (pointer_sized_int_node);
	tree temp = create_tmp_var (type);
	/* Note: this inner C shadows the loop variable; it is the new
	   _REDUCTEMP_ clause being prepended.  */
	tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
	if (outer_ctx)
	  insert_decl_map (&outer_ctx->cb, temp, temp);
	OMP_CLAUSE_DECL (c) = temp;
	OMP_CLAUSE_CHAIN (c) = gimple_omp_parallel_clauses (stmt);
	gimple_omp_parallel_set_clauses (stmt, c);
	break;
      }
    else if (OMP_CLAUSE_CHAIN (c) == NULL_TREE)
      break;

  /* Create the context and the record type (.omp_data_s) that will
     describe the data marshalled to the child function.  */
  ctx = new_omp_context (stmt, outer_ctx);
  taskreg_contexts.safe_push (ctx);
  if (taskreg_nesting_level > 1)
    ctx->is_nested = true;
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  name = create_tmp_var_name (".omp_data_s");
  name = build_decl (gimple_location (stmt),
		     TYPE_DECL, name, ctx->record_type);
  DECL_ARTIFICIAL (name) = 1;
  DECL_NAMELESS (name) = 1;
  TYPE_NAME (ctx->record_type) = name;
  TYPE_ARTIFICIAL (ctx->record_type) = 1;
  if (!gimple_omp_parallel_grid_phony (stmt))
    {
      create_omp_child_function (ctx, false);
      gimple_omp_parallel_set_child_fn (stmt, ctx->cb.dst_fn);
    }

  scan_sharing_clauses (gimple_omp_parallel_clauses (stmt), ctx);
  scan_omp (gimple_omp_body_ptr (stmt), ctx);

  /* If scanning added no fields, no data needs to be passed at all.  */
  if (TYPE_FIELDS (ctx->record_type) == NULL)
    ctx->record_type = ctx->receiver_decl = NULL;
}
1948 
/* Scan an OpenMP task directive.  Builds the omp context, the
   .omp_data_s record, the child function, and, if needed, a second
   record (.omp_data_a) with its own copy function.  */

static void
scan_omp_task (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
{
  omp_context *ctx;
  tree name, t;
  gomp_task *stmt = as_a <gomp_task *> (gsi_stmt (*gsi));

  /* Ignore task directives with empty bodies, unless they have depend
     clause.  */
  if (optimize > 0
      && gimple_omp_body (stmt)
      && empty_body_p (gimple_omp_body (stmt))
      && !omp_find_clause (gimple_omp_task_clauses (stmt), OMP_CLAUSE_DEPEND))
    {
      gsi_replace (gsi, gimple_build_nop (), false);
      return;
    }

  /* Taskloops need _LOOPTEMP_ (and possibly _REDUCTEMP_) clauses for
     their later expansion.  */
  if (gimple_omp_task_taskloop_p (stmt))
    add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_TASKLOOP, stmt, outer_ctx);

  ctx = new_omp_context (stmt, outer_ctx);

  /* A taskwait form has no body to outline; only its clauses need
     scanning.  */
  if (gimple_omp_task_taskwait_p (stmt))
    {
      scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);
      return;
    }

  taskreg_contexts.safe_push (ctx);
  if (taskreg_nesting_level > 1)
    ctx->is_nested = true;
  /* Build the record type (.omp_data_s) describing the data passed to
     the task's child function.  */
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  name = create_tmp_var_name (".omp_data_s");
  name = build_decl (gimple_location (stmt),
		     TYPE_DECL, name, ctx->record_type);
  DECL_ARTIFICIAL (name) = 1;
  DECL_NAMELESS (name) = 1;
  TYPE_NAME (ctx->record_type) = name;
  TYPE_ARTIFICIAL (ctx->record_type) = 1;
  create_omp_child_function (ctx, false);
  gimple_omp_task_set_child_fn (stmt, ctx->cb.dst_fn);

  scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);

  /* Scanning the clauses may have created a second record type
     (srecord_type); give it a name (.omp_data_a) and its own child
     copy function.  */
  if (ctx->srecord_type)
    {
      name = create_tmp_var_name (".omp_data_a");
      name = build_decl (gimple_location (stmt),
			 TYPE_DECL, name, ctx->srecord_type);
      DECL_ARTIFICIAL (name) = 1;
      DECL_NAMELESS (name) = 1;
      TYPE_NAME (ctx->srecord_type) = name;
      TYPE_ARTIFICIAL (ctx->srecord_type) = 1;
      create_omp_child_function (ctx, true);
    }

  scan_omp (gimple_omp_body_ptr (stmt), ctx);

  /* No fields means nothing to marshal: tell the runtime the argument
     block is empty (size 0, alignment 1).  */
  if (TYPE_FIELDS (ctx->record_type) == NULL)
    {
      ctx->record_type = ctx->receiver_decl = NULL;
      t = build_int_cst (long_integer_type_node, 0);
      gimple_omp_task_set_arg_size (stmt, t);
      t = build_int_cst (long_integer_type_node, 1);
      gimple_omp_task_set_arg_align (stmt, t);
    }
}
2020 
2021 /* Helper function for finish_taskreg_scan, called through walk_tree.
2022    If maybe_lookup_decl_in_outer_context returns non-NULL for some
2023    tree, replace it in the expression.  */
2024 
2025 static tree
2026 finish_taskreg_remap (tree *tp, int *walk_subtrees, void *data)
2027 {
2028   if (VAR_P (*tp))
2029     {
2030       omp_context *ctx = (omp_context *) data;
2031       tree t = maybe_lookup_decl_in_outer_ctx (*tp, ctx);
2032       if (t != *tp)
2033 	{
2034 	  if (DECL_HAS_VALUE_EXPR_P (t))
2035 	    t = unshare_expr (DECL_VALUE_EXPR (t));
2036 	  *tp = t;
2037 	}
2038       *walk_subtrees = 0;
2039     }
2040   else if (IS_TYPE_OR_DECL_P (*tp))
2041     *walk_subtrees = 0;
2042   return NULL_TREE;
2043 }
2044 
/* If any decls have been made addressable during scan_omp,
   adjust their fields if needed, and layout record types
   of parallel/task constructs.  */

static void
finish_taskreg_scan (omp_context *ctx)
{
  /* Nothing to do when the construct needs no data record.  */
  if (ctx->record_type == NULL_TREE)
    return;

  /* If any task_shared_vars were needed, verify all
     OMP_CLAUSE_SHARED clauses on GIMPLE_OMP_{PARALLEL,TASK,TEAMS}
     statements if use_pointer_for_field hasn't changed
     because of that.  If it did, update field types now.  */
  if (task_shared_vars)
    {
      tree c;

      for (c = gimple_omp_taskreg_clauses (ctx->stmt);
	   c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
	    && !OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	  {
	    tree decl = OMP_CLAUSE_DECL (c);

	    /* Global variables don't need to be copied,
	       the receiver side will use them directly.  */
	    if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
	      continue;
	    /* Only decls recorded in task_shared_vars that now need to
	       be passed by reference are affected.  */
	    if (!bitmap_bit_p (task_shared_vars, DECL_UID (decl))
		|| !use_pointer_for_field (decl, ctx))
	      continue;
	    tree field = lookup_field (decl, ctx);
	    /* Already a pointer to the decl's type?  Nothing to fix.  */
	    if (TREE_CODE (TREE_TYPE (field)) == POINTER_TYPE
		&& TREE_TYPE (TREE_TYPE (field)) == TREE_TYPE (decl))
	      continue;
	    /* Retype the field as a pointer and recompute alignment,
	       widening the record's alignment if needed.  */
	    TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
	    TREE_THIS_VOLATILE (field) = 0;
	    DECL_USER_ALIGN (field) = 0;
	    SET_DECL_ALIGN (field, TYPE_ALIGN (TREE_TYPE (field)));
	    if (TYPE_ALIGN (ctx->record_type) < DECL_ALIGN (field))
	      SET_TYPE_ALIGN (ctx->record_type, DECL_ALIGN (field));
	    if (ctx->srecord_type)
	      {
		/* Keep the twin field in srecord_type in sync.  */
		tree sfield = lookup_sfield (decl, ctx);
		TREE_TYPE (sfield) = TREE_TYPE (field);
		TREE_THIS_VOLATILE (sfield) = 0;
		DECL_USER_ALIGN (sfield) = 0;
		SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
		if (TYPE_ALIGN (ctx->srecord_type) < DECL_ALIGN (sfield))
		  SET_TYPE_ALIGN (ctx->srecord_type, DECL_ALIGN (sfield));
	      }
	  }
    }

  if (gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL)
    {
      tree clauses = gimple_omp_parallel_clauses (ctx->stmt);
      tree c = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
      if (c)
	{
	  /* Move the _reductemp_ clause first.  GOMP_parallel_reductions
	     expects to find it at the start of data.  */
	  tree f = lookup_field (OMP_CLAUSE_DECL (c), ctx);
	  tree *p = &TYPE_FIELDS (ctx->record_type);
	  /* Unlink F from wherever it is in the field chain ...  */
	  while (*p)
	    if (*p == f)
	      {
		*p = DECL_CHAIN (*p);
		break;
	      }
	    else
	      p = &DECL_CHAIN (*p);
	  /* ... and relink it at the head.  */
	  DECL_CHAIN (f) = TYPE_FIELDS (ctx->record_type);
	  TYPE_FIELDS (ctx->record_type) = f;
	}
      layout_type (ctx->record_type);
      fixup_child_record_type (ctx);
    }
  else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
    {
      layout_type (ctx->record_type);
      fixup_child_record_type (ctx);
    }
  else
    {
      /* Remaining cases are GIMPLE_OMP_TASK (including taskloop).  */
      location_t loc = gimple_location (ctx->stmt);
      tree *p, vla_fields = NULL_TREE, *q = &vla_fields;
      /* Move VLA fields to the end.  */
      p = &TYPE_FIELDS (ctx->record_type);
      while (*p)
	if (!TYPE_SIZE_UNIT (TREE_TYPE (*p))
	    || ! TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (*p))))
	  {
	    /* Unlink *p and append it to the vla_fields list.  */
	    *q = *p;
	    *p = TREE_CHAIN (*p);
	    TREE_CHAIN (*q) = NULL_TREE;
	    q = &TREE_CHAIN (*q);
	  }
	else
	  p = &DECL_CHAIN (*p);
      *p = vla_fields;
      if (gimple_omp_task_taskloop_p (ctx->stmt))
	{
	  /* Move fields corresponding to first and second _looptemp_
	     clause first.  These are filled by GOMP_taskloop
	     and thus need to be in specific positions.  */
	  tree clauses = gimple_omp_task_clauses (ctx->stmt);
	  tree c1 = omp_find_clause (clauses, OMP_CLAUSE__LOOPTEMP_);
	  tree c2 = omp_find_clause (OMP_CLAUSE_CHAIN (c1),
				     OMP_CLAUSE__LOOPTEMP_);
	  tree c3 = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
	  tree f1 = lookup_field (OMP_CLAUSE_DECL (c1), ctx);
	  tree f2 = lookup_field (OMP_CLAUSE_DECL (c2), ctx);
	  tree f3 = c3 ? lookup_field (OMP_CLAUSE_DECL (c3), ctx) : NULL_TREE;
	  /* Unlink f1/f2 (and f3 if present) from the chain ...  */
	  p = &TYPE_FIELDS (ctx->record_type);
	  while (*p)
	    if (*p == f1 || *p == f2 || *p == f3)
	      *p = DECL_CHAIN (*p);
	    else
	      p = &DECL_CHAIN (*p);
	  /* ... and relink them at the head in order f1, f2[, f3].  */
	  DECL_CHAIN (f1) = f2;
	  if (c3)
	    {
	      DECL_CHAIN (f2) = f3;
	      DECL_CHAIN (f3) = TYPE_FIELDS (ctx->record_type);
	    }
	  else
	    DECL_CHAIN (f2) = TYPE_FIELDS (ctx->record_type);
	  TYPE_FIELDS (ctx->record_type) = f1;
	  if (ctx->srecord_type)
	    {
	      /* Mirror the same reordering in srecord_type.  */
	      f1 = lookup_sfield (OMP_CLAUSE_DECL (c1), ctx);
	      f2 = lookup_sfield (OMP_CLAUSE_DECL (c2), ctx);
	      if (c3)
		f3 = lookup_sfield (OMP_CLAUSE_DECL (c3), ctx);
	      p = &TYPE_FIELDS (ctx->srecord_type);
	      while (*p)
		if (*p == f1 || *p == f2 || *p == f3)
		  *p = DECL_CHAIN (*p);
		else
		  p = &DECL_CHAIN (*p);
	      DECL_CHAIN (f1) = f2;
	      DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
	      if (c3)
		{
		  DECL_CHAIN (f2) = f3;
		  DECL_CHAIN (f3) = TYPE_FIELDS (ctx->srecord_type);
		}
	      else
		DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
	      TYPE_FIELDS (ctx->srecord_type) = f1;
	    }
	}
      layout_type (ctx->record_type);
      fixup_child_record_type (ctx);
      if (ctx->srecord_type)
	layout_type (ctx->srecord_type);
      /* Record the argument block size and alignment on the task
	 statement.  A non-constant size (VLA fields) is remapped
	 through finish_taskreg_remap so it refers to outer decls or
	 their value expressions.  */
      tree t = fold_convert_loc (loc, long_integer_type_node,
				 TYPE_SIZE_UNIT (ctx->record_type));
      if (TREE_CODE (t) != INTEGER_CST)
	{
	  t = unshare_expr (t);
	  walk_tree (&t, finish_taskreg_remap, ctx, NULL);
	}
      gimple_omp_task_set_arg_size (ctx->stmt, t);
      t = build_int_cst (long_integer_type_node,
			 TYPE_ALIGN_UNIT (ctx->record_type));
      gimple_omp_task_set_arg_align (ctx->stmt, t);
    }
}
2216 
2217 /* Find the enclosing offload context.  */
2218 
2219 static omp_context *
2220 enclosing_target_ctx (omp_context *ctx)
2221 {
2222   for (; ctx; ctx = ctx->outer)
2223     if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET)
2224       break;
2225 
2226   return ctx;
2227 }
2228 
2229 /* Return true if ctx is part of an oacc kernels region.  */
2230 
2231 static bool
2232 ctx_in_oacc_kernels_region (omp_context *ctx)
2233 {
2234   for (;ctx != NULL; ctx = ctx->outer)
2235     {
2236       gimple *stmt = ctx->stmt;
2237       if (gimple_code (stmt) == GIMPLE_OMP_TARGET
2238 	  && gimple_omp_target_kind (stmt) == GF_OMP_TARGET_KIND_OACC_KERNELS)
2239 	return true;
2240     }
2241 
2242   return false;
2243 }
2244 
/* Check the parallelism clauses inside a kernels regions.
   Until kernels handling moves to use the same loop indirection
   scheme as parallel, we need to do this checking early.
   STMT is the loop to check (or NULL when recursing over enclosing
   contexts, in which case only masks are gathered, no diagnostics);
   CTX is its omp context.  Returns the union of the GOMP_DIM masks
   used by this loop and all enclosing loops.  */

static unsigned
check_oacc_kernel_gwv (gomp_for *stmt, omp_context *ctx)
{
  bool checking = true;
  unsigned outer_mask = 0;
  unsigned this_mask = 0;
  bool has_seq = false, has_auto = false;

  /* First gather the parallelism already claimed by enclosing
     loops.  */
  if (ctx->outer)
    outer_mask = check_oacc_kernel_gwv (NULL,  ctx->outer);
  if (!stmt)
    {
      checking = false;
      if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR)
	return outer_mask;
      stmt = as_a <gomp_for *> (ctx->stmt);
    }

  /* Accumulate the gang/worker/vector axes and seq/auto markers
     requested on this loop.  */
  for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
    {
      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_GANG:
	  this_mask |= GOMP_DIM_MASK (GOMP_DIM_GANG);
	  break;
	case OMP_CLAUSE_WORKER:
	  this_mask |= GOMP_DIM_MASK (GOMP_DIM_WORKER);
	  break;
	case OMP_CLAUSE_VECTOR:
	  this_mask |= GOMP_DIM_MASK (GOMP_DIM_VECTOR);
	  break;
	case OMP_CLAUSE_SEQ:
	  has_seq = true;
	  break;
	case OMP_CLAUSE_AUTO:
	  has_auto = true;
	  break;
	default:
	  break;
	}
    }

  /* Diagnose contradictory combinations on this loop and reuse of an
     axis already claimed by a containing loop.  */
  if (checking)
    {
      if (has_seq && (this_mask || has_auto))
	error_at (gimple_location (stmt), "%<seq%> overrides other"
		  " OpenACC loop specifiers");
      else if (has_auto && this_mask)
	error_at (gimple_location (stmt), "%<auto%> conflicts with other"
		  " OpenACC loop specifiers");

      if (this_mask & outer_mask)
	error_at (gimple_location (stmt), "inner loop uses same"
		  " OpenACC parallelism as containing loop");
    }

  return outer_mask | this_mask;
}
2307 
/* Scan a GIMPLE_OMP_FOR.  Creates and returns a new omp context for
   STMT, after performing early OpenACC checks/fixups when STMT is an
   OpenACC loop.  */

static omp_context *
scan_omp_for (gomp_for *stmt, omp_context *outer_ctx)
{
  omp_context *ctx;
  size_t i;
  tree clauses = gimple_omp_for_clauses (stmt);

  ctx = new_omp_context (stmt, outer_ctx);

  if (is_gimple_omp_oacc (stmt))
    {
      omp_context *tgt = enclosing_target_ctx (outer_ctx);

      /* In an OpenACC parallel region (or with no enclosing target),
	 gang/worker/vector clauses may not carry arguments.  */
      if (!tgt || is_oacc_parallel (tgt))
	for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
	  {
	    char const *check = NULL;

	    switch (OMP_CLAUSE_CODE (c))
	      {
	      case OMP_CLAUSE_GANG:
		check = "gang";
		break;

	      case OMP_CLAUSE_WORKER:
		check = "worker";
		break;

	      case OMP_CLAUSE_VECTOR:
		check = "vector";
		break;

	      default:
		break;
	      }

	    if (check && OMP_CLAUSE_OPERAND (c, 0))
	      error_at (gimple_location (stmt),
			"argument not permitted on %qs clause in"
			" OpenACC %<parallel%>", check);
	  }

      if (tgt && is_oacc_kernels (tgt))
	{
	  /* Strip out reductions, as they are not handled yet.  */
	  tree *prev_ptr = &clauses;

	  while (tree probe = *prev_ptr)
	    {
	      tree *next_ptr = &OMP_CLAUSE_CHAIN (probe);

	      /* Unlink reduction clauses; keep everything else.  */
	      if (OMP_CLAUSE_CODE (probe) == OMP_CLAUSE_REDUCTION)
		*prev_ptr = *next_ptr;
	      else
		prev_ptr = next_ptr;
	    }

	  gimple_omp_for_set_clauses (stmt, clauses);
	  check_oacc_kernel_gwv (stmt, ctx);
	}
    }

  scan_sharing_clauses (clauses, ctx);

  /* Scan the pre-body, then the control operands of each collapsed
     loop, then the loop body.  */
  scan_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
  for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
    {
      scan_omp_op (gimple_omp_for_index_ptr (stmt, i), ctx);
      scan_omp_op (gimple_omp_for_initial_ptr (stmt, i), ctx);
      scan_omp_op (gimple_omp_for_final_ptr (stmt, i), ctx);
      scan_omp_op (gimple_omp_for_incr_ptr (stmt, i), ctx);
    }
  scan_omp (gimple_omp_body_ptr (stmt), ctx);
  return ctx;
}
2385 
/* Duplicate #pragma omp simd, one for SIMT, another one for SIMD.
   Replaces STMT at GSI with a GIMPLE_BIND of the shape

     cond = IFN_GOMP_USE_SIMT ();
     if (cond != 0) goto lab1; else goto lab2;
     lab1: <copy of the loop, with a _SIMT_ clause prepended>
	   goto lab3;
     lab2: <the original loop>
     lab3:

   so the choice between the SIMT and SIMD variants can be resolved
   later.  */

static void
scan_omp_simd (gimple_stmt_iterator *gsi, gomp_for *stmt,
	       omp_context *outer_ctx)
{
  gbind *bind = gimple_build_bind (NULL, NULL, NULL);
  gsi_replace (gsi, bind, false);
  gimple_seq seq = NULL;
  gimple *g = gimple_build_call_internal (IFN_GOMP_USE_SIMT, 0);
  tree cond = create_tmp_var_raw (integer_type_node);
  DECL_CONTEXT (cond) = current_function_decl;
  DECL_SEEN_IN_BIND_EXPR_P (cond) = 1;
  gimple_bind_set_vars (bind, cond);
  gimple_call_set_lhs (g, cond);
  gimple_seq_add_stmt (&seq, g);
  tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
  tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
  tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
  g = gimple_build_cond (NE_EXPR, cond, integer_zero_node, lab1, lab2);
  gimple_seq_add_stmt (&seq, g);
  g = gimple_build_label (lab1);
  gimple_seq_add_stmt (&seq, g);
  /* Make a deep copy of the loop and mark it with a _SIMT_ clause.  */
  gimple_seq new_seq = copy_gimple_seq_and_replace_locals (stmt);
  gomp_for *new_stmt = as_a <gomp_for *> (new_seq);
  tree clause = build_omp_clause (gimple_location (stmt), OMP_CLAUSE__SIMT_);
  OMP_CLAUSE_CHAIN (clause) = gimple_omp_for_clauses (new_stmt);
  gimple_omp_for_set_clauses (new_stmt, clause);
  gimple_seq_add_stmt (&seq, new_stmt);
  g = gimple_build_goto (lab3);
  gimple_seq_add_stmt (&seq, g);
  g = gimple_build_label (lab2);
  gimple_seq_add_stmt (&seq, g);
  gimple_seq_add_stmt (&seq, stmt);
  g = gimple_build_label (lab3);
  gimple_seq_add_stmt (&seq, g);
  gimple_bind_set_body (bind, seq);
  update_stmt (bind);
  /* Scan both copies and link the SIMT variant to the SIMD one's
     context.  */
  scan_omp_for (new_stmt, outer_ctx);
  scan_omp_for (stmt, outer_ctx)->simt_stmt = new_stmt;
}
2427 
2428 /* Scan an OpenMP sections directive.  */
2429 
2430 static void
2431 scan_omp_sections (gomp_sections *stmt, omp_context *outer_ctx)
2432 {
2433   omp_context *ctx;
2434 
2435   ctx = new_omp_context (stmt, outer_ctx);
2436   scan_sharing_clauses (gimple_omp_sections_clauses (stmt), ctx);
2437   scan_omp (gimple_omp_body_ptr (stmt), ctx);
2438 }
2439 
2440 /* Scan an OpenMP single directive.  */
2441 
2442 static void
2443 scan_omp_single (gomp_single *stmt, omp_context *outer_ctx)
2444 {
2445   omp_context *ctx;
2446   tree name;
2447 
2448   ctx = new_omp_context (stmt, outer_ctx);
2449   ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2450   ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2451   name = create_tmp_var_name (".omp_copy_s");
2452   name = build_decl (gimple_location (stmt),
2453 		     TYPE_DECL, name, ctx->record_type);
2454   TYPE_NAME (ctx->record_type) = name;
2455 
2456   scan_sharing_clauses (gimple_omp_single_clauses (stmt), ctx);
2457   scan_omp (gimple_omp_body_ptr (stmt), ctx);
2458 
2459   if (TYPE_FIELDS (ctx->record_type) == NULL)
2460     ctx->record_type = NULL;
2461   else
2462     layout_type (ctx->record_type);
2463 }
2464 
/* Scan a GIMPLE_OMP_TARGET.  Builds the context and the record type
   (.omp_data_t) describing mapped data, and for offloaded constructs
   also the child function.  */

static void
scan_omp_target (gomp_target *stmt, omp_context *outer_ctx)
{
  omp_context *ctx;
  tree name;
  /* Whether this target construct is outlined into its own function
     (as opposed to e.g. pure data-mapping constructs).  */
  bool offloaded = is_gimple_omp_offloaded (stmt);
  tree clauses = gimple_omp_target_clauses (stmt);

  ctx = new_omp_context (stmt, outer_ctx);
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  name = create_tmp_var_name (".omp_data_t");
  name = build_decl (gimple_location (stmt),
		     TYPE_DECL, name, ctx->record_type);
  DECL_ARTIFICIAL (name) = 1;
  DECL_NAMELESS (name) = 1;
  TYPE_NAME (ctx->record_type) = name;
  TYPE_ARTIFICIAL (ctx->record_type) = 1;

  if (offloaded)
    {
      create_omp_child_function (ctx, false);
      gimple_omp_target_set_child_fn (stmt, ctx->cb.dst_fn);
    }

  scan_sharing_clauses (clauses, ctx);
  scan_omp (gimple_omp_body_ptr (stmt), ctx);

  if (TYPE_FIELDS (ctx->record_type) == NULL)
    ctx->record_type = ctx->receiver_decl = NULL;
  else
    {
      /* Fields were chained up in reverse during scanning; restore
	 their original order before layout.  */
      TYPE_FIELDS (ctx->record_type)
	= nreverse (TYPE_FIELDS (ctx->record_type));
      if (flag_checking)
	{
	  /* Verify that every field shares the same alignment.  */
	  unsigned int align = DECL_ALIGN (TYPE_FIELDS (ctx->record_type));
	  for (tree field = TYPE_FIELDS (ctx->record_type);
	       field;
	       field = DECL_CHAIN (field))
	    gcc_assert (DECL_ALIGN (field) == align);
	}
      layout_type (ctx->record_type);
      if (offloaded)
	fixup_child_record_type (ctx);
    }
}
2514 
/* Scan an OpenMP teams directive.  Host teams are outlined like
   parallel/task (record type plus child function); non-host teams only
   need their clauses and body scanned.  */

static void
scan_omp_teams (gomp_teams *stmt, omp_context *outer_ctx)
{
  omp_context *ctx = new_omp_context (stmt, outer_ctx);

  /* Non-host teams need no data record or child function here.  */
  if (!gimple_omp_teams_host (stmt))
    {
      scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
      scan_omp (gimple_omp_body_ptr (stmt), ctx);
      return;
    }
  /* Host teams: build the .omp_data_s record and a child function,
     like a parallel/task region.  */
  taskreg_contexts.safe_push (ctx);
  gcc_assert (taskreg_nesting_level == 1);
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  tree name = create_tmp_var_name (".omp_data_s");
  name = build_decl (gimple_location (stmt),
		     TYPE_DECL, name, ctx->record_type);
  DECL_ARTIFICIAL (name) = 1;
  DECL_NAMELESS (name) = 1;
  TYPE_NAME (ctx->record_type) = name;
  TYPE_ARTIFICIAL (ctx->record_type) = 1;
  create_omp_child_function (ctx, false);
  gimple_omp_teams_set_child_fn (stmt, ctx->cb.dst_fn);

  scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
  scan_omp (gimple_omp_body_ptr (stmt), ctx);

  /* No fields added means no data to pass.  */
  if (TYPE_FIELDS (ctx->record_type) == NULL)
    ctx->record_type = ctx->receiver_decl = NULL;
}
2548 
2549 /* Check nesting restrictions.  */
2550 static bool
2551 check_omp_nesting_restrictions (gimple *stmt, omp_context *ctx)
2552 {
2553   tree c;
2554 
2555   if (ctx && gimple_code (ctx->stmt) == GIMPLE_OMP_GRID_BODY)
2556     /* GRID_BODY is an artificial construct, nesting rules will be checked in
2557        the original copy of its contents.  */
2558     return true;
2559 
2560   /* No nesting of non-OpenACC STMT (that is, an OpenMP one, or a GOMP builtin)
2561      inside an OpenACC CTX.  */
2562   if (!(is_gimple_omp (stmt)
2563 	&& is_gimple_omp_oacc (stmt))
2564       /* Except for atomic codes that we share with OpenMP.  */
2565       && !(gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
2566 	   || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE))
2567     {
2568       if (oacc_get_fn_attrib (cfun->decl) != NULL)
2569 	{
2570 	  error_at (gimple_location (stmt),
2571 		    "non-OpenACC construct inside of OpenACC routine");
2572 	  return false;
2573 	}
2574       else
2575 	for (omp_context *octx = ctx; octx != NULL; octx = octx->outer)
2576 	  if (is_gimple_omp (octx->stmt)
2577 	      && is_gimple_omp_oacc (octx->stmt))
2578 	    {
2579 	      error_at (gimple_location (stmt),
2580 			"non-OpenACC construct inside of OpenACC region");
2581 	      return false;
2582 	    }
2583     }
2584 
2585   if (ctx != NULL)
2586     {
2587       if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
2588 	  && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
2589 	{
2590 	  c = NULL_TREE;
2591 	  if (gimple_code (stmt) == GIMPLE_OMP_ORDERED)
2592 	    {
2593 	      c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
2594 	      if (omp_find_clause (c, OMP_CLAUSE_SIMD))
2595 		{
2596 		  if (omp_find_clause (c, OMP_CLAUSE_THREADS)
2597 		      && (ctx->outer == NULL
2598 			  || !gimple_omp_for_combined_into_p (ctx->stmt)
2599 			  || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR
2600 			  || (gimple_omp_for_kind (ctx->outer->stmt)
2601 			      != GF_OMP_FOR_KIND_FOR)
2602 			  || !gimple_omp_for_combined_p (ctx->outer->stmt)))
2603 		    {
2604 		      error_at (gimple_location (stmt),
2605 				"%<ordered simd threads%> must be closely "
2606 				"nested inside of %<for simd%> region");
2607 		      return false;
2608 		    }
2609 		  return true;
2610 		}
2611 	    }
2612 	  else if (gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
2613 		   || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE)
2614 	    return true;
2615 	  error_at (gimple_location (stmt),
2616 		    "OpenMP constructs other than %<#pragma omp ordered simd%>"
2617 		    " or %<#pragma omp atomic%> may not be nested inside"
2618 		    " %<simd%> region");
2619 	  return false;
2620 	}
2621       else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
2622 	{
2623 	  if ((gimple_code (stmt) != GIMPLE_OMP_FOR
2624 	       || ((gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_DISTRIBUTE)
2625 		   && (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_GRID_LOOP)))
2626 	      && gimple_code (stmt) != GIMPLE_OMP_PARALLEL)
2627 	    {
2628 	      error_at (gimple_location (stmt),
2629 			"only %<distribute%> or %<parallel%> regions are "
2630 			"allowed to be strictly nested inside %<teams%> "
2631 			"region");
2632 	      return false;
2633 	    }
2634 	}
2635     }
2636   switch (gimple_code (stmt))
2637     {
2638     case GIMPLE_OMP_FOR:
2639       if (gimple_omp_for_kind (stmt) & GF_OMP_FOR_SIMD)
2640 	return true;
2641       if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_DISTRIBUTE)
2642 	{
2643 	  if (ctx != NULL && gimple_code (ctx->stmt) != GIMPLE_OMP_TEAMS)
2644 	    {
2645 	      error_at (gimple_location (stmt),
2646 			"%<distribute%> region must be strictly nested "
2647 			"inside %<teams%> construct");
2648 	      return false;
2649 	    }
2650 	  return true;
2651 	}
2652       /* We split taskloop into task and nested taskloop in it.  */
2653       if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP)
2654 	return true;
2655       if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_OACC_LOOP)
2656 	{
2657 	  bool ok = false;
2658 
2659 	  if (ctx)
2660 	    switch (gimple_code (ctx->stmt))
2661 	      {
2662 	      case GIMPLE_OMP_FOR:
2663 		ok = (gimple_omp_for_kind (ctx->stmt)
2664 		      == GF_OMP_FOR_KIND_OACC_LOOP);
2665 		break;
2666 
2667 	      case GIMPLE_OMP_TARGET:
2668 		switch (gimple_omp_target_kind (ctx->stmt))
2669 		  {
2670 		  case GF_OMP_TARGET_KIND_OACC_PARALLEL:
2671 		  case GF_OMP_TARGET_KIND_OACC_KERNELS:
2672 		    ok = true;
2673 		    break;
2674 
2675 		  default:
2676 		    break;
2677 		  }
2678 
2679 	      default:
2680 		break;
2681 	      }
2682 	  else if (oacc_get_fn_attrib (current_function_decl))
2683 	    ok = true;
2684 	  if (!ok)
2685 	    {
2686 	      error_at (gimple_location (stmt),
2687 			"OpenACC loop directive must be associated with"
2688 			" an OpenACC compute region");
2689 	      return false;
2690 	    }
2691 	}
2692       /* FALLTHRU */
2693     case GIMPLE_CALL:
2694       if (is_gimple_call (stmt)
2695 	  && (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2696 	      == BUILT_IN_GOMP_CANCEL
2697 	      || DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2698 		 == BUILT_IN_GOMP_CANCELLATION_POINT))
2699 	{
2700 	  const char *bad = NULL;
2701 	  const char *kind = NULL;
2702 	  const char *construct
2703 	    = (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2704 	       == BUILT_IN_GOMP_CANCEL)
2705 	      ? "#pragma omp cancel"
2706 	      : "#pragma omp cancellation point";
2707 	  if (ctx == NULL)
2708 	    {
2709 	      error_at (gimple_location (stmt), "orphaned %qs construct",
2710 			construct);
2711 	      return false;
2712 	    }
2713 	  switch (tree_fits_shwi_p (gimple_call_arg (stmt, 0))
2714 		  ? tree_to_shwi (gimple_call_arg (stmt, 0))
2715 		  : 0)
2716 	    {
2717 	    case 1:
2718 	      if (gimple_code (ctx->stmt) != GIMPLE_OMP_PARALLEL)
2719 		bad = "#pragma omp parallel";
2720 	      else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2721 		       == BUILT_IN_GOMP_CANCEL
2722 		       && !integer_zerop (gimple_call_arg (stmt, 1)))
2723 		ctx->cancellable = true;
2724 	      kind = "parallel";
2725 	      break;
2726 	    case 2:
2727 	      if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
2728 		  || gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR)
2729 		bad = "#pragma omp for";
2730 	      else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2731 		       == BUILT_IN_GOMP_CANCEL
2732 		       && !integer_zerop (gimple_call_arg (stmt, 1)))
2733 		{
2734 		  ctx->cancellable = true;
2735 		  if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2736 				       OMP_CLAUSE_NOWAIT))
2737 		    warning_at (gimple_location (stmt), 0,
2738 				"%<#pragma omp cancel for%> inside "
2739 				"%<nowait%> for construct");
2740 		  if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2741 				       OMP_CLAUSE_ORDERED))
2742 		    warning_at (gimple_location (stmt), 0,
2743 				"%<#pragma omp cancel for%> inside "
2744 				"%<ordered%> for construct");
2745 		}
2746 	      kind = "for";
2747 	      break;
2748 	    case 4:
2749 	      if (gimple_code (ctx->stmt) != GIMPLE_OMP_SECTIONS
2750 		  && gimple_code (ctx->stmt) != GIMPLE_OMP_SECTION)
2751 		bad = "#pragma omp sections";
2752 	      else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2753 		       == BUILT_IN_GOMP_CANCEL
2754 		       && !integer_zerop (gimple_call_arg (stmt, 1)))
2755 		{
2756 		  if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
2757 		    {
2758 		      ctx->cancellable = true;
2759 		      if (omp_find_clause (gimple_omp_sections_clauses
2760 								(ctx->stmt),
2761 					   OMP_CLAUSE_NOWAIT))
2762 			warning_at (gimple_location (stmt), 0,
2763 				    "%<#pragma omp cancel sections%> inside "
2764 				    "%<nowait%> sections construct");
2765 		    }
2766 		  else
2767 		    {
2768 		      gcc_assert (ctx->outer
2769 				  && gimple_code (ctx->outer->stmt)
2770 				     == GIMPLE_OMP_SECTIONS);
2771 		      ctx->outer->cancellable = true;
2772 		      if (omp_find_clause (gimple_omp_sections_clauses
2773 							(ctx->outer->stmt),
2774 					   OMP_CLAUSE_NOWAIT))
2775 			warning_at (gimple_location (stmt), 0,
2776 				    "%<#pragma omp cancel sections%> inside "
2777 				    "%<nowait%> sections construct");
2778 		    }
2779 		}
2780 	      kind = "sections";
2781 	      break;
2782 	    case 8:
2783 	      if (!is_task_ctx (ctx)
2784 		  && (!is_taskloop_ctx (ctx)
2785 		      || ctx->outer == NULL
2786 		      || !is_task_ctx (ctx->outer)))
2787 		bad = "#pragma omp task";
2788 	      else
2789 		{
2790 		  for (omp_context *octx = ctx->outer;
2791 		       octx; octx = octx->outer)
2792 		    {
2793 		      switch (gimple_code (octx->stmt))
2794 			{
2795 			case GIMPLE_OMP_TASKGROUP:
2796 			  break;
2797 			case GIMPLE_OMP_TARGET:
2798 			  if (gimple_omp_target_kind (octx->stmt)
2799 			      != GF_OMP_TARGET_KIND_REGION)
2800 			    continue;
2801 			  /* FALLTHRU */
2802 			case GIMPLE_OMP_PARALLEL:
2803 			case GIMPLE_OMP_TEAMS:
2804 			  error_at (gimple_location (stmt),
2805 				    "%<%s taskgroup%> construct not closely "
2806 				    "nested inside of %<taskgroup%> region",
2807 				    construct);
2808 			  return false;
2809 			case GIMPLE_OMP_TASK:
2810 			  if (gimple_omp_task_taskloop_p (octx->stmt)
2811 			      && octx->outer
2812 			      && is_taskloop_ctx (octx->outer))
2813 			    {
2814 			      tree clauses
2815 				= gimple_omp_for_clauses (octx->outer->stmt);
2816 			      if (!omp_find_clause (clauses, OMP_CLAUSE_NOGROUP))
2817 				break;
2818 			    }
2819 			  continue;
2820 			default:
2821 			  continue;
2822 			}
2823 		      break;
2824 		    }
2825 		  ctx->cancellable = true;
2826 		}
2827 	      kind = "taskgroup";
2828 	      break;
2829 	    default:
2830 	      error_at (gimple_location (stmt), "invalid arguments");
2831 	      return false;
2832 	    }
2833 	  if (bad)
2834 	    {
2835 	      error_at (gimple_location (stmt),
2836 			"%<%s %s%> construct not closely nested inside of %qs",
2837 			construct, kind, bad);
2838 	      return false;
2839 	    }
2840 	}
2841       /* FALLTHRU */
2842     case GIMPLE_OMP_SECTIONS:
2843     case GIMPLE_OMP_SINGLE:
2844       for (; ctx != NULL; ctx = ctx->outer)
2845 	switch (gimple_code (ctx->stmt))
2846 	  {
2847 	  case GIMPLE_OMP_FOR:
2848 	    if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
2849 		&& gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
2850 	      break;
2851 	    /* FALLTHRU */
2852 	  case GIMPLE_OMP_SECTIONS:
2853 	  case GIMPLE_OMP_SINGLE:
2854 	  case GIMPLE_OMP_ORDERED:
2855 	  case GIMPLE_OMP_MASTER:
2856 	  case GIMPLE_OMP_TASK:
2857 	  case GIMPLE_OMP_CRITICAL:
2858 	    if (is_gimple_call (stmt))
2859 	      {
2860 		if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2861 		    != BUILT_IN_GOMP_BARRIER)
2862 		  return true;
2863 		error_at (gimple_location (stmt),
2864 			  "barrier region may not be closely nested inside "
2865 			  "of work-sharing, %<critical%>, %<ordered%>, "
2866 			  "%<master%>, explicit %<task%> or %<taskloop%> "
2867 			  "region");
2868 		return false;
2869 	      }
2870 	    error_at (gimple_location (stmt),
2871 		      "work-sharing region may not be closely nested inside "
2872 		      "of work-sharing, %<critical%>, %<ordered%>, "
2873 		      "%<master%>, explicit %<task%> or %<taskloop%> region");
2874 	    return false;
2875 	  case GIMPLE_OMP_PARALLEL:
2876 	  case GIMPLE_OMP_TEAMS:
2877 	    return true;
2878 	  case GIMPLE_OMP_TARGET:
2879 	    if (gimple_omp_target_kind (ctx->stmt)
2880 		== GF_OMP_TARGET_KIND_REGION)
2881 	      return true;
2882 	    break;
2883 	  default:
2884 	    break;
2885 	  }
2886       break;
2887     case GIMPLE_OMP_MASTER:
2888       for (; ctx != NULL; ctx = ctx->outer)
2889 	switch (gimple_code (ctx->stmt))
2890 	  {
2891 	  case GIMPLE_OMP_FOR:
2892 	    if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
2893 		&& gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
2894 	      break;
2895 	    /* FALLTHRU */
2896 	  case GIMPLE_OMP_SECTIONS:
2897 	  case GIMPLE_OMP_SINGLE:
2898 	  case GIMPLE_OMP_TASK:
2899 	    error_at (gimple_location (stmt),
2900 		      "%<master%> region may not be closely nested inside "
2901 		      "of work-sharing, explicit %<task%> or %<taskloop%> "
2902 		      "region");
2903 	    return false;
2904 	  case GIMPLE_OMP_PARALLEL:
2905 	  case GIMPLE_OMP_TEAMS:
2906 	    return true;
2907 	  case GIMPLE_OMP_TARGET:
2908 	    if (gimple_omp_target_kind (ctx->stmt)
2909 		== GF_OMP_TARGET_KIND_REGION)
2910 	      return true;
2911 	    break;
2912 	  default:
2913 	    break;
2914 	  }
2915       break;
2916     case GIMPLE_OMP_TASK:
2917       for (c = gimple_omp_task_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2918 	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
2919 	    && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
2920 		|| OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
2921 	  {
2922 	    enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
2923 	    error_at (OMP_CLAUSE_LOCATION (c),
2924 		      "%<depend(%s)%> is only allowed in %<omp ordered%>",
2925 		      kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
2926 	    return false;
2927 	  }
2928       break;
2929     case GIMPLE_OMP_ORDERED:
2930       for (c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
2931 	   c; c = OMP_CLAUSE_CHAIN (c))
2932 	{
2933 	  if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
2934 	    {
2935 	      gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREADS
2936 			  || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SIMD);
2937 	      continue;
2938 	    }
2939 	  enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
2940 	  if (kind == OMP_CLAUSE_DEPEND_SOURCE
2941 	      || kind == OMP_CLAUSE_DEPEND_SINK)
2942 	    {
2943 	      tree oclause;
2944 	      /* Look for containing ordered(N) loop.  */
2945 	      if (ctx == NULL
2946 		  || gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
2947 		  || (oclause
2948 			= omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2949 					   OMP_CLAUSE_ORDERED)) == NULL_TREE)
2950 		{
2951 		  error_at (OMP_CLAUSE_LOCATION (c),
2952 			    "%<ordered%> construct with %<depend%> clause "
2953 			    "must be closely nested inside an %<ordered%> "
2954 			    "loop");
2955 		  return false;
2956 		}
2957 	      else if (OMP_CLAUSE_ORDERED_EXPR (oclause) == NULL_TREE)
2958 		{
2959 		  error_at (OMP_CLAUSE_LOCATION (c),
2960 			    "%<ordered%> construct with %<depend%> clause "
2961 			    "must be closely nested inside a loop with "
2962 			    "%<ordered%> clause with a parameter");
2963 		  return false;
2964 		}
2965 	    }
2966 	  else
2967 	    {
2968 	      error_at (OMP_CLAUSE_LOCATION (c),
2969 			"invalid depend kind in omp %<ordered%> %<depend%>");
2970 	      return false;
2971 	    }
2972 	}
2973       c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
2974       if (omp_find_clause (c, OMP_CLAUSE_SIMD))
2975 	{
2976 	  /* ordered simd must be closely nested inside of simd region,
2977 	     and simd region must not encounter constructs other than
2978 	     ordered simd, therefore ordered simd may be either orphaned,
2979 	     or ctx->stmt must be simd.  The latter case is handled already
2980 	     earlier.  */
2981 	  if (ctx != NULL)
2982 	    {
2983 	      error_at (gimple_location (stmt),
2984 			"%<ordered%> %<simd%> must be closely nested inside "
2985 			"%<simd%> region");
2986 	      return false;
2987 	    }
2988 	}
2989       for (; ctx != NULL; ctx = ctx->outer)
2990 	switch (gimple_code (ctx->stmt))
2991 	  {
2992 	  case GIMPLE_OMP_CRITICAL:
2993 	  case GIMPLE_OMP_TASK:
2994 	  case GIMPLE_OMP_ORDERED:
2995 	  ordered_in_taskloop:
2996 	    error_at (gimple_location (stmt),
2997 		      "%<ordered%> region may not be closely nested inside "
2998 		      "of %<critical%>, %<ordered%>, explicit %<task%> or "
2999 		      "%<taskloop%> region");
3000 	    return false;
3001 	  case GIMPLE_OMP_FOR:
3002 	    if (gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP)
3003 	      goto ordered_in_taskloop;
3004 	    tree o;
3005 	    o = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3006 				 OMP_CLAUSE_ORDERED);
3007 	    if (o == NULL)
3008 	      {
3009 		error_at (gimple_location (stmt),
3010 			  "%<ordered%> region must be closely nested inside "
3011 			  "a loop region with an %<ordered%> clause");
3012 		return false;
3013 	      }
3014 	    if (OMP_CLAUSE_ORDERED_EXPR (o) != NULL_TREE
3015 		&& omp_find_clause (c, OMP_CLAUSE_DEPEND) == NULL_TREE)
3016 	      {
3017 		error_at (gimple_location (stmt),
3018 			  "%<ordered%> region without %<depend%> clause may "
3019 			  "not be closely nested inside a loop region with "
3020 			  "an %<ordered%> clause with a parameter");
3021 		return false;
3022 	      }
3023 	    return true;
3024 	  case GIMPLE_OMP_TARGET:
3025 	    if (gimple_omp_target_kind (ctx->stmt)
3026 		!= GF_OMP_TARGET_KIND_REGION)
3027 	      break;
3028 	    /* FALLTHRU */
3029 	  case GIMPLE_OMP_PARALLEL:
3030 	  case GIMPLE_OMP_TEAMS:
3031 	    error_at (gimple_location (stmt),
3032 		      "%<ordered%> region must be closely nested inside "
3033 		      "a loop region with an %<ordered%> clause");
3034 	    return false;
3035 	  default:
3036 	    break;
3037 	  }
3038       break;
3039     case GIMPLE_OMP_CRITICAL:
3040       {
3041 	tree this_stmt_name
3042 	  = gimple_omp_critical_name (as_a <gomp_critical *> (stmt));
3043 	for (; ctx != NULL; ctx = ctx->outer)
3044 	  if (gomp_critical *other_crit
3045 	        = dyn_cast <gomp_critical *> (ctx->stmt))
3046 	    if (this_stmt_name == gimple_omp_critical_name (other_crit))
3047 	      {
3048 		error_at (gimple_location (stmt),
3049 			  "%<critical%> region may not be nested inside "
3050 			   "a %<critical%> region with the same name");
3051 		return false;
3052 	      }
3053       }
3054       break;
3055     case GIMPLE_OMP_TEAMS:
3056       if ((ctx == NULL
3057            || gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET
3058            || gimple_omp_target_kind (ctx->stmt) != GF_OMP_TARGET_KIND_REGION)
3059 	  && lang_GNU_Fortran ())
3060 	{
3061 	  error_at (gimple_location (stmt),
3062 		    "%<teams%> construct not closely nested inside of "
3063 		    "%<target%> construct");
3064 	  return false;
3065 	}
3066       if (ctx == NULL)
3067 	break;
3068       else if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET
3069 	       || (gimple_omp_target_kind (ctx->stmt)
3070 		   != GF_OMP_TARGET_KIND_REGION))
3071 	{
3072 	  /* Teams construct can appear either strictly nested inside of
3073 	     target construct with no intervening stmts, or can be encountered
3074 	     only by initial task (so must not appear inside any OpenMP
3075 	     construct.  */
3076 	  error_at (gimple_location (stmt),
3077 		    "%<teams%> construct must be closely nested inside of "
3078 		    "%<target%> construct or not nested in any OpenMP "
3079 		    "construct");
3080 	  return false;
3081 	}
3082       break;
3083     case GIMPLE_OMP_TARGET:
3084       for (c = gimple_omp_target_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
3085 	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
3086 	    && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
3087 		|| OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
3088 	  {
3089 	    enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
3090 	    error_at (OMP_CLAUSE_LOCATION (c),
3091 		      "%<depend(%s)%> is only allowed in %<omp ordered%>",
3092 		      kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
3093 	    return false;
3094 	  }
3095       if (is_gimple_omp_offloaded (stmt)
3096 	  && oacc_get_fn_attrib (cfun->decl) != NULL)
3097 	{
3098 	  error_at (gimple_location (stmt),
3099 		    "OpenACC region inside of OpenACC routine, nested "
3100 		    "parallelism not supported yet");
3101 	  return false;
3102 	}
3103       for (; ctx != NULL; ctx = ctx->outer)
3104 	{
3105 	  if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET)
3106 	    {
3107 	      if (is_gimple_omp (stmt)
3108 		  && is_gimple_omp_oacc (stmt)
3109 		  && is_gimple_omp (ctx->stmt))
3110 		{
3111 		  error_at (gimple_location (stmt),
3112 			    "OpenACC construct inside of non-OpenACC region");
3113 		  return false;
3114 		}
3115 	      continue;
3116 	    }
3117 
3118 	  const char *stmt_name, *ctx_stmt_name;
3119 	  switch (gimple_omp_target_kind (stmt))
3120 	    {
3121 	    case GF_OMP_TARGET_KIND_REGION: stmt_name = "target"; break;
3122 	    case GF_OMP_TARGET_KIND_DATA: stmt_name = "target data"; break;
3123 	    case GF_OMP_TARGET_KIND_UPDATE: stmt_name = "target update"; break;
3124 	    case GF_OMP_TARGET_KIND_ENTER_DATA:
3125 	      stmt_name = "target enter data"; break;
3126 	    case GF_OMP_TARGET_KIND_EXIT_DATA:
3127 	      stmt_name = "target exit data"; break;
3128 	    case GF_OMP_TARGET_KIND_OACC_PARALLEL: stmt_name = "parallel"; break;
3129 	    case GF_OMP_TARGET_KIND_OACC_KERNELS: stmt_name = "kernels"; break;
3130 	    case GF_OMP_TARGET_KIND_OACC_DATA: stmt_name = "data"; break;
3131 	    case GF_OMP_TARGET_KIND_OACC_UPDATE: stmt_name = "update"; break;
3132 	    case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA:
3133 	      stmt_name = "enter/exit data"; break;
3134 	    case GF_OMP_TARGET_KIND_OACC_HOST_DATA: stmt_name = "host_data";
3135 	      break;
3136 	    default: gcc_unreachable ();
3137 	    }
3138 	  switch (gimple_omp_target_kind (ctx->stmt))
3139 	    {
3140 	    case GF_OMP_TARGET_KIND_REGION: ctx_stmt_name = "target"; break;
3141 	    case GF_OMP_TARGET_KIND_DATA: ctx_stmt_name = "target data"; break;
3142 	    case GF_OMP_TARGET_KIND_OACC_PARALLEL:
3143 	      ctx_stmt_name = "parallel"; break;
3144 	    case GF_OMP_TARGET_KIND_OACC_KERNELS:
3145 	      ctx_stmt_name = "kernels"; break;
3146 	    case GF_OMP_TARGET_KIND_OACC_DATA: ctx_stmt_name = "data"; break;
3147 	    case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
3148 	      ctx_stmt_name = "host_data"; break;
3149 	    default: gcc_unreachable ();
3150 	    }
3151 
3152 	  /* OpenACC/OpenMP mismatch?  */
3153 	  if (is_gimple_omp_oacc (stmt)
3154 	      != is_gimple_omp_oacc (ctx->stmt))
3155 	    {
3156 	      error_at (gimple_location (stmt),
3157 			"%s %qs construct inside of %s %qs region",
3158 			(is_gimple_omp_oacc (stmt)
3159 			 ? "OpenACC" : "OpenMP"), stmt_name,
3160 			(is_gimple_omp_oacc (ctx->stmt)
3161 			 ? "OpenACC" : "OpenMP"), ctx_stmt_name);
3162 	      return false;
3163 	    }
3164 	  if (is_gimple_omp_offloaded (ctx->stmt))
3165 	    {
3166 	      /* No GIMPLE_OMP_TARGET inside offloaded OpenACC CTX.  */
3167 	      if (is_gimple_omp_oacc (ctx->stmt))
3168 		{
3169 		  error_at (gimple_location (stmt),
3170 			    "%qs construct inside of %qs region",
3171 			    stmt_name, ctx_stmt_name);
3172 		  return false;
3173 		}
3174 	      else
3175 		{
3176 		  warning_at (gimple_location (stmt), 0,
3177 			      "%qs construct inside of %qs region",
3178 			      stmt_name, ctx_stmt_name);
3179 		}
3180 	    }
3181 	}
3182       break;
3183     default:
3184       break;
3185     }
3186   return true;
3187 }
3188 
3189 
3190 /* Helper function scan_omp.
3191 
3192    Callback for walk_tree or operators in walk_gimple_stmt used to
3193    scan for OMP directives in TP.  */
3194 
3195 static tree
3196 scan_omp_1_op (tree *tp, int *walk_subtrees, void *data)
3197 {
3198   struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
3199   omp_context *ctx = (omp_context *) wi->info;
3200   tree t = *tp;
3201 
3202   switch (TREE_CODE (t))
3203     {
3204     case VAR_DECL:
3205     case PARM_DECL:
3206     case LABEL_DECL:
3207     case RESULT_DECL:
3208       if (ctx)
3209 	{
3210 	  tree repl = remap_decl (t, &ctx->cb);
3211 	  gcc_checking_assert (TREE_CODE (repl) != ERROR_MARK);
3212 	  *tp = repl;
3213 	}
3214       break;
3215 
3216     default:
3217       if (ctx && TYPE_P (t))
3218 	*tp = remap_type (t, &ctx->cb);
3219       else if (!DECL_P (t))
3220 	{
3221 	  *walk_subtrees = 1;
3222 	  if (ctx)
3223 	    {
3224 	      tree tem = remap_type (TREE_TYPE (t), &ctx->cb);
3225 	      if (tem != TREE_TYPE (t))
3226 		{
3227 		  if (TREE_CODE (t) == INTEGER_CST)
3228 		    *tp = wide_int_to_tree (tem, wi::to_wide (t));
3229 		  else
3230 		    TREE_TYPE (t) = tem;
3231 		}
3232 	    }
3233 	}
3234       break;
3235     }
3236 
3237   return NULL_TREE;
3238 }
3239 
3240 /* Return true if FNDECL is a setjmp or a longjmp.  */
3241 
3242 static bool
3243 setjmp_or_longjmp_p (const_tree fndecl)
3244 {
3245   if (fndecl_built_in_p (fndecl, BUILT_IN_SETJMP)
3246       || fndecl_built_in_p (fndecl, BUILT_IN_LONGJMP))
3247     return true;
3248 
3249   tree declname = DECL_NAME (fndecl);
3250   if (!declname)
3251     return false;
3252   const char *name = IDENTIFIER_POINTER (declname);
3253   return !strcmp (name, "setjmp") || !strcmp (name, "longjmp");
3254 }
3255 
3256 
/* Helper function for scan_omp.

   Callback for walk_gimple_stmt used to scan for OMP directives in
   the current statement in GSI.  Enforces OMP nesting restrictions
   (replacing offending statements with a nop) and dispatches each
   OMP construct to its dedicated scan_* routine.  */

static tree
scan_omp_1_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
		 struct walk_stmt_info *wi)
{
  gimple *stmt = gsi_stmt (*gsi);
  omp_context *ctx = (omp_context *) wi->info;

  if (gimple_has_location (stmt))
    input_location = gimple_location (stmt);

  /* Check the nesting restrictions.  */
  bool remove = false;
  if (is_gimple_omp (stmt))
    remove = !check_omp_nesting_restrictions (stmt, ctx);
  else if (is_gimple_call (stmt))
    {
      tree fndecl = gimple_call_fndecl (stmt);
      if (fndecl)
	{
	  /* setjmp/longjmp calls are rejected inside simd loops.  */
	  if (setjmp_or_longjmp_p (fndecl)
	      && ctx
	      && gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
	      && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
	    {
	      remove = true;
	      error_at (gimple_location (stmt),
			"setjmp/longjmp inside simd construct");
	    }
	  else if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
	    switch (DECL_FUNCTION_CODE (fndecl))
	      {
	      /* These GOMP builtins are subject to the same nesting
		 restrictions as the corresponding OMP statements.  */
	      case BUILT_IN_GOMP_BARRIER:
	      case BUILT_IN_GOMP_CANCEL:
	      case BUILT_IN_GOMP_CANCELLATION_POINT:
	      case BUILT_IN_GOMP_TASKYIELD:
	      case BUILT_IN_GOMP_TASKWAIT:
	      case BUILT_IN_GOMP_TASKGROUP_START:
	      case BUILT_IN_GOMP_TASKGROUP_END:
		remove = !check_omp_nesting_restrictions (stmt, ctx);
		break;
	      default:
		break;
	      }
	}
    }
  /* An invalidly nested statement is replaced by a nop so lowering
     can continue and report further errors.  */
  if (remove)
    {
      stmt = gimple_build_nop ();
      gsi_replace (gsi, stmt, false);
    }

  *handled_ops_p = true;

  switch (gimple_code (stmt))
    {
    case GIMPLE_OMP_PARALLEL:
      taskreg_nesting_level++;
      scan_omp_parallel (gsi, ctx);
      taskreg_nesting_level--;
      break;

    case GIMPLE_OMP_TASK:
      taskreg_nesting_level++;
      scan_omp_task (gsi, ctx);
      taskreg_nesting_level--;
      break;

    case GIMPLE_OMP_FOR:
      /* Use the SIMT scanning path for simd loops that may be
	 offloaded to a target supporting SIMT execution.  */
      if (((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
	    & GF_OMP_FOR_KIND_MASK) == GF_OMP_FOR_KIND_SIMD)
	  && omp_maybe_offloaded_ctx (ctx)
	  && omp_max_simt_vf ())
	scan_omp_simd (gsi, as_a <gomp_for *> (stmt), ctx);
      else
	scan_omp_for (as_a <gomp_for *> (stmt), ctx);
      break;

    case GIMPLE_OMP_SECTIONS:
      scan_omp_sections (as_a <gomp_sections *> (stmt), ctx);
      break;

    case GIMPLE_OMP_SINGLE:
      scan_omp_single (as_a <gomp_single *> (stmt), ctx);
      break;

    /* Constructs that need only a fresh context and a scan of their
       body, with no clause processing here.  */
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_GRID_BODY:
      ctx = new_omp_context (stmt, ctx);
      scan_omp (gimple_omp_body_ptr (stmt), ctx);
      break;

    case GIMPLE_OMP_TASKGROUP:
      ctx = new_omp_context (stmt, ctx);
      scan_sharing_clauses (gimple_omp_taskgroup_clauses (stmt), ctx);
      scan_omp (gimple_omp_body_ptr (stmt), ctx);
      break;

    case GIMPLE_OMP_TARGET:
      /* Offloaded target regions bump the nesting level like
	 parallel/task regions do.  */
      if (is_gimple_omp_offloaded (stmt))
	{
	  taskreg_nesting_level++;
	  scan_omp_target (as_a <gomp_target *> (stmt), ctx);
	  taskreg_nesting_level--;
	}
      else
	scan_omp_target (as_a <gomp_target *> (stmt), ctx);
      break;

    case GIMPLE_OMP_TEAMS:
      /* Host teams behave like a task region for nesting purposes.  */
      if (gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
	{
	  taskreg_nesting_level++;
	  scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
	  taskreg_nesting_level--;
	}
      else
	scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
      break;

    case GIMPLE_BIND:
      {
	tree var;

	/* Record bind-local variables as identity mappings so decl
	   remapping leaves them alone; keep walking the operands.  */
	*handled_ops_p = false;
	if (ctx)
	  for (var = gimple_bind_vars (as_a <gbind *> (stmt));
	       var ;
	       var = DECL_CHAIN (var))
	    insert_decl_map (&ctx->cb, var, var);
      }
      break;
    default:
      *handled_ops_p = false;
      break;
    }

  return NULL_TREE;
}
3403 
3404 
3405 /* Scan all the statements starting at the current statement.  CTX
3406    contains context information about the OMP directives and
3407    clauses found during the scan.  */
3408 
3409 static void
3410 scan_omp (gimple_seq *body_p, omp_context *ctx)
3411 {
3412   location_t saved_location;
3413   struct walk_stmt_info wi;
3414 
3415   memset (&wi, 0, sizeof (wi));
3416   wi.info = ctx;
3417   wi.want_locations = true;
3418 
3419   saved_location = input_location;
3420   walk_gimple_seq_mod (body_p, scan_omp_1_stmt, scan_omp_1_op, &wi);
3421   input_location = saved_location;
3422 }
3423 
3424 /* Re-gimplification and code generation routines.  */
3425 
3426 /* Remove omp_member_access_dummy_var variables from gimple_bind_vars
3427    of BIND if in a method.  */
3428 
3429 static void
3430 maybe_remove_omp_member_access_dummy_vars (gbind *bind)
3431 {
3432   if (DECL_ARGUMENTS (current_function_decl)
3433       && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
3434       && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
3435 	  == POINTER_TYPE))
3436     {
3437       tree vars = gimple_bind_vars (bind);
3438       for (tree *pvar = &vars; *pvar; )
3439 	if (omp_member_access_dummy_var (*pvar))
3440 	  *pvar = DECL_CHAIN (*pvar);
3441 	else
3442 	  pvar = &DECL_CHAIN (*pvar);
3443       gimple_bind_set_vars (bind, vars);
3444     }
3445 }
3446 
3447 /* Remove omp_member_access_dummy_var variables from BLOCK_VARS of
3448    block and its subblocks.  */
3449 
3450 static void
3451 remove_member_access_dummy_vars (tree block)
3452 {
3453   for (tree *pvar = &BLOCK_VARS (block); *pvar; )
3454     if (omp_member_access_dummy_var (*pvar))
3455       *pvar = DECL_CHAIN (*pvar);
3456     else
3457       pvar = &DECL_CHAIN (*pvar);
3458 
3459   for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
3460     remove_member_access_dummy_vars (block);
3461 }
3462 
3463 /* If a context was created for STMT when it was scanned, return it.  */
3464 
3465 static omp_context *
3466 maybe_lookup_ctx (gimple *stmt)
3467 {
3468   splay_tree_node n;
3469   n = splay_tree_lookup (all_contexts, (splay_tree_key) stmt);
3470   return n ? (omp_context *) n->value : NULL;
3471 }
3472 
3473 
3474 /* Find the mapping for DECL in CTX or the immediately enclosing
3475    context that has a mapping for DECL.
3476 
3477    If CTX is a nested parallel directive, we may have to use the decl
3478    mappings created in CTX's parent context.  Suppose that we have the
3479    following parallel nesting (variable UIDs showed for clarity):
3480 
3481 	iD.1562 = 0;
3482      	#omp parallel shared(iD.1562)		-> outer parallel
3483 	  iD.1562 = iD.1562 + 1;
3484 
3485 	  #omp parallel shared (iD.1562)	-> inner parallel
3486 	     iD.1562 = iD.1562 - 1;
3487 
3488    Each parallel structure will create a distinct .omp_data_s structure
3489    for copying iD.1562 in/out of the directive:
3490 
3491   	outer parallel		.omp_data_s.1.i -> iD.1562
3492 	inner parallel		.omp_data_s.2.i -> iD.1562
3493 
3494    A shared variable mapping will produce a copy-out operation before
3495    the parallel directive and a copy-in operation after it.  So, in
3496    this case we would have:
3497 
3498   	iD.1562 = 0;
3499 	.omp_data_o.1.i = iD.1562;
3500 	#omp parallel shared(iD.1562)		-> outer parallel
3501 	  .omp_data_i.1 = &.omp_data_o.1
3502 	  .omp_data_i.1->i = .omp_data_i.1->i + 1;
3503 
3504 	  .omp_data_o.2.i = iD.1562;		-> **
3505 	  #omp parallel shared(iD.1562)		-> inner parallel
3506 	    .omp_data_i.2 = &.omp_data_o.2
3507 	    .omp_data_i.2->i = .omp_data_i.2->i - 1;
3508 
3509 
3510     ** This is a problem.  The symbol iD.1562 cannot be referenced
3511        inside the body of the outer parallel region.  But since we are
3512        emitting this copy operation while expanding the inner parallel
3513        directive, we need to access the CTX structure of the outer
3514        parallel directive to get the correct mapping:
3515 
3516 	  .omp_data_o.2.i = .omp_data_i.1->i
3517 
3518     Since there may be other workshare or parallel directives enclosing
3519     the parallel directive, it may be necessary to walk up the context
3520     parent chain.  This is not a problem in general because nested
3521     parallelism happens only rarely.  */
3522 
3523 static tree
3524 lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
3525 {
3526   tree t;
3527   omp_context *up;
3528 
3529   for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
3530     t = maybe_lookup_decl (decl, up);
3531 
3532   gcc_assert (!ctx->is_nested || t || is_global_var (decl));
3533 
3534   return t ? t : decl;
3535 }
3536 
3537 
3538 /* Similar to lookup_decl_in_outer_ctx, but return DECL if not found
3539    in outer contexts.  */
3540 
3541 static tree
3542 maybe_lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
3543 {
3544   tree t = NULL;
3545   omp_context *up;
3546 
3547   for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
3548     t = maybe_lookup_decl (decl, up);
3549 
3550   return t ? t : decl;
3551 }
3552 
3553 
3554 /* Construct the initialization value for reduction operation OP.  */
3555 
3556 tree
3557 omp_reduction_init_op (location_t loc, enum tree_code op, tree type)
3558 {
3559   switch (op)
3560     {
3561     case PLUS_EXPR:
3562     case MINUS_EXPR:
3563     case BIT_IOR_EXPR:
3564     case BIT_XOR_EXPR:
3565     case TRUTH_OR_EXPR:
3566     case TRUTH_ORIF_EXPR:
3567     case TRUTH_XOR_EXPR:
3568     case NE_EXPR:
3569       return build_zero_cst (type);
3570 
3571     case MULT_EXPR:
3572     case TRUTH_AND_EXPR:
3573     case TRUTH_ANDIF_EXPR:
3574     case EQ_EXPR:
3575       return fold_convert_loc (loc, type, integer_one_node);
3576 
3577     case BIT_AND_EXPR:
3578       return fold_convert_loc (loc, type, integer_minus_one_node);
3579 
3580     case MAX_EXPR:
3581       if (SCALAR_FLOAT_TYPE_P (type))
3582 	{
3583 	  REAL_VALUE_TYPE max, min;
3584 	  if (HONOR_INFINITIES (type))
3585 	    {
3586 	      real_inf (&max);
3587 	      real_arithmetic (&min, NEGATE_EXPR, &max, NULL);
3588 	    }
3589 	  else
3590 	    real_maxval (&min, 1, TYPE_MODE (type));
3591 	  return build_real (type, min);
3592 	}
3593       else if (POINTER_TYPE_P (type))
3594 	{
3595 	  wide_int min
3596 	    = wi::min_value (TYPE_PRECISION (type), TYPE_SIGN (type));
3597 	  return wide_int_to_tree (type, min);
3598 	}
3599       else
3600 	{
3601 	  gcc_assert (INTEGRAL_TYPE_P (type));
3602 	  return TYPE_MIN_VALUE (type);
3603 	}
3604 
3605     case MIN_EXPR:
3606       if (SCALAR_FLOAT_TYPE_P (type))
3607 	{
3608 	  REAL_VALUE_TYPE max;
3609 	  if (HONOR_INFINITIES (type))
3610 	    real_inf (&max);
3611 	  else
3612 	    real_maxval (&max, 0, TYPE_MODE (type));
3613 	  return build_real (type, max);
3614 	}
3615       else if (POINTER_TYPE_P (type))
3616 	{
3617 	  wide_int max
3618 	    = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
3619 	  return wide_int_to_tree (type, max);
3620 	}
3621       else
3622 	{
3623 	  gcc_assert (INTEGRAL_TYPE_P (type));
3624 	  return TYPE_MAX_VALUE (type);
3625 	}
3626 
3627     default:
3628       gcc_unreachable ();
3629     }
3630 }
3631 
3632 /* Construct the initialization value for reduction CLAUSE.  */
3633 
3634 tree
3635 omp_reduction_init (tree clause, tree type)
3636 {
3637   return omp_reduction_init_op (OMP_CLAUSE_LOCATION (clause),
3638 				OMP_CLAUSE_REDUCTION_CODE (clause), type);
3639 }
3640 
/* Return alignment to be assumed for var in CLAUSE, which should be
   OMP_CLAUSE_ALIGNED.  */

static tree
omp_clause_aligned_alignment (tree clause)
{
  /* If the clause spells out an alignment, use it.  */
  if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
    return OMP_CLAUSE_ALIGNED_ALIGNMENT (clause);

  /* Otherwise return implementation defined alignment.  */
  unsigned int al = 1;
  opt_scalar_mode mode_iter;
  /* Find the widest vector size the target can autovectorize with.  */
  auto_vector_sizes sizes;
  targetm.vectorize.autovectorize_vector_sizes (&sizes);
  poly_uint64 vs = 0;
  for (unsigned int i = 0; i < sizes.length (); ++i)
    vs = ordered_max (vs, sizes[i]);
  /* Pairs of a scalar mode class and the vector class it should map
     to via the target's preferred SIMD mode.  */
  static enum mode_class classes[]
    = { MODE_INT, MODE_VECTOR_INT, MODE_FLOAT, MODE_VECTOR_FLOAT };
  for (int i = 0; i < 4; i += 2)
    /* The for loop above dictates that we only walk through scalar classes.  */
    FOR_EACH_MODE_IN_CLASS (mode_iter, classes[i])
      {
	scalar_mode mode = mode_iter.require ();
	/* Ask the target for its preferred vector mode for this
	   scalar mode; skip it if the class doesn't match.  */
	machine_mode vmode = targetm.vectorize.preferred_simd_mode (mode);
	if (GET_MODE_CLASS (vmode) != classes[i + 1])
	  continue;
	/* Widen VMODE up to the maximum supported size VS, doubling
	   each step while a wider mode exists.  */
	while (maybe_ne (vs, 0U)
	       && known_lt (GET_MODE_SIZE (vmode), vs)
	       && GET_MODE_2XWIDER_MODE (vmode).exists ())
	  vmode = GET_MODE_2XWIDER_MODE (vmode).require ();

	/* Build the corresponding vector type; skip modes the front
	   end cannot represent faithfully.  */
	tree type = lang_hooks.types.type_for_mode (mode, 1);
	if (type == NULL_TREE || TYPE_MODE (type) != mode)
	  continue;
	poly_uint64 nelts = exact_div (GET_MODE_SIZE (vmode),
				       GET_MODE_SIZE (mode));
	type = build_vector_type (type, nelts);
	if (TYPE_MODE (type) != vmode)
	  continue;
	/* Track the largest alignment seen over all candidate vector
	   types.  */
	if (TYPE_ALIGN_UNIT (type) > al)
	  al = TYPE_ALIGN_UNIT (type);
      }
  return build_int_cst (integer_type_node, al);
}
3686 
3687 
/* This structure is part of the interface between lower_rec_simd_input_clauses
   and lower_rec_input_clauses.  */

struct omplow_simd_context {
  /* Zero-initialize every member.  */
  omplow_simd_context () { memset (this, 0, sizeof (*this)); }
  /* Index variable used to subscript the per-lane "omp simd array"
     copies (see lower_rec_simd_input_clauses).  */
  tree idx;
  /* Lane variable, the second subscript into the same arrays.  */
  tree lane;
  /* For the SIMT path: addresses of privatized variables, collected
     as extra arguments.  */
  vec<tree, va_heap> simt_eargs;
  /* For the SIMT path: clobber assignments accumulated for the
     privatized variables.  */
  gimple_seq simt_dlist;
  /* Maximum vectorization factor; 0 means not yet computed, 1 means
     no simd privatization is performed.  */
  poly_uint64_pod max_vf;
  /* True when lowering for SIMT execution rather than SIMD.  */
  bool is_simt;
};
3700 
3701 /* Helper function of lower_rec_input_clauses, used for #pragma omp simd
3702    privatization.  */
3703 
3704 static bool
3705 lower_rec_simd_input_clauses (tree new_var, omp_context *ctx,
3706 			      omplow_simd_context *sctx, tree &ivar, tree &lvar)
3707 {
3708   if (known_eq (sctx->max_vf, 0U))
3709     {
3710       sctx->max_vf = sctx->is_simt ? omp_max_simt_vf () : omp_max_vf ();
3711       if (maybe_gt (sctx->max_vf, 1U))
3712 	{
3713 	  tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3714 				    OMP_CLAUSE_SAFELEN);
3715 	  if (c)
3716 	    {
3717 	      poly_uint64 safe_len;
3718 	      if (!poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
3719 		  || maybe_lt (safe_len, 1U))
3720 		sctx->max_vf = 1;
3721 	      else
3722 		sctx->max_vf = lower_bound (sctx->max_vf, safe_len);
3723 	    }
3724 	}
3725       if (maybe_gt (sctx->max_vf, 1U))
3726 	{
3727 	  sctx->idx = create_tmp_var (unsigned_type_node);
3728 	  sctx->lane = create_tmp_var (unsigned_type_node);
3729 	}
3730     }
3731   if (known_eq (sctx->max_vf, 1U))
3732     return false;
3733 
3734   if (sctx->is_simt)
3735     {
3736       if (is_gimple_reg (new_var))
3737 	{
3738 	  ivar = lvar = new_var;
3739 	  return true;
3740 	}
3741       tree type = TREE_TYPE (new_var), ptype = build_pointer_type (type);
3742       ivar = lvar = create_tmp_var (type);
3743       TREE_ADDRESSABLE (ivar) = 1;
3744       DECL_ATTRIBUTES (ivar) = tree_cons (get_identifier ("omp simt private"),
3745 					  NULL, DECL_ATTRIBUTES (ivar));
3746       sctx->simt_eargs.safe_push (build1 (ADDR_EXPR, ptype, ivar));
3747       tree clobber = build_constructor (type, NULL);
3748       TREE_THIS_VOLATILE (clobber) = 1;
3749       gimple *g = gimple_build_assign (ivar, clobber);
3750       gimple_seq_add_stmt (&sctx->simt_dlist, g);
3751     }
3752   else
3753     {
3754       tree atype = build_array_type_nelts (TREE_TYPE (new_var), sctx->max_vf);
3755       tree avar = create_tmp_var_raw (atype);
3756       if (TREE_ADDRESSABLE (new_var))
3757 	TREE_ADDRESSABLE (avar) = 1;
3758       DECL_ATTRIBUTES (avar)
3759 	= tree_cons (get_identifier ("omp simd array"), NULL,
3760 		     DECL_ATTRIBUTES (avar));
3761       gimple_add_tmp_var (avar);
3762       ivar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, sctx->idx,
3763 		     NULL_TREE, NULL_TREE);
3764       lvar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, sctx->lane,
3765 		     NULL_TREE, NULL_TREE);
3766     }
3767   if (DECL_P (new_var))
3768     {
3769       SET_DECL_VALUE_EXPR (new_var, lvar);
3770       DECL_HAS_VALUE_EXPR_P (new_var) = 1;
3771     }
3772   return true;
3773 }
3774 
3775 /* Helper function of lower_rec_input_clauses.  For a reference
3776    in simd reduction, add an underlying variable it will reference.  */
3777 
3778 static void
3779 handle_simd_reference (location_t loc, tree new_vard, gimple_seq *ilist)
3780 {
3781   tree z = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_vard)));
3782   if (TREE_CONSTANT (z))
3783     {
3784       z = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_vard)),
3785 			      get_name (new_vard));
3786       gimple_add_tmp_var (z);
3787       TREE_ADDRESSABLE (z) = 1;
3788       z = build_fold_addr_expr_loc (loc, z);
3789       gimplify_assign (new_vard, z, ilist);
3790     }
3791 }
3792 
3793 /* Helper function for lower_rec_input_clauses.  Emit into ilist sequence
3794    code to emit (type) (tskred_temp[idx]).  */
3795 
3796 static tree
3797 task_reduction_read (gimple_seq *ilist, tree tskred_temp, tree type,
3798 		     unsigned idx)
3799 {
3800   unsigned HOST_WIDE_INT sz
3801     = tree_to_uhwi (TYPE_SIZE_UNIT (pointer_sized_int_node));
3802   tree r = build2 (MEM_REF, pointer_sized_int_node,
3803 		   tskred_temp, build_int_cst (TREE_TYPE (tskred_temp),
3804 					       idx * sz));
3805   tree v = create_tmp_var (pointer_sized_int_node);
3806   gimple *g = gimple_build_assign (v, r);
3807   gimple_seq_add_stmt (ilist, g);
3808   if (!useless_type_conversion_p (type, pointer_sized_int_node))
3809     {
3810       v = create_tmp_var (type);
3811       g = gimple_build_assign (v, NOP_EXPR, gimple_assign_lhs (g));
3812       gimple_seq_add_stmt (ilist, g);
3813     }
3814   return v;
3815 }
3816 
3817 /* Generate code to implement the input clauses, FIRSTPRIVATE and COPYIN,
3818    from the receiver (aka child) side and initializers for REFERENCE_TYPE
3819    private variables.  Initialization statements go in ILIST, while calls
3820    to destructors go in DLIST.  */
3821 
3822 static void
3823 lower_rec_input_clauses (tree clauses, gimple_seq *ilist, gimple_seq *dlist,
3824 			 omp_context *ctx, struct omp_for_data *fd)
3825 {
3826   tree c, dtor, copyin_seq, x, ptr;
3827   bool copyin_by_ref = false;
3828   bool lastprivate_firstprivate = false;
3829   bool reduction_omp_orig_ref = false;
3830   int pass;
3831   bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
3832 		  && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD);
3833   omplow_simd_context sctx = omplow_simd_context ();
3834   tree simt_lane = NULL_TREE, simtrec = NULL_TREE;
3835   tree ivar = NULL_TREE, lvar = NULL_TREE, uid = NULL_TREE;
3836   gimple_seq llist[3] = { };
3837   tree nonconst_simd_if = NULL_TREE;
3838 
3839   copyin_seq = NULL;
3840   sctx.is_simt = is_simd && omp_find_clause (clauses, OMP_CLAUSE__SIMT_);
3841 
3842   /* Set max_vf=1 (which will later enforce safelen=1) in simd loops
3843      with data sharing clauses referencing variable sized vars.  That
3844      is unnecessarily hard to support and very unlikely to result in
3845      vectorized code anyway.  */
3846   if (is_simd)
3847     for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
3848       switch (OMP_CLAUSE_CODE (c))
3849 	{
3850 	case OMP_CLAUSE_LINEAR:
3851 	  if (OMP_CLAUSE_LINEAR_ARRAY (c))
3852 	    sctx.max_vf = 1;
3853 	  /* FALLTHRU */
3854 	case OMP_CLAUSE_PRIVATE:
3855 	case OMP_CLAUSE_FIRSTPRIVATE:
3856 	case OMP_CLAUSE_LASTPRIVATE:
3857 	  if (is_variable_sized (OMP_CLAUSE_DECL (c)))
3858 	    sctx.max_vf = 1;
3859 	  break;
3860 	case OMP_CLAUSE_REDUCTION:
3861 	case OMP_CLAUSE_IN_REDUCTION:
3862 	  if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
3863 	      || is_variable_sized (OMP_CLAUSE_DECL (c)))
3864 	    sctx.max_vf = 1;
3865 	  break;
3866 	case OMP_CLAUSE_IF:
3867 	  if (integer_zerop (OMP_CLAUSE_IF_EXPR (c)))
3868 	    sctx.max_vf = 1;
3869 	  else if (TREE_CODE (OMP_CLAUSE_IF_EXPR (c)) != INTEGER_CST)
3870 	    nonconst_simd_if = OMP_CLAUSE_IF_EXPR (c);
3871 	  break;
3872         case OMP_CLAUSE_SIMDLEN:
3873 	  if (integer_onep (OMP_CLAUSE_SIMDLEN_EXPR (c)))
3874 	    sctx.max_vf = 1;
3875 	  break;
3876 	default:
3877 	  continue;
3878 	}
3879 
3880   /* Add a placeholder for simduid.  */
3881   if (sctx.is_simt && maybe_ne (sctx.max_vf, 1U))
3882     sctx.simt_eargs.safe_push (NULL_TREE);
3883 
3884   unsigned task_reduction_cnt = 0;
3885   unsigned task_reduction_cntorig = 0;
3886   unsigned task_reduction_cnt_full = 0;
3887   unsigned task_reduction_cntorig_full = 0;
3888   unsigned task_reduction_other_cnt = 0;
3889   tree tskred_atype = NULL_TREE, tskred_avar = NULL_TREE;
3890   tree tskred_base = NULL_TREE, tskred_temp = NULL_TREE;
3891   /* Do all the fixed sized types in the first pass, and the variable sized
3892      types in the second pass.  This makes sure that the scalar arguments to
3893      the variable sized types are processed before we use them in the
3894      variable sized operations.  For task reductions we use 4 passes, in the
3895      first two we ignore them, in the third one gather arguments for
3896      GOMP_task_reduction_remap call and in the last pass actually handle
3897      the task reductions.  */
3898   for (pass = 0; pass < ((task_reduction_cnt || task_reduction_other_cnt)
3899 			 ? 4 : 2); ++pass)
3900     {
3901       if (pass == 2 && task_reduction_cnt)
3902 	{
3903 	  tskred_atype
3904 	    = build_array_type_nelts (ptr_type_node, task_reduction_cnt
3905 						     + task_reduction_cntorig);
3906 	  tskred_avar = create_tmp_var_raw (tskred_atype);
3907 	  gimple_add_tmp_var (tskred_avar);
3908 	  TREE_ADDRESSABLE (tskred_avar) = 1;
3909 	  task_reduction_cnt_full = task_reduction_cnt;
3910 	  task_reduction_cntorig_full = task_reduction_cntorig;
3911 	}
3912       else if (pass == 3 && task_reduction_cnt)
3913 	{
3914 	  x = builtin_decl_explicit (BUILT_IN_GOMP_TASK_REDUCTION_REMAP);
3915 	  gimple *g
3916 	    = gimple_build_call (x, 3, size_int (task_reduction_cnt),
3917 				 size_int (task_reduction_cntorig),
3918 				 build_fold_addr_expr (tskred_avar));
3919 	  gimple_seq_add_stmt (ilist, g);
3920 	}
3921       if (pass == 3 && task_reduction_other_cnt)
3922 	{
3923 	  /* For reduction clauses, build
3924 	     tskred_base = (void *) tskred_temp[2]
3925 			   + omp_get_thread_num () * tskred_temp[1]
3926 	     or if tskred_temp[1] is known to be constant, that constant
3927 	     directly.  This is the start of the private reduction copy block
3928 	     for the current thread.  */
3929 	  tree v = create_tmp_var (integer_type_node);
3930 	  x = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
3931 	  gimple *g = gimple_build_call (x, 0);
3932 	  gimple_call_set_lhs (g, v);
3933 	  gimple_seq_add_stmt (ilist, g);
3934 	  c = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
3935 	  tskred_temp = OMP_CLAUSE_DECL (c);
3936 	  if (is_taskreg_ctx (ctx))
3937 	    tskred_temp = lookup_decl (tskred_temp, ctx);
3938 	  tree v2 = create_tmp_var (sizetype);
3939 	  g = gimple_build_assign (v2, NOP_EXPR, v);
3940 	  gimple_seq_add_stmt (ilist, g);
3941 	  if (ctx->task_reductions[0])
3942 	    v = fold_convert (sizetype, ctx->task_reductions[0]);
3943 	  else
3944 	    v = task_reduction_read (ilist, tskred_temp, sizetype, 1);
3945 	  tree v3 = create_tmp_var (sizetype);
3946 	  g = gimple_build_assign (v3, MULT_EXPR, v2, v);
3947 	  gimple_seq_add_stmt (ilist, g);
3948 	  v = task_reduction_read (ilist, tskred_temp, ptr_type_node, 2);
3949 	  tskred_base = create_tmp_var (ptr_type_node);
3950 	  g = gimple_build_assign (tskred_base, POINTER_PLUS_EXPR, v, v3);
3951 	  gimple_seq_add_stmt (ilist, g);
3952 	}
3953       task_reduction_cnt = 0;
3954       task_reduction_cntorig = 0;
3955       task_reduction_other_cnt = 0;
3956       for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
3957 	{
3958 	  enum omp_clause_code c_kind = OMP_CLAUSE_CODE (c);
3959 	  tree var, new_var;
3960 	  bool by_ref;
3961 	  location_t clause_loc = OMP_CLAUSE_LOCATION (c);
3962 	  bool task_reduction_p = false;
3963 	  bool task_reduction_needs_orig_p = false;
3964 	  tree cond = NULL_TREE;
3965 
3966 	  switch (c_kind)
3967 	    {
3968 	    case OMP_CLAUSE_PRIVATE:
3969 	      if (OMP_CLAUSE_PRIVATE_DEBUG (c))
3970 		continue;
3971 	      break;
3972 	    case OMP_CLAUSE_SHARED:
3973 	      /* Ignore shared directives in teams construct inside
3974 		 of target construct.  */
3975 	      if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
3976 		  && !is_host_teams_ctx (ctx))
3977 		continue;
3978 	      if (maybe_lookup_decl (OMP_CLAUSE_DECL (c), ctx) == NULL)
3979 		{
3980 		  gcc_assert (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c)
3981 			      || is_global_var (OMP_CLAUSE_DECL (c)));
3982 		  continue;
3983 		}
3984 	    case OMP_CLAUSE_FIRSTPRIVATE:
3985 	    case OMP_CLAUSE_COPYIN:
3986 	      break;
3987 	    case OMP_CLAUSE_LINEAR:
3988 	      if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
3989 		  && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
3990 		lastprivate_firstprivate = true;
3991 	      break;
3992 	    case OMP_CLAUSE_REDUCTION:
3993 	    case OMP_CLAUSE_IN_REDUCTION:
3994 	      if (is_task_ctx (ctx) || OMP_CLAUSE_REDUCTION_TASK (c))
3995 		{
3996 		  task_reduction_p = true;
3997 		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
3998 		    {
3999 		      task_reduction_other_cnt++;
4000 		      if (pass == 2)
4001 			continue;
4002 		    }
4003 		  else
4004 		    task_reduction_cnt++;
4005 		  if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
4006 		    {
4007 		      var = OMP_CLAUSE_DECL (c);
4008 		      /* If var is a global variable that isn't privatized
4009 			 in outer contexts, we don't need to look up the
4010 			 original address, it is always the address of the
4011 			 global variable itself.  */
4012 		      if (!DECL_P (var)
4013 			  || omp_is_reference (var)
4014 			  || !is_global_var
4015 				(maybe_lookup_decl_in_outer_ctx (var, ctx)))
4016 			{
4017 			  task_reduction_needs_orig_p = true;
4018 			  if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
4019 			    task_reduction_cntorig++;
4020 			}
4021 		    }
4022 		}
4023 	      else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
4024 		reduction_omp_orig_ref = true;
4025 	      break;
4026 	    case OMP_CLAUSE__REDUCTEMP_:
4027 	      if (!is_taskreg_ctx (ctx))
4028 		continue;
4029 	      /* FALLTHRU */
4030 	    case OMP_CLAUSE__LOOPTEMP_:
4031 	      /* Handle _looptemp_/_reductemp_ clauses only on
4032 		 parallel/task.  */
4033 	      if (fd)
4034 		continue;
4035 	      break;
4036 	    case OMP_CLAUSE_LASTPRIVATE:
4037 	      if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
4038 		{
4039 		  lastprivate_firstprivate = true;
4040 		  if (pass != 0 || is_taskloop_ctx (ctx))
4041 		    continue;
4042 		}
4043 	      /* Even without corresponding firstprivate, if
4044 		 decl is Fortran allocatable, it needs outer var
4045 		 reference.  */
4046 	      else if (pass == 0
4047 		       && lang_hooks.decls.omp_private_outer_ref
4048 							(OMP_CLAUSE_DECL (c)))
4049 		lastprivate_firstprivate = true;
4050 	      break;
4051 	    case OMP_CLAUSE_ALIGNED:
4052 	      if (pass != 1)
4053 		continue;
4054 	      var = OMP_CLAUSE_DECL (c);
4055 	      if (TREE_CODE (TREE_TYPE (var)) == POINTER_TYPE
4056 		  && !is_global_var (var))
4057 		{
4058 		  new_var = maybe_lookup_decl (var, ctx);
4059 		  if (new_var == NULL_TREE)
4060 		    new_var = maybe_lookup_decl_in_outer_ctx (var, ctx);
4061 		  x = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
4062 		  tree alarg = omp_clause_aligned_alignment (c);
4063 		  alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
4064 		  x = build_call_expr_loc (clause_loc, x, 2, new_var, alarg);
4065 		  x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
4066 		  x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
4067 		  gimplify_and_add (x, ilist);
4068 		}
4069 	      else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
4070 		       && is_global_var (var))
4071 		{
4072 		  tree ptype = build_pointer_type (TREE_TYPE (var)), t, t2;
4073 		  new_var = lookup_decl (var, ctx);
4074 		  t = maybe_lookup_decl_in_outer_ctx (var, ctx);
4075 		  t = build_fold_addr_expr_loc (clause_loc, t);
4076 		  t2 = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
4077 		  tree alarg = omp_clause_aligned_alignment (c);
4078 		  alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
4079 		  t = build_call_expr_loc (clause_loc, t2, 2, t, alarg);
4080 		  t = fold_convert_loc (clause_loc, ptype, t);
4081 		  x = create_tmp_var (ptype);
4082 		  t = build2 (MODIFY_EXPR, ptype, x, t);
4083 		  gimplify_and_add (t, ilist);
4084 		  t = build_simple_mem_ref_loc (clause_loc, x);
4085 		  SET_DECL_VALUE_EXPR (new_var, t);
4086 		  DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4087 		}
4088 	      continue;
4089 	    default:
4090 	      continue;
4091 	    }
4092 
4093 	  if (task_reduction_p != (pass >= 2))
4094 	    continue;
4095 
4096 	  new_var = var = OMP_CLAUSE_DECL (c);
4097 	  if ((c_kind == OMP_CLAUSE_REDUCTION
4098 	       || c_kind == OMP_CLAUSE_IN_REDUCTION)
4099 	      && TREE_CODE (var) == MEM_REF)
4100 	    {
4101 	      var = TREE_OPERAND (var, 0);
4102 	      if (TREE_CODE (var) == POINTER_PLUS_EXPR)
4103 		var = TREE_OPERAND (var, 0);
4104 	      if (TREE_CODE (var) == INDIRECT_REF
4105 		  || TREE_CODE (var) == ADDR_EXPR)
4106 		var = TREE_OPERAND (var, 0);
4107 	      if (is_variable_sized (var))
4108 		{
4109 		  gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
4110 		  var = DECL_VALUE_EXPR (var);
4111 		  gcc_assert (TREE_CODE (var) == INDIRECT_REF);
4112 		  var = TREE_OPERAND (var, 0);
4113 		  gcc_assert (DECL_P (var));
4114 		}
4115 	      new_var = var;
4116 	    }
4117 	  if (c_kind != OMP_CLAUSE_COPYIN)
4118 	    new_var = lookup_decl (var, ctx);
4119 
4120 	  if (c_kind == OMP_CLAUSE_SHARED || c_kind == OMP_CLAUSE_COPYIN)
4121 	    {
4122 	      if (pass != 0)
4123 		continue;
4124 	    }
4125 	  /* C/C++ array section reductions.  */
4126 	  else if ((c_kind == OMP_CLAUSE_REDUCTION
4127 		    || c_kind == OMP_CLAUSE_IN_REDUCTION)
4128 		   && var != OMP_CLAUSE_DECL (c))
4129 	    {
4130 	      if (pass == 0)
4131 		continue;
4132 
4133 	      tree bias = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
4134 	      tree orig_var = TREE_OPERAND (OMP_CLAUSE_DECL (c), 0);
4135 
4136 	      if (TREE_CODE (orig_var) == POINTER_PLUS_EXPR)
4137 		{
4138 		  tree b = TREE_OPERAND (orig_var, 1);
4139 		  b = maybe_lookup_decl (b, ctx);
4140 		  if (b == NULL)
4141 		    {
4142 		      b = TREE_OPERAND (orig_var, 1);
4143 		      b = maybe_lookup_decl_in_outer_ctx (b, ctx);
4144 		    }
4145 		  if (integer_zerop (bias))
4146 		    bias = b;
4147 		  else
4148 		    {
4149 		      bias = fold_convert_loc (clause_loc,
4150 					       TREE_TYPE (b), bias);
4151 		      bias = fold_build2_loc (clause_loc, PLUS_EXPR,
4152 					      TREE_TYPE (b), b, bias);
4153 		    }
4154 		  orig_var = TREE_OPERAND (orig_var, 0);
4155 		}
4156 	      if (pass == 2)
4157 		{
4158 		  tree out = maybe_lookup_decl_in_outer_ctx (var, ctx);
4159 		  if (is_global_var (out)
4160 		      && TREE_CODE (TREE_TYPE (out)) != POINTER_TYPE
4161 		      && (TREE_CODE (TREE_TYPE (out)) != REFERENCE_TYPE
4162 			  || (TREE_CODE (TREE_TYPE (TREE_TYPE (out)))
4163 			      != POINTER_TYPE)))
4164 		    x = var;
4165 		  else
4166 		    {
4167 		      bool by_ref = use_pointer_for_field (var, NULL);
4168 		      x = build_receiver_ref (var, by_ref, ctx);
4169 		      if (TREE_CODE (TREE_TYPE (var)) == REFERENCE_TYPE
4170 			  && (TREE_CODE (TREE_TYPE (TREE_TYPE (var)))
4171 			      == POINTER_TYPE))
4172 			x = build_fold_addr_expr (x);
4173 		    }
4174 		  if (TREE_CODE (orig_var) == INDIRECT_REF)
4175 		    x = build_simple_mem_ref (x);
4176 		  else if (TREE_CODE (orig_var) == ADDR_EXPR)
4177 		    {
4178 		      if (var == TREE_OPERAND (orig_var, 0))
4179 			x = build_fold_addr_expr (x);
4180 		    }
4181 		  bias = fold_convert (sizetype, bias);
4182 		  x = fold_convert (ptr_type_node, x);
4183 		  x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
4184 				       TREE_TYPE (x), x, bias);
4185 		  unsigned cnt = task_reduction_cnt - 1;
4186 		  if (!task_reduction_needs_orig_p)
4187 		    cnt += (task_reduction_cntorig_full
4188 			    - task_reduction_cntorig);
4189 		  else
4190 		    cnt = task_reduction_cntorig - 1;
4191 		  tree r = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
4192 				   size_int (cnt), NULL_TREE, NULL_TREE);
4193 		  gimplify_assign (r, x, ilist);
4194 		  continue;
4195 		}
4196 
4197 	      if (TREE_CODE (orig_var) == INDIRECT_REF
4198 		  || TREE_CODE (orig_var) == ADDR_EXPR)
4199 		orig_var = TREE_OPERAND (orig_var, 0);
4200 	      tree d = OMP_CLAUSE_DECL (c);
4201 	      tree type = TREE_TYPE (d);
4202 	      gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
4203 	      tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
4204 	      const char *name = get_name (orig_var);
4205 	      if (pass == 3)
4206 		{
4207 		  tree xv = create_tmp_var (ptr_type_node);
4208 		  if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
4209 		    {
4210 		      unsigned cnt = task_reduction_cnt - 1;
4211 		      if (!task_reduction_needs_orig_p)
4212 			cnt += (task_reduction_cntorig_full
4213 				- task_reduction_cntorig);
4214 		      else
4215 			cnt = task_reduction_cntorig - 1;
4216 		      x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
4217 				  size_int (cnt), NULL_TREE, NULL_TREE);
4218 
4219 		      gimple *g = gimple_build_assign (xv, x);
4220 		      gimple_seq_add_stmt (ilist, g);
4221 		    }
4222 		  else
4223 		    {
4224 		      unsigned int idx = *ctx->task_reduction_map->get (c);
4225 		      tree off;
4226 		      if (ctx->task_reductions[1 + idx])
4227 			off = fold_convert (sizetype,
4228 					    ctx->task_reductions[1 + idx]);
4229 		      else
4230 			off = task_reduction_read (ilist, tskred_temp, sizetype,
4231 						   7 + 3 * idx + 1);
4232 		      gimple *g = gimple_build_assign (xv, POINTER_PLUS_EXPR,
4233 						       tskred_base, off);
4234 		      gimple_seq_add_stmt (ilist, g);
4235 		    }
4236 		  x = fold_convert (build_pointer_type (boolean_type_node),
4237 				    xv);
4238 		  if (TREE_CONSTANT (v))
4239 		    x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (x), x,
4240 				     TYPE_SIZE_UNIT (type));
4241 		  else
4242 		    {
4243 		      tree t = maybe_lookup_decl (v, ctx);
4244 		      if (t)
4245 			v = t;
4246 		      else
4247 			v = maybe_lookup_decl_in_outer_ctx (v, ctx);
4248 		      gimplify_expr (&v, ilist, NULL, is_gimple_val,
4249 				     fb_rvalue);
4250 		      t = fold_build2_loc (clause_loc, PLUS_EXPR,
4251 					   TREE_TYPE (v), v,
4252 					   build_int_cst (TREE_TYPE (v), 1));
4253 		      t = fold_build2_loc (clause_loc, MULT_EXPR,
4254 					   TREE_TYPE (v), t,
4255 					   TYPE_SIZE_UNIT (TREE_TYPE (type)));
4256 		      x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (x), x, t);
4257 		    }
4258 		  cond = create_tmp_var (TREE_TYPE (x));
4259 		  gimplify_assign (cond, x, ilist);
4260 		  x = xv;
4261 		}
4262 	      else if (TREE_CONSTANT (v))
4263 		{
4264 		  x = create_tmp_var_raw (type, name);
4265 		  gimple_add_tmp_var (x);
4266 		  TREE_ADDRESSABLE (x) = 1;
4267 		  x = build_fold_addr_expr_loc (clause_loc, x);
4268 		}
4269 	      else
4270 		{
4271 		  tree atmp
4272 		    = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
4273 		  tree t = maybe_lookup_decl (v, ctx);
4274 		  if (t)
4275 		    v = t;
4276 		  else
4277 		    v = maybe_lookup_decl_in_outer_ctx (v, ctx);
4278 		  gimplify_expr (&v, ilist, NULL, is_gimple_val, fb_rvalue);
4279 		  t = fold_build2_loc (clause_loc, PLUS_EXPR,
4280 				       TREE_TYPE (v), v,
4281 				       build_int_cst (TREE_TYPE (v), 1));
4282 		  t = fold_build2_loc (clause_loc, MULT_EXPR,
4283 				       TREE_TYPE (v), t,
4284 				       TYPE_SIZE_UNIT (TREE_TYPE (type)));
4285 		  tree al = size_int (TYPE_ALIGN (TREE_TYPE (type)));
4286 		  x = build_call_expr_loc (clause_loc, atmp, 2, t, al);
4287 		}
4288 
4289 	      tree ptype = build_pointer_type (TREE_TYPE (type));
4290 	      x = fold_convert_loc (clause_loc, ptype, x);
4291 	      tree y = create_tmp_var (ptype, name);
4292 	      gimplify_assign (y, x, ilist);
4293 	      x = y;
4294 	      tree yb = y;
4295 
4296 	      if (!integer_zerop (bias))
4297 		{
4298 		  bias = fold_convert_loc (clause_loc, pointer_sized_int_node,
4299 					   bias);
4300 		  yb = fold_convert_loc (clause_loc, pointer_sized_int_node,
4301 					 x);
4302 		  yb = fold_build2_loc (clause_loc, MINUS_EXPR,
4303 					pointer_sized_int_node, yb, bias);
4304 		  x = fold_convert_loc (clause_loc, TREE_TYPE (x), yb);
4305 		  yb = create_tmp_var (ptype, name);
4306 		  gimplify_assign (yb, x, ilist);
4307 		  x = yb;
4308 		}
4309 
4310 	      d = TREE_OPERAND (d, 0);
4311 	      if (TREE_CODE (d) == POINTER_PLUS_EXPR)
4312 		d = TREE_OPERAND (d, 0);
4313 	      if (TREE_CODE (d) == ADDR_EXPR)
4314 		{
4315 		  if (orig_var != var)
4316 		    {
4317 		      gcc_assert (is_variable_sized (orig_var));
4318 		      x = fold_convert_loc (clause_loc, TREE_TYPE (new_var),
4319 					    x);
4320 		      gimplify_assign (new_var, x, ilist);
4321 		      tree new_orig_var = lookup_decl (orig_var, ctx);
4322 		      tree t = build_fold_indirect_ref (new_var);
4323 		      DECL_IGNORED_P (new_var) = 0;
4324 		      TREE_THIS_NOTRAP (t) = 1;
4325 		      SET_DECL_VALUE_EXPR (new_orig_var, t);
4326 		      DECL_HAS_VALUE_EXPR_P (new_orig_var) = 1;
4327 		    }
4328 		  else
4329 		    {
4330 		      x = build2 (MEM_REF, TREE_TYPE (new_var), x,
4331 				  build_int_cst (ptype, 0));
4332 		      SET_DECL_VALUE_EXPR (new_var, x);
4333 		      DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4334 		    }
4335 		}
4336 	      else
4337 		{
4338 		  gcc_assert (orig_var == var);
4339 		  if (TREE_CODE (d) == INDIRECT_REF)
4340 		    {
4341 		      x = create_tmp_var (ptype, name);
4342 		      TREE_ADDRESSABLE (x) = 1;
4343 		      gimplify_assign (x, yb, ilist);
4344 		      x = build_fold_addr_expr_loc (clause_loc, x);
4345 		    }
4346 		  x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
4347 		  gimplify_assign (new_var, x, ilist);
4348 		}
4349 	      /* GOMP_taskgroup_reduction_register memsets the whole
4350 		 array to zero.  If the initializer is zero, we don't
4351 		 need to initialize it again, just mark it as ever
4352 		 used unconditionally, i.e. cond = true.  */
4353 	      if (cond
4354 		  && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) == NULL_TREE
4355 		  && initializer_zerop (omp_reduction_init (c,
4356 							    TREE_TYPE (type))))
4357 		{
4358 		  gimple *g = gimple_build_assign (build_simple_mem_ref (cond),
4359 						   boolean_true_node);
4360 		  gimple_seq_add_stmt (ilist, g);
4361 		  continue;
4362 		}
4363 	      tree end = create_artificial_label (UNKNOWN_LOCATION);
4364 	      if (cond)
4365 		{
4366 		  gimple *g;
4367 		  if (!is_parallel_ctx (ctx))
4368 		    {
4369 		      tree condv = create_tmp_var (boolean_type_node);
4370 		      g = gimple_build_assign (condv,
4371 					       build_simple_mem_ref (cond));
4372 		      gimple_seq_add_stmt (ilist, g);
4373 		      tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
4374 		      g = gimple_build_cond (NE_EXPR, condv,
4375 					     boolean_false_node, end, lab1);
4376 		      gimple_seq_add_stmt (ilist, g);
4377 		      gimple_seq_add_stmt (ilist, gimple_build_label (lab1));
4378 		    }
4379 		  g = gimple_build_assign (build_simple_mem_ref (cond),
4380 					   boolean_true_node);
4381 		  gimple_seq_add_stmt (ilist, g);
4382 		}
4383 
4384 	      tree y1 = create_tmp_var (ptype);
4385 	      gimplify_assign (y1, y, ilist);
4386 	      tree i2 = NULL_TREE, y2 = NULL_TREE;
4387 	      tree body2 = NULL_TREE, end2 = NULL_TREE;
4388 	      tree y3 = NULL_TREE, y4 = NULL_TREE;
4389 	      if (task_reduction_needs_orig_p)
4390 		{
4391 		  y3 = create_tmp_var (ptype);
4392 		  tree ref;
4393 		  if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
4394 		    ref = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
4395 				  size_int (task_reduction_cnt_full
4396 					    + task_reduction_cntorig - 1),
4397 				  NULL_TREE, NULL_TREE);
4398 		  else
4399 		    {
4400 		      unsigned int idx = *ctx->task_reduction_map->get (c);
4401 		      ref = task_reduction_read (ilist, tskred_temp, ptype,
4402 						 7 + 3 * idx);
4403 		    }
4404 		  gimplify_assign (y3, ref, ilist);
4405 		}
4406 	      else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) || is_simd)
4407 		{
4408 		  if (pass != 3)
4409 		    {
4410 		      y2 = create_tmp_var (ptype);
4411 		      gimplify_assign (y2, y, ilist);
4412 		    }
4413 		  if (is_simd || OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
4414 		    {
4415 		      tree ref = build_outer_var_ref (var, ctx);
4416 		      /* For ref build_outer_var_ref already performs this.  */
4417 		      if (TREE_CODE (d) == INDIRECT_REF)
4418 			gcc_assert (omp_is_reference (var));
4419 		      else if (TREE_CODE (d) == ADDR_EXPR)
4420 			ref = build_fold_addr_expr (ref);
4421 		      else if (omp_is_reference (var))
4422 			ref = build_fold_addr_expr (ref);
4423 		      ref = fold_convert_loc (clause_loc, ptype, ref);
4424 		      if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
4425 			  && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
4426 			{
4427 			  y3 = create_tmp_var (ptype);
4428 			  gimplify_assign (y3, unshare_expr (ref), ilist);
4429 			}
4430 		      if (is_simd)
4431 			{
4432 			  y4 = create_tmp_var (ptype);
4433 			  gimplify_assign (y4, ref, dlist);
4434 			}
4435 		    }
4436 		}
4437 	      tree i = create_tmp_var (TREE_TYPE (v));
4438 	      gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), ilist);
4439 	      tree body = create_artificial_label (UNKNOWN_LOCATION);
4440 	      gimple_seq_add_stmt (ilist, gimple_build_label (body));
4441 	      if (y2)
4442 		{
4443 		  i2 = create_tmp_var (TREE_TYPE (v));
4444 		  gimplify_assign (i2, build_int_cst (TREE_TYPE (v), 0), dlist);
4445 		  body2 = create_artificial_label (UNKNOWN_LOCATION);
4446 		  end2 = create_artificial_label (UNKNOWN_LOCATION);
4447 		  gimple_seq_add_stmt (dlist, gimple_build_label (body2));
4448 		}
4449 	      if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
4450 		{
4451 		  tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
4452 		  tree decl_placeholder
4453 		    = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
4454 		  SET_DECL_VALUE_EXPR (decl_placeholder,
4455 				       build_simple_mem_ref (y1));
4456 		  DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
4457 		  SET_DECL_VALUE_EXPR (placeholder,
4458 				       y3 ? build_simple_mem_ref (y3)
4459 				       : error_mark_node);
4460 		  DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
4461 		  x = lang_hooks.decls.omp_clause_default_ctor
4462 				(c, build_simple_mem_ref (y1),
4463 				 y3 ? build_simple_mem_ref (y3) : NULL_TREE);
4464 		  if (x)
4465 		    gimplify_and_add (x, ilist);
4466 		  if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
4467 		    {
4468 		      gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
4469 		      lower_omp (&tseq, ctx);
4470 		      gimple_seq_add_seq (ilist, tseq);
4471 		    }
4472 		  OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
4473 		  if (is_simd)
4474 		    {
4475 		      SET_DECL_VALUE_EXPR (decl_placeholder,
4476 					   build_simple_mem_ref (y2));
4477 		      SET_DECL_VALUE_EXPR (placeholder,
4478 					   build_simple_mem_ref (y4));
4479 		      gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
4480 		      lower_omp (&tseq, ctx);
4481 		      gimple_seq_add_seq (dlist, tseq);
4482 		      OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
4483 		    }
4484 		  DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
4485 		  DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 0;
4486 		  if (y2)
4487 		    {
4488 		      x = lang_hooks.decls.omp_clause_dtor
4489 						(c, build_simple_mem_ref (y2));
4490 		      if (x)
4491 			{
4492 			  gimple_seq tseq = NULL;
4493 			  dtor = x;
4494 			  gimplify_stmt (&dtor, &tseq);
4495 			  gimple_seq_add_seq (dlist, tseq);
4496 			}
4497 		    }
4498 		}
4499 	      else
4500 		{
4501 		  x = omp_reduction_init (c, TREE_TYPE (type));
4502 		  enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
4503 
4504 		  /* reduction(-:var) sums up the partial results, so it
4505 		     acts identically to reduction(+:var).  */
4506 		  if (code == MINUS_EXPR)
4507 		    code = PLUS_EXPR;
4508 
4509 		  gimplify_assign (build_simple_mem_ref (y1), x, ilist);
4510 		  if (is_simd)
4511 		    {
4512 		      x = build2 (code, TREE_TYPE (type),
4513 				  build_simple_mem_ref (y4),
4514 				  build_simple_mem_ref (y2));
4515 		      gimplify_assign (build_simple_mem_ref (y4), x, dlist);
4516 		    }
4517 		}
4518 	      gimple *g
4519 		= gimple_build_assign (y1, POINTER_PLUS_EXPR, y1,
4520 				       TYPE_SIZE_UNIT (TREE_TYPE (type)));
4521 	      gimple_seq_add_stmt (ilist, g);
4522 	      if (y3)
4523 		{
4524 		  g = gimple_build_assign (y3, POINTER_PLUS_EXPR, y3,
4525 					   TYPE_SIZE_UNIT (TREE_TYPE (type)));
4526 		  gimple_seq_add_stmt (ilist, g);
4527 		}
4528 	      g = gimple_build_assign (i, PLUS_EXPR, i,
4529 				       build_int_cst (TREE_TYPE (i), 1));
4530 	      gimple_seq_add_stmt (ilist, g);
4531 	      g = gimple_build_cond (LE_EXPR, i, v, body, end);
4532 	      gimple_seq_add_stmt (ilist, g);
4533 	      gimple_seq_add_stmt (ilist, gimple_build_label (end));
4534 	      if (y2)
4535 		{
4536 		  g = gimple_build_assign (y2, POINTER_PLUS_EXPR, y2,
4537 					   TYPE_SIZE_UNIT (TREE_TYPE (type)));
4538 		  gimple_seq_add_stmt (dlist, g);
4539 		  if (y4)
4540 		    {
4541 		      g = gimple_build_assign
4542 					(y4, POINTER_PLUS_EXPR, y4,
4543 					 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4544 		      gimple_seq_add_stmt (dlist, g);
4545 		    }
4546 		  g = gimple_build_assign (i2, PLUS_EXPR, i2,
4547 					   build_int_cst (TREE_TYPE (i2), 1));
4548 		  gimple_seq_add_stmt (dlist, g);
4549 		  g = gimple_build_cond (LE_EXPR, i2, v, body2, end2);
4550 		  gimple_seq_add_stmt (dlist, g);
4551 		  gimple_seq_add_stmt (dlist, gimple_build_label (end2));
4552 		}
4553 	      continue;
4554 	    }
4555 	  else if (pass == 2)
4556 	    {
4557 	      if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
4558 		x = var;
4559 	      else
4560 		{
4561 		  bool by_ref = use_pointer_for_field (var, ctx);
4562 		  x = build_receiver_ref (var, by_ref, ctx);
4563 		}
4564 	      if (!omp_is_reference (var))
4565 		x = build_fold_addr_expr (x);
4566 	      x = fold_convert (ptr_type_node, x);
4567 	      unsigned cnt = task_reduction_cnt - 1;
4568 	      if (!task_reduction_needs_orig_p)
4569 		cnt += task_reduction_cntorig_full - task_reduction_cntorig;
4570 	      else
4571 		cnt = task_reduction_cntorig - 1;
4572 	      tree r = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
4573 			       size_int (cnt), NULL_TREE, NULL_TREE);
4574 	      gimplify_assign (r, x, ilist);
4575 	      continue;
4576 	    }
4577 	  else if (pass == 3)
4578 	    {
4579 	      tree type = TREE_TYPE (new_var);
4580 	      if (!omp_is_reference (var))
4581 		type = build_pointer_type (type);
4582 	      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
4583 		{
4584 		  unsigned cnt = task_reduction_cnt - 1;
4585 		  if (!task_reduction_needs_orig_p)
4586 		    cnt += (task_reduction_cntorig_full
4587 			    - task_reduction_cntorig);
4588 		  else
4589 		    cnt = task_reduction_cntorig - 1;
4590 		  x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
4591 			      size_int (cnt), NULL_TREE, NULL_TREE);
4592 		}
4593 	      else
4594 		{
4595 		  unsigned int idx = *ctx->task_reduction_map->get (c);
4596 		  tree off;
4597 		  if (ctx->task_reductions[1 + idx])
4598 		    off = fold_convert (sizetype,
4599 					ctx->task_reductions[1 + idx]);
4600 		  else
4601 		    off = task_reduction_read (ilist, tskred_temp, sizetype,
4602 					       7 + 3 * idx + 1);
4603 		  x = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node,
4604 				   tskred_base, off);
4605 		}
4606 	      x = fold_convert (type, x);
4607 	      tree t;
4608 	      if (omp_is_reference (var))
4609 		{
4610 		  gimplify_assign (new_var, x, ilist);
4611 		  t = new_var;
4612 		  new_var = build_simple_mem_ref (new_var);
4613 		}
4614 	      else
4615 		{
4616 		  t = create_tmp_var (type);
4617 		  gimplify_assign (t, x, ilist);
4618 		  SET_DECL_VALUE_EXPR (new_var, build_simple_mem_ref (t));
4619 		  DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4620 		}
4621 	      t = fold_convert (build_pointer_type (boolean_type_node), t);
4622 	      t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t,
4623 			       TYPE_SIZE_UNIT (TREE_TYPE (type)));
4624 	      cond = create_tmp_var (TREE_TYPE (t));
4625 	      gimplify_assign (cond, t, ilist);
4626 	    }
4627 	  else if (is_variable_sized (var))
4628 	    {
4629 	      /* For variable sized types, we need to allocate the
4630 		 actual storage here.  Call alloca and store the
4631 		 result in the pointer decl that we created elsewhere.  */
4632 	      if (pass == 0)
4633 		continue;
4634 
4635 	      if (c_kind != OMP_CLAUSE_FIRSTPRIVATE || !is_task_ctx (ctx))
4636 		{
4637 		  gcall *stmt;
4638 		  tree tmp, atmp;
4639 
4640 		  ptr = DECL_VALUE_EXPR (new_var);
4641 		  gcc_assert (TREE_CODE (ptr) == INDIRECT_REF);
4642 		  ptr = TREE_OPERAND (ptr, 0);
4643 		  gcc_assert (DECL_P (ptr));
4644 		  x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
4645 
4646 		  /* void *tmp = __builtin_alloca */
4647 		  atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
4648 		  stmt = gimple_build_call (atmp, 2, x,
4649 					    size_int (DECL_ALIGN (var)));
4650 		  tmp = create_tmp_var_raw (ptr_type_node);
4651 		  gimple_add_tmp_var (tmp);
4652 		  gimple_call_set_lhs (stmt, tmp);
4653 
4654 		  gimple_seq_add_stmt (ilist, stmt);
4655 
4656 		  x = fold_convert_loc (clause_loc, TREE_TYPE (ptr), tmp);
4657 		  gimplify_assign (ptr, x, ilist);
4658 		}
4659 	    }
4660 	  else if (omp_is_reference (var)
4661 		   && (c_kind != OMP_CLAUSE_FIRSTPRIVATE
4662 		       || !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c)))
4663 	    {
4664 	      /* For references that are being privatized for Fortran,
4665 		 allocate new backing storage for the new pointer
4666 		 variable.  This allows us to avoid changing all the
4667 		 code that expects a pointer to something that expects
4668 		 a direct variable.  */
4669 	      if (pass == 0)
4670 		continue;
4671 
4672 	      x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
4673 	      if (c_kind == OMP_CLAUSE_FIRSTPRIVATE && is_task_ctx (ctx))
4674 		{
4675 		  x = build_receiver_ref (var, false, ctx);
4676 		  x = build_fold_addr_expr_loc (clause_loc, x);
4677 		}
4678 	      else if (TREE_CONSTANT (x))
4679 		{
4680 		  /* For reduction in SIMD loop, defer adding the
4681 		     initialization of the reference, because if we decide
4682 		     to use SIMD array for it, the initialization could cause
4683 		     expansion ICE.  */
4684 		  if (c_kind == OMP_CLAUSE_REDUCTION && is_simd)
4685 		    x = NULL_TREE;
4686 		  else
4687 		    {
4688 		      x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
4689 					      get_name (var));
4690 		      gimple_add_tmp_var (x);
4691 		      TREE_ADDRESSABLE (x) = 1;
4692 		      x = build_fold_addr_expr_loc (clause_loc, x);
4693 		    }
4694 		}
4695 	      else
4696 		{
4697 		  tree atmp
4698 		    = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
4699 		  tree rtype = TREE_TYPE (TREE_TYPE (new_var));
4700 		  tree al = size_int (TYPE_ALIGN (rtype));
4701 		  x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
4702 		}
4703 
4704 	      if (x)
4705 		{
4706 		  x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
4707 		  gimplify_assign (new_var, x, ilist);
4708 		}
4709 
4710 	      new_var = build_simple_mem_ref_loc (clause_loc, new_var);
4711 	    }
4712 	  else if ((c_kind == OMP_CLAUSE_REDUCTION
4713 		    || c_kind == OMP_CLAUSE_IN_REDUCTION)
4714 		   && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
4715 	    {
4716 	      if (pass == 0)
4717 		continue;
4718 	    }
4719 	  else if (pass != 0)
4720 	    continue;
4721 
4722 	  switch (OMP_CLAUSE_CODE (c))
4723 	    {
4724 	    case OMP_CLAUSE_SHARED:
4725 	      /* Ignore shared directives in teams construct inside
4726 		 target construct.  */
4727 	      if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
4728 		  && !is_host_teams_ctx (ctx))
4729 		continue;
4730 	      /* Shared global vars are just accessed directly.  */
4731 	      if (is_global_var (new_var))
4732 		break;
4733 	      /* For taskloop firstprivate/lastprivate, represented
4734 		 as firstprivate and shared clause on the task, new_var
4735 		 is the firstprivate var.  */
4736 	      if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
4737 		break;
4738 	      /* Set up the DECL_VALUE_EXPR for shared variables now.  This
4739 		 needs to be delayed until after fixup_child_record_type so
4740 		 that we get the correct type during the dereference.  */
4741 	      by_ref = use_pointer_for_field (var, ctx);
4742 	      x = build_receiver_ref (var, by_ref, ctx);
4743 	      SET_DECL_VALUE_EXPR (new_var, x);
4744 	      DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4745 
4746 	      /* ??? If VAR is not passed by reference, and the variable
4747 		 hasn't been initialized yet, then we'll get a warning for
4748 		 the store into the omp_data_s structure.  Ideally, we'd be
4749 		 able to notice this and not store anything at all, but
4750 		 we're generating code too early.  Suppress the warning.  */
4751 	      if (!by_ref)
4752 		TREE_NO_WARNING (var) = 1;
4753 	      break;
4754 
4755 	    case OMP_CLAUSE_LASTPRIVATE:
4756 	      if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
4757 		break;
4758 	      /* FALLTHRU */
4759 
4760 	    case OMP_CLAUSE_PRIVATE:
4761 	      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE)
4762 		x = build_outer_var_ref (var, ctx);
4763 	      else if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
4764 		{
4765 		  if (is_task_ctx (ctx))
4766 		    x = build_receiver_ref (var, false, ctx);
4767 		  else
4768 		    x = build_outer_var_ref (var, ctx, OMP_CLAUSE_PRIVATE);
4769 		}
4770 	      else
4771 		x = NULL;
4772 	    do_private:
4773 	      tree nx;
4774 	      nx = lang_hooks.decls.omp_clause_default_ctor
4775 						(c, unshare_expr (new_var), x);
4776 	      if (is_simd)
4777 		{
4778 		  tree y = lang_hooks.decls.omp_clause_dtor (c, new_var);
4779 		  if ((TREE_ADDRESSABLE (new_var) || nx || y
4780 		       || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
4781 		      && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4782 						       ivar, lvar))
4783 		    {
4784 		      if (nx)
4785 			x = lang_hooks.decls.omp_clause_default_ctor
4786 						(c, unshare_expr (ivar), x);
4787 		      if (nx && x)
4788 			gimplify_and_add (x, &llist[0]);
4789 		      if (y)
4790 			{
4791 			  y = lang_hooks.decls.omp_clause_dtor (c, ivar);
4792 			  if (y)
4793 			    {
4794 			      gimple_seq tseq = NULL;
4795 
4796 			      dtor = y;
4797 			      gimplify_stmt (&dtor, &tseq);
4798 			      gimple_seq_add_seq (&llist[1], tseq);
4799 			    }
4800 			}
4801 		      break;
4802 		    }
4803 		}
4804 	      if (nx)
4805 		gimplify_and_add (nx, ilist);
4806 	      /* FALLTHRU */
4807 
4808 	    do_dtor:
4809 	      x = lang_hooks.decls.omp_clause_dtor (c, new_var);
4810 	      if (x)
4811 		{
4812 		  gimple_seq tseq = NULL;
4813 
4814 		  dtor = x;
4815 		  gimplify_stmt (&dtor, &tseq);
4816 		  gimple_seq_add_seq (dlist, tseq);
4817 		}
4818 	      break;
4819 
4820 	    case OMP_CLAUSE_LINEAR:
4821 	      if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
4822 		goto do_firstprivate;
4823 	      if (OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
4824 		x = NULL;
4825 	      else
4826 		x = build_outer_var_ref (var, ctx);
4827 	      goto do_private;
4828 
4829 	    case OMP_CLAUSE_FIRSTPRIVATE:
4830 	      if (is_task_ctx (ctx))
4831 		{
4832 		  if ((omp_is_reference (var)
4833 		       && !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c))
4834 		      || is_variable_sized (var))
4835 		    goto do_dtor;
4836 		  else if (is_global_var (maybe_lookup_decl_in_outer_ctx (var,
4837 									  ctx))
4838 			   || use_pointer_for_field (var, NULL))
4839 		    {
4840 		      x = build_receiver_ref (var, false, ctx);
4841 		      SET_DECL_VALUE_EXPR (new_var, x);
4842 		      DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4843 		      goto do_dtor;
4844 		    }
4845 		}
4846 	      if (OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c)
4847 		  && omp_is_reference (var))
4848 		{
4849 		  x = build_outer_var_ref (var, ctx);
4850 		  gcc_assert (TREE_CODE (x) == MEM_REF
4851 			      && integer_zerop (TREE_OPERAND (x, 1)));
4852 		  x = TREE_OPERAND (x, 0);
4853 		  x = lang_hooks.decls.omp_clause_copy_ctor
4854 						(c, unshare_expr (new_var), x);
4855 		  gimplify_and_add (x, ilist);
4856 		  goto do_dtor;
4857 		}
4858 	    do_firstprivate:
4859 	      x = build_outer_var_ref (var, ctx);
4860 	      if (is_simd)
4861 		{
4862 		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
4863 		      && gimple_omp_for_combined_into_p (ctx->stmt))
4864 		    {
4865 		      tree t = OMP_CLAUSE_LINEAR_STEP (c);
4866 		      tree stept = TREE_TYPE (t);
4867 		      tree ct = omp_find_clause (clauses,
4868 						 OMP_CLAUSE__LOOPTEMP_);
4869 		      gcc_assert (ct);
4870 		      tree l = OMP_CLAUSE_DECL (ct);
4871 		      tree n1 = fd->loop.n1;
4872 		      tree step = fd->loop.step;
4873 		      tree itype = TREE_TYPE (l);
4874 		      if (POINTER_TYPE_P (itype))
4875 			itype = signed_type_for (itype);
4876 		      l = fold_build2 (MINUS_EXPR, itype, l, n1);
4877 		      if (TYPE_UNSIGNED (itype)
4878 			  && fd->loop.cond_code == GT_EXPR)
4879 			l = fold_build2 (TRUNC_DIV_EXPR, itype,
4880 					 fold_build1 (NEGATE_EXPR, itype, l),
4881 					 fold_build1 (NEGATE_EXPR,
4882 						      itype, step));
4883 		      else
4884 			l = fold_build2 (TRUNC_DIV_EXPR, itype, l, step);
4885 		      t = fold_build2 (MULT_EXPR, stept,
4886 				       fold_convert (stept, l), t);
4887 
4888 		      if (OMP_CLAUSE_LINEAR_ARRAY (c))
4889 			{
4890 			  x = lang_hooks.decls.omp_clause_linear_ctor
4891 							(c, new_var, x, t);
4892 			  gimplify_and_add (x, ilist);
4893 			  goto do_dtor;
4894 			}
4895 
4896 		      if (POINTER_TYPE_P (TREE_TYPE (x)))
4897 			x = fold_build2 (POINTER_PLUS_EXPR,
4898 					 TREE_TYPE (x), x, t);
4899 		      else
4900 			x = fold_build2 (PLUS_EXPR, TREE_TYPE (x), x, t);
4901 		    }
4902 
4903 		  if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_LINEAR
4904 		       || TREE_ADDRESSABLE (new_var))
4905 		      && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4906 						       ivar, lvar))
4907 		    {
4908 		      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR)
4909 			{
4910 			  tree iv = create_tmp_var (TREE_TYPE (new_var));
4911 			  x = lang_hooks.decls.omp_clause_copy_ctor (c, iv, x);
4912 			  gimplify_and_add (x, ilist);
4913 			  gimple_stmt_iterator gsi
4914 			    = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
4915 			  gassign *g
4916 			    = gimple_build_assign (unshare_expr (lvar), iv);
4917 			  gsi_insert_before_without_update (&gsi, g,
4918 							    GSI_SAME_STMT);
4919 			  tree t = OMP_CLAUSE_LINEAR_STEP (c);
4920 			  enum tree_code code = PLUS_EXPR;
4921 			  if (POINTER_TYPE_P (TREE_TYPE (new_var)))
4922 			    code = POINTER_PLUS_EXPR;
4923 			  g = gimple_build_assign (iv, code, iv, t);
4924 			  gsi_insert_before_without_update (&gsi, g,
4925 							    GSI_SAME_STMT);
4926 			  break;
4927 			}
4928 		      x = lang_hooks.decls.omp_clause_copy_ctor
4929 						(c, unshare_expr (ivar), x);
4930 		      gimplify_and_add (x, &llist[0]);
4931 		      x = lang_hooks.decls.omp_clause_dtor (c, ivar);
4932 		      if (x)
4933 			{
4934 			  gimple_seq tseq = NULL;
4935 
4936 			  dtor = x;
4937 			  gimplify_stmt (&dtor, &tseq);
4938 			  gimple_seq_add_seq (&llist[1], tseq);
4939 			}
4940 		      break;
4941 		    }
4942 		}
4943 	      x = lang_hooks.decls.omp_clause_copy_ctor
4944 						(c, unshare_expr (new_var), x);
4945 	      gimplify_and_add (x, ilist);
4946 	      goto do_dtor;
4947 
4948 	    case OMP_CLAUSE__LOOPTEMP_:
4949 	    case OMP_CLAUSE__REDUCTEMP_:
4950 	      gcc_assert (is_taskreg_ctx (ctx));
4951 	      x = build_outer_var_ref (var, ctx);
4952 	      x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
4953 	      gimplify_and_add (x, ilist);
4954 	      break;
4955 
4956 	    case OMP_CLAUSE_COPYIN:
4957 	      by_ref = use_pointer_for_field (var, NULL);
4958 	      x = build_receiver_ref (var, by_ref, ctx);
4959 	      x = lang_hooks.decls.omp_clause_assign_op (c, new_var, x);
4960 	      append_to_statement_list (x, &copyin_seq);
4961 	      copyin_by_ref |= by_ref;
4962 	      break;
4963 
4964 	    case OMP_CLAUSE_REDUCTION:
4965 	    case OMP_CLAUSE_IN_REDUCTION:
4966 	      /* OpenACC reductions are initialized using the
4967 		 GOACC_REDUCTION internal function.  */
4968 	      if (is_gimple_omp_oacc (ctx->stmt))
4969 		break;
4970 	      if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
4971 		{
4972 		  tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
4973 		  gimple *tseq;
4974 		  tree ptype = TREE_TYPE (placeholder);
4975 		  if (cond)
4976 		    {
4977 		      x = error_mark_node;
4978 		      if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c)
4979 			  && !task_reduction_needs_orig_p)
4980 			x = var;
4981 		      else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
4982 			{
4983 			  tree pptype = build_pointer_type (ptype);
4984 			  if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
4985 			    x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
4986 					size_int (task_reduction_cnt_full
4987 						  + task_reduction_cntorig - 1),
4988 					NULL_TREE, NULL_TREE);
4989 			  else
4990 			    {
4991 			      unsigned int idx
4992 				= *ctx->task_reduction_map->get (c);
4993 			      x = task_reduction_read (ilist, tskred_temp,
4994 						       pptype, 7 + 3 * idx);
4995 			    }
4996 			  x = fold_convert (pptype, x);
4997 			  x = build_simple_mem_ref (x);
4998 			}
4999 		    }
5000 		  else
5001 		    {
5002 		      x = build_outer_var_ref (var, ctx);
5003 
5004 		      if (omp_is_reference (var)
5005 			  && !useless_type_conversion_p (ptype, TREE_TYPE (x)))
5006 			x = build_fold_addr_expr_loc (clause_loc, x);
5007 		    }
5008 		  SET_DECL_VALUE_EXPR (placeholder, x);
5009 		  DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
5010 		  tree new_vard = new_var;
5011 		  if (omp_is_reference (var))
5012 		    {
5013 		      gcc_assert (TREE_CODE (new_var) == MEM_REF);
5014 		      new_vard = TREE_OPERAND (new_var, 0);
5015 		      gcc_assert (DECL_P (new_vard));
5016 		    }
5017 		  if (is_simd
5018 		      && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
5019 						       ivar, lvar))
5020 		    {
5021 		      if (new_vard == new_var)
5022 			{
5023 			  gcc_assert (DECL_VALUE_EXPR (new_var) == lvar);
5024 			  SET_DECL_VALUE_EXPR (new_var, ivar);
5025 			}
5026 		      else
5027 			{
5028 			  SET_DECL_VALUE_EXPR (new_vard,
5029 					       build_fold_addr_expr (ivar));
5030 			  DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
5031 			}
5032 		      x = lang_hooks.decls.omp_clause_default_ctor
5033 				(c, unshare_expr (ivar),
5034 				 build_outer_var_ref (var, ctx));
5035 		      if (x)
5036 			gimplify_and_add (x, &llist[0]);
5037 		      if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
5038 			{
5039 			  tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
5040 			  lower_omp (&tseq, ctx);
5041 			  gimple_seq_add_seq (&llist[0], tseq);
5042 			}
5043 		      OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
5044 		      tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
5045 		      lower_omp (&tseq, ctx);
5046 		      gimple_seq_add_seq (&llist[1], tseq);
5047 		      OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5048 		      DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
5049 		      if (new_vard == new_var)
5050 			SET_DECL_VALUE_EXPR (new_var, lvar);
5051 		      else
5052 			SET_DECL_VALUE_EXPR (new_vard,
5053 					     build_fold_addr_expr (lvar));
5054 		      x = lang_hooks.decls.omp_clause_dtor (c, ivar);
5055 		      if (x)
5056 			{
5057 			  tseq = NULL;
5058 			  dtor = x;
5059 			  gimplify_stmt (&dtor, &tseq);
5060 			  gimple_seq_add_seq (&llist[1], tseq);
5061 			}
5062 		      break;
5063 		    }
5064 		  /* If this is a reference to constant size reduction var
5065 		     with placeholder, we haven't emitted the initializer
5066 		     for it because it is undesirable if SIMD arrays are used.
5067 		     But if they aren't used, we need to emit the deferred
5068 		     initialization now.  */
5069 		  else if (omp_is_reference (var) && is_simd)
5070 		    handle_simd_reference (clause_loc, new_vard, ilist);
5071 
5072 		  tree lab2 = NULL_TREE;
5073 		  if (cond)
5074 		    {
5075 		      gimple *g;
5076 		      if (!is_parallel_ctx (ctx))
5077 			{
5078 			  tree condv = create_tmp_var (boolean_type_node);
5079 			  tree m = build_simple_mem_ref (cond);
5080 			  g = gimple_build_assign (condv, m);
5081 			  gimple_seq_add_stmt (ilist, g);
5082 			  tree lab1
5083 			    = create_artificial_label (UNKNOWN_LOCATION);
5084 			  lab2 = create_artificial_label (UNKNOWN_LOCATION);
5085 			  g = gimple_build_cond (NE_EXPR, condv,
5086 						 boolean_false_node,
5087 						 lab2, lab1);
5088 			  gimple_seq_add_stmt (ilist, g);
5089 			  gimple_seq_add_stmt (ilist,
5090 					       gimple_build_label (lab1));
5091 			}
5092 		      g = gimple_build_assign (build_simple_mem_ref (cond),
5093 					       boolean_true_node);
5094 		      gimple_seq_add_stmt (ilist, g);
5095 		    }
5096 		  x = lang_hooks.decls.omp_clause_default_ctor
5097 				(c, unshare_expr (new_var),
5098 				 cond ? NULL_TREE
5099 				 : build_outer_var_ref (var, ctx));
5100 		  if (x)
5101 		    gimplify_and_add (x, ilist);
5102 		  if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
5103 		    {
5104 		      tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
5105 		      lower_omp (&tseq, ctx);
5106 		      gimple_seq_add_seq (ilist, tseq);
5107 		    }
5108 		  OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
5109 		  if (is_simd)
5110 		    {
5111 		      tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
5112 		      lower_omp (&tseq, ctx);
5113 		      gimple_seq_add_seq (dlist, tseq);
5114 		      OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5115 		    }
5116 		  DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
5117 		  if (cond)
5118 		    {
5119 		      if (lab2)
5120 			gimple_seq_add_stmt (ilist, gimple_build_label (lab2));
5121 		      break;
5122 		    }
5123 		  goto do_dtor;
5124 		}
5125 	      else
5126 		{
5127 		  x = omp_reduction_init (c, TREE_TYPE (new_var));
5128 		  gcc_assert (TREE_CODE (TREE_TYPE (new_var)) != ARRAY_TYPE);
5129 		  enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
5130 
5131 		  if (cond)
5132 		    {
5133 		      gimple *g;
5134 		      tree lab2 = NULL_TREE;
5135 		      /* GOMP_taskgroup_reduction_register memsets the whole
5136 			 array to zero.  If the initializer is zero, we don't
5137 			 need to initialize it again, just mark it as ever
5138 			 used unconditionally, i.e. cond = true.  */
5139 		      if (initializer_zerop (x))
5140 			{
5141 			  g = gimple_build_assign (build_simple_mem_ref (cond),
5142 						   boolean_true_node);
5143 			  gimple_seq_add_stmt (ilist, g);
5144 			  break;
5145 			}
5146 
5147 		      /* Otherwise, emit
5148 			 if (!cond) { cond = true; new_var = x; }  */
5149 		      if (!is_parallel_ctx (ctx))
5150 			{
5151 			  tree condv = create_tmp_var (boolean_type_node);
5152 			  tree m = build_simple_mem_ref (cond);
5153 			  g = gimple_build_assign (condv, m);
5154 			  gimple_seq_add_stmt (ilist, g);
5155 			  tree lab1
5156 			    = create_artificial_label (UNKNOWN_LOCATION);
5157 			  lab2 = create_artificial_label (UNKNOWN_LOCATION);
5158 			  g = gimple_build_cond (NE_EXPR, condv,
5159 						 boolean_false_node,
5160 						 lab2, lab1);
5161 			  gimple_seq_add_stmt (ilist, g);
5162 			  gimple_seq_add_stmt (ilist,
5163 					       gimple_build_label (lab1));
5164 			}
5165 		      g = gimple_build_assign (build_simple_mem_ref (cond),
5166 					       boolean_true_node);
5167 		      gimple_seq_add_stmt (ilist, g);
5168 		      gimplify_assign (new_var, x, ilist);
5169 		      if (lab2)
5170 			gimple_seq_add_stmt (ilist, gimple_build_label (lab2));
5171 		      break;
5172 		    }
5173 
5174 		  /* reduction(-:var) sums up the partial results, so it
5175 		     acts identically to reduction(+:var).  */
5176 		  if (code == MINUS_EXPR)
5177 		    code = PLUS_EXPR;
5178 
5179 		  tree new_vard = new_var;
5180 		  if (is_simd && omp_is_reference (var))
5181 		    {
5182 		      gcc_assert (TREE_CODE (new_var) == MEM_REF);
5183 		      new_vard = TREE_OPERAND (new_var, 0);
5184 		      gcc_assert (DECL_P (new_vard));
5185 		    }
5186 		  if (is_simd
5187 		      && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
5188 						       ivar, lvar))
5189 		    {
5190 		      tree ref = build_outer_var_ref (var, ctx);
5191 
5192 		      gimplify_assign (unshare_expr (ivar), x, &llist[0]);
5193 
5194 		      if (sctx.is_simt)
5195 			{
5196 			  if (!simt_lane)
5197 			    simt_lane = create_tmp_var (unsigned_type_node);
5198 			  x = build_call_expr_internal_loc
5199 			    (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_BFLY,
5200 			     TREE_TYPE (ivar), 2, ivar, simt_lane);
5201 			  x = build2 (code, TREE_TYPE (ivar), ivar, x);
5202 			  gimplify_assign (ivar, x, &llist[2]);
5203 			}
5204 		      x = build2 (code, TREE_TYPE (ref), ref, ivar);
5205 		      ref = build_outer_var_ref (var, ctx);
5206 		      gimplify_assign (ref, x, &llist[1]);
5207 
5208 		      if (new_vard != new_var)
5209 			{
5210 			  SET_DECL_VALUE_EXPR (new_vard,
5211 					       build_fold_addr_expr (lvar));
5212 			  DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
5213 			}
5214 		    }
5215 		  else
5216 		    {
5217 		      if (omp_is_reference (var) && is_simd)
5218 			handle_simd_reference (clause_loc, new_vard, ilist);
5219 		      gimplify_assign (new_var, x, ilist);
5220 		      if (is_simd)
5221 			{
5222 			  tree ref = build_outer_var_ref (var, ctx);
5223 
5224 			  x = build2 (code, TREE_TYPE (ref), ref, new_var);
5225 			  ref = build_outer_var_ref (var, ctx);
5226 			  gimplify_assign (ref, x, dlist);
5227 			}
5228 		    }
5229 		}
5230 	      break;
5231 
5232 	    default:
5233 	      gcc_unreachable ();
5234 	    }
5235 	}
5236     }
5237   if (tskred_avar)
5238     {
5239       tree clobber = build_constructor (TREE_TYPE (tskred_avar), NULL);
5240       TREE_THIS_VOLATILE (clobber) = 1;
5241       gimple_seq_add_stmt (ilist, gimple_build_assign (tskred_avar, clobber));
5242     }
5243 
5244   if (known_eq (sctx.max_vf, 1U))
5245     sctx.is_simt = false;
5246 
5247   if (nonconst_simd_if)
5248     {
5249       if (sctx.lane == NULL_TREE)
5250 	{
5251 	  sctx.idx = create_tmp_var (unsigned_type_node);
5252 	  sctx.lane = create_tmp_var (unsigned_type_node);
5253 	}
5254       /* FIXME: For now.  */
5255       sctx.is_simt = false;
5256     }
5257 
5258   if (sctx.lane || sctx.is_simt)
5259     {
5260       uid = create_tmp_var (ptr_type_node, "simduid");
5261       /* Don't want uninit warnings on simduid, it is always uninitialized,
5262 	 but we use it not for the value, but for the DECL_UID only.  */
5263       TREE_NO_WARNING (uid) = 1;
5264       c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SIMDUID_);
5265       OMP_CLAUSE__SIMDUID__DECL (c) = uid;
5266       OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
5267       gimple_omp_for_set_clauses (ctx->stmt, c);
5268     }
5269   /* Emit calls denoting privatized variables and initializing a pointer to
5270      structure that holds private variables as fields after ompdevlow pass.  */
5271   if (sctx.is_simt)
5272     {
5273       sctx.simt_eargs[0] = uid;
5274       gimple *g
5275 	= gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, sctx.simt_eargs);
5276       gimple_call_set_lhs (g, uid);
5277       gimple_seq_add_stmt (ilist, g);
5278       sctx.simt_eargs.release ();
5279 
5280       simtrec = create_tmp_var (ptr_type_node, ".omp_simt");
5281       g = gimple_build_call_internal (IFN_GOMP_SIMT_ENTER_ALLOC, 1, uid);
5282       gimple_call_set_lhs (g, simtrec);
5283       gimple_seq_add_stmt (ilist, g);
5284     }
5285   if (sctx.lane)
5286     {
5287       gimple *g = gimple_build_call_internal (IFN_GOMP_SIMD_LANE,
5288 					      1 + (nonconst_simd_if != NULL),
5289 					      uid, nonconst_simd_if);
5290       gimple_call_set_lhs (g, sctx.lane);
5291       gimple_stmt_iterator gsi = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
5292       gsi_insert_before_without_update (&gsi, g, GSI_SAME_STMT);
5293       g = gimple_build_assign (sctx.lane, INTEGER_CST,
5294 			       build_int_cst (unsigned_type_node, 0));
5295       gimple_seq_add_stmt (ilist, g);
5296       /* Emit reductions across SIMT lanes in log_2(simt_vf) steps.  */
5297       if (llist[2])
5298 	{
5299 	  tree simt_vf = create_tmp_var (unsigned_type_node);
5300 	  g = gimple_build_call_internal (IFN_GOMP_SIMT_VF, 0);
5301 	  gimple_call_set_lhs (g, simt_vf);
5302 	  gimple_seq_add_stmt (dlist, g);
5303 
5304 	  tree t = build_int_cst (unsigned_type_node, 1);
5305 	  g = gimple_build_assign (simt_lane, INTEGER_CST, t);
5306 	  gimple_seq_add_stmt (dlist, g);
5307 
5308 	  t = build_int_cst (unsigned_type_node, 0);
5309 	  g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
5310 	  gimple_seq_add_stmt (dlist, g);
5311 
5312 	  tree body = create_artificial_label (UNKNOWN_LOCATION);
5313 	  tree header = create_artificial_label (UNKNOWN_LOCATION);
5314 	  tree end = create_artificial_label (UNKNOWN_LOCATION);
5315 	  gimple_seq_add_stmt (dlist, gimple_build_goto (header));
5316 	  gimple_seq_add_stmt (dlist, gimple_build_label (body));
5317 
5318 	  gimple_seq_add_seq (dlist, llist[2]);
5319 
5320 	  g = gimple_build_assign (simt_lane, LSHIFT_EXPR, simt_lane, integer_one_node);
5321 	  gimple_seq_add_stmt (dlist, g);
5322 
5323 	  gimple_seq_add_stmt (dlist, gimple_build_label (header));
5324 	  g = gimple_build_cond (LT_EXPR, simt_lane, simt_vf, body, end);
5325 	  gimple_seq_add_stmt (dlist, g);
5326 
5327 	  gimple_seq_add_stmt (dlist, gimple_build_label (end));
5328 	}
5329       for (int i = 0; i < 2; i++)
5330 	if (llist[i])
5331 	  {
5332 	    tree vf = create_tmp_var (unsigned_type_node);
5333 	    g = gimple_build_call_internal (IFN_GOMP_SIMD_VF, 1, uid);
5334 	    gimple_call_set_lhs (g, vf);
5335 	    gimple_seq *seq = i == 0 ? ilist : dlist;
5336 	    gimple_seq_add_stmt (seq, g);
5337 	    tree t = build_int_cst (unsigned_type_node, 0);
5338 	    g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
5339 	    gimple_seq_add_stmt (seq, g);
5340 	    tree body = create_artificial_label (UNKNOWN_LOCATION);
5341 	    tree header = create_artificial_label (UNKNOWN_LOCATION);
5342 	    tree end = create_artificial_label (UNKNOWN_LOCATION);
5343 	    gimple_seq_add_stmt (seq, gimple_build_goto (header));
5344 	    gimple_seq_add_stmt (seq, gimple_build_label (body));
5345 	    gimple_seq_add_seq (seq, llist[i]);
5346 	    t = build_int_cst (unsigned_type_node, 1);
5347 	    g = gimple_build_assign (sctx.idx, PLUS_EXPR, sctx.idx, t);
5348 	    gimple_seq_add_stmt (seq, g);
5349 	    gimple_seq_add_stmt (seq, gimple_build_label (header));
5350 	    g = gimple_build_cond (LT_EXPR, sctx.idx, vf, body, end);
5351 	    gimple_seq_add_stmt (seq, g);
5352 	    gimple_seq_add_stmt (seq, gimple_build_label (end));
5353 	  }
5354     }
5355   if (sctx.is_simt)
5356     {
5357       gimple_seq_add_seq (dlist, sctx.simt_dlist);
5358       gimple *g
5359 	= gimple_build_call_internal (IFN_GOMP_SIMT_EXIT, 1, simtrec);
5360       gimple_seq_add_stmt (dlist, g);
5361     }
5362 
5363   /* The copyin sequence is not to be executed by the main thread, since
5364      that would result in self-copies.  Perhaps not visible to scalars,
5365      but it certainly is to C++ operator=.  */
5366   if (copyin_seq)
5367     {
5368       x = build_call_expr (builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM),
5369 			   0);
5370       x = build2 (NE_EXPR, boolean_type_node, x,
5371 		  build_int_cst (TREE_TYPE (x), 0));
5372       x = build3 (COND_EXPR, void_type_node, x, copyin_seq, NULL);
5373       gimplify_and_add (x, ilist);
5374     }
5375 
5376   /* If any copyin variable is passed by reference, we must ensure the
5377      master thread doesn't modify it before it is copied over in all
5378      threads.  Similarly for variables in both firstprivate and
5379      lastprivate clauses we need to ensure the lastprivate copying
5380      happens after firstprivate copying in all threads.  And similarly
5381      for UDRs if initializer expression refers to omp_orig.  */
5382   if (copyin_by_ref || lastprivate_firstprivate || reduction_omp_orig_ref)
5383     {
5384       /* Don't add any barrier for #pragma omp simd or
5385 	 #pragma omp distribute.  */
5386       if (!is_task_ctx (ctx)
5387 	  && (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
5388 	      || gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_FOR))
5389 	gimple_seq_add_stmt (ilist, omp_build_barrier (NULL_TREE));
5390     }
5391 
5392   /* If max_vf is non-zero, then we can use only a vectorization factor
5393      up to the max_vf we chose.  So stick it into the safelen clause.  */
5394   if (maybe_ne (sctx.max_vf, 0U))
5395     {
5396       tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
5397 				OMP_CLAUSE_SAFELEN);
5398       poly_uint64 safe_len;
5399       if (c == NULL_TREE
5400 	  || (poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
5401 	      && maybe_gt (safe_len, sctx.max_vf)))
5402 	{
5403 	  c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_SAFELEN);
5404 	  OMP_CLAUSE_SAFELEN_EXPR (c) = build_int_cst (integer_type_node,
5405 						       sctx.max_vf);
5406 	  OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
5407 	  gimple_omp_for_set_clauses (ctx->stmt, c);
5408 	}
5409     }
5410 }
5411 
5412 
/* Generate code to implement the LASTPRIVATE clauses.  This is used for
   both parallel and workshare constructs.  PREDICATE may be NULL if it's
   always true.  The copy-back statements are appended to *STMT_LIST;
   when PREDICATE is non-NULL they are guarded so they only execute in
   the thread/iteration that ran the sequentially last iteration.  */

static void
lower_lastprivate_clauses (tree clauses, tree predicate, gimple_seq *stmt_list,
			   omp_context *ctx)
{
  tree x, c, label = NULL, orig_clauses = clauses;
  bool par_clauses = false;
  tree simduid = NULL, lastlane = NULL, simtcond = NULL, simtlast = NULL;

  /* Early exit if there are no lastprivate or linear clauses.  */
  for (; clauses ; clauses = OMP_CLAUSE_CHAIN (clauses))
    if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LASTPRIVATE
	|| (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LINEAR
	    && !OMP_CLAUSE_LINEAR_NO_COPYOUT (clauses)))
      break;
  if (clauses == NULL)
    {
      /* If this was a workshare clause, see if it had been combined
	 with its parallel.  In that case, look for the clauses on the
	 parallel statement itself.  */
      if (is_parallel_ctx (ctx))
	return;

      ctx = ctx->outer;
      if (ctx == NULL || !is_parallel_ctx (ctx))
	return;

      clauses = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
				 OMP_CLAUSE_LASTPRIVATE);
      if (clauses == NULL)
	return;
      par_clauses = true;
    }

  /* For simd loops, detect whether this may actually be a SIMT loop
     (_simt_ clause present) and pick up the simduid used to index the
     per-lane "omp simd array" copies.  */
  bool maybe_simt = false;
  if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
      && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
    {
      maybe_simt = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMT_);
      simduid = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMDUID_);
      if (simduid)
	simduid = OMP_CLAUSE__SIMDUID__DECL (simduid);
    }

  if (predicate)
    {
      /* Emit "if (predicate) goto label_true; else goto label;" so the
	 copy-back below only runs when the predicate holds.  LABEL is
	 emitted at the very end of this function.  */
      gcond *stmt;
      tree label_true, arm1, arm2;
      enum tree_code pred_code = TREE_CODE (predicate);

      label = create_artificial_label (UNKNOWN_LOCATION);
      label_true = create_artificial_label (UNKNOWN_LOCATION);
      if (TREE_CODE_CLASS (pred_code) == tcc_comparison)
	{
	  arm1 = TREE_OPERAND (predicate, 0);
	  arm2 = TREE_OPERAND (predicate, 1);
	  gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
	  gimplify_expr (&arm2, stmt_list, NULL, is_gimple_val, fb_rvalue);
	}
      else
	{
	  /* Not a comparison: test the predicate value against false.  */
	  arm1 = predicate;
	  gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
	  arm2 = boolean_false_node;
	  pred_code = NE_EXPR;
	}
      if (maybe_simt)
	{
	  /* On SIMT targets the predicate must be voted across all lanes
	     of the warp: take the branch if it is true in any lane.  */
	  c = build2 (pred_code, boolean_type_node, arm1, arm2);
	  c = fold_convert (integer_type_node, c);
	  simtcond = create_tmp_var (integer_type_node);
	  gimplify_assign (simtcond, c, stmt_list);
	  gcall *g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY,
						 1, simtcond);
	  c = create_tmp_var (integer_type_node);
	  gimple_call_set_lhs (g, c);
	  gimple_seq_add_stmt (stmt_list, g);
	  stmt = gimple_build_cond (NE_EXPR, c, integer_zero_node,
				    label_true, label);
	}
      else
	stmt = gimple_build_cond (pred_code, arm1, arm2, label_true, label);
      gimple_seq_add_stmt (stmt_list, stmt);
      gimple_seq_add_stmt (stmt_list, gimple_build_label (label_true));
    }

  /* Walk the clause chain; the chain may be continued onto the clauses
     of a combined enclosing parallel (see the bottom of the loop).  */
  for (c = clauses; c ;)
    {
      tree var, new_var;
      location_t clause_loc = OMP_CLAUSE_LOCATION (c);

      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
	  || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
	      && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
	{
	  var = OMP_CLAUSE_DECL (c);
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
	      && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
	      && is_taskloop_ctx (ctx))
	    {
	      /* For taskloop firstprivate+lastprivate, the private copy
		 lives in the enclosing task context.  */
	      gcc_checking_assert (ctx->outer && is_task_ctx (ctx->outer));
	      new_var = lookup_decl (var, ctx->outer);
	    }
	  else
	    {
	      new_var = lookup_decl (var, ctx);
	      /* Avoid uninitialized warnings for lastprivate and
		 for linear iterators.  */
	      if (predicate
		  && (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
		      || OMP_CLAUSE_LINEAR_NO_COPYIN (c)))
		TREE_NO_WARNING (new_var) = 1;
	    }

	  if (!maybe_simt && simduid && DECL_HAS_VALUE_EXPR_P (new_var))
	    {
	      /* Vectorized simd loop: the private copy is an element of an
		 "omp simd array"; read the element belonging to the lane
		 that executed the last iteration (IFN_GOMP_SIMD_LAST_LANE,
		 computed once and reused for all clauses).  */
	      tree val = DECL_VALUE_EXPR (new_var);
	      if (TREE_CODE (val) == ARRAY_REF
		  && VAR_P (TREE_OPERAND (val, 0))
		  && lookup_attribute ("omp simd array",
				       DECL_ATTRIBUTES (TREE_OPERAND (val,
								      0))))
		{
		  if (lastlane == NULL)
		    {
		      lastlane = create_tmp_var (unsigned_type_node);
		      gcall *g
			= gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
						      2, simduid,
						      TREE_OPERAND (val, 1));
		      gimple_call_set_lhs (g, lastlane);
		      gimple_seq_add_stmt (stmt_list, g);
		    }
		  new_var = build4 (ARRAY_REF, TREE_TYPE (val),
				    TREE_OPERAND (val, 0), lastlane,
				    NULL_TREE, NULL_TREE);
		}
	    }
	  else if (maybe_simt)
	    {
	      /* SIMT loop: fetch the value from the lane that executed the
		 last iteration via a warp shuffle (IFN_GOMP_SIMT_XCHG_IDX),
		 using the lane index voted above.  */
	      tree val = (DECL_HAS_VALUE_EXPR_P (new_var)
			  ? DECL_VALUE_EXPR (new_var)
			  : new_var);
	      if (simtlast == NULL)
		{
		  simtlast = create_tmp_var (unsigned_type_node);
		  gcall *g = gimple_build_call_internal
		    (IFN_GOMP_SIMT_LAST_LANE, 1, simtcond);
		  gimple_call_set_lhs (g, simtlast);
		  gimple_seq_add_stmt (stmt_list, g);
		}
	      x = build_call_expr_internal_loc
		(UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_IDX,
		 TREE_TYPE (val), 2, val, simtlast);
	      new_var = unshare_expr (new_var);
	      gimplify_assign (new_var, x, stmt_list);
	      new_var = unshare_expr (new_var);
	    }

	  /* Emit any deferred helper sequence stored on the clause (e.g.
	     for class types or linear step updates), lowering it first.  */
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
	      && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
	    {
	      lower_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
	      gimple_seq_add_seq (stmt_list,
				  OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
	      OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) = NULL;
	    }
	  else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
		   && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
	    {
	      lower_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
	      gimple_seq_add_seq (stmt_list,
				  OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
	      OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) = NULL;
	    }

	  /* Determine the outer (original) variable to copy back into.
	     For a taskloop IV that is a global, copy directly into the
	     global rather than through the task's record.  */
	  x = NULL_TREE;
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
	      && OMP_CLAUSE_LASTPRIVATE_TASKLOOP_IV (c))
	    {
	      gcc_checking_assert (is_taskloop_ctx (ctx));
	      tree ovar = maybe_lookup_decl_in_outer_ctx (var,
							  ctx->outer->outer);
	      if (is_global_var (ovar))
		x = ovar;
	    }
	  if (!x)
	    x = build_outer_var_ref (var, ctx, OMP_CLAUSE_LASTPRIVATE);
	  if (omp_is_reference (var))
	    new_var = build_simple_mem_ref_loc (clause_loc, new_var);
	  x = lang_hooks.decls.omp_clause_assign_op (c, x, new_var);
	  gimplify_and_add (x, stmt_list);
	}
      c = OMP_CLAUSE_CHAIN (c);
      if (c == NULL && !par_clauses)
	{
	  /* If this was a workshare clause, see if it had been combined
	     with its parallel.  In that case, continue looking for the
	     clauses also on the parallel statement itself.  */
	  if (is_parallel_ctx (ctx))
	    break;

	  ctx = ctx->outer;
	  if (ctx == NULL || !is_parallel_ctx (ctx))
	    break;

	  c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
			       OMP_CLAUSE_LASTPRIVATE);
	  par_clauses = true;
	}
    }

  if (label)
    gimple_seq_add_stmt (stmt_list, gimple_build_label (label));
}
5631 
5632 /* Lower the OpenACC reductions of CLAUSES for compute axis LEVEL
5633    (which might be a placeholder).  INNER is true if this is an inner
5634    axis of a multi-axis loop.  FORK and JOIN are (optional) fork and
5635    join markers.  Generate the before-loop forking sequence in
5636    FORK_SEQ and the after-loop joining sequence to JOIN_SEQ.  The
5637    general form of these sequences is
5638 
5639      GOACC_REDUCTION_SETUP
5640      GOACC_FORK
5641      GOACC_REDUCTION_INIT
5642      ...
5643      GOACC_REDUCTION_FINI
5644      GOACC_JOIN
5645      GOACC_REDUCTION_TEARDOWN.  */
5646 
static void
lower_oacc_reductions (location_t loc, tree clauses, tree level, bool inner,
		       gcall *fork, gcall *join, gimple_seq *fork_seq,
		       gimple_seq *join_seq, omp_context *ctx)
{
  /* Four sub-sequences, stitched around the fork/join markers at the
     end: SETUP before fork, INIT after fork, FINI before join,
     TEARDOWN after join.  */
  gimple_seq before_fork = NULL;
  gimple_seq after_fork = NULL;
  gimple_seq before_join = NULL;
  gimple_seq after_join = NULL;
  tree init_code = NULL_TREE, fini_code = NULL_TREE,
    setup_code = NULL_TREE, teardown_code = NULL_TREE;
  /* Running byte offset into the shared reduction buffer.  */
  unsigned offset = 0;

  for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
      {
	tree orig = OMP_CLAUSE_DECL (c);
	tree var = maybe_lookup_decl (orig, ctx);
	tree ref_to_res = NULL_TREE;
	tree incoming, outgoing, v1, v2, v3;
	bool is_private = false;

	/* Canonicalize the reduction operator: '-' accumulates like '+',
	   and the logical short-circuit ops behave like their bitwise
	   counterparts on the 0/1 partial results.  */
	enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
	if (rcode == MINUS_EXPR)
	  rcode = PLUS_EXPR;
	else if (rcode == TRUTH_ANDIF_EXPR)
	  rcode = BIT_AND_EXPR;
	else if (rcode == TRUTH_ORIF_EXPR)
	  rcode = BIT_IOR_EXPR;
	tree op = build_int_cst (unsigned_type_node, rcode);

	if (!var)
	  var = orig;

	incoming = outgoing = var;

	if (!inner)
	  {
	    /* See if an outer construct also reduces this variable.  */
	    omp_context *outer = ctx;

	    while (omp_context *probe = outer->outer)
	      {
		enum gimple_code type = gimple_code (probe->stmt);
		tree cls;

		switch (type)
		  {
		  case GIMPLE_OMP_FOR:
		    cls = gimple_omp_for_clauses (probe->stmt);
		    break;

		  case GIMPLE_OMP_TARGET:
		    if (gimple_omp_target_kind (probe->stmt)
			!= GF_OMP_TARGET_KIND_OACC_PARALLEL)
		      goto do_lookup;

		    cls = gimple_omp_target_clauses (probe->stmt);
		    break;

		  default:
		    goto do_lookup;
		  }

		outer = probe;
		for (; cls;  cls = OMP_CLAUSE_CHAIN (cls))
		  if (OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_REDUCTION
		      && orig == OMP_CLAUSE_DECL (cls))
		    {
		      /* The outer construct reduces the same variable;
			 chain into its private copy.  */
		      incoming = outgoing = lookup_decl (orig, probe);
		      goto has_outer_reduction;
		    }
		  else if ((OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_FIRSTPRIVATE
			    || OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_PRIVATE)
			   && orig == OMP_CLAUSE_DECL (cls))
		    {
		      is_private = true;
		      goto do_lookup;
		    }
	      }

	  do_lookup:
	    /* This is the outermost construct with this reduction,
	       see if there's a mapping for it.  */
	    if (gimple_code (outer->stmt) == GIMPLE_OMP_TARGET
		&& maybe_lookup_field (orig, outer) && !is_private)
	      {
		/* Mapped on the target: receive into the mapped slot,
		   seed the private copy with the operator's identity.  */
		ref_to_res = build_receiver_ref (orig, false, outer);
		if (omp_is_reference (orig))
		  ref_to_res = build_simple_mem_ref (ref_to_res);

		tree type = TREE_TYPE (var);
		if (POINTER_TYPE_P (type))
		  type = TREE_TYPE (type);

		outgoing = var;
		incoming = omp_reduction_init_op (loc, rcode, type);
	      }
	    else
	      {
		/* Try to look at enclosing contexts for reduction var,
		   use original if no mapping found.  */
		tree t = NULL_TREE;
		omp_context *c = ctx->outer;
		while (c && !t)
		  {
		    t = maybe_lookup_decl (orig, c);
		    c = c->outer;
		  }
		incoming = outgoing = (t ? t : orig);
	      }

	  has_outer_reduction:;
	  }

	if (!ref_to_res)
	  ref_to_res = integer_zero_node;

	if (omp_is_reference (orig))
	  {
	    /* Reduction over a by-reference variable: allocate a fresh
	       object (outermost level only) and three pointer temps so
	       the setup/init/fini/teardown calls each get a distinct
	       dereferenced operand.  */
	    tree type = TREE_TYPE (var);
	    const char *id = IDENTIFIER_POINTER (DECL_NAME (var));

	    if (!inner)
	      {
		tree x = create_tmp_var (TREE_TYPE (type), id);
		gimplify_assign (var, build_fold_addr_expr (x), fork_seq);
	      }

	    v1 = create_tmp_var (type, id);
	    v2 = create_tmp_var (type, id);
	    v3 = create_tmp_var (type, id);

	    gimplify_assign (v1, var, fork_seq);
	    gimplify_assign (v2, var, fork_seq);
	    gimplify_assign (v3, var, fork_seq);

	    var = build_simple_mem_ref (var);
	    v1 = build_simple_mem_ref (v1);
	    v2 = build_simple_mem_ref (v2);
	    v3 = build_simple_mem_ref (v3);
	    outgoing = build_simple_mem_ref (outgoing);

	    if (!TREE_CONSTANT (incoming))
	      incoming = build_simple_mem_ref (incoming);
	  }
	else
	  v1 = v2 = v3 = var;

	/* Determine position in reduction buffer, which may be used
	   by target.  The parser has ensured that this is not a
	   variable-sized type.  */
	fixed_size_mode mode
	  = as_a <fixed_size_mode> (TYPE_MODE (TREE_TYPE (var)));
	unsigned align = GET_MODE_ALIGNMENT (mode) /  BITS_PER_UNIT;
	offset = (offset + align - 1) & ~(align - 1);
	tree off = build_int_cst (sizetype, offset);
	offset += GET_MODE_SIZE (mode);

	/* The IFN_GOACC_REDUCTION phase codes are shared by all clauses;
	   build them lazily on the first reduction seen.  */
	if (!init_code)
	  {
	    init_code = build_int_cst (integer_type_node,
				       IFN_GOACC_REDUCTION_INIT);
	    fini_code = build_int_cst (integer_type_node,
				       IFN_GOACC_REDUCTION_FINI);
	    setup_code = build_int_cst (integer_type_node,
					IFN_GOACC_REDUCTION_SETUP);
	    teardown_code = build_int_cst (integer_type_node,
					   IFN_GOACC_REDUCTION_TEARDOWN);
	  }

	/* One IFN_GOACC_REDUCTION call per phase; the oacc transform
	   pass later expands these per-target.  */
	tree setup_call
	  = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
					  TREE_TYPE (var), 6, setup_code,
					  unshare_expr (ref_to_res),
					  incoming, level, op, off);
	tree init_call
	  = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
					  TREE_TYPE (var), 6, init_code,
					  unshare_expr (ref_to_res),
					  v1, level, op, off);
	tree fini_call
	  = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
					  TREE_TYPE (var), 6, fini_code,
					  unshare_expr (ref_to_res),
					  v2, level, op, off);
	tree teardown_call
	  = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
					  TREE_TYPE (var), 6, teardown_code,
					  ref_to_res, v3, level, op, off);

	gimplify_assign (v1, setup_call, &before_fork);
	gimplify_assign (v2, init_call, &after_fork);
	gimplify_assign (v3, fini_call, &before_join);
	gimplify_assign (outgoing, teardown_call, &after_join);
      }

  /* Now stitch things together.  */
  gimple_seq_add_seq (fork_seq, before_fork);
  if (fork)
    gimple_seq_add_stmt (fork_seq, fork);
  gimple_seq_add_seq (fork_seq, after_fork);

  gimple_seq_add_seq (join_seq, before_join);
  if (join)
    gimple_seq_add_stmt (join_seq, join);
  gimple_seq_add_seq (join_seq, after_join);
}
5855 
/* Generate code to implement the REDUCTION clauses, appending the
   combining code to *STMT_SEQP.  A single scalar reduction is emitted
   as an OMP_ATOMIC update; otherwise all combines are wrapped in a
   GOMP_atomic_start/GOMP_atomic_end critical section.  */

static void
lower_reduction_clauses (tree clauses, gimple_seq *stmt_seqp, omp_context *ctx)
{
  gimple_seq sub_seq = NULL;
  gimple *stmt;
  tree x, c;
  int count = 0;

  /* OpenACC loop reductions are handled elsewhere.  */
  if (is_gimple_omp_oacc (ctx->stmt))
    return;

  /* SIMD reductions are handled in lower_rec_input_clauses.  */
  if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
      && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
    return;

  /* First see if there is exactly one reduction clause.  Use OMP_ATOMIC
     update in that case, otherwise use a lock.  */
  for (c = clauses; c && count < 2; c = OMP_CLAUSE_CHAIN (c))
    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	&& !OMP_CLAUSE_REDUCTION_TASK (c))
      {
	if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
	    || TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
	  {
	    /* Never use OMP_ATOMIC for array reductions or UDRs.  */
	    count = -1;
	    break;
	  }
	count++;
      }

  if (count == 0)
    return;

  for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
    {
      tree var, ref, new_var, orig_var;
      enum tree_code code;
      location_t clause_loc = OMP_CLAUSE_LOCATION (c);

      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
	  || OMP_CLAUSE_REDUCTION_TASK (c))
	continue;

      enum omp_clause_code ccode = OMP_CLAUSE_REDUCTION;
      orig_var = var = OMP_CLAUSE_DECL (c);
      if (TREE_CODE (var) == MEM_REF)
	{
	  /* Array section reduction: peel the MEM_REF down to the
	     underlying base decl.  */
	  var = TREE_OPERAND (var, 0);
	  if (TREE_CODE (var) == POINTER_PLUS_EXPR)
	    var = TREE_OPERAND (var, 0);
	  if (TREE_CODE (var) == ADDR_EXPR)
	    var = TREE_OPERAND (var, 0);
	  else
	    {
	      /* If this is a pointer or referenced based array
		 section, the var could be private in the outer
		 context e.g. on orphaned loop construct.  Pretend this
		 is private variable's outer reference.  */
	      ccode = OMP_CLAUSE_PRIVATE;
	      if (TREE_CODE (var) == INDIRECT_REF)
		var = TREE_OPERAND (var, 0);
	    }
	  orig_var = var;
	  if (is_variable_sized (var))
	    {
	      /* VLA bases are accessed through their DECL_VALUE_EXPR
		 pointer dereference; use the pointer decl itself.  */
	      gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
	      var = DECL_VALUE_EXPR (var);
	      gcc_assert (TREE_CODE (var) == INDIRECT_REF);
	      var = TREE_OPERAND (var, 0);
	      gcc_assert (DECL_P (var));
	    }
	}
      new_var = lookup_decl (var, ctx);
      if (var == OMP_CLAUSE_DECL (c) && omp_is_reference (var))
	new_var = build_simple_mem_ref_loc (clause_loc, new_var);
      ref = build_outer_var_ref (var, ctx, ccode);
      code = OMP_CLAUSE_REDUCTION_CODE (c);

      /* reduction(-:var) sums up the partial results, so it acts
	 identically to reduction(+:var).  */
      if (code == MINUS_EXPR)
        code = PLUS_EXPR;

      if (count == 1)
	{
	  /* Single scalar reduction: emit one atomic update and stop
	     (no surrounding lock needed).  */
	  tree addr = build_fold_addr_expr_loc (clause_loc, ref);

	  addr = save_expr (addr);
	  ref = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (addr)), addr);
	  x = fold_build2_loc (clause_loc, code, TREE_TYPE (ref), ref, new_var);
	  x = build2 (OMP_ATOMIC, void_type_node, addr, x);
	  OMP_ATOMIC_MEMORY_ORDER (x) = OMP_MEMORY_ORDER_RELAXED;
	  gimplify_and_add (x, stmt_seqp);
	  return;
	}
      else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
	{
	  /* Array section reduction: emit an element-by-element combine
	     loop over the section, advancing a private and an outer
	     pointer in lockstep.  */
	  tree d = OMP_CLAUSE_DECL (c);
	  tree type = TREE_TYPE (d);
	  tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
	  tree i = create_tmp_var (TREE_TYPE (v));
	  tree ptype = build_pointer_type (TREE_TYPE (type));
	  tree bias = TREE_OPERAND (d, 1);
	  d = TREE_OPERAND (d, 0);
	  if (TREE_CODE (d) == POINTER_PLUS_EXPR)
	    {
	      /* Fold the section's start offset into the bias,
		 remapping the offset decl into this context first.  */
	      tree b = TREE_OPERAND (d, 1);
	      b = maybe_lookup_decl (b, ctx);
	      if (b == NULL)
		{
		  b = TREE_OPERAND (d, 1);
		  b = maybe_lookup_decl_in_outer_ctx (b, ctx);
		}
	      if (integer_zerop (bias))
		bias = b;
	      else
		{
		  bias = fold_convert_loc (clause_loc, TREE_TYPE (b), bias);
		  bias = fold_build2_loc (clause_loc, PLUS_EXPR,
					  TREE_TYPE (b), b, bias);
		}
	      d = TREE_OPERAND (d, 0);
	    }
	  /* For ref build_outer_var_ref already performs this, so
	     only new_var needs a dereference.  */
	  if (TREE_CODE (d) == INDIRECT_REF)
	    {
	      new_var = build_simple_mem_ref_loc (clause_loc, new_var);
	      gcc_assert (omp_is_reference (var) && var == orig_var);
	    }
	  else if (TREE_CODE (d) == ADDR_EXPR)
	    {
	      if (orig_var == var)
		{
		  new_var = build_fold_addr_expr (new_var);
		  ref = build_fold_addr_expr (ref);
		}
	    }
	  else
	    {
	      gcc_assert (orig_var == var);
	      if (omp_is_reference (var))
		ref = build_fold_addr_expr (ref);
	    }
	  if (DECL_P (v))
	    {
	      /* The section length may itself be a remapped decl.  */
	      tree t = maybe_lookup_decl (v, ctx);
	      if (t)
		v = t;
	      else
		v = maybe_lookup_decl_in_outer_ctx (v, ctx);
	      gimplify_expr (&v, stmt_seqp, NULL, is_gimple_val, fb_rvalue);
	    }
	  if (!integer_zerop (bias))
	    {
	      bias = fold_convert_loc (clause_loc, sizetype, bias);
	      new_var = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
					 TREE_TYPE (new_var), new_var,
					 unshare_expr (bias));
	      ref = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
					 TREE_TYPE (ref), ref, bias);
	    }
	  new_var = fold_convert_loc (clause_loc, ptype, new_var);
	  ref = fold_convert_loc (clause_loc, ptype, ref);
	  tree m = create_tmp_var (ptype);
	  gimplify_assign (m, new_var, stmt_seqp);
	  new_var = m;
	  m = create_tmp_var (ptype);
	  gimplify_assign (m, ref, stmt_seqp);
	  ref = m;
	  gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), stmt_seqp);
	  /* Loop: body label, combine one element, bump pointers and
	     index, branch back while i <= v.  */
	  tree body = create_artificial_label (UNKNOWN_LOCATION);
	  tree end = create_artificial_label (UNKNOWN_LOCATION);
	  gimple_seq_add_stmt (&sub_seq, gimple_build_label (body));
	  tree priv = build_simple_mem_ref_loc (clause_loc, new_var);
	  tree out = build_simple_mem_ref_loc (clause_loc, ref);
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	    {
	      /* User-defined reduction: splice in the combiner sequence,
		 binding the omp_out/omp_in placeholders to the current
		 elements via DECL_VALUE_EXPR.  */
	      tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
	      tree decl_placeholder
		= OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
	      SET_DECL_VALUE_EXPR (placeholder, out);
	      DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
	      SET_DECL_VALUE_EXPR (decl_placeholder, priv);
	      DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
	      lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
	      gimple_seq_add_seq (&sub_seq,
				  OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
	      OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
	      OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
	      OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
	    }
	  else
	    {
	      x = build2 (code, TREE_TYPE (out), out, priv);
	      out = unshare_expr (out);
	      gimplify_assign (out, x, &sub_seq);
	    }
	  gimple *g = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
					   TYPE_SIZE_UNIT (TREE_TYPE (type)));
	  gimple_seq_add_stmt (&sub_seq, g);
	  g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
				   TYPE_SIZE_UNIT (TREE_TYPE (type)));
	  gimple_seq_add_stmt (&sub_seq, g);
	  g = gimple_build_assign (i, PLUS_EXPR, i,
				   build_int_cst (TREE_TYPE (i), 1));
	  gimple_seq_add_stmt (&sub_seq, g);
	  g = gimple_build_cond (LE_EXPR, i, v, body, end);
	  gimple_seq_add_stmt (&sub_seq, g);
	  gimple_seq_add_stmt (&sub_seq, gimple_build_label (end));
	}
      else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	{
	  /* Scalar user-defined reduction: bind omp_out to the outer
	     variable and splice in the lowered combiner sequence.  */
	  tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);

	  if (omp_is_reference (var)
	      && !useless_type_conversion_p (TREE_TYPE (placeholder),
					     TREE_TYPE (ref)))
	    ref = build_fold_addr_expr_loc (clause_loc, ref);
	  SET_DECL_VALUE_EXPR (placeholder, ref);
	  DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
	  lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
	  gimple_seq_add_seq (&sub_seq, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
	  OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
	  OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
	}
      else
	{
	  /* Plain scalar reduction: outer = outer OP private.  */
	  x = build2 (code, TREE_TYPE (ref), ref, new_var);
	  ref = build_outer_var_ref (var, ctx);
	  gimplify_assign (ref, x, &sub_seq);
	}
    }

  /* Multiple (or non-atomizable) reductions: guard all the combines
     with the libgomp global atomic lock.  */
  stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START),
			    0);
  gimple_seq_add_stmt (stmt_seqp, stmt);

  gimple_seq_add_seq (stmt_seqp, sub_seq);

  stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END),
			    0);
  gimple_seq_add_stmt (stmt_seqp, stmt);
}
6105 
6106 
6107 /* Generate code to implement the COPYPRIVATE clauses.  */
6108 
6109 static void
6110 lower_copyprivate_clauses (tree clauses, gimple_seq *slist, gimple_seq *rlist,
6111 			    omp_context *ctx)
6112 {
6113   tree c;
6114 
6115   for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
6116     {
6117       tree var, new_var, ref, x;
6118       bool by_ref;
6119       location_t clause_loc = OMP_CLAUSE_LOCATION (c);
6120 
6121       if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYPRIVATE)
6122 	continue;
6123 
6124       var = OMP_CLAUSE_DECL (c);
6125       by_ref = use_pointer_for_field (var, NULL);
6126 
6127       ref = build_sender_ref (var, ctx);
6128       x = new_var = lookup_decl_in_outer_ctx (var, ctx);
6129       if (by_ref)
6130 	{
6131 	  x = build_fold_addr_expr_loc (clause_loc, new_var);
6132 	  x = fold_convert_loc (clause_loc, TREE_TYPE (ref), x);
6133 	}
6134       gimplify_assign (ref, x, slist);
6135 
6136       ref = build_receiver_ref (var, false, ctx);
6137       if (by_ref)
6138 	{
6139 	  ref = fold_convert_loc (clause_loc,
6140 				  build_pointer_type (TREE_TYPE (new_var)),
6141 				  ref);
6142 	  ref = build_fold_indirect_ref_loc (clause_loc, ref);
6143 	}
6144       if (omp_is_reference (var))
6145 	{
6146 	  ref = fold_convert_loc (clause_loc, TREE_TYPE (new_var), ref);
6147 	  ref = build_simple_mem_ref_loc (clause_loc, ref);
6148 	  new_var = build_simple_mem_ref_loc (clause_loc, new_var);
6149 	}
6150       x = lang_hooks.decls.omp_clause_assign_op (c, new_var, ref);
6151       gimplify_and_add (x, rlist);
6152     }
6153 }
6154 
6155 
6156 /* Generate code to implement the clauses, FIRSTPRIVATE, COPYIN, LASTPRIVATE,
6157    and REDUCTION from the sender (aka parent) side.  */
6158 
static void
lower_send_clauses (tree clauses, gimple_seq *ilist, gimple_seq *olist,
    		    omp_context *ctx)
{
  tree c, t;
  int ignored_looptemp = 0;
  bool is_taskloop = false;

  /* For taskloop, ignore first two _looptemp_ clauses, those are initialized
     by GOMP_taskloop.  */
  if (is_task_ctx (ctx) && gimple_omp_task_taskloop_p (ctx->stmt))
    {
      ignored_looptemp = 2;
      is_taskloop = true;
    }

  for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
    {
      tree val, ref, x, var;
      bool by_ref, do_in = false, do_out = false;
      location_t clause_loc = OMP_CLAUSE_LOCATION (c);

      /* Filter to the clause kinds that need sender-side code;
	 everything else is skipped via 'continue'.  */
      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_PRIVATE:
	  if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
	    break;
	  continue;
	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_LASTPRIVATE:
	case OMP_CLAUSE_IN_REDUCTION:
	case OMP_CLAUSE__REDUCTEMP_:
	  break;
	case OMP_CLAUSE_REDUCTION:
	  if (is_task_ctx (ctx) || OMP_CLAUSE_REDUCTION_TASK (c))
	    continue;
	  break;
	case OMP_CLAUSE_SHARED:
	  if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	    break;
	  continue;
	case OMP_CLAUSE__LOOPTEMP_:
	  if (ignored_looptemp)
	    {
	      ignored_looptemp--;
	      continue;
	    }
	  break;
	default:
	  continue;
	}

      val = OMP_CLAUSE_DECL (c);
      if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	   || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION)
	  && TREE_CODE (val) == MEM_REF)
	{
	  /* Array section reduction: strip down to the base decl.  */
	  val = TREE_OPERAND (val, 0);
	  if (TREE_CODE (val) == POINTER_PLUS_EXPR)
	    val = TREE_OPERAND (val, 0);
	  if (TREE_CODE (val) == INDIRECT_REF
	      || TREE_CODE (val) == ADDR_EXPR)
	    val = TREE_OPERAND (val, 0);
	  if (is_variable_sized (val))
	    continue;
	}

      /* For OMP_CLAUSE_SHARED_FIRSTPRIVATE, look beyond the
	 outer taskloop region.  */
      omp_context *ctx_for_o = ctx;
      if (is_taskloop
	  && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
	  && OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	ctx_for_o = ctx->outer;

      var = lookup_decl_in_outer_ctx (val, ctx_for_o);

      /* Globals are normally accessible directly from the child and
	 need no marshalling; COPYIN and certain task cases are the
	 exceptions.  */
      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYIN
	  && is_global_var (var)
	  && (val == OMP_CLAUSE_DECL (c)
	      || !is_task_ctx (ctx)
	      || (TREE_CODE (TREE_TYPE (val)) != POINTER_TYPE
		  && (TREE_CODE (TREE_TYPE (val)) != REFERENCE_TYPE
		      || (TREE_CODE (TREE_TYPE (TREE_TYPE (val)))
			  != POINTER_TYPE)))))
	continue;

      /* Non-static structure members are accessed through a dummy var
	 whose DECL_VALUE_EXPR is the COMPONENT_REF; rebuild that
	 expression in terms of the outer context's 'this'.  */
      t = omp_member_access_dummy_var (var);
      if (t)
	{
	  var = DECL_VALUE_EXPR (var);
	  tree o = maybe_lookup_decl_in_outer_ctx (t, ctx_for_o);
	  if (o != t)
	    var = unshare_and_remap (var, t, o);
	  else
	    var = unshare_expr (var);
	}

      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
	{
	  /* Handle taskloop firstprivate/lastprivate, where the
	     lastprivate on GIMPLE_OMP_TASK is represented as
	     OMP_CLAUSE_SHARED_FIRSTPRIVATE.  */
	  tree f = lookup_sfield ((splay_tree_key) &DECL_UID (val), ctx);
	  x = omp_build_component_ref (ctx->sender_decl, f);
	  if (use_pointer_for_field (val, ctx))
	    var = build_fold_addr_expr (var);
	  gimplify_assign (x, var, ilist);
	  DECL_ABSTRACT_ORIGIN (f) = NULL;
	  continue;
	}

      if (((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
	    && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IN_REDUCTION)
	   || val == OMP_CLAUSE_DECL (c))
	  && is_variable_sized (val))
	continue;
      by_ref = use_pointer_for_field (val, NULL);

      /* Decide the direction of the copy: DO_IN fills the record
	 before the region, DO_OUT reads it back afterwards.  */
      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_FIRSTPRIVATE:
	  if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c)
	      && !by_ref
	      && is_task_ctx (ctx))
	    TREE_NO_WARNING (var) = 1;
	  do_in = true;
	  break;

	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE__LOOPTEMP_:
	case OMP_CLAUSE__REDUCTEMP_:
	  do_in = true;
	  break;

	case OMP_CLAUSE_LASTPRIVATE:
	  if (by_ref || omp_is_reference (val))
	    {
	      if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
		continue;
	      do_in = true;
	    }
	  else
	    {
	      do_out = true;
	      if (lang_hooks.decls.omp_private_outer_ref (val))
		do_in = true;
	    }
	  break;

	case OMP_CLAUSE_REDUCTION:
	case OMP_CLAUSE_IN_REDUCTION:
	  do_in = true;
	  if (val == OMP_CLAUSE_DECL (c))
	    {
	      if (is_task_ctx (ctx))
		by_ref = use_pointer_for_field (val, ctx);
	      else
		do_out = !(by_ref || omp_is_reference (val));
	    }
	  else
	    by_ref = TREE_CODE (TREE_TYPE (val)) == ARRAY_TYPE;
	  break;

	default:
	  gcc_unreachable ();
	}

      if (do_in)
	{
	  ref = build_sender_ref (val, ctx);
	  x = by_ref ? build_fold_addr_expr_loc (clause_loc, var) : var;
	  gimplify_assign (ref, x, ilist);
	  if (is_task_ctx (ctx))
	    DECL_ABSTRACT_ORIGIN (TREE_OPERAND (ref, 1)) = NULL;
	}

      if (do_out)
	{
	  ref = build_sender_ref (val, ctx);
	  gimplify_assign (var, ref, olist);
	}
    }
}
6345 
6346 /* Generate code to implement SHARED from the sender (aka parent)
6347    side.  This is trickier, since GIMPLE_OMP_PARALLEL_CLAUSES doesn't
6348    list things that got automatically shared.  */
6349 
static void
lower_send_shared_vars (gimple_seq *ilist, gimple_seq *olist, omp_context *ctx)
{
  tree var, ovar, nvar, t, f, x, record_type;

  /* No data-sharing struct was built, so nothing is marshalled.  */
  if (ctx->record_type == NULL)
    return;

  /* Prefer the separate sender-side record when one exists.  */
  record_type = ctx->srecord_type ? ctx->srecord_type : ctx->record_type;
  for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
    {
      /* Each field remembers the decl it was created for; skip fields
	 with no such origin (or whose origin is itself a field).  */
      ovar = DECL_ABSTRACT_ORIGIN (f);
      if (!ovar || TREE_CODE (ovar) == FIELD_DECL)
	continue;

      /* Only variables remapped inside CTX (i.e. received through the
	 struct on the child side) need a send here.  */
      nvar = maybe_lookup_decl (ovar, ctx);
      if (!nvar || !DECL_HAS_VALUE_EXPR_P (nvar))
	continue;

      /* If CTX is a nested parallel directive.  Find the immediately
	 enclosing parallel or workshare construct that contains a
	 mapping for OVAR.  */
      var = lookup_decl_in_outer_ctx (ovar, ctx);

      /* For a member access via a dummy variable, substitute the outer
	 context's copy of the dummy into the access expression.  */
      t = omp_member_access_dummy_var (var);
      if (t)
	{
	  var = DECL_VALUE_EXPR (var);
	  tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
	  if (o != t)
	    var = unshare_and_remap (var, t, o);
	  else
	    var = unshare_expr (var);
	}

      if (use_pointer_for_field (ovar, ctx))
	{
	  /* Shared by reference: store the variable's address.  */
	  x = build_sender_ref (ovar, ctx);
	  var = build_fold_addr_expr (var);
	  gimplify_assign (x, var, ilist);
	}
      else
	{
	  /* Shared by copy: copy the value in before the region...  */
	  x = build_sender_ref (ovar, ctx);
	  gimplify_assign (x, var, ilist);

	  if (!TREE_READONLY (var)
	      /* We don't need to receive a new reference to a result
	         or parm decl.  In fact we may not store to it as we will
		 invalidate any pending RSO and generate wrong gimple
		 during inlining.  */
	      && !((TREE_CODE (var) == RESULT_DECL
		    || TREE_CODE (var) == PARM_DECL)
		   && DECL_BY_REFERENCE (var)))
	    {
	      /* ...and copy the possibly-updated value back out
		 afterwards.  */
	      x = build_sender_ref (ovar, ctx);
	      gimplify_assign (var, x, olist);
	    }
	}
    }
}
6411 
/* Emit an OpenACC head marker call, encapsulating the partitioning and
   other information that must be processed by the target compiler.
   Return the maximum number of dimensions the associated loop might
   be partitioned over.  */
6416 
static unsigned
lower_oacc_head_mark (location_t loc, tree ddvar, tree clauses,
		      gimple_seq *seq, omp_context *ctx)
{
  unsigned levels = 0;
  unsigned tag = 0;
  tree gang_static = NULL_TREE;
  auto_vec<tree, 5> args;

  /* The marker's first two arguments: the marker kind and the
     data-dependency variable threading the marker calls together.  */
  args.quick_push (build_int_cst
		   (integer_type_node, IFN_UNIQUE_OACC_HEAD_MARK));
  args.quick_push (ddvar);
  /* Accumulate partitioning flags into TAG and count the explicitly
     partitioned levels from the loop clauses.  */
  for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_GANG:
	  tag |= OLF_DIM_GANG;
	  gang_static = OMP_CLAUSE_GANG_STATIC_EXPR (c);
	  /* static:* is represented by -1, and we can ignore it, as
	     scheduling is always static.  */
	  if (gang_static && integer_minus_onep (gang_static))
	    gang_static = NULL_TREE;
	  levels++;
	  break;

	case OMP_CLAUSE_WORKER:
	  tag |= OLF_DIM_WORKER;
	  levels++;
	  break;

	case OMP_CLAUSE_VECTOR:
	  tag |= OLF_DIM_VECTOR;
	  levels++;
	  break;

	case OMP_CLAUSE_SEQ:
	  tag |= OLF_SEQ;
	  break;

	case OMP_CLAUSE_AUTO:
	  tag |= OLF_AUTO;
	  break;

	case OMP_CLAUSE_INDEPENDENT:
	  tag |= OLF_INDEPENDENT;
	  break;

	case OMP_CLAUSE_TILE:
	  tag |= OLF_TILE;
	  break;

	default:
	  /* Clauses irrelevant to partitioning are ignored.  */
	  continue;
	}
    }

  if (gang_static)
    {
      /* A DECL operand must be read through the enclosing context.  */
      if (DECL_P (gang_static))
	gang_static = build_outer_var_ref (gang_static, ctx);
      tag |= OLF_GANG_STATIC;
    }

  /* In a parallel region, loops are implicitly INDEPENDENT.  */
  omp_context *tgt = enclosing_target_ctx (ctx);
  if (!tgt || is_oacc_parallel (tgt))
    tag |= OLF_INDEPENDENT;

  if (tag & OLF_TILE)
    /* Tiling could use all 3 levels.  */
    levels = 3;
  else
    {
      /* A loop lacking SEQ, GANG, WORKER and/or VECTOR could be AUTO.
	 Ensure at least one level, or 2 for possible auto
	 partitioning */
      bool maybe_auto = !(tag & (((GOMP_DIM_MASK (GOMP_DIM_MAX) - 1)
				  << OLF_DIM_BASE) | OLF_SEQ));

      if (levels < 1u + maybe_auto)
	levels = 1u + maybe_auto;
    }

  args.quick_push (build_int_cst (integer_type_node, levels));
  args.quick_push (build_int_cst (integer_type_node, tag));
  /* The optional static argument goes last.  */
  if (gang_static)
    args.quick_push (gang_static);

  gcall *call = gimple_build_call_internal_vec (IFN_UNIQUE, args);
  gimple_set_location (call, loc);
  gimple_set_lhs (call, ddvar);
  gimple_seq_add_stmt (seq, call);

  return levels;
}
6513 
/* Emit an OpenACC loop head or tail marker to SEQ.  LEVEL is the
   partitioning level of the enclosed region.  */
6516 
6517 static void
6518 lower_oacc_loop_marker (location_t loc, tree ddvar, bool head,
6519 			tree tofollow, gimple_seq *seq)
6520 {
6521   int marker_kind = (head ? IFN_UNIQUE_OACC_HEAD_MARK
6522 		     : IFN_UNIQUE_OACC_TAIL_MARK);
6523   tree marker = build_int_cst (integer_type_node, marker_kind);
6524   int nargs = 2 + (tofollow != NULL_TREE);
6525   gcall *call = gimple_build_call_internal (IFN_UNIQUE, nargs,
6526 					    marker, ddvar, tofollow);
6527   gimple_set_location (call, loc);
6528   gimple_set_lhs (call, ddvar);
6529   gimple_seq_add_stmt (seq, call);
6530 }
6531 
6532 /* Generate the before and after OpenACC loop sequences.  CLAUSES are
6533    the loop clauses, from which we extract reductions.  Initialize
6534    HEAD and TAIL.  */
6535 
static void
lower_oacc_head_tail (location_t loc, tree clauses,
		      gimple_seq *head, gimple_seq *tail, omp_context *ctx)
{
  bool inner = false;
  /* DDVAR threads a fake data dependency through the marker, fork and
     join calls, keeping them ordered.  */
  tree ddvar = create_tmp_var (integer_type_node, ".data_dep");
  gimple_seq_add_stmt (head, gimple_build_assign (ddvar, integer_zero_node));

  /* COUNT is the number of partitioned levels the loop might use.  */
  unsigned count = lower_oacc_head_mark (loc, ddvar, clauses, head, ctx);
  tree fork_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_FORK);
  tree join_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_JOIN);

  gcc_assert (count);
  /* Emit one fork/join pair per level; forks accumulate onto HEAD and
     joins are prepended to TAIL so the pairs nest properly.  */
  for (unsigned done = 1; count; count--, done++)
    {
      gimple_seq fork_seq = NULL;
      gimple_seq join_seq = NULL;

      /* NOTE(review): -1 looks like a placeholder dimension resolved by
	 a later pass -- confirm against the oacc device lowering.  */
      tree place = build_int_cst (integer_type_node, -1);
      gcall *fork = gimple_build_call_internal (IFN_UNIQUE, 3,
						fork_kind, ddvar, place);
      gimple_set_location (fork, loc);
      gimple_set_lhs (fork, ddvar);

      gcall *join = gimple_build_call_internal (IFN_UNIQUE, 3,
						join_kind, ddvar, place);
      gimple_set_location (join, loc);
      gimple_set_lhs (join, ddvar);

      /* Mark the beginning of this level sequence.  */
      if (inner)
	lower_oacc_loop_marker (loc, ddvar, true,
				build_int_cst (integer_type_node, count),
				&fork_seq);
      lower_oacc_loop_marker (loc, ddvar, false,
			      build_int_cst (integer_type_node, done),
			      &join_seq);

      /* Materialize the loop's reductions around this level's fork and
	 join.  */
      lower_oacc_reductions (loc, clauses, place, inner,
			     fork, join, &fork_seq, &join_seq,  ctx);

      /* Append this level to head. */
      gimple_seq_add_seq (head, fork_seq);
      /* Prepend it to tail.  */
      gimple_seq_add_seq (&join_seq, *tail);
      *tail = join_seq;

      inner = true;
    }

  /* Mark the end of the sequence.  */
  lower_oacc_loop_marker (loc, ddvar, true, NULL_TREE, head);
  lower_oacc_loop_marker (loc, ddvar, false, NULL_TREE, tail);
}
6590 
6591 /* If exceptions are enabled, wrap the statements in BODY in a MUST_NOT_THROW
6592    catch handler and return it.  This prevents programs from violating the
6593    structured block semantics with throws.  */
6594 
6595 static gimple_seq
6596 maybe_catch_exception (gimple_seq body)
6597 {
6598   gimple *g;
6599   tree decl;
6600 
6601   if (!flag_exceptions)
6602     return body;
6603 
6604   if (lang_hooks.eh_protect_cleanup_actions != NULL)
6605     decl = lang_hooks.eh_protect_cleanup_actions ();
6606   else
6607     decl = builtin_decl_explicit (BUILT_IN_TRAP);
6608 
6609   g = gimple_build_eh_must_not_throw (decl);
6610   g = gimple_build_try (body, gimple_seq_alloc_with_stmt (g),
6611       			GIMPLE_TRY_CATCH);
6612 
6613  return gimple_seq_alloc_with_stmt (g);
6614 }
6615 
6616 
6617 /* Routines to lower OMP directives into OMP-GIMPLE.  */
6618 
6619 /* If ctx is a worksharing context inside of a cancellable parallel
6620    region and it isn't nowait, add lhs to its GIMPLE_OMP_RETURN
6621    and conditional branch to parallel's cancel_label to handle
6622    cancellation in the implicit barrier.  */
6623 
static void
maybe_add_implicit_barrier_cancel (omp_context *ctx, gimple *omp_return,
				   gimple_seq *body)
{
  gcc_assert (gimple_code (omp_return) == GIMPLE_OMP_RETURN);
  /* nowait means no implicit barrier, hence no cancellation check.  */
  if (gimple_omp_return_nowait_p (omp_return))
    return;
  /* Search outwards for a cancellable parallel region; only taskgroup
     contexts may intervene, anything else stops the search.  */
  for (omp_context *outer = ctx->outer; outer; outer = outer->outer)
    if (gimple_code (outer->stmt) == GIMPLE_OMP_PARALLEL
	&& outer->cancellable)
      {
	tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_CANCEL);
	tree c_bool_type = TREE_TYPE (TREE_TYPE (fndecl));
	/* LHS receives the barrier's status through the OMP_RETURN.  */
	tree lhs = create_tmp_var (c_bool_type);
	gimple_omp_return_set_lhs (omp_return, lhs);
	tree fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
	/* if (lhs != false) goto cancel_label; else fall through.  */
	gimple *g = gimple_build_cond (NE_EXPR, lhs,
				       fold_convert (c_bool_type,
						     boolean_false_node),
				       outer->cancel_label, fallthru_label);
	gimple_seq_add_stmt (body, g);
	gimple_seq_add_stmt (body, gimple_build_label (fallthru_label));
      }
    else if (gimple_code (outer->stmt) != GIMPLE_OMP_TASKGROUP)
      return;
}
6650 
6651 /* Find the first task_reduction or reduction clause or return NULL
6652    if there are none.  */
6653 
6654 static inline tree
6655 omp_task_reductions_find_first (tree clauses, enum tree_code code,
6656 				enum omp_clause_code ccode)
6657 {
6658   while (1)
6659     {
6660       clauses = omp_find_clause (clauses, ccode);
6661       if (clauses == NULL_TREE)
6662 	return NULL_TREE;
6663       if (ccode != OMP_CLAUSE_REDUCTION
6664 	  || code == OMP_TASKLOOP
6665 	  || OMP_CLAUSE_REDUCTION_TASK (clauses))
6666 	return clauses;
6667       clauses = OMP_CLAUSE_CHAIN (clauses);
6668     }
6669 }
6670 
6671 static void lower_omp_task_reductions (omp_context *, enum tree_code, tree,
6672 				       gimple_seq *, gimple_seq *);
6673 
6674 /* Lower the OpenMP sections directive in the current statement in GSI_P.
6675    CTX is the enclosing OMP context for the current statement.  */
6676 
static void
lower_omp_sections (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree block, control;
  gimple_stmt_iterator tgsi;
  gomp_sections *stmt;
  gimple *t;
  gbind *new_stmt, *bind;
  gimple_seq ilist, dlist, olist, tred_dlist = NULL, new_body;

  stmt = as_a <gomp_sections *> (gsi_stmt (*gsi_p));

  push_gimplify_context ();

  dlist = NULL;
  ilist = NULL;

  /* If there are task reductions, register them first and thread their
     descriptor through an artificial _REDUCTEMP_ clause.  */
  tree rclauses
    = omp_task_reductions_find_first (gimple_omp_sections_clauses (stmt),
				      OMP_SECTIONS, OMP_CLAUSE_REDUCTION);
  tree rtmp = NULL_TREE;
  if (rclauses)
    {
      tree type = build_pointer_type (pointer_sized_int_node);
      tree temp = create_tmp_var (type);
      tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
      OMP_CLAUSE_DECL (c) = temp;
      OMP_CLAUSE_CHAIN (c) = gimple_omp_sections_clauses (stmt);
      gimple_omp_sections_set_clauses (stmt, c);
      lower_omp_task_reductions (ctx, OMP_SECTIONS,
				 gimple_omp_sections_clauses (stmt),
				 &ilist, &tred_dlist);
      rclauses = c;
      rtmp = make_ssa_name (type);
      gimple_seq_add_stmt (&ilist, gimple_build_assign (rtmp, temp));
    }

  lower_rec_input_clauses (gimple_omp_sections_clauses (stmt),
      			   &ilist, &dlist, ctx, NULL);

  /* Lower each section body in its own context; the last section also
     gets the lastprivate handling and is marked as last.  */
  new_body = gimple_omp_body (stmt);
  gimple_omp_set_body (stmt, NULL);
  tgsi = gsi_start (new_body);
  for (; !gsi_end_p (tgsi); gsi_next (&tgsi))
    {
      omp_context *sctx;
      gimple *sec_start;

      sec_start = gsi_stmt (tgsi);
      sctx = maybe_lookup_ctx (sec_start);
      gcc_assert (sctx);

      lower_omp (gimple_omp_body_ptr (sec_start), sctx);
      gsi_insert_seq_after (&tgsi, gimple_omp_body (sec_start),
			    GSI_CONTINUE_LINKING);
      gimple_omp_set_body (sec_start, NULL);

      if (gsi_one_before_end_p (tgsi))
	{
	  gimple_seq l = NULL;
	  lower_lastprivate_clauses (gimple_omp_sections_clauses (stmt), NULL,
				     &l, ctx);
	  gsi_insert_seq_after (&tgsi, l, GSI_CONTINUE_LINKING);
	  gimple_omp_section_set_last (sec_start);
	}

      /* Each section ends with its own OMP_RETURN.  */
      gsi_insert_after (&tgsi, gimple_build_omp_return (false),
			GSI_CONTINUE_LINKING);
    }

  block = make_node (BLOCK);
  bind = gimple_build_bind (NULL, new_body, block);

  olist = NULL;
  lower_reduction_clauses (gimple_omp_sections_clauses (stmt), &olist, ctx);

  /* Replace the directive with an outer bind holding the expansion.  */
  block = make_node (BLOCK);
  new_stmt = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, new_stmt, true);

  pop_gimplify_context (new_stmt);
  gimple_bind_append_vars (new_stmt, ctx->block_vars);
  BLOCK_VARS (block) = gimple_bind_vars (bind);
  if (BLOCK_VARS (block))
    TREE_USED (block) = 1;

  /* Assemble the final body: input-clause setup, the sections stmt,
     the sections switch, the bound section bodies, the continue stmt,
     reductions, destructors and the closing OMP_RETURN.  */
  new_body = NULL;
  gimple_seq_add_seq (&new_body, ilist);
  gimple_seq_add_stmt (&new_body, stmt);
  gimple_seq_add_stmt (&new_body, gimple_build_omp_sections_switch ());
  gimple_seq_add_stmt (&new_body, bind);

  control = create_tmp_var (unsigned_type_node, ".section");
  t = gimple_build_omp_continue (control, control);
  gimple_omp_sections_set_control (stmt, control);
  gimple_seq_add_stmt (&new_body, t);

  gimple_seq_add_seq (&new_body, olist);
  if (ctx->cancellable)
    gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
  gimple_seq_add_seq (&new_body, dlist);

  new_body = maybe_catch_exception (new_body);

  bool nowait = omp_find_clause (gimple_omp_sections_clauses (stmt),
				 OMP_CLAUSE_NOWAIT) != NULL_TREE;
  t = gimple_build_omp_return (nowait);
  gimple_seq_add_stmt (&new_body, t);
  gimple_seq_add_seq (&new_body, tred_dlist);
  maybe_add_implicit_barrier_cancel (ctx, t, &new_body);

  /* Point the _REDUCTEMP_ clause at the SSA name holding the
     descriptor address.  */
  if (rclauses)
    OMP_CLAUSE_DECL (rclauses) = rtmp;

  gimple_bind_set_body (new_stmt, new_body);
}
6793 
6794 
6795 /* A subroutine of lower_omp_single.  Expand the simple form of
6796    a GIMPLE_OMP_SINGLE, without a copyprivate clause:
6797 
6798      	if (GOMP_single_start ())
6799 	  BODY;
6800 	[ GOMP_barrier (); ]	-> unless 'nowait' is present.
6801 
6802   FIXME.  It may be better to delay expanding the logic of this until
6803   pass_expand_omp.  The expanded logic may make the job more difficult
6804   to a synchronization analysis pass.  */
6805 
6806 static void
6807 lower_omp_single_simple (gomp_single *single_stmt, gimple_seq *pre_p)
6808 {
6809   location_t loc = gimple_location (single_stmt);
6810   tree tlabel = create_artificial_label (loc);
6811   tree flabel = create_artificial_label (loc);
6812   gimple *call, *cond;
6813   tree lhs, decl;
6814 
6815   decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_START);
6816   lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (decl)));
6817   call = gimple_build_call (decl, 0);
6818   gimple_call_set_lhs (call, lhs);
6819   gimple_seq_add_stmt (pre_p, call);
6820 
6821   cond = gimple_build_cond (EQ_EXPR, lhs,
6822 			    fold_convert_loc (loc, TREE_TYPE (lhs),
6823 					      boolean_true_node),
6824 			    tlabel, flabel);
6825   gimple_seq_add_stmt (pre_p, cond);
6826   gimple_seq_add_stmt (pre_p, gimple_build_label (tlabel));
6827   gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
6828   gimple_seq_add_stmt (pre_p, gimple_build_label (flabel));
6829 }
6830 
6831 
6832 /* A subroutine of lower_omp_single.  Expand the simple form of
6833    a GIMPLE_OMP_SINGLE, with a copyprivate clause:
6834 
6835 	#pragma omp single copyprivate (a, b, c)
6836 
6837    Create a new structure to hold copies of 'a', 'b' and 'c' and emit:
6838 
6839       {
6840 	if ((copyout_p = GOMP_single_copy_start ()) == NULL)
6841 	  {
6842 	    BODY;
6843 	    copyout.a = a;
6844 	    copyout.b = b;
6845 	    copyout.c = c;
6846 	    GOMP_single_copy_end (&copyout);
6847 	  }
6848 	else
6849 	  {
6850 	    a = copyout_p->a;
6851 	    b = copyout_p->b;
6852 	    c = copyout_p->c;
6853 	  }
6854 	GOMP_barrier ();
6855       }
6856 
6857   FIXME.  It may be better to delay expanding the logic of this until
6858   pass_expand_omp.  The expanded logic may make the job more difficult
6859   to a synchronization analysis pass.  */
6860 
static void
lower_omp_single_copy (gomp_single *single_stmt, gimple_seq *pre_p,
		       omp_context *ctx)
{
  tree ptr_type, t, l0, l1, l2, bfn_decl;
  gimple_seq copyin_seq;
  location_t loc = gimple_location (single_stmt);

  /* The copyout struct filled by the thread that executes the body.  */
  ctx->sender_decl = create_tmp_var (ctx->record_type, ".omp_copy_o");

  /* The pointer through which the other threads read that struct.  */
  ptr_type = build_pointer_type (ctx->record_type);
  ctx->receiver_decl = create_tmp_var (ptr_type, ".omp_copy_i");

  l0 = create_artificial_label (loc);
  l1 = create_artificial_label (loc);
  l2 = create_artificial_label (loc);

  /* .omp_copy_i = GOMP_single_copy_start ();  */
  bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_START);
  t = build_call_expr_loc (loc, bfn_decl, 0);
  t = fold_convert_loc (loc, ptr_type, t);
  gimplify_assign (ctx->receiver_decl, t, pre_p);

  /* A NULL result means this thread executes the body (goto l0);
     otherwise it copies in the results (goto l1).  */
  t = build2 (EQ_EXPR, boolean_type_node, ctx->receiver_decl,
	      build_int_cst (ptr_type, 0));
  t = build3 (COND_EXPR, void_type_node, t,
	      build_and_jump (&l0), build_and_jump (&l1));
  gimplify_and_add (t, pre_p);

  gimple_seq_add_stmt (pre_p, gimple_build_label (l0));

  gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));

  /* Generate the copy-out stores into PRE_P and the copy-in loads
     into COPYIN_SEQ, emitted on the other branch below.  */
  copyin_seq = NULL;
  lower_copyprivate_clauses (gimple_omp_single_clauses (single_stmt), pre_p,
			      &copyin_seq, ctx);

  /* GOMP_single_copy_end (&.omp_copy_o);  */
  t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
  bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_END);
  t = build_call_expr_loc (loc, bfn_decl, 1, t);
  gimplify_and_add (t, pre_p);

  /* Skip over the copy-in code on the executing thread.  */
  t = build_and_jump (&l2);
  gimplify_and_add (t, pre_p);

  gimple_seq_add_stmt (pre_p, gimple_build_label (l1));

  gimple_seq_add_seq (pre_p, copyin_seq);

  gimple_seq_add_stmt (pre_p, gimple_build_label (l2));
}
6911 
6912 
6913 /* Expand code for an OpenMP single directive.  */
6914 
static void
lower_omp_single (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree block;
  gomp_single *single_stmt = as_a <gomp_single *> (gsi_stmt (*gsi_p));
  gbind *bind;
  gimple_seq bind_body, bind_body_tail = NULL, dlist;

  push_gimplify_context ();

  /* Replace the directive with a bind that will hold the expansion.  */
  block = make_node (BLOCK);
  bind = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, bind, true);
  bind_body = NULL;
  dlist = NULL;
  lower_rec_input_clauses (gimple_omp_single_clauses (single_stmt),
			   &bind_body, &dlist, ctx, NULL);
  lower_omp (gimple_omp_body_ptr (single_stmt), ctx);

  gimple_seq_add_stmt (&bind_body, single_stmt);

  /* ctx->record_type is set when copyprivate data must be marshalled
     between threads; pick the matching expansion.  */
  if (ctx->record_type)
    lower_omp_single_copy (single_stmt, &bind_body, ctx);
  else
    lower_omp_single_simple (single_stmt, &bind_body);

  gimple_omp_set_body (single_stmt, NULL);

  gimple_seq_add_seq (&bind_body, dlist);

  bind_body = maybe_catch_exception (bind_body);

  bool nowait = omp_find_clause (gimple_omp_single_clauses (single_stmt),
				 OMP_CLAUSE_NOWAIT) != NULL_TREE;
  gimple *g = gimple_build_omp_return (nowait);
  gimple_seq_add_stmt (&bind_body_tail, g);
  maybe_add_implicit_barrier_cancel (ctx, g, &bind_body_tail);
  if (ctx->record_type)
    {
      /* Clobber the copyout struct once the construct is done with it,
	 so its storage can be reused.  */
      gimple_stmt_iterator gsi = gsi_start (bind_body_tail);
      tree clobber = build_constructor (ctx->record_type, NULL);
      TREE_THIS_VOLATILE (clobber) = 1;
      gsi_insert_after (&gsi, gimple_build_assign (ctx->sender_decl,
						   clobber), GSI_SAME_STMT);
    }
  gimple_seq_add_seq (&bind_body, bind_body_tail);
  gimple_bind_set_body (bind, bind_body);

  pop_gimplify_context (bind);

  gimple_bind_append_vars (bind, ctx->block_vars);
  BLOCK_VARS (block) = ctx->block_vars;
  if (BLOCK_VARS (block))
    TREE_USED (block) = 1;
}
6970 
6971 
6972 /* Expand code for an OpenMP master directive.  */
6973 
static void
lower_omp_master (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree block, lab = NULL, x, bfn_decl;
  gimple *stmt = gsi_stmt (*gsi_p);
  gbind *bind;
  location_t loc = gimple_location (stmt);
  gimple_seq tseq;

  push_gimplify_context ();

  block = make_node (BLOCK);
  bind = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, bind, true);
  gimple_bind_add_stmt (bind, stmt);

  /* if (omp_get_thread_num () == 0) fall into the body, else jump to
     LAB after it -- only the master thread executes the body.
     build_and_jump fills in LAB.  */
  bfn_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
  x = build_call_expr_loc (loc, bfn_decl, 0);
  x = build2 (EQ_EXPR, boolean_type_node, x, integer_zero_node);
  x = build3 (COND_EXPR, void_type_node, x, NULL, build_and_jump (&lab));
  tseq = NULL;
  gimplify_and_add (x, &tseq);
  gimple_bind_add_seq (bind, tseq);

  lower_omp (gimple_omp_body_ptr (stmt), ctx);
  gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
  gimple_bind_add_seq (bind, gimple_omp_body (stmt));
  gimple_omp_set_body (stmt, NULL);

  gimple_bind_add_stmt (bind, gimple_build_label (lab));

  /* master has no implicit barrier: emit a nowait OMP_RETURN.  */
  gimple_bind_add_stmt (bind, gimple_build_omp_return (true));

  pop_gimplify_context (bind);

  gimple_bind_append_vars (bind, ctx->block_vars);
  BLOCK_VARS (block) = ctx->block_vars;
}
7012 
7013 /* Helper function for lower_omp_task_reductions.  For a specific PASS
7014    find out the current clause it should be processed, or return false
7015    if all have been processed already.  */
7016 
static inline bool
omp_task_reduction_iterate (int pass, enum tree_code code,
			    enum omp_clause_code ccode, tree *c, tree *decl,
			    tree *type, tree *next)
{
  for (; *c; *c = omp_find_clause (OMP_CLAUSE_CHAIN (*c), ccode))
    {
      /* Plain reductions only participate for taskloop or when they
	 carry the task modifier.  */
      if (ccode == OMP_CLAUSE_REDUCTION
	  && code != OMP_TASKLOOP
	  && !OMP_CLAUSE_REDUCTION_TASK (*c))
	continue;
      *decl = OMP_CLAUSE_DECL (*c);
      *type = TREE_TYPE (*decl);
      /* MEM_REF reduction decls are only handled in pass 1.  */
      if (TREE_CODE (*decl) == MEM_REF)
	{
	  if (pass != 1)
	    continue;
	}
      else
	{
	  /* For a reference, the reduction applies to the pointee.  */
	  if (omp_is_reference (*decl))
	    *type = TREE_TYPE (*type);
	  /* Pass 0 handles constant-sized types, pass 1 the rest.  */
	  if (pass != (!TREE_CONSTANT (TYPE_SIZE_UNIT (*type))))
	    continue;
	}
      *next = omp_find_clause (OMP_CLAUSE_CHAIN (*c), ccode);
      return true;
    }
  /* All clauses have been processed.  */
  *decl = NULL_TREE;
  *type = NULL_TREE;
  *next = NULL_TREE;
  return false;
}
7050 
7051 /* Lower task_reduction and reduction clauses (the latter unless CODE is
7052    OMP_TASKGROUP only with task modifier).  Register mapping of those in
7053    START sequence and reducing them and unregister them in the END sequence.  */
7054 
7055 static void
7056 lower_omp_task_reductions (omp_context *ctx, enum tree_code code, tree clauses,
7057 			   gimple_seq *start, gimple_seq *end)
7058 {
7059   enum omp_clause_code ccode
7060     = (code == OMP_TASKGROUP
7061        ? OMP_CLAUSE_TASK_REDUCTION : OMP_CLAUSE_REDUCTION);
7062   tree cancellable = NULL_TREE;
7063   clauses = omp_task_reductions_find_first (clauses, code, ccode);
7064   if (clauses == NULL_TREE)
7065     return;
7066   if (code == OMP_FOR || code == OMP_SECTIONS)
7067     {
7068       for (omp_context *outer = ctx->outer; outer; outer = outer->outer)
7069 	if (gimple_code (outer->stmt) == GIMPLE_OMP_PARALLEL
7070 	    && outer->cancellable)
7071 	  {
7072 	    cancellable = error_mark_node;
7073 	    break;
7074 	  }
7075 	else if (gimple_code (outer->stmt) != GIMPLE_OMP_TASKGROUP)
7076 	  break;
7077     }
7078   tree record_type = lang_hooks.types.make_type (RECORD_TYPE);
7079   tree *last = &TYPE_FIELDS (record_type);
7080   unsigned cnt = 0;
7081   if (cancellable)
7082     {
7083       tree field = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
7084 			       ptr_type_node);
7085       tree ifield = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
7086 				integer_type_node);
7087       *last = field;
7088       DECL_CHAIN (field) = ifield;
7089       last = &DECL_CHAIN (ifield);
7090       DECL_CONTEXT (field) = record_type;
7091       if (TYPE_ALIGN (record_type) < DECL_ALIGN (field))
7092 	SET_TYPE_ALIGN (record_type, DECL_ALIGN (field));
7093       DECL_CONTEXT (ifield) = record_type;
7094       if (TYPE_ALIGN (record_type) < DECL_ALIGN (ifield))
7095 	SET_TYPE_ALIGN (record_type, DECL_ALIGN (ifield));
7096     }
7097   for (int pass = 0; pass < 2; pass++)
7098     {
7099       tree decl, type, next;
7100       for (tree c = clauses;
7101 	   omp_task_reduction_iterate (pass, code, ccode,
7102 				       &c, &decl, &type, &next); c = next)
7103 	{
7104 	  ++cnt;
7105 	  tree new_type = type;
7106 	  if (ctx->outer)
7107 	    new_type = remap_type (type, &ctx->outer->cb);
7108 	  tree field
7109 	    = build_decl (OMP_CLAUSE_LOCATION (c), FIELD_DECL,
7110 			  DECL_P (decl) ? DECL_NAME (decl) : NULL_TREE,
7111 			  new_type);
7112 	  if (DECL_P (decl) && type == TREE_TYPE (decl))
7113 	    {
7114 	      SET_DECL_ALIGN (field, DECL_ALIGN (decl));
7115 	      DECL_USER_ALIGN (field) = DECL_USER_ALIGN (decl);
7116 	      TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (decl);
7117 	    }
7118 	  else
7119 	    SET_DECL_ALIGN (field, TYPE_ALIGN (type));
7120 	  DECL_CONTEXT (field) = record_type;
7121 	  if (TYPE_ALIGN (record_type) < DECL_ALIGN (field))
7122 	    SET_TYPE_ALIGN (record_type, DECL_ALIGN (field));
7123 	  *last = field;
7124 	  last = &DECL_CHAIN (field);
7125 	  tree bfield
7126 	    = build_decl (OMP_CLAUSE_LOCATION (c), FIELD_DECL, NULL_TREE,
7127 			  boolean_type_node);
7128 	  DECL_CONTEXT (bfield) = record_type;
7129 	  if (TYPE_ALIGN (record_type) < DECL_ALIGN (bfield))
7130 	    SET_TYPE_ALIGN (record_type, DECL_ALIGN (bfield));
7131 	  *last = bfield;
7132 	  last = &DECL_CHAIN (bfield);
7133 	}
7134     }
7135   *last = NULL_TREE;
7136   layout_type (record_type);
7137 
7138   /* Build up an array which registers with the runtime all the reductions
7139      and deregisters them at the end.  Format documented in libgomp/task.c.  */
7140   tree atype = build_array_type_nelts (pointer_sized_int_node, 7 + cnt * 3);
7141   tree avar = create_tmp_var_raw (atype);
7142   gimple_add_tmp_var (avar);
7143   TREE_ADDRESSABLE (avar) = 1;
7144   tree r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_zero_node,
7145 		   NULL_TREE, NULL_TREE);
7146   tree t = build_int_cst (pointer_sized_int_node, cnt);
7147   gimple_seq_add_stmt (start, gimple_build_assign (r, t));
7148   gimple_seq seq = NULL;
7149   tree sz = fold_convert (pointer_sized_int_node,
7150 			  TYPE_SIZE_UNIT (record_type));
7151   int cachesz = 64;
7152   sz = fold_build2 (PLUS_EXPR, pointer_sized_int_node, sz,
7153 		    build_int_cst (pointer_sized_int_node, cachesz - 1));
7154   sz = fold_build2 (BIT_AND_EXPR, pointer_sized_int_node, sz,
7155 		    build_int_cst (pointer_sized_int_node, ~(cachesz - 1)));
7156   ctx->task_reductions.create (1 + cnt);
7157   ctx->task_reduction_map = new hash_map<tree, unsigned>;
7158   ctx->task_reductions.quick_push (TREE_CODE (sz) == INTEGER_CST
7159 				   ? sz : NULL_TREE);
7160   sz = force_gimple_operand (sz, &seq, true, NULL_TREE);
7161   gimple_seq_add_seq (start, seq);
7162   r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_one_node,
7163 	      NULL_TREE, NULL_TREE);
7164   gimple_seq_add_stmt (start, gimple_build_assign (r, sz));
7165   r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (2),
7166 	      NULL_TREE, NULL_TREE);
7167   t = build_int_cst (pointer_sized_int_node,
7168 		     MAX (TYPE_ALIGN_UNIT (record_type), (unsigned) cachesz));
7169   gimple_seq_add_stmt (start, gimple_build_assign (r, t));
7170   r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (3),
7171 	      NULL_TREE, NULL_TREE);
7172   t = build_int_cst (pointer_sized_int_node, -1);
7173   gimple_seq_add_stmt (start, gimple_build_assign (r, t));
7174   r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (4),
7175 	      NULL_TREE, NULL_TREE);
7176   t = build_int_cst (pointer_sized_int_node, 0);
7177   gimple_seq_add_stmt (start, gimple_build_assign (r, t));
7178 
7179   /* In end, build a loop that iterates from 0 to < omp_get_num_threads ()
7180      and for each task reduction checks a bool right after the private variable
7181      within that thread's chunk; if the bool is clear, it hasn't been
7182      initialized and thus isn't going to be reduced nor destructed, otherwise
7183      reduce and destruct it.  */
7184   tree idx = create_tmp_var (size_type_node);
7185   gimple_seq_add_stmt (end, gimple_build_assign (idx, size_zero_node));
7186   tree num_thr_sz = create_tmp_var (size_type_node);
7187   tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
7188   tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
7189   tree lab3 = NULL_TREE;
7190   gimple *g;
7191   if (code == OMP_FOR || code == OMP_SECTIONS)
7192     {
7193       /* For worksharing constructs, only perform it in the master thread,
7194 	 with the exception of cancelled implicit barriers - then only handle
7195 	 the current thread.  */
7196       tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
7197       t = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
7198       tree thr_num = create_tmp_var (integer_type_node);
7199       g = gimple_build_call (t, 0);
7200       gimple_call_set_lhs (g, thr_num);
7201       gimple_seq_add_stmt (end, g);
7202       if (cancellable)
7203 	{
7204 	  tree c;
7205 	  tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
7206 	  tree lab6 = create_artificial_label (UNKNOWN_LOCATION);
7207 	  lab3 = create_artificial_label (UNKNOWN_LOCATION);
7208 	  if (code == OMP_FOR)
7209 	    c = gimple_omp_for_clauses (ctx->stmt);
7210 	  else /* if (code == OMP_SECTIONS) */
7211 	    c = gimple_omp_sections_clauses (ctx->stmt);
7212 	  c = OMP_CLAUSE_DECL (omp_find_clause (c, OMP_CLAUSE__REDUCTEMP_));
7213 	  cancellable = c;
7214 	  g = gimple_build_cond (NE_EXPR, c, build_zero_cst (TREE_TYPE (c)),
7215 				 lab5, lab6);
7216 	  gimple_seq_add_stmt (end, g);
7217 	  gimple_seq_add_stmt (end, gimple_build_label (lab5));
7218 	  g = gimple_build_assign (idx, NOP_EXPR, thr_num);
7219 	  gimple_seq_add_stmt (end, g);
7220 	  g = gimple_build_assign (num_thr_sz, PLUS_EXPR, idx,
7221 				   build_one_cst (TREE_TYPE (idx)));
7222 	  gimple_seq_add_stmt (end, g);
7223 	  gimple_seq_add_stmt (end, gimple_build_goto (lab3));
7224 	  gimple_seq_add_stmt (end, gimple_build_label (lab6));
7225 	}
7226       g = gimple_build_cond (NE_EXPR, thr_num, integer_zero_node, lab2, lab4);
7227       gimple_seq_add_stmt (end, g);
7228       gimple_seq_add_stmt (end, gimple_build_label (lab4));
7229     }
7230   if (code != OMP_PARALLEL)
7231     {
7232       t = builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS);
7233       tree num_thr = create_tmp_var (integer_type_node);
7234       g = gimple_build_call (t, 0);
7235       gimple_call_set_lhs (g, num_thr);
7236       gimple_seq_add_stmt (end, g);
7237       g = gimple_build_assign (num_thr_sz, NOP_EXPR, num_thr);
7238       gimple_seq_add_stmt (end, g);
7239       if (cancellable)
7240 	gimple_seq_add_stmt (end, gimple_build_label (lab3));
7241     }
7242   else
7243     {
7244       tree c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
7245 				OMP_CLAUSE__REDUCTEMP_);
7246       t = fold_convert (pointer_sized_int_node, OMP_CLAUSE_DECL (c));
7247       t = fold_convert (size_type_node, t);
7248       gimplify_assign (num_thr_sz, t, end);
7249     }
7250   t = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (2),
7251 	      NULL_TREE, NULL_TREE);
7252   tree data = create_tmp_var (pointer_sized_int_node);
7253   gimple_seq_add_stmt (end, gimple_build_assign (data, t));
7254   gimple_seq_add_stmt (end, gimple_build_label (lab1));
7255   tree ptr;
7256   if (TREE_CODE (TYPE_SIZE_UNIT (record_type)) == INTEGER_CST)
7257     ptr = create_tmp_var (build_pointer_type (record_type));
7258   else
7259     ptr = create_tmp_var (ptr_type_node);
7260   gimple_seq_add_stmt (end, gimple_build_assign (ptr, NOP_EXPR, data));
7261 
7262   tree field = TYPE_FIELDS (record_type);
7263   cnt = 0;
7264   if (cancellable)
7265     field = DECL_CHAIN (DECL_CHAIN (field));
7266   for (int pass = 0; pass < 2; pass++)
7267     {
7268       tree decl, type, next;
7269       for (tree c = clauses;
7270 	   omp_task_reduction_iterate (pass, code, ccode,
7271 				       &c, &decl, &type, &next); c = next)
7272 	{
7273 	  tree var = decl, ref;
7274 	  if (TREE_CODE (decl) == MEM_REF)
7275 	    {
7276 	      var = TREE_OPERAND (var, 0);
7277 	      if (TREE_CODE (var) == POINTER_PLUS_EXPR)
7278 		var = TREE_OPERAND (var, 0);
7279 	      tree v = var;
7280 	      if (TREE_CODE (var) == ADDR_EXPR)
7281 		var = TREE_OPERAND (var, 0);
7282 	      else if (TREE_CODE (var) == INDIRECT_REF)
7283 		var = TREE_OPERAND (var, 0);
7284 	      tree orig_var = var;
7285 	      if (is_variable_sized (var))
7286 		{
7287 		  gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
7288 		  var = DECL_VALUE_EXPR (var);
7289 		  gcc_assert (TREE_CODE (var) == INDIRECT_REF);
7290 		  var = TREE_OPERAND (var, 0);
7291 		  gcc_assert (DECL_P (var));
7292 		}
7293 	      t = ref = maybe_lookup_decl_in_outer_ctx (var, ctx);
7294 	      if (orig_var != var)
7295 		gcc_assert (TREE_CODE (v) == ADDR_EXPR);
7296 	      else if (TREE_CODE (v) == ADDR_EXPR)
7297 		t = build_fold_addr_expr (t);
7298 	      else if (TREE_CODE (v) == INDIRECT_REF)
7299 		t = build_fold_indirect_ref (t);
7300 	      if (TREE_CODE (TREE_OPERAND (decl, 0)) == POINTER_PLUS_EXPR)
7301 		{
7302 		  tree b = TREE_OPERAND (TREE_OPERAND (decl, 0), 1);
7303 		  b = maybe_lookup_decl_in_outer_ctx (b, ctx);
7304 		  t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, b);
7305 		}
7306 	      if (!integer_zerop (TREE_OPERAND (decl, 1)))
7307 		t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t,
7308 				 fold_convert (size_type_node,
7309 					       TREE_OPERAND (decl, 1)));
7310 	    }
7311 	  else
7312 	    {
7313 	      t = ref = maybe_lookup_decl_in_outer_ctx (var, ctx);
7314 	      if (!omp_is_reference (decl))
7315 		t = build_fold_addr_expr (t);
7316 	    }
7317 	  t = fold_convert (pointer_sized_int_node, t);
7318 	  seq = NULL;
7319 	  t = force_gimple_operand (t, &seq, true, NULL_TREE);
7320 	  gimple_seq_add_seq (start, seq);
7321 	  r = build4 (ARRAY_REF, pointer_sized_int_node, avar,
7322 		      size_int (7 + cnt * 3), NULL_TREE, NULL_TREE);
7323 	  gimple_seq_add_stmt (start, gimple_build_assign (r, t));
7324 	  t = unshare_expr (byte_position (field));
7325 	  t = fold_convert (pointer_sized_int_node, t);
7326 	  ctx->task_reduction_map->put (c, cnt);
7327 	  ctx->task_reductions.quick_push (TREE_CODE (t) == INTEGER_CST
7328 					   ? t : NULL_TREE);
7329 	  seq = NULL;
7330 	  t = force_gimple_operand (t, &seq, true, NULL_TREE);
7331 	  gimple_seq_add_seq (start, seq);
7332 	  r = build4 (ARRAY_REF, pointer_sized_int_node, avar,
7333 		      size_int (7 + cnt * 3 + 1), NULL_TREE, NULL_TREE);
7334 	  gimple_seq_add_stmt (start, gimple_build_assign (r, t));
7335 
7336 	  tree bfield = DECL_CHAIN (field);
7337 	  tree cond;
7338 	  if (code == OMP_PARALLEL || code == OMP_FOR || code == OMP_SECTIONS)
7339 	    /* In parallel or worksharing all threads unconditionally
7340 	       initialize all their task reduction private variables.  */
7341 	    cond = boolean_true_node;
7342 	  else if (TREE_TYPE (ptr) == ptr_type_node)
7343 	    {
7344 	      cond = build2 (POINTER_PLUS_EXPR, ptr_type_node, ptr,
7345 			     unshare_expr (byte_position (bfield)));
7346 	      seq = NULL;
7347 	      cond = force_gimple_operand (cond, &seq, true, NULL_TREE);
7348 	      gimple_seq_add_seq (end, seq);
7349 	      tree pbool = build_pointer_type (TREE_TYPE (bfield));
7350 	      cond = build2 (MEM_REF, TREE_TYPE (bfield), cond,
7351 			     build_int_cst (pbool, 0));
7352 	    }
7353 	  else
7354 	    cond = build3 (COMPONENT_REF, TREE_TYPE (bfield),
7355 			   build_simple_mem_ref (ptr), bfield, NULL_TREE);
7356 	  tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
7357 	  tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
7358 	  tree condv = create_tmp_var (boolean_type_node);
7359 	  gimple_seq_add_stmt (end, gimple_build_assign (condv, cond));
7360 	  g = gimple_build_cond (NE_EXPR, condv, boolean_false_node,
7361 				 lab3, lab4);
7362 	  gimple_seq_add_stmt (end, g);
7363 	  gimple_seq_add_stmt (end, gimple_build_label (lab3));
7364 	  if (cancellable && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) == NULL_TREE)
7365 	    {
7366 	      /* If this reduction doesn't need destruction and parallel
7367 		 has been cancelled, there is nothing to do for this
7368 		 reduction, so jump around the merge operation.  */
7369 	      tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
7370 	      g = gimple_build_cond (NE_EXPR, cancellable,
7371 				     build_zero_cst (TREE_TYPE (cancellable)),
7372 				     lab4, lab5);
7373 	      gimple_seq_add_stmt (end, g);
7374 	      gimple_seq_add_stmt (end, gimple_build_label (lab5));
7375 	    }
7376 
7377 	  tree new_var;
7378 	  if (TREE_TYPE (ptr) == ptr_type_node)
7379 	    {
7380 	      new_var = build2 (POINTER_PLUS_EXPR, ptr_type_node, ptr,
7381 				unshare_expr (byte_position (field)));
7382 	      seq = NULL;
7383 	      new_var = force_gimple_operand (new_var, &seq, true, NULL_TREE);
7384 	      gimple_seq_add_seq (end, seq);
7385 	      tree pbool = build_pointer_type (TREE_TYPE (field));
7386 	      new_var = build2 (MEM_REF, TREE_TYPE (field), new_var,
7387 				build_int_cst (pbool, 0));
7388 	    }
7389 	  else
7390 	    new_var = build3 (COMPONENT_REF, TREE_TYPE (field),
7391 			      build_simple_mem_ref (ptr), field, NULL_TREE);
7392 
7393 	  enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
7394 	  if (TREE_CODE (decl) != MEM_REF && omp_is_reference (decl))
7395 	    ref = build_simple_mem_ref (ref);
7396 	  /* reduction(-:var) sums up the partial results, so it acts
7397 	     identically to reduction(+:var).  */
7398 	  if (rcode == MINUS_EXPR)
7399 	    rcode = PLUS_EXPR;
7400 	  if (TREE_CODE (decl) == MEM_REF)
7401 	    {
7402 	      tree type = TREE_TYPE (new_var);
7403 	      tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
7404 	      tree i = create_tmp_var (TREE_TYPE (v));
7405 	      tree ptype = build_pointer_type (TREE_TYPE (type));
7406 	      if (DECL_P (v))
7407 		{
7408 		  v = maybe_lookup_decl_in_outer_ctx (v, ctx);
7409 		  tree vv = create_tmp_var (TREE_TYPE (v));
7410 		  gimplify_assign (vv, v, start);
7411 		  v = vv;
7412 		}
7413 	      ref = build4 (ARRAY_REF, pointer_sized_int_node, avar,
7414 			    size_int (7 + cnt * 3), NULL_TREE, NULL_TREE);
7415 	      new_var = build_fold_addr_expr (new_var);
7416 	      new_var = fold_convert (ptype, new_var);
7417 	      ref = fold_convert (ptype, ref);
7418 	      tree m = create_tmp_var (ptype);
7419 	      gimplify_assign (m, new_var, end);
7420 	      new_var = m;
7421 	      m = create_tmp_var (ptype);
7422 	      gimplify_assign (m, ref, end);
7423 	      ref = m;
7424 	      gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), end);
7425 	      tree body = create_artificial_label (UNKNOWN_LOCATION);
7426 	      tree endl = create_artificial_label (UNKNOWN_LOCATION);
7427 	      gimple_seq_add_stmt (end, gimple_build_label (body));
7428 	      tree priv = build_simple_mem_ref (new_var);
7429 	      tree out = build_simple_mem_ref (ref);
7430 	      if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
7431 		{
7432 		  tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
7433 		  tree decl_placeholder
7434 		    = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
7435 		  tree lab6 = NULL_TREE;
7436 		  if (cancellable)
7437 		    {
7438 		      /* If this reduction needs destruction and parallel
7439 			 has been cancelled, jump around the merge operation
7440 			 to the destruction.  */
7441 		      tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
7442 		      lab6 = create_artificial_label (UNKNOWN_LOCATION);
7443 		      tree zero = build_zero_cst (TREE_TYPE (cancellable));
7444 		      g = gimple_build_cond (NE_EXPR, cancellable, zero,
7445 					     lab6, lab5);
7446 		      gimple_seq_add_stmt (end, g);
7447 		      gimple_seq_add_stmt (end, gimple_build_label (lab5));
7448 		    }
7449 		  SET_DECL_VALUE_EXPR (placeholder, out);
7450 		  DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
7451 		  SET_DECL_VALUE_EXPR (decl_placeholder, priv);
7452 		  DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
7453 		  lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
7454 		  gimple_seq_add_seq (end,
7455 				      OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
7456 		  OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
7457 		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
7458 		    {
7459 		      OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
7460 		      OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
7461 		    }
7462 		  if (cancellable)
7463 		    gimple_seq_add_stmt (end, gimple_build_label (lab6));
7464 		  tree x = lang_hooks.decls.omp_clause_dtor (c, priv);
7465 		  if (x)
7466 		    {
7467 		      gimple_seq tseq = NULL;
7468 		      gimplify_stmt (&x, &tseq);
7469 		      gimple_seq_add_seq (end, tseq);
7470 		    }
7471 		}
7472 	      else
7473 		{
7474 		  tree x = build2 (rcode, TREE_TYPE (out), out, priv);
7475 		  out = unshare_expr (out);
7476 		  gimplify_assign (out, x, end);
7477 		}
7478 	      gimple *g
7479 		= gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
7480 				       TYPE_SIZE_UNIT (TREE_TYPE (type)));
7481 	      gimple_seq_add_stmt (end, g);
7482 	      g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
7483 				       TYPE_SIZE_UNIT (TREE_TYPE (type)));
7484 	      gimple_seq_add_stmt (end, g);
7485 	      g = gimple_build_assign (i, PLUS_EXPR, i,
7486 				       build_int_cst (TREE_TYPE (i), 1));
7487 	      gimple_seq_add_stmt (end, g);
7488 	      g = gimple_build_cond (LE_EXPR, i, v, body, endl);
7489 	      gimple_seq_add_stmt (end, g);
7490 	      gimple_seq_add_stmt (end, gimple_build_label (endl));
7491 	    }
7492 	  else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
7493 	    {
7494 	      tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
7495 	      tree oldv = NULL_TREE;
7496 	      tree lab6 = NULL_TREE;
7497 	      if (cancellable)
7498 		{
7499 		  /* If this reduction needs destruction and parallel
7500 		     has been cancelled, jump around the merge operation
7501 		     to the destruction.  */
7502 		  tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
7503 		  lab6 = create_artificial_label (UNKNOWN_LOCATION);
7504 		  tree zero = build_zero_cst (TREE_TYPE (cancellable));
7505 		  g = gimple_build_cond (NE_EXPR, cancellable, zero,
7506 					 lab6, lab5);
7507 		  gimple_seq_add_stmt (end, g);
7508 		  gimple_seq_add_stmt (end, gimple_build_label (lab5));
7509 		}
7510 	      if (omp_is_reference (decl)
7511 		  && !useless_type_conversion_p (TREE_TYPE (placeholder),
7512 						 TREE_TYPE (ref)))
7513 		ref = build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c), ref);
7514 	      ref = build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c), ref);
7515 	      tree refv = create_tmp_var (TREE_TYPE (ref));
7516 	      gimplify_assign (refv, ref, end);
7517 	      ref = build_simple_mem_ref_loc (OMP_CLAUSE_LOCATION (c), refv);
7518 	      SET_DECL_VALUE_EXPR (placeholder, ref);
7519 	      DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
7520 	      tree d = maybe_lookup_decl (decl, ctx);
7521 	      gcc_assert (d);
7522 	      if (DECL_HAS_VALUE_EXPR_P (d))
7523 		oldv = DECL_VALUE_EXPR (d);
7524 	      if (omp_is_reference (var))
7525 		{
7526 		  tree v = fold_convert (TREE_TYPE (d),
7527 					 build_fold_addr_expr (new_var));
7528 		  SET_DECL_VALUE_EXPR (d, v);
7529 		}
7530 	      else
7531 		SET_DECL_VALUE_EXPR (d, new_var);
7532 	      DECL_HAS_VALUE_EXPR_P (d) = 1;
7533 	      lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
7534 	      if (oldv)
7535 		SET_DECL_VALUE_EXPR (d, oldv);
7536 	      else
7537 		{
7538 		  SET_DECL_VALUE_EXPR (d, NULL_TREE);
7539 		  DECL_HAS_VALUE_EXPR_P (d) = 0;
7540 		}
7541 	      gimple_seq_add_seq (end, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
7542 	      OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
7543 	      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
7544 		OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
7545 	      if (cancellable)
7546 		gimple_seq_add_stmt (end, gimple_build_label (lab6));
7547 	      tree x = lang_hooks.decls.omp_clause_dtor (c, new_var);
7548 	      if (x)
7549 		{
7550 		  gimple_seq tseq = NULL;
7551 		  gimplify_stmt (&x, &tseq);
7552 		  gimple_seq_add_seq (end, tseq);
7553 		}
7554 	    }
7555 	  else
7556 	    {
7557 	      tree x = build2 (rcode, TREE_TYPE (ref), ref, new_var);
7558 	      ref = unshare_expr (ref);
7559 	      gimplify_assign (ref, x, end);
7560 	    }
7561 	  gimple_seq_add_stmt (end, gimple_build_label (lab4));
7562 	  ++cnt;
7563 	  field = DECL_CHAIN (bfield);
7564 	}
7565     }
7566 
7567   if (code == OMP_TASKGROUP)
7568     {
7569       t = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_REGISTER);
7570       g = gimple_build_call (t, 1, build_fold_addr_expr (avar));
7571       gimple_seq_add_stmt (start, g);
7572     }
7573   else
7574     {
7575       tree c;
7576       if (code == OMP_FOR)
7577 	c = gimple_omp_for_clauses (ctx->stmt);
7578       else if (code == OMP_SECTIONS)
7579 	c = gimple_omp_sections_clauses (ctx->stmt);
7580       else
7581 	c = gimple_omp_taskreg_clauses (ctx->stmt);
7582       c = omp_find_clause (c, OMP_CLAUSE__REDUCTEMP_);
7583       t = fold_convert (TREE_TYPE (OMP_CLAUSE_DECL (c)),
7584 			build_fold_addr_expr (avar));
7585       gimplify_assign (OMP_CLAUSE_DECL (c), t, start);
7586     }
7587 
7588   gimple_seq_add_stmt (end, gimple_build_assign (data, PLUS_EXPR, data, sz));
7589   gimple_seq_add_stmt (end, gimple_build_assign (idx, PLUS_EXPR, idx,
7590 						 size_one_node));
7591   g = gimple_build_cond (NE_EXPR, idx, num_thr_sz, lab1, lab2);
7592   gimple_seq_add_stmt (end, g);
7593   gimple_seq_add_stmt (end, gimple_build_label (lab2));
7594   if (code == OMP_FOR || code == OMP_SECTIONS)
7595     {
7596       enum built_in_function bfn
7597 	= BUILT_IN_GOMP_WORKSHARE_TASK_REDUCTION_UNREGISTER;
7598       t = builtin_decl_explicit (bfn);
7599       tree c_bool_type = TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (t)));
7600       tree arg;
7601       if (cancellable)
7602 	{
7603 	  arg = create_tmp_var (c_bool_type);
7604 	  gimple_seq_add_stmt (end, gimple_build_assign (arg, NOP_EXPR,
7605 							 cancellable));
7606 	}
7607       else
7608 	arg = build_int_cst (c_bool_type, 0);
7609       g = gimple_build_call (t, 1, arg);
7610     }
7611   else
7612     {
7613       t = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_UNREGISTER);
7614       g = gimple_build_call (t, 1, build_fold_addr_expr (avar));
7615     }
7616   gimple_seq_add_stmt (end, g);
7617   t = build_constructor (atype, NULL);
7618   TREE_THIS_VOLATILE (t) = 1;
7619   gimple_seq_add_stmt (end, gimple_build_assign (avar, t));
7620 }
7621 
/* Expand code for an OpenMP taskgroup directive.  Replaces the
   GIMPLE_OMP_TASKGROUP statement with a GIMPLE_BIND that calls
   GOMP_taskgroup_start, registers any task reductions, runs the lowered
   body, emits the OMP return, and finally appends the task-reduction
   teardown sequence.  */

static void
lower_omp_taskgroup (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  gimple *stmt = gsi_stmt (*gsi_p);
  gcall *x;
  gbind *bind;
  /* Receives the reduction/teardown code produced by
     lower_omp_task_reductions; emitted after the OMP return below.  */
  gimple_seq dseq = NULL;
  tree block = make_node (BLOCK);

  /* Replace the taskgroup statement with a bind and re-add the statement
     inside it, so everything we emit stays within the new bind.  */
  bind = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, bind, true);
  gimple_bind_add_stmt (bind, stmt);

  push_gimplify_context ();

  /* Call GOMP_taskgroup_start () before the body.  */
  x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_START),
			 0);
  gimple_bind_add_stmt (bind, x);

  /* Registration code for task_reduction clauses goes into the bind body
     (before the lowered taskgroup body); the matching reduction merge and
     destruction code is collected into DSEQ.  */
  lower_omp_task_reductions (ctx, OMP_TASKGROUP,
			     gimple_omp_taskgroup_clauses (stmt),
			     gimple_bind_body_ptr (bind), &dseq);

  lower_omp (gimple_omp_body_ptr (stmt), ctx);
  gimple_bind_add_seq (bind, gimple_omp_body (stmt));
  gimple_omp_set_body (stmt, NULL);

  gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
  gimple_bind_add_seq (bind, dseq);

  pop_gimplify_context (bind);

  gimple_bind_append_vars (bind, ctx->block_vars);
  BLOCK_VARS (block) = ctx->block_vars;
}
7659 
7660 
/* Fold the OMP_ORDERED_CLAUSES for the OMP_ORDERED in STMT if possible.
   First merges depend(sink:...) clauses from adjacent ordered constructs
   into ORD_STMT, then folds all sink vectors into a single representative
   clause (GCD of the first dimension, lexicographic minimum of the rest).
   Bails out early if the enclosing construct is not an ordered OMP for.  */

static void
lower_omp_ordered_clauses (gimple_stmt_iterator *gsi_p, gomp_ordered *ord_stmt,
			   omp_context *ctx)
{
  struct omp_for_data fd;
  if (!ctx->outer || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR)
    return;

  unsigned int len = gimple_omp_for_collapse (ctx->outer->stmt);
  struct omp_for_data_loop *loops = XALLOCAVEC (struct omp_for_data_loop, len);
  omp_extract_for_data (as_a <gomp_for *> (ctx->outer->stmt), &fd, loops);
  if (!fd.ordered)
    return;

  tree *list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
  tree c = gimple_omp_ordered_clauses (ord_stmt);
  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
      && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
    {
      /* Merge depend clauses from multiple adjacent
	 #pragma omp ordered depend(sink:...) constructs
	 into one #pragma omp ordered depend(sink:...), so that
	 we can optimize them together.  */
      gimple_stmt_iterator gsi = *gsi_p;
      gsi_next (&gsi);
      while (!gsi_end_p (gsi))
	{
	  gimple *stmt = gsi_stmt (gsi);
	  /* Skip over debug stmts and nops between the ordered stmts.  */
	  if (is_gimple_debug (stmt)
	      || gimple_code (stmt) == GIMPLE_NOP)
	    {
	      gsi_next (&gsi);
	      continue;
	    }
	  if (gimple_code (stmt) != GIMPLE_OMP_ORDERED)
	    break;
	  gomp_ordered *ord_stmt2 = as_a <gomp_ordered *> (stmt);
	  c = gimple_omp_ordered_clauses (ord_stmt2);
	  if (c == NULL_TREE
	      || OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND
	      || OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
	    break;
	  /* Splice the following construct's clauses onto the end of
	     ORD_STMT's clause chain and delete that construct.  */
	  while (*list_p)
	    list_p = &OMP_CLAUSE_CHAIN (*list_p);
	  *list_p = c;
	  gsi_remove (&gsi, true);
	}
    }

  /* Canonicalize sink dependence clauses into one folded clause if
     possible.

     The basic algorithm is to create a sink vector whose first
     element is the GCD of all the first elements, and whose remaining
     elements are the minimum of the subsequent columns.

     We ignore dependence vectors whose first element is zero because
     such dependencies are known to be executed by the same thread.

     We take into account the direction of the loop, so a minimum
     becomes a maximum if the loop is iterating forwards.  We also
     ignore sink clauses where the loop direction is unknown, or where
     the offsets are clearly invalid because they are not a multiple
     of the loop increment.

     For example:

	#pragma omp for ordered(2)
	for (i=0; i < N; ++i)
	  for (j=0; j < M; ++j)
	    {
	      #pragma omp ordered \
		depend(sink:i-8,j-2) \
		depend(sink:i,j-1) \	// Completely ignored because i+0.
		depend(sink:i-4,j-3) \
		depend(sink:i-6,j-4)
	      #pragma omp ordered depend(source)
	    }

     Folded clause is:

	depend(sink:-gcd(8,4,6),-min(2,3,4))
	  -or-
	depend(sink:-2,-2)
  */

  /* FIXME: Computing GCD's where the first element is zero is
     non-trivial in the presence of collapsed loops.  Do this later.  */
  if (fd.collapse > 1)
    return;

  /* folded_deps[0 .. len-1] holds the vector being folded so far;
     folded_deps[len .. 2*len-2] temporarily holds the current clause's
     offsets for dimensions 1 .. len-1 so they can be copied over if the
     current clause becomes the new candidate (see below).  */
  wide_int *folded_deps = XALLOCAVEC (wide_int, 2 * len - 1);

  /* wide_int is not a POD so it must be default-constructed.  */
  for (unsigned i = 0; i != 2 * len - 1; ++i)
    new (static_cast<void*>(folded_deps + i)) wide_int ();

  /* The clause whose vector currently represents the fold; its
     TREE_PURPOSE is rewritten at the end.  */
  tree folded_dep = NULL_TREE;
  /* TRUE if the first dimension's offset is negative.  */
  bool neg_offset_p = false;

  /* Walk every clause; sink clauses are folded into FOLDED_DEP and
     removed, other depend kinds are kept as-is.  */
  list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
  unsigned int i;
  while ((c = *list_p) != NULL)
    {
      bool remove = false;

      gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND);
      if (OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
	goto next_ordered_clause;

      tree vec;
      for (vec = OMP_CLAUSE_DECL (c), i = 0;
	   vec && TREE_CODE (vec) == TREE_LIST;
	   vec = TREE_CHAIN (vec), ++i)
	{
	  gcc_assert (i < len);

	  /* omp_extract_for_data has canonicalized the condition.  */
	  gcc_assert (fd.loops[i].cond_code == LT_EXPR
		      || fd.loops[i].cond_code == GT_EXPR);
	  bool forward = fd.loops[i].cond_code == LT_EXPR;
	  bool maybe_lexically_later = true;

	  /* While the committee makes up its mind, bail if we have any
	     non-constant steps.  */
	  if (TREE_CODE (fd.loops[i].step) != INTEGER_CST)
	    goto lower_omp_ordered_ret;

	  tree itype = TREE_TYPE (TREE_VALUE (vec));
	  /* Pointer-typed iterators use sizetype offsets.  */
	  if (POINTER_TYPE_P (itype))
	    itype = sizetype;
	  wide_int offset = wide_int::from (wi::to_wide (TREE_PURPOSE (vec)),
					    TYPE_PRECISION (itype),
					    TYPE_SIGN (itype));

	  /* Ignore invalid offsets that are not multiples of the step.  */
	  if (!wi::multiple_of_p (wi::abs (offset),
				  wi::abs (wi::to_wide (fd.loops[i].step)),
				  UNSIGNED))
	    {
	      warning_at (OMP_CLAUSE_LOCATION (c), 0,
			  "ignoring sink clause with offset that is not "
			  "a multiple of the loop step");
	      remove = true;
	      goto next_ordered_clause;
	    }

	  /* Calculate the first dimension.  The first dimension of
	     the folded dependency vector is the GCD of the first
	     elements, while ignoring any first elements whose offset
	     is 0.  */
	  if (i == 0)
	    {
	      /* Ignore dependence vectors whose first dimension is 0.  */
	      if (offset == 0)
		{
		  remove = true;
		  goto next_ordered_clause;
		}
	      else
		{
		  if (!TYPE_UNSIGNED (itype) && (forward ^ wi::neg_p (offset)))
		    {
		      error_at (OMP_CLAUSE_LOCATION (c),
				"first offset must be in opposite direction "
				"of loop iterations");
		      goto lower_omp_ordered_ret;
		    }
		  /* Normalize so folded_deps[0] is always non-negative;
		     the sign is restored at the end via NEG_OFFSET_P.  */
		  if (forward)
		    offset = -offset;
		  neg_offset_p = forward;
		  /* Initialize the first time around.  */
		  if (folded_dep == NULL_TREE)
		    {
		      folded_dep = c;
		      folded_deps[0] = offset;
		    }
		  else
		    folded_deps[0] = wi::gcd (folded_deps[0],
					      offset, UNSIGNED);
		}
	    }
	  /* Calculate minimum for the remaining dimensions.  */
	  else
	    {
	      folded_deps[len + i - 1] = offset;
	      if (folded_dep == c)
		folded_deps[i] = offset;
	      else if (maybe_lexically_later
		       && !wi::eq_p (folded_deps[i], offset))
		{
		  if (forward ^ wi::gts_p (folded_deps[i], offset))
		    {
		      /* C's vector is lexically later: adopt it as the new
			 candidate, copying over the offsets seen so far.  */
		      unsigned int j;
		      folded_dep = c;
		      for (j = 1; j <= i; j++)
			folded_deps[j] = folded_deps[len + j - 1];
		    }
		  else
		    maybe_lexically_later = false;
		}
	    }
	}
      gcc_assert (i == len);

      /* A fully processed sink clause has been folded; drop it.  */
      remove = true;

    next_ordered_clause:
      if (remove)
	*list_p = OMP_CLAUSE_CHAIN (c);
      else
	list_p = &OMP_CLAUSE_CHAIN (c);
    }

  /* Re-attach the surviving folded clause at the head of the chain with
     its first dimension rewritten to the folded (possibly negated) GCD.  */
  if (folded_dep)
    {
      if (neg_offset_p)
	folded_deps[0] = -folded_deps[0];

      tree itype = TREE_TYPE (TREE_VALUE (OMP_CLAUSE_DECL (folded_dep)));
      if (POINTER_TYPE_P (itype))
	itype = sizetype;

      TREE_PURPOSE (OMP_CLAUSE_DECL (folded_dep))
	= wide_int_to_tree (itype, folded_deps[0]);
      OMP_CLAUSE_CHAIN (folded_dep) = gimple_omp_ordered_clauses (ord_stmt);
      *gimple_omp_ordered_clauses_ptr (ord_stmt) = folded_dep;
    }

 lower_omp_ordered_ret:

  /* Ordered without clauses is #pragma omp ordered threads, while we want
     a nop instead if we remove all clauses.  */
  if (gimple_omp_ordered_clauses (ord_stmt) == NULL_TREE)
    gsi_replace (gsi_p, gimple_build_nop (), true);
}
7900 
7901 
/* Expand code for an OpenMP ordered directive.  Wraps the lowered body in
   GOMP_ordered_start/GOMP_ordered_end calls (or the IFN_GOMP_SIMD_ORDERED_*
   internal functions when a simd clause is present), and for possibly-SIMT
   targets additionally emits a per-lane serialization loop.  Ordered with a
   depend clause is left alone here and handled during expansion.  */

static void
lower_omp_ordered (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree block;
  gimple *stmt = gsi_stmt (*gsi_p), *g;
  gomp_ordered *ord_stmt = as_a <gomp_ordered *> (stmt);
  gcall *x;
  gbind *bind;
  bool simd = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
			       OMP_CLAUSE_SIMD);
  /* FIXME: this should check presence of OMP_CLAUSE__SIMT_ on the enclosing
     loop.  */
  bool maybe_simt
    = simd && omp_maybe_offloaded_ctx (ctx) && omp_max_simt_vf () > 1;
  bool threads = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
				  OMP_CLAUSE_THREADS);

  if (omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
		       OMP_CLAUSE_DEPEND))
    {
      /* FIXME: This needs to be moved to the expansion to verify various
	 conditions only testable on cfg with dominators computed, and also
	 all the depend clauses to be merged still might need to be available
	 for the runtime checks.  */
      if (0)
	lower_omp_ordered_clauses (gsi_p, ord_stmt, ctx);
      return;
    }

  push_gimplify_context ();

  /* Replace the ordered statement with a bind and re-add it inside.  */
  block = make_node (BLOCK);
  bind = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, bind, true);
  gimple_bind_add_stmt (bind, stmt);

  if (simd)
    {
      /* simd variant: use the internal fn so the vectorizer can see it;
	 THREADS distinguishes ordered threads from ordered simd.  */
      x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_START, 1,
				      build_int_cst (NULL_TREE, threads));
      cfun->has_simduid_loops = true;
    }
  else
    x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_START),
			   0);
  gimple_bind_add_stmt (bind, x);

  tree counter = NULL_TREE, test = NULL_TREE, body = NULL_TREE;
  if (maybe_simt)
    {
      /* On SIMT targets, build a loop over the lanes: each iteration,
	 IFN_GOMP_SIMT_ORDERED_PRED selects whether the current lane
	 executes the body (predicate zero) or skips to TEST.  */
      counter = create_tmp_var (integer_type_node);
      g = gimple_build_call_internal (IFN_GOMP_SIMT_LANE, 0);
      gimple_call_set_lhs (g, counter);
      gimple_bind_add_stmt (bind, g);

      body = create_artificial_label (UNKNOWN_LOCATION);
      test = create_artificial_label (UNKNOWN_LOCATION);
      gimple_bind_add_stmt (bind, gimple_build_label (body));

      tree simt_pred = create_tmp_var (integer_type_node);
      g = gimple_build_call_internal (IFN_GOMP_SIMT_ORDERED_PRED, 1, counter);
      gimple_call_set_lhs (g, simt_pred);
      gimple_bind_add_stmt (bind, g);

      tree t = create_artificial_label (UNKNOWN_LOCATION);
      g = gimple_build_cond (EQ_EXPR, simt_pred, integer_zero_node, t, test);
      gimple_bind_add_stmt (bind, g);

      gimple_bind_add_stmt (bind, gimple_build_label (t));
    }
  lower_omp (gimple_omp_body_ptr (stmt), ctx);
  gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
  gimple_bind_add_seq (bind, gimple_omp_body (stmt));
  gimple_omp_set_body (stmt, NULL);

  if (maybe_simt)
    {
      /* Loop back-edge: decrement COUNTER, and keep iterating while any
	 lane still has a non-negative counter (IFN_GOMP_SIMT_VOTE_ANY).  */
      gimple_bind_add_stmt (bind, gimple_build_label (test));
      g = gimple_build_assign (counter, MINUS_EXPR, counter, integer_one_node);
      gimple_bind_add_stmt (bind, g);

      tree c = build2 (GE_EXPR, boolean_type_node, counter, integer_zero_node);
      tree nonneg = create_tmp_var (integer_type_node);
      gimple_seq tseq = NULL;
      gimplify_assign (nonneg, fold_convert (integer_type_node, c), &tseq);
      gimple_bind_add_seq (bind, tseq);

      g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY, 1, nonneg);
      gimple_call_set_lhs (g, nonneg);
      gimple_bind_add_stmt (bind, g);

      tree end = create_artificial_label (UNKNOWN_LOCATION);
      g = gimple_build_cond (NE_EXPR, nonneg, integer_zero_node, body, end);
      gimple_bind_add_stmt (bind, g);

      gimple_bind_add_stmt (bind, gimple_build_label (end));
    }
  if (simd)
    x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_END, 1,
				    build_int_cst (NULL_TREE, threads));
  else
    x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_END),
			   0);
  gimple_bind_add_stmt (bind, x);

  gimple_bind_add_stmt (bind, gimple_build_omp_return (true));

  pop_gimplify_context (bind);

  gimple_bind_append_vars (bind, ctx->block_vars);
  BLOCK_VARS (block) = gimple_bind_vars (bind);
}
8016 
8017 
/* Gimplify a GIMPLE_OMP_CRITICAL statement.  This is a relatively simple
   substitution of a couple of function calls.  But in the NAMED case,
   it requires that languages coordinate a symbol name.  It is therefore
   best put here in common code.  */

/* Map from a critical section's name (an IDENTIFIER_NODE) to the
   artificial ".gomp_critical_user_<name>" mutex variable created for
   it.  GTY(()) registers the table as a garbage-collection root so the
   mapping survives across functions.  */
static GTY(()) hash_map<tree, tree> *critical_name_mutexes;
8024 
static void
lower_omp_critical (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree block;
  tree name, lock, unlock;
  gomp_critical *stmt = as_a <gomp_critical *> (gsi_stmt (*gsi_p));
  gbind *bind;
  location_t loc = gimple_location (stmt);
  gimple_seq tbody;

  name = gimple_omp_critical_name (stmt);
  if (name)
    {
      tree decl;

      /* Lazily create the name -> mutex map on first use.  */
      if (!critical_name_mutexes)
	critical_name_mutexes = hash_map<tree, tree>::create_ggc (10);

      tree *n = critical_name_mutexes->get (name);
      if (n == NULL)
	{
	  char *new_str;

	  decl = create_tmp_var_raw (ptr_type_node);

	  new_str = ACONCAT ((".gomp_critical_user_",
			      IDENTIFIER_POINTER (name), NULL));
	  DECL_NAME (decl) = get_identifier (new_str);
	  /* Public + common so that every translation unit using the
	     same critical name links against a single mutex symbol.  */
	  TREE_PUBLIC (decl) = 1;
	  TREE_STATIC (decl) = 1;
	  DECL_COMMON (decl) = 1;
	  DECL_ARTIFICIAL (decl) = 1;
	  DECL_IGNORED_P (decl) = 1;

	  varpool_node::finalize_decl (decl);

	  critical_name_mutexes->put (name, decl);
	}
      else
	decl = *n;

      /* If '#pragma omp critical' is inside offloaded region or
	 inside function marked as offloadable, the symbol must be
	 marked as offloadable too.  */
      omp_context *octx;
      if (cgraph_node::get (current_function_decl)->offloadable)
	varpool_node::get_create (decl)->offloadable = 1;
      else
	for (octx = ctx->outer; octx; octx = octx->outer)
	  if (is_gimple_omp_offloaded (octx->stmt))
	    {
	      varpool_node::get_create (decl)->offloadable = 1;
	      break;
	    }

      /* Named critical: the runtime entry points take the address of
	 the per-name mutex variable.  */
      lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_START);
      lock = build_call_expr_loc (loc, lock, 1,
				  build_fold_addr_expr_loc (loc, decl));

      unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_END);
      unlock = build_call_expr_loc (loc, unlock, 1,
				build_fold_addr_expr_loc (loc, decl));
    }
  else
    {
      /* Unnamed critical: argument-less start/end calls.  */
      lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_START);
      lock = build_call_expr_loc (loc, lock, 0);

      unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_END);
      unlock = build_call_expr_loc (loc, unlock, 0);
    }

  push_gimplify_context ();

  /* Replace the directive with a bind of the shape:
     lock; <lowered body>; unlock; OMP_RETURN.  */
  block = make_node (BLOCK);
  bind = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, bind, true);
  gimple_bind_add_stmt (bind, stmt);

  tbody = gimple_bind_body (bind);
  gimplify_and_add (lock, &tbody);
  gimple_bind_set_body (bind, tbody);

  lower_omp (gimple_omp_body_ptr (stmt), ctx);
  gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
  gimple_bind_add_seq (bind, gimple_omp_body (stmt));
  gimple_omp_set_body (stmt, NULL);

  tbody = gimple_bind_body (bind);
  gimplify_and_add (unlock, &tbody);
  gimple_bind_set_body (bind, tbody);

  gimple_bind_add_stmt (bind, gimple_build_omp_return (true));

  pop_gimplify_context (bind);
  gimple_bind_append_vars (bind, ctx->block_vars);
  BLOCK_VARS (block) = gimple_bind_vars (bind);
}
8123 
/* A subroutine of lower_omp_for.  Generate code to emit the predicate
   for a lastprivate clause.  Given a loop control predicate of (V
   cond N2), we gate the clause on (!(V cond N2)).  The lowered form
   is appended to *DLIST, iterator initialization is appended to
   *BODY_P.  */

static void
lower_omp_for_lastprivate (struct omp_for_data *fd, gimple_seq *body_p,
			   gimple_seq *dlist, struct omp_context *ctx)
{
  tree clauses, cond, vinit;
  enum tree_code cond_code;
  gimple_seq stmts;

  /* Invert the loop condition: lastprivate copy-out must fire only for
     the logically last iteration.  */
  cond_code = fd->loop.cond_code;
  cond_code = cond_code == LT_EXPR ? GE_EXPR : LE_EXPR;

  /* When possible, use a strict equality expression.  This can let VRP
     type optimizations deduce the value and remove a copy.  */
  if (tree_fits_shwi_p (fd->loop.step))
    {
      HOST_WIDE_INT step = tree_to_shwi (fd->loop.step);
      if (step == 1 || step == -1)
	cond_code = EQ_EXPR;
    }

  if (gimple_omp_for_kind (fd->for_stmt) == GF_OMP_FOR_KIND_GRID_LOOP
      || gimple_omp_for_grid_phony (fd->for_stmt))
    cond = omp_grid_lastprivate_predicate (fd);
  else
    {
      tree n2 = fd->loop.n2;
      if (fd->collapse > 1
	  && TREE_CODE (n2) != INTEGER_CST
	  && gimple_omp_for_combined_into_p (fd->for_stmt))
	{
	  /* For a combined construct with a non-constant end value, the
	     value to compare against lives either in the enclosing
	     taskreg construct's _looptemp_ clauses or in the outer
	     for's own extracted bound.  */
	  struct omp_context *taskreg_ctx = NULL;
	  if (gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
	    {
	      gomp_for *gfor = as_a <gomp_for *> (ctx->outer->stmt);
	      if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_FOR
		  || gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_DISTRIBUTE)
		{
		  if (gimple_omp_for_combined_into_p (gfor))
		    {
		      gcc_assert (ctx->outer->outer
				  && is_parallel_ctx (ctx->outer->outer));
		      taskreg_ctx = ctx->outer->outer;
		    }
		  else
		    {
		      struct omp_for_data outer_fd;
		      omp_extract_for_data (gfor, &outer_fd, NULL);
		      n2 = fold_convert (TREE_TYPE (n2), outer_fd.loop.n2);
		    }
		}
	      else if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_TASKLOOP)
		taskreg_ctx = ctx->outer->outer;
	    }
	  else if (is_taskreg_ctx (ctx->outer))
	    taskreg_ctx = ctx->outer;
	  if (taskreg_ctx)
	    {
	      int i;
	      tree taskreg_clauses
		= gimple_omp_taskreg_clauses (taskreg_ctx->stmt);
	      tree innerc = omp_find_clause (taskreg_clauses,
					     OMP_CLAUSE__LOOPTEMP_);
	      gcc_assert (innerc);
	      /* Advance past the first fd->collapse + 1 _looptemp_
		 clauses; the next one, if present, supplies N2.  */
	      for (i = 0; i < fd->collapse; i++)
		{
		  innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
					    OMP_CLAUSE__LOOPTEMP_);
		  gcc_assert (innerc);
		}
	      innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
					OMP_CLAUSE__LOOPTEMP_);
	      if (innerc)
		n2 = fold_convert (TREE_TYPE (n2),
				   lookup_decl (OMP_CLAUSE_DECL (innerc),
						taskreg_ctx));
	    }
	}
      cond = build2 (cond_code, boolean_type_node, fd->loop.v, n2);
    }

  clauses = gimple_omp_for_clauses (fd->for_stmt);
  stmts = NULL;
  lower_lastprivate_clauses (clauses, cond, &stmts, ctx);
  if (!gimple_seq_empty_p (stmts))
    {
      gimple_seq_add_seq (&stmts, *dlist);
      *dlist = stmts;

      /* Optimize: v = 0; is usually cheaper than v = some_other_constant.  */
      vinit = fd->loop.n1;
      if (cond_code == EQ_EXPR
	  && tree_fits_shwi_p (fd->loop.n2)
	  && ! integer_zerop (fd->loop.n2))
	vinit = build_int_cst (TREE_TYPE (fd->loop.v), 0);
      else
	vinit = unshare_expr (vinit);

      /* Initialize the iterator variable, so that threads that don't execute
	 any iterations don't execute the lastprivate clauses by accident.  */
      gimplify_assign (fd->loop.v, vinit, body_p);
    }
}
8232 
8233 
8234 /* Lower code for an OMP loop directive.  */
8235 
8236 static void
8237 lower_omp_for (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8238 {
8239   tree *rhs_p, block;
8240   struct omp_for_data fd, *fdp = NULL;
8241   gomp_for *stmt = as_a <gomp_for *> (gsi_stmt (*gsi_p));
8242   gbind *new_stmt;
8243   gimple_seq omp_for_body, body, dlist, tred_ilist = NULL, tred_dlist = NULL;
8244   gimple_seq cnt_list = NULL;
8245   gimple_seq oacc_head = NULL, oacc_tail = NULL;
8246   size_t i;
8247 
8248   push_gimplify_context ();
8249 
8250   lower_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
8251 
8252   block = make_node (BLOCK);
8253   new_stmt = gimple_build_bind (NULL, NULL, block);
8254   /* Replace at gsi right away, so that 'stmt' is no member
8255      of a sequence anymore as we're going to add to a different
8256      one below.  */
8257   gsi_replace (gsi_p, new_stmt, true);
8258 
8259   /* Move declaration of temporaries in the loop body before we make
8260      it go away.  */
8261   omp_for_body = gimple_omp_body (stmt);
8262   if (!gimple_seq_empty_p (omp_for_body)
8263       && gimple_code (gimple_seq_first_stmt (omp_for_body)) == GIMPLE_BIND)
8264     {
8265       gbind *inner_bind
8266 	= as_a <gbind *> (gimple_seq_first_stmt (omp_for_body));
8267       tree vars = gimple_bind_vars (inner_bind);
8268       gimple_bind_append_vars (new_stmt, vars);
8269       /* bind_vars/BLOCK_VARS are being moved to new_stmt/block, don't
8270 	 keep them on the inner_bind and it's block.  */
8271       gimple_bind_set_vars (inner_bind, NULL_TREE);
8272       if (gimple_bind_block (inner_bind))
8273 	BLOCK_VARS (gimple_bind_block (inner_bind)) = NULL_TREE;
8274     }
8275 
8276   if (gimple_omp_for_combined_into_p (stmt))
8277     {
8278       omp_extract_for_data (stmt, &fd, NULL);
8279       fdp = &fd;
8280 
8281       /* We need two temporaries with fd.loop.v type (istart/iend)
8282 	 and then (fd.collapse - 1) temporaries with the same
8283 	 type for count2 ... countN-1 vars if not constant.  */
8284       size_t count = 2;
8285       tree type = fd.iter_type;
8286       if (fd.collapse > 1
8287 	  && TREE_CODE (fd.loop.n2) != INTEGER_CST)
8288 	count += fd.collapse - 1;
8289       bool taskreg_for
8290 	= (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR
8291 	   || gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP);
8292       tree outerc = NULL, *pc = gimple_omp_for_clauses_ptr (stmt);
8293       tree simtc = NULL;
8294       tree clauses = *pc;
8295       if (taskreg_for)
8296 	outerc
8297 	  = omp_find_clause (gimple_omp_taskreg_clauses (ctx->outer->stmt),
8298 			     OMP_CLAUSE__LOOPTEMP_);
8299       if (ctx->simt_stmt)
8300 	simtc = omp_find_clause (gimple_omp_for_clauses (ctx->simt_stmt),
8301 				 OMP_CLAUSE__LOOPTEMP_);
8302       for (i = 0; i < count; i++)
8303 	{
8304 	  tree temp;
8305 	  if (taskreg_for)
8306 	    {
8307 	      gcc_assert (outerc);
8308 	      temp = lookup_decl (OMP_CLAUSE_DECL (outerc), ctx->outer);
8309 	      outerc = omp_find_clause (OMP_CLAUSE_CHAIN (outerc),
8310 					OMP_CLAUSE__LOOPTEMP_);
8311 	    }
8312 	  else
8313 	    {
8314 	      /* If there are 2 adjacent SIMD stmts, one with _simt_
8315 		 clause, another without, make sure they have the same
8316 		 decls in _looptemp_ clauses, because the outer stmt
8317 		 they are combined into will look up just one inner_stmt.  */
8318 	      if (ctx->simt_stmt)
8319 		temp = OMP_CLAUSE_DECL (simtc);
8320 	      else
8321 		temp = create_tmp_var (type);
8322 	      insert_decl_map (&ctx->outer->cb, temp, temp);
8323 	    }
8324 	  *pc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
8325 	  OMP_CLAUSE_DECL (*pc) = temp;
8326 	  pc = &OMP_CLAUSE_CHAIN (*pc);
8327 	  if (ctx->simt_stmt)
8328 	    simtc = omp_find_clause (OMP_CLAUSE_CHAIN (simtc),
8329 				     OMP_CLAUSE__LOOPTEMP_);
8330 	}
8331       *pc = clauses;
8332     }
8333 
8334   /* The pre-body and input clauses go before the lowered GIMPLE_OMP_FOR.  */
8335   dlist = NULL;
8336   body = NULL;
8337   tree rclauses
8338     = omp_task_reductions_find_first (gimple_omp_for_clauses (stmt), OMP_FOR,
8339 				      OMP_CLAUSE_REDUCTION);
8340   tree rtmp = NULL_TREE;
8341   if (rclauses)
8342     {
8343       tree type = build_pointer_type (pointer_sized_int_node);
8344       tree temp = create_tmp_var (type);
8345       tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
8346       OMP_CLAUSE_DECL (c) = temp;
8347       OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (stmt);
8348       gimple_omp_for_set_clauses (stmt, c);
8349       lower_omp_task_reductions (ctx, OMP_FOR,
8350 				 gimple_omp_for_clauses (stmt),
8351 				 &tred_ilist, &tred_dlist);
8352       rclauses = c;
8353       rtmp = make_ssa_name (type);
8354       gimple_seq_add_stmt (&body, gimple_build_assign (rtmp, temp));
8355     }
8356 
8357   lower_rec_input_clauses (gimple_omp_for_clauses (stmt), &body, &dlist, ctx,
8358 			   fdp);
8359   gimple_seq_add_seq (rclauses ? &tred_ilist : &body,
8360 		      gimple_omp_for_pre_body (stmt));
8361 
8362   lower_omp (gimple_omp_body_ptr (stmt), ctx);
8363 
8364   /* Lower the header expressions.  At this point, we can assume that
8365      the header is of the form:
8366 
8367      	#pragma omp for (V = VAL1; V {<|>|<=|>=} VAL2; V = V [+-] VAL3)
8368 
8369      We just need to make sure that VAL1, VAL2 and VAL3 are lowered
8370      using the .omp_data_s mapping, if needed.  */
8371   for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
8372     {
8373       rhs_p = gimple_omp_for_initial_ptr (stmt, i);
8374       if (!is_gimple_min_invariant (*rhs_p))
8375 	*rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
8376       else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
8377 	recompute_tree_invariant_for_addr_expr (*rhs_p);
8378 
8379       rhs_p = gimple_omp_for_final_ptr (stmt, i);
8380       if (!is_gimple_min_invariant (*rhs_p))
8381 	*rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
8382       else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
8383 	recompute_tree_invariant_for_addr_expr (*rhs_p);
8384 
8385       rhs_p = &TREE_OPERAND (gimple_omp_for_incr (stmt, i), 1);
8386       if (!is_gimple_min_invariant (*rhs_p))
8387 	*rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
8388     }
8389   if (rclauses)
8390     gimple_seq_add_seq (&tred_ilist, cnt_list);
8391   else
8392     gimple_seq_add_seq (&body, cnt_list);
8393 
8394   /* Once lowered, extract the bounds and clauses.  */
8395   omp_extract_for_data (stmt, &fd, NULL);
8396 
8397   if (is_gimple_omp_oacc (ctx->stmt)
8398       && !ctx_in_oacc_kernels_region (ctx))
8399     lower_oacc_head_tail (gimple_location (stmt),
8400 			  gimple_omp_for_clauses (stmt),
8401 			  &oacc_head, &oacc_tail, ctx);
8402 
8403   /* Add OpenACC partitioning and reduction markers just before the loop.  */
8404   if (oacc_head)
8405     gimple_seq_add_seq (&body, oacc_head);
8406 
8407   lower_omp_for_lastprivate (&fd, &body, &dlist, ctx);
8408 
8409   if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
8410     for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
8411       if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
8412 	  && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
8413 	{
8414 	  OMP_CLAUSE_DECL (c) = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
8415 	  if (DECL_P (OMP_CLAUSE_LINEAR_STEP (c)))
8416 	    OMP_CLAUSE_LINEAR_STEP (c)
8417 	      = maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_LINEAR_STEP (c),
8418 						ctx);
8419 	}
8420 
8421   bool phony_loop = (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_GRID_LOOP
8422 		     && gimple_omp_for_grid_phony (stmt));
8423   if (!phony_loop)
8424     gimple_seq_add_stmt (&body, stmt);
8425   gimple_seq_add_seq (&body, gimple_omp_body (stmt));
8426 
8427   if (!phony_loop)
8428     gimple_seq_add_stmt (&body, gimple_build_omp_continue (fd.loop.v,
8429 							   fd.loop.v));
8430 
8431   /* After the loop, add exit clauses.  */
8432   lower_reduction_clauses (gimple_omp_for_clauses (stmt), &body, ctx);
8433 
8434   if (ctx->cancellable)
8435     gimple_seq_add_stmt (&body, gimple_build_label (ctx->cancel_label));
8436 
8437   gimple_seq_add_seq (&body, dlist);
8438 
8439   if (rclauses)
8440     {
8441       gimple_seq_add_seq (&tred_ilist, body);
8442       body = tred_ilist;
8443     }
8444 
8445   body = maybe_catch_exception (body);
8446 
8447   if (!phony_loop)
8448     {
8449       /* Region exit marker goes at the end of the loop body.  */
8450       gimple *g = gimple_build_omp_return (fd.have_nowait);
8451       gimple_seq_add_stmt (&body, g);
8452 
8453       gimple_seq_add_seq (&body, tred_dlist);
8454 
8455       maybe_add_implicit_barrier_cancel (ctx, g, &body);
8456 
8457       if (rclauses)
8458 	OMP_CLAUSE_DECL (rclauses) = rtmp;
8459     }
8460 
8461   /* Add OpenACC joining and reduction markers just after the loop.  */
8462   if (oacc_tail)
8463     gimple_seq_add_seq (&body, oacc_tail);
8464 
8465   pop_gimplify_context (new_stmt);
8466 
8467   gimple_bind_append_vars (new_stmt, ctx->block_vars);
8468   maybe_remove_omp_member_access_dummy_vars (new_stmt);
8469   BLOCK_VARS (block) = gimple_bind_vars (new_stmt);
8470   if (BLOCK_VARS (block))
8471     TREE_USED (block) = 1;
8472 
8473   gimple_bind_set_body (new_stmt, body);
8474   gimple_omp_set_body (stmt, NULL);
8475   gimple_omp_for_set_pre_body (stmt, NULL);
8476 }
8477 
8478 /* Callback for walk_stmts.  Check if the current statement only contains
8479    GIMPLE_OMP_FOR or GIMPLE_OMP_SECTIONS.  */
8480 
8481 static tree
8482 check_combined_parallel (gimple_stmt_iterator *gsi_p,
8483     			 bool *handled_ops_p,
8484     			 struct walk_stmt_info *wi)
8485 {
8486   int *info = (int *) wi->info;
8487   gimple *stmt = gsi_stmt (*gsi_p);
8488 
8489   *handled_ops_p = true;
8490   switch (gimple_code (stmt))
8491     {
8492     WALK_SUBSTMTS;
8493 
8494     case GIMPLE_DEBUG:
8495       break;
8496     case GIMPLE_OMP_FOR:
8497     case GIMPLE_OMP_SECTIONS:
8498       *info = *info == 0 ? 1 : -1;
8499       break;
8500     default:
8501       *info = -1;
8502       break;
8503     }
8504   return NULL;
8505 }
8506 
/* State threaded through the task-copyfn helpers below.  */
struct omp_taskcopy_context
{
  /* This field must be at the beginning, as we do "inheritance": Some
     callback functions for tree-inline.c (e.g., omp_copy_decl)
     receive a copy_body_data pointer that is up-casted to an
     omp_context pointer.  */
  copy_body_data cb;
  /* Lowering context of the task whose copy function is being built.  */
  omp_context *ctx;
};
8516 
8517 static tree
8518 task_copyfn_copy_decl (tree var, copy_body_data *cb)
8519 {
8520   struct omp_taskcopy_context *tcctx = (struct omp_taskcopy_context *) cb;
8521 
8522   if (splay_tree_lookup (tcctx->ctx->sfield_map, (splay_tree_key) var))
8523     return create_tmp_var (TREE_TYPE (var));
8524 
8525   return var;
8526 }
8527 
/* Build a fresh RECORD_TYPE mirroring ORIG_TYPE with every field's
   type, size and offset trees remapped through TCCTX->cb, recording
   the old->new field mapping in TCCTX->cb.decl_map for later
   component-ref lookups.  Returns the laid-out new type.  */
static tree
task_copyfn_remap_type (struct omp_taskcopy_context *tcctx, tree orig_type)
{
  tree name, new_fields = NULL, type, f;

  type = lang_hooks.types.make_type (RECORD_TYPE);
  name = DECL_NAME (TYPE_NAME (orig_type));
  name = build_decl (gimple_location (tcctx->ctx->stmt),
		     TYPE_DECL, name, type);
  TYPE_NAME (type) = name;

  for (f = TYPE_FIELDS (orig_type); f ; f = TREE_CHAIN (f))
    {
      tree new_f = copy_node (f);
      DECL_CONTEXT (new_f) = type;
      TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &tcctx->cb);
      /* Fields are chained in reverse here; nreverse below restores
	 the original order.  */
      TREE_CHAIN (new_f) = new_fields;
      walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &tcctx->cb, NULL);
      walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r, &tcctx->cb, NULL);
      walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
		 &tcctx->cb, NULL);
      new_fields = new_f;
      tcctx->cb.decl_map->put (f, new_f);
    }
  TYPE_FIELDS (type) = nreverse (new_fields);
  layout_type (type);
  return type;
}
8556 
/* Create task copyfn.  Populates the body of TASK_STMT's copy function
   (created earlier and fetched via gimple_omp_task_copy_fn) with code
   that copies data from the sender record (second argument) into the
   task's own record (first argument), according to CTX's clauses.  */

static void
create_task_copyfn (gomp_task *task_stmt, omp_context *ctx)
{
  struct function *child_cfun;
  tree child_fn, t, c, src, dst, f, sf, arg, sarg, decl;
  tree record_type, srecord_type, bind, list;
  bool record_needs_remap = false, srecord_needs_remap = false;
  splay_tree_node n;
  struct omp_taskcopy_context tcctx;
  location_t loc = gimple_location (task_stmt);
  size_t looptempno = 0;

  child_fn = gimple_omp_task_copy_fn (task_stmt);
  child_cfun = DECL_STRUCT_FUNCTION (child_fn);
  gcc_assert (child_cfun->cfg == NULL);
  DECL_SAVED_TREE (child_fn) = alloc_stmt_list ();

  /* Reset DECL_CONTEXT on function arguments.  */
  for (t = DECL_ARGUMENTS (child_fn); t; t = DECL_CHAIN (t))
    DECL_CONTEXT (t) = child_fn;

  /* Populate the function.  */
  push_gimplify_context ();
  push_cfun (child_cfun);

  bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
  TREE_SIDE_EFFECTS (bind) = 1;
  list = NULL;
  DECL_SAVED_TREE (child_fn) = bind;
  DECL_SOURCE_LOCATION (child_fn) = gimple_location (task_stmt);

  /* Remap src and dst argument types if needed.  A remap is needed when
     any field has a variably-modified type (e.g. a VLA) referencing
     decls of the source function.  */
  record_type = ctx->record_type;
  srecord_type = ctx->srecord_type;
  for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
    if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
      {
	record_needs_remap = true;
	break;
      }
  for (f = TYPE_FIELDS (srecord_type); f ; f = DECL_CHAIN (f))
    if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
      {
	srecord_needs_remap = true;
	break;
      }

  if (record_needs_remap || srecord_needs_remap)
    {
      /* Set up a copy_body_data for task_copyfn_remap_type /
	 task_copyfn_copy_decl.  tcctx.cb.decl_map doubles as the flag
	 telling the passes below whether remapping happened.  */
      memset (&tcctx, '\0', sizeof (tcctx));
      tcctx.cb.src_fn = ctx->cb.src_fn;
      tcctx.cb.dst_fn = child_fn;
      tcctx.cb.src_node = cgraph_node::get (tcctx.cb.src_fn);
      gcc_checking_assert (tcctx.cb.src_node);
      tcctx.cb.dst_node = tcctx.cb.src_node;
      tcctx.cb.src_cfun = ctx->cb.src_cfun;
      tcctx.cb.copy_decl = task_copyfn_copy_decl;
      tcctx.cb.eh_lp_nr = 0;
      tcctx.cb.transform_call_graph_edges = CB_CGE_MOVE;
      tcctx.cb.decl_map = new hash_map<tree, tree>;
      tcctx.ctx = ctx;

      if (record_needs_remap)
	record_type = task_copyfn_remap_type (&tcctx, record_type);
      if (srecord_needs_remap)
	srecord_type = task_copyfn_remap_type (&tcctx, srecord_type);
    }
  else
    tcctx.cb.decl_map = NULL;

  /* ARG points at the destination record, SARG at the sender record.  */
  arg = DECL_ARGUMENTS (child_fn);
  TREE_TYPE (arg) = build_pointer_type (record_type);
  sarg = DECL_CHAIN (arg);
  TREE_TYPE (sarg) = build_pointer_type (srecord_type);

  /* First pass: initialize temporaries used in record_type and srecord_type
     sizes and field offsets.  */
  if (tcctx.cb.decl_map)
    for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
	{
	  tree *p;

	  decl = OMP_CLAUSE_DECL (c);
	  p = tcctx.cb.decl_map->get (decl);
	  if (p == NULL)
	    continue;
	  n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
	  sf = (tree) n->value;
	  sf = *tcctx.cb.decl_map->get (sf);
	  src = build_simple_mem_ref_loc (loc, sarg);
	  src = omp_build_component_ref (src, sf);
	  t = build2 (MODIFY_EXPR, TREE_TYPE (*p), *p, src);
	  append_to_statement_list (t, &list);
	}

  /* Second pass: copy shared var pointers and copy construct non-VLA
     firstprivate vars.  */
  for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
    switch (OMP_CLAUSE_CODE (c))
      {
	splay_tree_key key;
      case OMP_CLAUSE_SHARED:
	decl = OMP_CLAUSE_DECL (c);
	key = (splay_tree_key) decl;
	/* Shared-firstprivate decls are keyed by &DECL_UID instead of
	   the decl itself.  */
	if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	  key = (splay_tree_key) &DECL_UID (decl);
	n = splay_tree_lookup (ctx->field_map, key);
	if (n == NULL)
	  break;
	f = (tree) n->value;
	if (tcctx.cb.decl_map)
	  f = *tcctx.cb.decl_map->get (f);
	n = splay_tree_lookup (ctx->sfield_map, key);
	sf = (tree) n->value;
	if (tcctx.cb.decl_map)
	  sf = *tcctx.cb.decl_map->get (sf);
	src = build_simple_mem_ref_loc (loc, sarg);
	src = omp_build_component_ref (src, sf);
	dst = build_simple_mem_ref_loc (loc, arg);
	dst = omp_build_component_ref (dst, f);
	t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
	append_to_statement_list (t, &list);
	break;
      case OMP_CLAUSE_REDUCTION:
      case OMP_CLAUSE_IN_REDUCTION:
	decl = OMP_CLAUSE_DECL (c);
	/* Peel MEM_REF / POINTER_PLUS_EXPR / INDIRECT_REF / ADDR_EXPR
	   wrappers to reach the underlying decl used as the map key.  */
	if (TREE_CODE (decl) == MEM_REF)
	  {
	    decl = TREE_OPERAND (decl, 0);
	    if (TREE_CODE (decl) == POINTER_PLUS_EXPR)
	      decl = TREE_OPERAND (decl, 0);
	    if (TREE_CODE (decl) == INDIRECT_REF
		|| TREE_CODE (decl) == ADDR_EXPR)
	      decl = TREE_OPERAND (decl, 0);
	  }
	key = (splay_tree_key) decl;
	n = splay_tree_lookup (ctx->field_map, key);
	if (n == NULL)
	  break;
	f = (tree) n->value;
	if (tcctx.cb.decl_map)
	  f = *tcctx.cb.decl_map->get (f);
	n = splay_tree_lookup (ctx->sfield_map, key);
	sf = (tree) n->value;
	if (tcctx.cb.decl_map)
	  sf = *tcctx.cb.decl_map->get (sf);
	src = build_simple_mem_ref_loc (loc, sarg);
	src = omp_build_component_ref (src, sf);
	if (decl != OMP_CLAUSE_DECL (c)
	    && TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
	    && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE)
	  src = build_simple_mem_ref_loc (loc, src);
	dst = build_simple_mem_ref_loc (loc, arg);
	dst = omp_build_component_ref (dst, f);
	t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
	append_to_statement_list (t, &list);
	break;
      case OMP_CLAUSE__LOOPTEMP_:
	/* Fields for first two _looptemp_ clauses are initialized by
	   GOMP_taskloop*, the rest are handled like firstprivate.  */
        if (looptempno < 2)
	  {
	    looptempno++;
	    break;
	  }
	/* FALLTHRU */
      case OMP_CLAUSE__REDUCTEMP_:
      case OMP_CLAUSE_FIRSTPRIVATE:
	decl = OMP_CLAUSE_DECL (c);
	/* VLA firstprivates are handled in the last pass below.  */
	if (is_variable_sized (decl))
	  break;
	n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
	if (n == NULL)
	  break;
	f = (tree) n->value;
	if (tcctx.cb.decl_map)
	  f = *tcctx.cb.decl_map->get (f);
	n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
	if (n != NULL)
	  {
	    sf = (tree) n->value;
	    if (tcctx.cb.decl_map)
	      sf = *tcctx.cb.decl_map->get (sf);
	    src = build_simple_mem_ref_loc (loc, sarg);
	    src = omp_build_component_ref (src, sf);
	    if (use_pointer_for_field (decl, NULL) || omp_is_reference (decl))
	      src = build_simple_mem_ref_loc (loc, src);
	  }
	else
	  src = decl;
	dst = build_simple_mem_ref_loc (loc, arg);
	dst = omp_build_component_ref (dst, f);
	/* Only true firstprivate uses the language's copy constructor;
	   the temp clauses get a plain assignment.  */
	if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
	  t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
	else
	  t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
	append_to_statement_list (t, &list);
	break;
      case OMP_CLAUSE_PRIVATE:
	if (! OMP_CLAUSE_PRIVATE_OUTER_REF (c))
	  break;
	decl = OMP_CLAUSE_DECL (c);
	n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
	f = (tree) n->value;
	if (tcctx.cb.decl_map)
	  f = *tcctx.cb.decl_map->get (f);
	n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
	if (n != NULL)
	  {
	    sf = (tree) n->value;
	    if (tcctx.cb.decl_map)
	      sf = *tcctx.cb.decl_map->get (sf);
	    src = build_simple_mem_ref_loc (loc, sarg);
	    src = omp_build_component_ref (src, sf);
	    if (use_pointer_for_field (decl, NULL))
	      src = build_simple_mem_ref_loc (loc, src);
	  }
	else
	  src = decl;
	dst = build_simple_mem_ref_loc (loc, arg);
	dst = omp_build_component_ref (dst, f);
	t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
	append_to_statement_list (t, &list);
	break;
      default:
	break;
      }

  /* Last pass: handle VLA firstprivates.  Copy-construct the data and
     then point the destination's pointer field at the new copy.  */
  if (tcctx.cb.decl_map)
    for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
	{
	  tree ind, ptr, df;

	  decl = OMP_CLAUSE_DECL (c);
	  if (!is_variable_sized (decl))
	    continue;
	  n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
	  if (n == NULL)
	    continue;
	  f = (tree) n->value;
	  f = *tcctx.cb.decl_map->get (f);
	  gcc_assert (DECL_HAS_VALUE_EXPR_P (decl));
	  ind = DECL_VALUE_EXPR (decl);
	  gcc_assert (TREE_CODE (ind) == INDIRECT_REF);
	  gcc_assert (DECL_P (TREE_OPERAND (ind, 0)));
	  n = splay_tree_lookup (ctx->sfield_map,
				 (splay_tree_key) TREE_OPERAND (ind, 0));
	  sf = (tree) n->value;
	  sf = *tcctx.cb.decl_map->get (sf);
	  src = build_simple_mem_ref_loc (loc, sarg);
	  src = omp_build_component_ref (src, sf);
	  src = build_simple_mem_ref_loc (loc, src);
	  dst = build_simple_mem_ref_loc (loc, arg);
	  dst = omp_build_component_ref (dst, f);
	  t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
	  append_to_statement_list (t, &list);
	  n = splay_tree_lookup (ctx->field_map,
				 (splay_tree_key) TREE_OPERAND (ind, 0));
	  df = (tree) n->value;
	  df = *tcctx.cb.decl_map->get (df);
	  ptr = build_simple_mem_ref_loc (loc, arg);
	  ptr = omp_build_component_ref (ptr, df);
	  t = build2 (MODIFY_EXPR, TREE_TYPE (ptr), ptr,
		      build_fold_addr_expr_loc (loc, dst));
	  append_to_statement_list (t, &list);
	}

  t = build1 (RETURN_EXPR, void_type_node, NULL);
  append_to_statement_list (t, &list);

  if (tcctx.cb.decl_map)
    delete tcctx.cb.decl_map;
  pop_gimplify_context (NULL);
  BIND_EXPR_BODY (bind) = list;
  pop_cfun ();
}
8838 
/* Lower the OMP_CLAUSE_DEPEND clauses found in *PCLAUSES into a runtime
   dependence array.  Statements initializing the array are appended to
   *ISEQ and a clobber of the array to *OSEQ.  An artificial
   OMP_CLAUSE_DEPEND_LAST clause holding the address of the array is
   prepended to *PCLAUSES so later passes see the already-lowered form.  */

static void
lower_depend_clauses (tree *pclauses, gimple_seq *iseq, gimple_seq *oseq)
{
  tree c, clauses;
  gimple *g;
  /* cnt[0] counts out/inout, cnt[1] mutexinoutset, cnt[2] in and
     cnt[3] depobj dependences.  IDX is the number of leading header
     slots in the array: 2 normally, 5 when mutexinoutset or depobj
     dependences are present.  */
  size_t cnt[4] = { 0, 0, 0, 0 }, idx = 2, i;

  clauses = omp_find_clause (*pclauses, OMP_CLAUSE_DEPEND);
  gcc_assert (clauses);
  /* First pass: count the dependences of each kind.  */
  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
      switch (OMP_CLAUSE_DEPEND_KIND (c))
	{
	case OMP_CLAUSE_DEPEND_LAST:
	  /* Lowering already done at gimplification.  */
	  return;
	case OMP_CLAUSE_DEPEND_IN:
	  cnt[2]++;
	  break;
	case OMP_CLAUSE_DEPEND_OUT:
	case OMP_CLAUSE_DEPEND_INOUT:
	  cnt[0]++;
	  break;
	case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
	  cnt[1]++;
	  break;
	case OMP_CLAUSE_DEPEND_DEPOBJ:
	  cnt[3]++;
	  break;
	case OMP_CLAUSE_DEPEND_SOURCE:
	case OMP_CLAUSE_DEPEND_SINK:
	  /* FALLTHRU */
	default:
	  gcc_unreachable ();
	}
  if (cnt[1] || cnt[3])
    idx = 5;
  size_t total = cnt[0] + cnt[1] + cnt[2] + cnt[3];
  /* The array holds IDX header slots followed by one address per
     dependence.  */
  tree type = build_array_type_nelts (ptr_type_node, total + idx);
  tree array = create_tmp_var (type);
  TREE_ADDRESSABLE (array) = 1;
  tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
		   NULL_TREE);
  if (idx == 5)
    {
      /* Extended 5-slot format: slot 0 is 0, the total count goes into
	 slot 1.  In the 2-slot format the total goes into slot 0.  */
      g = gimple_build_assign (r, build_int_cst (ptr_type_node, 0));
      gimple_seq_add_stmt (iseq, g);
      r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
		  NULL_TREE);
    }
  g = gimple_build_assign (r, build_int_cst (ptr_type_node, total));
  gimple_seq_add_stmt (iseq, g);
  /* Store the per-kind counts: just cnt[0] in the 2-slot format,
     cnt[0..2] in the 5-slot format.  */
  for (i = 0; i < (idx == 5 ? 3 : 1); i++)
    {
      r = build4 (ARRAY_REF, ptr_type_node, array,
		  size_int (i + 1 + (idx == 5)), NULL_TREE, NULL_TREE);
      g = gimple_build_assign (r, build_int_cst (ptr_type_node, cnt[i]));
      gimple_seq_add_stmt (iseq, g);
    }
  /* Second pass: emit the dependence addresses grouped by kind, in the
     same order as the counts above (out/inout, mutexinoutset, in,
     depobj); IDX advances past the header through the data slots.  */
  for (i = 0; i < 4; i++)
    {
      if (cnt[i] == 0)
	continue;
      for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
	  continue;
	else
	  {
	    /* Skip clauses whose kind does not belong to group I.  */
	    switch (OMP_CLAUSE_DEPEND_KIND (c))
	      {
	      case OMP_CLAUSE_DEPEND_IN:
		if (i != 2)
		  continue;
		break;
	      case OMP_CLAUSE_DEPEND_OUT:
	      case OMP_CLAUSE_DEPEND_INOUT:
		if (i != 0)
		  continue;
		break;
	      case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
		if (i != 1)
		  continue;
		break;
	      case OMP_CLAUSE_DEPEND_DEPOBJ:
		if (i != 3)
		  continue;
		break;
	      default:
		gcc_unreachable ();
	      }
	    tree t = OMP_CLAUSE_DECL (c);
	    t = fold_convert (ptr_type_node, t);
	    gimplify_expr (&t, iseq, NULL, is_gimple_val, fb_rvalue);
	    r = build4 (ARRAY_REF, ptr_type_node, array, size_int (idx++),
			NULL_TREE, NULL_TREE);
	    g = gimple_build_assign (r, t);
	    gimple_seq_add_stmt (iseq, g);
	  }
    }
  /* Prepend an artificial DEPEND_LAST clause recording the address of
     the lowered array; its presence tells this function (and later
     passes) that lowering has been done.  */
  c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
  OMP_CLAUSE_DEPEND_KIND (c) = OMP_CLAUSE_DEPEND_LAST;
  OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
  OMP_CLAUSE_CHAIN (c) = *pclauses;
  *pclauses = c;
  /* Clobber the array after the construct; its lifetime ends there.  */
  tree clobber = build_constructor (type, NULL);
  TREE_THIS_VOLATILE (clobber) = 1;
  g = gimple_build_assign (array, clobber);
  gimple_seq_add_stmt (oseq, g);
}
8948 
8949 /* Lower the OpenMP parallel or task directive in the current statement
8950    in GSI_P.  CTX holds context information for the directive.  */
8951 
static void
lower_omp_taskreg (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree clauses;
  tree child_fn, t;
  gimple *stmt = gsi_stmt (*gsi_p);
  gbind *par_bind, *bind, *dep_bind = NULL;
  gimple_seq par_body;
  location_t loc = gimple_location (stmt);

  clauses = gimple_omp_taskreg_clauses (stmt);
  if (gimple_code (stmt) == GIMPLE_OMP_TASK
      && gimple_omp_task_taskwait_p (stmt))
    {
      /* A taskwait represented as a GIMPLE_OMP_TASK has no body.  */
      par_bind = NULL;
      par_body = NULL;
    }
  else
    {
      /* The body of the construct is a single GIMPLE_BIND.  */
      par_bind
	= as_a <gbind *> (gimple_seq_first_stmt (gimple_omp_body (stmt)));
      par_body = gimple_bind_body (par_bind);
    }
  child_fn = ctx->cb.dst_fn;
  if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL
      && !gimple_omp_parallel_combined_p (stmt))
    {
      struct walk_stmt_info wi;
      int ws_num = 0;

      /* Walk the body with check_combined_parallel; when it reports
	 exactly one match, mark the parallel as combined.  */
      memset (&wi, 0, sizeof (wi));
      wi.info = &ws_num;
      wi.val_only = true;
      walk_gimple_seq (par_body, check_combined_parallel, NULL, &wi);
      if (ws_num == 1)
	gimple_omp_parallel_set_combined_p (stmt, true);
    }
  /* Lower any depend clauses on a task; the resulting setup/teardown
     sequences will be emitted around the construct inside DEP_BIND.  */
  gimple_seq dep_ilist = NULL;
  gimple_seq dep_olist = NULL;
  if (gimple_code (stmt) == GIMPLE_OMP_TASK
      && omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
    {
      push_gimplify_context ();
      dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
      lower_depend_clauses (gimple_omp_task_clauses_ptr (stmt),
			    &dep_ilist, &dep_olist);
    }

  /* For a bodyless taskwait task, the depend lowering above is all that
     is needed: wrap it and the statement into DEP_BIND and return.  */
  if (gimple_code (stmt) == GIMPLE_OMP_TASK
      && gimple_omp_task_taskwait_p (stmt))
    {
      if (dep_bind)
	{
	  gsi_replace (gsi_p, dep_bind, true);
	  gimple_bind_add_seq (dep_bind, dep_ilist);
	  gimple_bind_add_stmt (dep_bind, stmt);
	  gimple_bind_add_seq (dep_bind, dep_olist);
	  pop_gimplify_context (dep_bind);
	}
      return;
    }

  /* A sender record type means a task copy function is needed.  */
  if (ctx->srecord_type)
    create_task_copyfn (as_a <gomp_task *> (stmt), ctx);

  /* Lower task reductions (taskloop reductions, or a parallel carrying
     an artificial _reductemp_ clause).  */
  gimple_seq tskred_ilist = NULL;
  gimple_seq tskred_olist = NULL;
  if ((is_task_ctx (ctx)
       && gimple_omp_task_taskloop_p (ctx->stmt)
       && omp_find_clause (gimple_omp_task_clauses (ctx->stmt),
			   OMP_CLAUSE_REDUCTION))
      || (is_parallel_ctx (ctx)
	  && omp_find_clause (gimple_omp_parallel_clauses (stmt),
			      OMP_CLAUSE__REDUCTEMP_)))
    {
      /* Reuse DEP_BIND as the wrapper bind if depend lowering already
	 created it, otherwise create it here.  */
      if (dep_bind == NULL)
	{
	  push_gimplify_context ();
	  dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
	}
      lower_omp_task_reductions (ctx, is_task_ctx (ctx) ? OMP_TASKLOOP
							: OMP_PARALLEL,
				 gimple_omp_taskreg_clauses (ctx->stmt),
				 &tskred_ilist, &tskred_olist);
    }

  push_gimplify_context ();

  gimple_seq par_olist = NULL;
  gimple_seq par_ilist = NULL;
  gimple_seq par_rlist = NULL;
  /* A grid-phony parallel is not outlined; its lowered body is emitted
     inline instead of keeping the GIMPLE_OMP_PARALLEL statement.  */
  bool phony_construct = gimple_code (stmt) == GIMPLE_OMP_PARALLEL
    && gimple_omp_parallel_grid_phony (as_a <gomp_parallel *> (stmt));
  if (phony_construct && ctx->record_type)
    {
      gcc_checking_assert (!ctx->receiver_decl);
      ctx->receiver_decl = create_tmp_var
	(build_reference_type (ctx->record_type), ".omp_rec");
    }
  lower_rec_input_clauses (clauses, &par_ilist, &par_olist, ctx, NULL);
  lower_omp (&par_body, ctx);
  if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL)
    lower_reduction_clauses (clauses, &par_rlist, ctx);

  /* Declare all the variables created by mapping and the variables
     declared in the scope of the parallel body.  */
  record_vars_into (ctx->block_vars, child_fn);
  maybe_remove_omp_member_access_dummy_vars (par_bind);
  record_vars_into (gimple_bind_vars (par_bind), child_fn);

  if (ctx->record_type)
    {
      /* .omp_data_o is the record through which the parent passes
	 shared/mapped data to the child function.  */
      ctx->sender_decl
	= create_tmp_var (ctx->srecord_type ? ctx->srecord_type
			  : ctx->record_type, ".omp_data_o");
      DECL_NAMELESS (ctx->sender_decl) = 1;
      TREE_ADDRESSABLE (ctx->sender_decl) = 1;
      gimple_omp_taskreg_set_data_arg (stmt, ctx->sender_decl);
    }

  gimple_seq olist = NULL;
  gimple_seq ilist = NULL;
  lower_send_clauses (clauses, &ilist, &olist, ctx);
  lower_send_shared_vars (&ilist, &olist, ctx);

  if (ctx->record_type)
    {
      /* Clobber the sender record after the construct; its lifetime
	 ends there.  */
      tree clobber = build_constructor (TREE_TYPE (ctx->sender_decl), NULL);
      TREE_THIS_VOLATILE (clobber) = 1;
      gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
							clobber));
    }

  /* Once all the expansions are done, sequence all the different
     fragments inside gimple_omp_body.  */

  gimple_seq new_body = NULL;

  if (ctx->record_type)
    {
      t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
      /* fixup_child_record_type might have changed receiver_decl's type.  */
      t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
      gimple_seq_add_stmt (&new_body,
	  		   gimple_build_assign (ctx->receiver_decl, t));
    }

  /* Body order: input-clause setup, lowered user body, reductions,
     optional cancellation label, output-clause teardown.  */
  gimple_seq_add_seq (&new_body, par_ilist);
  gimple_seq_add_seq (&new_body, par_body);
  gimple_seq_add_seq (&new_body, par_rlist);
  if (ctx->cancellable)
    gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
  gimple_seq_add_seq (&new_body, par_olist);
  new_body = maybe_catch_exception (new_body);
  if (gimple_code (stmt) == GIMPLE_OMP_TASK)
    gimple_seq_add_stmt (&new_body,
			 gimple_build_omp_continue (integer_zero_node,
						    integer_zero_node));
  if (!phony_construct)
    {
      gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
      gimple_omp_set_body (stmt, new_body);
    }

  if (dep_bind && gimple_bind_block (par_bind) == NULL_TREE)
    bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
  else
    bind = gimple_build_bind (NULL, NULL, gimple_bind_block (par_bind));
  gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
  gimple_bind_add_seq (bind, ilist);
  if (!phony_construct)
    gimple_bind_add_stmt (bind, stmt);
  else
    /* For a phony construct splice the lowered body in directly.  */
    gimple_bind_add_seq (bind, new_body);
  gimple_bind_add_seq (bind, olist);

  pop_gimplify_context (NULL);

  if (dep_bind)
    {
      /* Nesting inside DEP_BIND:
	 dep_ilist; tskred_ilist; BIND; tskred_olist; dep_olist.  */
      gimple_bind_add_seq (dep_bind, dep_ilist);
      gimple_bind_add_seq (dep_bind, tskred_ilist);
      gimple_bind_add_stmt (dep_bind, bind);
      gimple_bind_add_seq (dep_bind, tskred_olist);
      gimple_bind_add_seq (dep_bind, dep_olist);
      pop_gimplify_context (dep_bind);
    }
}
9140 
9141 /* Lower the GIMPLE_OMP_TARGET in the current statement
9142    in GSI_P.  CTX holds context information for the directive.  */
9143 
9144 static void
9145 lower_omp_target (gimple_stmt_iterator *gsi_p, omp_context *ctx)
9146 {
9147   tree clauses;
9148   tree child_fn, t, c;
9149   gomp_target *stmt = as_a <gomp_target *> (gsi_stmt (*gsi_p));
9150   gbind *tgt_bind, *bind, *dep_bind = NULL;
9151   gimple_seq tgt_body, olist, ilist, fplist, new_body;
9152   location_t loc = gimple_location (stmt);
9153   bool offloaded, data_region;
9154   unsigned int map_cnt = 0;
9155 
9156   offloaded = is_gimple_omp_offloaded (stmt);
9157   switch (gimple_omp_target_kind (stmt))
9158     {
9159     case GF_OMP_TARGET_KIND_REGION:
9160     case GF_OMP_TARGET_KIND_UPDATE:
9161     case GF_OMP_TARGET_KIND_ENTER_DATA:
9162     case GF_OMP_TARGET_KIND_EXIT_DATA:
9163     case GF_OMP_TARGET_KIND_OACC_PARALLEL:
9164     case GF_OMP_TARGET_KIND_OACC_KERNELS:
9165     case GF_OMP_TARGET_KIND_OACC_UPDATE:
9166     case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA:
9167     case GF_OMP_TARGET_KIND_OACC_DECLARE:
9168       data_region = false;
9169       break;
9170     case GF_OMP_TARGET_KIND_DATA:
9171     case GF_OMP_TARGET_KIND_OACC_DATA:
9172     case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
9173       data_region = true;
9174       break;
9175     default:
9176       gcc_unreachable ();
9177     }
9178 
9179   clauses = gimple_omp_target_clauses (stmt);
9180 
9181   gimple_seq dep_ilist = NULL;
9182   gimple_seq dep_olist = NULL;
9183   if (omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
9184     {
9185       push_gimplify_context ();
9186       dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
9187       lower_depend_clauses (gimple_omp_target_clauses_ptr (stmt),
9188 			    &dep_ilist, &dep_olist);
9189     }
9190 
9191   tgt_bind = NULL;
9192   tgt_body = NULL;
9193   if (offloaded)
9194     {
9195       tgt_bind = gimple_seq_first_stmt_as_a_bind (gimple_omp_body (stmt));
9196       tgt_body = gimple_bind_body (tgt_bind);
9197     }
9198   else if (data_region)
9199     tgt_body = gimple_omp_body (stmt);
9200   child_fn = ctx->cb.dst_fn;
9201 
9202   push_gimplify_context ();
9203   fplist = NULL;
9204 
9205   for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
9206     switch (OMP_CLAUSE_CODE (c))
9207       {
9208 	tree var, x;
9209 
9210       default:
9211 	break;
9212       case OMP_CLAUSE_MAP:
9213 #if CHECKING_P
9214 	/* First check what we're prepared to handle in the following.  */
9215 	switch (OMP_CLAUSE_MAP_KIND (c))
9216 	  {
9217 	  case GOMP_MAP_ALLOC:
9218 	  case GOMP_MAP_TO:
9219 	  case GOMP_MAP_FROM:
9220 	  case GOMP_MAP_TOFROM:
9221 	  case GOMP_MAP_POINTER:
9222 	  case GOMP_MAP_TO_PSET:
9223 	  case GOMP_MAP_DELETE:
9224 	  case GOMP_MAP_RELEASE:
9225 	  case GOMP_MAP_ALWAYS_TO:
9226 	  case GOMP_MAP_ALWAYS_FROM:
9227 	  case GOMP_MAP_ALWAYS_TOFROM:
9228 	  case GOMP_MAP_FIRSTPRIVATE_POINTER:
9229 	  case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
9230 	  case GOMP_MAP_STRUCT:
9231 	  case GOMP_MAP_ALWAYS_POINTER:
9232 	    break;
9233 	  case GOMP_MAP_FORCE_ALLOC:
9234 	  case GOMP_MAP_FORCE_TO:
9235 	  case GOMP_MAP_FORCE_FROM:
9236 	  case GOMP_MAP_FORCE_TOFROM:
9237 	  case GOMP_MAP_FORCE_PRESENT:
9238 	  case GOMP_MAP_FORCE_DEVICEPTR:
9239 	  case GOMP_MAP_DEVICE_RESIDENT:
9240 	  case GOMP_MAP_LINK:
9241 	    gcc_assert (is_gimple_omp_oacc (stmt));
9242 	    break;
9243 	  default:
9244 	    gcc_unreachable ();
9245 	  }
9246 #endif
9247 	  /* FALLTHRU */
9248       case OMP_CLAUSE_TO:
9249       case OMP_CLAUSE_FROM:
9250       oacc_firstprivate:
9251 	var = OMP_CLAUSE_DECL (c);
9252 	if (!DECL_P (var))
9253 	  {
9254 	    if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP
9255 		|| (!OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
9256 		    && (OMP_CLAUSE_MAP_KIND (c)
9257 			!= GOMP_MAP_FIRSTPRIVATE_POINTER)))
9258 	      map_cnt++;
9259 	    continue;
9260 	  }
9261 
9262 	if (DECL_SIZE (var)
9263 	    && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
9264 	  {
9265 	    tree var2 = DECL_VALUE_EXPR (var);
9266 	    gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
9267 	    var2 = TREE_OPERAND (var2, 0);
9268 	    gcc_assert (DECL_P (var2));
9269 	    var = var2;
9270 	  }
9271 
9272 	if (offloaded
9273 	    && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
9274 	    && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
9275 		|| OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
9276 	  {
9277 	    if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
9278 	      {
9279 		if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx))
9280 		    && varpool_node::get_create (var)->offloadable)
9281 		  continue;
9282 
9283 		tree type = build_pointer_type (TREE_TYPE (var));
9284 		tree new_var = lookup_decl (var, ctx);
9285 		x = create_tmp_var_raw (type, get_name (new_var));
9286 		gimple_add_tmp_var (x);
9287 		x = build_simple_mem_ref (x);
9288 		SET_DECL_VALUE_EXPR (new_var, x);
9289 		DECL_HAS_VALUE_EXPR_P (new_var) = 1;
9290 	      }
9291 	    continue;
9292 	  }
9293 
9294 	if (!maybe_lookup_field (var, ctx))
9295 	  continue;
9296 
9297 	/* Don't remap oacc parallel reduction variables, because the
9298 	   intermediate result must be local to each gang.  */
9299 	if (offloaded && !(OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
9300 			   && OMP_CLAUSE_MAP_IN_REDUCTION (c)))
9301 	  {
9302 	    x = build_receiver_ref (var, true, ctx);
9303 	    tree new_var = lookup_decl (var, ctx);
9304 
9305 	    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
9306 		&& OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
9307 		&& !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
9308 		&& TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
9309 	      x = build_simple_mem_ref (x);
9310 	    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
9311 	      {
9312 		gcc_assert (is_gimple_omp_oacc (ctx->stmt));
9313 		if (omp_is_reference (new_var)
9314 		    && TREE_CODE (TREE_TYPE (new_var)) != POINTER_TYPE)
9315 		  {
9316 		    /* Create a local object to hold the instance
9317 		       value.  */
9318 		    tree type = TREE_TYPE (TREE_TYPE (new_var));
9319 		    const char *id = IDENTIFIER_POINTER (DECL_NAME (new_var));
9320 		    tree inst = create_tmp_var (type, id);
9321 		    gimplify_assign (inst, fold_indirect_ref (x), &fplist);
9322 		    x = build_fold_addr_expr (inst);
9323 		  }
9324 		gimplify_assign (new_var, x, &fplist);
9325 	      }
9326 	    else if (DECL_P (new_var))
9327 	      {
9328 		SET_DECL_VALUE_EXPR (new_var, x);
9329 		DECL_HAS_VALUE_EXPR_P (new_var) = 1;
9330 	      }
9331 	    else
9332 	      gcc_unreachable ();
9333 	  }
9334 	map_cnt++;
9335 	break;
9336 
9337       case OMP_CLAUSE_FIRSTPRIVATE:
9338 	if (is_oacc_parallel (ctx))
9339 	  goto oacc_firstprivate;
9340 	map_cnt++;
9341 	var = OMP_CLAUSE_DECL (c);
9342 	if (!omp_is_reference (var)
9343 	    && !is_gimple_reg_type (TREE_TYPE (var)))
9344 	  {
9345 	    tree new_var = lookup_decl (var, ctx);
9346 	    if (is_variable_sized (var))
9347 	      {
9348 		tree pvar = DECL_VALUE_EXPR (var);
9349 		gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
9350 		pvar = TREE_OPERAND (pvar, 0);
9351 		gcc_assert (DECL_P (pvar));
9352 		tree new_pvar = lookup_decl (pvar, ctx);
9353 		x = build_fold_indirect_ref (new_pvar);
9354 		TREE_THIS_NOTRAP (x) = 1;
9355 	      }
9356 	    else
9357 	      x = build_receiver_ref (var, true, ctx);
9358 	    SET_DECL_VALUE_EXPR (new_var, x);
9359 	    DECL_HAS_VALUE_EXPR_P (new_var) = 1;
9360 	  }
9361 	break;
9362 
9363       case OMP_CLAUSE_PRIVATE:
9364 	if (is_gimple_omp_oacc (ctx->stmt))
9365 	  break;
9366 	var = OMP_CLAUSE_DECL (c);
9367 	if (is_variable_sized (var))
9368 	  {
9369 	    tree new_var = lookup_decl (var, ctx);
9370 	    tree pvar = DECL_VALUE_EXPR (var);
9371 	    gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
9372 	    pvar = TREE_OPERAND (pvar, 0);
9373 	    gcc_assert (DECL_P (pvar));
9374 	    tree new_pvar = lookup_decl (pvar, ctx);
9375 	    x = build_fold_indirect_ref (new_pvar);
9376 	    TREE_THIS_NOTRAP (x) = 1;
9377 	    SET_DECL_VALUE_EXPR (new_var, x);
9378 	    DECL_HAS_VALUE_EXPR_P (new_var) = 1;
9379 	  }
9380 	break;
9381 
9382       case OMP_CLAUSE_USE_DEVICE_PTR:
9383       case OMP_CLAUSE_IS_DEVICE_PTR:
9384 	var = OMP_CLAUSE_DECL (c);
9385 	map_cnt++;
9386 	if (is_variable_sized (var))
9387 	  {
9388 	    tree new_var = lookup_decl (var, ctx);
9389 	    tree pvar = DECL_VALUE_EXPR (var);
9390 	    gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
9391 	    pvar = TREE_OPERAND (pvar, 0);
9392 	    gcc_assert (DECL_P (pvar));
9393 	    tree new_pvar = lookup_decl (pvar, ctx);
9394 	    x = build_fold_indirect_ref (new_pvar);
9395 	    TREE_THIS_NOTRAP (x) = 1;
9396 	    SET_DECL_VALUE_EXPR (new_var, x);
9397 	    DECL_HAS_VALUE_EXPR_P (new_var) = 1;
9398 	  }
9399 	else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
9400 	  {
9401 	    tree new_var = lookup_decl (var, ctx);
9402 	    tree type = build_pointer_type (TREE_TYPE (var));
9403 	    x = create_tmp_var_raw (type, get_name (new_var));
9404 	    gimple_add_tmp_var (x);
9405 	    x = build_simple_mem_ref (x);
9406 	    SET_DECL_VALUE_EXPR (new_var, x);
9407 	    DECL_HAS_VALUE_EXPR_P (new_var) = 1;
9408 	  }
9409 	else
9410 	  {
9411 	    tree new_var = lookup_decl (var, ctx);
9412 	    x = create_tmp_var_raw (TREE_TYPE (new_var), get_name (new_var));
9413 	    gimple_add_tmp_var (x);
9414 	    SET_DECL_VALUE_EXPR (new_var, x);
9415 	    DECL_HAS_VALUE_EXPR_P (new_var) = 1;
9416 	  }
9417 	break;
9418       }
9419 
9420   if (offloaded)
9421     {
9422       target_nesting_level++;
9423       lower_omp (&tgt_body, ctx);
9424       target_nesting_level--;
9425     }
9426   else if (data_region)
9427     lower_omp (&tgt_body, ctx);
9428 
9429   if (offloaded)
9430     {
9431       /* Declare all the variables created by mapping and the variables
9432 	 declared in the scope of the target body.  */
9433       record_vars_into (ctx->block_vars, child_fn);
9434       maybe_remove_omp_member_access_dummy_vars (tgt_bind);
9435       record_vars_into (gimple_bind_vars (tgt_bind), child_fn);
9436     }
9437 
9438   olist = NULL;
9439   ilist = NULL;
9440   if (ctx->record_type)
9441     {
9442       ctx->sender_decl
9443 	= create_tmp_var (ctx->record_type, ".omp_data_arr");
9444       DECL_NAMELESS (ctx->sender_decl) = 1;
9445       TREE_ADDRESSABLE (ctx->sender_decl) = 1;
9446       t = make_tree_vec (3);
9447       TREE_VEC_ELT (t, 0) = ctx->sender_decl;
9448       TREE_VEC_ELT (t, 1)
9449 	= create_tmp_var (build_array_type_nelts (size_type_node, map_cnt),
9450 			  ".omp_data_sizes");
9451       DECL_NAMELESS (TREE_VEC_ELT (t, 1)) = 1;
9452       TREE_ADDRESSABLE (TREE_VEC_ELT (t, 1)) = 1;
9453       TREE_STATIC (TREE_VEC_ELT (t, 1)) = 1;
9454       tree tkind_type = short_unsigned_type_node;
9455       int talign_shift = 8;
9456       TREE_VEC_ELT (t, 2)
9457 	= create_tmp_var (build_array_type_nelts (tkind_type, map_cnt),
9458 			  ".omp_data_kinds");
9459       DECL_NAMELESS (TREE_VEC_ELT (t, 2)) = 1;
9460       TREE_ADDRESSABLE (TREE_VEC_ELT (t, 2)) = 1;
9461       TREE_STATIC (TREE_VEC_ELT (t, 2)) = 1;
9462       gimple_omp_target_set_data_arg (stmt, t);
9463 
9464       vec<constructor_elt, va_gc> *vsize;
9465       vec<constructor_elt, va_gc> *vkind;
9466       vec_alloc (vsize, map_cnt);
9467       vec_alloc (vkind, map_cnt);
9468       unsigned int map_idx = 0;
9469 
9470       for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
9471 	switch (OMP_CLAUSE_CODE (c))
9472 	  {
9473 	    tree ovar, nc, s, purpose, var, x, type;
9474 	    unsigned int talign;
9475 
9476 	  default:
9477 	    break;
9478 
9479 	  case OMP_CLAUSE_MAP:
9480 	  case OMP_CLAUSE_TO:
9481 	  case OMP_CLAUSE_FROM:
9482 	  oacc_firstprivate_map:
9483 	    nc = c;
9484 	    ovar = OMP_CLAUSE_DECL (c);
9485 	    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
9486 		&& (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
9487 		    || (OMP_CLAUSE_MAP_KIND (c)
9488 			== GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
9489 	      break;
9490 	    if (!DECL_P (ovar))
9491 	      {
9492 		if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
9493 		    && OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
9494 		  {
9495 		    gcc_checking_assert (OMP_CLAUSE_DECL (OMP_CLAUSE_CHAIN (c))
9496 					 == get_base_address (ovar));
9497 		    nc = OMP_CLAUSE_CHAIN (c);
9498 		    ovar = OMP_CLAUSE_DECL (nc);
9499 		  }
9500 		else
9501 		  {
9502 		    tree x = build_sender_ref (ovar, ctx);
9503 		    tree v
9504 		      = build_fold_addr_expr_with_type (ovar, ptr_type_node);
9505 		    gimplify_assign (x, v, &ilist);
9506 		    nc = NULL_TREE;
9507 		  }
9508 	      }
9509 	    else
9510 	      {
9511 		if (DECL_SIZE (ovar)
9512 		    && TREE_CODE (DECL_SIZE (ovar)) != INTEGER_CST)
9513 		  {
9514 		    tree ovar2 = DECL_VALUE_EXPR (ovar);
9515 		    gcc_assert (TREE_CODE (ovar2) == INDIRECT_REF);
9516 		    ovar2 = TREE_OPERAND (ovar2, 0);
9517 		    gcc_assert (DECL_P (ovar2));
9518 		    ovar = ovar2;
9519 		  }
9520 		if (!maybe_lookup_field (ovar, ctx))
9521 		  continue;
9522 	      }
9523 
9524 	    talign = TYPE_ALIGN_UNIT (TREE_TYPE (ovar));
9525 	    if (DECL_P (ovar) && DECL_ALIGN_UNIT (ovar) > talign)
9526 	      talign = DECL_ALIGN_UNIT (ovar);
9527 	    if (nc)
9528 	      {
9529 		var = lookup_decl_in_outer_ctx (ovar, ctx);
9530 		x = build_sender_ref (ovar, ctx);
9531 
9532 		if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
9533 		    && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
9534 		    && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
9535 		    && TREE_CODE (TREE_TYPE (ovar)) == ARRAY_TYPE)
9536 		  {
9537 		    gcc_assert (offloaded);
9538 		    tree avar
9539 		      = create_tmp_var (TREE_TYPE (TREE_TYPE (x)));
9540 		    mark_addressable (avar);
9541 		    gimplify_assign (avar, build_fold_addr_expr (var), &ilist);
9542 		    talign = DECL_ALIGN_UNIT (avar);
9543 		    avar = build_fold_addr_expr (avar);
9544 		    gimplify_assign (x, avar, &ilist);
9545 		  }
9546 		else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
9547 		  {
9548 		    gcc_assert (is_gimple_omp_oacc (ctx->stmt));
9549 		    if (!omp_is_reference (var))
9550 		      {
9551 			if (is_gimple_reg (var)
9552 			    && OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
9553 			  TREE_NO_WARNING (var) = 1;
9554 			var = build_fold_addr_expr (var);
9555 		      }
9556 		    else
9557 		      talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
9558 		    gimplify_assign (x, var, &ilist);
9559 		  }
9560 		else if (is_gimple_reg (var))
9561 		  {
9562 		    gcc_assert (offloaded);
9563 		    tree avar = create_tmp_var (TREE_TYPE (var));
9564 		    mark_addressable (avar);
9565 		    enum gomp_map_kind map_kind = OMP_CLAUSE_MAP_KIND (c);
9566 		    if (GOMP_MAP_COPY_TO_P (map_kind)
9567 			|| map_kind == GOMP_MAP_POINTER
9568 			|| map_kind == GOMP_MAP_TO_PSET
9569 			|| map_kind == GOMP_MAP_FORCE_DEVICEPTR)
9570 		      {
9571 			/* If we need to initialize a temporary
9572 			   with VAR because it is not addressable, and
9573 			   the variable hasn't been initialized yet, then
9574 			   we'll get a warning for the store to avar.
9575 			   Don't warn in that case, the mapping might
9576 			   be implicit.  */
9577 			TREE_NO_WARNING (var) = 1;
9578 			gimplify_assign (avar, var, &ilist);
9579 		      }
9580 		    avar = build_fold_addr_expr (avar);
9581 		    gimplify_assign (x, avar, &ilist);
9582 		    if ((GOMP_MAP_COPY_FROM_P (map_kind)
9583 			 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
9584 			&& !TYPE_READONLY (TREE_TYPE (var)))
9585 		      {
9586 			x = unshare_expr (x);
9587 			x = build_simple_mem_ref (x);
9588 			gimplify_assign (var, x, &olist);
9589 		      }
9590 		  }
9591 		else
9592 		  {
9593 		    var = build_fold_addr_expr (var);
9594 		    gimplify_assign (x, var, &ilist);
9595 		  }
9596 	      }
9597 	    s = NULL_TREE;
9598 	    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
9599 	      {
9600 		gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
9601 		s = TREE_TYPE (ovar);
9602 		if (TREE_CODE (s) == REFERENCE_TYPE)
9603 		  s = TREE_TYPE (s);
9604 		s = TYPE_SIZE_UNIT (s);
9605 	      }
9606 	    else
9607 	      s = OMP_CLAUSE_SIZE (c);
9608 	    if (s == NULL_TREE)
9609 	      s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
9610 	    s = fold_convert (size_type_node, s);
9611 	    purpose = size_int (map_idx++);
9612 	    CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
9613 	    if (TREE_CODE (s) != INTEGER_CST)
9614 	      TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
9615 
9616 	    unsigned HOST_WIDE_INT tkind, tkind_zero;
9617 	    switch (OMP_CLAUSE_CODE (c))
9618 	      {
9619 	      case OMP_CLAUSE_MAP:
9620 		tkind = OMP_CLAUSE_MAP_KIND (c);
9621 		tkind_zero = tkind;
9622 		if (OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c))
9623 		  switch (tkind)
9624 		    {
9625 		    case GOMP_MAP_ALLOC:
9626 		    case GOMP_MAP_TO:
9627 		    case GOMP_MAP_FROM:
9628 		    case GOMP_MAP_TOFROM:
9629 		    case GOMP_MAP_ALWAYS_TO:
9630 		    case GOMP_MAP_ALWAYS_FROM:
9631 		    case GOMP_MAP_ALWAYS_TOFROM:
9632 		    case GOMP_MAP_RELEASE:
9633 		    case GOMP_MAP_FORCE_TO:
9634 		    case GOMP_MAP_FORCE_FROM:
9635 		    case GOMP_MAP_FORCE_TOFROM:
9636 		    case GOMP_MAP_FORCE_PRESENT:
9637 		      tkind_zero = GOMP_MAP_ZERO_LEN_ARRAY_SECTION;
9638 		      break;
9639 		    case GOMP_MAP_DELETE:
9640 		      tkind_zero = GOMP_MAP_DELETE_ZERO_LEN_ARRAY_SECTION;
9641 		    default:
9642 		      break;
9643 		    }
9644 		if (tkind_zero != tkind)
9645 		  {
9646 		    if (integer_zerop (s))
9647 		      tkind = tkind_zero;
9648 		    else if (integer_nonzerop (s))
9649 		      tkind_zero = tkind;
9650 		  }
9651 		break;
9652 	      case OMP_CLAUSE_FIRSTPRIVATE:
9653 		gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
9654 		tkind = GOMP_MAP_TO;
9655 		tkind_zero = tkind;
9656 		break;
9657 	      case OMP_CLAUSE_TO:
9658 		tkind = GOMP_MAP_TO;
9659 		tkind_zero = tkind;
9660 		break;
9661 	      case OMP_CLAUSE_FROM:
9662 		tkind = GOMP_MAP_FROM;
9663 		tkind_zero = tkind;
9664 		break;
9665 	      default:
9666 		gcc_unreachable ();
9667 	      }
9668 	    gcc_checking_assert (tkind
9669 				 < (HOST_WIDE_INT_C (1U) << talign_shift));
9670 	    gcc_checking_assert (tkind_zero
9671 				 < (HOST_WIDE_INT_C (1U) << talign_shift));
9672 	    talign = ceil_log2 (talign);
9673 	    tkind |= talign << talign_shift;
9674 	    tkind_zero |= talign << talign_shift;
9675 	    gcc_checking_assert (tkind
9676 				 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
9677 	    gcc_checking_assert (tkind_zero
9678 				 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
9679 	    if (tkind == tkind_zero)
9680 	      x = build_int_cstu (tkind_type, tkind);
9681 	    else
9682 	      {
9683 		TREE_STATIC (TREE_VEC_ELT (t, 2)) = 0;
9684 		x = build3 (COND_EXPR, tkind_type,
9685 			    fold_build2 (EQ_EXPR, boolean_type_node,
9686 					 unshare_expr (s), size_zero_node),
9687 			    build_int_cstu (tkind_type, tkind_zero),
9688 			    build_int_cstu (tkind_type, tkind));
9689 	      }
9690 	    CONSTRUCTOR_APPEND_ELT (vkind, purpose, x);
9691 	    if (nc && nc != c)
9692 	      c = nc;
9693 	    break;
9694 
9695 	  case OMP_CLAUSE_FIRSTPRIVATE:
9696 	    if (is_oacc_parallel (ctx))
9697 	      goto oacc_firstprivate_map;
9698 	    ovar = OMP_CLAUSE_DECL (c);
9699 	    if (omp_is_reference (ovar))
9700 	      talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
9701 	    else
9702 	      talign = DECL_ALIGN_UNIT (ovar);
9703 	    var = lookup_decl_in_outer_ctx (ovar, ctx);
9704 	    x = build_sender_ref (ovar, ctx);
9705 	    tkind = GOMP_MAP_FIRSTPRIVATE;
9706 	    type = TREE_TYPE (ovar);
9707 	    if (omp_is_reference (ovar))
9708 	      type = TREE_TYPE (type);
9709 	    if ((INTEGRAL_TYPE_P (type)
9710 		 && TYPE_PRECISION (type) <= POINTER_SIZE)
9711 		|| TREE_CODE (type) == POINTER_TYPE)
9712 	      {
9713 		tkind = GOMP_MAP_FIRSTPRIVATE_INT;
9714 		tree t = var;
9715 		if (omp_is_reference (var))
9716 		  t = build_simple_mem_ref (var);
9717 		else if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
9718 		  TREE_NO_WARNING (var) = 1;
9719 		if (TREE_CODE (type) != POINTER_TYPE)
9720 		  t = fold_convert (pointer_sized_int_node, t);
9721 		t = fold_convert (TREE_TYPE (x), t);
9722 		gimplify_assign (x, t, &ilist);
9723 	      }
9724 	    else if (omp_is_reference (var))
9725 	      gimplify_assign (x, var, &ilist);
9726 	    else if (is_gimple_reg (var))
9727 	      {
9728 		tree avar = create_tmp_var (TREE_TYPE (var));
9729 		mark_addressable (avar);
9730 		if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
9731 		  TREE_NO_WARNING (var) = 1;
9732 		gimplify_assign (avar, var, &ilist);
9733 		avar = build_fold_addr_expr (avar);
9734 		gimplify_assign (x, avar, &ilist);
9735 	      }
9736 	    else
9737 	      {
9738 		var = build_fold_addr_expr (var);
9739 		gimplify_assign (x, var, &ilist);
9740 	      }
9741 	    if (tkind == GOMP_MAP_FIRSTPRIVATE_INT)
9742 	      s = size_int (0);
9743 	    else if (omp_is_reference (ovar))
9744 	      s = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
9745 	    else
9746 	      s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
9747 	    s = fold_convert (size_type_node, s);
9748 	    purpose = size_int (map_idx++);
9749 	    CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
9750 	    if (TREE_CODE (s) != INTEGER_CST)
9751 	      TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
9752 
9753 	    gcc_checking_assert (tkind
9754 				 < (HOST_WIDE_INT_C (1U) << talign_shift));
9755 	    talign = ceil_log2 (talign);
9756 	    tkind |= talign << talign_shift;
9757 	    gcc_checking_assert (tkind
9758 				 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
9759 	    CONSTRUCTOR_APPEND_ELT (vkind, purpose,
9760 				    build_int_cstu (tkind_type, tkind));
9761 	    break;
9762 
9763 	  case OMP_CLAUSE_USE_DEVICE_PTR:
9764 	  case OMP_CLAUSE_IS_DEVICE_PTR:
9765 	    ovar = OMP_CLAUSE_DECL (c);
9766 	    var = lookup_decl_in_outer_ctx (ovar, ctx);
9767 	    x = build_sender_ref (ovar, ctx);
9768 	    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR)
9769 	      tkind = GOMP_MAP_USE_DEVICE_PTR;
9770 	    else
9771 	      tkind = GOMP_MAP_FIRSTPRIVATE_INT;
9772 	    type = TREE_TYPE (ovar);
9773 	    if (TREE_CODE (type) == ARRAY_TYPE)
9774 	      var = build_fold_addr_expr (var);
9775 	    else
9776 	      {
9777 		if (omp_is_reference (ovar))
9778 		  {
9779 		    type = TREE_TYPE (type);
9780 		    if (TREE_CODE (type) != ARRAY_TYPE)
9781 		      var = build_simple_mem_ref (var);
9782 		    var = fold_convert (TREE_TYPE (x), var);
9783 		  }
9784 	      }
9785 	    gimplify_assign (x, var, &ilist);
9786 	    s = size_int (0);
9787 	    purpose = size_int (map_idx++);
9788 	    CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
9789 	    gcc_checking_assert (tkind
9790 				 < (HOST_WIDE_INT_C (1U) << talign_shift));
9791 	    gcc_checking_assert (tkind
9792 				 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
9793 	    CONSTRUCTOR_APPEND_ELT (vkind, purpose,
9794 				    build_int_cstu (tkind_type, tkind));
9795 	    break;
9796 	  }
9797 
9798       gcc_assert (map_idx == map_cnt);
9799 
9800       DECL_INITIAL (TREE_VEC_ELT (t, 1))
9801 	= build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 1)), vsize);
9802       DECL_INITIAL (TREE_VEC_ELT (t, 2))
9803 	= build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 2)), vkind);
9804       for (int i = 1; i <= 2; i++)
9805 	if (!TREE_STATIC (TREE_VEC_ELT (t, i)))
9806 	  {
9807 	    gimple_seq initlist = NULL;
9808 	    force_gimple_operand (build1 (DECL_EXPR, void_type_node,
9809 					  TREE_VEC_ELT (t, i)),
9810 				  &initlist, true, NULL_TREE);
9811 	    gimple_seq_add_seq (&ilist, initlist);
9812 
9813 	    tree clobber = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, i)),
9814 					      NULL);
9815 	    TREE_THIS_VOLATILE (clobber) = 1;
9816 	    gimple_seq_add_stmt (&olist,
9817 				 gimple_build_assign (TREE_VEC_ELT (t, i),
9818 						      clobber));
9819 	  }
9820 
9821       tree clobber = build_constructor (ctx->record_type, NULL);
9822       TREE_THIS_VOLATILE (clobber) = 1;
9823       gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
9824 							clobber));
9825     }
9826 
9827   /* Once all the expansions are done, sequence all the different
9828      fragments inside gimple_omp_body.  */
9829 
9830   new_body = NULL;
9831 
9832   if (offloaded
9833       && ctx->record_type)
9834     {
9835       t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
9836       /* fixup_child_record_type might have changed receiver_decl's type.  */
9837       t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
9838       gimple_seq_add_stmt (&new_body,
9839 	  		   gimple_build_assign (ctx->receiver_decl, t));
9840     }
9841   gimple_seq_add_seq (&new_body, fplist);
9842 
9843   if (offloaded || data_region)
9844     {
9845       tree prev = NULL_TREE;
9846       for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
9847 	switch (OMP_CLAUSE_CODE (c))
9848 	  {
9849 	    tree var, x;
9850 	  default:
9851 	    break;
9852 	  case OMP_CLAUSE_FIRSTPRIVATE:
9853 	    if (is_gimple_omp_oacc (ctx->stmt))
9854 	      break;
9855 	    var = OMP_CLAUSE_DECL (c);
9856 	    if (omp_is_reference (var)
9857 		|| is_gimple_reg_type (TREE_TYPE (var)))
9858 	      {
9859 		tree new_var = lookup_decl (var, ctx);
9860 		tree type;
9861 		type = TREE_TYPE (var);
9862 		if (omp_is_reference (var))
9863 		  type = TREE_TYPE (type);
9864 		if ((INTEGRAL_TYPE_P (type)
9865 		     && TYPE_PRECISION (type) <= POINTER_SIZE)
9866 		    || TREE_CODE (type) == POINTER_TYPE)
9867 		  {
9868 		    x = build_receiver_ref (var, false, ctx);
9869 		    if (TREE_CODE (type) != POINTER_TYPE)
9870 		      x = fold_convert (pointer_sized_int_node, x);
9871 		    x = fold_convert (type, x);
9872 		    gimplify_expr (&x, &new_body, NULL, is_gimple_val,
9873 				   fb_rvalue);
9874 		    if (omp_is_reference (var))
9875 		      {
9876 			tree v = create_tmp_var_raw (type, get_name (var));
9877 			gimple_add_tmp_var (v);
9878 			TREE_ADDRESSABLE (v) = 1;
9879 			gimple_seq_add_stmt (&new_body,
9880 					     gimple_build_assign (v, x));
9881 			x = build_fold_addr_expr (v);
9882 		      }
9883 		    gimple_seq_add_stmt (&new_body,
9884 					 gimple_build_assign (new_var, x));
9885 		  }
9886 		else
9887 		  {
9888 		    x = build_receiver_ref (var, !omp_is_reference (var), ctx);
9889 		    gimplify_expr (&x, &new_body, NULL, is_gimple_val,
9890 				   fb_rvalue);
9891 		    gimple_seq_add_stmt (&new_body,
9892 					 gimple_build_assign (new_var, x));
9893 		  }
9894 	      }
9895 	    else if (is_variable_sized (var))
9896 	      {
9897 		tree pvar = DECL_VALUE_EXPR (var);
9898 		gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
9899 		pvar = TREE_OPERAND (pvar, 0);
9900 		gcc_assert (DECL_P (pvar));
9901 		tree new_var = lookup_decl (pvar, ctx);
9902 		x = build_receiver_ref (var, false, ctx);
9903 		gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
9904 		gimple_seq_add_stmt (&new_body,
9905 				     gimple_build_assign (new_var, x));
9906 	      }
9907 	    break;
9908 	  case OMP_CLAUSE_PRIVATE:
9909 	    if (is_gimple_omp_oacc (ctx->stmt))
9910 	      break;
9911 	    var = OMP_CLAUSE_DECL (c);
9912 	    if (omp_is_reference (var))
9913 	      {
9914 		location_t clause_loc = OMP_CLAUSE_LOCATION (c);
9915 		tree new_var = lookup_decl (var, ctx);
9916 		x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
9917 		if (TREE_CONSTANT (x))
9918 		  {
9919 		    x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
9920 					    get_name (var));
9921 		    gimple_add_tmp_var (x);
9922 		    TREE_ADDRESSABLE (x) = 1;
9923 		    x = build_fold_addr_expr_loc (clause_loc, x);
9924 		  }
9925 		else
9926 		  break;
9927 
9928 		x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
9929 		gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
9930 		gimple_seq_add_stmt (&new_body,
9931 				     gimple_build_assign (new_var, x));
9932 	      }
9933 	    break;
9934 	  case OMP_CLAUSE_USE_DEVICE_PTR:
9935 	  case OMP_CLAUSE_IS_DEVICE_PTR:
9936 	    var = OMP_CLAUSE_DECL (c);
9937 	    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR)
9938 	      x = build_sender_ref (var, ctx);
9939 	    else
9940 	      x = build_receiver_ref (var, false, ctx);
9941 	    if (is_variable_sized (var))
9942 	      {
9943 		tree pvar = DECL_VALUE_EXPR (var);
9944 		gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
9945 		pvar = TREE_OPERAND (pvar, 0);
9946 		gcc_assert (DECL_P (pvar));
9947 		tree new_var = lookup_decl (pvar, ctx);
9948 		gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
9949 		gimple_seq_add_stmt (&new_body,
9950 				     gimple_build_assign (new_var, x));
9951 	      }
9952 	    else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
9953 	      {
9954 		tree new_var = lookup_decl (var, ctx);
9955 		new_var = DECL_VALUE_EXPR (new_var);
9956 		gcc_assert (TREE_CODE (new_var) == MEM_REF);
9957 		new_var = TREE_OPERAND (new_var, 0);
9958 		gcc_assert (DECL_P (new_var));
9959 		gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
9960 		gimple_seq_add_stmt (&new_body,
9961 				     gimple_build_assign (new_var, x));
9962 	      }
9963 	    else
9964 	      {
9965 		tree type = TREE_TYPE (var);
9966 		tree new_var = lookup_decl (var, ctx);
9967 		if (omp_is_reference (var))
9968 		  {
9969 		    type = TREE_TYPE (type);
9970 		    if (TREE_CODE (type) != ARRAY_TYPE)
9971 		      {
9972 			tree v = create_tmp_var_raw (type, get_name (var));
9973 			gimple_add_tmp_var (v);
9974 			TREE_ADDRESSABLE (v) = 1;
9975 			x = fold_convert (type, x);
9976 			gimplify_expr (&x, &new_body, NULL, is_gimple_val,
9977 				       fb_rvalue);
9978 			gimple_seq_add_stmt (&new_body,
9979 					     gimple_build_assign (v, x));
9980 			x = build_fold_addr_expr (v);
9981 		      }
9982 		  }
9983 		new_var = DECL_VALUE_EXPR (new_var);
9984 		x = fold_convert (TREE_TYPE (new_var), x);
9985 		gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
9986 		gimple_seq_add_stmt (&new_body,
9987 				     gimple_build_assign (new_var, x));
9988 	      }
9989 	    break;
9990 	  }
9991       /* Handle GOMP_MAP_FIRSTPRIVATE_{POINTER,REFERENCE} in second pass,
9992 	 so that firstprivate vars holding OMP_CLAUSE_SIZE if needed
9993 	 are already handled.  Similarly OMP_CLAUSE_PRIVATE for VLAs
9994 	 or references to VLAs.  */
9995       for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
9996 	switch (OMP_CLAUSE_CODE (c))
9997 	  {
9998 	    tree var;
9999 	  default:
10000 	    break;
10001 	  case OMP_CLAUSE_MAP:
10002 	    if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
10003 		|| OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
10004 	      {
10005 		location_t clause_loc = OMP_CLAUSE_LOCATION (c);
10006 		poly_int64 offset = 0;
10007 		gcc_assert (prev);
10008 		var = OMP_CLAUSE_DECL (c);
10009 		if (DECL_P (var)
10010 		    && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
10011 		    && is_global_var (maybe_lookup_decl_in_outer_ctx (var,
10012 								      ctx))
10013 		    && varpool_node::get_create (var)->offloadable)
10014 		  break;
10015 		if (TREE_CODE (var) == INDIRECT_REF
10016 		    && TREE_CODE (TREE_OPERAND (var, 0)) == COMPONENT_REF)
10017 		  var = TREE_OPERAND (var, 0);
10018 		if (TREE_CODE (var) == COMPONENT_REF)
10019 		  {
10020 		    var = get_addr_base_and_unit_offset (var, &offset);
10021 		    gcc_assert (var != NULL_TREE && DECL_P (var));
10022 		  }
10023 		else if (DECL_SIZE (var)
10024 			 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
10025 		  {
10026 		    tree var2 = DECL_VALUE_EXPR (var);
10027 		    gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
10028 		    var2 = TREE_OPERAND (var2, 0);
10029 		    gcc_assert (DECL_P (var2));
10030 		    var = var2;
10031 		  }
10032 		tree new_var = lookup_decl (var, ctx), x;
10033 		tree type = TREE_TYPE (new_var);
10034 		bool is_ref;
10035 		if (TREE_CODE (OMP_CLAUSE_DECL (c)) == INDIRECT_REF
10036 		    && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
10037 			== COMPONENT_REF))
10038 		  {
10039 		    type = TREE_TYPE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0));
10040 		    is_ref = true;
10041 		    new_var = build2 (MEM_REF, type,
10042 				      build_fold_addr_expr (new_var),
10043 				      build_int_cst (build_pointer_type (type),
10044 						     offset));
10045 		  }
10046 		else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPONENT_REF)
10047 		  {
10048 		    type = TREE_TYPE (OMP_CLAUSE_DECL (c));
10049 		    is_ref = TREE_CODE (type) == REFERENCE_TYPE;
10050 		    new_var = build2 (MEM_REF, type,
10051 				      build_fold_addr_expr (new_var),
10052 				      build_int_cst (build_pointer_type (type),
10053 						     offset));
10054 		  }
10055 		else
10056 		  is_ref = omp_is_reference (var);
10057 		if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
10058 		  is_ref = false;
10059 		bool ref_to_array = false;
10060 		if (is_ref)
10061 		  {
10062 		    type = TREE_TYPE (type);
10063 		    if (TREE_CODE (type) == ARRAY_TYPE)
10064 		      {
10065 			type = build_pointer_type (type);
10066 			ref_to_array = true;
10067 		      }
10068 		  }
10069 		else if (TREE_CODE (type) == ARRAY_TYPE)
10070 		  {
10071 		    tree decl2 = DECL_VALUE_EXPR (new_var);
10072 		    gcc_assert (TREE_CODE (decl2) == MEM_REF);
10073 		    decl2 = TREE_OPERAND (decl2, 0);
10074 		    gcc_assert (DECL_P (decl2));
10075 		    new_var = decl2;
10076 		    type = TREE_TYPE (new_var);
10077 		  }
10078 		x = build_receiver_ref (OMP_CLAUSE_DECL (prev), false, ctx);
10079 		x = fold_convert_loc (clause_loc, type, x);
10080 		if (!integer_zerop (OMP_CLAUSE_SIZE (c)))
10081 		  {
10082 		    tree bias = OMP_CLAUSE_SIZE (c);
10083 		    if (DECL_P (bias))
10084 		      bias = lookup_decl (bias, ctx);
10085 		    bias = fold_convert_loc (clause_loc, sizetype, bias);
10086 		    bias = fold_build1_loc (clause_loc, NEGATE_EXPR, sizetype,
10087 					    bias);
10088 		    x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
10089 					 TREE_TYPE (x), x, bias);
10090 		  }
10091 		if (ref_to_array)
10092 		  x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
10093 		gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
10094 		if (is_ref && !ref_to_array)
10095 		  {
10096 		    tree t = create_tmp_var_raw (type, get_name (var));
10097 		    gimple_add_tmp_var (t);
10098 		    TREE_ADDRESSABLE (t) = 1;
10099 		    gimple_seq_add_stmt (&new_body,
10100 					 gimple_build_assign (t, x));
10101 		    x = build_fold_addr_expr_loc (clause_loc, t);
10102 		  }
10103 		gimple_seq_add_stmt (&new_body,
10104 				     gimple_build_assign (new_var, x));
10105 		prev = NULL_TREE;
10106 	      }
10107 	    else if (OMP_CLAUSE_CHAIN (c)
10108 		     && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c))
10109 			== OMP_CLAUSE_MAP
10110 		     && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
10111 			 == GOMP_MAP_FIRSTPRIVATE_POINTER
10112 			 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
10113 			     == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
10114 	      prev = c;
10115 	    break;
10116 	  case OMP_CLAUSE_PRIVATE:
10117 	    var = OMP_CLAUSE_DECL (c);
10118 	    if (is_variable_sized (var))
10119 	      {
10120 		location_t clause_loc = OMP_CLAUSE_LOCATION (c);
10121 		tree new_var = lookup_decl (var, ctx);
10122 		tree pvar = DECL_VALUE_EXPR (var);
10123 		gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
10124 		pvar = TREE_OPERAND (pvar, 0);
10125 		gcc_assert (DECL_P (pvar));
10126 		tree new_pvar = lookup_decl (pvar, ctx);
10127 		tree atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
10128 		tree al = size_int (DECL_ALIGN (var));
10129 		tree x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
10130 		x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
10131 		x = fold_convert_loc (clause_loc, TREE_TYPE (new_pvar), x);
10132 		gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
10133 		gimple_seq_add_stmt (&new_body,
10134 				     gimple_build_assign (new_pvar, x));
10135 	      }
10136 	    else if (omp_is_reference (var) && !is_gimple_omp_oacc (ctx->stmt))
10137 	      {
10138 		location_t clause_loc = OMP_CLAUSE_LOCATION (c);
10139 		tree new_var = lookup_decl (var, ctx);
10140 		tree x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
10141 		if (TREE_CONSTANT (x))
10142 		  break;
10143 		else
10144 		  {
10145 		    tree atmp
10146 		      = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
10147 		    tree rtype = TREE_TYPE (TREE_TYPE (new_var));
10148 		    tree al = size_int (TYPE_ALIGN (rtype));
10149 		    x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
10150 		  }
10151 
10152 		x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
10153 		gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
10154 		gimple_seq_add_stmt (&new_body,
10155 				     gimple_build_assign (new_var, x));
10156 	      }
10157 	    break;
10158 	  }
10159 
10160       gimple_seq fork_seq = NULL;
10161       gimple_seq join_seq = NULL;
10162 
10163       if (is_oacc_parallel (ctx))
10164 	{
10165 	  /* If there are reductions on the offloaded region itself, treat
10166 	     them as a dummy GANG loop.  */
10167 	  tree level = build_int_cst (integer_type_node, GOMP_DIM_GANG);
10168 
10169 	  lower_oacc_reductions (gimple_location (ctx->stmt), clauses, level,
10170 				 false, NULL, NULL, &fork_seq, &join_seq, ctx);
10171 	}
10172 
10173       gimple_seq_add_seq (&new_body, fork_seq);
10174       gimple_seq_add_seq (&new_body, tgt_body);
10175       gimple_seq_add_seq (&new_body, join_seq);
10176 
10177       if (offloaded)
10178 	new_body = maybe_catch_exception (new_body);
10179 
10180       gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
10181       gimple_omp_set_body (stmt, new_body);
10182     }
10183 
10184   bind = gimple_build_bind (NULL, NULL,
10185 			    tgt_bind ? gimple_bind_block (tgt_bind)
10186 				     : NULL_TREE);
10187   gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
10188   gimple_bind_add_seq (bind, ilist);
10189   gimple_bind_add_stmt (bind, stmt);
10190   gimple_bind_add_seq (bind, olist);
10191 
10192   pop_gimplify_context (NULL);
10193 
10194   if (dep_bind)
10195     {
10196       gimple_bind_add_seq (dep_bind, dep_ilist);
10197       gimple_bind_add_stmt (dep_bind, bind);
10198       gimple_bind_add_seq (dep_bind, dep_olist);
10199       pop_gimplify_context (dep_bind);
10200     }
10201 }
10202 
10203 /* Expand code for an OpenMP teams directive.  */
10204 
10205 static void
10206 lower_omp_teams (gimple_stmt_iterator *gsi_p, omp_context *ctx)
10207 {
10208   gomp_teams *teams_stmt = as_a <gomp_teams *> (gsi_stmt (*gsi_p));
10209   push_gimplify_context ();
10210 
10211   tree block = make_node (BLOCK);
10212   gbind *bind = gimple_build_bind (NULL, NULL, block);
10213   gsi_replace (gsi_p, bind, true);
10214   gimple_seq bind_body = NULL;
10215   gimple_seq dlist = NULL;
10216   gimple_seq olist = NULL;
10217 
10218   tree num_teams = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
10219 				    OMP_CLAUSE_NUM_TEAMS);
10220   if (num_teams == NULL_TREE)
10221     num_teams = build_int_cst (unsigned_type_node, 0);
10222   else
10223     {
10224       num_teams = OMP_CLAUSE_NUM_TEAMS_EXPR (num_teams);
10225       num_teams = fold_convert (unsigned_type_node, num_teams);
10226       gimplify_expr (&num_teams, &bind_body, NULL, is_gimple_val, fb_rvalue);
10227     }
10228   tree thread_limit = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
10229 				       OMP_CLAUSE_THREAD_LIMIT);
10230   if (thread_limit == NULL_TREE)
10231     thread_limit = build_int_cst (unsigned_type_node, 0);
10232   else
10233     {
10234       thread_limit = OMP_CLAUSE_THREAD_LIMIT_EXPR (thread_limit);
10235       thread_limit = fold_convert (unsigned_type_node, thread_limit);
10236       gimplify_expr (&thread_limit, &bind_body, NULL, is_gimple_val,
10237 		     fb_rvalue);
10238     }
10239 
10240   lower_rec_input_clauses (gimple_omp_teams_clauses (teams_stmt),
10241 			   &bind_body, &dlist, ctx, NULL);
10242   lower_omp (gimple_omp_body_ptr (teams_stmt), ctx);
10243   lower_reduction_clauses (gimple_omp_teams_clauses (teams_stmt), &olist, ctx);
10244   if (!gimple_omp_teams_grid_phony (teams_stmt))
10245     {
10246       gimple_seq_add_stmt (&bind_body, teams_stmt);
10247       location_t loc = gimple_location (teams_stmt);
10248       tree decl = builtin_decl_explicit (BUILT_IN_GOMP_TEAMS);
10249       gimple *call = gimple_build_call (decl, 2, num_teams, thread_limit);
10250       gimple_set_location (call, loc);
10251       gimple_seq_add_stmt (&bind_body, call);
10252     }
10253 
10254   gimple_seq_add_seq (&bind_body, gimple_omp_body (teams_stmt));
10255   gimple_omp_set_body (teams_stmt, NULL);
10256   gimple_seq_add_seq (&bind_body, olist);
10257   gimple_seq_add_seq (&bind_body, dlist);
10258   if (!gimple_omp_teams_grid_phony (teams_stmt))
10259     gimple_seq_add_stmt (&bind_body, gimple_build_omp_return (true));
10260   gimple_bind_set_body (bind, bind_body);
10261 
10262   pop_gimplify_context (bind);
10263 
10264   gimple_bind_append_vars (bind, ctx->block_vars);
10265   BLOCK_VARS (block) = ctx->block_vars;
10266   if (BLOCK_VARS (block))
10267     TREE_USED (block) = 1;
10268 }
10269 
10270 /* Expand code within an artificial GIMPLE_OMP_GRID_BODY OMP construct.  */
10271 
10272 static void
10273 lower_omp_grid_body (gimple_stmt_iterator *gsi_p, omp_context *ctx)
10274 {
10275   gimple *stmt = gsi_stmt (*gsi_p);
10276   lower_omp (gimple_omp_body_ptr (stmt), ctx);
10277   gimple_seq_add_stmt (gimple_omp_body_ptr (stmt),
10278 		       gimple_build_omp_return (false));
10279 }
10280 
10281 
10282 /* Callback for lower_omp_1.  Return non-NULL if *tp needs to be
10283    regimplified.  If DATA is non-NULL, lower_omp_1 is outside
10284    of OMP context, but with task_shared_vars set.  */
10285 
10286 static tree
10287 lower_omp_regimplify_p (tree *tp, int *walk_subtrees,
10288     			void *data)
10289 {
10290   tree t = *tp;
10291 
10292   /* Any variable with DECL_VALUE_EXPR needs to be regimplified.  */
10293   if (VAR_P (t) && data == NULL && DECL_HAS_VALUE_EXPR_P (t))
10294     return t;
10295 
10296   if (task_shared_vars
10297       && DECL_P (t)
10298       && bitmap_bit_p (task_shared_vars, DECL_UID (t)))
10299     return t;
10300 
10301   /* If a global variable has been privatized, TREE_CONSTANT on
10302      ADDR_EXPR might be wrong.  */
10303   if (data == NULL && TREE_CODE (t) == ADDR_EXPR)
10304     recompute_tree_invariant_for_addr_expr (t);
10305 
10306   *walk_subtrees = !IS_TYPE_OR_DECL_P (t);
10307   return NULL_TREE;
10308 }
10309 
10310 /* Data to be communicated between lower_omp_regimplify_operands and
10311    lower_omp_regimplify_operands_p.  */
10312 
struct lower_omp_regimplify_operands_data
{
  /* Context of the statement being regimplified.  */
  omp_context *ctx;
  /* Flat stack of (saved DECL_VALUE_EXPR, decl) pairs, pushed by
     lower_omp_regimplify_operands_p and popped back (decl first) by
     lower_omp_regimplify_operands to restore the original value exprs.  */
  vec<tree> *decls;
};
10318 
10319 /* Helper function for lower_omp_regimplify_operands.  Find
10320    omp_member_access_dummy_var vars and adjust temporarily their
10321    DECL_VALUE_EXPRs if needed.  */
10322 
10323 static tree
10324 lower_omp_regimplify_operands_p (tree *tp, int *walk_subtrees,
10325 				 void *data)
10326 {
10327   tree t = omp_member_access_dummy_var (*tp);
10328   if (t)
10329     {
10330       struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
10331       lower_omp_regimplify_operands_data *ldata
10332 	= (lower_omp_regimplify_operands_data *) wi->info;
10333       tree o = maybe_lookup_decl (t, ldata->ctx);
10334       if (o != t)
10335 	{
10336 	  ldata->decls->safe_push (DECL_VALUE_EXPR (*tp));
10337 	  ldata->decls->safe_push (*tp);
10338 	  tree v = unshare_and_remap (DECL_VALUE_EXPR (*tp), t, o);
10339 	  SET_DECL_VALUE_EXPR (*tp, v);
10340 	}
10341     }
10342   *walk_subtrees = !IS_TYPE_OR_DECL_P (*tp);
10343   return NULL_TREE;
10344 }
10345 
10346 /* Wrapper around gimple_regimplify_operands that adjusts DECL_VALUE_EXPRs
10347    of omp_member_access_dummy_var vars during regimplification.  */
10348 
10349 static void
10350 lower_omp_regimplify_operands (omp_context *ctx, gimple *stmt,
10351 			       gimple_stmt_iterator *gsi_p)
10352 {
10353   auto_vec<tree, 10> decls;
10354   if (ctx)
10355     {
10356       struct walk_stmt_info wi;
10357       memset (&wi, '\0', sizeof (wi));
10358       struct lower_omp_regimplify_operands_data data;
10359       data.ctx = ctx;
10360       data.decls = &decls;
10361       wi.info = &data;
10362       walk_gimple_op (stmt, lower_omp_regimplify_operands_p, &wi);
10363     }
10364   gimple_regimplify_operands (stmt, gsi_p);
10365   while (!decls.is_empty ())
10366     {
10367       tree t = decls.pop ();
10368       tree v = decls.pop ();
10369       SET_DECL_VALUE_EXPR (t, v);
10370     }
10371 }
10372 
/* Lower the single statement at *GSI_P within OMP context CTX.  CTX is
   NULL when the statement is outside any OMP context; in that case a
   non-NULL task_shared_vars still forces regimplification of statements
   that mention variables shared with explicit tasks.  OMP directives are
   dispatched to their dedicated lowering helpers; container statements
   recurse into their bodies; everything else is regimplified when it
   refers to privatized or task-shared variables.  */

static void
lower_omp_1 (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  gimple *stmt = gsi_stmt (*gsi_p);
  struct walk_stmt_info wi;
  gcall *call_stmt;

  if (gimple_has_location (stmt))
    input_location = gimple_location (stmt);

  /* WI is only passed to lower_omp_regimplify_p when CTX == NULL and
     task_shared_vars is set (see the walk_tree calls below).  */
  if (task_shared_vars)
    memset (&wi, '\0', sizeof (wi));

  /* If we have issued syntax errors, avoid doing any heavy lifting.
     Just replace the OMP directives with a NOP to avoid
     confusing RTL expansion.  */
  if (seen_error () && is_gimple_omp (stmt))
    {
      gsi_replace (gsi_p, gimple_build_nop (), true);
      return;
    }

  switch (gimple_code (stmt))
    {
    case GIMPLE_COND:
      {
	gcond *cond_stmt = as_a <gcond *> (stmt);
	/* Regimplify the condition if either operand mentions a
	   privatized or task-shared variable.  */
	if ((ctx || task_shared_vars)
	    && (walk_tree (gimple_cond_lhs_ptr (cond_stmt),
			   lower_omp_regimplify_p,
			   ctx ? NULL : &wi, NULL)
		|| walk_tree (gimple_cond_rhs_ptr (cond_stmt),
			      lower_omp_regimplify_p,
			      ctx ? NULL : &wi, NULL)))
	  lower_omp_regimplify_operands (ctx, cond_stmt, gsi_p);
      }
      break;
    /* Container statements: recurse into their bodies.  */
    case GIMPLE_CATCH:
      lower_omp (gimple_catch_handler_ptr (as_a <gcatch *> (stmt)), ctx);
      break;
    case GIMPLE_EH_FILTER:
      lower_omp (gimple_eh_filter_failure_ptr (stmt), ctx);
      break;
    case GIMPLE_TRY:
      lower_omp (gimple_try_eval_ptr (stmt), ctx);
      lower_omp (gimple_try_cleanup_ptr (stmt), ctx);
      break;
    case GIMPLE_TRANSACTION:
      lower_omp (gimple_transaction_body_ptr (as_a <gtransaction *> (stmt)),
		 ctx);
      break;
    case GIMPLE_BIND:
      lower_omp (gimple_bind_body_ptr (as_a <gbind *> (stmt)), ctx);
      maybe_remove_omp_member_access_dummy_vars (as_a <gbind *> (stmt));
      break;
    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      /* Cancellable regions get a label that the cancellation checks
	 emitted in the GIMPLE_CALL case below branch to.  */
      if (ctx->cancellable)
	ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
      lower_omp_taskreg (gsi_p, ctx);
      break;
    case GIMPLE_OMP_FOR:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      if (ctx->cancellable)
	ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
      lower_omp_for (gsi_p, ctx);
      break;
    case GIMPLE_OMP_SECTIONS:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      if (ctx->cancellable)
	ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
      lower_omp_sections (gsi_p, ctx);
      break;
    case GIMPLE_OMP_SINGLE:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_single (gsi_p, ctx);
      break;
    case GIMPLE_OMP_MASTER:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_master (gsi_p, ctx);
      break;
    case GIMPLE_OMP_TASKGROUP:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_taskgroup (gsi_p, ctx);
      break;
    case GIMPLE_OMP_ORDERED:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_ordered (gsi_p, ctx);
      break;
    case GIMPLE_OMP_CRITICAL:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_critical (gsi_p, ctx);
      break;
    case GIMPLE_OMP_ATOMIC_LOAD:
      /* Only the load RHS can need regimplification here.  */
      if ((ctx || task_shared_vars)
	  && walk_tree (gimple_omp_atomic_load_rhs_ptr (
			  as_a <gomp_atomic_load *> (stmt)),
			lower_omp_regimplify_p, ctx ? NULL : &wi, NULL))
	lower_omp_regimplify_operands (ctx, stmt, gsi_p);
      break;
    case GIMPLE_OMP_TARGET:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_target (gsi_p, ctx);
      break;
    case GIMPLE_OMP_TEAMS:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      /* Host teams constructs are lowered like parallel/task regions.  */
      if (gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
	lower_omp_taskreg (gsi_p, ctx);
      else
	lower_omp_teams (gsi_p, ctx);
      break;
    case GIMPLE_OMP_GRID_BODY:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_grid_body (gsi_p, ctx);
      break;
    case GIMPLE_CALL:
      tree fndecl;
      call_stmt = as_a <gcall *> (stmt);
      fndecl = gimple_call_fndecl (call_stmt);
      if (fndecl
	  && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
	switch (DECL_FUNCTION_CODE (fndecl))
	  {
	  case BUILT_IN_GOMP_BARRIER:
	    if (ctx == NULL)
	      break;
	    /* FALLTHRU */
	  case BUILT_IN_GOMP_CANCEL:
	  case BUILT_IN_GOMP_CANCELLATION_POINT:
	    omp_context *cctx;
	    cctx = ctx;
	    /* A section uses the cancellation label of its enclosing
	       construct's context.  */
	    if (gimple_code (cctx->stmt) == GIMPLE_OMP_SECTION)
	      cctx = cctx->outer;
	    gcc_assert (gimple_call_lhs (call_stmt) == NULL_TREE);
	    if (!cctx->cancellable)
	      {
		/* In a non-cancellable region, cancellation points are
		   no-ops; barriers and cancel calls are left alone.  */
		if (DECL_FUNCTION_CODE (fndecl)
		    == BUILT_IN_GOMP_CANCELLATION_POINT)
		  {
		    stmt = gimple_build_nop ();
		    gsi_replace (gsi_p, stmt, false);
		  }
		break;
	      }
	    if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_GOMP_BARRIER)
	      {
		/* Barriers in cancellable regions must use the
		   cancellation-aware runtime entry point instead.  */
		fndecl = builtin_decl_explicit (BUILT_IN_GOMP_BARRIER_CANCEL);
		gimple_call_set_fndecl (call_stmt, fndecl);
		gimple_call_set_fntype (call_stmt, TREE_TYPE (fndecl));
	      }
	    /* Capture the call's result and branch to the region's
	       cancel label when it is non-false (cancellation was
	       requested), otherwise fall through.  */
	    tree lhs;
	    lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (fndecl)));
	    gimple_call_set_lhs (call_stmt, lhs);
	    tree fallthru_label;
	    fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
	    gimple *g;
	    g = gimple_build_label (fallthru_label);
	    gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
	    g = gimple_build_cond (NE_EXPR, lhs,
				   fold_convert (TREE_TYPE (lhs),
						 boolean_false_node),
				   cctx->cancel_label, fallthru_label);
	    gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
	    break;
	  default:
	    break;
	  }
      /* FALLTHRU */
    default:
      if ((ctx || task_shared_vars)
	  && walk_gimple_op (stmt, lower_omp_regimplify_p,
			     ctx ? NULL : &wi))
	{
	  /* Just remove clobbers, this should happen only if we have
	     "privatized" local addressable variables in SIMD regions,
	     the clobber isn't needed in that case and gimplifying address
	     of the ARRAY_REF into a pointer and creating MEM_REF based
	     clobber would create worse code than we get with the clobber
	     dropped.  */
	  if (gimple_clobber_p (stmt))
	    {
	      gsi_replace (gsi_p, gimple_build_nop (), true);
	      break;
	    }
	  lower_omp_regimplify_operands (ctx, stmt, gsi_p);
	}
      break;
    }
}
10573 }
10574 
10575 static void
10576 lower_omp (gimple_seq *body, omp_context *ctx)
10577 {
10578   location_t saved_location = input_location;
10579   gimple_stmt_iterator gsi;
10580   for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
10581     lower_omp_1 (&gsi, ctx);
10582   /* During gimplification, we haven't folded statments inside offloading
10583      or taskreg regions (gimplify.c:maybe_fold_stmt); do that now.  */
10584   if (target_nesting_level || taskreg_nesting_level)
10585     for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
10586       fold_stmt (&gsi);
10587   input_location = saved_location;
10588 }
10589 
10590 /* Main entry point.  */
10591 
static unsigned int
execute_lower_omp (void)
{
  gimple_seq body;
  int i;
  omp_context *ctx;

  /* This pass always runs, to provide PROP_gimple_lomp.
     But often, there is nothing to do.  */
  if (flag_openacc == 0 && flag_openmp == 0
      && flag_openmp_simd == 0)
    return 0;

  /* Map from OMP statements to their omp_context, populated by scan_omp
     and consulted throughout lowering.  */
  all_contexts = splay_tree_new (splay_tree_compare_pointers, 0,
				 delete_omp_context);

  body = gimple_body (current_function_decl);

  /* Gridify target constructs first when HSA code generation was
     requested (see omp-grid.c).  */
  if (hsa_gen_requested_p ())
    omp_grid_gridify_all_targets (&body);

  /* Pass 1: scan to build contexts; lowering happens afterwards.  */
  scan_omp (&body, NULL);
  gcc_assert (taskreg_nesting_level == 0);
  FOR_EACH_VEC_ELT (taskreg_contexts, i, ctx)
    finish_taskreg_scan (ctx);
  taskreg_contexts.release ();

  if (all_contexts->root)
    {
      /* task_shared_vars may require regimplification even outside of
	 OMP contexts, so set up a gimplify context for the whole body.  */
      if (task_shared_vars)
	push_gimplify_context ();
      lower_omp (&body, NULL);
      if (task_shared_vars)
	pop_gimplify_context (NULL);
    }

  if (all_contexts)
    {
      splay_tree_delete (all_contexts);
      all_contexts = NULL;
    }
  BITMAP_FREE (task_shared_vars);
  BITMAP_FREE (global_nonaddressable_vars);

  /* If current function is a method, remove artificial dummy VAR_DECL created
     for non-static data member privatization, they aren't needed for
     debuginfo nor anything else, have been already replaced everywhere in the
     IL and cause problems with LTO.  */
  if (DECL_ARGUMENTS (current_function_decl)
      && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
      && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
	  == POINTER_TYPE))
    remove_member_access_dummy_vars (DECL_INITIAL (current_function_decl));
  return 0;
}
10647 
namespace {

const pass_data pass_data_lower_omp =
{
  GIMPLE_PASS, /* type */
  "omplower", /* name */
  OPTGROUP_OMP, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  PROP_gimple_lomp | PROP_gimple_lomp_dev, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* The "omplower" pass object.  No gate method: the pass always runs so
   that PROP_gimple_lomp is provided; execute_lower_omp itself returns
   early when no OMP-family flag is enabled.  */

class pass_lower_omp : public gimple_opt_pass
{
public:
  pass_lower_omp (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_lower_omp, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return execute_lower_omp (); }

}; // class pass_lower_omp

} // anon namespace
10676 
10677 gimple_opt_pass *
10678 make_pass_lower_omp (gcc::context *ctxt)
10679 {
10680   return new pass_lower_omp (ctxt);
10681 }
10682 
/* The following is a utility to diagnose structured block violations.
   It is not part of the "omplower" pass, as that's invoked too late.  It
   should be invoked by the respective front ends after gimplification.  */

/* Map from each LABEL_DECL encountered to the innermost enclosing OMP
   construct (a gimple statement), or NULL when the label is outside any
   OMP construct.  Populated by diagnose_sb_1, queried by diagnose_sb_2.  */
static splay_tree all_labels;
10688 
10689 /* Check for mismatched contexts and generate an error if needed.  Return
10690    true if an error is detected.  */
10691 
10692 static bool
10693 diagnose_sb_0 (gimple_stmt_iterator *gsi_p,
10694 	       gimple *branch_ctx, gimple *label_ctx)
10695 {
10696   gcc_checking_assert (!branch_ctx || is_gimple_omp (branch_ctx));
10697   gcc_checking_assert (!label_ctx || is_gimple_omp (label_ctx));
10698 
10699   if (label_ctx == branch_ctx)
10700     return false;
10701 
10702   const char* kind = NULL;
10703 
10704   if (flag_openacc)
10705     {
10706       if ((branch_ctx && is_gimple_omp_oacc (branch_ctx))
10707 	  || (label_ctx && is_gimple_omp_oacc (label_ctx)))
10708 	{
10709 	  gcc_checking_assert (kind == NULL);
10710 	  kind = "OpenACC";
10711 	}
10712     }
10713   if (kind == NULL)
10714     {
10715       gcc_checking_assert (flag_openmp || flag_openmp_simd);
10716       kind = "OpenMP";
10717     }
10718 
10719   /* Previously we kept track of the label's entire context in diagnose_sb_[12]
10720      so we could traverse it and issue a correct "exit" or "enter" error
10721      message upon a structured block violation.
10722 
10723      We built the context by building a list with tree_cons'ing, but there is
10724      no easy counterpart in gimple tuples.  It seems like far too much work
10725      for issuing exit/enter error messages.  If someone really misses the
10726      distinct error message... patches welcome.  */
10727 
10728 #if 0
10729   /* Try to avoid confusing the user by producing and error message
10730      with correct "exit" or "enter" verbiage.  We prefer "exit"
10731      unless we can show that LABEL_CTX is nested within BRANCH_CTX.  */
10732   if (branch_ctx == NULL)
10733     exit_p = false;
10734   else
10735     {
10736       while (label_ctx)
10737 	{
10738 	  if (TREE_VALUE (label_ctx) == branch_ctx)
10739 	    {
10740 	      exit_p = false;
10741 	      break;
10742 	    }
10743 	  label_ctx = TREE_CHAIN (label_ctx);
10744 	}
10745     }
10746 
10747   if (exit_p)
10748     error ("invalid exit from %s structured block", kind);
10749   else
10750     error ("invalid entry to %s structured block", kind);
10751 #endif
10752 
10753   /* If it's obvious we have an invalid entry, be specific about the error.  */
10754   if (branch_ctx == NULL)
10755     error ("invalid entry to %s structured block", kind);
10756   else
10757     {
10758       /* Otherwise, be vague and lazy, but efficient.  */
10759       error ("invalid branch to/from %s structured block", kind);
10760     }
10761 
10762   gsi_replace (gsi_p, gimple_build_nop (), false);
10763   return true;
10764 }
10765 
/* Pass 1: Create a minimal tree of structured blocks, and record
   where each label is found.  Callback for walk_gimple_seq; WI->info
   carries the innermost enclosing OMP construct, NULL at the top
   level.  */

static tree
diagnose_sb_1 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
    	       struct walk_stmt_info *wi)
{
  gimple *context = (gimple *) wi->info;
  gimple *inner_context;
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;

  switch (gimple_code (stmt))
    {
    WALK_SUBSTMTS;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_TARGET:
    case GIMPLE_OMP_TEAMS:
    case GIMPLE_OMP_TASKGROUP:
      /* The minimal context here is just the current OMP construct.
	 Recurse into the body with this construct as the context.  */
      inner_context = stmt;
      wi->info = inner_context;
      walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_OMP_FOR:
      inner_context = stmt;
      wi->info = inner_context;
      /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
	 walk them.  */
      walk_gimple_seq (gimple_omp_for_pre_body (stmt),
	  	       diagnose_sb_1, NULL, wi);
      walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_LABEL:
      /* Record which OMP construct (possibly NULL) this label lives in.  */
      splay_tree_insert (all_labels,
			 (splay_tree_key) gimple_label_label (
					    as_a <glabel *> (stmt)),
			 (splay_tree_value) context);
      break;

    default:
      break;
    }

  return NULL_TREE;
}
10825 
/* Pass 2: Check each branch and see if its context differs from that of
   the destination label's context.  Callback for walk_gimple_seq_mod;
   WI->info carries the innermost enclosing OMP construct of the visited
   statement, NULL at the top level.  */

static tree
diagnose_sb_2 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
    	       struct walk_stmt_info *wi)
{
  gimple *context = (gimple *) wi->info;
  splay_tree_node n;
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;

  switch (gimple_code (stmt))
    {
    WALK_SUBSTMTS;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_TARGET:
    case GIMPLE_OMP_TEAMS:
    case GIMPLE_OMP_TASKGROUP:
      /* Recurse into the body with this construct as the new context.  */
      wi->info = stmt;
      walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_OMP_FOR:
      wi->info = stmt;
      /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
	 walk them.  */
      walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt),
			   diagnose_sb_2, NULL, wi);
      walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_COND:
	{
	  gcond *cond_stmt = as_a <gcond *> (stmt);
	  /* Check both destinations of the conditional branch against
	     the contexts recorded for their labels in pass 1.  */
	  tree lab = gimple_cond_true_label (cond_stmt);
	  if (lab)
	    {
	      n = splay_tree_lookup (all_labels,
				     (splay_tree_key) lab);
	      diagnose_sb_0 (gsi_p, context,
			     n ? (gimple *) n->value : NULL);
	    }
	  lab = gimple_cond_false_label (cond_stmt);
	  if (lab)
	    {
	      n = splay_tree_lookup (all_labels,
				     (splay_tree_key) lab);
	      diagnose_sb_0 (gsi_p, context,
			     n ? (gimple *) n->value : NULL);
	    }
	}
      break;

    case GIMPLE_GOTO:
      {
	tree lab = gimple_goto_dest (stmt);
	/* Computed gotos have non-LABEL_DECL destinations; skip them.  */
	if (TREE_CODE (lab) != LABEL_DECL)
	  break;

	n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
	diagnose_sb_0 (gsi_p, context, n ? (gimple *) n->value : NULL);
      }
      break;

    case GIMPLE_SWITCH:
      {
	gswitch *switch_stmt = as_a <gswitch *> (stmt);
	unsigned int i;
	/* Check every case label; stop at the first diagnosed error,
	   since diagnose_sb_0 replaces the statement on error.  */
	for (i = 0; i < gimple_switch_num_labels (switch_stmt); ++i)
	  {
	    tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
	    n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
	    if (n && diagnose_sb_0 (gsi_p, context, (gimple *) n->value))
	      break;
	  }
      }
      break;

    case GIMPLE_RETURN:
      /* The return target is outside any OMP construct, so pass a NULL
	 label context; this errors whenever CONTEXT is non-NULL.  */
      diagnose_sb_0 (gsi_p, context, NULL);
      break;

    default:
      break;
    }

  return NULL_TREE;
}
10926 
10927 static unsigned int
10928 diagnose_omp_structured_block_errors (void)
10929 {
10930   struct walk_stmt_info wi;
10931   gimple_seq body = gimple_body (current_function_decl);
10932 
10933   all_labels = splay_tree_new (splay_tree_compare_pointers, 0, 0);
10934 
10935   memset (&wi, 0, sizeof (wi));
10936   walk_gimple_seq (body, diagnose_sb_1, NULL, &wi);
10937 
10938   memset (&wi, 0, sizeof (wi));
10939   wi.want_locations = true;
10940   walk_gimple_seq_mod (&body, diagnose_sb_2, NULL, &wi);
10941 
10942   gimple_set_body (current_function_decl, body);
10943 
10944   splay_tree_delete (all_labels);
10945   all_labels = NULL;
10946 
10947   return 0;
10948 }
10949 
namespace {

const pass_data pass_data_diagnose_omp_blocks =
{
  GIMPLE_PASS, /* type */
  "*diagnose_omp_blocks", /* name */
  OPTGROUP_OMP, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* Pass that diagnoses invalid branches into or out of OMP/OpenACC
   structured blocks; gated on any of the OMP-family flags.  */

class pass_diagnose_omp_blocks : public gimple_opt_pass
{
public:
  pass_diagnose_omp_blocks (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_diagnose_omp_blocks, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
  {
    return flag_openacc || flag_openmp || flag_openmp_simd;
  }
  virtual unsigned int execute (function *)
    {
      return diagnose_omp_structured_block_errors ();
    }

}; // class pass_diagnose_omp_blocks

} // anon namespace
10984 } // anon namespace
10985 
10986 gimple_opt_pass *
10987 make_pass_diagnose_omp_blocks (gcc::context *ctxt)
10988 {
10989   return new pass_diagnose_omp_blocks (ctxt);
10990 }
10991 
10992 
10993 #include "gt-omp-low.h"
10994