/* Lowering pass for OMP directives.  Converts OMP directives into explicit
   calls to the runtime library (libgomp), data marshalling to implement data
   sharing and copying clauses, offloading to accelerators, and more.

   Contributed by Diego Novillo <dnovillo@redhat.com>

   Copyright (C) 2005-2018 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pass.h"
#include "ssa.h"
#include "cgraph.h"
#include "pretty-print.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "tree-iterator.h"
#include "tree-inline.h"
#include "langhooks.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "splay-tree.h"
#include "omp-general.h"
#include "omp-low.h"
#include "omp-grid.h"
#include "gimple-low.h"
#include "symbol-summary.h"
#include "tree-nested.h"
#include "context.h"
#include "gomp-constants.h"
#include "gimple-pretty-print.h"
#include "hsa-common.h"
#include "stringpool.h"
#include "attribs.h"

/* Lowering of OMP parallel and workshare constructs proceeds in two
   phases.  The first phase scans the function looking for OMP statements
   and then for variables that must be replaced to satisfy data sharing
   clauses.  The second phase expands code for the constructs, as well as
   re-gimplifying things when variables have been replaced with complex
   expressions.

   Final code generation is done by pass_expand_omp.  The flowgraph is
   scanned for regions which are then moved to a new
   function, to be invoked by the thread library, or offloaded.  */

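/* As a rough sketch (the exact shape depends on the clauses, on
   use_pointer_for_field below, and on the target), a directive such as

	#pragma omp parallel shared (a)
	  body;

   ends up as a child function plus a call into libgomp:

	void foo._omp_fn.0 (struct .omp_data_s *.omp_data_i)
	{
	  ... body, referring to "a" through .omp_data_i ...
	}

	.omp_data_o.a = &a;
	__builtin_GOMP_parallel (foo._omp_fn.0, &.omp_data_o, 0, 0);  */
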
/* Context structure.  Used to store information about each parallel
   directive in the code.  */

struct omp_context
{
  /* This field must be at the beginning, as we do "inheritance": Some
     callback functions for tree-inline.c (e.g., omp_copy_decl)
     receive a copy_body_data pointer that is up-casted to an
     omp_context pointer.  */
  copy_body_data cb;

  /* The tree of contexts corresponding to the encountered constructs.  */
  struct omp_context *outer;
  gimple *stmt;

  /* Map variables to fields in a structure that allows communication
     between sending and receiving threads.  */
  splay_tree field_map;
  tree record_type;
  tree sender_decl;
  tree receiver_decl;

  /* These are used just by task contexts, if task firstprivate fn is
     needed.  srecord_type is used to communicate from the thread
     that encountered the task construct to the task firstprivate fn;
     record_type is allocated by GOMP_task, initialized by the task
     firstprivate fn and passed to the task body fn.  */
  splay_tree sfield_map;
  tree srecord_type;

  /* A chain of variables to add to the top-level block surrounding the
     construct.  In the case of a parallel, this is in the child function.  */
  tree block_vars;

  /* Label to which GOMP_cancel{,lation_point} and explicit and implicit
     barriers should jump during the omplower pass.  */
  tree cancel_label;

  /* The sibling GIMPLE_OMP_FOR simd with _simt_ clause or NULL
     otherwise.  */
  gimple *simt_stmt;

  /* Nesting depth of this context.  Used to beautify error messages about
     invalid gotos.  The outermost ctx is depth 1, with depth 0 being
     reserved for the main body of the function.  */
  int depth;

  /* True if this parallel directive is nested within another.  */
  bool is_nested;

  /* True if this construct can be cancelled.  */
  bool cancellable;
};

static splay_tree all_contexts;
static int taskreg_nesting_level;
static int target_nesting_level;
static bitmap task_shared_vars;
static vec<omp_context *> taskreg_contexts;

static void scan_omp (gimple_seq *, omp_context *);
static tree scan_omp_1_op (tree *, int *, void *);

#define WALK_SUBSTMTS  \
    case GIMPLE_BIND: \
    case GIMPLE_TRY: \
    case GIMPLE_CATCH: \
    case GIMPLE_EH_FILTER: \
    case GIMPLE_TRANSACTION: \
      /* The sub-statements for these should be walked.  */ \
      *handled_ops_p = false; \
      break;

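/* WALK_SUBSTMTS is intended for use inside the switch of a
   walk_gimple_seq callback; see omp_find_combined_for below for a
   typical use.  */
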
/* Return true if CTX corresponds to an oacc parallel region.  */

static bool
is_oacc_parallel (omp_context *ctx)
{
  enum gimple_code outer_type = gimple_code (ctx->stmt);
  return ((outer_type == GIMPLE_OMP_TARGET)
	  && (gimple_omp_target_kind (ctx->stmt)
	      == GF_OMP_TARGET_KIND_OACC_PARALLEL));
}

/* Return true if CTX corresponds to an oacc kernels region.  */

static bool
is_oacc_kernels (omp_context *ctx)
{
  enum gimple_code outer_type = gimple_code (ctx->stmt);
  return ((outer_type == GIMPLE_OMP_TARGET)
	  && (gimple_omp_target_kind (ctx->stmt)
	      == GF_OMP_TARGET_KIND_OACC_KERNELS));
}

/* If DECL is the artificial dummy VAR_DECL created for non-static
   data member privatization, return the underlying "this" parameter,
   otherwise return NULL.  */

tree
omp_member_access_dummy_var (tree decl)
{
  if (!VAR_P (decl)
      || !DECL_ARTIFICIAL (decl)
      || !DECL_IGNORED_P (decl)
      || !DECL_HAS_VALUE_EXPR_P (decl)
      || !lang_hooks.decls.omp_disregard_value_expr (decl, false))
    return NULL_TREE;

  tree v = DECL_VALUE_EXPR (decl);
  if (TREE_CODE (v) != COMPONENT_REF)
    return NULL_TREE;

  while (1)
    switch (TREE_CODE (v))
      {
      case COMPONENT_REF:
      case MEM_REF:
      case INDIRECT_REF:
      CASE_CONVERT:
      case POINTER_PLUS_EXPR:
	v = TREE_OPERAND (v, 0);
	continue;
      case PARM_DECL:
	if (DECL_CONTEXT (v) == current_function_decl
	    && DECL_ARTIFICIAL (v)
	    && TREE_CODE (TREE_TYPE (v)) == POINTER_TYPE)
	  return v;
	return NULL_TREE;
      default:
	return NULL_TREE;
      }
}

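/* For example (schematically): when a non-static member M is privatized
   inside a C++ member function, the front end creates a dummy VAR_DECL
   whose DECL_VALUE_EXPR is essentially this->M; the walk above peels the
   component and indirection layers off that expression until it reaches
   the artificial "this" PARM_DECL.  */
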
/* Helper for unshare_and_remap, called through walk_tree.  */

static tree
unshare_and_remap_1 (tree *tp, int *walk_subtrees, void *data)
{
  tree *pair = (tree *) data;
  if (*tp == pair[0])
    {
      *tp = unshare_expr (pair[1]);
      *walk_subtrees = 0;
    }
  else if (IS_TYPE_OR_DECL_P (*tp))
    *walk_subtrees = 0;
  return NULL_TREE;
}

/* Return unshare_expr (X) with all occurrences of FROM
   replaced with TO.  */

static tree
unshare_and_remap (tree x, tree from, tree to)
{
  tree pair[2] = { from, to };
  x = unshare_expr (x);
  walk_tree (&x, unshare_and_remap_1, pair, NULL);
  return x;
}

/* Convenience function for calling scan_omp_1_op on tree operands.  */

static inline tree
scan_omp_op (tree *tp, omp_context *ctx)
{
  struct walk_stmt_info wi;

  memset (&wi, 0, sizeof (wi));
  wi.info = ctx;
  wi.want_locations = true;

  return walk_tree (tp, scan_omp_1_op, &wi, NULL);
}

static void lower_omp (gimple_seq *, omp_context *);
static tree lookup_decl_in_outer_ctx (tree, omp_context *);
static tree maybe_lookup_decl_in_outer_ctx (tree, omp_context *);

/* Return true if CTX is for an omp parallel.  */

static inline bool
is_parallel_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL;
}


/* Return true if CTX is for an omp task.  */

static inline bool
is_task_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_TASK;
}


/* Return true if CTX is for an omp taskloop.  */

static inline bool
is_taskloop_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
	 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP;
}


/* Return true if CTX is for an omp parallel or omp task.  */

static inline bool
is_taskreg_ctx (omp_context *ctx)
{
  return is_parallel_ctx (ctx) || is_task_ctx (ctx);
}

/* Return true if EXPR is variable sized.  */

static inline bool
is_variable_sized (const_tree expr)
{
  return !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
}

/* Lookup variables.  The "maybe" form allows the variable not to have
   been entered; otherwise we assert that it must have been.  */

static inline tree
lookup_decl (tree var, omp_context *ctx)
{
  tree *n = ctx->cb.decl_map->get (var);
  return *n;
}

static inline tree
maybe_lookup_decl (const_tree var, omp_context *ctx)
{
  tree *n = ctx->cb.decl_map->get (const_cast<tree> (var));
  return n ? *n : NULL_TREE;
}

static inline tree
lookup_field (tree var, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->field_map, (splay_tree_key) var);
  return (tree) n->value;
}

static inline tree
lookup_sfield (splay_tree_key key, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->sfield_map
			 ? ctx->sfield_map : ctx->field_map, key);
  return (tree) n->value;
}

static inline tree
lookup_sfield (tree var, omp_context *ctx)
{
  return lookup_sfield ((splay_tree_key) var, ctx);
}

static inline tree
maybe_lookup_field (splay_tree_key key, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->field_map, key);
  return n ? (tree) n->value : NULL_TREE;
}

static inline tree
maybe_lookup_field (tree var, omp_context *ctx)
{
  return maybe_lookup_field ((splay_tree_key) var, ctx);
}

/* Return true if DECL should be copied by pointer.  SHARED_CTX is
   the parallel context if DECL is to be shared.  */

static bool
use_pointer_for_field (tree decl, omp_context *shared_ctx)
{
  if (AGGREGATE_TYPE_P (TREE_TYPE (decl))
      || TYPE_ATOMIC (TREE_TYPE (decl)))
    return true;

  /* We can only use copy-in/copy-out semantics for shared variables
     when we know the value is not accessible from an outer scope.  */
  if (shared_ctx)
    {
      gcc_assert (!is_gimple_omp_oacc (shared_ctx->stmt));

      /* ??? Trivially accessible from anywhere.  But why would we even
	 be passing an address in this case?  Should we simply assert
	 this to be false, or should we have a cleanup pass that removes
	 these from the list of mappings?  */
      if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
	return true;

      /* For variables with DECL_HAS_VALUE_EXPR_P set, we cannot tell
	 without analyzing the expression whether or not its location
	 is accessible to anyone else.  In the case of nested parallel
	 regions it certainly may be.  */
      if (TREE_CODE (decl) != RESULT_DECL && DECL_HAS_VALUE_EXPR_P (decl))
	return true;

      /* Do not use copy-in/copy-out for variables that have their
	 address taken.  */
      if (TREE_ADDRESSABLE (decl))
	return true;

      /* lower_send_shared_vars only uses copy-in, but not copy-out
	 for these.  */
      if (TREE_READONLY (decl)
	  || ((TREE_CODE (decl) == RESULT_DECL
	       || TREE_CODE (decl) == PARM_DECL)
	      && DECL_BY_REFERENCE (decl)))
	return false;

      /* Disallow copy-in/out in nested parallel if
	 decl is shared in outer parallel, otherwise
	 each thread could store the shared variable
	 in its own copy-in location, making the
	 variable no longer really shared.  */
      if (shared_ctx->is_nested)
	{
	  omp_context *up;

	  for (up = shared_ctx->outer; up; up = up->outer)
	    if (is_taskreg_ctx (up) && maybe_lookup_decl (decl, up))
	      break;

	  if (up)
	    {
	      tree c;

	      for (c = gimple_omp_taskreg_clauses (up->stmt);
		   c; c = OMP_CLAUSE_CHAIN (c))
		if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
		    && OMP_CLAUSE_DECL (c) == decl)
		  break;

	      if (c)
		goto maybe_mark_addressable_and_ret;
	    }
	}

      /* For tasks avoid using copy-in/out.  As tasks can be deferred
	 or executed in a different thread, the task need not have
	 terminated by the time GOMP_task returns.  */
      if (is_task_ctx (shared_ctx))
	{
	  tree outer;
	maybe_mark_addressable_and_ret:
	  outer = maybe_lookup_decl_in_outer_ctx (decl, shared_ctx);
	  if (is_gimple_reg (outer) && !omp_member_access_dummy_var (outer))
	    {
	      /* Taking address of OUTER in lower_send_shared_vars
		 might need regimplification of everything that uses the
		 variable.  */
	      if (!task_shared_vars)
		task_shared_vars = BITMAP_ALLOC (NULL);
	      bitmap_set_bit (task_shared_vars, DECL_UID (outer));
	      TREE_ADDRESSABLE (outer) = 1;
	    }
	  return true;
	}
    }

  return false;
}

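/* To summarize the predicate above: aggregates, atomics, statics,
   externs, decls with value-exprs and addressable decls always go by
   pointer; read-only scalars and by-reference RESULT/PARM decls can use
   copy-in alone; anything shared into a task is forced addressable and
   passed by pointer, since the task may still be running when GOMP_task
   returns.  */
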
/* Construct a new automatic decl similar to VAR.  */

static tree
omp_copy_decl_2 (tree var, tree name, tree type, omp_context *ctx)
{
  tree copy = copy_var_decl (var, name, type);

  DECL_CONTEXT (copy) = current_function_decl;
  DECL_CHAIN (copy) = ctx->block_vars;
  /* If VAR is listed in task_shared_vars, it means it wasn't
     originally addressable and is addressable only because some task
     needs to take its address.  But we don't need to take the address
     of privatized copies of it.  */
  if (TREE_ADDRESSABLE (var)
      && task_shared_vars
      && bitmap_bit_p (task_shared_vars, DECL_UID (var)))
    TREE_ADDRESSABLE (copy) = 0;
  ctx->block_vars = copy;

  return copy;
}

static tree
omp_copy_decl_1 (tree var, omp_context *ctx)
{
  return omp_copy_decl_2 (var, DECL_NAME (var), TREE_TYPE (var), ctx);
}

/* Build COMPONENT_REF and set TREE_THIS_VOLATILE and TREE_READONLY on it
   as appropriate.  */
static tree
omp_build_component_ref (tree obj, tree field)
{
  tree ret = build3 (COMPONENT_REF, TREE_TYPE (field), obj, field, NULL);
  if (TREE_THIS_VOLATILE (field))
    TREE_THIS_VOLATILE (ret) |= 1;
  if (TREE_READONLY (field))
    TREE_READONLY (ret) |= 1;
  return ret;
}

/* Build tree nodes to access the field for VAR on the receiver side.  */

static tree
build_receiver_ref (tree var, bool by_ref, omp_context *ctx)
{
  tree x, field = lookup_field (var, ctx);

  /* If the receiver record type was remapped in the child function,
     remap the field into the new record type.  */
  x = maybe_lookup_field (field, ctx);
  if (x != NULL)
    field = x;

  x = build_simple_mem_ref (ctx->receiver_decl);
  TREE_THIS_NOTRAP (x) = 1;
  x = omp_build_component_ref (x, field);
  if (by_ref)
    {
      x = build_simple_mem_ref (x);
      TREE_THIS_NOTRAP (x) = 1;
    }

  return x;
}

/* Build tree nodes to access VAR in the scope outer to CTX.  In the case
   of a parallel, this is a component reference; for workshare constructs
   this is some variable.  */

static tree
build_outer_var_ref (tree var, omp_context *ctx,
		     enum omp_clause_code code = OMP_CLAUSE_ERROR)
{
  tree x;

  if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
    x = var;
  else if (is_variable_sized (var))
    {
      x = TREE_OPERAND (DECL_VALUE_EXPR (var), 0);
      x = build_outer_var_ref (x, ctx, code);
      x = build_simple_mem_ref (x);
    }
  else if (is_taskreg_ctx (ctx))
    {
      bool by_ref = use_pointer_for_field (var, NULL);
      x = build_receiver_ref (var, by_ref, ctx);
    }
  else if ((gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
	    && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
	   || (code == OMP_CLAUSE_PRIVATE
	       && (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
		   || gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS
		   || gimple_code (ctx->stmt) == GIMPLE_OMP_SINGLE)))
    {
      /* #pragma omp simd isn't a worksharing construct, and can reference
	 even private vars in its linear etc. clauses.
	 Similarly for OMP_CLAUSE_PRIVATE with outer ref, that can refer
	 to private vars in all worksharing constructs.  */
      x = NULL_TREE;
      if (ctx->outer && is_taskreg_ctx (ctx))
	x = lookup_decl (var, ctx->outer);
      else if (ctx->outer)
	x = maybe_lookup_decl_in_outer_ctx (var, ctx);
      if (x == NULL_TREE)
	x = var;
    }
  else if (code == OMP_CLAUSE_LASTPRIVATE && is_taskloop_ctx (ctx))
    {
      gcc_assert (ctx->outer);
      splay_tree_node n
	= splay_tree_lookup (ctx->outer->field_map,
			     (splay_tree_key) &DECL_UID (var));
      if (n == NULL)
	{
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx->outer)))
	    x = var;
	  else
	    x = lookup_decl (var, ctx->outer);
	}
      else
	{
	  tree field = (tree) n->value;
	  /* If the receiver record type was remapped in the child function,
	     remap the field into the new record type.  */
	  x = maybe_lookup_field (field, ctx->outer);
	  if (x != NULL)
	    field = x;

	  x = build_simple_mem_ref (ctx->outer->receiver_decl);
	  x = omp_build_component_ref (x, field);
	  if (use_pointer_for_field (var, ctx->outer))
	    x = build_simple_mem_ref (x);
	}
    }
  else if (ctx->outer)
    {
      omp_context *outer = ctx->outer;
      if (gimple_code (outer->stmt) == GIMPLE_OMP_GRID_BODY)
	{
	  outer = outer->outer;
	  gcc_assert (outer
		      && gimple_code (outer->stmt) != GIMPLE_OMP_GRID_BODY);
	}
      x = lookup_decl (var, outer);
    }
  else if (omp_is_reference (var))
    /* This can happen with orphaned constructs.  If VAR is a reference,
       it may be shared and as such valid.  */
    x = var;
  else if (omp_member_access_dummy_var (var))
    x = var;
  else
    gcc_unreachable ();

  if (x == var)
    {
      tree t = omp_member_access_dummy_var (var);
      if (t)
	{
	  x = DECL_VALUE_EXPR (var);
	  tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
	  if (o != t)
	    x = unshare_and_remap (x, t, o);
	  else
	    x = unshare_expr (x);
	}
    }

  if (omp_is_reference (var))
    x = build_simple_mem_ref (x);

  return x;
}

/* Build tree nodes to access the field for VAR on the sender side.  */

static tree
build_sender_ref (splay_tree_key key, omp_context *ctx)
{
  tree field = lookup_sfield (key, ctx);
  return omp_build_component_ref (ctx->sender_decl, field);
}

static tree
build_sender_ref (tree var, omp_context *ctx)
{
  return build_sender_ref ((splay_tree_key) var, ctx);
}

/* Add a new field for VAR inside the structure CTX->SENDER_DECL.  If
   BASE_POINTERS_RESTRICT, declare the field with restrict.  */

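/* A note on MASK, as used by the function below (inferred from the code
   rather than any formal contract): bit 0 installs the field into
   CTX->RECORD_TYPE and FIELD_MAP, bit 1 into CTX->SRECORD_TYPE and
   SFIELD_MAP, bit 2 requests a pointer-to-pointer field for array types,
   and bit 3 keys the splay trees by &DECL_UID (VAR) instead of VAR
   itself.  */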
static void
install_var_field (tree var, bool by_ref, int mask, omp_context *ctx,
		   bool base_pointers_restrict = false)
{
  tree field, type, sfield = NULL_TREE;
  splay_tree_key key = (splay_tree_key) var;

  if ((mask & 8) != 0)
    {
      key = (splay_tree_key) &DECL_UID (var);
      gcc_checking_assert (key != (splay_tree_key) var);
    }
  gcc_assert ((mask & 1) == 0
	      || !splay_tree_lookup (ctx->field_map, key));
  gcc_assert ((mask & 2) == 0 || !ctx->sfield_map
	      || !splay_tree_lookup (ctx->sfield_map, key));
  gcc_assert ((mask & 3) == 3
	      || !is_gimple_omp_oacc (ctx->stmt));

  type = TREE_TYPE (var);
  /* Prevent redeclaring the var in the split-off function with a restrict
     pointer type.  Note that we only clear the restrict qualifier on TYPE
     itself; restrict qualifiers in the pointed-to type will be ignored by
     points-to analysis.  */
  if (POINTER_TYPE_P (type)
      && TYPE_RESTRICT (type))
    type = build_qualified_type (type, TYPE_QUALS (type) & ~TYPE_QUAL_RESTRICT);

  if (mask & 4)
    {
      gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
      type = build_pointer_type (build_pointer_type (type));
    }
  else if (by_ref)
    {
      type = build_pointer_type (type);
      if (base_pointers_restrict)
	type = build_qualified_type (type, TYPE_QUAL_RESTRICT);
    }
  else if ((mask & 3) == 1 && omp_is_reference (var))
    type = TREE_TYPE (type);

  field = build_decl (DECL_SOURCE_LOCATION (var),
		      FIELD_DECL, DECL_NAME (var), type);

  /* Remember what variable this field was created for.  This does have a
     side effect of making dwarf2out ignore this member, so for helpful
     debugging we clear it later in delete_omp_context.  */
  DECL_ABSTRACT_ORIGIN (field) = var;
  if (type == TREE_TYPE (var))
    {
      SET_DECL_ALIGN (field, DECL_ALIGN (var));
      DECL_USER_ALIGN (field) = DECL_USER_ALIGN (var);
      TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (var);
    }
  else
    SET_DECL_ALIGN (field, TYPE_ALIGN (type));

  if ((mask & 3) == 3)
    {
      insert_field_into_struct (ctx->record_type, field);
      if (ctx->srecord_type)
	{
	  sfield = build_decl (DECL_SOURCE_LOCATION (var),
			       FIELD_DECL, DECL_NAME (var), type);
	  DECL_ABSTRACT_ORIGIN (sfield) = var;
	  SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
	  DECL_USER_ALIGN (sfield) = DECL_USER_ALIGN (field);
	  TREE_THIS_VOLATILE (sfield) = TREE_THIS_VOLATILE (field);
	  insert_field_into_struct (ctx->srecord_type, sfield);
	}
    }
  else
    {
      if (ctx->srecord_type == NULL_TREE)
	{
	  tree t;

	  ctx->srecord_type = lang_hooks.types.make_type (RECORD_TYPE);
	  ctx->sfield_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
	  for (t = TYPE_FIELDS (ctx->record_type); t ; t = TREE_CHAIN (t))
	    {
	      sfield = build_decl (DECL_SOURCE_LOCATION (t),
				   FIELD_DECL, DECL_NAME (t), TREE_TYPE (t));
	      DECL_ABSTRACT_ORIGIN (sfield) = DECL_ABSTRACT_ORIGIN (t);
	      insert_field_into_struct (ctx->srecord_type, sfield);
	      splay_tree_insert (ctx->sfield_map,
				 (splay_tree_key) DECL_ABSTRACT_ORIGIN (t),
				 (splay_tree_value) sfield);
	    }
	}
      sfield = field;
      insert_field_into_struct ((mask & 1) ? ctx->record_type
				: ctx->srecord_type, field);
    }

  if (mask & 1)
    splay_tree_insert (ctx->field_map, key, (splay_tree_value) field);
  if ((mask & 2) && ctx->sfield_map)
    splay_tree_insert (ctx->sfield_map, key, (splay_tree_value) sfield);
}

static tree
install_var_local (tree var, omp_context *ctx)
{
  tree new_var = omp_copy_decl_1 (var, ctx);
  insert_decl_map (&ctx->cb, var, new_var);
  return new_var;
}

/* Adjust the replacement for DECL in CTX for the new context.  This means
   copying the DECL_VALUE_EXPR, and fixing up the type.  */

static void
fixup_remapped_decl (tree decl, omp_context *ctx, bool private_debug)
{
  tree new_decl, size;

  new_decl = lookup_decl (decl, ctx);

  TREE_TYPE (new_decl) = remap_type (TREE_TYPE (decl), &ctx->cb);

  if ((!TREE_CONSTANT (DECL_SIZE (new_decl)) || private_debug)
      && DECL_HAS_VALUE_EXPR_P (decl))
    {
      tree ve = DECL_VALUE_EXPR (decl);
      walk_tree (&ve, copy_tree_body_r, &ctx->cb, NULL);
      SET_DECL_VALUE_EXPR (new_decl, ve);
      DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
    }

  if (!TREE_CONSTANT (DECL_SIZE (new_decl)))
    {
      size = remap_decl (DECL_SIZE (decl), &ctx->cb);
      if (size == error_mark_node)
	size = TYPE_SIZE (TREE_TYPE (new_decl));
      DECL_SIZE (new_decl) = size;

      size = remap_decl (DECL_SIZE_UNIT (decl), &ctx->cb);
      if (size == error_mark_node)
	size = TYPE_SIZE_UNIT (TREE_TYPE (new_decl));
      DECL_SIZE_UNIT (new_decl) = size;
    }
}

/* The callback for remap_decl.  Search all containing contexts for a
   mapping of the variable; this avoids having to duplicate the splay
   tree ahead of time.  We know a mapping doesn't already exist in the
   given context.  Create new mappings to implement default semantics.  */

static tree
omp_copy_decl (tree var, copy_body_data *cb)
{
  omp_context *ctx = (omp_context *) cb;
  tree new_var;

  if (TREE_CODE (var) == LABEL_DECL)
    {
      if (FORCED_LABEL (var) || DECL_NONLOCAL (var))
	return var;
      new_var = create_artificial_label (DECL_SOURCE_LOCATION (var));
      DECL_CONTEXT (new_var) = current_function_decl;
      insert_decl_map (&ctx->cb, var, new_var);
      return new_var;
    }

  while (!is_taskreg_ctx (ctx))
    {
      ctx = ctx->outer;
      if (ctx == NULL)
	return var;
      new_var = maybe_lookup_decl (var, ctx);
      if (new_var)
	return new_var;
    }

  if (is_global_var (var) || decl_function_context (var) != ctx->cb.src_fn)
    return var;

  return error_mark_node;
}

/* Create a new context, with OUTER_CTX being the surrounding context.  */

static omp_context *
new_omp_context (gimple *stmt, omp_context *outer_ctx)
{
  omp_context *ctx = XCNEW (omp_context);

  splay_tree_insert (all_contexts, (splay_tree_key) stmt,
		     (splay_tree_value) ctx);
  ctx->stmt = stmt;

  if (outer_ctx)
    {
      ctx->outer = outer_ctx;
      ctx->cb = outer_ctx->cb;
      ctx->cb.block = NULL;
      ctx->depth = outer_ctx->depth + 1;
    }
  else
    {
      ctx->cb.src_fn = current_function_decl;
      ctx->cb.dst_fn = current_function_decl;
      ctx->cb.src_node = cgraph_node::get (current_function_decl);
      gcc_checking_assert (ctx->cb.src_node);
      ctx->cb.dst_node = ctx->cb.src_node;
      ctx->cb.src_cfun = cfun;
      ctx->cb.copy_decl = omp_copy_decl;
      ctx->cb.eh_lp_nr = 0;
      ctx->cb.transform_call_graph_edges = CB_CGE_MOVE;
      ctx->depth = 1;
    }

  ctx->cb.decl_map = new hash_map<tree, tree>;

  return ctx;
}

static gimple_seq maybe_catch_exception (gimple_seq);

/* Finalize task copyfn.  */

static void
finalize_task_copyfn (gomp_task *task_stmt)
{
  struct function *child_cfun;
  tree child_fn;
  gimple_seq seq = NULL, new_seq;
  gbind *bind;

  child_fn = gimple_omp_task_copy_fn (task_stmt);
  if (child_fn == NULL_TREE)
    return;

  child_cfun = DECL_STRUCT_FUNCTION (child_fn);
  DECL_STRUCT_FUNCTION (child_fn)->curr_properties = cfun->curr_properties;

  push_cfun (child_cfun);
  bind = gimplify_body (child_fn, false);
  gimple_seq_add_stmt (&seq, bind);
  new_seq = maybe_catch_exception (seq);
  if (new_seq != seq)
    {
      bind = gimple_build_bind (NULL, new_seq, NULL);
      seq = NULL;
      gimple_seq_add_stmt (&seq, bind);
    }
  gimple_set_body (child_fn, seq);
  pop_cfun ();

  /* Inform the callgraph about the new function.  */
  cgraph_node *node = cgraph_node::get_create (child_fn);
  node->parallelized_function = 1;
  cgraph_node::add_new_function (child_fn, false);
}

/* Destroy an omp_context data structure.  Called through the splay tree
   value delete callback.  */

static void
delete_omp_context (splay_tree_value value)
{
  omp_context *ctx = (omp_context *) value;

  delete ctx->cb.decl_map;

  if (ctx->field_map)
    splay_tree_delete (ctx->field_map);
  if (ctx->sfield_map)
    splay_tree_delete (ctx->sfield_map);

  /* We hijacked DECL_ABSTRACT_ORIGIN earlier.  We need to clear it before
     it produces corrupt debug information.  */
  if (ctx->record_type)
    {
      tree t;
      for (t = TYPE_FIELDS (ctx->record_type); t ; t = DECL_CHAIN (t))
	DECL_ABSTRACT_ORIGIN (t) = NULL;
    }
  if (ctx->srecord_type)
    {
      tree t;
      for (t = TYPE_FIELDS (ctx->srecord_type); t ; t = DECL_CHAIN (t))
	DECL_ABSTRACT_ORIGIN (t) = NULL;
    }

  if (is_task_ctx (ctx))
    finalize_task_copyfn (as_a <gomp_task *> (ctx->stmt));

  XDELETE (ctx);
}

/* Fix up RECEIVER_DECL with a type that has been remapped to the child
   context.  */

static void
fixup_child_record_type (omp_context *ctx)
{
  tree f, type = ctx->record_type;

  if (!ctx->receiver_decl)
    return;
  /* ??? It isn't sufficient to just call remap_type here, because
     variably_modified_type_p doesn't work the way we expect for
     record types.  Testing each field for whether it needs remapping
     and creating a new record by hand works, however.  */
  for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
    if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
      break;
  if (f)
    {
      tree name, new_fields = NULL;

      type = lang_hooks.types.make_type (RECORD_TYPE);
      name = DECL_NAME (TYPE_NAME (ctx->record_type));
      name = build_decl (DECL_SOURCE_LOCATION (ctx->receiver_decl),
			 TYPE_DECL, name, type);
      TYPE_NAME (type) = name;

      for (f = TYPE_FIELDS (ctx->record_type); f ; f = DECL_CHAIN (f))
	{
	  tree new_f = copy_node (f);
	  DECL_CONTEXT (new_f) = type;
	  TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &ctx->cb);
	  DECL_CHAIN (new_f) = new_fields;
	  walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &ctx->cb, NULL);
	  walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r,
		     &ctx->cb, NULL);
	  walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
		     &ctx->cb, NULL);
	  new_fields = new_f;

	  /* Arrange to be able to look up the receiver field
	     given the sender field.  */
	  splay_tree_insert (ctx->field_map, (splay_tree_key) f,
			     (splay_tree_value) new_f);
	}
      TYPE_FIELDS (type) = nreverse (new_fields);
      layout_type (type);
    }

  /* In a target region we never modify any of the pointers in *.omp_data_i,
     so attempt to help the optimizers.  */
  if (is_gimple_omp_offloaded (ctx->stmt))
    type = build_qualified_type (type, TYPE_QUAL_CONST);

  TREE_TYPE (ctx->receiver_decl)
    = build_qualified_type (build_reference_type (type), TYPE_QUAL_RESTRICT);
}

/* Instantiate decls as necessary in CTX to satisfy the data sharing
   specified by CLAUSES.  If BASE_POINTERS_RESTRICT, install var field with
   restrict.  */

static void
scan_sharing_clauses (tree clauses, omp_context *ctx,
		      bool base_pointers_restrict = false)
{
  tree c, decl;
  bool scan_array_reductions = false;

  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      bool by_ref;

      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_PRIVATE:
	  decl = OMP_CLAUSE_DECL (c);
	  if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
	    goto do_private;
	  else if (!is_variable_sized (decl))
	    install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_SHARED:
	  decl = OMP_CLAUSE_DECL (c);
	  /* Ignore shared directives in teams construct.  */
	  if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
	    {
	      /* Global variables don't need to be copied,
		 the receiver side will use them directly.  */
	      tree odecl = maybe_lookup_decl_in_outer_ctx (decl, ctx);
	      if (is_global_var (odecl))
		break;
	      insert_decl_map (&ctx->cb, decl, odecl);
	      break;
	    }
	  gcc_assert (is_taskreg_ctx (ctx));
	  gcc_assert (!COMPLETE_TYPE_P (TREE_TYPE (decl))
		      || !is_variable_sized (decl));
	  /* Global variables don't need to be copied,
	     the receiver side will use them directly.  */
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
	    break;
	  if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	    {
	      use_pointer_for_field (decl, ctx);
	      break;
	    }
	  by_ref = use_pointer_for_field (decl, NULL);
	  if ((! TREE_READONLY (decl) && !OMP_CLAUSE_SHARED_READONLY (c))
	      || TREE_ADDRESSABLE (decl)
	      || by_ref
	      || omp_is_reference (decl))
	    {
	      by_ref = use_pointer_for_field (decl, ctx);
	      install_var_field (decl, by_ref, 3, ctx);
	      install_var_local (decl, ctx);
	      break;
	    }
	  /* We don't need to copy const scalar vars back.  */
	  OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_FIRSTPRIVATE);
	  goto do_private;

	case OMP_CLAUSE_REDUCTION:
	  decl = OMP_CLAUSE_DECL (c);
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	      && TREE_CODE (decl) == MEM_REF)
	    {
	      tree t = TREE_OPERAND (decl, 0);
	      if (TREE_CODE (t) == POINTER_PLUS_EXPR)
		t = TREE_OPERAND (t, 0);
	      if (TREE_CODE (t) == INDIRECT_REF
		  || TREE_CODE (t) == ADDR_EXPR)
		t = TREE_OPERAND (t, 0);
	      install_var_local (t, ctx);
	      if (is_taskreg_ctx (ctx)
		  && !is_global_var (maybe_lookup_decl_in_outer_ctx (t, ctx))
		  && !is_variable_sized (t))
		{
		  by_ref = use_pointer_for_field (t, ctx);
		  install_var_field (t, by_ref, 3, ctx);
		}
	      break;
	    }
	  goto do_private;

	case OMP_CLAUSE_LASTPRIVATE:
	  /* Let the corresponding firstprivate clause create
	     the variable.  */
	  if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
	    break;
	  /* FALLTHRU */

	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_LINEAR:
	  decl = OMP_CLAUSE_DECL (c);
	do_private:
	  if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
	       || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
	      && is_gimple_omp_offloaded (ctx->stmt))
	    {
	      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
		install_var_field (decl, !omp_is_reference (decl), 3, ctx);
	      else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
		install_var_field (decl, true, 3, ctx);
	      else
		install_var_field (decl, false, 3, ctx);
	    }
	  if (is_variable_sized (decl))
	    {
	      if (is_task_ctx (ctx))
		install_var_field (decl, false, 1, ctx);
	      break;
	    }
	  else if (is_taskreg_ctx (ctx))
	    {
	      bool global
		= is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx));
	      by_ref = use_pointer_for_field (decl, NULL);

	      if (is_task_ctx (ctx)
		  && (global || by_ref || omp_is_reference (decl)))
		{
		  install_var_field (decl, false, 1, ctx);
		  if (!global)
		    install_var_field (decl, by_ref, 2, ctx);
		}
	      else if (!global)
		install_var_field (decl, by_ref, 3, ctx);
	    }
	  install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_USE_DEVICE_PTR:
	  decl = OMP_CLAUSE_DECL (c);
	  if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	    install_var_field (decl, true, 3, ctx);
	  else
	    install_var_field (decl, false, 3, ctx);
	  if (DECL_SIZE (decl)
	      && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
	    {
	      tree decl2 = DECL_VALUE_EXPR (decl);
	      gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
	      decl2 = TREE_OPERAND (decl2, 0);
	      gcc_assert (DECL_P (decl2));
	      install_var_local (decl2, ctx);
	    }
	  install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_IS_DEVICE_PTR:
	  decl = OMP_CLAUSE_DECL (c);
	  goto do_private;

	case OMP_CLAUSE__LOOPTEMP_:
	  gcc_assert (is_taskreg_ctx (ctx));
	  decl = OMP_CLAUSE_DECL (c);
	  install_var_field (decl, false, 3, ctx);
	  install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_COPYIN:
	  decl = OMP_CLAUSE_DECL (c);
	  by_ref = use_pointer_for_field (decl, NULL);
	  install_var_field (decl, by_ref, 3, ctx);
	  break;

	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_DIST_SCHEDULE:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE_PRIORITY:
	case OMP_CLAUSE_GRAINSIZE:
	case OMP_CLAUSE_NUM_TASKS:
	case OMP_CLAUSE_NUM_GANGS:
	case OMP_CLAUSE_NUM_WORKERS:
	case OMP_CLAUSE_VECTOR_LENGTH:
	  if (ctx->outer)
	    scan_omp_op (&OMP_CLAUSE_OPERAND (c, 0), ctx->outer);
	  break;

	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	case OMP_CLAUSE_MAP:
	  if (ctx->outer)
	    scan_omp_op (&OMP_CLAUSE_SIZE (c), ctx->outer);
	  decl = OMP_CLAUSE_DECL (c);
	  /* Global variables with "omp declare target" attribute
	     don't need to be copied, the receiver side will use them
	     directly.  However, global variables with "omp declare target link"
	     attribute need to be copied.  */
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && DECL_P (decl)
	      && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
		   && (OMP_CLAUSE_MAP_KIND (c)
		       != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
		  || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	      && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
	      && varpool_node::get_create (decl)->offloadable
	      && !lookup_attribute ("omp declare target link",
				    DECL_ATTRIBUTES (decl)))
	    break;
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER)
	    {
	      /* Ignore GOMP_MAP_POINTER kind for arrays in regions that are
		 not offloaded; there is nothing to map for those.  */
	      if (!is_gimple_omp_offloaded (ctx->stmt)
		  && !POINTER_TYPE_P (TREE_TYPE (decl))
		  && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
		break;
	    }
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
		  || (OMP_CLAUSE_MAP_KIND (c)
		      == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
	    {
	      if (TREE_CODE (decl) == COMPONENT_REF
		  || (TREE_CODE (decl) == INDIRECT_REF
		      && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
		      && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
			  == REFERENCE_TYPE)))
		break;
	      if (DECL_SIZE (decl)
		  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  install_var_local (decl2, ctx);
		}
	      install_var_local (decl, ctx);
	      break;
	    }
	  if (DECL_P (decl))
	    {
	      if (DECL_SIZE (decl)
		  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  install_var_field (decl2, true, 3, ctx);
		  install_var_local (decl2, ctx);
		  install_var_local (decl, ctx);
		}
	      else
		{
		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
		      && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
		      && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
		      && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
		    install_var_field (decl, true, 7, ctx);
		  else
		    install_var_field (decl, true, 3, ctx,
				       base_pointers_restrict);
		  if (is_gimple_omp_offloaded (ctx->stmt)
		      && !OMP_CLAUSE_MAP_IN_REDUCTION (c))
		    install_var_local (decl, ctx);
		}
	    }
	  else
	    {
	      tree base = get_base_address (decl);
	      tree nc = OMP_CLAUSE_CHAIN (c);
	      if (DECL_P (base)
		  && nc != NULL_TREE
		  && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
		  && OMP_CLAUSE_DECL (nc) == base
		  && OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_POINTER
		  && integer_zerop (OMP_CLAUSE_SIZE (nc)))
		{
		  OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c) = 1;
		  OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (nc) = 1;
		}
	      else
		{
		  if (ctx->outer)
		    {
		      scan_omp_op (&OMP_CLAUSE_DECL (c), ctx->outer);
		      decl = OMP_CLAUSE_DECL (c);
		    }
		  gcc_assert (!splay_tree_lookup (ctx->field_map,
						  (splay_tree_key) decl));
		  tree field
		    = build_decl (OMP_CLAUSE_LOCATION (c),
				  FIELD_DECL, NULL_TREE, ptr_type_node);
		  SET_DECL_ALIGN (field, TYPE_ALIGN (ptr_type_node));
		  insert_field_into_struct (ctx->record_type, field);
		  splay_tree_insert (ctx->field_map, (splay_tree_key) decl,
				     (splay_tree_value) field);
		}
	    }
	  break;

	case OMP_CLAUSE__GRIDDIM_:
	  if (ctx->outer)
	    {
	      scan_omp_op (&OMP_CLAUSE__GRIDDIM__SIZE (c), ctx->outer);
	      scan_omp_op (&OMP_CLAUSE__GRIDDIM__GROUP (c), ctx->outer);
	    }
	  break;

	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE_SIMDLEN:
	case OMP_CLAUSE_THREADS:
	case OMP_CLAUSE_SIMD:
	case OMP_CLAUSE_NOGROUP:
	case OMP_CLAUSE_DEFAULTMAP:
	case OMP_CLAUSE_ASYNC:
	case OMP_CLAUSE_WAIT:
	case OMP_CLAUSE_GANG:
	case OMP_CLAUSE_WORKER:
	case OMP_CLAUSE_VECTOR:
	case OMP_CLAUSE_INDEPENDENT:
	case OMP_CLAUSE_AUTO:
	case OMP_CLAUSE_SEQ:
	case OMP_CLAUSE_TILE:
	case OMP_CLAUSE__SIMT_:
	case OMP_CLAUSE_DEFAULT:
	  break;

	case OMP_CLAUSE_ALIGNED:
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_global_var (decl)
	      && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	    install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE__CACHE_:
	default:
	  gcc_unreachable ();
	}
    }

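  /* Second pass: with every field and local now installed, fix up the
     remapped decls and note which clauses carry GIMPLE sequences that
     still need scanning.  */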
  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_LASTPRIVATE:
	  /* Let the corresponding firstprivate clause create
	     the variable.  */
	  if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
	    scan_array_reductions = true;
	  if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
	    break;
	  /* FALLTHRU */

	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_LINEAR:
	case OMP_CLAUSE_IS_DEVICE_PTR:
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_variable_sized (decl))
	    {
	      if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
		   || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
		  && is_gimple_omp_offloaded (ctx->stmt))
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  install_var_local (decl2, ctx);
		  fixup_remapped_decl (decl2, ctx, false);
		}
	      install_var_local (decl, ctx);
	    }
	  fixup_remapped_decl (decl, ctx,
			       OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
			       && OMP_CLAUSE_PRIVATE_DEBUG (c));
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
	      && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
	    scan_array_reductions = true;
	  break;

	case OMP_CLAUSE_REDUCTION:
	  decl = OMP_CLAUSE_DECL (c);
	  if (TREE_CODE (decl) != MEM_REF)
	    {
	      if (is_variable_sized (decl))
		install_var_local (decl, ctx);
	      fixup_remapped_decl (decl, ctx, false);
	    }
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	    scan_array_reductions = true;
	  break;

	case OMP_CLAUSE_SHARED:
	  /* Ignore shared directives in teams construct.  */
	  if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
	    break;
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
	    break;
	  if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	    {
	      if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl,
								 ctx->outer)))
		break;
	      bool by_ref = use_pointer_for_field (decl, ctx);
	      install_var_field (decl, by_ref, 11, ctx);
	      break;
	    }
	  fixup_remapped_decl (decl, ctx, false);
	  break;

	case OMP_CLAUSE_MAP:
	  if (!is_gimple_omp_offloaded (ctx->stmt))
	    break;
	  decl = OMP_CLAUSE_DECL (c);
	  if (DECL_P (decl)
	      && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
		   && (OMP_CLAUSE_MAP_KIND (c)
		       != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
		  || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	      && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
	      && varpool_node::get_create (decl)->offloadable)
	    break;
	  if (DECL_P (decl))
	    {
	      if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
		   || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
		  && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
		  && !COMPLETE_TYPE_P (TREE_TYPE (decl)))
		{
		  tree new_decl = lookup_decl (decl, ctx);
		  TREE_TYPE (new_decl)
		    = remap_type (TREE_TYPE (decl), &ctx->cb);
		}
	      else if (DECL_SIZE (decl)
		       && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  fixup_remapped_decl (decl2, ctx, false);
		  fixup_remapped_decl (decl, ctx, true);
		}
	      else
		fixup_remapped_decl (decl, ctx, false);
	    }
	  break;

	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_DIST_SCHEDULE:
	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE_SIMDLEN:
	case OMP_CLAUSE_ALIGNED:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE__LOOPTEMP_:
	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	case OMP_CLAUSE_PRIORITY:
	case OMP_CLAUSE_GRAINSIZE:
	case OMP_CLAUSE_NUM_TASKS:
	case OMP_CLAUSE_THREADS:
	case OMP_CLAUSE_SIMD:
	case OMP_CLAUSE_NOGROUP:
	case OMP_CLAUSE_DEFAULTMAP:
	case OMP_CLAUSE_USE_DEVICE_PTR:
	case OMP_CLAUSE_ASYNC:
	case OMP_CLAUSE_WAIT:
	case OMP_CLAUSE_NUM_GANGS:
	case OMP_CLAUSE_NUM_WORKERS:
	case OMP_CLAUSE_VECTOR_LENGTH:
	case OMP_CLAUSE_GANG:
	case OMP_CLAUSE_WORKER:
	case OMP_CLAUSE_VECTOR:
	case OMP_CLAUSE_INDEPENDENT:
	case OMP_CLAUSE_AUTO:
	case OMP_CLAUSE_SEQ:
	case OMP_CLAUSE_TILE:
	case OMP_CLAUSE__GRIDDIM_:
	case OMP_CLAUSE__SIMT_:
	  break;

	case OMP_CLAUSE__CACHE_:
	default:
	  gcc_unreachable ();
	}
    }

  gcc_checking_assert (!scan_array_reductions
		       || !is_gimple_omp_oacc (ctx->stmt));
  if (scan_array_reductions)
    {
      for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	    && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	  {
	    scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c), ctx);
	    scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
	  }
	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
		 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
	  scan_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
		 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
	  scan_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
    }
}

/* Create a new name for the omp child function.  Returns an identifier.  */

static tree
create_omp_child_function_name (bool task_copy)
{
  return clone_function_name (current_function_decl,
			      task_copy ? "_omp_cpyfn" : "_omp_fn");
}

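/* (With clone_function_name this yields names along the lines of
   foo._omp_fn.0 or foo._omp_cpyfn.1 for a containing function foo; the
   trailing number comes from clone_function_name's internal counter.)  */
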
1539 /* Return true if CTX may belong to offloaded code: either if current function
1540    is offloaded, or any enclosing context corresponds to a target region.  */
1541 
1542 static bool
1543 omp_maybe_offloaded_ctx (omp_context *ctx)
1544 {
1545   if (cgraph_node::get (current_function_decl)->offloadable)
1546     return true;
1547   for (; ctx; ctx = ctx->outer)
1548     if (is_gimple_omp_offloaded (ctx->stmt))
1549       return true;
1550   return false;
1551 }
1552 
1553 /* Build a decl for the omp child function.  It'll not contain a body
1554    yet, just the bare decl.  */
1555 
1556 static void
1557 create_omp_child_function (omp_context *ctx, bool task_copy)
1558 {
1559   tree decl, type, name, t;
1560 
1561   name = create_omp_child_function_name (task_copy);
1562   if (task_copy)
1563     type = build_function_type_list (void_type_node, ptr_type_node,
1564 				     ptr_type_node, NULL_TREE);
1565   else
1566     type = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
1567 
1568   decl = build_decl (gimple_location (ctx->stmt), FUNCTION_DECL, name, type);
1569 
1570   gcc_checking_assert (!is_gimple_omp_oacc (ctx->stmt)
1571 		       || !task_copy);
1572   if (!task_copy)
1573     ctx->cb.dst_fn = decl;
1574   else
1575     gimple_omp_task_set_copy_fn (ctx->stmt, decl);
1576 
1577   TREE_STATIC (decl) = 1;
1578   TREE_USED (decl) = 1;
1579   DECL_ARTIFICIAL (decl) = 1;
1580   DECL_IGNORED_P (decl) = 0;
1581   TREE_PUBLIC (decl) = 0;
1582   DECL_UNINLINABLE (decl) = 1;
1583   DECL_EXTERNAL (decl) = 0;
1584   DECL_CONTEXT (decl) = NULL_TREE;
1585   DECL_INITIAL (decl) = make_node (BLOCK);
1586   BLOCK_SUPERCONTEXT (DECL_INITIAL (decl)) = decl;
1587   DECL_ATTRIBUTES (decl) = DECL_ATTRIBUTES (current_function_decl);
1588   /* Remove omp declare simd attribute from the new attributes.  */
1589   if (tree a = lookup_attribute ("omp declare simd", DECL_ATTRIBUTES (decl)))
1590     {
1591       while (tree a2 = lookup_attribute ("omp declare simd", TREE_CHAIN (a)))
1592 	a = a2;
1593       a = TREE_CHAIN (a);
1594       for (tree *p = &DECL_ATTRIBUTES (decl); *p != a;)
1595 	if (is_attribute_p ("omp declare simd", get_attribute_name (*p)))
1596 	  *p = TREE_CHAIN (*p);
1597 	else
1598 	  {
1599 	    tree chain = TREE_CHAIN (*p);
1600 	    *p = copy_node (*p);
1601 	    p = &TREE_CHAIN (*p);
1602 	    *p = chain;
1603 	  }
1604     }
1605   DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)
1606     = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (current_function_decl);
1607   DECL_FUNCTION_SPECIFIC_TARGET (decl)
1608     = DECL_FUNCTION_SPECIFIC_TARGET (current_function_decl);
1609   DECL_FUNCTION_VERSIONED (decl)
1610     = DECL_FUNCTION_VERSIONED (current_function_decl);
1611 
1612   if (omp_maybe_offloaded_ctx (ctx))
1613     {
1614       cgraph_node::get_create (decl)->offloadable = 1;
1615       if (ENABLE_OFFLOADING)
1616 	g->have_offload = true;
1617     }
1618 
1619   if (cgraph_node::get_create (decl)->offloadable
1620       && !lookup_attribute ("omp declare target",
1621                            DECL_ATTRIBUTES (current_function_decl)))
1622     {
1623       const char *target_attr = (is_gimple_omp_offloaded (ctx->stmt)
1624 				 ? "omp target entrypoint"
1625 				 : "omp declare target");
1626       DECL_ATTRIBUTES (decl)
1627 	= tree_cons (get_identifier (target_attr),
1628 		     NULL_TREE, DECL_ATTRIBUTES (decl));
1629     }
1630 
1631   t = build_decl (DECL_SOURCE_LOCATION (decl),
1632 		  RESULT_DECL, NULL_TREE, void_type_node);
1633   DECL_ARTIFICIAL (t) = 1;
1634   DECL_IGNORED_P (t) = 1;
1635   DECL_CONTEXT (t) = decl;
1636   DECL_RESULT (decl) = t;
1637 
1638   tree data_name = get_identifier (".omp_data_i");
1639   t = build_decl (DECL_SOURCE_LOCATION (decl), PARM_DECL, data_name,
1640 		  ptr_type_node);
1641   DECL_ARTIFICIAL (t) = 1;
1642   DECL_NAMELESS (t) = 1;
1643   DECL_ARG_TYPE (t) = ptr_type_node;
1644   DECL_CONTEXT (t) = current_function_decl;
1645   TREE_USED (t) = 1;
1646   TREE_READONLY (t) = 1;
1647   DECL_ARGUMENTS (decl) = t;
1648   if (!task_copy)
1649     ctx->receiver_decl = t;
1650   else
1651     {
1652       t = build_decl (DECL_SOURCE_LOCATION (decl),
1653 		      PARM_DECL, get_identifier (".omp_data_o"),
1654 		      ptr_type_node);
1655       DECL_ARTIFICIAL (t) = 1;
1656       DECL_NAMELESS (t) = 1;
1657       DECL_ARG_TYPE (t) = ptr_type_node;
1658       DECL_CONTEXT (t) = current_function_decl;
1659       TREE_USED (t) = 1;
1660       TREE_ADDRESSABLE (t) = 1;
1661       DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
1662       DECL_ARGUMENTS (decl) = t;
1663     }
1664 
1665   /* Allocate memory for the function structure.  The call to
1666      allocate_struct_function clobbers CFUN, so we need to restore
1667      it afterward.  */
1668   push_struct_function (decl);
1669   cfun->function_end_locus = gimple_location (ctx->stmt);
1670   init_tree_ssa (cfun);
1671   pop_cfun ();
1672 }
1673 
1674 /* Callback for walk_gimple_seq.  Check if combined parallel
1675    contains gimple_omp_for_combined_into_p OMP_FOR.  */
1676 
1677 tree
1678 omp_find_combined_for (gimple_stmt_iterator *gsi_p,
1679 		       bool *handled_ops_p,
1680 		       struct walk_stmt_info *wi)
1681 {
1682   gimple *stmt = gsi_stmt (*gsi_p);
1683 
1684   *handled_ops_p = true;
1685   switch (gimple_code (stmt))
1686     {
1687     WALK_SUBSTMTS;
1688 
1689     case GIMPLE_OMP_FOR:
1690       if (gimple_omp_for_combined_into_p (stmt)
1691 	  && gimple_omp_for_kind (stmt)
1692 	     == *(const enum gf_mask *) (wi->info))
1693 	{
1694 	  wi->info = stmt;
1695 	  return integer_zero_node;
1696 	}
1697       break;
1698     default:
1699       break;
1700     }
1701   return NULL;
1702 }
1703 
1704 /* Add _LOOPTEMP_ clauses on OpenMP parallel or task.  */
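/* For illustration only: a combined construct such as

     #pragma omp parallel for
     for (i = 0; i < n; i++)
       ...

   is expanded so that the iteration bounds are computed inside the
   parallel; the _LOOPTEMP_ clauses added here reserve the temporaries
   (roughly an istart/iend pair, plus extra counts for collapsed loops
   with non-constant bounds) that the expansion will fill in.  */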
1705 
1706 static void
1707 add_taskreg_looptemp_clauses (enum gf_mask msk, gimple *stmt,
1708 			      omp_context *outer_ctx)
1709 {
1710   struct walk_stmt_info wi;
1711 
1712   memset (&wi, 0, sizeof (wi));
1713   wi.val_only = true;
1714   wi.info = (void *) &msk;
1715   walk_gimple_seq (gimple_omp_body (stmt), omp_find_combined_for, NULL, &wi);
1716   if (wi.info != (void *) &msk)
1717     {
1718       gomp_for *for_stmt = as_a <gomp_for *> ((gimple *) wi.info);
1719       struct omp_for_data fd;
1720       omp_extract_for_data (for_stmt, &fd, NULL);
1721       /* We need two temporaries with fd.loop.v type (istart/iend)
1722 	 and then (fd.collapse - 1) temporaries with the same
1723 	 type for count2 ... countN-1 vars if not constant.  */
1724       size_t count = 2, i;
1725       tree type = fd.iter_type;
1726       if (fd.collapse > 1
1727 	  && TREE_CODE (fd.loop.n2) != INTEGER_CST)
1728 	{
1729 	  count += fd.collapse - 1;
1730 	  /* If there are lastprivate clauses on the inner
1731 	     GIMPLE_OMP_FOR, add one more temporary for the total number
1732 	     of iterations (product of count1 ... countN-1).  */
1733 	  if (omp_find_clause (gimple_omp_for_clauses (for_stmt),
1734 			       OMP_CLAUSE_LASTPRIVATE))
1735 	    count++;
1736 	  else if (msk == GF_OMP_FOR_KIND_FOR
1737 		   && omp_find_clause (gimple_omp_parallel_clauses (stmt),
1738 				       OMP_CLAUSE_LASTPRIVATE))
1739 	    count++;
1740 	}
1741       for (i = 0; i < count; i++)
1742 	{
1743 	  tree temp = create_tmp_var (type);
1744 	  tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
1745 	  insert_decl_map (&outer_ctx->cb, temp, temp);
1746 	  OMP_CLAUSE_DECL (c) = temp;
1747 	  OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
1748 	  gimple_omp_taskreg_set_clauses (stmt, c);
1749 	}
1750     }
1751 }
1752 
1753 /* Scan an OpenMP parallel directive.  */
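/* Illustrative sketch (field layout and names are only roughly what
   the compiler builds): for

     #pragma omp parallel shared(a) firstprivate(b)

   scanning creates a record type to marshal the shared data and a
   child function that receives it:

     struct .omp_data_s { int *a; int b; };
     void foo._omp_fn.0 (struct .omp_data_s *.omp_data_i);

   whether a field is a pointer or a copy depends on
   use_pointer_for_field.  */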
1754 
1755 static void
1756 scan_omp_parallel (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
1757 {
1758   omp_context *ctx;
1759   tree name;
1760   gomp_parallel *stmt = as_a <gomp_parallel *> (gsi_stmt (*gsi));
1761 
1762   /* Ignore parallel directives with empty bodies, unless there
1763      are copyin clauses.  */
1764   if (optimize > 0
1765       && empty_body_p (gimple_omp_body (stmt))
1766       && omp_find_clause (gimple_omp_parallel_clauses (stmt),
1767 			  OMP_CLAUSE_COPYIN) == NULL)
1768     {
1769       gsi_replace (gsi, gimple_build_nop (), false);
1770       return;
1771     }
1772 
1773   if (gimple_omp_parallel_combined_p (stmt))
1774     add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_FOR, stmt, outer_ctx);
1775 
1776   ctx = new_omp_context (stmt, outer_ctx);
1777   taskreg_contexts.safe_push (ctx);
1778   if (taskreg_nesting_level > 1)
1779     ctx->is_nested = true;
1780   ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
1781   ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
1782   name = create_tmp_var_name (".omp_data_s");
1783   name = build_decl (gimple_location (stmt),
1784 		     TYPE_DECL, name, ctx->record_type);
1785   DECL_ARTIFICIAL (name) = 1;
1786   DECL_NAMELESS (name) = 1;
1787   TYPE_NAME (ctx->record_type) = name;
1788   TYPE_ARTIFICIAL (ctx->record_type) = 1;
1789   if (!gimple_omp_parallel_grid_phony (stmt))
1790     {
1791       create_omp_child_function (ctx, false);
1792       gimple_omp_parallel_set_child_fn (stmt, ctx->cb.dst_fn);
1793     }
1794 
1795   scan_sharing_clauses (gimple_omp_parallel_clauses (stmt), ctx);
1796   scan_omp (gimple_omp_body_ptr (stmt), ctx);
1797 
1798   if (TYPE_FIELDS (ctx->record_type) == NULL)
1799     ctx->record_type = ctx->receiver_decl = NULL;
1800 }
1801 
1802 /* Scan an OpenMP task directive.  */
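/* Rough sketch of the task case: for

     #pragma omp task firstprivate(x)

   a child function taking a .omp_data_s record is created as for
   parallel; if a separate firstprivate copy function is needed, a
   second record (.omp_data_a) and a task_copy child function are
   created as well.  Tasks whose record ends up empty get arg_size 0
   and arg_align 1 below.  */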
1803 
1804 static void
1805 scan_omp_task (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
1806 {
1807   omp_context *ctx;
1808   tree name, t;
1809   gomp_task *stmt = as_a <gomp_task *> (gsi_stmt (*gsi));
1810 
1811   /* Ignore task directives with empty bodies, unless they have a
1812      depend clause.  */
1813   if (optimize > 0
1814       && empty_body_p (gimple_omp_body (stmt))
1815       && !omp_find_clause (gimple_omp_task_clauses (stmt), OMP_CLAUSE_DEPEND))
1816     {
1817       gsi_replace (gsi, gimple_build_nop (), false);
1818       return;
1819     }
1820 
1821   if (gimple_omp_task_taskloop_p (stmt))
1822     add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_TASKLOOP, stmt, outer_ctx);
1823 
1824   ctx = new_omp_context (stmt, outer_ctx);
1825   taskreg_contexts.safe_push (ctx);
1826   if (taskreg_nesting_level > 1)
1827     ctx->is_nested = true;
1828   ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
1829   ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
1830   name = create_tmp_var_name (".omp_data_s");
1831   name = build_decl (gimple_location (stmt),
1832 		     TYPE_DECL, name, ctx->record_type);
1833   DECL_ARTIFICIAL (name) = 1;
1834   DECL_NAMELESS (name) = 1;
1835   TYPE_NAME (ctx->record_type) = name;
1836   TYPE_ARTIFICIAL (ctx->record_type) = 1;
1837   create_omp_child_function (ctx, false);
1838   gimple_omp_task_set_child_fn (stmt, ctx->cb.dst_fn);
1839 
1840   scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);
1841 
1842   if (ctx->srecord_type)
1843     {
1844       name = create_tmp_var_name (".omp_data_a");
1845       name = build_decl (gimple_location (stmt),
1846 			 TYPE_DECL, name, ctx->srecord_type);
1847       DECL_ARTIFICIAL (name) = 1;
1848       DECL_NAMELESS (name) = 1;
1849       TYPE_NAME (ctx->srecord_type) = name;
1850       TYPE_ARTIFICIAL (ctx->srecord_type) = 1;
1851       create_omp_child_function (ctx, true);
1852     }
1853 
1854   scan_omp (gimple_omp_body_ptr (stmt), ctx);
1855 
1856   if (TYPE_FIELDS (ctx->record_type) == NULL)
1857     {
1858       ctx->record_type = ctx->receiver_decl = NULL;
1859       t = build_int_cst (long_integer_type_node, 0);
1860       gimple_omp_task_set_arg_size (stmt, t);
1861       t = build_int_cst (long_integer_type_node, 1);
1862       gimple_omp_task_set_arg_align (stmt, t);
1863     }
1864 }
1865 
1866 /* Helper function for finish_taskreg_scan, called through walk_tree.
1867    If maybe_lookup_decl_in_outer_ctx returns a different tree for
1868    some variable, replace it in the expression.  */
1869 
1870 static tree
1871 finish_taskreg_remap (tree *tp, int *walk_subtrees, void *data)
1872 {
1873   if (VAR_P (*tp))
1874     {
1875       omp_context *ctx = (omp_context *) data;
1876       tree t = maybe_lookup_decl_in_outer_ctx (*tp, ctx);
1877       if (t != *tp)
1878 	{
1879 	  if (DECL_HAS_VALUE_EXPR_P (t))
1880 	    t = unshare_expr (DECL_VALUE_EXPR (t));
1881 	  *tp = t;
1882 	}
1883       *walk_subtrees = 0;
1884     }
1885   else if (IS_TYPE_OR_DECL_P (*tp))
1886     *walk_subtrees = 0;
1887   return NULL_TREE;
1888 }
1889 
1890 /* If any decls have been made addressable during scan_omp,
1891    adjust their fields if needed, and layout record types
1892    of parallel/task constructs.  */
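/* Illustrative example: a variable copied by value into the record as
   "int i;" may have been made addressable after its field was created
   (e.g. because a nested context takes its address); in that case the
   field must be retyped to "int *i;" so the child function operates on
   the original object, and the alignment bookkeeping below keeps the
   record layout consistent.  */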
1893 
1894 static void
1895 finish_taskreg_scan (omp_context *ctx)
1896 {
1897   if (ctx->record_type == NULL_TREE)
1898     return;
1899 
1900   /* If any task_shared_vars were needed, check all
1901      OMP_CLAUSE_SHARED clauses on GIMPLE_OMP_{PARALLEL,TASK}
1902      statements to see whether use_pointer_for_field has changed
1903      because of that.  If it has, update the field types now.  */
1904   if (task_shared_vars)
1905     {
1906       tree c;
1907 
1908       for (c = gimple_omp_taskreg_clauses (ctx->stmt);
1909 	   c; c = OMP_CLAUSE_CHAIN (c))
1910 	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
1911 	    && !OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
1912 	  {
1913 	    tree decl = OMP_CLAUSE_DECL (c);
1914 
1915 	    /* Global variables don't need to be copied,
1916 	       the receiver side will use them directly.  */
1917 	    if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1918 	      continue;
1919 	    if (!bitmap_bit_p (task_shared_vars, DECL_UID (decl))
1920 		|| !use_pointer_for_field (decl, ctx))
1921 	      continue;
1922 	    tree field = lookup_field (decl, ctx);
1923 	    if (TREE_CODE (TREE_TYPE (field)) == POINTER_TYPE
1924 		&& TREE_TYPE (TREE_TYPE (field)) == TREE_TYPE (decl))
1925 	      continue;
1926 	    TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
1927 	    TREE_THIS_VOLATILE (field) = 0;
1928 	    DECL_USER_ALIGN (field) = 0;
1929 	    SET_DECL_ALIGN (field, TYPE_ALIGN (TREE_TYPE (field)));
1930 	    if (TYPE_ALIGN (ctx->record_type) < DECL_ALIGN (field))
1931 	      SET_TYPE_ALIGN (ctx->record_type, DECL_ALIGN (field));
1932 	    if (ctx->srecord_type)
1933 	      {
1934 		tree sfield = lookup_sfield (decl, ctx);
1935 		TREE_TYPE (sfield) = TREE_TYPE (field);
1936 		TREE_THIS_VOLATILE (sfield) = 0;
1937 		DECL_USER_ALIGN (sfield) = 0;
1938 		SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
1939 		if (TYPE_ALIGN (ctx->srecord_type) < DECL_ALIGN (sfield))
1940 		  SET_TYPE_ALIGN (ctx->srecord_type, DECL_ALIGN (sfield));
1941 	      }
1942 	  }
1943     }
1944 
1945   if (gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL)
1946     {
1947       layout_type (ctx->record_type);
1948       fixup_child_record_type (ctx);
1949     }
1950   else
1951     {
1952       location_t loc = gimple_location (ctx->stmt);
1953       tree *p, vla_fields = NULL_TREE, *q = &vla_fields;
1954       /* Move VLA fields to the end.  */
1955       p = &TYPE_FIELDS (ctx->record_type);
1956       while (*p)
1957 	if (!TYPE_SIZE_UNIT (TREE_TYPE (*p))
1958 	    || !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (*p))))
1959 	  {
1960 	    *q = *p;
1961 	    *p = TREE_CHAIN (*p);
1962 	    TREE_CHAIN (*q) = NULL_TREE;
1963 	    q = &TREE_CHAIN (*q);
1964 	  }
1965 	else
1966 	  p = &DECL_CHAIN (*p);
1967       *p = vla_fields;
1968       if (gimple_omp_task_taskloop_p (ctx->stmt))
1969 	{
1970 	  /* Move the fields corresponding to the first and second _looptemp_
1971 	     clauses to the front.  These are filled by GOMP_taskloop
1972 	     and thus need to be in specific positions.  */
1973 	  tree c1 = gimple_omp_task_clauses (ctx->stmt);
1974 	  c1 = omp_find_clause (c1, OMP_CLAUSE__LOOPTEMP_);
1975 	  tree c2 = omp_find_clause (OMP_CLAUSE_CHAIN (c1),
1976 				     OMP_CLAUSE__LOOPTEMP_);
1977 	  tree f1 = lookup_field (OMP_CLAUSE_DECL (c1), ctx);
1978 	  tree f2 = lookup_field (OMP_CLAUSE_DECL (c2), ctx);
1979 	  p = &TYPE_FIELDS (ctx->record_type);
1980 	  while (*p)
1981 	    if (*p == f1 || *p == f2)
1982 	      *p = DECL_CHAIN (*p);
1983 	    else
1984 	      p = &DECL_CHAIN (*p);
1985 	  DECL_CHAIN (f1) = f2;
1986 	  DECL_CHAIN (f2) = TYPE_FIELDS (ctx->record_type);
1987 	  TYPE_FIELDS (ctx->record_type) = f1;
1988 	  if (ctx->srecord_type)
1989 	    {
1990 	      f1 = lookup_sfield (OMP_CLAUSE_DECL (c1), ctx);
1991 	      f2 = lookup_sfield (OMP_CLAUSE_DECL (c2), ctx);
1992 	      p = &TYPE_FIELDS (ctx->srecord_type);
1993 	      while (*p)
1994 		if (*p == f1 || *p == f2)
1995 		  *p = DECL_CHAIN (*p);
1996 		else
1997 		  p = &DECL_CHAIN (*p);
1998 	      DECL_CHAIN (f1) = f2;
1999 	      DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
2000 	      TYPE_FIELDS (ctx->srecord_type) = f1;
2001 	    }
2002 	}
2003       layout_type (ctx->record_type);
2004       fixup_child_record_type (ctx);
2005       if (ctx->srecord_type)
2006 	layout_type (ctx->srecord_type);
2007       tree t = fold_convert_loc (loc, long_integer_type_node,
2008 				 TYPE_SIZE_UNIT (ctx->record_type));
2009       if (TREE_CODE (t) != INTEGER_CST)
2010 	{
2011 	  t = unshare_expr (t);
2012 	  walk_tree (&t, finish_taskreg_remap, ctx, NULL);
2013 	}
2014       gimple_omp_task_set_arg_size (ctx->stmt, t);
2015       t = build_int_cst (long_integer_type_node,
2016 			 TYPE_ALIGN_UNIT (ctx->record_type));
2017       gimple_omp_task_set_arg_align (ctx->stmt, t);
2018     }
2019 }
2020 
2021 /* Find the enclosing offload context.  */
2022 
2023 static omp_context *
2024 enclosing_target_ctx (omp_context *ctx)
2025 {
2026   for (; ctx; ctx = ctx->outer)
2027     if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET)
2028       break;
2029 
2030   return ctx;
2031 }
2032 
2033 /* Return true if ctx is part of an oacc kernels region.  */
2034 
2035 static bool
2036 ctx_in_oacc_kernels_region (omp_context *ctx)
2037 {
2038   for (; ctx != NULL; ctx = ctx->outer)
2039     {
2040       gimple *stmt = ctx->stmt;
2041       if (gimple_code (stmt) == GIMPLE_OMP_TARGET
2042 	  && gimple_omp_target_kind (stmt) == GF_OMP_TARGET_KIND_OACC_KERNELS)
2043 	return true;
2044     }
2045 
2046   return false;
2047 }
2048 
2049 /* Check the parallelism clauses inside a kernels region.
2050    Until kernels handling moves to use the same loop indirection
2051    scheme as parallel, we need to do this checking early.  */
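/* For example (reduced, hypothetical test case), this nest is
   diagnosed with "inner loop uses same OpenACC parallelism as
   containing loop", because both loops request gang parallelism:

     #pragma acc kernels
     {
       #pragma acc loop gang
       for (i = 0; i < n; i++)
	 #pragma acc loop gang
	 for (j = 0; j < m; j++)
	   ...
     }
*/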
2052 
2053 static unsigned
2054 check_oacc_kernel_gwv (gomp_for *stmt, omp_context *ctx)
2055 {
2056   bool checking = true;
2057   unsigned outer_mask = 0;
2058   unsigned this_mask = 0;
2059   bool has_seq = false, has_auto = false;
2060 
2061   if (ctx->outer)
2062     outer_mask = check_oacc_kernel_gwv (NULL, ctx->outer);
2063   if (!stmt)
2064     {
2065       checking = false;
2066       if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR)
2067 	return outer_mask;
2068       stmt = as_a <gomp_for *> (ctx->stmt);
2069     }
2070 
2071   for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2072     {
2073       switch (OMP_CLAUSE_CODE (c))
2074 	{
2075 	case OMP_CLAUSE_GANG:
2076 	  this_mask |= GOMP_DIM_MASK (GOMP_DIM_GANG);
2077 	  break;
2078 	case OMP_CLAUSE_WORKER:
2079 	  this_mask |= GOMP_DIM_MASK (GOMP_DIM_WORKER);
2080 	  break;
2081 	case OMP_CLAUSE_VECTOR:
2082 	  this_mask |= GOMP_DIM_MASK (GOMP_DIM_VECTOR);
2083 	  break;
2084 	case OMP_CLAUSE_SEQ:
2085 	  has_seq = true;
2086 	  break;
2087 	case OMP_CLAUSE_AUTO:
2088 	  has_auto = true;
2089 	  break;
2090 	default:
2091 	  break;
2092 	}
2093     }
2094 
2095   if (checking)
2096     {
2097       if (has_seq && (this_mask || has_auto))
2098 	error_at (gimple_location (stmt), "%<seq%> overrides other"
2099 		  " OpenACC loop specifiers");
2100       else if (has_auto && this_mask)
2101 	error_at (gimple_location (stmt), "%<auto%> conflicts with other"
2102 		  " OpenACC loop specifiers");
2103 
2104       if (this_mask & outer_mask)
2105 	error_at (gimple_location (stmt), "inner loop uses same"
2106 		  " OpenACC parallelism as containing loop");
2107     }
2108 
2109   return outer_mask | this_mask;
2110 }
2111 
2112 /* Scan a GIMPLE_OMP_FOR.  */
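/* As an example of the OpenACC checking below: in a parallel region a
   gang/worker/vector clause may not take an argument, so (hypothetical
   source)

     #pragma acc parallel
     #pragma acc loop gang(32)
     for (...) ...

   is rejected with "argument not permitted on 'gang' clause", while in
   a kernels region reduction clauses are stripped instead, as they are
   not handled there yet.  */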
2113 
2114 static omp_context *
2115 scan_omp_for (gomp_for *stmt, omp_context *outer_ctx)
2116 {
2117   omp_context *ctx;
2118   size_t i;
2119   tree clauses = gimple_omp_for_clauses (stmt);
2120 
2121   ctx = new_omp_context (stmt, outer_ctx);
2122 
2123   if (is_gimple_omp_oacc (stmt))
2124     {
2125       omp_context *tgt = enclosing_target_ctx (outer_ctx);
2126 
2127       if (!tgt || is_oacc_parallel (tgt))
2128 	for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
2129 	  {
2130 	    char const *check = NULL;
2131 
2132 	    switch (OMP_CLAUSE_CODE (c))
2133 	      {
2134 	      case OMP_CLAUSE_GANG:
2135 		check = "gang";
2136 		break;
2137 
2138 	      case OMP_CLAUSE_WORKER:
2139 		check = "worker";
2140 		break;
2141 
2142 	      case OMP_CLAUSE_VECTOR:
2143 		check = "vector";
2144 		break;
2145 
2146 	      default:
2147 		break;
2148 	      }
2149 
2150 	    if (check && OMP_CLAUSE_OPERAND (c, 0))
2151 	      error_at (gimple_location (stmt),
2152 			"argument not permitted on %qs clause in"
2153 			" OpenACC %<parallel%>", check);
2154 	  }
2155 
2156       if (tgt && is_oacc_kernels (tgt))
2157 	{
2158 	  /* Strip out reductions, as they are not handled yet.  */
2159 	  tree *prev_ptr = &clauses;
2160 
2161 	  while (tree probe = *prev_ptr)
2162 	    {
2163 	      tree *next_ptr = &OMP_CLAUSE_CHAIN (probe);
2164 
2165 	      if (OMP_CLAUSE_CODE (probe) == OMP_CLAUSE_REDUCTION)
2166 		*prev_ptr = *next_ptr;
2167 	      else
2168 		prev_ptr = next_ptr;
2169 	    }
2170 
2171 	  gimple_omp_for_set_clauses (stmt, clauses);
2172 	  check_oacc_kernel_gwv (stmt, ctx);
2173 	}
2174     }
2175 
2176   scan_sharing_clauses (clauses, ctx);
2177 
2178   scan_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
2179   for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
2180     {
2181       scan_omp_op (gimple_omp_for_index_ptr (stmt, i), ctx);
2182       scan_omp_op (gimple_omp_for_initial_ptr (stmt, i), ctx);
2183       scan_omp_op (gimple_omp_for_final_ptr (stmt, i), ctx);
2184       scan_omp_op (gimple_omp_for_incr_ptr (stmt, i), ctx);
2185     }
2186   scan_omp (gimple_omp_body_ptr (stmt), ctx);
2187   return ctx;
2188 }
2189 
2190 /* Duplicate #pragma omp simd, one for SIMT, another one for SIMD.  */
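/* Schematically, the bind built below looks like (pseudo-GIMPLE, with
   GOMP_USE_SIMT standing for the internal function call):

     cond = GOMP_USE_SIMT ();
     if (cond != 0) goto lab1; else goto lab2;
     lab1:
       #pragma omp simd _simt_ ...	copy, for SIMT targets
       goto lab3;
     lab2:
       #pragma omp simd ...		original, for SIMD
     lab3:

   Later passes resolve GOMP_USE_SIMT and keep only one arm.  */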
2191 
2192 static void
2193 scan_omp_simd (gimple_stmt_iterator *gsi, gomp_for *stmt,
2194 	       omp_context *outer_ctx)
2195 {
2196   gbind *bind = gimple_build_bind (NULL, NULL, NULL);
2197   gsi_replace (gsi, bind, false);
2198   gimple_seq seq = NULL;
2199   gimple *g = gimple_build_call_internal (IFN_GOMP_USE_SIMT, 0);
2200   tree cond = create_tmp_var_raw (integer_type_node);
2201   DECL_CONTEXT (cond) = current_function_decl;
2202   DECL_SEEN_IN_BIND_EXPR_P (cond) = 1;
2203   gimple_bind_set_vars (bind, cond);
2204   gimple_call_set_lhs (g, cond);
2205   gimple_seq_add_stmt (&seq, g);
2206   tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
2207   tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
2208   tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
2209   g = gimple_build_cond (NE_EXPR, cond, integer_zero_node, lab1, lab2);
2210   gimple_seq_add_stmt (&seq, g);
2211   g = gimple_build_label (lab1);
2212   gimple_seq_add_stmt (&seq, g);
2213   gimple_seq new_seq = copy_gimple_seq_and_replace_locals (stmt);
2214   gomp_for *new_stmt = as_a <gomp_for *> (new_seq);
2215   tree clause = build_omp_clause (gimple_location (stmt), OMP_CLAUSE__SIMT_);
2216   OMP_CLAUSE_CHAIN (clause) = gimple_omp_for_clauses (new_stmt);
2217   gimple_omp_for_set_clauses (new_stmt, clause);
2218   gimple_seq_add_stmt (&seq, new_stmt);
2219   g = gimple_build_goto (lab3);
2220   gimple_seq_add_stmt (&seq, g);
2221   g = gimple_build_label (lab2);
2222   gimple_seq_add_stmt (&seq, g);
2223   gimple_seq_add_stmt (&seq, stmt);
2224   g = gimple_build_label (lab3);
2225   gimple_seq_add_stmt (&seq, g);
2226   gimple_bind_set_body (bind, seq);
2227   update_stmt (bind);
2228   scan_omp_for (new_stmt, outer_ctx);
2229   scan_omp_for (stmt, outer_ctx)->simt_stmt = new_stmt;
2230 }
2231 
2232 /* Scan an OpenMP sections directive.  */
2233 
2234 static void
2235 scan_omp_sections (gomp_sections *stmt, omp_context *outer_ctx)
2236 {
2237   omp_context *ctx;
2238 
2239   ctx = new_omp_context (stmt, outer_ctx);
2240   scan_sharing_clauses (gimple_omp_sections_clauses (stmt), ctx);
2241   scan_omp (gimple_omp_body_ptr (stmt), ctx);
2242 }
2243 
2244 /* Scan an OpenMP single directive.  */
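/* The .omp_copy_s record built here only survives when copyprivate
   clauses add fields to it, e.g. for

     #pragma omp single copyprivate(x)

   where the value of X produced by the one executing thread must be
   broadcast to the other threads in the team.  */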
2245 
2246 static void
2247 scan_omp_single (gomp_single *stmt, omp_context *outer_ctx)
2248 {
2249   omp_context *ctx;
2250   tree name;
2251 
2252   ctx = new_omp_context (stmt, outer_ctx);
2253   ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2254   ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2255   name = create_tmp_var_name (".omp_copy_s");
2256   name = build_decl (gimple_location (stmt),
2257 		     TYPE_DECL, name, ctx->record_type);
2258   TYPE_NAME (ctx->record_type) = name;
2259 
2260   scan_sharing_clauses (gimple_omp_single_clauses (stmt), ctx);
2261   scan_omp (gimple_omp_body_ptr (stmt), ctx);
2262 
2263   if (TYPE_FIELDS (ctx->record_type) == NULL)
2264     ctx->record_type = NULL;
2265   else
2266     layout_type (ctx->record_type);
2267 }
2268 
2269 /* Return true if the CLAUSES of an omp target guarantee that the base pointers
2270    used in the corresponding offloaded function are restrict.  */
2271 
2272 static bool
2273 omp_target_base_pointers_restrict_p (tree clauses)
2274 {
2275   /* The analysis relies on the GOMP_MAP_FORCE_* mapping kinds, which are only
2276      used by OpenACC.  */
2277   if (flag_openacc == 0)
2278     return false;
2279 
2280   /* I.  Basic example:
2281 
2282        void foo (void)
2283        {
2284 	 unsigned int a[2], b[2];
2285 
2286 	 #pragma acc kernels \
2287 	   copyout (a) \
2288 	   copyout (b)
2289 	 {
2290 	   a[0] = 0;
2291 	   b[0] = 1;
2292 	 }
2293        }
2294 
2295      After gimplification, we have:
2296 
2297        #pragma omp target oacc_kernels \
2298 	 map(force_from:a [len: 8]) \
2299 	 map(force_from:b [len: 8])
2300        {
2301 	 a[0] = 0;
2302 	 b[0] = 1;
2303        }
2304 
2305      Because both mappings have the force prefix, we know that they will be
2306      allocated when calling the corresponding offloaded function, which means we
2307      can mark the base pointers for a and b in the offloaded function as
2308      restrict.  */
2309 
2310   tree c;
2311   for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
2312     {
2313       if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP)
2314 	return false;
2315 
2316       switch (OMP_CLAUSE_MAP_KIND (c))
2317 	{
2318 	case GOMP_MAP_FORCE_ALLOC:
2319 	case GOMP_MAP_FORCE_TO:
2320 	case GOMP_MAP_FORCE_FROM:
2321 	case GOMP_MAP_FORCE_TOFROM:
2322 	  break;
2323 	default:
2324 	  return false;
2325 	}
2326     }
2327 
2328   return true;
2329 }
2330 
2331 /* Scan a GIMPLE_OMP_TARGET.  */
2332 
2333 static void
2334 scan_omp_target (gomp_target *stmt, omp_context *outer_ctx)
2335 {
2336   omp_context *ctx;
2337   tree name;
2338   bool offloaded = is_gimple_omp_offloaded (stmt);
2339   tree clauses = gimple_omp_target_clauses (stmt);
2340 
2341   ctx = new_omp_context (stmt, outer_ctx);
2342   ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2343   ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2344   name = create_tmp_var_name (".omp_data_t");
2345   name = build_decl (gimple_location (stmt),
2346 		     TYPE_DECL, name, ctx->record_type);
2347   DECL_ARTIFICIAL (name) = 1;
2348   DECL_NAMELESS (name) = 1;
2349   TYPE_NAME (ctx->record_type) = name;
2350   TYPE_ARTIFICIAL (ctx->record_type) = 1;
2351 
2352   bool base_pointers_restrict = false;
2353   if (offloaded)
2354     {
2355       create_omp_child_function (ctx, false);
2356       gimple_omp_target_set_child_fn (stmt, ctx->cb.dst_fn);
2357 
2358       base_pointers_restrict = omp_target_base_pointers_restrict_p (clauses);
2359       if (base_pointers_restrict
2360 	  && dump_file && (dump_flags & TDF_DETAILS))
2361 	fprintf (dump_file,
2362 		 "Base pointers in offloaded function are restrict\n");
2363     }
2364 
2365   scan_sharing_clauses (clauses, ctx, base_pointers_restrict);
2366   scan_omp (gimple_omp_body_ptr (stmt), ctx);
2367 
2368   if (TYPE_FIELDS (ctx->record_type) == NULL)
2369     ctx->record_type = ctx->receiver_decl = NULL;
2370   else
2371     {
2372       TYPE_FIELDS (ctx->record_type)
2373 	= nreverse (TYPE_FIELDS (ctx->record_type));
2374       if (flag_checking)
2375 	{
2376 	  unsigned int align = DECL_ALIGN (TYPE_FIELDS (ctx->record_type));
2377 	  for (tree field = TYPE_FIELDS (ctx->record_type);
2378 	       field;
2379 	       field = DECL_CHAIN (field))
2380 	    gcc_assert (DECL_ALIGN (field) == align);
2381 	}
2382       layout_type (ctx->record_type);
2383       if (offloaded)
2384 	fixup_child_record_type (ctx);
2385     }
2386 }
2387 
2388 /* Scan an OpenMP teams directive.  */
2389 
2390 static void
2391 scan_omp_teams (gomp_teams *stmt, omp_context *outer_ctx)
2392 {
2393   omp_context *ctx = new_omp_context (stmt, outer_ctx);
2394   scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
2395   scan_omp (gimple_omp_body_ptr (stmt), ctx);
2396 }
2397 
2398 /* Check nesting restrictions.  */
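/* Two illustrative violations this function diagnoses:

     #pragma omp for
     for (i = 0; i < n; i++)
       {
	 #pragma omp barrier	(barrier region closely nested inside
				 a work-sharing region)
       }

     #pragma omp target
     #pragma omp teams
     {
       #pragma omp single	(only distribute or parallel may be
       ...			 strictly nested inside teams)
     }
*/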
2399 static bool
2400 check_omp_nesting_restrictions (gimple *stmt, omp_context *ctx)
2401 {
2402   tree c;
2403 
2404   if (ctx && gimple_code (ctx->stmt) == GIMPLE_OMP_GRID_BODY)
2405     /* GRID_BODY is an artificial construct; nesting rules will be checked in
2406        the original copy of its contents.  */
2407     return true;
2408 
2409   /* No nesting of non-OpenACC STMT (that is, an OpenMP one, or a GOMP builtin)
2410      inside an OpenACC CTX.  */
2411   if (!(is_gimple_omp (stmt)
2412 	&& is_gimple_omp_oacc (stmt))
2413       /* Except for atomic codes that we share with OpenMP.  */
2414       && !(gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
2415 	   || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE))
2416     {
2417       if (oacc_get_fn_attrib (cfun->decl) != NULL)
2418 	{
2419 	  error_at (gimple_location (stmt),
2420 		    "non-OpenACC construct inside of OpenACC routine");
2421 	  return false;
2422 	}
2423       else
2424 	for (omp_context *octx = ctx; octx != NULL; octx = octx->outer)
2425 	  if (is_gimple_omp (octx->stmt)
2426 	      && is_gimple_omp_oacc (octx->stmt))
2427 	    {
2428 	      error_at (gimple_location (stmt),
2429 			"non-OpenACC construct inside of OpenACC region");
2430 	      return false;
2431 	    }
2432     }
2433 
2434   if (ctx != NULL)
2435     {
2436       if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
2437 	  && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
2438 	{
2439 	  c = NULL_TREE;
2440 	  if (gimple_code (stmt) == GIMPLE_OMP_ORDERED)
2441 	    {
2442 	      c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
2443 	      if (omp_find_clause (c, OMP_CLAUSE_SIMD))
2444 		{
2445 		  if (omp_find_clause (c, OMP_CLAUSE_THREADS)
2446 		      && (ctx->outer == NULL
2447 			  || !gimple_omp_for_combined_into_p (ctx->stmt)
2448 			  || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR
2449 			  || (gimple_omp_for_kind (ctx->outer->stmt)
2450 			      != GF_OMP_FOR_KIND_FOR)
2451 			  || !gimple_omp_for_combined_p (ctx->outer->stmt)))
2452 		    {
2453 		      error_at (gimple_location (stmt),
2454 				"%<ordered simd threads%> must be closely "
2455 				"nested inside of %<for simd%> region");
2456 		      return false;
2457 		    }
2458 		  return true;
2459 		}
2460 	    }
2461 	  error_at (gimple_location (stmt),
2462 		    "OpenMP constructs other than %<#pragma omp ordered simd%>"
2463 		    " may not be nested inside %<simd%> region");
2464 	  return false;
2465 	}
2466       else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
2467 	{
2468 	  if ((gimple_code (stmt) != GIMPLE_OMP_FOR
2469 	       || ((gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_DISTRIBUTE)
2470 		   && (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_GRID_LOOP)))
2471 	      && gimple_code (stmt) != GIMPLE_OMP_PARALLEL)
2472 	    {
2473 	      error_at (gimple_location (stmt),
2474 			"only %<distribute%> or %<parallel%> regions are "
2475 			"allowed to be strictly nested inside %<teams%> "
2476 			"region");
2477 	      return false;
2478 	    }
2479 	}
2480     }
2481   switch (gimple_code (stmt))
2482     {
2483     case GIMPLE_OMP_FOR:
2484       if (gimple_omp_for_kind (stmt) & GF_OMP_FOR_SIMD)
2485 	return true;
2486       if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_DISTRIBUTE)
2487 	{
2488 	  if (ctx != NULL && gimple_code (ctx->stmt) != GIMPLE_OMP_TEAMS)
2489 	    {
2490 	      error_at (gimple_location (stmt),
2491 			"%<distribute%> region must be strictly nested "
2492 			"inside %<teams%> construct");
2493 	      return false;
2494 	    }
2495 	  return true;
2496 	}
2497       /* We split taskloop into a task with a nested taskloop in it.  */
2498       if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP)
2499 	return true;
2500       if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_OACC_LOOP)
2501 	{
2502 	  bool ok = false;
2503 
2504 	  if (ctx)
2505 	    switch (gimple_code (ctx->stmt))
2506 	      {
2507 	      case GIMPLE_OMP_FOR:
2508 		ok = (gimple_omp_for_kind (ctx->stmt)
2509 		      == GF_OMP_FOR_KIND_OACC_LOOP);
2510 		break;
2511 
2512 	      case GIMPLE_OMP_TARGET:
2513 		switch (gimple_omp_target_kind (ctx->stmt))
2514 		  {
2515 		  case GF_OMP_TARGET_KIND_OACC_PARALLEL:
2516 		  case GF_OMP_TARGET_KIND_OACC_KERNELS:
2517 		    ok = true;
2518 		    break;
2519 
2520 		  default:
2521 		    break;
2522 		  }
2523 
2524 	      default:
2525 		break;
2526 	      }
2527 	  else if (oacc_get_fn_attrib (current_function_decl))
2528 	    ok = true;
2529 	  if (!ok)
2530 	    {
2531 	      error_at (gimple_location (stmt),
2532 			"OpenACC loop directive must be associated with"
2533 			" an OpenACC compute region");
2534 	      return false;
2535 	    }
2536 	}
2537       /* FALLTHRU */
2538     case GIMPLE_CALL:
2539       if (is_gimple_call (stmt)
2540 	  && (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2541 	      == BUILT_IN_GOMP_CANCEL
2542 	      || DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2543 		 == BUILT_IN_GOMP_CANCELLATION_POINT))
2544 	{
2545 	  const char *bad = NULL;
2546 	  const char *kind = NULL;
2547 	  const char *construct
2548 	    = (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2549 	       == BUILT_IN_GOMP_CANCEL)
2550 	      ? "#pragma omp cancel"
2551 	      : "#pragma omp cancellation point";
2552 	  if (ctx == NULL)
2553 	    {
2554 	      error_at (gimple_location (stmt), "orphaned %qs construct",
2555 			construct);
2556 	      return false;
2557 	    }
2558 	  switch (tree_fits_shwi_p (gimple_call_arg (stmt, 0))
2559 		  ? tree_to_shwi (gimple_call_arg (stmt, 0))
2560 		  : 0)
2561 	    {
2562 	    case 1:
2563 	      if (gimple_code (ctx->stmt) != GIMPLE_OMP_PARALLEL)
2564 		bad = "#pragma omp parallel";
2565 	      else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2566 		       == BUILT_IN_GOMP_CANCEL
2567 		       && !integer_zerop (gimple_call_arg (stmt, 1)))
2568 		ctx->cancellable = true;
2569 	      kind = "parallel";
2570 	      break;
2571 	    case 2:
2572 	      if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
2573 		  || gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR)
2574 		bad = "#pragma omp for";
2575 	      else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2576 		       == BUILT_IN_GOMP_CANCEL
2577 		       && !integer_zerop (gimple_call_arg (stmt, 1)))
2578 		{
2579 		  ctx->cancellable = true;
2580 		  if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2581 				       OMP_CLAUSE_NOWAIT))
2582 		    warning_at (gimple_location (stmt), 0,
2583 				"%<#pragma omp cancel for%> inside "
2584 				"%<nowait%> for construct");
2585 		  if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2586 				       OMP_CLAUSE_ORDERED))
2587 		    warning_at (gimple_location (stmt), 0,
2588 				"%<#pragma omp cancel for%> inside "
2589 				"%<ordered%> for construct");
2590 		}
2591 	      kind = "for";
2592 	      break;
2593 	    case 4:
2594 	      if (gimple_code (ctx->stmt) != GIMPLE_OMP_SECTIONS
2595 		  && gimple_code (ctx->stmt) != GIMPLE_OMP_SECTION)
2596 		bad = "#pragma omp sections";
2597 	      else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2598 		       == BUILT_IN_GOMP_CANCEL
2599 		       && !integer_zerop (gimple_call_arg (stmt, 1)))
2600 		{
2601 		  if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
2602 		    {
2603 		      ctx->cancellable = true;
2604 		      if (omp_find_clause (gimple_omp_sections_clauses
2605 								(ctx->stmt),
2606 					   OMP_CLAUSE_NOWAIT))
2607 			warning_at (gimple_location (stmt), 0,
2608 				    "%<#pragma omp cancel sections%> inside "
2609 				    "%<nowait%> sections construct");
2610 		    }
2611 		  else
2612 		    {
2613 		      gcc_assert (ctx->outer
2614 				  && gimple_code (ctx->outer->stmt)
2615 				     == GIMPLE_OMP_SECTIONS);
2616 		      ctx->outer->cancellable = true;
2617 		      if (omp_find_clause (gimple_omp_sections_clauses
2618 							(ctx->outer->stmt),
2619 					   OMP_CLAUSE_NOWAIT))
2620 			warning_at (gimple_location (stmt), 0,
2621 				    "%<#pragma omp cancel sections%> inside "
2622 				    "%<nowait%> sections construct");
2623 		    }
2624 		}
2625 	      kind = "sections";
2626 	      break;
2627 	    case 8:
2628 	      if (gimple_code (ctx->stmt) != GIMPLE_OMP_TASK)
2629 		bad = "#pragma omp task";
2630 	      else
2631 		{
2632 		  for (omp_context *octx = ctx->outer;
2633 		       octx; octx = octx->outer)
2634 		    {
2635 		      switch (gimple_code (octx->stmt))
2636 			{
2637 			case GIMPLE_OMP_TASKGROUP:
2638 			  break;
2639 			case GIMPLE_OMP_TARGET:
2640 			  if (gimple_omp_target_kind (octx->stmt)
2641 			      != GF_OMP_TARGET_KIND_REGION)
2642 			    continue;
2643 			  /* FALLTHRU */
2644 			case GIMPLE_OMP_PARALLEL:
2645 			case GIMPLE_OMP_TEAMS:
2646 			  error_at (gimple_location (stmt),
2647 				    "%<%s taskgroup%> construct not closely "
2648 				    "nested inside of %<taskgroup%> region",
2649 				    construct);
2650 			  return false;
2651 			default:
2652 			  continue;
2653 			}
2654 		      break;
2655 		    }
2656 		  ctx->cancellable = true;
2657 		}
2658 	      kind = "taskgroup";
2659 	      break;
2660 	    default:
2661 	      error_at (gimple_location (stmt), "invalid arguments");
2662 	      return false;
2663 	    }
2664 	  if (bad)
2665 	    {
2666 	      error_at (gimple_location (stmt),
2667 			"%<%s %s%> construct not closely nested inside of %qs",
2668 			construct, kind, bad);
2669 	      return false;
2670 	    }
2671 	}
2672       /* FALLTHRU */
2673     case GIMPLE_OMP_SECTIONS:
2674     case GIMPLE_OMP_SINGLE:
2675       for (; ctx != NULL; ctx = ctx->outer)
2676 	switch (gimple_code (ctx->stmt))
2677 	  {
2678 	  case GIMPLE_OMP_FOR:
2679 	    if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
2680 		&& gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
2681 	      break;
2682 	    /* FALLTHRU */
2683 	  case GIMPLE_OMP_SECTIONS:
2684 	  case GIMPLE_OMP_SINGLE:
2685 	  case GIMPLE_OMP_ORDERED:
2686 	  case GIMPLE_OMP_MASTER:
2687 	  case GIMPLE_OMP_TASK:
2688 	  case GIMPLE_OMP_CRITICAL:
2689 	    if (is_gimple_call (stmt))
2690 	      {
2691 		if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2692 		    != BUILT_IN_GOMP_BARRIER)
2693 		  return true;
2694 		error_at (gimple_location (stmt),
2695 			  "barrier region may not be closely nested inside "
2696 			  "of work-sharing, %<critical%>, %<ordered%>, "
2697 			  "%<master%>, explicit %<task%> or %<taskloop%> "
2698 			  "region");
2699 		return false;
2700 	      }
2701 	    error_at (gimple_location (stmt),
2702 		      "work-sharing region may not be closely nested inside "
2703 		      "of work-sharing, %<critical%>, %<ordered%>, "
2704 		      "%<master%>, explicit %<task%> or %<taskloop%> region");
2705 	    return false;
2706 	  case GIMPLE_OMP_PARALLEL:
2707 	  case GIMPLE_OMP_TEAMS:
2708 	    return true;
2709 	  case GIMPLE_OMP_TARGET:
2710 	    if (gimple_omp_target_kind (ctx->stmt)
2711 		== GF_OMP_TARGET_KIND_REGION)
2712 	      return true;
2713 	    break;
2714 	  default:
2715 	    break;
2716 	  }
2717       break;
2718     case GIMPLE_OMP_MASTER:
2719       for (; ctx != NULL; ctx = ctx->outer)
2720 	switch (gimple_code (ctx->stmt))
2721 	  {
2722 	  case GIMPLE_OMP_FOR:
2723 	    if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
2724 		&& gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
2725 	      break;
2726 	    /* FALLTHRU */
2727 	  case GIMPLE_OMP_SECTIONS:
2728 	  case GIMPLE_OMP_SINGLE:
2729 	  case GIMPLE_OMP_TASK:
2730 	    error_at (gimple_location (stmt),
2731 		      "%<master%> region may not be closely nested inside "
2732 		      "of work-sharing, explicit %<task%> or %<taskloop%> "
2733 		      "region");
2734 	    return false;
2735 	  case GIMPLE_OMP_PARALLEL:
2736 	  case GIMPLE_OMP_TEAMS:
2737 	    return true;
2738 	  case GIMPLE_OMP_TARGET:
2739 	    if (gimple_omp_target_kind (ctx->stmt)
2740 		== GF_OMP_TARGET_KIND_REGION)
2741 	      return true;
2742 	    break;
2743 	  default:
2744 	    break;
2745 	  }
2746       break;
2747     case GIMPLE_OMP_TASK:
2748       for (c = gimple_omp_task_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2749 	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
2750 	    && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
2751 		|| OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
2752 	  {
2753 	    enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
2754 	    error_at (OMP_CLAUSE_LOCATION (c),
2755 		      "%<depend(%s)%> is only allowed in %<omp ordered%>",
2756 		      kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
2757 	    return false;
2758 	  }
2759       break;
2760     case GIMPLE_OMP_ORDERED:
2761       for (c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
2762 	   c; c = OMP_CLAUSE_CHAIN (c))
2763 	{
2764 	  if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
2765 	    {
2766 	      gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREADS
2767 			  || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SIMD);
2768 	      continue;
2769 	    }
2770 	  enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
2771 	  if (kind == OMP_CLAUSE_DEPEND_SOURCE
2772 	      || kind == OMP_CLAUSE_DEPEND_SINK)
2773 	    {
2774 	      tree oclause;
2775 	      /* Look for containing ordered(N) loop.  */
2776 	      if (ctx == NULL
2777 		  || gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
2778 		  || (oclause
2779 			= omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2780 					   OMP_CLAUSE_ORDERED)) == NULL_TREE)
2781 		{
2782 		  error_at (OMP_CLAUSE_LOCATION (c),
2783 			    "%<ordered%> construct with %<depend%> clause "
2784 			    "must be closely nested inside an %<ordered%> "
2785 			    "loop");
2786 		  return false;
2787 		}
2788 	      else if (OMP_CLAUSE_ORDERED_EXPR (oclause) == NULL_TREE)
2789 		{
2790 		  error_at (OMP_CLAUSE_LOCATION (c),
2791 			    "%<ordered%> construct with %<depend%> clause "
2792 			    "must be closely nested inside a loop with "
2793 			    "%<ordered%> clause with a parameter");
2794 		  return false;
2795 		}
2796 	    }
2797 	  else
2798 	    {
2799 	      error_at (OMP_CLAUSE_LOCATION (c),
2800 			"invalid depend kind in omp %<ordered%> %<depend%>");
2801 	      return false;
2802 	    }
2803 	}
2804       c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
2805       if (omp_find_clause (c, OMP_CLAUSE_SIMD))
2806 	{
2807 	  /* ordered simd must be closely nested inside of simd region,
2808 	     and simd region must not encounter constructs other than
2809 	     ordered simd, therefore ordered simd may be either orphaned,
2810 	     or ctx->stmt must be simd.  The latter case has already been
2811 	     handled earlier.  */
2812 	  if (ctx != NULL)
2813 	    {
2814 	      error_at (gimple_location (stmt),
2815 			"%<ordered%> %<simd%> must be closely nested inside "
2816 			"%<simd%> region");
2817 	      return false;
2818 	    }
2819 	}
2820       for (; ctx != NULL; ctx = ctx->outer)
2821 	switch (gimple_code (ctx->stmt))
2822 	  {
2823 	  case GIMPLE_OMP_CRITICAL:
2824 	  case GIMPLE_OMP_TASK:
2825 	  case GIMPLE_OMP_ORDERED:
2826 	  ordered_in_taskloop:
2827 	    error_at (gimple_location (stmt),
2828 		      "%<ordered%> region may not be closely nested inside "
2829 		      "of %<critical%>, %<ordered%>, explicit %<task%> or "
2830 		      "%<taskloop%> region");
2831 	    return false;
2832 	  case GIMPLE_OMP_FOR:
2833 	    if (gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP)
2834 	      goto ordered_in_taskloop;
2835 	    if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2836 				 OMP_CLAUSE_ORDERED) == NULL)
2837 	      {
2838 		error_at (gimple_location (stmt),
2839 			  "%<ordered%> region must be closely nested inside "
2840 			  "a loop region with an %<ordered%> clause");
2841 		return false;
2842 	      }
2843 	    return true;
2844 	  case GIMPLE_OMP_TARGET:
2845 	    if (gimple_omp_target_kind (ctx->stmt)
2846 		!= GF_OMP_TARGET_KIND_REGION)
2847 	      break;
2848 	    /* FALLTHRU */
2849 	  case GIMPLE_OMP_PARALLEL:
2850 	  case GIMPLE_OMP_TEAMS:
2851 	    error_at (gimple_location (stmt),
2852 		      "%<ordered%> region must be closely nested inside "
2853 		      "a loop region with an %<ordered%> clause");
2854 	    return false;
2855 	  default:
2856 	    break;
2857 	  }
2858       break;
2859     case GIMPLE_OMP_CRITICAL:
2860       {
2861 	tree this_stmt_name
2862 	  = gimple_omp_critical_name (as_a <gomp_critical *> (stmt));
2863 	for (; ctx != NULL; ctx = ctx->outer)
2864 	  if (gomp_critical *other_crit
2865 	        = dyn_cast <gomp_critical *> (ctx->stmt))
2866 	    if (this_stmt_name == gimple_omp_critical_name (other_crit))
2867 	      {
2868 		error_at (gimple_location (stmt),
2869 			  "%<critical%> region may not be nested inside "
2870 			   "a %<critical%> region with the same name");
2871 		return false;
2872 	      }
2873       }
2874       break;
2875     case GIMPLE_OMP_TEAMS:
2876       if (ctx == NULL
2877 	  || gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET
2878 	  || gimple_omp_target_kind (ctx->stmt) != GF_OMP_TARGET_KIND_REGION)
2879 	{
2880 	  error_at (gimple_location (stmt),
2881 		    "%<teams%> construct not closely nested inside of "
2882 		    "%<target%> construct");
2883 	  return false;
2884 	}
2885       break;
2886     case GIMPLE_OMP_TARGET:
2887       for (c = gimple_omp_target_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2888 	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
2889 	    && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
2890 		|| OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
2891 	  {
2892 	    enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
2893 	    error_at (OMP_CLAUSE_LOCATION (c),
2894 		      "%<depend(%s)%> is only allowed in %<omp ordered%>",
2895 		      kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
2896 	    return false;
2897 	  }
2898       if (is_gimple_omp_offloaded (stmt)
2899 	  && oacc_get_fn_attrib (cfun->decl) != NULL)
2900 	{
2901 	  error_at (gimple_location (stmt),
2902 		    "OpenACC region inside of OpenACC routine, nested "
2903 		    "parallelism not supported yet");
2904 	  return false;
2905 	}
2906       for (; ctx != NULL; ctx = ctx->outer)
2907 	{
2908 	  if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET)
2909 	    {
2910 	      if (is_gimple_omp (stmt)
2911 		  && is_gimple_omp_oacc (stmt)
2912 		  && is_gimple_omp (ctx->stmt))
2913 		{
2914 		  error_at (gimple_location (stmt),
2915 			    "OpenACC construct inside of non-OpenACC region");
2916 		  return false;
2917 		}
2918 	      continue;
2919 	    }
2920 
2921 	  const char *stmt_name, *ctx_stmt_name;
2922 	  switch (gimple_omp_target_kind (stmt))
2923 	    {
2924 	    case GF_OMP_TARGET_KIND_REGION: stmt_name = "target"; break;
2925 	    case GF_OMP_TARGET_KIND_DATA: stmt_name = "target data"; break;
2926 	    case GF_OMP_TARGET_KIND_UPDATE: stmt_name = "target update"; break;
2927 	    case GF_OMP_TARGET_KIND_ENTER_DATA:
2928 	      stmt_name = "target enter data"; break;
2929 	    case GF_OMP_TARGET_KIND_EXIT_DATA:
2930 	      stmt_name = "target exit data"; break;
2931 	    case GF_OMP_TARGET_KIND_OACC_PARALLEL: stmt_name = "parallel"; break;
2932 	    case GF_OMP_TARGET_KIND_OACC_KERNELS: stmt_name = "kernels"; break;
2933 	    case GF_OMP_TARGET_KIND_OACC_DATA: stmt_name = "data"; break;
2934 	    case GF_OMP_TARGET_KIND_OACC_UPDATE: stmt_name = "update"; break;
2935 	    case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA:
2936 	      stmt_name = "enter/exit data"; break;
2937 	    case GF_OMP_TARGET_KIND_OACC_HOST_DATA: stmt_name = "host_data";
2938 	      break;
2939 	    default: gcc_unreachable ();
2940 	    }
2941 	  switch (gimple_omp_target_kind (ctx->stmt))
2942 	    {
2943 	    case GF_OMP_TARGET_KIND_REGION: ctx_stmt_name = "target"; break;
2944 	    case GF_OMP_TARGET_KIND_DATA: ctx_stmt_name = "target data"; break;
2945 	    case GF_OMP_TARGET_KIND_OACC_PARALLEL:
2946 	      ctx_stmt_name = "parallel"; break;
2947 	    case GF_OMP_TARGET_KIND_OACC_KERNELS:
2948 	      ctx_stmt_name = "kernels"; break;
2949 	    case GF_OMP_TARGET_KIND_OACC_DATA: ctx_stmt_name = "data"; break;
2950 	    case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
2951 	      ctx_stmt_name = "host_data"; break;
2952 	    default: gcc_unreachable ();
2953 	    }
2954 
2955 	  /* OpenACC/OpenMP mismatch?  */
2956 	  if (is_gimple_omp_oacc (stmt)
2957 	      != is_gimple_omp_oacc (ctx->stmt))
2958 	    {
2959 	      error_at (gimple_location (stmt),
2960 			"%s %qs construct inside of %s %qs region",
2961 			(is_gimple_omp_oacc (stmt)
2962 			 ? "OpenACC" : "OpenMP"), stmt_name,
2963 			(is_gimple_omp_oacc (ctx->stmt)
2964 			 ? "OpenACC" : "OpenMP"), ctx_stmt_name);
2965 	      return false;
2966 	    }
2967 	  if (is_gimple_omp_offloaded (ctx->stmt))
2968 	    {
2969 	      /* No GIMPLE_OMP_TARGET inside offloaded OpenACC CTX.  */
2970 	      if (is_gimple_omp_oacc (ctx->stmt))
2971 		{
2972 		  error_at (gimple_location (stmt),
2973 			    "%qs construct inside of %qs region",
2974 			    stmt_name, ctx_stmt_name);
2975 		  return false;
2976 		}
2977 	      else
2978 		{
2979 		  warning_at (gimple_location (stmt), 0,
2980 			      "%qs construct inside of %qs region",
2981 			      stmt_name, ctx_stmt_name);
2982 		}
2983 	    }
2984 	}
2985       break;
2986     default:
2987       break;
2988     }
2989   return true;
2990 }
2991 
2992 
2993 /* Helper function for scan_omp.
2994 
2995    Callback for walk_tree or operators in walk_gimple_stmt used to
2996    scan for OMP directives in TP.  */
2997 
2998 static tree
2999 scan_omp_1_op (tree *tp, int *walk_subtrees, void *data)
3000 {
3001   struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
3002   omp_context *ctx = (omp_context *) wi->info;
3003   tree t = *tp;
3004 
3005   switch (TREE_CODE (t))
3006     {
3007     case VAR_DECL:
3008     case PARM_DECL:
3009     case LABEL_DECL:
3010     case RESULT_DECL:
3011       if (ctx)
3012 	{
3013 	  tree repl = remap_decl (t, &ctx->cb);
3014 	  gcc_checking_assert (TREE_CODE (repl) != ERROR_MARK);
3015 	  *tp = repl;
3016 	}
3017       break;
3018 
3019     default:
3020       if (ctx && TYPE_P (t))
3021 	*tp = remap_type (t, &ctx->cb);
3022       else if (!DECL_P (t))
3023 	{
3024 	  *walk_subtrees = 1;
3025 	  if (ctx)
3026 	    {
3027 	      tree tem = remap_type (TREE_TYPE (t), &ctx->cb);
3028 	      if (tem != TREE_TYPE (t))
3029 		{
3030 		  if (TREE_CODE (t) == INTEGER_CST)
3031 		    *tp = wide_int_to_tree (tem, wi::to_wide (t));
3032 		  else
3033 		    TREE_TYPE (t) = tem;
3034 		}
3035 	    }
3036 	}
3037       break;
3038     }
3039 
3040   return NULL_TREE;
3041 }
3042 
3043 /* Return true if FNDECL is a setjmp or a longjmp.  */
3044 
3045 static bool
3046 setjmp_or_longjmp_p (const_tree fndecl)
3047 {
3048   if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
3049       && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_SETJMP
3050 	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_LONGJMP))
3051     return true;
3052 
3053   tree declname = DECL_NAME (fndecl);
3054   if (!declname)
3055     return false;
3056   const char *name = IDENTIFIER_POINTER (declname);
3057   return !strcmp (name, "setjmp") || !strcmp (name, "longjmp");
3058 }
3059 
3060 
3061 /* Helper function for scan_omp.
3062 
3063    Callback for walk_gimple_stmt used to scan for OMP directives in
3064    the current statement in GSI.  */
3065 
3066 static tree
3067 scan_omp_1_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
3068 		 struct walk_stmt_info *wi)
3069 {
3070   gimple *stmt = gsi_stmt (*gsi);
3071   omp_context *ctx = (omp_context *) wi->info;
3072 
3073   if (gimple_has_location (stmt))
3074     input_location = gimple_location (stmt);
3075 
3076   /* Check the nesting restrictions.  */
3077   bool remove = false;
3078   if (is_gimple_omp (stmt))
3079     remove = !check_omp_nesting_restrictions (stmt, ctx);
3080   else if (is_gimple_call (stmt))
3081     {
3082       tree fndecl = gimple_call_fndecl (stmt);
3083       if (fndecl)
3084 	{
3085 	  if (setjmp_or_longjmp_p (fndecl)
3086 	      && ctx
3087 	      && gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
3088 	      && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
3089 	    {
3090 	      remove = true;
3091 	      error_at (gimple_location (stmt),
3092 			"setjmp/longjmp inside simd construct");
3093 	    }
3094 	  else if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3095 	    switch (DECL_FUNCTION_CODE (fndecl))
3096 	      {
3097 	      case BUILT_IN_GOMP_BARRIER:
3098 	      case BUILT_IN_GOMP_CANCEL:
3099 	      case BUILT_IN_GOMP_CANCELLATION_POINT:
3100 	      case BUILT_IN_GOMP_TASKYIELD:
3101 	      case BUILT_IN_GOMP_TASKWAIT:
3102 	      case BUILT_IN_GOMP_TASKGROUP_START:
3103 	      case BUILT_IN_GOMP_TASKGROUP_END:
3104 		remove = !check_omp_nesting_restrictions (stmt, ctx);
3105 		break;
3106 	      default:
3107 		break;
3108 	      }
3109 	}
3110     }
3111   if (remove)
3112     {
3113       stmt = gimple_build_nop ();
3114       gsi_replace (gsi, stmt, false);
3115     }
3116 
3117   *handled_ops_p = true;
3118 
3119   switch (gimple_code (stmt))
3120     {
3121     case GIMPLE_OMP_PARALLEL:
3122       taskreg_nesting_level++;
3123       scan_omp_parallel (gsi, ctx);
3124       taskreg_nesting_level--;
3125       break;
3126 
3127     case GIMPLE_OMP_TASK:
3128       taskreg_nesting_level++;
3129       scan_omp_task (gsi, ctx);
3130       taskreg_nesting_level--;
3131       break;
3132 
3133     case GIMPLE_OMP_FOR:
3134       if (((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
3135 	    & GF_OMP_FOR_KIND_MASK) == GF_OMP_FOR_KIND_SIMD)
3136 	  && omp_maybe_offloaded_ctx (ctx)
3137 	  && omp_max_simt_vf ())
3138 	scan_omp_simd (gsi, as_a <gomp_for *> (stmt), ctx);
3139       else
3140 	scan_omp_for (as_a <gomp_for *> (stmt), ctx);
3141       break;
3142 
3143     case GIMPLE_OMP_SECTIONS:
3144       scan_omp_sections (as_a <gomp_sections *> (stmt), ctx);
3145       break;
3146 
3147     case GIMPLE_OMP_SINGLE:
3148       scan_omp_single (as_a <gomp_single *> (stmt), ctx);
3149       break;
3150 
3151     case GIMPLE_OMP_SECTION:
3152     case GIMPLE_OMP_MASTER:
3153     case GIMPLE_OMP_TASKGROUP:
3154     case GIMPLE_OMP_ORDERED:
3155     case GIMPLE_OMP_CRITICAL:
3156     case GIMPLE_OMP_GRID_BODY:
3157       ctx = new_omp_context (stmt, ctx);
3158       scan_omp (gimple_omp_body_ptr (stmt), ctx);
3159       break;
3160 
3161     case GIMPLE_OMP_TARGET:
3162       scan_omp_target (as_a <gomp_target *> (stmt), ctx);
3163       break;
3164 
3165     case GIMPLE_OMP_TEAMS:
3166       scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
3167       break;
3168 
3169     case GIMPLE_BIND:
3170       {
3171 	tree var;
3172 
3173 	*handled_ops_p = false;
3174 	if (ctx)
3175 	  for (var = gimple_bind_vars (as_a <gbind *> (stmt));
3176 	       var ;
3177 	       var = DECL_CHAIN (var))
3178 	    insert_decl_map (&ctx->cb, var, var);
3179       }
3180       break;
3181     default:
3182       *handled_ops_p = false;
3183       break;
3184     }
3185 
3186   return NULL_TREE;
3187 }
3188 
3189 
3190 /* Scan all the statements starting at the current statement.  CTX
3191    contains context information about the OMP directives and
3192    clauses found during the scan.  */
3193 
3194 static void
3195 scan_omp (gimple_seq *body_p, omp_context *ctx)
3196 {
3197   location_t saved_location;
3198   struct walk_stmt_info wi;
3199 
3200   memset (&wi, 0, sizeof (wi));
3201   wi.info = ctx;
3202   wi.want_locations = true;
3203 
3204   saved_location = input_location;
3205   walk_gimple_seq_mod (body_p, scan_omp_1_stmt, scan_omp_1_op, &wi);
3206   input_location = saved_location;
3207 }
3208 
3209 /* Re-gimplification and code generation routines.  */
3210 
3211 /* Remove omp_member_access_dummy_var variables from gimple_bind_vars
3212    of BIND if in a method.  */
3213 
3214 static void
3215 maybe_remove_omp_member_access_dummy_vars (gbind *bind)
3216 {
3217   if (DECL_ARGUMENTS (current_function_decl)
3218       && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
3219       && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
3220 	  == POINTER_TYPE))
3221     {
3222       tree vars = gimple_bind_vars (bind);
3223       for (tree *pvar = &vars; *pvar; )
3224 	if (omp_member_access_dummy_var (*pvar))
3225 	  *pvar = DECL_CHAIN (*pvar);
3226 	else
3227 	  pvar = &DECL_CHAIN (*pvar);
3228       gimple_bind_set_vars (bind, vars);
3229     }
3230 }
3231 
3232 /* Remove omp_member_access_dummy_var variables from BLOCK_VARS of
3233    block and its subblocks.  */
3234 
3235 static void
3236 remove_member_access_dummy_vars (tree block)
3237 {
3238   for (tree *pvar = &BLOCK_VARS (block); *pvar; )
3239     if (omp_member_access_dummy_var (*pvar))
3240       *pvar = DECL_CHAIN (*pvar);
3241     else
3242       pvar = &DECL_CHAIN (*pvar);
3243 
3244   for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
3245     remove_member_access_dummy_vars (block);
3246 }
3247 
3248 /* If a context was created for STMT when it was scanned, return it.  */
3249 
3250 static omp_context *
3251 maybe_lookup_ctx (gimple *stmt)
3252 {
3253   splay_tree_node n;
3254   n = splay_tree_lookup (all_contexts, (splay_tree_key) stmt);
3255   return n ? (omp_context *) n->value : NULL;
3256 }
3257 
3258 
3259 /* Find the mapping for DECL in CTX or the immediately enclosing
3260    context that has a mapping for DECL.
3261 
3262    If CTX is a nested parallel directive, we may have to use the decl
3263    mappings created in CTX's parent context.  Suppose that we have the
3264    following parallel nesting (variable UIDs showed for clarity):
3265 
3266 	iD.1562 = 0;
3267      	#omp parallel shared(iD.1562)		-> outer parallel
3268 	  iD.1562 = iD.1562 + 1;
3269 
3270 	  #omp parallel shared (iD.1562)	-> inner parallel
3271 	     iD.1562 = iD.1562 - 1;
3272 
3273    Each parallel structure will create a distinct .omp_data_s structure
3274    for copying iD.1562 in/out of the directive:
3275 
3276   	outer parallel		.omp_data_s.1.i -> iD.1562
3277 	inner parallel		.omp_data_s.2.i -> iD.1562
3278 
3279    A shared variable mapping will produce a copy-out operation before
3280    the parallel directive and a copy-in operation after it.  So, in
3281    this case we would have:
3282 
3283   	iD.1562 = 0;
3284 	.omp_data_o.1.i = iD.1562;
3285 	#omp parallel shared(iD.1562)		-> outer parallel
3286 	  .omp_data_i.1 = &.omp_data_o.1
3287 	  .omp_data_i.1->i = .omp_data_i.1->i + 1;
3288 
3289 	  .omp_data_o.2.i = iD.1562;		-> **
3290 	  #omp parallel shared(iD.1562)		-> inner parallel
3291 	    .omp_data_i.2 = &.omp_data_o.2
3292 	    .omp_data_i.2->i = .omp_data_i.2->i - 1;
3293 
3294 
3295     ** This is a problem.  The symbol iD.1562 cannot be referenced
3296        inside the body of the outer parallel region.  But since we are
3297        emitting this copy operation while expanding the inner parallel
3298        directive, we need to access the CTX structure of the outer
3299        parallel directive to get the correct mapping:
3300 
3301 	  .omp_data_o.2.i = .omp_data_i.1->i
3302 
3303     Since there may be other workshare or parallel directives enclosing
3304     the parallel directive, it may be necessary to walk up the context
3305     parent chain.  This is not a problem in general because nested
3306     parallelism happens only rarely.  */
3307 
3308 static tree
3309 lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
3310 {
3311   tree t;
3312   omp_context *up;
3313 
3314   for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
3315     t = maybe_lookup_decl (decl, up);
3316 
3317   gcc_assert (!ctx->is_nested || t || is_global_var (decl));
3318 
3319   return t ? t : decl;
3320 }
3321 
3322 
3323 /* Similar to lookup_decl_in_outer_ctx, but return DECL if not found
3324    in outer contexts.  */
3325 
3326 static tree
3327 maybe_lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
3328 {
3329   tree t = NULL;
3330   omp_context *up;
3331 
3332   for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
3333     t = maybe_lookup_decl (decl, up);
3334 
3335   return t ? t : decl;
3336 }
3337 
3338 
3339 /* Construct the initialization value for reduction operation OP.  */
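/* The values below are the usual reduction identities; e.g. for

     #pragma omp parallel for reduction(+:sum) reduction(max:m)

   each thread's private SUM starts at 0, and a floating-point M starts
   at -inf when infinities are honored (otherwise at the most negative
   finite value).  */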
3340 
3341 tree
3342 omp_reduction_init_op (location_t loc, enum tree_code op, tree type)
3343 {
3344   switch (op)
3345     {
3346     case PLUS_EXPR:
3347     case MINUS_EXPR:
3348     case BIT_IOR_EXPR:
3349     case BIT_XOR_EXPR:
3350     case TRUTH_OR_EXPR:
3351     case TRUTH_ORIF_EXPR:
3352     case TRUTH_XOR_EXPR:
3353     case NE_EXPR:
3354       return build_zero_cst (type);
3355 
3356     case MULT_EXPR:
3357     case TRUTH_AND_EXPR:
3358     case TRUTH_ANDIF_EXPR:
3359     case EQ_EXPR:
3360       return fold_convert_loc (loc, type, integer_one_node);
3361 
3362     case BIT_AND_EXPR:
3363       return fold_convert_loc (loc, type, integer_minus_one_node);
3364 
3365     case MAX_EXPR:
3366       if (SCALAR_FLOAT_TYPE_P (type))
3367 	{
3368 	  REAL_VALUE_TYPE max, min;
3369 	  if (HONOR_INFINITIES (type))
3370 	    {
3371 	      real_inf (&max);
3372 	      real_arithmetic (&min, NEGATE_EXPR, &max, NULL);
3373 	    }
3374 	  else
3375 	    real_maxval (&min, 1, TYPE_MODE (type));
3376 	  return build_real (type, min);
3377 	}
3378       else if (POINTER_TYPE_P (type))
3379 	{
3380 	  wide_int min
3381 	    = wi::min_value (TYPE_PRECISION (type), TYPE_SIGN (type));
3382 	  return wide_int_to_tree (type, min);
3383 	}
3384       else
3385 	{
3386 	  gcc_assert (INTEGRAL_TYPE_P (type));
3387 	  return TYPE_MIN_VALUE (type);
3388 	}
3389 
3390     case MIN_EXPR:
3391       if (SCALAR_FLOAT_TYPE_P (type))
3392 	{
3393 	  REAL_VALUE_TYPE max;
3394 	  if (HONOR_INFINITIES (type))
3395 	    real_inf (&max);
3396 	  else
3397 	    real_maxval (&max, 0, TYPE_MODE (type));
3398 	  return build_real (type, max);
3399 	}
3400       else if (POINTER_TYPE_P (type))
3401 	{
3402 	  wide_int max
3403 	    = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
3404 	  return wide_int_to_tree (type, max);
3405 	}
3406       else
3407 	{
3408 	  gcc_assert (INTEGRAL_TYPE_P (type));
3409 	  return TYPE_MAX_VALUE (type);
3410 	}
3411 
3412     default:
3413       gcc_unreachable ();
3414     }
3415 }
3416 
3417 /* Construct the initialization value for reduction CLAUSE.  */
3418 
3419 tree
3420 omp_reduction_init (tree clause, tree type)
3421 {
3422   return omp_reduction_init_op (OMP_CLAUSE_LOCATION (clause),
3423 				OMP_CLAUSE_REDUCTION_CODE (clause), type);
3424 }
3425 
3426 /* Return alignment to be assumed for var in CLAUSE, which should be
3427    OMP_CLAUSE_ALIGNED.  */
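/* For a bare "aligned (p)" clause with no explicit alignment, this
   roughly yields the alignment of the widest vector type the target
   would use for autovectorization, e.g. typically 32 on x86_64 with
   AVX enabled.  */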
3428 
3429 static tree
3430 omp_clause_aligned_alignment (tree clause)
3431 {
3432   if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
3433     return OMP_CLAUSE_ALIGNED_ALIGNMENT (clause);
3434 
3435   /* Otherwise return the implementation-defined alignment.  */
3436   unsigned int al = 1;
3437   opt_scalar_mode mode_iter;
3438   auto_vector_sizes sizes;
3439   targetm.vectorize.autovectorize_vector_sizes (&sizes);
3440   poly_uint64 vs = 0;
3441   for (unsigned int i = 0; i < sizes.length (); ++i)
3442     vs = ordered_max (vs, sizes[i]);
3443   static enum mode_class classes[]
3444     = { MODE_INT, MODE_VECTOR_INT, MODE_FLOAT, MODE_VECTOR_FLOAT };
3445   for (int i = 0; i < 4; i += 2)
3446     /* The for loop above dictates that we only walk through scalar classes.  */
3447     FOR_EACH_MODE_IN_CLASS (mode_iter, classes[i])
3448       {
3449 	scalar_mode mode = mode_iter.require ();
3450 	machine_mode vmode = targetm.vectorize.preferred_simd_mode (mode);
3451 	if (GET_MODE_CLASS (vmode) != classes[i + 1])
3452 	  continue;
3453 	while (maybe_ne (vs, 0U)
3454 	       && known_lt (GET_MODE_SIZE (vmode), vs)
3455 	       && GET_MODE_2XWIDER_MODE (vmode).exists ())
3456 	  vmode = GET_MODE_2XWIDER_MODE (vmode).require ();
3457 
3458 	tree type = lang_hooks.types.type_for_mode (mode, 1);
3459 	if (type == NULL_TREE || TYPE_MODE (type) != mode)
3460 	  continue;
3461 	poly_uint64 nelts = exact_div (GET_MODE_SIZE (vmode),
3462 				       GET_MODE_SIZE (mode));
3463 	type = build_vector_type (type, nelts);
3464 	if (TYPE_MODE (type) != vmode)
3465 	  continue;
3466 	if (TYPE_ALIGN_UNIT (type) > al)
3467 	  al = TYPE_ALIGN_UNIT (type);
3468       }
3469   return build_int_cst (integer_type_node, al);
3470 }
3471 
3472 
3473 /* This structure is part of the interface between lower_rec_simd_input_clauses
3474    and lower_rec_input_clauses.  */
3475 
3476 struct omplow_simd_context {
3477   omplow_simd_context () { memset (this, 0, sizeof (*this)); }
3478   tree idx;
3479   tree lane;
3480   vec<tree, va_heap> simt_eargs;
3481   gimple_seq simt_dlist;
3482   poly_uint64_pod max_vf;
3483   bool is_simt;
3484 };
3485 
3486 /* Helper function of lower_rec_input_clauses, used for #pragma omp simd
3487    privatization.  */
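/* Roughly, a privatized scalar D of type T used in a simd loop gets a
   backing "omp simd array"

       T D.arr[max_vf];

   loop-body uses of D are redirected to D.arr[idx], and D.arr[lane]
   denotes the lane-local value; see the ARRAY_REFs built below.  */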
3488 
3489 static bool
3490 lower_rec_simd_input_clauses (tree new_var, omp_context *ctx,
3491 			      omplow_simd_context *sctx, tree &ivar, tree &lvar)
3492 {
3493   if (known_eq (sctx->max_vf, 0U))
3494     {
3495       sctx->max_vf = sctx->is_simt ? omp_max_simt_vf () : omp_max_vf ();
3496       if (maybe_gt (sctx->max_vf, 1U))
3497 	{
3498 	  tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3499 				    OMP_CLAUSE_SAFELEN);
3500 	  if (c)
3501 	    {
3502 	      poly_uint64 safe_len;
3503 	      if (!poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
3504 		  || maybe_lt (safe_len, 1U))
3505 		sctx->max_vf = 1;
3506 	      else
3507 		sctx->max_vf = lower_bound (sctx->max_vf, safe_len);
3508 	    }
3509 	}
3510       if (maybe_gt (sctx->max_vf, 1U))
3511 	{
3512 	  sctx->idx = create_tmp_var (unsigned_type_node);
3513 	  sctx->lane = create_tmp_var (unsigned_type_node);
3514 	}
3515     }
3516   if (known_eq (sctx->max_vf, 1U))
3517     return false;
3518 
3519   if (sctx->is_simt)
3520     {
3521       if (is_gimple_reg (new_var))
3522 	{
3523 	  ivar = lvar = new_var;
3524 	  return true;
3525 	}
3526       tree type = TREE_TYPE (new_var), ptype = build_pointer_type (type);
3527       ivar = lvar = create_tmp_var (type);
3528       TREE_ADDRESSABLE (ivar) = 1;
3529       DECL_ATTRIBUTES (ivar) = tree_cons (get_identifier ("omp simt private"),
3530 					  NULL, DECL_ATTRIBUTES (ivar));
3531       sctx->simt_eargs.safe_push (build1 (ADDR_EXPR, ptype, ivar));
3532       tree clobber = build_constructor (type, NULL);
3533       TREE_THIS_VOLATILE (clobber) = 1;
3534       gimple *g = gimple_build_assign (ivar, clobber);
3535       gimple_seq_add_stmt (&sctx->simt_dlist, g);
3536     }
3537   else
3538     {
3539       tree atype = build_array_type_nelts (TREE_TYPE (new_var), sctx->max_vf);
3540       tree avar = create_tmp_var_raw (atype);
3541       if (TREE_ADDRESSABLE (new_var))
3542 	TREE_ADDRESSABLE (avar) = 1;
3543       DECL_ATTRIBUTES (avar)
3544 	= tree_cons (get_identifier ("omp simd array"), NULL,
3545 		     DECL_ATTRIBUTES (avar));
3546       gimple_add_tmp_var (avar);
3547       ivar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, sctx->idx,
3548 		     NULL_TREE, NULL_TREE);
3549       lvar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, sctx->lane,
3550 		     NULL_TREE, NULL_TREE);
3551     }
3552   if (DECL_P (new_var))
3553     {
3554       SET_DECL_VALUE_EXPR (new_var, lvar);
3555       DECL_HAS_VALUE_EXPR_P (new_var) = 1;
3556     }
3557   return true;
3558 }
3559 
3560 /* Helper function of lower_rec_input_clauses.  For a reference
3561    in a simd reduction, add an underlying variable it will reference.  */
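/* In effect, for a reference NEW_VARD whose pointee has constant size,
   this emits roughly

       T tmp;			/* addressable temporary  */
       new_vard = &tmp;

   so that later dereferences of NEW_VARD have storage to refer to.  */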
3562 
3563 static void
3564 handle_simd_reference (location_t loc, tree new_vard, gimple_seq *ilist)
3565 {
3566   tree z = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_vard)));
3567   if (TREE_CONSTANT (z))
3568     {
3569       z = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_vard)),
3570 			      get_name (new_vard));
3571       gimple_add_tmp_var (z);
3572       TREE_ADDRESSABLE (z) = 1;
3573       z = build_fold_addr_expr_loc (loc, z);
3574       gimplify_assign (new_vard, z, ilist);
3575     }
3576 }
3577 
3578 /* Generate code to implement the input clauses, FIRSTPRIVATE and COPYIN,
3579    from the receiver (aka child) side and initializers for REFERENCE_TYPE
3580    private variables.  Initialization statements go in ILIST, while calls
3581    to destructors go in DLIST.  */
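/* As a sketch, "firstprivate (x)" on a parallel is lowered here to
   roughly

       x.priv = .omp_data_i->x;	/* appended to ILIST  */

   while any destructor required for the privatized copy is appended
   to DLIST.  */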
3582 
3583 static void
3584 lower_rec_input_clauses (tree clauses, gimple_seq *ilist, gimple_seq *dlist,
3585 			 omp_context *ctx, struct omp_for_data *fd)
3586 {
3587   tree c, dtor, copyin_seq, x, ptr;
3588   bool copyin_by_ref = false;
3589   bool lastprivate_firstprivate = false;
3590   bool reduction_omp_orig_ref = false;
3591   int pass;
3592   bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
3593 		  && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD);
3594   omplow_simd_context sctx = omplow_simd_context ();
3595   tree simt_lane = NULL_TREE, simtrec = NULL_TREE;
3596   tree ivar = NULL_TREE, lvar = NULL_TREE, uid = NULL_TREE;
3597   gimple_seq llist[3] = { };
3598 
3599   copyin_seq = NULL;
3600   sctx.is_simt = is_simd && omp_find_clause (clauses, OMP_CLAUSE__SIMT_);
3601 
3602   /* Set max_vf=1 (which will later enforce safelen=1) in simd loops
3603      with data sharing clauses referencing variable sized vars.  That
3604      is unnecessarily hard to support and very unlikely to result in
3605      vectorized code anyway.  */
3606   if (is_simd)
3607     for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
3608       switch (OMP_CLAUSE_CODE (c))
3609 	{
3610 	case OMP_CLAUSE_LINEAR:
3611 	  if (OMP_CLAUSE_LINEAR_ARRAY (c))
3612 	    sctx.max_vf = 1;
3613 	  /* FALLTHRU */
3614 	case OMP_CLAUSE_PRIVATE:
3615 	case OMP_CLAUSE_FIRSTPRIVATE:
3616 	case OMP_CLAUSE_LASTPRIVATE:
3617 	  if (is_variable_sized (OMP_CLAUSE_DECL (c)))
3618 	    sctx.max_vf = 1;
3619 	  break;
3620 	case OMP_CLAUSE_REDUCTION:
3621 	  if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
3622 	      || is_variable_sized (OMP_CLAUSE_DECL (c)))
3623 	    sctx.max_vf = 1;
3624 	  break;
3625 	default:
3626 	  continue;
3627 	}
3628 
3629   /* Add a placeholder for simduid.  */
3630   if (sctx.is_simt && maybe_ne (sctx.max_vf, 1U))
3631     sctx.simt_eargs.safe_push (NULL_TREE);
3632 
3633   /* Do all the fixed sized types in the first pass, and the variable sized
3634      types in the second pass.  This makes sure that the scalar arguments to
3635      the variable sized types are processed before we use them in the
3636      variable sized operations.  */
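/* E.g. for a privatized VLA "int a[n]" together with firstprivate "n",
   pass 0 sets up the privatized "n" so that pass 1 can compute the
   allocation size of "a" from it.  */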
3637   for (pass = 0; pass < 2; ++pass)
3638     {
3639       for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
3640 	{
3641 	  enum omp_clause_code c_kind = OMP_CLAUSE_CODE (c);
3642 	  tree var, new_var;
3643 	  bool by_ref;
3644 	  location_t clause_loc = OMP_CLAUSE_LOCATION (c);
3645 
3646 	  switch (c_kind)
3647 	    {
3648 	    case OMP_CLAUSE_PRIVATE:
3649 	      if (OMP_CLAUSE_PRIVATE_DEBUG (c))
3650 		continue;
3651 	      break;
3652 	    case OMP_CLAUSE_SHARED:
3653 	      /* Ignore shared directives in teams construct.  */
3654 	      if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
3655 		continue;
3656 	      if (maybe_lookup_decl (OMP_CLAUSE_DECL (c), ctx) == NULL)
3657 		{
3658 		  gcc_assert (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c)
3659 			      || is_global_var (OMP_CLAUSE_DECL (c)));
3660 		  continue;
3661 		}
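	      /* FALLTHRU */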
3662 	    case OMP_CLAUSE_FIRSTPRIVATE:
3663 	    case OMP_CLAUSE_COPYIN:
3664 	      break;
3665 	    case OMP_CLAUSE_LINEAR:
3666 	      if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
3667 		  && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
3668 		lastprivate_firstprivate = true;
3669 	      break;
3670 	    case OMP_CLAUSE_REDUCTION:
3671 	      if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
3672 		reduction_omp_orig_ref = true;
3673 	      break;
3674 	    case OMP_CLAUSE__LOOPTEMP_:
3675 	      /* Handle _looptemp_ clauses only on parallel/task.  */
3676 	      if (fd)
3677 		continue;
3678 	      break;
3679 	    case OMP_CLAUSE_LASTPRIVATE:
3680 	      if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
3681 		{
3682 		  lastprivate_firstprivate = true;
3683 		  if (pass != 0 || is_taskloop_ctx (ctx))
3684 		    continue;
3685 		}
3686 	      /* Even without a corresponding firstprivate, if the
3687 		 decl is a Fortran allocatable, it needs an outer var
3688 		 reference.  */
3689 	      else if (pass == 0
3690 		       && lang_hooks.decls.omp_private_outer_ref
3691 							(OMP_CLAUSE_DECL (c)))
3692 		lastprivate_firstprivate = true;
3693 	      break;
3694 	    case OMP_CLAUSE_ALIGNED:
3695 	      if (pass == 0)
3696 		continue;
3697 	      var = OMP_CLAUSE_DECL (c);
3698 	      if (TREE_CODE (TREE_TYPE (var)) == POINTER_TYPE
3699 		  && !is_global_var (var))
3700 		{
3701 		  new_var = maybe_lookup_decl (var, ctx);
3702 		  if (new_var == NULL_TREE)
3703 		    new_var = maybe_lookup_decl_in_outer_ctx (var, ctx);
3704 		  x = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
3705 		  tree alarg = omp_clause_aligned_alignment (c);
3706 		  alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
3707 		  x = build_call_expr_loc (clause_loc, x, 2, new_var, alarg);
3708 		  x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
3709 		  x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
3710 		  gimplify_and_add (x, ilist);
3711 		}
3712 	      else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
3713 		       && is_global_var (var))
3714 		{
3715 		  tree ptype = build_pointer_type (TREE_TYPE (var)), t, t2;
3716 		  new_var = lookup_decl (var, ctx);
3717 		  t = maybe_lookup_decl_in_outer_ctx (var, ctx);
3718 		  t = build_fold_addr_expr_loc (clause_loc, t);
3719 		  t2 = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
3720 		  tree alarg = omp_clause_aligned_alignment (c);
3721 		  alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
3722 		  t = build_call_expr_loc (clause_loc, t2, 2, t, alarg);
3723 		  t = fold_convert_loc (clause_loc, ptype, t);
3724 		  x = create_tmp_var (ptype);
3725 		  t = build2 (MODIFY_EXPR, ptype, x, t);
3726 		  gimplify_and_add (t, ilist);
3727 		  t = build_simple_mem_ref_loc (clause_loc, x);
3728 		  SET_DECL_VALUE_EXPR (new_var, t);
3729 		  DECL_HAS_VALUE_EXPR_P (new_var) = 1;
3730 		}
3731 	      continue;
3732 	    default:
3733 	      continue;
3734 	    }
3735 
3736 	  new_var = var = OMP_CLAUSE_DECL (c);
3737 	  if (c_kind == OMP_CLAUSE_REDUCTION && TREE_CODE (var) == MEM_REF)
3738 	    {
3739 	      var = TREE_OPERAND (var, 0);
3740 	      if (TREE_CODE (var) == POINTER_PLUS_EXPR)
3741 		var = TREE_OPERAND (var, 0);
3742 	      if (TREE_CODE (var) == INDIRECT_REF
3743 		  || TREE_CODE (var) == ADDR_EXPR)
3744 		var = TREE_OPERAND (var, 0);
3745 	      if (is_variable_sized (var))
3746 		{
3747 		  gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
3748 		  var = DECL_VALUE_EXPR (var);
3749 		  gcc_assert (TREE_CODE (var) == INDIRECT_REF);
3750 		  var = TREE_OPERAND (var, 0);
3751 		  gcc_assert (DECL_P (var));
3752 		}
3753 	      new_var = var;
3754 	    }
3755 	  if (c_kind != OMP_CLAUSE_COPYIN)
3756 	    new_var = lookup_decl (var, ctx);
3757 
3758 	  if (c_kind == OMP_CLAUSE_SHARED || c_kind == OMP_CLAUSE_COPYIN)
3759 	    {
3760 	      if (pass != 0)
3761 		continue;
3762 	    }
3763 	  /* C/C++ array section reductions.  */
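	  /* As a sketch, for "reduction (+:a[0:n])" the code below obtains a
	     private buffer (a fixed-size temporary, or an alloca for variable
	     length), appends an initialization loop

		 for (i = 0; i <= v; i++)
		   *y1++ = init;

	     to ILIST, and for simd/placeholder cases appends a matching
	     combining loop over Y2/Y4 to DLIST.  */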
3764 	  else if (c_kind == OMP_CLAUSE_REDUCTION
3765 		   && var != OMP_CLAUSE_DECL (c))
3766 	    {
3767 	      if (pass == 0)
3768 		continue;
3769 
3770 	      tree bias = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
3771 	      tree orig_var = TREE_OPERAND (OMP_CLAUSE_DECL (c), 0);
3772 	      if (TREE_CODE (orig_var) == POINTER_PLUS_EXPR)
3773 		{
3774 		  tree b = TREE_OPERAND (orig_var, 1);
3775 		  b = maybe_lookup_decl (b, ctx);
3776 		  if (b == NULL)
3777 		    {
3778 		      b = TREE_OPERAND (orig_var, 1);
3779 		      b = maybe_lookup_decl_in_outer_ctx (b, ctx);
3780 		    }
3781 		  if (integer_zerop (bias))
3782 		    bias = b;
3783 		  else
3784 		    {
3785 		      bias = fold_convert_loc (clause_loc,
3786 					       TREE_TYPE (b), bias);
3787 		      bias = fold_build2_loc (clause_loc, PLUS_EXPR,
3788 					      TREE_TYPE (b), b, bias);
3789 		    }
3790 		  orig_var = TREE_OPERAND (orig_var, 0);
3791 		}
3792 	      if (TREE_CODE (orig_var) == INDIRECT_REF
3793 		  || TREE_CODE (orig_var) == ADDR_EXPR)
3794 		orig_var = TREE_OPERAND (orig_var, 0);
3795 	      tree d = OMP_CLAUSE_DECL (c);
3796 	      tree type = TREE_TYPE (d);
3797 	      gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
3798 	      tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
3799 	      const char *name = get_name (orig_var);
3800 	      if (TREE_CONSTANT (v))
3801 		{
3802 		  x = create_tmp_var_raw (type, name);
3803 		  gimple_add_tmp_var (x);
3804 		  TREE_ADDRESSABLE (x) = 1;
3805 		  x = build_fold_addr_expr_loc (clause_loc, x);
3806 		}
3807 	      else
3808 		{
3809 		  tree atmp
3810 		    = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
3811 		  tree t = maybe_lookup_decl (v, ctx);
3812 		  if (t)
3813 		    v = t;
3814 		  else
3815 		    v = maybe_lookup_decl_in_outer_ctx (v, ctx);
3816 		  gimplify_expr (&v, ilist, NULL, is_gimple_val, fb_rvalue);
3817 		  t = fold_build2_loc (clause_loc, PLUS_EXPR,
3818 				       TREE_TYPE (v), v,
3819 				       build_int_cst (TREE_TYPE (v), 1));
3820 		  t = fold_build2_loc (clause_loc, MULT_EXPR,
3821 				       TREE_TYPE (v), t,
3822 				       TYPE_SIZE_UNIT (TREE_TYPE (type)));
3823 		  tree al = size_int (TYPE_ALIGN (TREE_TYPE (type)));
3824 		  x = build_call_expr_loc (clause_loc, atmp, 2, t, al);
3825 		}
3826 
3827 	      tree ptype = build_pointer_type (TREE_TYPE (type));
3828 	      x = fold_convert_loc (clause_loc, ptype, x);
3829 	      tree y = create_tmp_var (ptype, name);
3830 	      gimplify_assign (y, x, ilist);
3831 	      x = y;
3832 	      tree yb = y;
3833 
3834 	      if (!integer_zerop (bias))
3835 		{
3836 		  bias = fold_convert_loc (clause_loc, pointer_sized_int_node,
3837 					   bias);
3838 		  yb = fold_convert_loc (clause_loc, pointer_sized_int_node,
3839 					 x);
3840 		  yb = fold_build2_loc (clause_loc, MINUS_EXPR,
3841 					pointer_sized_int_node, yb, bias);
3842 		  x = fold_convert_loc (clause_loc, TREE_TYPE (x), yb);
3843 		  yb = create_tmp_var (ptype, name);
3844 		  gimplify_assign (yb, x, ilist);
3845 		  x = yb;
3846 		}
3847 
3848 	      d = TREE_OPERAND (d, 0);
3849 	      if (TREE_CODE (d) == POINTER_PLUS_EXPR)
3850 		d = TREE_OPERAND (d, 0);
3851 	      if (TREE_CODE (d) == ADDR_EXPR)
3852 		{
3853 		  if (orig_var != var)
3854 		    {
3855 		      gcc_assert (is_variable_sized (orig_var));
3856 		      x = fold_convert_loc (clause_loc, TREE_TYPE (new_var),
3857 					    x);
3858 		      gimplify_assign (new_var, x, ilist);
3859 		      tree new_orig_var = lookup_decl (orig_var, ctx);
3860 		      tree t = build_fold_indirect_ref (new_var);
3861 		      DECL_IGNORED_P (new_var) = 0;
3862 		      TREE_THIS_NOTRAP (t) = 1;
3863 		      SET_DECL_VALUE_EXPR (new_orig_var, t);
3864 		      DECL_HAS_VALUE_EXPR_P (new_orig_var) = 1;
3865 		    }
3866 		  else
3867 		    {
3868 		      x = build2 (MEM_REF, TREE_TYPE (new_var), x,
3869 				  build_int_cst (ptype, 0));
3870 		      SET_DECL_VALUE_EXPR (new_var, x);
3871 		      DECL_HAS_VALUE_EXPR_P (new_var) = 1;
3872 		    }
3873 		}
3874 	      else
3875 		{
3876 		  gcc_assert (orig_var == var);
3877 		  if (TREE_CODE (d) == INDIRECT_REF)
3878 		    {
3879 		      x = create_tmp_var (ptype, name);
3880 		      TREE_ADDRESSABLE (x) = 1;
3881 		      gimplify_assign (x, yb, ilist);
3882 		      x = build_fold_addr_expr_loc (clause_loc, x);
3883 		    }
3884 		  x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
3885 		  gimplify_assign (new_var, x, ilist);
3886 		}
3887 	      tree y1 = create_tmp_var (ptype, NULL);
3888 	      gimplify_assign (y1, y, ilist);
3889 	      tree i2 = NULL_TREE, y2 = NULL_TREE;
3890 	      tree body2 = NULL_TREE, end2 = NULL_TREE;
3891 	      tree y3 = NULL_TREE, y4 = NULL_TREE;
3892 	      if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) || is_simd)
3893 		{
3894 		  y2 = create_tmp_var (ptype, NULL);
3895 		  gimplify_assign (y2, y, ilist);
3896 		  tree ref = build_outer_var_ref (var, ctx);
3897 		  /* For references, build_outer_var_ref already performs this.  */
3898 		  if (TREE_CODE (d) == INDIRECT_REF)
3899 		    gcc_assert (omp_is_reference (var));
3900 		  else if (TREE_CODE (d) == ADDR_EXPR)
3901 		    ref = build_fold_addr_expr (ref);
3902 		  else if (omp_is_reference (var))
3903 		    ref = build_fold_addr_expr (ref);
3904 		  ref = fold_convert_loc (clause_loc, ptype, ref);
3905 		  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
3906 		      && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
3907 		    {
3908 		      y3 = create_tmp_var (ptype, NULL);
3909 		      gimplify_assign (y3, unshare_expr (ref), ilist);
3910 		    }
3911 		  if (is_simd)
3912 		    {
3913 		      y4 = create_tmp_var (ptype, NULL);
3914 		      gimplify_assign (y4, ref, dlist);
3915 		    }
3916 		}
3917 	      tree i = create_tmp_var (TREE_TYPE (v), NULL);
3918 	      gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), ilist);
3919 	      tree body = create_artificial_label (UNKNOWN_LOCATION);
3920 	      tree end = create_artificial_label (UNKNOWN_LOCATION);
3921 	      gimple_seq_add_stmt (ilist, gimple_build_label (body));
3922 	      if (y2)
3923 		{
3924 		  i2 = create_tmp_var (TREE_TYPE (v), NULL);
3925 		  gimplify_assign (i2, build_int_cst (TREE_TYPE (v), 0), dlist);
3926 		  body2 = create_artificial_label (UNKNOWN_LOCATION);
3927 		  end2 = create_artificial_label (UNKNOWN_LOCATION);
3928 		  gimple_seq_add_stmt (dlist, gimple_build_label (body2));
3929 		}
3930 	      if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
3931 		{
3932 		  tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
3933 		  tree decl_placeholder
3934 		    = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
3935 		  SET_DECL_VALUE_EXPR (decl_placeholder,
3936 				       build_simple_mem_ref (y1));
3937 		  DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
3938 		  SET_DECL_VALUE_EXPR (placeholder,
3939 				       y3 ? build_simple_mem_ref (y3)
3940 				       : error_mark_node);
3941 		  DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
3942 		  x = lang_hooks.decls.omp_clause_default_ctor
3943 				(c, build_simple_mem_ref (y1),
3944 				 y3 ? build_simple_mem_ref (y3) : NULL_TREE);
3945 		  if (x)
3946 		    gimplify_and_add (x, ilist);
3947 		  if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
3948 		    {
3949 		      gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
3950 		      lower_omp (&tseq, ctx);
3951 		      gimple_seq_add_seq (ilist, tseq);
3952 		    }
3953 		  OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
3954 		  if (is_simd)
3955 		    {
3956 		      SET_DECL_VALUE_EXPR (decl_placeholder,
3957 					   build_simple_mem_ref (y2));
3958 		      SET_DECL_VALUE_EXPR (placeholder,
3959 					   build_simple_mem_ref (y4));
3960 		      gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
3961 		      lower_omp (&tseq, ctx);
3962 		      gimple_seq_add_seq (dlist, tseq);
3963 		      OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
3964 		    }
3965 		  DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
3966 		  DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 0;
3967 		  x = lang_hooks.decls.omp_clause_dtor
3968 					(c, build_simple_mem_ref (y2));
3969 		  if (x)
3970 		    {
3971 		      gimple_seq tseq = NULL;
3972 		      dtor = x;
3973 		      gimplify_stmt (&dtor, &tseq);
3974 		      gimple_seq_add_seq (dlist, tseq);
3975 		    }
3976 		}
3977 	      else
3978 		{
3979 		  x = omp_reduction_init (c, TREE_TYPE (type));
3980 		  enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
3981 
3982 		  /* reduction(-:var) sums up the partial results, so it
3983 		     acts identically to reduction(+:var).  */
3984 		  if (code == MINUS_EXPR)
3985 		    code = PLUS_EXPR;
3986 
3987 		  gimplify_assign (build_simple_mem_ref (y1), x, ilist);
3988 		  if (is_simd)
3989 		    {
3990 		      x = build2 (code, TREE_TYPE (type),
3991 				  build_simple_mem_ref (y4),
3992 				  build_simple_mem_ref (y2));
3993 		      gimplify_assign (build_simple_mem_ref (y4), x, dlist);
3994 		    }
3995 		}
3996 	      gimple *g
3997 		= gimple_build_assign (y1, POINTER_PLUS_EXPR, y1,
3998 				       TYPE_SIZE_UNIT (TREE_TYPE (type)));
3999 	      gimple_seq_add_stmt (ilist, g);
4000 	      if (y3)
4001 		{
4002 		  g = gimple_build_assign (y3, POINTER_PLUS_EXPR, y3,
4003 					   TYPE_SIZE_UNIT (TREE_TYPE (type)));
4004 		  gimple_seq_add_stmt (ilist, g);
4005 		}
4006 	      g = gimple_build_assign (i, PLUS_EXPR, i,
4007 				       build_int_cst (TREE_TYPE (i), 1));
4008 	      gimple_seq_add_stmt (ilist, g);
4009 	      g = gimple_build_cond (LE_EXPR, i, v, body, end);
4010 	      gimple_seq_add_stmt (ilist, g);
4011 	      gimple_seq_add_stmt (ilist, gimple_build_label (end));
4012 	      if (y2)
4013 		{
4014 		  g = gimple_build_assign (y2, POINTER_PLUS_EXPR, y2,
4015 					   TYPE_SIZE_UNIT (TREE_TYPE (type)));
4016 		  gimple_seq_add_stmt (dlist, g);
4017 		  if (y4)
4018 		    {
4019 		      g = gimple_build_assign
4020 					(y4, POINTER_PLUS_EXPR, y4,
4021 					 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4022 		      gimple_seq_add_stmt (dlist, g);
4023 		    }
4024 		  g = gimple_build_assign (i2, PLUS_EXPR, i2,
4025 					   build_int_cst (TREE_TYPE (i2), 1));
4026 		  gimple_seq_add_stmt (dlist, g);
4027 		  g = gimple_build_cond (LE_EXPR, i2, v, body2, end2);
4028 		  gimple_seq_add_stmt (dlist, g);
4029 		  gimple_seq_add_stmt (dlist, gimple_build_label (end2));
4030 		}
4031 	      continue;
4032 	    }
4033 	  else if (is_variable_sized (var))
4034 	    {
4035 	      /* For variable sized types, we need to allocate the
4036 		 actual storage here.  Call alloca and store the
4037 		 result in the pointer decl that we created elsewhere.  */
4038 	      if (pass == 0)
4039 		continue;
4040 
4041 	      if (c_kind != OMP_CLAUSE_FIRSTPRIVATE || !is_task_ctx (ctx))
4042 		{
4043 		  gcall *stmt;
4044 		  tree tmp, atmp;
4045 
4046 		  ptr = DECL_VALUE_EXPR (new_var);
4047 		  gcc_assert (TREE_CODE (ptr) == INDIRECT_REF);
4048 		  ptr = TREE_OPERAND (ptr, 0);
4049 		  gcc_assert (DECL_P (ptr));
4050 		  x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
4051 
4052 		  /* void *tmp = __builtin_alloca_with_align (size, align);  */
4053 		  atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
4054 		  stmt = gimple_build_call (atmp, 2, x,
4055 					    size_int (DECL_ALIGN (var)));
4056 		  tmp = create_tmp_var_raw (ptr_type_node);
4057 		  gimple_add_tmp_var (tmp);
4058 		  gimple_call_set_lhs (stmt, tmp);
4059 
4060 		  gimple_seq_add_stmt (ilist, stmt);
4061 
4062 		  x = fold_convert_loc (clause_loc, TREE_TYPE (ptr), tmp);
4063 		  gimplify_assign (ptr, x, ilist);
4064 		}
4065 	    }
4066 	  else if (omp_is_reference (var))
4067 	    {
4068 	      /* For references that are being privatized for Fortran,
4069 		 allocate new backing storage for the new pointer
4070 		 variable.  This lets us avoid rewriting all the code
4071 		 that expects a pointer so that it expects a direct
4072 		 variable instead.  */
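	      /* I.e., for a privatized reference R with constant-size
		 pointee T, this emits roughly

		     T tmp;		/* fresh backing storage  */
		     R = &tmp;

		 falling back to an alloca when the pointee size is not
		 constant.  */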
4073 	      if (pass == 0)
4074 		continue;
4075 
4076 	      x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
4077 	      if (c_kind == OMP_CLAUSE_FIRSTPRIVATE && is_task_ctx (ctx))
4078 		{
4079 		  x = build_receiver_ref (var, false, ctx);
4080 		  x = build_fold_addr_expr_loc (clause_loc, x);
4081 		}
4082 	      else if (TREE_CONSTANT (x))
4083 		{
4084 		  /* For reduction in SIMD loop, defer adding the
4085 		     initialization of the reference, because if we decide
4086 		     to use a SIMD array for it, the initialization could cause
4087 		     an expansion ICE.  */
4088 		  if (c_kind == OMP_CLAUSE_REDUCTION && is_simd)
4089 		    x = NULL_TREE;
4090 		  else
4091 		    {
4092 		      x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
4093 					      get_name (var));
4094 		      gimple_add_tmp_var (x);
4095 		      TREE_ADDRESSABLE (x) = 1;
4096 		      x = build_fold_addr_expr_loc (clause_loc, x);
4097 		    }
4098 		}
4099 	      else
4100 		{
4101 		  tree atmp
4102 		    = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
4103 		  tree rtype = TREE_TYPE (TREE_TYPE (new_var));
4104 		  tree al = size_int (TYPE_ALIGN (rtype));
4105 		  x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
4106 		}
4107 
4108 	      if (x)
4109 		{
4110 		  x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
4111 		  gimplify_assign (new_var, x, ilist);
4112 		}
4113 
4114 	      new_var = build_simple_mem_ref_loc (clause_loc, new_var);
4115 	    }
4116 	  else if (c_kind == OMP_CLAUSE_REDUCTION
4117 		   && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
4118 	    {
4119 	      if (pass == 0)
4120 		continue;
4121 	    }
4122 	  else if (pass != 0)
4123 	    continue;
4124 
4125 	  switch (OMP_CLAUSE_CODE (c))
4126 	    {
4127 	    case OMP_CLAUSE_SHARED:
4128 	      /* Ignore shared directives in teams construct.  */
4129 	      if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
4130 		continue;
4131 	      /* Shared global vars are just accessed directly.  */
4132 	      if (is_global_var (new_var))
4133 		break;
4134 	      /* For taskloop firstprivate/lastprivate, represented
4135 		 as firstprivate and shared clause on the task, new_var
4136 		 is the firstprivate var.  */
4137 	      if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
4138 		break;
4139 	      /* Set up the DECL_VALUE_EXPR for shared variables now.  This
4140 		 needs to be delayed until after fixup_child_record_type so
4141 		 that we get the correct type during the dereference.  */
4142 	      by_ref = use_pointer_for_field (var, ctx);
4143 	      x = build_receiver_ref (var, by_ref, ctx);
4144 	      SET_DECL_VALUE_EXPR (new_var, x);
4145 	      DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4146 
4147 	      /* ??? If VAR is not passed by reference, and the variable
4148 		 hasn't been initialized yet, then we'll get a warning for
4149 		 the store into the omp_data_s structure.  Ideally, we'd be
4150 		 able to notice this and not store anything at all, but
4151 		 we're generating code too early.  Suppress the warning.  */
4152 	      if (!by_ref)
4153 		TREE_NO_WARNING (var) = 1;
4154 	      break;
4155 
4156 	    case OMP_CLAUSE_LASTPRIVATE:
4157 	      if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
4158 		break;
4159 	      /* FALLTHRU */
4160 
4161 	    case OMP_CLAUSE_PRIVATE:
4162 	      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE)
4163 		x = build_outer_var_ref (var, ctx);
4164 	      else if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
4165 		{
4166 		  if (is_task_ctx (ctx))
4167 		    x = build_receiver_ref (var, false, ctx);
4168 		  else
4169 		    x = build_outer_var_ref (var, ctx, OMP_CLAUSE_PRIVATE);
4170 		}
4171 	      else
4172 		x = NULL;
4173 	    do_private:
4174 	      tree nx;
4175 	      nx = lang_hooks.decls.omp_clause_default_ctor
4176 						(c, unshare_expr (new_var), x);
4177 	      if (is_simd)
4178 		{
4179 		  tree y = lang_hooks.decls.omp_clause_dtor (c, new_var);
4180 		  if ((TREE_ADDRESSABLE (new_var) || nx || y
4181 		       || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
4182 		      && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4183 						       ivar, lvar))
4184 		    {
4185 		      if (nx)
4186 			x = lang_hooks.decls.omp_clause_default_ctor
4187 						(c, unshare_expr (ivar), x);
4188 		      if (nx && x)
4189 			gimplify_and_add (x, &llist[0]);
4190 		      if (y)
4191 			{
4192 			  y = lang_hooks.decls.omp_clause_dtor (c, ivar);
4193 			  if (y)
4194 			    {
4195 			      gimple_seq tseq = NULL;
4196 
4197 			      dtor = y;
4198 			      gimplify_stmt (&dtor, &tseq);
4199 			      gimple_seq_add_seq (&llist[1], tseq);
4200 			    }
4201 			}
4202 		      break;
4203 		    }
4204 		}
4205 	      if (nx)
4206 		gimplify_and_add (nx, ilist);
4207 	      /* FALLTHRU */
4208 
4209 	    do_dtor:
4210 	      x = lang_hooks.decls.omp_clause_dtor (c, new_var);
4211 	      if (x)
4212 		{
4213 		  gimple_seq tseq = NULL;
4214 
4215 		  dtor = x;
4216 		  gimplify_stmt (&dtor, &tseq);
4217 		  gimple_seq_add_seq (dlist, tseq);
4218 		}
4219 	      break;
4220 
4221 	    case OMP_CLAUSE_LINEAR:
4222 	      if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
4223 		goto do_firstprivate;
4224 	      if (OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
4225 		x = NULL;
4226 	      else
4227 		x = build_outer_var_ref (var, ctx);
4228 	      goto do_private;
4229 
4230 	    case OMP_CLAUSE_FIRSTPRIVATE:
4231 	      if (is_task_ctx (ctx))
4232 		{
4233 		  if (omp_is_reference (var) || is_variable_sized (var))
4234 		    goto do_dtor;
4235 		  else if (is_global_var (maybe_lookup_decl_in_outer_ctx (var,
4236 									  ctx))
4237 			   || use_pointer_for_field (var, NULL))
4238 		    {
4239 		      x = build_receiver_ref (var, false, ctx);
4240 		      SET_DECL_VALUE_EXPR (new_var, x);
4241 		      DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4242 		      goto do_dtor;
4243 		    }
4244 		}
4245 	    do_firstprivate:
4246 	      x = build_outer_var_ref (var, ctx);
4247 	      if (is_simd)
4248 		{
4249 		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
4250 		      && gimple_omp_for_combined_into_p (ctx->stmt))
4251 		    {
4252 		      tree t = OMP_CLAUSE_LINEAR_STEP (c);
4253 		      tree stept = TREE_TYPE (t);
4254 		      tree ct = omp_find_clause (clauses,
4255 						 OMP_CLAUSE__LOOPTEMP_);
4256 		      gcc_assert (ct);
4257 		      tree l = OMP_CLAUSE_DECL (ct);
4258 		      tree n1 = fd->loop.n1;
4259 		      tree step = fd->loop.step;
4260 		      tree itype = TREE_TYPE (l);
4261 		      if (POINTER_TYPE_P (itype))
4262 			itype = signed_type_for (itype);
4263 		      l = fold_build2 (MINUS_EXPR, itype, l, n1);
4264 		      if (TYPE_UNSIGNED (itype)
4265 			  && fd->loop.cond_code == GT_EXPR)
4266 			l = fold_build2 (TRUNC_DIV_EXPR, itype,
4267 					 fold_build1 (NEGATE_EXPR, itype, l),
4268 					 fold_build1 (NEGATE_EXPR,
4269 						      itype, step));
4270 		      else
4271 			l = fold_build2 (TRUNC_DIV_EXPR, itype, l, step);
4272 		      t = fold_build2 (MULT_EXPR, stept,
4273 				       fold_convert (stept, l), t);
4274 
4275 		      if (OMP_CLAUSE_LINEAR_ARRAY (c))
4276 			{
4277 			  x = lang_hooks.decls.omp_clause_linear_ctor
4278 							(c, new_var, x, t);
4279 			  gimplify_and_add (x, ilist);
4280 			  goto do_dtor;
4281 			}
4282 
4283 		      if (POINTER_TYPE_P (TREE_TYPE (x)))
4284 			x = fold_build2 (POINTER_PLUS_EXPR,
4285 					 TREE_TYPE (x), x, t);
4286 		      else
4287 			x = fold_build2 (PLUS_EXPR, TREE_TYPE (x), x, t);
4288 		    }
4289 
4290 		  if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_LINEAR
4291 		       || TREE_ADDRESSABLE (new_var))
4292 		      && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4293 						       ivar, lvar))
4294 		    {
4295 		      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR)
4296 			{
4297 			  tree iv = create_tmp_var (TREE_TYPE (new_var));
4298 			  x = lang_hooks.decls.omp_clause_copy_ctor (c, iv, x);
4299 			  gimplify_and_add (x, ilist);
4300 			  gimple_stmt_iterator gsi
4301 			    = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
4302 			  gassign *g
4303 			    = gimple_build_assign (unshare_expr (lvar), iv);
4304 			  gsi_insert_before_without_update (&gsi, g,
4305 							    GSI_SAME_STMT);
4306 			  tree t = OMP_CLAUSE_LINEAR_STEP (c);
4307 			  enum tree_code code = PLUS_EXPR;
4308 			  if (POINTER_TYPE_P (TREE_TYPE (new_var)))
4309 			    code = POINTER_PLUS_EXPR;
4310 			  g = gimple_build_assign (iv, code, iv, t);
4311 			  gsi_insert_before_without_update (&gsi, g,
4312 							    GSI_SAME_STMT);
4313 			  break;
4314 			}
4315 		      x = lang_hooks.decls.omp_clause_copy_ctor
4316 						(c, unshare_expr (ivar), x);
4317 		      gimplify_and_add (x, &llist[0]);
4318 		      x = lang_hooks.decls.omp_clause_dtor (c, ivar);
4319 		      if (x)
4320 			{
4321 			  gimple_seq tseq = NULL;
4322 
4323 			  dtor = x;
4324 			  gimplify_stmt (&dtor, &tseq);
4325 			  gimple_seq_add_seq (&llist[1], tseq);
4326 			}
4327 		      break;
4328 		    }
4329 		}
4330 	      x = lang_hooks.decls.omp_clause_copy_ctor
4331 						(c, unshare_expr (new_var), x);
4332 	      gimplify_and_add (x, ilist);
4333 	      goto do_dtor;
4334 
4335 	    case OMP_CLAUSE__LOOPTEMP_:
4336 	      gcc_assert (is_taskreg_ctx (ctx));
4337 	      x = build_outer_var_ref (var, ctx);
4338 	      x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
4339 	      gimplify_and_add (x, ilist);
4340 	      break;
4341 
4342 	    case OMP_CLAUSE_COPYIN:
4343 	      by_ref = use_pointer_for_field (var, NULL);
4344 	      x = build_receiver_ref (var, by_ref, ctx);
4345 	      x = lang_hooks.decls.omp_clause_assign_op (c, new_var, x);
4346 	      append_to_statement_list (x, &copyin_seq);
4347 	      copyin_by_ref |= by_ref;
4348 	      break;
4349 
4350 	    case OMP_CLAUSE_REDUCTION:
4351 	      /* OpenACC reductions are initialized using the
4352 		 GOACC_REDUCTION internal function.  */
4353 	      if (is_gimple_omp_oacc (ctx->stmt))
4354 		break;
4355 	      if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
4356 		{
4357 		  tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
4358 		  gimple *tseq;
4359 		  x = build_outer_var_ref (var, ctx);
4360 
4361 		  if (omp_is_reference (var)
4362 		      && !useless_type_conversion_p (TREE_TYPE (placeholder),
4363 						     TREE_TYPE (x)))
4364 		    x = build_fold_addr_expr_loc (clause_loc, x);
4365 		  SET_DECL_VALUE_EXPR (placeholder, x);
4366 		  DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
4367 		  tree new_vard = new_var;
4368 		  if (omp_is_reference (var))
4369 		    {
4370 		      gcc_assert (TREE_CODE (new_var) == MEM_REF);
4371 		      new_vard = TREE_OPERAND (new_var, 0);
4372 		      gcc_assert (DECL_P (new_vard));
4373 		    }
4374 		  if (is_simd
4375 		      && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4376 						       ivar, lvar))
4377 		    {
4378 		      if (new_vard == new_var)
4379 			{
4380 			  gcc_assert (DECL_VALUE_EXPR (new_var) == lvar);
4381 			  SET_DECL_VALUE_EXPR (new_var, ivar);
4382 			}
4383 		      else
4384 			{
4385 			  SET_DECL_VALUE_EXPR (new_vard,
4386 					       build_fold_addr_expr (ivar));
4387 			  DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
4388 			}
4389 		      x = lang_hooks.decls.omp_clause_default_ctor
4390 				(c, unshare_expr (ivar),
4391 				 build_outer_var_ref (var, ctx));
4392 		      if (x)
4393 			gimplify_and_add (x, &llist[0]);
4394 		      if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
4395 			{
4396 			  tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
4397 			  lower_omp (&tseq, ctx);
4398 			  gimple_seq_add_seq (&llist[0], tseq);
4399 			}
4400 		      OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
4401 		      tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
4402 		      lower_omp (&tseq, ctx);
4403 		      gimple_seq_add_seq (&llist[1], tseq);
4404 		      OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
4405 		      DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
4406 		      if (new_vard == new_var)
4407 			SET_DECL_VALUE_EXPR (new_var, lvar);
4408 		      else
4409 			SET_DECL_VALUE_EXPR (new_vard,
4410 					     build_fold_addr_expr (lvar));
4411 		      x = lang_hooks.decls.omp_clause_dtor (c, ivar);
4412 		      if (x)
4413 			{
4414 			  tseq = NULL;
4415 			  dtor = x;
4416 			  gimplify_stmt (&dtor, &tseq);
4417 			  gimple_seq_add_seq (&llist[1], tseq);
4418 			}
4419 		      break;
4420 		    }
4421 		  /* If this is a reference to a constant-size reduction var
4422 		     with placeholder, we haven't emitted the initializer
4423 		     for it because it is undesirable if SIMD arrays are used.
4424 		     But if they aren't used, we need to emit the deferred
4425 		     initialization now.  */
4426 		  else if (omp_is_reference (var) && is_simd)
4427 		    handle_simd_reference (clause_loc, new_vard, ilist);
4428 		  x = lang_hooks.decls.omp_clause_default_ctor
4429 				(c, unshare_expr (new_var),
4430 				 build_outer_var_ref (var, ctx));
4431 		  if (x)
4432 		    gimplify_and_add (x, ilist);
4433 		  if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
4434 		    {
4435 		      tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
4436 		      lower_omp (&tseq, ctx);
4437 		      gimple_seq_add_seq (ilist, tseq);
4438 		    }
4439 		  OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
4440 		  if (is_simd)
4441 		    {
4442 		      tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
4443 		      lower_omp (&tseq, ctx);
4444 		      gimple_seq_add_seq (dlist, tseq);
4445 		      OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
4446 		    }
4447 		  DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
4448 		  goto do_dtor;
4449 		}
4450 	      else
4451 		{
4452 		  x = omp_reduction_init (c, TREE_TYPE (new_var));
4453 		  gcc_assert (TREE_CODE (TREE_TYPE (new_var)) != ARRAY_TYPE);
4454 		  enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
4455 
4456 		  /* reduction(-:var) sums up the partial results, so it
4457 		     acts identically to reduction(+:var).  */
4458 		  if (code == MINUS_EXPR)
4459 		    code = PLUS_EXPR;
4460 
4461 		  tree new_vard = new_var;
4462 		  if (is_simd && omp_is_reference (var))
4463 		    {
4464 		      gcc_assert (TREE_CODE (new_var) == MEM_REF);
4465 		      new_vard = TREE_OPERAND (new_var, 0);
4466 		      gcc_assert (DECL_P (new_vard));
4467 		    }
4468 		  if (is_simd
4469 		      && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4470 						       ivar, lvar))
4471 		    {
4472 		      tree ref = build_outer_var_ref (var, ctx);
4473 
4474 		      gimplify_assign (unshare_expr (ivar), x, &llist[0]);
4475 
4476 		      if (sctx.is_simt)
4477 			{
4478 			  if (!simt_lane)
4479 			    simt_lane = create_tmp_var (unsigned_type_node);
4480 			  x = build_call_expr_internal_loc
4481 			    (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_BFLY,
4482 			     TREE_TYPE (ivar), 2, ivar, simt_lane);
4483 			  x = build2 (code, TREE_TYPE (ivar), ivar, x);
4484 			  gimplify_assign (ivar, x, &llist[2]);
4485 			}
4486 		      x = build2 (code, TREE_TYPE (ref), ref, ivar);
4487 		      ref = build_outer_var_ref (var, ctx);
4488 		      gimplify_assign (ref, x, &llist[1]);
4489 
4490 		      if (new_vard != new_var)
4491 			{
4492 			  SET_DECL_VALUE_EXPR (new_vard,
4493 					       build_fold_addr_expr (lvar));
4494 			  DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
4495 			}
4496 		    }
4497 		  else
4498 		    {
4499 		      if (omp_is_reference (var) && is_simd)
4500 			handle_simd_reference (clause_loc, new_vard, ilist);
4501 		      gimplify_assign (new_var, x, ilist);
4502 		      if (is_simd)
4503 			{
4504 			  tree ref = build_outer_var_ref (var, ctx);
4505 
4506 			  x = build2 (code, TREE_TYPE (ref), ref, new_var);
4507 			  ref = build_outer_var_ref (var, ctx);
4508 			  gimplify_assign (ref, x, dlist);
4509 			}
4510 		    }
4511 		}
4512 	      break;
4513 
4514 	    default:
4515 	      gcc_unreachable ();
4516 	    }
4517 	}
4518     }
4519 
4520   if (known_eq (sctx.max_vf, 1U))
4521     sctx.is_simt = false;
4522 
4523   if (sctx.lane || sctx.is_simt)
4524     {
4525       uid = create_tmp_var (ptr_type_node, "simduid");
4526       /* Don't want uninit warnings on simduid; it is always uninitialized,
4527 	 since we use it only for its DECL_UID, never for its value.  */
4528       TREE_NO_WARNING (uid) = 1;
4529       c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SIMDUID_);
4530       OMP_CLAUSE__SIMDUID__DECL (c) = uid;
4531       OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
4532       gimple_omp_for_set_clauses (ctx->stmt, c);
4533     }
4534   /* Emit calls denoting privatized variables and initializing a pointer to
4535      the structure that holds private variables as fields after ompdevlow.  */
4536   if (sctx.is_simt)
4537     {
4538       sctx.simt_eargs[0] = uid;
4539       gimple *g
4540 	= gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, sctx.simt_eargs);
4541       gimple_call_set_lhs (g, uid);
4542       gimple_seq_add_stmt (ilist, g);
4543       sctx.simt_eargs.release ();
4544 
4545       simtrec = create_tmp_var (ptr_type_node, ".omp_simt");
4546       g = gimple_build_call_internal (IFN_GOMP_SIMT_ENTER_ALLOC, 1, uid);
4547       gimple_call_set_lhs (g, simtrec);
4548       gimple_seq_add_stmt (ilist, g);
4549     }
4550   if (sctx.lane)
4551     {
4552       gimple *g
4553 	= gimple_build_call_internal (IFN_GOMP_SIMD_LANE, 1, uid);
4554       gimple_call_set_lhs (g, sctx.lane);
4555       gimple_stmt_iterator gsi = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
4556       gsi_insert_before_without_update (&gsi, g, GSI_SAME_STMT);
4557       g = gimple_build_assign (sctx.lane, INTEGER_CST,
4558 			       build_int_cst (unsigned_type_node, 0));
4559       gimple_seq_add_stmt (ilist, g);
4560       /* Emit reductions across SIMT lanes in log_2(simt_vf) steps.  */
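      /* That is, roughly a butterfly reduction

	     for (simt_lane = 1; simt_lane < simt_vf; simt_lane <<= 1)
	       ivar = ivar OP .GOMP_SIMT_XCHG_BFLY (ivar, simt_lane);

	 with the statements queued in LLIST[2] forming the loop body.  */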
4561       if (llist[2])
4562 	{
4563 	  tree simt_vf = create_tmp_var (unsigned_type_node);
4564 	  g = gimple_build_call_internal (IFN_GOMP_SIMT_VF, 0);
4565 	  gimple_call_set_lhs (g, simt_vf);
4566 	  gimple_seq_add_stmt (dlist, g);
4567 
4568 	  tree t = build_int_cst (unsigned_type_node, 1);
4569 	  g = gimple_build_assign (simt_lane, INTEGER_CST, t);
4570 	  gimple_seq_add_stmt (dlist, g);
4571 
4572 	  t = build_int_cst (unsigned_type_node, 0);
4573 	  g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
4574 	  gimple_seq_add_stmt (dlist, g);
4575 
4576 	  tree body = create_artificial_label (UNKNOWN_LOCATION);
4577 	  tree header = create_artificial_label (UNKNOWN_LOCATION);
4578 	  tree end = create_artificial_label (UNKNOWN_LOCATION);
4579 	  gimple_seq_add_stmt (dlist, gimple_build_goto (header));
4580 	  gimple_seq_add_stmt (dlist, gimple_build_label (body));
4581 
4582 	  gimple_seq_add_seq (dlist, llist[2]);
4583 
4584 	  g = gimple_build_assign (simt_lane, LSHIFT_EXPR, simt_lane, integer_one_node);
4585 	  gimple_seq_add_stmt (dlist, g);
4586 
4587 	  gimple_seq_add_stmt (dlist, gimple_build_label (header));
4588 	  g = gimple_build_cond (LT_EXPR, simt_lane, simt_vf, body, end);
4589 	  gimple_seq_add_stmt (dlist, g);
4590 
4591 	  gimple_seq_add_stmt (dlist, gimple_build_label (end));
4592 	}
4593       for (int i = 0; i < 2; i++)
4594 	if (llist[i])
4595 	  {
4596 	    tree vf = create_tmp_var (unsigned_type_node);
4597 	    g = gimple_build_call_internal (IFN_GOMP_SIMD_VF, 1, uid);
4598 	    gimple_call_set_lhs (g, vf);
4599 	    gimple_seq *seq = i == 0 ? ilist : dlist;
4600 	    gimple_seq_add_stmt (seq, g);
4601 	    tree t = build_int_cst (unsigned_type_node, 0);
4602 	    g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
4603 	    gimple_seq_add_stmt (seq, g);
4604 	    tree body = create_artificial_label (UNKNOWN_LOCATION);
4605 	    tree header = create_artificial_label (UNKNOWN_LOCATION);
4606 	    tree end = create_artificial_label (UNKNOWN_LOCATION);
4607 	    gimple_seq_add_stmt (seq, gimple_build_goto (header));
4608 	    gimple_seq_add_stmt (seq, gimple_build_label (body));
4609 	    gimple_seq_add_seq (seq, llist[i]);
4610 	    t = build_int_cst (unsigned_type_node, 1);
4611 	    g = gimple_build_assign (sctx.idx, PLUS_EXPR, sctx.idx, t);
4612 	    gimple_seq_add_stmt (seq, g);
4613 	    gimple_seq_add_stmt (seq, gimple_build_label (header));
4614 	    g = gimple_build_cond (LT_EXPR, sctx.idx, vf, body, end);
4615 	    gimple_seq_add_stmt (seq, g);
4616 	    gimple_seq_add_stmt (seq, gimple_build_label (end));
4617 	  }
4618     }
4619   if (sctx.is_simt)
4620     {
4621       gimple_seq_add_seq (dlist, sctx.simt_dlist);
4622       gimple *g
4623 	= gimple_build_call_internal (IFN_GOMP_SIMT_EXIT, 1, simtrec);
4624       gimple_seq_add_stmt (dlist, g);
4625     }
4626 
4627   /* The copyin sequence is not to be executed by the main thread, since
4628      that would result in self-copies.  Such a self-copy may be invisible
4629      for scalars, but it certainly is visible to C++ operator=.  */
4630   if (copyin_seq)
4631     {
4632       x = build_call_expr (builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM),
4633 			   0);
4634       x = build2 (NE_EXPR, boolean_type_node, x,
4635 		  build_int_cst (TREE_TYPE (x), 0));
4636       x = build3 (COND_EXPR, void_type_node, x, copyin_seq, NULL);
4637       gimplify_and_add (x, ilist);
4638     }
4639 
4640   /* If any copyin variable is passed by reference, we must ensure the
4641      master thread doesn't modify it before it is copied over in all
4642      threads.  Similarly for variables in both firstprivate and
4643      lastprivate clauses we need to ensure the lastprivate copying
4644      happens after firstprivate copying in all threads.  And similarly
4645      for UDRs whose initializer expression refers to omp_orig.  */
4646   if (copyin_by_ref || lastprivate_firstprivate || reduction_omp_orig_ref)
4647     {
4648       /* Don't add any barrier for #pragma omp simd or
4649 	 #pragma omp distribute.  */
4650       if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
4651 	  || gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_FOR)
4652 	gimple_seq_add_stmt (ilist, omp_build_barrier (NULL_TREE));
4653     }
4654 
4655   /* If max_vf is non-zero, then we can use only a vectorization factor
4656      up to the max_vf we chose.  So stick it into the safelen clause.  */
4657   if (maybe_ne (sctx.max_vf, 0U))
4658     {
4659       tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
4660 				OMP_CLAUSE_SAFELEN);
4661       poly_uint64 safe_len;
4662       if (c == NULL_TREE
4663 	  || (poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
4664 	      && maybe_gt (safe_len, sctx.max_vf)))
4665 	{
4666 	  c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_SAFELEN);
4667 	  OMP_CLAUSE_SAFELEN_EXPR (c) = build_int_cst (integer_type_node,
4668 						       sctx.max_vf);
4669 	  OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
4670 	  gimple_omp_for_set_clauses (ctx->stmt, c);
4671 	}
4672     }
4673 }
4674 
4675 
4676 /* Generate code to implement the LASTPRIVATE clauses.  This is used for
4677    both parallel and workshare constructs.  PREDICATE may be NULL if it's
4678    always true.  */
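/* In outline, for each lastprivate VAR this appends to STMT_LIST

       if (PREDICATE)		/* "last iteration" test, when given  */
	 VAR.orig = VAR.priv;

   after first selecting the last simd lane or SIMT lane where one of
   those is involved.  */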
4679 
4680 static void
4681 lower_lastprivate_clauses (tree clauses, tree predicate, gimple_seq *stmt_list,
4682 			   omp_context *ctx)
4683 {
4684   tree x, c, label = NULL, orig_clauses = clauses;
4685   bool par_clauses = false;
4686   tree simduid = NULL, lastlane = NULL, simtcond = NULL, simtlast = NULL;
4687 
4688   /* Early exit if there are no lastprivate or linear clauses.  */
4689   for (; clauses ; clauses = OMP_CLAUSE_CHAIN (clauses))
4690     if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LASTPRIVATE
4691 	|| (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LINEAR
4692 	    && !OMP_CLAUSE_LINEAR_NO_COPYOUT (clauses)))
4693       break;
4694   if (clauses == NULL)
4695     {
4696       /* If this was a workshare clause, see if it had been combined
4697 	 with its parallel.  In that case, look for the clauses on the
4698 	 parallel statement itself.  */
4699       if (is_parallel_ctx (ctx))
4700 	return;
4701 
4702       ctx = ctx->outer;
4703       if (ctx == NULL || !is_parallel_ctx (ctx))
4704 	return;
4705 
4706       clauses = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
4707 				 OMP_CLAUSE_LASTPRIVATE);
4708       if (clauses == NULL)
4709 	return;
4710       par_clauses = true;
4711     }
4712 
4713   bool maybe_simt = false;
4714   if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
4715       && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
4716     {
4717       maybe_simt = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMT_);
4718       simduid = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMDUID_);
4719       if (simduid)
4720 	simduid = OMP_CLAUSE__SIMDUID__DECL (simduid);
4721     }
4722 
4723   if (predicate)
4724     {
4725       gcond *stmt;
4726       tree label_true, arm1, arm2;
4727       enum tree_code pred_code = TREE_CODE (predicate);
4728 
4729       label = create_artificial_label (UNKNOWN_LOCATION);
4730       label_true = create_artificial_label (UNKNOWN_LOCATION);
4731       if (TREE_CODE_CLASS (pred_code) == tcc_comparison)
4732 	{
4733 	  arm1 = TREE_OPERAND (predicate, 0);
4734 	  arm2 = TREE_OPERAND (predicate, 1);
4735 	  gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
4736 	  gimplify_expr (&arm2, stmt_list, NULL, is_gimple_val, fb_rvalue);
4737 	}
4738       else
4739 	{
4740 	  arm1 = predicate;
4741 	  gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
4742 	  arm2 = boolean_false_node;
4743 	  pred_code = NE_EXPR;
4744 	}
4745       if (maybe_simt)
4746 	{
4747 	  c = build2 (pred_code, boolean_type_node, arm1, arm2);
4748 	  c = fold_convert (integer_type_node, c);
4749 	  simtcond = create_tmp_var (integer_type_node);
4750 	  gimplify_assign (simtcond, c, stmt_list);
4751 	  gcall *g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY,
4752 						 1, simtcond);
4753 	  c = create_tmp_var (integer_type_node);
4754 	  gimple_call_set_lhs (g, c);
4755 	  gimple_seq_add_stmt (stmt_list, g);
4756 	  stmt = gimple_build_cond (NE_EXPR, c, integer_zero_node,
4757 				    label_true, label);
4758 	}
4759       else
4760 	stmt = gimple_build_cond (pred_code, arm1, arm2, label_true, label);
4761       gimple_seq_add_stmt (stmt_list, stmt);
4762       gimple_seq_add_stmt (stmt_list, gimple_build_label (label_true));
4763     }
4764 
4765   for (c = clauses; c ;)
4766     {
4767       tree var, new_var;
4768       location_t clause_loc = OMP_CLAUSE_LOCATION (c);
4769 
4770       if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4771 	  || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
4772 	      && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
4773 	{
4774 	  var = OMP_CLAUSE_DECL (c);
4775 	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4776 	      && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
4777 	      && is_taskloop_ctx (ctx))
4778 	    {
4779 	      gcc_checking_assert (ctx->outer && is_task_ctx (ctx->outer));
4780 	      new_var = lookup_decl (var, ctx->outer);
4781 	    }
4782 	  else
4783 	    {
4784 	      new_var = lookup_decl (var, ctx);
4785 	      /* Avoid uninitialized warnings for lastprivate and
4786 		 for linear iterators.  */
4787 	      if (predicate
4788 		  && (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4789 		      || OMP_CLAUSE_LINEAR_NO_COPYIN (c)))
4790 		TREE_NO_WARNING (new_var) = 1;
4791 	    }
4792 
4793 	  if (!maybe_simt && simduid && DECL_HAS_VALUE_EXPR_P (new_var))
4794 	    {
4795 	      tree val = DECL_VALUE_EXPR (new_var);
4796 	      if (TREE_CODE (val) == ARRAY_REF
4797 		  && VAR_P (TREE_OPERAND (val, 0))
4798 		  && lookup_attribute ("omp simd array",
4799 				       DECL_ATTRIBUTES (TREE_OPERAND (val,
4800 								      0))))
4801 		{
4802 		  if (lastlane == NULL)
4803 		    {
4804 		      lastlane = create_tmp_var (unsigned_type_node);
4805 		      gcall *g
4806 			= gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
4807 						      2, simduid,
4808 						      TREE_OPERAND (val, 1));
4809 		      gimple_call_set_lhs (g, lastlane);
4810 		      gimple_seq_add_stmt (stmt_list, g);
4811 		    }
4812 		  new_var = build4 (ARRAY_REF, TREE_TYPE (val),
4813 				    TREE_OPERAND (val, 0), lastlane,
4814 				    NULL_TREE, NULL_TREE);
4815 		}
4816 	    }
4817 	  else if (maybe_simt)
4818 	    {
4819 	      tree val = (DECL_HAS_VALUE_EXPR_P (new_var)
4820 			  ? DECL_VALUE_EXPR (new_var)
4821 			  : new_var);
4822 	      if (simtlast == NULL)
4823 		{
4824 		  simtlast = create_tmp_var (unsigned_type_node);
4825 		  gcall *g = gimple_build_call_internal
4826 		    (IFN_GOMP_SIMT_LAST_LANE, 1, simtcond);
4827 		  gimple_call_set_lhs (g, simtlast);
4828 		  gimple_seq_add_stmt (stmt_list, g);
4829 		}
4830 	      x = build_call_expr_internal_loc
4831 		(UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_IDX,
4832 		 TREE_TYPE (val), 2, val, simtlast);
4833 	      new_var = unshare_expr (new_var);
4834 	      gimplify_assign (new_var, x, stmt_list);
4835 	      new_var = unshare_expr (new_var);
4836 	    }
4837 
4838 	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4839 	      && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
4840 	    {
4841 	      lower_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
4842 	      gimple_seq_add_seq (stmt_list,
4843 				  OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
4844 	      OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) = NULL;
4845 	    }
4846 	  else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
4847 		   && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
4848 	    {
4849 	      lower_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
4850 	      gimple_seq_add_seq (stmt_list,
4851 				  OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
4852 	      OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) = NULL;
4853 	    }
4854 
4855 	  x = NULL_TREE;
4856 	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4857 	      && OMP_CLAUSE_LASTPRIVATE_TASKLOOP_IV (c))
4858 	    {
4859 	      gcc_checking_assert (is_taskloop_ctx (ctx));
4860 	      tree ovar = maybe_lookup_decl_in_outer_ctx (var,
4861 							  ctx->outer->outer);
4862 	      if (is_global_var (ovar))
4863 		x = ovar;
4864 	    }
4865 	  if (!x)
4866 	    x = build_outer_var_ref (var, ctx, OMP_CLAUSE_LASTPRIVATE);
4867 	  if (omp_is_reference (var))
4868 	    new_var = build_simple_mem_ref_loc (clause_loc, new_var);
4869 	  x = lang_hooks.decls.omp_clause_assign_op (c, x, new_var);
4870 	  gimplify_and_add (x, stmt_list);
4871 	}
4872       c = OMP_CLAUSE_CHAIN (c);
4873       if (c == NULL && !par_clauses)
4874 	{
4875 	  /* If this was a workshare clause, see if it had been combined
4876 	     with its parallel.  In that case, continue looking for the
4877 	     clauses also on the parallel statement itself.  */
4878 	  if (is_parallel_ctx (ctx))
4879 	    break;
4880 
4881 	  ctx = ctx->outer;
4882 	  if (ctx == NULL || !is_parallel_ctx (ctx))
4883 	    break;
4884 
4885 	  c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
4886 			       OMP_CLAUSE_LASTPRIVATE);
4887 	  par_clauses = true;
4888 	}
4889     }
4890 
4891   if (label)
4892     gimple_seq_add_stmt (stmt_list, gimple_build_label (label));
4893 }
4894 
4895 /* Lower the OpenACC reductions of CLAUSES for compute axis LEVEL
4896    (which might be a placeholder).  INNER is true if this is an inner
4897    axis of a multi-axis loop.  FORK and JOIN are (optional) fork and
4898    join markers.  Generate the before-loop forking sequence in
4899 	   FORK_SEQ and the after-loop joining sequence in JOIN_SEQ.  The
4900    general form of these sequences is
4901 
4902      GOACC_REDUCTION_SETUP
4903      GOACC_FORK
4904      GOACC_REDUCTION_INIT
4905      ...
4906      GOACC_REDUCTION_FINI
4907      GOACC_JOIN
4908      GOACC_REDUCTION_TEARDOWN.  */
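
/* As an illustrative sketch (not the exact GIMPLE emitted), a gang
   loop with reduction (+:sum) becomes roughly

     v1 = GOACC_REDUCTION (SETUP, ref_to_res, sum, GOMP_DIM_GANG, +, off);
     GOACC_FORK
     v2 = GOACC_REDUCTION (INIT, ref_to_res, v1, GOMP_DIM_GANG, +, off);
     ... loop body updating the private copy ...
     v3 = GOACC_REDUCTION (FINI, ref_to_res, v2, GOMP_DIM_GANG, +, off);
     GOACC_JOIN
     sum = GOACC_REDUCTION (TEARDOWN, ref_to_res, v3, GOMP_DIM_GANG, +, off);

   where OFF is the variable's offset in the reduction buffer.  */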
4909 
4910 static void
4911 lower_oacc_reductions (location_t loc, tree clauses, tree level, bool inner,
4912 		       gcall *fork, gcall *join, gimple_seq *fork_seq,
4913 		       gimple_seq *join_seq, omp_context *ctx)
4914 {
4915   gimple_seq before_fork = NULL;
4916   gimple_seq after_fork = NULL;
4917   gimple_seq before_join = NULL;
4918   gimple_seq after_join = NULL;
4919   tree init_code = NULL_TREE, fini_code = NULL_TREE,
4920     setup_code = NULL_TREE, teardown_code = NULL_TREE;
4921   unsigned offset = 0;
4922 
4923   for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
4924     if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
4925       {
4926 	tree orig = OMP_CLAUSE_DECL (c);
4927 	tree var = maybe_lookup_decl (orig, ctx);
4928 	tree ref_to_res = NULL_TREE;
4929 	tree incoming, outgoing, v1, v2, v3;
4930 	bool is_private = false;
4931 
4932 	enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
4933 	if (rcode == MINUS_EXPR)
4934 	  rcode = PLUS_EXPR;
4935 	else if (rcode == TRUTH_ANDIF_EXPR)
4936 	  rcode = BIT_AND_EXPR;
4937 	else if (rcode == TRUTH_ORIF_EXPR)
4938 	  rcode = BIT_IOR_EXPR;
4939 	tree op = build_int_cst (unsigned_type_node, rcode);
4940 
4941 	if (!var)
4942 	  var = orig;
4943 
4944 	incoming = outgoing = var;
4945 
4946 	if (!inner)
4947 	  {
4948 	    /* See if an outer construct also reduces this variable.  */
4949 	    omp_context *outer = ctx;
4950 
4951 	    while (omp_context *probe = outer->outer)
4952 	      {
4953 		enum gimple_code type = gimple_code (probe->stmt);
4954 		tree cls;
4955 
4956 		switch (type)
4957 		  {
4958 		  case GIMPLE_OMP_FOR:
4959 		    cls = gimple_omp_for_clauses (probe->stmt);
4960 		    break;
4961 
4962 		  case GIMPLE_OMP_TARGET:
4963 		    if (gimple_omp_target_kind (probe->stmt)
4964 			!= GF_OMP_TARGET_KIND_OACC_PARALLEL)
4965 		      goto do_lookup;
4966 
4967 		    cls = gimple_omp_target_clauses (probe->stmt);
4968 		    break;
4969 
4970 		  default:
4971 		    goto do_lookup;
4972 		  }
4973 
4974 		outer = probe;
4975 		for (; cls;  cls = OMP_CLAUSE_CHAIN (cls))
4976 		  if (OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_REDUCTION
4977 		      && orig == OMP_CLAUSE_DECL (cls))
4978 		    {
4979 		      incoming = outgoing = lookup_decl (orig, probe);
4980 		      goto has_outer_reduction;
4981 		    }
4982 		  else if ((OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_FIRSTPRIVATE
4983 			    || OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_PRIVATE)
4984 			   && orig == OMP_CLAUSE_DECL (cls))
4985 		    {
4986 		      is_private = true;
4987 		      goto do_lookup;
4988 		    }
4989 	      }
4990 
4991 	  do_lookup:
4992 	    /* This is the outermost construct with this reduction,
4993 	       see if there's a mapping for it.  */
4994 	    if (gimple_code (outer->stmt) == GIMPLE_OMP_TARGET
4995 		&& maybe_lookup_field (orig, outer) && !is_private)
4996 	      {
4997 		ref_to_res = build_receiver_ref (orig, false, outer);
4998 		if (omp_is_reference (orig))
4999 		  ref_to_res = build_simple_mem_ref (ref_to_res);
5000 
5001 		tree type = TREE_TYPE (var);
5002 		if (POINTER_TYPE_P (type))
5003 		  type = TREE_TYPE (type);
5004 
5005 		outgoing = var;
5006 		incoming = omp_reduction_init_op (loc, rcode, type);
5007 	      }
5008 	    else
5009 	      {
5010 		/* Try to look at enclosing contexts for reduction var,
5011 		   use original if no mapping found.  */
5012 		tree t = NULL_TREE;
5013 		omp_context *c = ctx->outer;
5014 		while (c && !t)
5015 		  {
5016 		    t = maybe_lookup_decl (orig, c);
5017 		    c = c->outer;
5018 		  }
5019 		incoming = outgoing = (t ? t : orig);
5020 	      }
5021 
5022 	  has_outer_reduction:;
5023 	  }
5024 
5025 	if (!ref_to_res)
5026 	  ref_to_res = integer_zero_node;
5027 
5028 	if (omp_is_reference (orig))
5029 	  {
5030 	    tree type = TREE_TYPE (var);
5031 	    const char *id = IDENTIFIER_POINTER (DECL_NAME (var));
5032 
5033 	    if (!inner)
5034 	      {
5035 		tree x = create_tmp_var (TREE_TYPE (type), id);
5036 		gimplify_assign (var, build_fold_addr_expr (x), fork_seq);
5037 	      }
5038 
5039 	    v1 = create_tmp_var (type, id);
5040 	    v2 = create_tmp_var (type, id);
5041 	    v3 = create_tmp_var (type, id);
5042 
5043 	    gimplify_assign (v1, var, fork_seq);
5044 	    gimplify_assign (v2, var, fork_seq);
5045 	    gimplify_assign (v3, var, fork_seq);
5046 
5047 	    var = build_simple_mem_ref (var);
5048 	    v1 = build_simple_mem_ref (v1);
5049 	    v2 = build_simple_mem_ref (v2);
5050 	    v3 = build_simple_mem_ref (v3);
5051 	    outgoing = build_simple_mem_ref (outgoing);
5052 
5053 	    if (!TREE_CONSTANT (incoming))
5054 	      incoming = build_simple_mem_ref (incoming);
5055 	  }
5056 	else
5057 	  v1 = v2 = v3 = var;
5058 
5059 	/* Determine position in reduction buffer, which may be used
5060 	   by target.  The parser has ensured that this is not a
5061 	   variable-sized type.  */
5062 	fixed_size_mode mode
5063 	  = as_a <fixed_size_mode> (TYPE_MODE (TREE_TYPE (var)));
5064 	unsigned align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
5065 	offset = (offset + align - 1) & ~(align - 1);
5066 	tree off = build_int_cst (sizetype, offset);
5067 	offset += GET_MODE_SIZE (mode);
5068 
5069 	if (!init_code)
5070 	  {
5071 	    init_code = build_int_cst (integer_type_node,
5072 				       IFN_GOACC_REDUCTION_INIT);
5073 	    fini_code = build_int_cst (integer_type_node,
5074 				       IFN_GOACC_REDUCTION_FINI);
5075 	    setup_code = build_int_cst (integer_type_node,
5076 					IFN_GOACC_REDUCTION_SETUP);
5077 	    teardown_code = build_int_cst (integer_type_node,
5078 					   IFN_GOACC_REDUCTION_TEARDOWN);
5079 	  }
5080 
5081 	tree setup_call
5082 	  = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5083 					  TREE_TYPE (var), 6, setup_code,
5084 					  unshare_expr (ref_to_res),
5085 					  incoming, level, op, off);
5086 	tree init_call
5087 	  = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5088 					  TREE_TYPE (var), 6, init_code,
5089 					  unshare_expr (ref_to_res),
5090 					  v1, level, op, off);
5091 	tree fini_call
5092 	  = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5093 					  TREE_TYPE (var), 6, fini_code,
5094 					  unshare_expr (ref_to_res),
5095 					  v2, level, op, off);
5096 	tree teardown_call
5097 	  = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5098 					  TREE_TYPE (var), 6, teardown_code,
5099 					  ref_to_res, v3, level, op, off);
5100 
5101 	gimplify_assign (v1, setup_call, &before_fork);
5102 	gimplify_assign (v2, init_call, &after_fork);
5103 	gimplify_assign (v3, fini_call, &before_join);
5104 	gimplify_assign (outgoing, teardown_call, &after_join);
5105       }
5106 
5107   /* Now stitch things together.  */
5108   gimple_seq_add_seq (fork_seq, before_fork);
5109   if (fork)
5110     gimple_seq_add_stmt (fork_seq, fork);
5111   gimple_seq_add_seq (fork_seq, after_fork);
5112 
5113   gimple_seq_add_seq (join_seq, before_join);
5114   if (join)
5115     gimple_seq_add_stmt (join_seq, join);
5116   gimple_seq_add_seq (join_seq, after_join);
5117 }
5118 
5119 /* Generate code to implement the REDUCTION clauses.  */
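
/* As a sketch: with a single scalar reduction clause such as
   reduction (+:sum), the merge is emitted as one atomic update, roughly

     #pragma omp atomic
     sum_outer += sum_private;

   whereas multiple reductions, array sections and UDRs are merged
   inside a GOMP_atomic_start ()/GOMP_atomic_end () pair instead.  */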
5120 
5121 static void
5122 lower_reduction_clauses (tree clauses, gimple_seq *stmt_seqp, omp_context *ctx)
5123 {
5124   gimple_seq sub_seq = NULL;
5125   gimple *stmt;
5126   tree x, c;
5127   int count = 0;
5128 
5129   /* OpenACC loop reductions are handled elsewhere.  */
5130   if (is_gimple_omp_oacc (ctx->stmt))
5131     return;
5132 
5133   /* SIMD reductions are handled in lower_rec_input_clauses.  */
5134   if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
5135       && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
5136     return;
5137 
5138   /* First see if there is exactly one reduction clause.  Use OMP_ATOMIC
5139      update in that case, otherwise use a lock.  */
5140   for (c = clauses; c && count < 2; c = OMP_CLAUSE_CHAIN (c))
5141     if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
5142       {
5143 	if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
5144 	    || TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
5145 	  {
5146 	    /* Never use OMP_ATOMIC for array reductions or UDRs.  */
5147 	    count = -1;
5148 	    break;
5149 	  }
5150 	count++;
5151       }
5152 
5153   if (count == 0)
5154     return;
5155 
5156   for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
5157     {
5158       tree var, ref, new_var, orig_var;
5159       enum tree_code code;
5160       location_t clause_loc = OMP_CLAUSE_LOCATION (c);
5161 
5162       if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5163 	continue;
5164 
5165       enum omp_clause_code ccode = OMP_CLAUSE_REDUCTION;
5166       orig_var = var = OMP_CLAUSE_DECL (c);
5167       if (TREE_CODE (var) == MEM_REF)
5168 	{
5169 	  var = TREE_OPERAND (var, 0);
5170 	  if (TREE_CODE (var) == POINTER_PLUS_EXPR)
5171 	    var = TREE_OPERAND (var, 0);
5172 	  if (TREE_CODE (var) == ADDR_EXPR)
5173 	    var = TREE_OPERAND (var, 0);
5174 	  else
5175 	    {
5176 	      /* If this is a pointer- or reference-based array
5177 		 section, the var could be private in the outer
5178 		 context, e.g. on an orphaned loop construct.  Pretend
5179 		 this is the private variable's outer reference.  */
5180 	      ccode = OMP_CLAUSE_PRIVATE;
5181 	      if (TREE_CODE (var) == INDIRECT_REF)
5182 		var = TREE_OPERAND (var, 0);
5183 	    }
5184 	  orig_var = var;
5185 	  if (is_variable_sized (var))
5186 	    {
5187 	      gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
5188 	      var = DECL_VALUE_EXPR (var);
5189 	      gcc_assert (TREE_CODE (var) == INDIRECT_REF);
5190 	      var = TREE_OPERAND (var, 0);
5191 	      gcc_assert (DECL_P (var));
5192 	    }
5193 	}
5194       new_var = lookup_decl (var, ctx);
5195       if (var == OMP_CLAUSE_DECL (c) && omp_is_reference (var))
5196 	new_var = build_simple_mem_ref_loc (clause_loc, new_var);
5197       ref = build_outer_var_ref (var, ctx, ccode);
5198       code = OMP_CLAUSE_REDUCTION_CODE (c);
5199 
5200       /* reduction(-:var) sums up the partial results, so it acts
5201 	 identically to reduction(+:var).  */
5202       if (code == MINUS_EXPR)
5203         code = PLUS_EXPR;
5204 
5205       if (count == 1)
5206 	{
5207 	  tree addr = build_fold_addr_expr_loc (clause_loc, ref);
5208 
5209 	  addr = save_expr (addr);
5210 	  ref = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (addr)), addr);
5211 	  x = fold_build2_loc (clause_loc, code, TREE_TYPE (ref), ref, new_var);
5212 	  x = build2 (OMP_ATOMIC, void_type_node, addr, x);
5213 	  gimplify_and_add (x, stmt_seqp);
5214 	  return;
5215 	}
5216       else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
5217 	{
5218 	  tree d = OMP_CLAUSE_DECL (c);
5219 	  tree type = TREE_TYPE (d);
5220 	  tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
5221 	  tree i = create_tmp_var (TREE_TYPE (v), NULL);
5222 	  tree ptype = build_pointer_type (TREE_TYPE (type));
5223 	  tree bias = TREE_OPERAND (d, 1);
5224 	  d = TREE_OPERAND (d, 0);
5225 	  if (TREE_CODE (d) == POINTER_PLUS_EXPR)
5226 	    {
5227 	      tree b = TREE_OPERAND (d, 1);
5228 	      b = maybe_lookup_decl (b, ctx);
5229 	      if (b == NULL)
5230 		{
5231 		  b = TREE_OPERAND (d, 1);
5232 		  b = maybe_lookup_decl_in_outer_ctx (b, ctx);
5233 		}
5234 	      if (integer_zerop (bias))
5235 		bias = b;
5236 	      else
5237 		{
5238 		  bias = fold_convert_loc (clause_loc, TREE_TYPE (b), bias);
5239 		  bias = fold_build2_loc (clause_loc, PLUS_EXPR,
5240 					  TREE_TYPE (b), b, bias);
5241 		}
5242 	      d = TREE_OPERAND (d, 0);
5243 	    }
5244 	  /* For ref, build_outer_var_ref already performs this, so
5245 	     only new_var needs a dereference.  */
5246 	  if (TREE_CODE (d) == INDIRECT_REF)
5247 	    {
5248 	      new_var = build_simple_mem_ref_loc (clause_loc, new_var);
5249 	      gcc_assert (omp_is_reference (var) && var == orig_var);
5250 	    }
5251 	  else if (TREE_CODE (d) == ADDR_EXPR)
5252 	    {
5253 	      if (orig_var == var)
5254 		{
5255 		  new_var = build_fold_addr_expr (new_var);
5256 		  ref = build_fold_addr_expr (ref);
5257 		}
5258 	    }
5259 	  else
5260 	    {
5261 	      gcc_assert (orig_var == var);
5262 	      if (omp_is_reference (var))
5263 		ref = build_fold_addr_expr (ref);
5264 	    }
5265 	  if (DECL_P (v))
5266 	    {
5267 	      tree t = maybe_lookup_decl (v, ctx);
5268 	      if (t)
5269 		v = t;
5270 	      else
5271 		v = maybe_lookup_decl_in_outer_ctx (v, ctx);
5272 	      gimplify_expr (&v, stmt_seqp, NULL, is_gimple_val, fb_rvalue);
5273 	    }
5274 	  if (!integer_zerop (bias))
5275 	    {
5276 	      bias = fold_convert_loc (clause_loc, sizetype, bias);
5277 	      new_var = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
5278 					 TREE_TYPE (new_var), new_var,
5279 					 unshare_expr (bias));
5280 	      ref = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
5281 					 TREE_TYPE (ref), ref, bias);
5282 	    }
5283 	  new_var = fold_convert_loc (clause_loc, ptype, new_var);
5284 	  ref = fold_convert_loc (clause_loc, ptype, ref);
5285 	  tree m = create_tmp_var (ptype, NULL);
5286 	  gimplify_assign (m, new_var, stmt_seqp);
5287 	  new_var = m;
5288 	  m = create_tmp_var (ptype, NULL);
5289 	  gimplify_assign (m, ref, stmt_seqp);
5290 	  ref = m;
5291 	  gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), stmt_seqp);
5292 	  tree body = create_artificial_label (UNKNOWN_LOCATION);
5293 	  tree end = create_artificial_label (UNKNOWN_LOCATION);
5294 	  gimple_seq_add_stmt (&sub_seq, gimple_build_label (body));
5295 	  tree priv = build_simple_mem_ref_loc (clause_loc, new_var);
5296 	  tree out = build_simple_mem_ref_loc (clause_loc, ref);
5297 	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5298 	    {
5299 	      tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
5300 	      tree decl_placeholder
5301 		= OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
5302 	      SET_DECL_VALUE_EXPR (placeholder, out);
5303 	      DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
5304 	      SET_DECL_VALUE_EXPR (decl_placeholder, priv);
5305 	      DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
5306 	      lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
5307 	      gimple_seq_add_seq (&sub_seq,
5308 				  OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
5309 	      OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5310 	      OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
5311 	      OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
5312 	    }
5313 	  else
5314 	    {
5315 	      x = build2 (code, TREE_TYPE (out), out, priv);
5316 	      out = unshare_expr (out);
5317 	      gimplify_assign (out, x, &sub_seq);
5318 	    }
5319 	  gimple *g = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
5320 					   TYPE_SIZE_UNIT (TREE_TYPE (type)));
5321 	  gimple_seq_add_stmt (&sub_seq, g);
5322 	  g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
5323 				   TYPE_SIZE_UNIT (TREE_TYPE (type)));
5324 	  gimple_seq_add_stmt (&sub_seq, g);
5325 	  g = gimple_build_assign (i, PLUS_EXPR, i,
5326 				   build_int_cst (TREE_TYPE (i), 1));
5327 	  gimple_seq_add_stmt (&sub_seq, g);
5328 	  g = gimple_build_cond (LE_EXPR, i, v, body, end);
5329 	  gimple_seq_add_stmt (&sub_seq, g);
5330 	  gimple_seq_add_stmt (&sub_seq, gimple_build_label (end));
5331 	}
5332       else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5333 	{
5334 	  tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
5335 
5336 	  if (omp_is_reference (var)
5337 	      && !useless_type_conversion_p (TREE_TYPE (placeholder),
5338 					     TREE_TYPE (ref)))
5339 	    ref = build_fold_addr_expr_loc (clause_loc, ref);
5340 	  SET_DECL_VALUE_EXPR (placeholder, ref);
5341 	  DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
5342 	  lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
5343 	  gimple_seq_add_seq (&sub_seq, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
5344 	  OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5345 	  OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
5346 	}
5347       else
5348 	{
5349 	  x = build2 (code, TREE_TYPE (ref), ref, new_var);
5350 	  ref = build_outer_var_ref (var, ctx);
5351 	  gimplify_assign (ref, x, &sub_seq);
5352 	}
5353     }
5354 
5355   stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START),
5356 			    0);
5357   gimple_seq_add_stmt (stmt_seqp, stmt);
5358 
5359   gimple_seq_add_seq (stmt_seqp, sub_seq);
5360 
5361   stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END),
5362 			    0);
5363   gimple_seq_add_stmt (stmt_seqp, stmt);
5364 }
5365 
5366 
5367 /* Generate code to implement the COPYPRIVATE clauses.  */
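
/* E.g. for #pragma omp single copyprivate (a), roughly (a sketch; an
   extra indirection is used when A is passed by reference):

     SLIST (executing thread):  .omp_copy_o.a = a;
     RLIST (other threads):     a = .omp_copy_i->a;  */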
5368 
5369 static void
5370 lower_copyprivate_clauses (tree clauses, gimple_seq *slist, gimple_seq *rlist,
5371 			    omp_context *ctx)
5372 {
5373   tree c;
5374 
5375   for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
5376     {
5377       tree var, new_var, ref, x;
5378       bool by_ref;
5379       location_t clause_loc = OMP_CLAUSE_LOCATION (c);
5380 
5381       if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYPRIVATE)
5382 	continue;
5383 
5384       var = OMP_CLAUSE_DECL (c);
5385       by_ref = use_pointer_for_field (var, NULL);
5386 
5387       ref = build_sender_ref (var, ctx);
5388       x = new_var = lookup_decl_in_outer_ctx (var, ctx);
5389       if (by_ref)
5390 	{
5391 	  x = build_fold_addr_expr_loc (clause_loc, new_var);
5392 	  x = fold_convert_loc (clause_loc, TREE_TYPE (ref), x);
5393 	}
5394       gimplify_assign (ref, x, slist);
5395 
5396       ref = build_receiver_ref (var, false, ctx);
5397       if (by_ref)
5398 	{
5399 	  ref = fold_convert_loc (clause_loc,
5400 				  build_pointer_type (TREE_TYPE (new_var)),
5401 				  ref);
5402 	  ref = build_fold_indirect_ref_loc (clause_loc, ref);
5403 	}
5404       if (omp_is_reference (var))
5405 	{
5406 	  ref = fold_convert_loc (clause_loc, TREE_TYPE (new_var), ref);
5407 	  ref = build_simple_mem_ref_loc (clause_loc, ref);
5408 	  new_var = build_simple_mem_ref_loc (clause_loc, new_var);
5409 	}
5410       x = lang_hooks.decls.omp_clause_assign_op (c, new_var, ref);
5411       gimplify_and_add (x, rlist);
5412     }
5413 }
5414 
5415 
5416 /* Generate code to implement the clauses, FIRSTPRIVATE, COPYIN, LASTPRIVATE,
5417    and REDUCTION from the sender (aka parent) side.  */
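
/* E.g. for firstprivate (x) lastprivate (y) on a parallel, roughly
   (a sketch; by-reference fields store &VAR instead):

     ILIST:  .omp_data_o.x = x;
     OLIST:  y = .omp_data_o.y;  */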
5418 
5419 static void
5420 lower_send_clauses (tree clauses, gimple_seq *ilist, gimple_seq *olist,
5421     		    omp_context *ctx)
5422 {
5423   tree c, t;
5424   int ignored_looptemp = 0;
5425   bool is_taskloop = false;
5426 
5427   /* For taskloop, ignore the first two _looptemp_ clauses; those are
5428      initialized by GOMP_taskloop.  */
5429   if (is_task_ctx (ctx) && gimple_omp_task_taskloop_p (ctx->stmt))
5430     {
5431       ignored_looptemp = 2;
5432       is_taskloop = true;
5433     }
5434 
5435   for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
5436     {
5437       tree val, ref, x, var;
5438       bool by_ref, do_in = false, do_out = false;
5439       location_t clause_loc = OMP_CLAUSE_LOCATION (c);
5440 
5441       switch (OMP_CLAUSE_CODE (c))
5442 	{
5443 	case OMP_CLAUSE_PRIVATE:
5444 	  if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
5445 	    break;
5446 	  continue;
5447 	case OMP_CLAUSE_FIRSTPRIVATE:
5448 	case OMP_CLAUSE_COPYIN:
5449 	case OMP_CLAUSE_LASTPRIVATE:
5450 	case OMP_CLAUSE_REDUCTION:
5451 	  break;
5452 	case OMP_CLAUSE_SHARED:
5453 	  if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
5454 	    break;
5455 	  continue;
5456 	case OMP_CLAUSE__LOOPTEMP_:
5457 	  if (ignored_looptemp)
5458 	    {
5459 	      ignored_looptemp--;
5460 	      continue;
5461 	    }
5462 	  break;
5463 	default:
5464 	  continue;
5465 	}
5466 
5467       val = OMP_CLAUSE_DECL (c);
5468       if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
5469 	  && TREE_CODE (val) == MEM_REF)
5470 	{
5471 	  val = TREE_OPERAND (val, 0);
5472 	  if (TREE_CODE (val) == POINTER_PLUS_EXPR)
5473 	    val = TREE_OPERAND (val, 0);
5474 	  if (TREE_CODE (val) == INDIRECT_REF
5475 	      || TREE_CODE (val) == ADDR_EXPR)
5476 	    val = TREE_OPERAND (val, 0);
5477 	  if (is_variable_sized (val))
5478 	    continue;
5479 	}
5480 
5481       /* For OMP_CLAUSE_SHARED_FIRSTPRIVATE, look beyond the
5482 	 outer taskloop region.  */
5483       omp_context *ctx_for_o = ctx;
5484       if (is_taskloop
5485 	  && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
5486 	  && OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
5487 	ctx_for_o = ctx->outer;
5488 
5489       var = lookup_decl_in_outer_ctx (val, ctx_for_o);
5490 
5491       if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYIN
5492 	  && is_global_var (var))
5493 	continue;
5494 
5495       t = omp_member_access_dummy_var (var);
5496       if (t)
5497 	{
5498 	  var = DECL_VALUE_EXPR (var);
5499 	  tree o = maybe_lookup_decl_in_outer_ctx (t, ctx_for_o);
5500 	  if (o != t)
5501 	    var = unshare_and_remap (var, t, o);
5502 	  else
5503 	    var = unshare_expr (var);
5504 	}
5505 
5506       if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
5507 	{
5508 	  /* Handle taskloop firstprivate/lastprivate, where the
5509 	     lastprivate on GIMPLE_OMP_TASK is represented as
5510 	     OMP_CLAUSE_SHARED_FIRSTPRIVATE.  */
5511 	  tree f = lookup_sfield ((splay_tree_key) &DECL_UID (val), ctx);
5512 	  x = omp_build_component_ref (ctx->sender_decl, f);
5513 	  if (use_pointer_for_field (val, ctx))
5514 	    var = build_fold_addr_expr (var);
5515 	  gimplify_assign (x, var, ilist);
5516 	  DECL_ABSTRACT_ORIGIN (f) = NULL;
5517 	  continue;
5518 	}
5519 
5520       if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
5521 	   || val == OMP_CLAUSE_DECL (c))
5522 	  && is_variable_sized (val))
5523 	continue;
5524       by_ref = use_pointer_for_field (val, NULL);
5525 
5526       switch (OMP_CLAUSE_CODE (c))
5527 	{
5528 	case OMP_CLAUSE_FIRSTPRIVATE:
5529 	  if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c)
5530 	      && !by_ref
5531 	      && is_task_ctx (ctx))
5532 	    TREE_NO_WARNING (var) = 1;
5533 	  do_in = true;
5534 	  break;
5535 
5536 	case OMP_CLAUSE_PRIVATE:
5537 	case OMP_CLAUSE_COPYIN:
5538 	case OMP_CLAUSE__LOOPTEMP_:
5539 	  do_in = true;
5540 	  break;
5541 
5542 	case OMP_CLAUSE_LASTPRIVATE:
5543 	  if (by_ref || omp_is_reference (val))
5544 	    {
5545 	      if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
5546 		continue;
5547 	      do_in = true;
5548 	    }
5549 	  else
5550 	    {
5551 	      do_out = true;
5552 	      if (lang_hooks.decls.omp_private_outer_ref (val))
5553 		do_in = true;
5554 	    }
5555 	  break;
5556 
5557 	case OMP_CLAUSE_REDUCTION:
5558 	  do_in = true;
5559 	  if (val == OMP_CLAUSE_DECL (c))
5560 	    do_out = !(by_ref || omp_is_reference (val));
5561 	  else
5562 	    by_ref = TREE_CODE (TREE_TYPE (val)) == ARRAY_TYPE;
5563 	  break;
5564 
5565 	default:
5566 	  gcc_unreachable ();
5567 	}
5568 
5569       if (do_in)
5570 	{
5571 	  ref = build_sender_ref (val, ctx);
5572 	  x = by_ref ? build_fold_addr_expr_loc (clause_loc, var) : var;
5573 	  gimplify_assign (ref, x, ilist);
5574 	  if (is_task_ctx (ctx))
5575 	    DECL_ABSTRACT_ORIGIN (TREE_OPERAND (ref, 1)) = NULL;
5576 	}
5577 
5578       if (do_out)
5579 	{
5580 	  ref = build_sender_ref (val, ctx);
5581 	  gimplify_assign (var, ref, olist);
5582 	}
5583     }
5584 }
5585 
5586 /* Generate code to implement SHARED from the sender (aka parent)
5587    side.  This is trickier, since GIMPLE_OMP_PARALLEL_CLAUSES doesn't
5588    list things that got automatically shared.  */
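
/* E.g. for a local VAR that became implicitly shared, roughly
   (a sketch):

     ILIST:  .omp_data_o.var = var;   (or = &var if passed by pointer)
     OLIST:  var = .omp_data_o.var;   (skipped for read-only vars)

   iterating over the fields of the record type rather than over a
   clause list.  */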
5589 
5590 static void
5591 lower_send_shared_vars (gimple_seq *ilist, gimple_seq *olist, omp_context *ctx)
5592 {
5593   tree var, ovar, nvar, t, f, x, record_type;
5594 
5595   if (ctx->record_type == NULL)
5596     return;
5597 
5598   record_type = ctx->srecord_type ? ctx->srecord_type : ctx->record_type;
5599   for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
5600     {
5601       ovar = DECL_ABSTRACT_ORIGIN (f);
5602       if (!ovar || TREE_CODE (ovar) == FIELD_DECL)
5603 	continue;
5604 
5605       nvar = maybe_lookup_decl (ovar, ctx);
5606       if (!nvar || !DECL_HAS_VALUE_EXPR_P (nvar))
5607 	continue;
5608 
5609       /* If CTX is a nested parallel directive, find the immediately
5610 	 enclosing parallel or workshare construct that contains a
5611 	 mapping for OVAR.  */
5612       var = lookup_decl_in_outer_ctx (ovar, ctx);
5613 
5614       t = omp_member_access_dummy_var (var);
5615       if (t)
5616 	{
5617 	  var = DECL_VALUE_EXPR (var);
5618 	  tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
5619 	  if (o != t)
5620 	    var = unshare_and_remap (var, t, o);
5621 	  else
5622 	    var = unshare_expr (var);
5623 	}
5624 
5625       if (use_pointer_for_field (ovar, ctx))
5626 	{
5627 	  x = build_sender_ref (ovar, ctx);
5628 	  var = build_fold_addr_expr (var);
5629 	  gimplify_assign (x, var, ilist);
5630 	}
5631       else
5632 	{
5633 	  x = build_sender_ref (ovar, ctx);
5634 	  gimplify_assign (x, var, ilist);
5635 
5636 	  if (!TREE_READONLY (var)
5637 	      /* We don't need to receive a new reference to a result
5638 	         or parm decl.  In fact we may not store to it as we will
5639 		 invalidate any pending RSO and generate wrong gimple
5640 		 during inlining.  */
5641 	      && !((TREE_CODE (var) == RESULT_DECL
5642 		    || TREE_CODE (var) == PARM_DECL)
5643 		   && DECL_BY_REFERENCE (var)))
5644 	    {
5645 	      x = build_sender_ref (ovar, ctx);
5646 	      gimplify_assign (var, x, olist);
5647 	    }
5648 	}
5649     }
5650 }
5651 
5652 /* Emit an OpenACC head marker call, encapsulating the partitioning and
5653    other information that must be processed by the target compiler.
5654    Return the maximum number of dimensions the associated loop might
5655    be partitioned over.  */
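
/* The marker is an IFN_UNIQUE call threaded through DDVAR; e.g. for
   "#pragma acc loop gang vector" the emitted call is roughly (a sketch)

     .data_dep = UNIQUE (OACC_HEAD_MARK, .data_dep, 2, tag);

   where 2 is the number of partitioned levels and TAG encodes the
   OLF_* flags.  */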
5656 
5657 static unsigned
5658 lower_oacc_head_mark (location_t loc, tree ddvar, tree clauses,
5659 		      gimple_seq *seq, omp_context *ctx)
5660 {
5661   unsigned levels = 0;
5662   unsigned tag = 0;
5663   tree gang_static = NULL_TREE;
5664   auto_vec<tree, 5> args;
5665 
5666   args.quick_push (build_int_cst
5667 		   (integer_type_node, IFN_UNIQUE_OACC_HEAD_MARK));
5668   args.quick_push (ddvar);
5669   for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
5670     {
5671       switch (OMP_CLAUSE_CODE (c))
5672 	{
5673 	case OMP_CLAUSE_GANG:
5674 	  tag |= OLF_DIM_GANG;
5675 	  gang_static = OMP_CLAUSE_GANG_STATIC_EXPR (c);
5676 	  /* static:* is represented by -1, and we can ignore it, as
5677 	     scheduling is always static.  */
5678 	  if (gang_static && integer_minus_onep (gang_static))
5679 	    gang_static = NULL_TREE;
5680 	  levels++;
5681 	  break;
5682 
5683 	case OMP_CLAUSE_WORKER:
5684 	  tag |= OLF_DIM_WORKER;
5685 	  levels++;
5686 	  break;
5687 
5688 	case OMP_CLAUSE_VECTOR:
5689 	  tag |= OLF_DIM_VECTOR;
5690 	  levels++;
5691 	  break;
5692 
5693 	case OMP_CLAUSE_SEQ:
5694 	  tag |= OLF_SEQ;
5695 	  break;
5696 
5697 	case OMP_CLAUSE_AUTO:
5698 	  tag |= OLF_AUTO;
5699 	  break;
5700 
5701 	case OMP_CLAUSE_INDEPENDENT:
5702 	  tag |= OLF_INDEPENDENT;
5703 	  break;
5704 
5705 	case OMP_CLAUSE_TILE:
5706 	  tag |= OLF_TILE;
5707 	  break;
5708 
5709 	default:
5710 	  continue;
5711 	}
5712     }
5713 
5714   if (gang_static)
5715     {
5716       if (DECL_P (gang_static))
5717 	gang_static = build_outer_var_ref (gang_static, ctx);
5718       tag |= OLF_GANG_STATIC;
5719     }
5720 
5721   /* In a parallel region, loops are implicitly INDEPENDENT.  */
5722   omp_context *tgt = enclosing_target_ctx (ctx);
5723   if (!tgt || is_oacc_parallel (tgt))
5724     tag |= OLF_INDEPENDENT;
5725 
5726   if (tag & OLF_TILE)
5727     /* Tiling could use all 3 levels.  */
5728     levels = 3;
5729   else
5730     {
5731       /* A loop lacking SEQ, GANG, WORKER and/or VECTOR could be AUTO.
5732 	 Ensure at least one level, or 2 for possible auto
5733 	 partitioning.  */
5734       bool maybe_auto = !(tag & (((GOMP_DIM_MASK (GOMP_DIM_MAX) - 1)
5735 				  << OLF_DIM_BASE) | OLF_SEQ));
5736 
5737       if (levels < 1u + maybe_auto)
5738 	levels = 1u + maybe_auto;
5739     }
5740 
5741   args.quick_push (build_int_cst (integer_type_node, levels));
5742   args.quick_push (build_int_cst (integer_type_node, tag));
5743   if (gang_static)
5744     args.quick_push (gang_static);
5745 
5746   gcall *call = gimple_build_call_internal_vec (IFN_UNIQUE, args);
5747   gimple_set_location (call, loc);
5748   gimple_set_lhs (call, ddvar);
5749   gimple_seq_add_stmt (seq, call);
5750 
5751   return levels;
5752 }
5753 
5754 /* Emit an OpenACC loop head or tail marker to SEQ.  TOFOLLOW, if
5755    non-NULL, is the partitioning level of the enclosed region.  */
5756 
5757 static void
5758 lower_oacc_loop_marker (location_t loc, tree ddvar, bool head,
5759 			tree tofollow, gimple_seq *seq)
5760 {
5761   int marker_kind = (head ? IFN_UNIQUE_OACC_HEAD_MARK
5762 		     : IFN_UNIQUE_OACC_TAIL_MARK);
5763   tree marker = build_int_cst (integer_type_node, marker_kind);
5764   int nargs = 2 + (tofollow != NULL_TREE);
5765   gcall *call = gimple_build_call_internal (IFN_UNIQUE, nargs,
5766 					    marker, ddvar, tofollow);
5767   gimple_set_location (call, loc);
5768   gimple_set_lhs (call, ddvar);
5769   gimple_seq_add_stmt (seq, call);
5770 }
5771 
5772 /* Generate the before and after OpenACC loop sequences.  CLAUSES are
5773    the loop clauses, from which we extract reductions.  Initialize
5774    HEAD and TAIL.  */
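
/* For a loop partitioned over two levels the result is nested
   fork/join pairs bracketed by markers, roughly (a sketch):

     HEAD:  HEAD_MARK(2); FORK; HEAD_MARK(1); FORK; HEAD_MARK
     TAIL:  TAIL_MARK(2); JOIN; TAIL_MARK(1); JOIN; TAIL_MARK

   with the reduction setup/init and fini/teardown sequences from
   lower_oacc_reductions interleaved around each FORK and JOIN.  */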
5775 
5776 static void
5777 lower_oacc_head_tail (location_t loc, tree clauses,
5778 		      gimple_seq *head, gimple_seq *tail, omp_context *ctx)
5779 {
5780   bool inner = false;
5781   tree ddvar = create_tmp_var (integer_type_node, ".data_dep");
5782   gimple_seq_add_stmt (head, gimple_build_assign (ddvar, integer_zero_node));
5783 
5784   unsigned count = lower_oacc_head_mark (loc, ddvar, clauses, head, ctx);
5785   tree fork_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_FORK);
5786   tree join_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_JOIN);
5787 
5788   gcc_assert (count);
5789   for (unsigned done = 1; count; count--, done++)
5790     {
5791       gimple_seq fork_seq = NULL;
5792       gimple_seq join_seq = NULL;
5793 
5794       tree place = build_int_cst (integer_type_node, -1);
5795       gcall *fork = gimple_build_call_internal (IFN_UNIQUE, 3,
5796 						fork_kind, ddvar, place);
5797       gimple_set_location (fork, loc);
5798       gimple_set_lhs (fork, ddvar);
5799 
5800       gcall *join = gimple_build_call_internal (IFN_UNIQUE, 3,
5801 						join_kind, ddvar, place);
5802       gimple_set_location (join, loc);
5803       gimple_set_lhs (join, ddvar);
5804 
5805       /* Mark the beginning of this level sequence.  */
5806       if (inner)
5807 	lower_oacc_loop_marker (loc, ddvar, true,
5808 				build_int_cst (integer_type_node, count),
5809 				&fork_seq);
5810       lower_oacc_loop_marker (loc, ddvar, false,
5811 			      build_int_cst (integer_type_node, done),
5812 			      &join_seq);
5813 
5814       lower_oacc_reductions (loc, clauses, place, inner,
5815 			     fork, join, &fork_seq, &join_seq,  ctx);
5816 
5817       /* Append this level to head.  */
5818       gimple_seq_add_seq (head, fork_seq);
5819       /* Prepend it to tail.  */
5820       gimple_seq_add_seq (&join_seq, *tail);
5821       *tail = join_seq;
5822 
5823       inner = true;
5824     }
5825 
5826   /* Mark the end of the sequence.  */
5827   lower_oacc_loop_marker (loc, ddvar, true, NULL_TREE, head);
5828   lower_oacc_loop_marker (loc, ddvar, false, NULL_TREE, tail);
5829 }
5830 
5831 /* If exceptions are enabled, wrap the statements in BODY in a MUST_NOT_THROW
5832    catch handler and return it.  This prevents programs from violating the
5833    structured block semantics with throws.  */
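
/* I.e. BODY becomes, roughly (a sketch):

     try { BODY }
     catch { <must-not-throw: terminate () or __builtin_trap ()> }

   which turns an escaping exception into a hard error.  */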
5834 
5835 static gimple_seq
5836 maybe_catch_exception (gimple_seq body)
5837 {
5838   gimple *g;
5839   tree decl;
5840 
5841   if (!flag_exceptions)
5842     return body;
5843 
5844   if (lang_hooks.eh_protect_cleanup_actions != NULL)
5845     decl = lang_hooks.eh_protect_cleanup_actions ();
5846   else
5847     decl = builtin_decl_explicit (BUILT_IN_TRAP);
5848 
5849   g = gimple_build_eh_must_not_throw (decl);
5850   g = gimple_build_try (body, gimple_seq_alloc_with_stmt (g),
5851       			GIMPLE_TRY_CATCH);
5852 
5853   return gimple_seq_alloc_with_stmt (g);
5854 }
5855 
5856 
5857 /* Routines to lower OMP directives into OMP-GIMPLE.  */
5858 
5859 /* If ctx is a worksharing context inside of a cancellable parallel
5860    region and it isn't nowait, add lhs to its GIMPLE_OMP_RETURN
5861    and conditional branch to parallel's cancel_label to handle
5862    cancellation in the implicit barrier.  */
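
/* I.e. the tail of BODY becomes, roughly (a sketch):

     lhs = GIMPLE_OMP_RETURN;     <- the barrier's cancellation result
     if (lhs != 0) goto <parallel's cancel_label>;
     <fallthru_label>:  */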
5863 
5864 static void
5865 maybe_add_implicit_barrier_cancel (omp_context *ctx, gimple_seq *body)
5866 {
5867   gimple *omp_return = gimple_seq_last_stmt (*body);
5868   gcc_assert (gimple_code (omp_return) == GIMPLE_OMP_RETURN);
5869   if (gimple_omp_return_nowait_p (omp_return))
5870     return;
5871   if (ctx->outer
5872       && gimple_code (ctx->outer->stmt) == GIMPLE_OMP_PARALLEL
5873       && ctx->outer->cancellable)
5874     {
5875       tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_CANCEL);
5876       tree c_bool_type = TREE_TYPE (TREE_TYPE (fndecl));
5877       tree lhs = create_tmp_var (c_bool_type);
5878       gimple_omp_return_set_lhs (omp_return, lhs);
5879       tree fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
5880       gimple *g = gimple_build_cond (NE_EXPR, lhs,
5881 				    fold_convert (c_bool_type,
5882 						  boolean_false_node),
5883 				    ctx->outer->cancel_label, fallthru_label);
5884       gimple_seq_add_stmt (body, g);
5885       gimple_seq_add_stmt (body, gimple_build_label (fallthru_label));
5886     }
5887 }
5888 
5889 /* Lower the OpenMP sections directive in the current statement in GSI_P.
5890    CTX is the enclosing OMP context for the current statement.  */
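
/* The statement is rebuilt, roughly, as (a sketch):

     <privatization code from ILIST>
     GIMPLE_OMP_SECTIONS <.section control var>
     GIMPLE_OMP_SECTIONS_SWITCH
     bind { SECTION_1 ... SECTION_N, each ending in GIMPLE_OMP_RETURN,
	    with lastprivate handling appended to the last section }
     GIMPLE_OMP_CONTINUE
     <reduction merge code, DLIST>
     GIMPLE_OMP_RETURN [+ implicit-barrier cancellation check]  */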
5891 
5892 static void
5893 lower_omp_sections (gimple_stmt_iterator *gsi_p, omp_context *ctx)
5894 {
5895   tree block, control;
5896   gimple_stmt_iterator tgsi;
5897   gomp_sections *stmt;
5898   gimple *t;
5899   gbind *new_stmt, *bind;
5900   gimple_seq ilist, dlist, olist, new_body;
5901 
5902   stmt = as_a <gomp_sections *> (gsi_stmt (*gsi_p));
5903 
5904   push_gimplify_context ();
5905 
5906   dlist = NULL;
5907   ilist = NULL;
5908   lower_rec_input_clauses (gimple_omp_sections_clauses (stmt),
5909       			   &ilist, &dlist, ctx, NULL);
5910 
5911   new_body = gimple_omp_body (stmt);
5912   gimple_omp_set_body (stmt, NULL);
5913   tgsi = gsi_start (new_body);
5914   for (; !gsi_end_p (tgsi); gsi_next (&tgsi))
5915     {
5916       omp_context *sctx;
5917       gimple *sec_start;
5918 
5919       sec_start = gsi_stmt (tgsi);
5920       sctx = maybe_lookup_ctx (sec_start);
5921       gcc_assert (sctx);
5922 
5923       lower_omp (gimple_omp_body_ptr (sec_start), sctx);
5924       gsi_insert_seq_after (&tgsi, gimple_omp_body (sec_start),
5925 			    GSI_CONTINUE_LINKING);
5926       gimple_omp_set_body (sec_start, NULL);
5927 
5928       if (gsi_one_before_end_p (tgsi))
5929 	{
5930 	  gimple_seq l = NULL;
5931 	  lower_lastprivate_clauses (gimple_omp_sections_clauses (stmt), NULL,
5932 				     &l, ctx);
5933 	  gsi_insert_seq_after (&tgsi, l, GSI_CONTINUE_LINKING);
5934 	  gimple_omp_section_set_last (sec_start);
5935 	}
5936 
5937       gsi_insert_after (&tgsi, gimple_build_omp_return (false),
5938 			GSI_CONTINUE_LINKING);
5939     }
5940 
5941   block = make_node (BLOCK);
5942   bind = gimple_build_bind (NULL, new_body, block);
5943 
5944   olist = NULL;
5945   lower_reduction_clauses (gimple_omp_sections_clauses (stmt), &olist, ctx);
5946 
5947   block = make_node (BLOCK);
5948   new_stmt = gimple_build_bind (NULL, NULL, block);
5949   gsi_replace (gsi_p, new_stmt, true);
5950 
5951   pop_gimplify_context (new_stmt);
5952   gimple_bind_append_vars (new_stmt, ctx->block_vars);
5953   BLOCK_VARS (block) = gimple_bind_vars (bind);
5954   if (BLOCK_VARS (block))
5955     TREE_USED (block) = 1;
5956 
5957   new_body = NULL;
5958   gimple_seq_add_seq (&new_body, ilist);
5959   gimple_seq_add_stmt (&new_body, stmt);
5960   gimple_seq_add_stmt (&new_body, gimple_build_omp_sections_switch ());
5961   gimple_seq_add_stmt (&new_body, bind);
5962 
5963   control = create_tmp_var (unsigned_type_node, ".section");
5964   t = gimple_build_omp_continue (control, control);
5965   gimple_omp_sections_set_control (stmt, control);
5966   gimple_seq_add_stmt (&new_body, t);
5967 
5968   gimple_seq_add_seq (&new_body, olist);
5969   if (ctx->cancellable)
5970     gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
5971   gimple_seq_add_seq (&new_body, dlist);
5972 
5973   new_body = maybe_catch_exception (new_body);
5974 
5975   bool nowait = omp_find_clause (gimple_omp_sections_clauses (stmt),
5976 				 OMP_CLAUSE_NOWAIT) != NULL_TREE;
5977   t = gimple_build_omp_return (nowait);
5978   gimple_seq_add_stmt (&new_body, t);
5979   maybe_add_implicit_barrier_cancel (ctx, &new_body);
5980 
5981   gimple_bind_set_body (new_stmt, new_body);
5982 }
5983 
5984 
5985 /* A subroutine of lower_omp_single.  Expand the simple form of
5986    a GIMPLE_OMP_SINGLE, without a copyprivate clause:
5987 
5988      	if (GOMP_single_start ())
5989 	  BODY;
5990 	[ GOMP_barrier (); ]	-> unless 'nowait' is present.
5991 
5992   FIXME.  It may be better to delay expanding the logic of this until
5993   pass_expand_omp.  The expanded logic may make the job more difficult
5994   for a synchronization analysis pass.  */
5995 
5996 static void
5997 lower_omp_single_simple (gomp_single *single_stmt, gimple_seq *pre_p)
5998 {
5999   location_t loc = gimple_location (single_stmt);
6000   tree tlabel = create_artificial_label (loc);
6001   tree flabel = create_artificial_label (loc);
6002   gimple *call, *cond;
6003   tree lhs, decl;
6004 
6005   decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_START);
6006   lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (decl)));
6007   call = gimple_build_call (decl, 0);
6008   gimple_call_set_lhs (call, lhs);
6009   gimple_seq_add_stmt (pre_p, call);
6010 
6011   cond = gimple_build_cond (EQ_EXPR, lhs,
6012 			    fold_convert_loc (loc, TREE_TYPE (lhs),
6013 					      boolean_true_node),
6014 			    tlabel, flabel);
6015   gimple_seq_add_stmt (pre_p, cond);
6016   gimple_seq_add_stmt (pre_p, gimple_build_label (tlabel));
6017   gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
6018   gimple_seq_add_stmt (pre_p, gimple_build_label (flabel));
6019 }
6020 
6021 
6022 /* A subroutine of lower_omp_single.  Expand the simple form of
6023    a GIMPLE_OMP_SINGLE, with a copyprivate clause:
6024 
6025 	#pragma omp single copyprivate (a, b, c)
6026 
6027    Create a new structure to hold copies of 'a', 'b' and 'c' and emit:
6028 
6029       {
6030 	if ((copyout_p = GOMP_single_copy_start ()) == NULL)
6031 	  {
6032 	    BODY;
6033 	    copyout.a = a;
6034 	    copyout.b = b;
6035 	    copyout.c = c;
6036 	    GOMP_single_copy_end (&copyout);
6037 	  }
6038 	else
6039 	  {
6040 	    a = copyout_p->a;
6041 	    b = copyout_p->b;
6042 	    c = copyout_p->c;
6043 	  }
6044 	GOMP_barrier ();
6045       }
6046 
6047   FIXME.  It may be better to delay expanding the logic of this until
6048   pass_expand_omp.  The expanded logic may make the job more difficult
6049   for a synchronization analysis pass.  */
6050 
6051 static void
6052 lower_omp_single_copy (gomp_single *single_stmt, gimple_seq *pre_p,
6053 		       omp_context *ctx)
6054 {
6055   tree ptr_type, t, l0, l1, l2, bfn_decl;
6056   gimple_seq copyin_seq;
6057   location_t loc = gimple_location (single_stmt);
6058 
6059   ctx->sender_decl = create_tmp_var (ctx->record_type, ".omp_copy_o");
6060 
6061   ptr_type = build_pointer_type (ctx->record_type);
6062   ctx->receiver_decl = create_tmp_var (ptr_type, ".omp_copy_i");
6063 
6064   l0 = create_artificial_label (loc);
6065   l1 = create_artificial_label (loc);
6066   l2 = create_artificial_label (loc);
6067 
6068   bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_START);
6069   t = build_call_expr_loc (loc, bfn_decl, 0);
6070   t = fold_convert_loc (loc, ptr_type, t);
6071   gimplify_assign (ctx->receiver_decl, t, pre_p);
6072 
6073   t = build2 (EQ_EXPR, boolean_type_node, ctx->receiver_decl,
6074 	      build_int_cst (ptr_type, 0));
6075   t = build3 (COND_EXPR, void_type_node, t,
6076 	      build_and_jump (&l0), build_and_jump (&l1));
6077   gimplify_and_add (t, pre_p);
6078 
6079   gimple_seq_add_stmt (pre_p, gimple_build_label (l0));
6080 
6081   gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
6082 
6083   copyin_seq = NULL;
6084   lower_copyprivate_clauses (gimple_omp_single_clauses (single_stmt), pre_p,
6085 			      &copyin_seq, ctx);
6086 
6087   t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
6088   bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_END);
6089   t = build_call_expr_loc (loc, bfn_decl, 1, t);
6090   gimplify_and_add (t, pre_p);
6091 
6092   t = build_and_jump (&l2);
6093   gimplify_and_add (t, pre_p);
6094 
6095   gimple_seq_add_stmt (pre_p, gimple_build_label (l1));
6096 
6097   gimple_seq_add_seq (pre_p, copyin_seq);
6098 
6099   gimple_seq_add_stmt (pre_p, gimple_build_label (l2));
6100 }
6101 
6102 
6103 /* Expand code for an OpenMP single directive.  */
6104 
6105 static void
6106 lower_omp_single (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6107 {
6108   tree block;
6109   gomp_single *single_stmt = as_a <gomp_single *> (gsi_stmt (*gsi_p));
6110   gbind *bind;
6111   gimple_seq bind_body, bind_body_tail = NULL, dlist;
6112 
6113   push_gimplify_context ();
6114 
6115   block = make_node (BLOCK);
6116   bind = gimple_build_bind (NULL, NULL, block);
6117   gsi_replace (gsi_p, bind, true);
6118   bind_body = NULL;
6119   dlist = NULL;
6120   lower_rec_input_clauses (gimple_omp_single_clauses (single_stmt),
6121 			   &bind_body, &dlist, ctx, NULL);
6122   lower_omp (gimple_omp_body_ptr (single_stmt), ctx);
6123 
6124   gimple_seq_add_stmt (&bind_body, single_stmt);
6125 
6126   if (ctx->record_type)
6127     lower_omp_single_copy (single_stmt, &bind_body, ctx);
6128   else
6129     lower_omp_single_simple (single_stmt, &bind_body);
6130 
6131   gimple_omp_set_body (single_stmt, NULL);
6132 
6133   gimple_seq_add_seq (&bind_body, dlist);
6134 
6135   bind_body = maybe_catch_exception (bind_body);
6136 
6137   bool nowait = omp_find_clause (gimple_omp_single_clauses (single_stmt),
6138 				 OMP_CLAUSE_NOWAIT) != NULL_TREE;
6139   gimple *g = gimple_build_omp_return (nowait);
6140   gimple_seq_add_stmt (&bind_body_tail, g);
6141   maybe_add_implicit_barrier_cancel (ctx, &bind_body_tail);
6142   if (ctx->record_type)
6143     {
6144       gimple_stmt_iterator gsi = gsi_start (bind_body_tail);
6145       tree clobber = build_constructor (ctx->record_type, NULL);
6146       TREE_THIS_VOLATILE (clobber) = 1;
6147       gsi_insert_after (&gsi, gimple_build_assign (ctx->sender_decl,
6148 						   clobber), GSI_SAME_STMT);
6149     }
6150   gimple_seq_add_seq (&bind_body, bind_body_tail);
6151   gimple_bind_set_body (bind, bind_body);
6152 
6153   pop_gimplify_context (bind);
6154 
6155   gimple_bind_append_vars (bind, ctx->block_vars);
6156   BLOCK_VARS (block) = ctx->block_vars;
6157   if (BLOCK_VARS (block))
6158     TREE_USED (block) = 1;
6159 }
6160 
6161 
6162 /* Expand code for an OpenMP master directive.  */
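
/* I.e. roughly (a sketch):

     if (omp_get_thread_num () != 0) goto lab;
     BODY;
     lab:  */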
6163 
6164 static void
6165 lower_omp_master (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6166 {
6167   tree block, lab = NULL, x, bfn_decl;
6168   gimple *stmt = gsi_stmt (*gsi_p);
6169   gbind *bind;
6170   location_t loc = gimple_location (stmt);
6171   gimple_seq tseq;
6172 
6173   push_gimplify_context ();
6174 
6175   block = make_node (BLOCK);
6176   bind = gimple_build_bind (NULL, NULL, block);
6177   gsi_replace (gsi_p, bind, true);
6178   gimple_bind_add_stmt (bind, stmt);
6179 
6180   bfn_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
6181   x = build_call_expr_loc (loc, bfn_decl, 0);
6182   x = build2 (EQ_EXPR, boolean_type_node, x, integer_zero_node);
6183   x = build3 (COND_EXPR, void_type_node, x, NULL, build_and_jump (&lab));
6184   tseq = NULL;
6185   gimplify_and_add (x, &tseq);
6186   gimple_bind_add_seq (bind, tseq);
6187 
6188   lower_omp (gimple_omp_body_ptr (stmt), ctx);
6189   gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
6190   gimple_bind_add_seq (bind, gimple_omp_body (stmt));
6191   gimple_omp_set_body (stmt, NULL);
6192 
6193   gimple_bind_add_stmt (bind, gimple_build_label (lab));
6194 
6195   gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
6196 
6197   pop_gimplify_context (bind);
6198 
6199   gimple_bind_append_vars (bind, ctx->block_vars);
6200   BLOCK_VARS (block) = ctx->block_vars;
6201 }
6202 
6203 
6204 /* Expand code for an OpenMP taskgroup directive.  */
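
/* I.e. roughly (a sketch):

     GOMP_taskgroup_start ();
     BODY;
     GIMPLE_OMP_RETURN  */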
6205 
6206 static void
6207 lower_omp_taskgroup (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6208 {
6209   gimple *stmt = gsi_stmt (*gsi_p);
6210   gcall *x;
6211   gbind *bind;
6212   tree block = make_node (BLOCK);
6213 
6214   bind = gimple_build_bind (NULL, NULL, block);
6215   gsi_replace (gsi_p, bind, true);
6216   gimple_bind_add_stmt (bind, stmt);
6217 
6218   x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_START),
6219 			 0);
6220   gimple_bind_add_stmt (bind, x);
6221 
6222   lower_omp (gimple_omp_body_ptr (stmt), ctx);
6223   gimple_bind_add_seq (bind, gimple_omp_body (stmt));
6224   gimple_omp_set_body (stmt, NULL);
6225 
6226   gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
6227 
6228   gimple_bind_append_vars (bind, ctx->block_vars);
6229   BLOCK_VARS (block) = ctx->block_vars;
6230 }
6231 
6232 
6233 /* Fold the OMP_ORDERED_CLAUSES for the OMP_ORDERED in ORD_STMT if possible.  */
6234 
6235 static void
6236 lower_omp_ordered_clauses (gimple_stmt_iterator *gsi_p, gomp_ordered *ord_stmt,
6237 			   omp_context *ctx)
6238 {
6239   struct omp_for_data fd;
6240   if (!ctx->outer || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR)
6241     return;
6242 
6243   unsigned int len = gimple_omp_for_collapse (ctx->outer->stmt);
6244   struct omp_for_data_loop *loops = XALLOCAVEC (struct omp_for_data_loop, len);
6245   omp_extract_for_data (as_a <gomp_for *> (ctx->outer->stmt), &fd, loops);
6246   if (!fd.ordered)
6247     return;
6248 
6249   tree *list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
6250   tree c = gimple_omp_ordered_clauses (ord_stmt);
6251   if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
6252       && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
6253     {
6254       /* Merge depend clauses from multiple adjacent
6255 	 #pragma omp ordered depend(sink:...) constructs
6256 	 into one #pragma omp ordered depend(sink:...), so that
6257 	 we can optimize them together.  */
6258       gimple_stmt_iterator gsi = *gsi_p;
6259       gsi_next (&gsi);
6260       while (!gsi_end_p (gsi))
6261 	{
6262 	  gimple *stmt = gsi_stmt (gsi);
6263 	  if (is_gimple_debug (stmt)
6264 	      || gimple_code (stmt) == GIMPLE_NOP)
6265 	    {
6266 	      gsi_next (&gsi);
6267 	      continue;
6268 	    }
6269 	  if (gimple_code (stmt) != GIMPLE_OMP_ORDERED)
6270 	    break;
6271 	  gomp_ordered *ord_stmt2 = as_a <gomp_ordered *> (stmt);
6272 	  c = gimple_omp_ordered_clauses (ord_stmt2);
6273 	  if (c == NULL_TREE
6274 	      || OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND
6275 	      || OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
6276 	    break;
6277 	  while (*list_p)
6278 	    list_p = &OMP_CLAUSE_CHAIN (*list_p);
6279 	  *list_p = c;
6280 	  gsi_remove (&gsi, true);
6281 	}
6282     }
6283 
6284   /* Canonicalize sink dependence clauses into one folded clause if
6285      possible.
6286 
6287      The basic algorithm is to create a sink vector whose first
6288      element is the GCD of all the first elements, and whose remaining
6289      elements are the minimum of the subsequent columns.
6290 
6291      We ignore dependence vectors whose first element is zero because
6292      such dependencies are known to be executed by the same thread.
6293 
6294      We take into account the direction of the loop, so a minimum
6295      becomes a maximum if the loop is iterating forwards.  We also
6296      ignore sink clauses where the loop direction is unknown, or where
6297      the offsets are clearly invalid because they are not a multiple
6298      of the loop increment.
6299 
6300      For example:
6301 
6302 	#pragma omp for ordered(2)
6303 	for (i=0; i < N; ++i)
6304 	  for (j=0; j < M; ++j)
6305 	    {
6306 	      #pragma omp ordered \
6307 		depend(sink:i-8,j-2) \
6308 		depend(sink:i,j-1) \	// Completely ignored because i+0.
6309 		depend(sink:i-4,j-3) \
6310 		depend(sink:i-6,j-4)
6311 	      #pragma omp ordered depend(source)
6312 	    }
6313 
6314      Folded clause is:
6315 
6316 	depend(sink:-gcd(8,4,6),-min(2,3,4))
6317 	  -or-
6318 	depend(sink:-2,-2)
6319   */
6320 
6321   /* FIXME: Computing GCDs where the first element is zero is
6322      non-trivial in the presence of collapsed loops.  Do this later.  */
6323   if (fd.collapse > 1)
6324     return;
6325 
6326   wide_int *folded_deps = XALLOCAVEC (wide_int, 2 * len - 1);
6327 
6328   /* wide_int is not a POD so it must be default-constructed.  */
6329   for (unsigned i = 0; i != 2 * len - 1; ++i)
6330     new (static_cast<void*>(folded_deps + i)) wide_int ();
6331 
6332   tree folded_dep = NULL_TREE;
6333   /* TRUE if the first dimension's offset is negative.  */
6334   bool neg_offset_p = false;
6335 
6336   list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
6337   unsigned int i;
6338   while ((c = *list_p) != NULL)
6339     {
6340       bool remove = false;
6341 
6342       gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND);
6343       if (OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
6344 	goto next_ordered_clause;
6345 
6346       tree vec;
6347       for (vec = OMP_CLAUSE_DECL (c), i = 0;
6348 	   vec && TREE_CODE (vec) == TREE_LIST;
6349 	   vec = TREE_CHAIN (vec), ++i)
6350 	{
6351 	  gcc_assert (i < len);
6352 
6353 	  /* omp_extract_for_data has canonicalized the condition.  */
6354 	  gcc_assert (fd.loops[i].cond_code == LT_EXPR
6355 		      || fd.loops[i].cond_code == GT_EXPR);
6356 	  bool forward = fd.loops[i].cond_code == LT_EXPR;
6357 	  bool maybe_lexically_later = true;
6358 
6359 	  /* While the committee makes up its mind, bail if we have any
6360 	     non-constant steps.  */
6361 	  if (TREE_CODE (fd.loops[i].step) != INTEGER_CST)
6362 	    goto lower_omp_ordered_ret;
6363 
6364 	  tree itype = TREE_TYPE (TREE_VALUE (vec));
6365 	  if (POINTER_TYPE_P (itype))
6366 	    itype = sizetype;
6367 	  wide_int offset = wide_int::from (wi::to_wide (TREE_PURPOSE (vec)),
6368 					    TYPE_PRECISION (itype),
6369 					    TYPE_SIGN (itype));
6370 
6371 	  /* Ignore invalid offsets that are not multiples of the step.  */
6372 	  if (!wi::multiple_of_p (wi::abs (offset),
6373 				  wi::abs (wi::to_wide (fd.loops[i].step)),
6374 				  UNSIGNED))
6375 	    {
6376 	      warning_at (OMP_CLAUSE_LOCATION (c), 0,
6377 			  "ignoring sink clause with offset that is not "
6378 			  "a multiple of the loop step");
6379 	      remove = true;
6380 	      goto next_ordered_clause;
6381 	    }
6382 
6383 	  /* Calculate the first dimension.  The first dimension of
6384 	     the folded dependency vector is the GCD of the first
6385 	     elements, while ignoring any first elements whose offset
6386 	     is 0.  */
6387 	  if (i == 0)
6388 	    {
6389 	      /* Ignore dependence vectors whose first dimension is 0.  */
6390 	      if (offset == 0)
6391 		{
6392 		  remove = true;
6393 		  goto next_ordered_clause;
6394 		}
6395 	      else
6396 		{
6397 		  if (!TYPE_UNSIGNED (itype) && (forward ^ wi::neg_p (offset)))
6398 		    {
6399 		      error_at (OMP_CLAUSE_LOCATION (c),
6400 				"first offset must be in opposite direction "
6401 				"of loop iterations");
6402 		      goto lower_omp_ordered_ret;
6403 		    }
6404 		  if (forward)
6405 		    offset = -offset;
6406 		  neg_offset_p = forward;
6407 		  /* Initialize the first time around.  */
6408 		  if (folded_dep == NULL_TREE)
6409 		    {
6410 		      folded_dep = c;
6411 		      folded_deps[0] = offset;
6412 		    }
6413 		  else
6414 		    folded_deps[0] = wi::gcd (folded_deps[0],
6415 					      offset, UNSIGNED);
6416 		}
6417 	    }
6418 	  /* Calculate minimum for the remaining dimensions.  */
6419 	  else
6420 	    {
6421 	      folded_deps[len + i - 1] = offset;
6422 	      if (folded_dep == c)
6423 		folded_deps[i] = offset;
6424 	      else if (maybe_lexically_later
6425 		       && !wi::eq_p (folded_deps[i], offset))
6426 		{
6427 		  if (forward ^ wi::gts_p (folded_deps[i], offset))
6428 		    {
6429 		      unsigned int j;
6430 		      folded_dep = c;
6431 		      for (j = 1; j <= i; j++)
6432 			folded_deps[j] = folded_deps[len + j - 1];
6433 		    }
6434 		  else
6435 		    maybe_lexically_later = false;
6436 		}
6437 	    }
6438 	}
6439       gcc_assert (i == len);
6440 
6441       remove = true;
6442 
6443     next_ordered_clause:
6444       if (remove)
6445 	*list_p = OMP_CLAUSE_CHAIN (c);
6446       else
6447 	list_p = &OMP_CLAUSE_CHAIN (c);
6448     }
6449 
6450   if (folded_dep)
6451     {
6452       if (neg_offset_p)
6453 	folded_deps[0] = -folded_deps[0];
6454 
6455       tree itype = TREE_TYPE (TREE_VALUE (OMP_CLAUSE_DECL (folded_dep)));
6456       if (POINTER_TYPE_P (itype))
6457 	itype = sizetype;
6458 
6459       TREE_PURPOSE (OMP_CLAUSE_DECL (folded_dep))
6460 	= wide_int_to_tree (itype, folded_deps[0]);
6461       OMP_CLAUSE_CHAIN (folded_dep) = gimple_omp_ordered_clauses (ord_stmt);
6462       *gimple_omp_ordered_clauses_ptr (ord_stmt) = folded_dep;
6463     }
6464 
6465  lower_omp_ordered_ret:
6466 
6467   /* Ordered without clauses is #pragma omp ordered threads, while we
6468      want a nop instead if we remove all clauses.  */
6469   if (gimple_omp_ordered_clauses (ord_stmt) == NULL_TREE)
6470     gsi_replace (gsi_p, gimple_build_nop (), true);
6471 }
6472 
6473 
6474 /* Expand code for an OpenMP ordered directive.  */
6475 
6476 static void
6477 lower_omp_ordered (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6478 {
6479   tree block;
6480   gimple *stmt = gsi_stmt (*gsi_p), *g;
6481   gomp_ordered *ord_stmt = as_a <gomp_ordered *> (stmt);
6482   gcall *x;
6483   gbind *bind;
6484   bool simd = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
6485 			       OMP_CLAUSE_SIMD);
6486   /* FIXME: this should check presence of OMP_CLAUSE__SIMT_ on the enclosing
6487      loop.  */
6488   bool maybe_simt
6489     = simd && omp_maybe_offloaded_ctx (ctx) && omp_max_simt_vf () > 1;
6490   bool threads = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
6491 				  OMP_CLAUSE_THREADS);
6492 
6493   if (omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
6494 		       OMP_CLAUSE_DEPEND))
6495     {
6496       /* FIXME: This needs to be moved to the expansion to verify various
6497 	 conditions only testable on a CFG with dominators computed, and also
6498 	 all the depend clauses to be merged still might need to be available
6499 	 for the runtime checks.  */
6500       if (0)
6501 	lower_omp_ordered_clauses (gsi_p, ord_stmt, ctx);
6502       return;
6503     }
6504 
6505   push_gimplify_context ();
6506 
6507   block = make_node (BLOCK);
6508   bind = gimple_build_bind (NULL, NULL, block);
6509   gsi_replace (gsi_p, bind, true);
6510   gimple_bind_add_stmt (bind, stmt);
6511 
6512   if (simd)
6513     {
6514       x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_START, 1,
6515 				      build_int_cst (NULL_TREE, threads));
6516       cfun->has_simduid_loops = true;
6517     }
6518   else
6519     x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_START),
6520 			   0);
6521   gimple_bind_add_stmt (bind, x);
6522 
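  /* For SIMT execution the ordered body must be executed by one lane
     at a time.  The sequence built below loops over the lanes: on each
     trip GOMP_SIMT_ORDERED_PRED lets a single lane run the body, every
     lane decrements a counter seeded with its lane number, and the
     loop continues while GOMP_SIMT_VOTE_ANY reports that some lane's
     counter is still non-negative.  */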
6523   tree counter = NULL_TREE, test = NULL_TREE, body = NULL_TREE;
6524   if (maybe_simt)
6525     {
6526       counter = create_tmp_var (integer_type_node);
6527       g = gimple_build_call_internal (IFN_GOMP_SIMT_LANE, 0);
6528       gimple_call_set_lhs (g, counter);
6529       gimple_bind_add_stmt (bind, g);
6530 
6531       body = create_artificial_label (UNKNOWN_LOCATION);
6532       test = create_artificial_label (UNKNOWN_LOCATION);
6533       gimple_bind_add_stmt (bind, gimple_build_label (body));
6534 
6535       tree simt_pred = create_tmp_var (integer_type_node);
6536       g = gimple_build_call_internal (IFN_GOMP_SIMT_ORDERED_PRED, 1, counter);
6537       gimple_call_set_lhs (g, simt_pred);
6538       gimple_bind_add_stmt (bind, g);
6539 
6540       tree t = create_artificial_label (UNKNOWN_LOCATION);
6541       g = gimple_build_cond (EQ_EXPR, simt_pred, integer_zero_node, t, test);
6542       gimple_bind_add_stmt (bind, g);
6543 
6544       gimple_bind_add_stmt (bind, gimple_build_label (t));
6545     }
6546   lower_omp (gimple_omp_body_ptr (stmt), ctx);
6547   gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
6548   gimple_bind_add_seq (bind, gimple_omp_body (stmt));
6549   gimple_omp_set_body (stmt, NULL);
6550 
6551   if (maybe_simt)
6552     {
6553       gimple_bind_add_stmt (bind, gimple_build_label (test));
6554       g = gimple_build_assign (counter, MINUS_EXPR, counter, integer_one_node);
6555       gimple_bind_add_stmt (bind, g);
6556 
6557       tree c = build2 (GE_EXPR, boolean_type_node, counter, integer_zero_node);
6558       tree nonneg = create_tmp_var (integer_type_node);
6559       gimple_seq tseq = NULL;
6560       gimplify_assign (nonneg, fold_convert (integer_type_node, c), &tseq);
6561       gimple_bind_add_seq (bind, tseq);
6562 
6563       g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY, 1, nonneg);
6564       gimple_call_set_lhs (g, nonneg);
6565       gimple_bind_add_stmt (bind, g);
6566 
6567       tree end = create_artificial_label (UNKNOWN_LOCATION);
6568       g = gimple_build_cond (NE_EXPR, nonneg, integer_zero_node, body, end);
6569       gimple_bind_add_stmt (bind, g);
6570 
6571       gimple_bind_add_stmt (bind, gimple_build_label (end));
6572     }
6573   if (simd)
6574     x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_END, 1,
6575 				    build_int_cst (NULL_TREE, threads));
6576   else
6577     x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_END),
6578 			   0);
6579   gimple_bind_add_stmt (bind, x);
6580 
6581   gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
6582 
6583   pop_gimplify_context (bind);
6584 
6585   gimple_bind_append_vars (bind, ctx->block_vars);
6586   BLOCK_VARS (block) = gimple_bind_vars (bind);
6587 }
6588 
6589 
6590 /* Gimplify a GIMPLE_OMP_CRITICAL statement.  This is a relatively simple
6591    substitution of a couple of function calls.  But the NAMED case
6592    requires that languages coordinate a symbol name.  It is therefore
6593    best put here in common code.  */
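/* For reference, a rough sketch of the transformation (the mutex symbol
   is the ".gomp_critical_user_" + NAME decl built below):

	#pragma omp critical (foo)
	  body;

   becomes approximately

	GOMP_critical_name_start (&.gomp_critical_user_foo);
	body;
	GOMP_critical_name_end (&.gomp_critical_user_foo);  */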
6594 
6595 static GTY(()) hash_map<tree, tree> *critical_name_mutexes;
6596 
6597 static void
6598 lower_omp_critical (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6599 {
6600   tree block;
6601   tree name, lock, unlock;
6602   gomp_critical *stmt = as_a <gomp_critical *> (gsi_stmt (*gsi_p));
6603   gbind *bind;
6604   location_t loc = gimple_location (stmt);
6605   gimple_seq tbody;
6606 
6607   name = gimple_omp_critical_name (stmt);
6608   if (name)
6609     {
6610       tree decl;
6611 
6612       if (!critical_name_mutexes)
6613 	critical_name_mutexes = hash_map<tree, tree>::create_ggc (10);
6614 
6615       tree *n = critical_name_mutexes->get (name);
6616       if (n == NULL)
6617 	{
6618 	  char *new_str;
6619 
6620 	  decl = create_tmp_var_raw (ptr_type_node);
6621 
6622 	  new_str = ACONCAT ((".gomp_critical_user_",
6623 			      IDENTIFIER_POINTER (name), NULL));
6624 	  DECL_NAME (decl) = get_identifier (new_str);
6625 	  TREE_PUBLIC (decl) = 1;
6626 	  TREE_STATIC (decl) = 1;
6627 	  DECL_COMMON (decl) = 1;
6628 	  DECL_ARTIFICIAL (decl) = 1;
6629 	  DECL_IGNORED_P (decl) = 1;
6630 
6631 	  varpool_node::finalize_decl (decl);
6632 
6633 	  critical_name_mutexes->put (name, decl);
6634 	}
6635       else
6636 	decl = *n;
6637 
6638       /* If '#pragma omp critical' is inside offloaded region or
6639 	 inside function marked as offloadable, the symbol must be
6640 	 marked as offloadable too.  */
6641       omp_context *octx;
6642       if (cgraph_node::get (current_function_decl)->offloadable)
6643 	varpool_node::get_create (decl)->offloadable = 1;
6644       else
6645 	for (octx = ctx->outer; octx; octx = octx->outer)
6646 	  if (is_gimple_omp_offloaded (octx->stmt))
6647 	    {
6648 	      varpool_node::get_create (decl)->offloadable = 1;
6649 	      break;
6650 	    }
6651 
6652       lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_START);
6653       lock = build_call_expr_loc (loc, lock, 1,
6654 				  build_fold_addr_expr_loc (loc, decl));
6655 
6656       unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_END);
6657       unlock = build_call_expr_loc (loc, unlock, 1,
6658 				build_fold_addr_expr_loc (loc, decl));
6659     }
6660   else
6661     {
6662       lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_START);
6663       lock = build_call_expr_loc (loc, lock, 0);
6664 
6665       unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_END);
6666       unlock = build_call_expr_loc (loc, unlock, 0);
6667     }
6668 
6669   push_gimplify_context ();
6670 
6671   block = make_node (BLOCK);
6672   bind = gimple_build_bind (NULL, NULL, block);
6673   gsi_replace (gsi_p, bind, true);
6674   gimple_bind_add_stmt (bind, stmt);
6675 
6676   tbody = gimple_bind_body (bind);
6677   gimplify_and_add (lock, &tbody);
6678   gimple_bind_set_body (bind, tbody);
6679 
6680   lower_omp (gimple_omp_body_ptr (stmt), ctx);
6681   gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
6682   gimple_bind_add_seq (bind, gimple_omp_body (stmt));
6683   gimple_omp_set_body (stmt, NULL);
6684 
6685   tbody = gimple_bind_body (bind);
6686   gimplify_and_add (unlock, &tbody);
6687   gimple_bind_set_body (bind, tbody);
6688 
6689   gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
6690 
6691   pop_gimplify_context (bind);
6692   gimple_bind_append_vars (bind, ctx->block_vars);
6693   BLOCK_VARS (block) = gimple_bind_vars (bind);
6694 }
6695 
6696 /* A subroutine of lower_omp_for.  Generate code to emit the predicate
6697    for a lastprivate clause.  Given a loop control predicate of (V
6698    cond N2), we gate the clause on (!(V cond N2)).  The lowered form
6699    is appended to *DLIST; the iterator initialization is appended to
6700    *BODY_P.  */
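/* E.g. (a sketch): for a loop of the form

	for (V = N1; V < N2; V += STEP)

   the lastprivate copy-back emitted into *DLIST is gated as

	if (V >= N2)		(that is, !(V < N2))
	  <copy lastprivate vars>;

   and when STEP is a constant 1 or -1 the test is turned into V == N2
   below, which VRP can fold more easily.  */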
6701 
6702 static void
6703 lower_omp_for_lastprivate (struct omp_for_data *fd, gimple_seq *body_p,
6704 			   gimple_seq *dlist, struct omp_context *ctx)
6705 {
6706   tree clauses, cond, vinit;
6707   enum tree_code cond_code;
6708   gimple_seq stmts;
6709 
6710   cond_code = fd->loop.cond_code;
6711   cond_code = cond_code == LT_EXPR ? GE_EXPR : LE_EXPR;
6712 
6713   /* When possible, use a strict equality expression.  This can let
6714      VRP-type optimizations deduce the value and remove a copy.  */
6715   if (tree_fits_shwi_p (fd->loop.step))
6716     {
6717       HOST_WIDE_INT step = tree_to_shwi (fd->loop.step);
6718       if (step == 1 || step == -1)
6719 	cond_code = EQ_EXPR;
6720     }
6721 
6722   if (gimple_omp_for_kind (fd->for_stmt) == GF_OMP_FOR_KIND_GRID_LOOP
6723       || gimple_omp_for_grid_phony (fd->for_stmt))
6724     cond = omp_grid_lastprivate_predicate (fd);
6725   else
6726     {
6727       tree n2 = fd->loop.n2;
6728       if (fd->collapse > 1
6729 	  && TREE_CODE (n2) != INTEGER_CST
6730 	  && gimple_omp_for_combined_into_p (fd->for_stmt))
6731 	{
6732 	  struct omp_context *taskreg_ctx = NULL;
6733 	  if (gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
6734 	    {
6735 	      gomp_for *gfor = as_a <gomp_for *> (ctx->outer->stmt);
6736 	      if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_FOR
6737 		  || gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_DISTRIBUTE)
6738 		{
6739 		  if (gimple_omp_for_combined_into_p (gfor))
6740 		    {
6741 		      gcc_assert (ctx->outer->outer
6742 				  && is_parallel_ctx (ctx->outer->outer));
6743 		      taskreg_ctx = ctx->outer->outer;
6744 		    }
6745 		  else
6746 		    {
6747 		      struct omp_for_data outer_fd;
6748 		      omp_extract_for_data (gfor, &outer_fd, NULL);
6749 		      n2 = fold_convert (TREE_TYPE (n2), outer_fd.loop.n2);
6750 		    }
6751 		}
6752 	      else if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_TASKLOOP)
6753 		taskreg_ctx = ctx->outer->outer;
6754 	    }
6755 	  else if (is_taskreg_ctx (ctx->outer))
6756 	    taskreg_ctx = ctx->outer;
6757 	  if (taskreg_ctx)
6758 	    {
6759 	      int i;
6760 	      tree taskreg_clauses
6761 		= gimple_omp_taskreg_clauses (taskreg_ctx->stmt);
6762 	      tree innerc = omp_find_clause (taskreg_clauses,
6763 					     OMP_CLAUSE__LOOPTEMP_);
6764 	      gcc_assert (innerc);
6765 	      for (i = 0; i < fd->collapse; i++)
6766 		{
6767 		  innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
6768 					    OMP_CLAUSE__LOOPTEMP_);
6769 		  gcc_assert (innerc);
6770 		}
6771 	      innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
6772 					OMP_CLAUSE__LOOPTEMP_);
6773 	      if (innerc)
6774 		n2 = fold_convert (TREE_TYPE (n2),
6775 				   lookup_decl (OMP_CLAUSE_DECL (innerc),
6776 						taskreg_ctx));
6777 	    }
6778 	}
6779       cond = build2 (cond_code, boolean_type_node, fd->loop.v, n2);
6780     }
6781 
6782   clauses = gimple_omp_for_clauses (fd->for_stmt);
6783   stmts = NULL;
6784   lower_lastprivate_clauses (clauses, cond, &stmts, ctx);
6785   if (!gimple_seq_empty_p (stmts))
6786     {
6787       gimple_seq_add_seq (&stmts, *dlist);
6788       *dlist = stmts;
6789 
6790       /* Optimize: v = 0; is usually cheaper than v = some_other_constant.  */
6791       vinit = fd->loop.n1;
6792       if (cond_code == EQ_EXPR
6793 	  && tree_fits_shwi_p (fd->loop.n2)
6794 	  && ! integer_zerop (fd->loop.n2))
6795 	vinit = build_int_cst (TREE_TYPE (fd->loop.v), 0);
6796       else
6797 	vinit = unshare_expr (vinit);
6798 
6799       /* Initialize the iterator variable, so that threads that don't execute
6800 	 any iterations don't execute the lastprivate clauses by accident.  */
6801       gimplify_assign (fd->loop.v, vinit, body_p);
6802     }
6803 }
6804 
6805 
6806 /* Lower code for an OMP loop directive.  */
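/* The lowered form has roughly this shape (a sketch; the exact contents
   depend on the clauses and the loop kind):

	<input clause setup: firstprivate, private, ...>
	<pre-body>
	<lastprivate guard initialization>
	GIMPLE_OMP_FOR
	<loop body>
	GIMPLE_OMP_CONTINUE (V, V)
	<reduction clauses>
	<dlist: lastprivate copy-back, destructors>
	GIMPLE_OMP_RETURN

   all wrapped into a fresh GIMPLE_BIND.  */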
6807 
6808 static void
6809 lower_omp_for (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6810 {
6811   tree *rhs_p, block;
6812   struct omp_for_data fd, *fdp = NULL;
6813   gomp_for *stmt = as_a <gomp_for *> (gsi_stmt (*gsi_p));
6814   gbind *new_stmt;
6815   gimple_seq omp_for_body, body, dlist;
6816   gimple_seq oacc_head = NULL, oacc_tail = NULL;
6817   size_t i;
6818 
6819   push_gimplify_context ();
6820 
6821   lower_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
6822 
6823   block = make_node (BLOCK);
6824   new_stmt = gimple_build_bind (NULL, NULL, block);
6825   /* Replace at gsi right away, so that 'stmt' is no longer a member
6826      of a sequence, as we're going to add it to a different
6827      one below.  */
6828   gsi_replace (gsi_p, new_stmt, true);
6829 
6830   /* Move declaration of temporaries in the loop body before we make
6831      it go away.  */
6832   omp_for_body = gimple_omp_body (stmt);
6833   if (!gimple_seq_empty_p (omp_for_body)
6834       && gimple_code (gimple_seq_first_stmt (omp_for_body)) == GIMPLE_BIND)
6835     {
6836       gbind *inner_bind
6837 	= as_a <gbind *> (gimple_seq_first_stmt (omp_for_body));
6838       tree vars = gimple_bind_vars (inner_bind);
6839       gimple_bind_append_vars (new_stmt, vars);
6840       /* bind_vars/BLOCK_VARS are being moved to new_stmt/block, so don't
6841 	 keep them on the inner_bind and its block.  */
6842       gimple_bind_set_vars (inner_bind, NULL_TREE);
6843       if (gimple_bind_block (inner_bind))
6844 	BLOCK_VARS (gimple_bind_block (inner_bind)) = NULL_TREE;
6845     }
6846 
6847   if (gimple_omp_for_combined_into_p (stmt))
6848     {
6849       omp_extract_for_data (stmt, &fd, NULL);
6850       fdp = &fd;
6851 
6852       /* We need two temporaries with fd.loop.v type (istart/iend) and
6853 	 then (fd.collapse - 1) temporaries with the same type for the
6854 	 count2 ... countN-1 vars if the iteration count is not constant.  */
6855       size_t count = 2;
6856       tree type = fd.iter_type;
6857       if (fd.collapse > 1
6858 	  && TREE_CODE (fd.loop.n2) != INTEGER_CST)
6859 	count += fd.collapse - 1;
6860       bool taskreg_for
6861 	= (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR
6862 	   || gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP);
6863       tree outerc = NULL, *pc = gimple_omp_for_clauses_ptr (stmt);
6864       tree simtc = NULL;
6865       tree clauses = *pc;
6866       if (taskreg_for)
6867 	outerc
6868 	  = omp_find_clause (gimple_omp_taskreg_clauses (ctx->outer->stmt),
6869 			     OMP_CLAUSE__LOOPTEMP_);
6870       if (ctx->simt_stmt)
6871 	simtc = omp_find_clause (gimple_omp_for_clauses (ctx->simt_stmt),
6872 				 OMP_CLAUSE__LOOPTEMP_);
6873       for (i = 0; i < count; i++)
6874 	{
6875 	  tree temp;
6876 	  if (taskreg_for)
6877 	    {
6878 	      gcc_assert (outerc);
6879 	      temp = lookup_decl (OMP_CLAUSE_DECL (outerc), ctx->outer);
6880 	      outerc = omp_find_clause (OMP_CLAUSE_CHAIN (outerc),
6881 					OMP_CLAUSE__LOOPTEMP_);
6882 	    }
6883 	  else
6884 	    {
6885 	      /* If there are 2 adjacent SIMD stmts, one with _simt_
6886 		 clause, another without, make sure they have the same
6887 		 decls in _looptemp_ clauses, because the outer stmt
6888 		 they are combined into will look up just one inner_stmt.  */
6889 	      if (ctx->simt_stmt)
6890 		temp = OMP_CLAUSE_DECL (simtc);
6891 	      else
6892 		temp = create_tmp_var (type);
6893 	      insert_decl_map (&ctx->outer->cb, temp, temp);
6894 	    }
6895 	  *pc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
6896 	  OMP_CLAUSE_DECL (*pc) = temp;
6897 	  pc = &OMP_CLAUSE_CHAIN (*pc);
6898 	  if (ctx->simt_stmt)
6899 	    simtc = omp_find_clause (OMP_CLAUSE_CHAIN (simtc),
6900 				     OMP_CLAUSE__LOOPTEMP_);
6901 	}
6902       *pc = clauses;
6903     }
6904 
6905   /* The pre-body and input clauses go before the lowered GIMPLE_OMP_FOR.  */
6906   dlist = NULL;
6907   body = NULL;
6908   lower_rec_input_clauses (gimple_omp_for_clauses (stmt), &body, &dlist, ctx,
6909 			   fdp);
6910   gimple_seq_add_seq (&body, gimple_omp_for_pre_body (stmt));
6911 
6912   lower_omp (gimple_omp_body_ptr (stmt), ctx);
6913 
6914   /* Lower the header expressions.  At this point, we can assume that
6915      the header is of the form:
6916 
6917      	#pragma omp for (V = VAL1; V {<|>|<=|>=} VAL2; V = V [+-] VAL3)
6918 
6919      We just need to make sure that VAL1, VAL2 and VAL3 are lowered
6920      using the .omp_data_s mapping, if needed.  */
6921   for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
6922     {
6923       rhs_p = gimple_omp_for_initial_ptr (stmt, i);
6924       if (!is_gimple_min_invariant (*rhs_p))
6925 	*rhs_p = get_formal_tmp_var (*rhs_p, &body);
6926       else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
6927 	recompute_tree_invariant_for_addr_expr (*rhs_p);
6928 
6929       rhs_p = gimple_omp_for_final_ptr (stmt, i);
6930       if (!is_gimple_min_invariant (*rhs_p))
6931 	*rhs_p = get_formal_tmp_var (*rhs_p, &body);
6932       else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
6933 	recompute_tree_invariant_for_addr_expr (*rhs_p);
6934 
6935       rhs_p = &TREE_OPERAND (gimple_omp_for_incr (stmt, i), 1);
6936       if (!is_gimple_min_invariant (*rhs_p))
6937 	*rhs_p = get_formal_tmp_var (*rhs_p, &body);
6938     }
6939 
6940   /* Once lowered, extract the bounds and clauses.  */
6941   omp_extract_for_data (stmt, &fd, NULL);
6942 
6943   if (is_gimple_omp_oacc (ctx->stmt)
6944       && !ctx_in_oacc_kernels_region (ctx))
6945     lower_oacc_head_tail (gimple_location (stmt),
6946 			  gimple_omp_for_clauses (stmt),
6947 			  &oacc_head, &oacc_tail, ctx);
6948 
6949   /* Add OpenACC partitioning and reduction markers just before the loop.  */
6950   if (oacc_head)
6951     gimple_seq_add_seq (&body, oacc_head);
6952 
6953   lower_omp_for_lastprivate (&fd, &body, &dlist, ctx);
6954 
6955   if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
6956     for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
6957       if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
6958 	  && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
6959 	{
6960 	  OMP_CLAUSE_DECL (c) = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
6961 	  if (DECL_P (OMP_CLAUSE_LINEAR_STEP (c)))
6962 	    OMP_CLAUSE_LINEAR_STEP (c)
6963 	      = maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_LINEAR_STEP (c),
6964 						ctx);
6965 	}
6966 
6967   bool phony_loop = (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_GRID_LOOP
6968 		     && gimple_omp_for_grid_phony (stmt));
6969   if (!phony_loop)
6970     gimple_seq_add_stmt (&body, stmt);
6971   gimple_seq_add_seq (&body, gimple_omp_body (stmt));
6972 
6973   if (!phony_loop)
6974     gimple_seq_add_stmt (&body, gimple_build_omp_continue (fd.loop.v,
6975 							   fd.loop.v));
6976 
6977   /* After the loop, add exit clauses.  */
6978   lower_reduction_clauses (gimple_omp_for_clauses (stmt), &body, ctx);
6979 
6980   if (ctx->cancellable)
6981     gimple_seq_add_stmt (&body, gimple_build_label (ctx->cancel_label));
6982 
6983   gimple_seq_add_seq (&body, dlist);
6984 
6985   body = maybe_catch_exception (body);
6986 
6987   if (!phony_loop)
6988     {
6989       /* Region exit marker goes at the end of the loop body.  */
6990       gimple_seq_add_stmt (&body, gimple_build_omp_return (fd.have_nowait));
6991       maybe_add_implicit_barrier_cancel (ctx, &body);
6992     }
6993 
6994   /* Add OpenACC joining and reduction markers just after the loop.  */
6995   if (oacc_tail)
6996     gimple_seq_add_seq (&body, oacc_tail);
6997 
6998   pop_gimplify_context (new_stmt);
6999 
7000   gimple_bind_append_vars (new_stmt, ctx->block_vars);
7001   maybe_remove_omp_member_access_dummy_vars (new_stmt);
7002   BLOCK_VARS (block) = gimple_bind_vars (new_stmt);
7003   if (BLOCK_VARS (block))
7004     TREE_USED (block) = 1;
7005 
7006   gimple_bind_set_body (new_stmt, body);
7007   gimple_omp_set_body (stmt, NULL);
7008   gimple_omp_for_set_pre_body (stmt, NULL);
7009 }
7010 
7011 /* Callback for walk_stmts.  Check that the walked statements contain just
7012    one GIMPLE_OMP_FOR or GIMPLE_OMP_SECTIONS (*INFO: 1 if so, else -1).  */
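/* E.g. (a sketch):

	#pragma omp parallel
	#pragma omp for
	for (...) ...

   has a body consisting of exactly one workshare, so the parallel can
   be treated as combined; any other statement in the body forces the
   generic expansion.  */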
7013 
7014 static tree
7015 check_combined_parallel (gimple_stmt_iterator *gsi_p,
7016     			 bool *handled_ops_p,
7017     			 struct walk_stmt_info *wi)
7018 {
7019   int *info = (int *) wi->info;
7020   gimple *stmt = gsi_stmt (*gsi_p);
7021 
7022   *handled_ops_p = true;
7023   switch (gimple_code (stmt))
7024     {
7025     WALK_SUBSTMTS;
7026 
7027     case GIMPLE_DEBUG:
7028       break;
7029     case GIMPLE_OMP_FOR:
7030     case GIMPLE_OMP_SECTIONS:
7031       *info = *info == 0 ? 1 : -1;
7032       break;
7033     default:
7034       *info = -1;
7035       break;
7036     }
7037   return NULL;
7038 }
7039 
7040 struct omp_taskcopy_context
7041 {
7042   /* This field must be at the beginning, as we do "inheritance": Some
7043      callback functions for tree-inline.c (e.g., omp_copy_decl)
7044      receive a copy_body_data pointer that is up-casted to an
7045      omp_context pointer.  */
7046   copy_body_data cb;
7047   omp_context *ctx;
7048 };
7049 
7050 static tree
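/* copy_body_data callback for the task copyfn: decls that have a field
   in the task's sender record get a fresh temporary in the copyfn,
   everything else is used as-is.  */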
7051 task_copyfn_copy_decl (tree var, copy_body_data *cb)
7052 {
7053   struct omp_taskcopy_context *tcctx = (struct omp_taskcopy_context *) cb;
7054 
7055   if (splay_tree_lookup (tcctx->ctx->sfield_map, (splay_tree_key) var))
7056     return create_tmp_var (TREE_TYPE (var));
7057 
7058   return var;
7059 }
7060 
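/* Remap ORIG_TYPE for use inside the task copyfn: rebuild the record
   with each field's variably modified size and offset trees remapped
   through TCCTX, then lay out the new type.  */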
7061 static tree
7062 task_copyfn_remap_type (struct omp_taskcopy_context *tcctx, tree orig_type)
7063 {
7064   tree name, new_fields = NULL, type, f;
7065 
7066   type = lang_hooks.types.make_type (RECORD_TYPE);
7067   name = DECL_NAME (TYPE_NAME (orig_type));
7068   name = build_decl (gimple_location (tcctx->ctx->stmt),
7069 		     TYPE_DECL, name, type);
7070   TYPE_NAME (type) = name;
7071 
7072   for (f = TYPE_FIELDS (orig_type); f ; f = TREE_CHAIN (f))
7073     {
7074       tree new_f = copy_node (f);
7075       DECL_CONTEXT (new_f) = type;
7076       TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &tcctx->cb);
7077       TREE_CHAIN (new_f) = new_fields;
7078       walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &tcctx->cb, NULL);
7079       walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r, &tcctx->cb, NULL);
7080       walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
7081 		 &tcctx->cb, NULL);
7082       new_fields = new_f;
7083       tcctx->cb.decl_map->put (f, new_f);
7084     }
7085   TYPE_FIELDS (type) = nreverse (new_fields);
7086   layout_type (type);
7087   return type;
7088 }
7089 
7090 /* Create the task copyfn, the helper that GOMP_task calls to copy
     firstprivate data from the parent's record into the task's block.  */
7091 
7092 static void
7093 create_task_copyfn (gomp_task *task_stmt, omp_context *ctx)
7094 {
7095   struct function *child_cfun;
7096   tree child_fn, t, c, src, dst, f, sf, arg, sarg, decl;
7097   tree record_type, srecord_type, bind, list;
7098   bool record_needs_remap = false, srecord_needs_remap = false;
7099   splay_tree_node n;
7100   struct omp_taskcopy_context tcctx;
7101   location_t loc = gimple_location (task_stmt);
7102 
7103   child_fn = gimple_omp_task_copy_fn (task_stmt);
7104   child_cfun = DECL_STRUCT_FUNCTION (child_fn);
7105   gcc_assert (child_cfun->cfg == NULL);
7106   DECL_SAVED_TREE (child_fn) = alloc_stmt_list ();
7107 
7108   /* Reset DECL_CONTEXT on function arguments.  */
7109   for (t = DECL_ARGUMENTS (child_fn); t; t = DECL_CHAIN (t))
7110     DECL_CONTEXT (t) = child_fn;
7111 
7112   /* Populate the function.  */
7113   push_gimplify_context ();
7114   push_cfun (child_cfun);
7115 
7116   bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
7117   TREE_SIDE_EFFECTS (bind) = 1;
7118   list = NULL;
7119   DECL_SAVED_TREE (child_fn) = bind;
7120   DECL_SOURCE_LOCATION (child_fn) = gimple_location (task_stmt);
7121 
7122   /* Remap src and dst argument types if needed.  */
7123   record_type = ctx->record_type;
7124   srecord_type = ctx->srecord_type;
7125   for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
7126     if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
7127       {
7128 	record_needs_remap = true;
7129 	break;
7130       }
7131   for (f = TYPE_FIELDS (srecord_type); f ; f = DECL_CHAIN (f))
7132     if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
7133       {
7134 	srecord_needs_remap = true;
7135 	break;
7136       }
7137 
7138   if (record_needs_remap || srecord_needs_remap)
7139     {
7140       memset (&tcctx, '\0', sizeof (tcctx));
7141       tcctx.cb.src_fn = ctx->cb.src_fn;
7142       tcctx.cb.dst_fn = child_fn;
7143       tcctx.cb.src_node = cgraph_node::get (tcctx.cb.src_fn);
7144       gcc_checking_assert (tcctx.cb.src_node);
7145       tcctx.cb.dst_node = tcctx.cb.src_node;
7146       tcctx.cb.src_cfun = ctx->cb.src_cfun;
7147       tcctx.cb.copy_decl = task_copyfn_copy_decl;
7148       tcctx.cb.eh_lp_nr = 0;
7149       tcctx.cb.transform_call_graph_edges = CB_CGE_MOVE;
7150       tcctx.cb.decl_map = new hash_map<tree, tree>;
7151       tcctx.ctx = ctx;
7152 
7153       if (record_needs_remap)
7154 	record_type = task_copyfn_remap_type (&tcctx, record_type);
7155       if (srecord_needs_remap)
7156 	srecord_type = task_copyfn_remap_type (&tcctx, srecord_type);
7157     }
7158   else
7159     tcctx.cb.decl_map = NULL;
7160 
7161   arg = DECL_ARGUMENTS (child_fn);
7162   TREE_TYPE (arg) = build_pointer_type (record_type);
7163   sarg = DECL_CHAIN (arg);
7164   TREE_TYPE (sarg) = build_pointer_type (srecord_type);
7165 
7166   /* First pass: initialize temporaries used in record_type and srecord_type
7167      sizes and field offsets.  */
7168   if (tcctx.cb.decl_map)
7169     for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
7170       if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7171 	{
7172 	  tree *p;
7173 
7174 	  decl = OMP_CLAUSE_DECL (c);
7175 	  p = tcctx.cb.decl_map->get (decl);
7176 	  if (p == NULL)
7177 	    continue;
7178 	  n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
7179 	  sf = (tree) n->value;
7180 	  sf = *tcctx.cb.decl_map->get (sf);
7181 	  src = build_simple_mem_ref_loc (loc, sarg);
7182 	  src = omp_build_component_ref (src, sf);
7183 	  t = build2 (MODIFY_EXPR, TREE_TYPE (*p), *p, src);
7184 	  append_to_statement_list (t, &list);
7185 	}
7186 
7187   /* Second pass: copy shared var pointers and copy-construct non-VLA
7188      firstprivate vars.  */
7189   for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
7190     switch (OMP_CLAUSE_CODE (c))
7191       {
7192 	splay_tree_key key;
7193       case OMP_CLAUSE_SHARED:
7194 	decl = OMP_CLAUSE_DECL (c);
7195 	key = (splay_tree_key) decl;
7196 	if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
7197 	  key = (splay_tree_key) &DECL_UID (decl);
7198 	n = splay_tree_lookup (ctx->field_map, key);
7199 	if (n == NULL)
7200 	  break;
7201 	f = (tree) n->value;
7202 	if (tcctx.cb.decl_map)
7203 	  f = *tcctx.cb.decl_map->get (f);
7204 	n = splay_tree_lookup (ctx->sfield_map, key);
7205 	sf = (tree) n->value;
7206 	if (tcctx.cb.decl_map)
7207 	  sf = *tcctx.cb.decl_map->get (sf);
7208 	src = build_simple_mem_ref_loc (loc, sarg);
7209 	src = omp_build_component_ref (src, sf);
7210 	dst = build_simple_mem_ref_loc (loc, arg);
7211 	dst = omp_build_component_ref (dst, f);
7212 	t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
7213 	append_to_statement_list (t, &list);
7214 	break;
7215       case OMP_CLAUSE_FIRSTPRIVATE:
7216 	decl = OMP_CLAUSE_DECL (c);
7217 	if (is_variable_sized (decl))
7218 	  break;
7219 	n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
7220 	if (n == NULL)
7221 	  break;
7222 	f = (tree) n->value;
7223 	if (tcctx.cb.decl_map)
7224 	  f = *tcctx.cb.decl_map->get (f);
7225 	n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
7226 	if (n != NULL)
7227 	  {
7228 	    sf = (tree) n->value;
7229 	    if (tcctx.cb.decl_map)
7230 	      sf = *tcctx.cb.decl_map->get (sf);
7231 	    src = build_simple_mem_ref_loc (loc, sarg);
7232 	    src = omp_build_component_ref (src, sf);
7233 	    if (use_pointer_for_field (decl, NULL) || omp_is_reference (decl))
7234 	      src = build_simple_mem_ref_loc (loc, src);
7235 	  }
7236 	else
7237 	  src = decl;
7238 	dst = build_simple_mem_ref_loc (loc, arg);
7239 	dst = omp_build_component_ref (dst, f);
7240 	t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
7241 	append_to_statement_list (t, &list);
7242 	break;
7243       case OMP_CLAUSE_PRIVATE:
7244 	if (! OMP_CLAUSE_PRIVATE_OUTER_REF (c))
7245 	  break;
7246 	decl = OMP_CLAUSE_DECL (c);
7247 	n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
7248 	f = (tree) n->value;
7249 	if (tcctx.cb.decl_map)
7250 	  f = *tcctx.cb.decl_map->get (f);
7251 	n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
7252 	if (n != NULL)
7253 	  {
7254 	    sf = (tree) n->value;
7255 	    if (tcctx.cb.decl_map)
7256 	      sf = *tcctx.cb.decl_map->get (sf);
7257 	    src = build_simple_mem_ref_loc (loc, sarg);
7258 	    src = omp_build_component_ref (src, sf);
7259 	    if (use_pointer_for_field (decl, NULL))
7260 	      src = build_simple_mem_ref_loc (loc, src);
7261 	  }
7262 	else
7263 	  src = decl;
7264 	dst = build_simple_mem_ref_loc (loc, arg);
7265 	dst = omp_build_component_ref (dst, f);
7266 	t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
7267 	append_to_statement_list (t, &list);
7268 	break;
7269       default:
7270 	break;
7271       }
7272 
7273   /* Last pass: handle VLA firstprivates.  */
7274   if (tcctx.cb.decl_map)
7275     for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
7276       if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7277 	{
7278 	  tree ind, ptr, df;
7279 
7280 	  decl = OMP_CLAUSE_DECL (c);
7281 	  if (!is_variable_sized (decl))
7282 	    continue;
7283 	  n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
7284 	  if (n == NULL)
7285 	    continue;
7286 	  f = (tree) n->value;
7287 	  f = *tcctx.cb.decl_map->get (f);
7288 	  gcc_assert (DECL_HAS_VALUE_EXPR_P (decl));
7289 	  ind = DECL_VALUE_EXPR (decl);
7290 	  gcc_assert (TREE_CODE (ind) == INDIRECT_REF);
7291 	  gcc_assert (DECL_P (TREE_OPERAND (ind, 0)));
7292 	  n = splay_tree_lookup (ctx->sfield_map,
7293 				 (splay_tree_key) TREE_OPERAND (ind, 0));
7294 	  sf = (tree) n->value;
7295 	  sf = *tcctx.cb.decl_map->get (sf);
7296 	  src = build_simple_mem_ref_loc (loc, sarg);
7297 	  src = omp_build_component_ref (src, sf);
7298 	  src = build_simple_mem_ref_loc (loc, src);
7299 	  dst = build_simple_mem_ref_loc (loc, arg);
7300 	  dst = omp_build_component_ref (dst, f);
7301 	  t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
7302 	  append_to_statement_list (t, &list);
7303 	  n = splay_tree_lookup (ctx->field_map,
7304 				 (splay_tree_key) TREE_OPERAND (ind, 0));
7305 	  df = (tree) n->value;
7306 	  df = *tcctx.cb.decl_map->get (df);
7307 	  ptr = build_simple_mem_ref_loc (loc, arg);
7308 	  ptr = omp_build_component_ref (ptr, df);
7309 	  t = build2 (MODIFY_EXPR, TREE_TYPE (ptr), ptr,
7310 		      build_fold_addr_expr_loc (loc, dst));
7311 	  append_to_statement_list (t, &list);
7312 	}
7313 
7314   t = build1 (RETURN_EXPR, void_type_node, NULL);
7315   append_to_statement_list (t, &list);
7316 
7317   if (tcctx.cb.decl_map)
7318     delete tcctx.cb.decl_map;
7319   pop_gimplify_context (NULL);
7320   BIND_EXPR_BODY (bind) = list;
7321   pop_cfun ();
7322 }
7323 
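/* Lower the DEPEND clauses in *PCLAUSES into the flat array the runtime
   expects (a sketch of the layout, with N_OUT out/inout and N_IN in
   dependences):

	deps[0] = N_OUT + N_IN;		total number of addresses
	deps[1] = N_OUT;		out/inout entries come first
	deps[2 .. 1+N_OUT]		addresses of out/inout operands
	deps[2+N_OUT .. 1+N_OUT+N_IN]	addresses of in operands

   Initialization of the array is appended to *ISEQ and a clobber of it
   to *OSEQ; a replacement depend clause pointing at the array is
   prepended to *PCLAUSES.  */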
7324 static void
7325 lower_depend_clauses (tree *pclauses, gimple_seq *iseq, gimple_seq *oseq)
7326 {
7327   tree c, clauses;
7328   gimple *g;
7329   size_t n_in = 0, n_out = 0, idx = 2, i;
7330 
7331   clauses = omp_find_clause (*pclauses, OMP_CLAUSE_DEPEND);
7332   gcc_assert (clauses);
7333   for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
7334     if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
7335       switch (OMP_CLAUSE_DEPEND_KIND (c))
7336 	{
7337 	case OMP_CLAUSE_DEPEND_IN:
7338 	  n_in++;
7339 	  break;
7340 	case OMP_CLAUSE_DEPEND_OUT:
7341 	case OMP_CLAUSE_DEPEND_INOUT:
7342 	  n_out++;
7343 	  break;
7344 	case OMP_CLAUSE_DEPEND_SOURCE:
7345 	case OMP_CLAUSE_DEPEND_SINK:
7346 	  /* FALLTHRU */
7347 	default:
7348 	  gcc_unreachable ();
7349 	}
7350   tree type = build_array_type_nelts (ptr_type_node, n_in + n_out + 2);
7351   tree array = create_tmp_var (type);
7352   TREE_ADDRESSABLE (array) = 1;
7353   tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
7354 		   NULL_TREE);
7355   g = gimple_build_assign (r, build_int_cst (ptr_type_node, n_in + n_out));
7356   gimple_seq_add_stmt (iseq, g);
7357   r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
7358 	      NULL_TREE);
7359   g = gimple_build_assign (r, build_int_cst (ptr_type_node, n_out));
7360   gimple_seq_add_stmt (iseq, g);
7361   for (i = 0; i < 2; i++)
7362     {
7363       if ((i ? n_in : n_out) == 0)
7364 	continue;
7365       for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
7366 	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
7367 	    && ((OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_IN) ^ i))
7368 	  {
7369 	    tree t = OMP_CLAUSE_DECL (c);
7370 	    t = fold_convert (ptr_type_node, t);
7371 	    gimplify_expr (&t, iseq, NULL, is_gimple_val, fb_rvalue);
7372 	    r = build4 (ARRAY_REF, ptr_type_node, array, size_int (idx++),
7373 			NULL_TREE, NULL_TREE);
7374 	    g = gimple_build_assign (r, t);
7375 	    gimple_seq_add_stmt (iseq, g);
7376 	  }
7377     }
7378   c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
7379   OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
7380   OMP_CLAUSE_CHAIN (c) = *pclauses;
7381   *pclauses = c;
7382   tree clobber = build_constructor (type, NULL);
7383   TREE_THIS_VOLATILE (clobber) = 1;
7384   g = gimple_build_assign (array, clobber);
7385   gimple_seq_add_stmt (oseq, g);
7386 }
7387 
7388 /* Lower the OpenMP parallel or task directive in the current statement
7389    in GSI_P.  CTX holds context information for the directive.  */
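/* E.g. (a rough sketch): for "#pragma omp parallel shared(a)" the
   lowered form is approximately

	.omp_data_o.a = &a;			(send shared vars)
	#pragma omp parallel [child fn uses .omp_data_i]
	  ... use *(.omp_data_i->a) in the body ...
	.omp_data_o = {CLOBBER};		(region end)

   where .omp_data_i is the child's receiver_decl, initialized below
   from the address of .omp_data_o.  */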
7390 
7391 static void
7392 lower_omp_taskreg (gimple_stmt_iterator *gsi_p, omp_context *ctx)
7393 {
7394   tree clauses;
7395   tree child_fn, t;
7396   gimple *stmt = gsi_stmt (*gsi_p);
7397   gbind *par_bind, *bind, *dep_bind = NULL;
7398   gimple_seq par_body, olist, ilist, par_olist, par_rlist, par_ilist, new_body;
7399   location_t loc = gimple_location (stmt);
7400 
7401   clauses = gimple_omp_taskreg_clauses (stmt);
7402   par_bind
7403     = as_a <gbind *> (gimple_seq_first_stmt (gimple_omp_body (stmt)));
7404   par_body = gimple_bind_body (par_bind);
7405   child_fn = ctx->cb.dst_fn;
7406   if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL
7407       && !gimple_omp_parallel_combined_p (stmt))
7408     {
7409       struct walk_stmt_info wi;
7410       int ws_num = 0;
7411 
7412       memset (&wi, 0, sizeof (wi));
7413       wi.info = &ws_num;
7414       wi.val_only = true;
7415       walk_gimple_seq (par_body, check_combined_parallel, NULL, &wi);
7416       if (ws_num == 1)
7417 	gimple_omp_parallel_set_combined_p (stmt, true);
7418     }
7419   gimple_seq dep_ilist = NULL;
7420   gimple_seq dep_olist = NULL;
7421   if (gimple_code (stmt) == GIMPLE_OMP_TASK
7422       && omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
7423     {
7424       push_gimplify_context ();
7425       dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
7426       lower_depend_clauses (gimple_omp_task_clauses_ptr (stmt),
7427 			    &dep_ilist, &dep_olist);
7428     }
7429 
7430   if (ctx->srecord_type)
7431     create_task_copyfn (as_a <gomp_task *> (stmt), ctx);
7432 
7433   push_gimplify_context ();
7434 
7435   par_olist = NULL;
7436   par_ilist = NULL;
7437   par_rlist = NULL;
7438   bool phony_construct = gimple_code (stmt) == GIMPLE_OMP_PARALLEL
7439     && gimple_omp_parallel_grid_phony (as_a <gomp_parallel *> (stmt));
7440   if (phony_construct && ctx->record_type)
7441     {
7442       gcc_checking_assert (!ctx->receiver_decl);
7443       ctx->receiver_decl = create_tmp_var
7444 	(build_reference_type (ctx->record_type), ".omp_rec");
7445     }
7446   lower_rec_input_clauses (clauses, &par_ilist, &par_olist, ctx, NULL);
7447   lower_omp (&par_body, ctx);
7448   if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL)
7449     lower_reduction_clauses (clauses, &par_rlist, ctx);
7450 
7451   /* Declare all the variables created by mapping and the variables
7452      declared in the scope of the parallel body.  */
7453   record_vars_into (ctx->block_vars, child_fn);
7454   maybe_remove_omp_member_access_dummy_vars (par_bind);
7455   record_vars_into (gimple_bind_vars (par_bind), child_fn);
7456 
7457   if (ctx->record_type)
7458     {
7459       ctx->sender_decl
7460 	= create_tmp_var (ctx->srecord_type ? ctx->srecord_type
7461 			  : ctx->record_type, ".omp_data_o");
7462       DECL_NAMELESS (ctx->sender_decl) = 1;
7463       TREE_ADDRESSABLE (ctx->sender_decl) = 1;
7464       gimple_omp_taskreg_set_data_arg (stmt, ctx->sender_decl);
7465     }
7466 
7467   olist = NULL;
7468   ilist = NULL;
7469   lower_send_clauses (clauses, &ilist, &olist, ctx);
7470   lower_send_shared_vars (&ilist, &olist, ctx);
7471 
7472   if (ctx->record_type)
7473     {
7474       tree clobber = build_constructor (TREE_TYPE (ctx->sender_decl), NULL);
7475       TREE_THIS_VOLATILE (clobber) = 1;
7476       gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
7477 							clobber));
7478     }
7479 
7480   /* Once all the expansions are done, sequence all the different
7481      fragments inside gimple_omp_body.  */
7482 
7483   new_body = NULL;
7484 
7485   if (ctx->record_type)
7486     {
7487       t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
7488       /* fixup_child_record_type might have changed receiver_decl's type.  */
7489       t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
7490       gimple_seq_add_stmt (&new_body,
7491 	  		   gimple_build_assign (ctx->receiver_decl, t));
7492     }
7493 
7494   gimple_seq_add_seq (&new_body, par_ilist);
7495   gimple_seq_add_seq (&new_body, par_body);
7496   gimple_seq_add_seq (&new_body, par_rlist);
7497   if (ctx->cancellable)
7498     gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
7499   gimple_seq_add_seq (&new_body, par_olist);
7500   new_body = maybe_catch_exception (new_body);
7501   if (gimple_code (stmt) == GIMPLE_OMP_TASK)
7502     gimple_seq_add_stmt (&new_body,
7503 			 gimple_build_omp_continue (integer_zero_node,
7504 						    integer_zero_node));
7505   if (!phony_construct)
7506     {
7507       gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
7508       gimple_omp_set_body (stmt, new_body);
7509     }
7510 
7511   bind = gimple_build_bind (NULL, NULL, gimple_bind_block (par_bind));
7512   gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
7513   gimple_bind_add_seq (bind, ilist);
7514   if (!phony_construct)
7515     gimple_bind_add_stmt (bind, stmt);
7516   else
7517     gimple_bind_add_seq (bind, new_body);
7518   gimple_bind_add_seq (bind, olist);
7519 
7520   pop_gimplify_context (NULL);
7521 
7522   if (dep_bind)
7523     {
7524       gimple_bind_add_seq (dep_bind, dep_ilist);
7525       gimple_bind_add_stmt (dep_bind, bind);
7526       gimple_bind_add_seq (dep_bind, dep_olist);
7527       pop_gimplify_context (dep_bind);
7528     }
7529 }
7530 
7531 /* Lower the GIMPLE_OMP_TARGET in the current statement
7532    in GSI_P.  CTX holds context information for the directive.  */
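/* E.g. (a rough sketch): for "#pragma omp target map(tofrom: a)" the
   lowering builds three parallel arrays which expansion later hands to
   the libgomp target entry points:

	.omp_data_arr   = { &a };		host addresses
	.omp_data_sizes = { sizeof (a) };	byte sizes
	.omp_data_kinds = { GOMP_MAP_TOFROM | align_log2 << 8 };

   i.e. each kind is packed with the log2 alignment in the high bits.  */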
7533 
7534 static void
7535 lower_omp_target (gimple_stmt_iterator *gsi_p, omp_context *ctx)
7536 {
7537   tree clauses;
7538   tree child_fn, t, c;
7539   gomp_target *stmt = as_a <gomp_target *> (gsi_stmt (*gsi_p));
7540   gbind *tgt_bind, *bind, *dep_bind = NULL;
7541   gimple_seq tgt_body, olist, ilist, fplist, new_body;
7542   location_t loc = gimple_location (stmt);
7543   bool offloaded, data_region;
7544   unsigned int map_cnt = 0;
7545 
7546   offloaded = is_gimple_omp_offloaded (stmt);
7547   switch (gimple_omp_target_kind (stmt))
7548     {
7549     case GF_OMP_TARGET_KIND_REGION:
7550     case GF_OMP_TARGET_KIND_UPDATE:
7551     case GF_OMP_TARGET_KIND_ENTER_DATA:
7552     case GF_OMP_TARGET_KIND_EXIT_DATA:
7553     case GF_OMP_TARGET_KIND_OACC_PARALLEL:
7554     case GF_OMP_TARGET_KIND_OACC_KERNELS:
7555     case GF_OMP_TARGET_KIND_OACC_UPDATE:
7556     case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA:
7557     case GF_OMP_TARGET_KIND_OACC_DECLARE:
7558       data_region = false;
7559       break;
7560     case GF_OMP_TARGET_KIND_DATA:
7561     case GF_OMP_TARGET_KIND_OACC_DATA:
7562     case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
7563       data_region = true;
7564       break;
7565     default:
7566       gcc_unreachable ();
7567     }
7568 
7569   clauses = gimple_omp_target_clauses (stmt);
7570 
7571   gimple_seq dep_ilist = NULL;
7572   gimple_seq dep_olist = NULL;
7573   if (omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
7574     {
7575       push_gimplify_context ();
7576       dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
7577       lower_depend_clauses (gimple_omp_target_clauses_ptr (stmt),
7578 			    &dep_ilist, &dep_olist);
7579     }
7580 
7581   tgt_bind = NULL;
7582   tgt_body = NULL;
7583   if (offloaded)
7584     {
7585       tgt_bind = gimple_seq_first_stmt_as_a_bind (gimple_omp_body (stmt));
7586       tgt_body = gimple_bind_body (tgt_bind);
7587     }
7588   else if (data_region)
7589     tgt_body = gimple_omp_body (stmt);
7590   child_fn = ctx->cb.dst_fn;
7591 
7592   push_gimplify_context ();
7593   fplist = NULL;
7594 
7595   for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7596     switch (OMP_CLAUSE_CODE (c))
7597       {
7598 	tree var, x;
7599 
7600       default:
7601 	break;
7602       case OMP_CLAUSE_MAP:
7603 #if CHECKING_P
7604 	/* First check what we're prepared to handle in the following.  */
7605 	switch (OMP_CLAUSE_MAP_KIND (c))
7606 	  {
7607 	  case GOMP_MAP_ALLOC:
7608 	  case GOMP_MAP_TO:
7609 	  case GOMP_MAP_FROM:
7610 	  case GOMP_MAP_TOFROM:
7611 	  case GOMP_MAP_POINTER:
7612 	  case GOMP_MAP_TO_PSET:
7613 	  case GOMP_MAP_DELETE:
7614 	  case GOMP_MAP_RELEASE:
7615 	  case GOMP_MAP_ALWAYS_TO:
7616 	  case GOMP_MAP_ALWAYS_FROM:
7617 	  case GOMP_MAP_ALWAYS_TOFROM:
7618 	  case GOMP_MAP_FIRSTPRIVATE_POINTER:
7619 	  case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
7620 	  case GOMP_MAP_STRUCT:
7621 	  case GOMP_MAP_ALWAYS_POINTER:
7622 	    break;
7623 	  case GOMP_MAP_FORCE_ALLOC:
7624 	  case GOMP_MAP_FORCE_TO:
7625 	  case GOMP_MAP_FORCE_FROM:
7626 	  case GOMP_MAP_FORCE_TOFROM:
7627 	  case GOMP_MAP_FORCE_PRESENT:
7628 	  case GOMP_MAP_FORCE_DEVICEPTR:
7629 	  case GOMP_MAP_DEVICE_RESIDENT:
7630 	  case GOMP_MAP_LINK:
7631 	    gcc_assert (is_gimple_omp_oacc (stmt));
7632 	    break;
7633 	  default:
7634 	    gcc_unreachable ();
7635 	  }
7636 #endif
7637 	  /* FALLTHRU */
7638       case OMP_CLAUSE_TO:
7639       case OMP_CLAUSE_FROM:
7640       oacc_firstprivate:
7641 	var = OMP_CLAUSE_DECL (c);
7642 	if (!DECL_P (var))
7643 	  {
7644 	    if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP
7645 		|| (!OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
7646 		    && (OMP_CLAUSE_MAP_KIND (c)
7647 			!= GOMP_MAP_FIRSTPRIVATE_POINTER)))
7648 	      map_cnt++;
7649 	    continue;
7650 	  }
7651 
7652 	if (DECL_SIZE (var)
7653 	    && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
7654 	  {
7655 	    tree var2 = DECL_VALUE_EXPR (var);
7656 	    gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
7657 	    var2 = TREE_OPERAND (var2, 0);
7658 	    gcc_assert (DECL_P (var2));
7659 	    var = var2;
7660 	  }
7661 
7662 	if (offloaded
7663 	    && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7664 	    && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
7665 		|| OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
7666 	  {
7667 	    if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
7668 	      {
7669 		if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx))
7670 		    && varpool_node::get_create (var)->offloadable)
7671 		  continue;
7672 
7673 		tree type = build_pointer_type (TREE_TYPE (var));
7674 		tree new_var = lookup_decl (var, ctx);
7675 		x = create_tmp_var_raw (type, get_name (new_var));
7676 		gimple_add_tmp_var (x);
7677 		x = build_simple_mem_ref (x);
7678 		SET_DECL_VALUE_EXPR (new_var, x);
7679 		DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7680 	      }
7681 	    continue;
7682 	  }
7683 
7684 	if (!maybe_lookup_field (var, ctx))
7685 	  continue;
7686 
7687 	/* Don't remap oacc parallel reduction variables, because the
7688 	   intermediate result must be local to each gang.  */
7689 	if (offloaded && !(OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7690 			   && OMP_CLAUSE_MAP_IN_REDUCTION (c)))
7691 	  {
7692 	    x = build_receiver_ref (var, true, ctx);
7693 	    tree new_var = lookup_decl (var, ctx);
7694 
7695 	    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7696 		&& OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
7697 		&& !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
7698 		&& TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
7699 	      x = build_simple_mem_ref (x);
7700 	    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7701 	      {
7702 		gcc_assert (is_gimple_omp_oacc (ctx->stmt));
7703 		if (omp_is_reference (new_var))
7704 		  {
7705 		    /* Create a local object to hold the instance
7706 		       value.  */
7707 		    tree type = TREE_TYPE (TREE_TYPE (new_var));
7708 		    const char *id = IDENTIFIER_POINTER (DECL_NAME (new_var));
7709 		    tree inst = create_tmp_var (type, id);
7710 		    gimplify_assign (inst, fold_indirect_ref (x), &fplist);
7711 		    x = build_fold_addr_expr (inst);
7712 		  }
7713 		gimplify_assign (new_var, x, &fplist);
7714 	      }
7715 	    else if (DECL_P (new_var))
7716 	      {
7717 		SET_DECL_VALUE_EXPR (new_var, x);
7718 		DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7719 	      }
7720 	    else
7721 	      gcc_unreachable ();
7722 	  }
7723 	map_cnt++;
7724 	break;
7725 
7726       case OMP_CLAUSE_FIRSTPRIVATE:
7727 	if (is_oacc_parallel (ctx))
7728 	  goto oacc_firstprivate;
7729 	map_cnt++;
7730 	var = OMP_CLAUSE_DECL (c);
7731 	if (!omp_is_reference (var)
7732 	    && !is_gimple_reg_type (TREE_TYPE (var)))
7733 	  {
7734 	    tree new_var = lookup_decl (var, ctx);
7735 	    if (is_variable_sized (var))
7736 	      {
7737 		tree pvar = DECL_VALUE_EXPR (var);
7738 		gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
7739 		pvar = TREE_OPERAND (pvar, 0);
7740 		gcc_assert (DECL_P (pvar));
7741 		tree new_pvar = lookup_decl (pvar, ctx);
7742 		x = build_fold_indirect_ref (new_pvar);
7743 		TREE_THIS_NOTRAP (x) = 1;
7744 	      }
7745 	    else
7746 	      x = build_receiver_ref (var, true, ctx);
7747 	    SET_DECL_VALUE_EXPR (new_var, x);
7748 	    DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7749 	  }
7750 	break;
7751 
7752       case OMP_CLAUSE_PRIVATE:
7753 	if (is_gimple_omp_oacc (ctx->stmt))
7754 	  break;
7755 	var = OMP_CLAUSE_DECL (c);
7756 	if (is_variable_sized (var))
7757 	  {
7758 	    tree new_var = lookup_decl (var, ctx);
7759 	    tree pvar = DECL_VALUE_EXPR (var);
7760 	    gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
7761 	    pvar = TREE_OPERAND (pvar, 0);
7762 	    gcc_assert (DECL_P (pvar));
7763 	    tree new_pvar = lookup_decl (pvar, ctx);
7764 	    x = build_fold_indirect_ref (new_pvar);
7765 	    TREE_THIS_NOTRAP (x) = 1;
7766 	    SET_DECL_VALUE_EXPR (new_var, x);
7767 	    DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7768 	  }
7769 	break;
7770 
7771       case OMP_CLAUSE_USE_DEVICE_PTR:
7772       case OMP_CLAUSE_IS_DEVICE_PTR:
7773 	var = OMP_CLAUSE_DECL (c);
7774 	map_cnt++;
7775 	if (is_variable_sized (var))
7776 	  {
7777 	    tree new_var = lookup_decl (var, ctx);
7778 	    tree pvar = DECL_VALUE_EXPR (var);
7779 	    gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
7780 	    pvar = TREE_OPERAND (pvar, 0);
7781 	    gcc_assert (DECL_P (pvar));
7782 	    tree new_pvar = lookup_decl (pvar, ctx);
7783 	    x = build_fold_indirect_ref (new_pvar);
7784 	    TREE_THIS_NOTRAP (x) = 1;
7785 	    SET_DECL_VALUE_EXPR (new_var, x);
7786 	    DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7787 	  }
7788 	else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
7789 	  {
7790 	    tree new_var = lookup_decl (var, ctx);
7791 	    tree type = build_pointer_type (TREE_TYPE (var));
7792 	    x = create_tmp_var_raw (type, get_name (new_var));
7793 	    gimple_add_tmp_var (x);
7794 	    x = build_simple_mem_ref (x);
7795 	    SET_DECL_VALUE_EXPR (new_var, x);
7796 	    DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7797 	  }
7798 	else
7799 	  {
7800 	    tree new_var = lookup_decl (var, ctx);
7801 	    x = create_tmp_var_raw (TREE_TYPE (new_var), get_name (new_var));
7802 	    gimple_add_tmp_var (x);
7803 	    SET_DECL_VALUE_EXPR (new_var, x);
7804 	    DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7805 	  }
7806 	break;
7807       }
7808 
7809   if (offloaded)
7810     {
7811       target_nesting_level++;
7812       lower_omp (&tgt_body, ctx);
7813       target_nesting_level--;
7814     }
7815   else if (data_region)
7816     lower_omp (&tgt_body, ctx);
7817 
7818   if (offloaded)
7819     {
7820       /* Declare all the variables created by mapping and the variables
7821 	 declared in the scope of the target body.  */
7822       record_vars_into (ctx->block_vars, child_fn);
7823       maybe_remove_omp_member_access_dummy_vars (tgt_bind);
7824       record_vars_into (gimple_bind_vars (tgt_bind), child_fn);
7825     }
7826 
7827   olist = NULL;
7828   ilist = NULL;
7829   if (ctx->record_type)
7830     {
7831       ctx->sender_decl
7832 	= create_tmp_var (ctx->record_type, ".omp_data_arr");
7833       DECL_NAMELESS (ctx->sender_decl) = 1;
7834       TREE_ADDRESSABLE (ctx->sender_decl) = 1;
7835       t = make_tree_vec (3);
7836       TREE_VEC_ELT (t, 0) = ctx->sender_decl;
7837       TREE_VEC_ELT (t, 1)
7838 	= create_tmp_var (build_array_type_nelts (size_type_node, map_cnt),
7839 			  ".omp_data_sizes");
7840       DECL_NAMELESS (TREE_VEC_ELT (t, 1)) = 1;
7841       TREE_ADDRESSABLE (TREE_VEC_ELT (t, 1)) = 1;
7842       TREE_STATIC (TREE_VEC_ELT (t, 1)) = 1;
7843       tree tkind_type = short_unsigned_type_node;
7844       int talign_shift = 8;
7845       TREE_VEC_ELT (t, 2)
7846 	= create_tmp_var (build_array_type_nelts (tkind_type, map_cnt),
7847 			  ".omp_data_kinds");
7848       DECL_NAMELESS (TREE_VEC_ELT (t, 2)) = 1;
7849       TREE_ADDRESSABLE (TREE_VEC_ELT (t, 2)) = 1;
7850       TREE_STATIC (TREE_VEC_ELT (t, 2)) = 1;
7851       gimple_omp_target_set_data_arg (stmt, t);
7852 
7853       vec<constructor_elt, va_gc> *vsize;
7854       vec<constructor_elt, va_gc> *vkind;
7855       vec_alloc (vsize, map_cnt);
7856       vec_alloc (vkind, map_cnt);
7857       unsigned int map_idx = 0;
7858 
7859       for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7860 	switch (OMP_CLAUSE_CODE (c))
7861 	  {
7862 	    tree ovar, nc, s, purpose, var, x, type;
7863 	    unsigned int talign;
7864 
7865 	  default:
7866 	    break;
7867 
7868 	  case OMP_CLAUSE_MAP:
7869 	  case OMP_CLAUSE_TO:
7870 	  case OMP_CLAUSE_FROM:
7871 	  oacc_firstprivate_map:
7872 	    nc = c;
7873 	    ovar = OMP_CLAUSE_DECL (c);
7874 	    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7875 		&& (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
7876 		    || (OMP_CLAUSE_MAP_KIND (c)
7877 			== GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
7878 	      break;
7879 	    if (!DECL_P (ovar))
7880 	      {
7881 		if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7882 		    && OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
7883 		  {
7884 		    gcc_checking_assert (OMP_CLAUSE_DECL (OMP_CLAUSE_CHAIN (c))
7885 					 == get_base_address (ovar));
7886 		    nc = OMP_CLAUSE_CHAIN (c);
7887 		    ovar = OMP_CLAUSE_DECL (nc);
7888 		  }
7889 		else
7890 		  {
7891 		    tree x = build_sender_ref (ovar, ctx);
7892 		    tree v
7893 		      = build_fold_addr_expr_with_type (ovar, ptr_type_node);
7894 		    gimplify_assign (x, v, &ilist);
7895 		    nc = NULL_TREE;
7896 		  }
7897 	      }
7898 	    else
7899 	      {
7900 		if (DECL_SIZE (ovar)
7901 		    && TREE_CODE (DECL_SIZE (ovar)) != INTEGER_CST)
7902 		  {
7903 		    tree ovar2 = DECL_VALUE_EXPR (ovar);
7904 		    gcc_assert (TREE_CODE (ovar2) == INDIRECT_REF);
7905 		    ovar2 = TREE_OPERAND (ovar2, 0);
7906 		    gcc_assert (DECL_P (ovar2));
7907 		    ovar = ovar2;
7908 		  }
7909 		if (!maybe_lookup_field (ovar, ctx))
7910 		  continue;
7911 	      }
7912 
7913 	    talign = TYPE_ALIGN_UNIT (TREE_TYPE (ovar));
7914 	    if (DECL_P (ovar) && DECL_ALIGN_UNIT (ovar) > talign)
7915 	      talign = DECL_ALIGN_UNIT (ovar);
7916 	    if (nc)
7917 	      {
7918 		var = lookup_decl_in_outer_ctx (ovar, ctx);
7919 		x = build_sender_ref (ovar, ctx);
7920 
7921 		if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7922 		    && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
7923 		    && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
7924 		    && TREE_CODE (TREE_TYPE (ovar)) == ARRAY_TYPE)
7925 		  {
7926 		    gcc_assert (offloaded);
7927 		    tree avar
7928 		      = create_tmp_var (TREE_TYPE (TREE_TYPE (x)));
7929 		    mark_addressable (avar);
7930 		    gimplify_assign (avar, build_fold_addr_expr (var), &ilist);
7931 		    talign = DECL_ALIGN_UNIT (avar);
7932 		    avar = build_fold_addr_expr (avar);
7933 		    gimplify_assign (x, avar, &ilist);
7934 		  }
7935 		else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7936 		  {
7937 		    gcc_assert (is_gimple_omp_oacc (ctx->stmt));
7938 		    if (!omp_is_reference (var))
7939 		      {
7940 			if (is_gimple_reg (var)
7941 			    && OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
7942 			  TREE_NO_WARNING (var) = 1;
7943 			var = build_fold_addr_expr (var);
7944 		      }
7945 		    else
7946 		      talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
7947 		    gimplify_assign (x, var, &ilist);
7948 		  }
7949 		else if (is_gimple_reg (var))
7950 		  {
7951 		    gcc_assert (offloaded);
7952 		    tree avar = create_tmp_var (TREE_TYPE (var));
7953 		    mark_addressable (avar);
7954 		    enum gomp_map_kind map_kind = OMP_CLAUSE_MAP_KIND (c);
7955 		    if (GOMP_MAP_COPY_TO_P (map_kind)
7956 			|| map_kind == GOMP_MAP_POINTER
7957 			|| map_kind == GOMP_MAP_TO_PSET
7958 			|| map_kind == GOMP_MAP_FORCE_DEVICEPTR)
7959 		      {
7960 			/* If we need to initialize a temporary
7961 			   with VAR because it is not addressable, and
7962 			   the variable hasn't been initialized yet, then
7963 			   we'll get a warning for the store to avar.
7964 			   Don't warn in that case; the mapping might
7965 			   be implicit.  */
7966 			TREE_NO_WARNING (var) = 1;
7967 			gimplify_assign (avar, var, &ilist);
7968 		      }
7969 		    avar = build_fold_addr_expr (avar);
7970 		    gimplify_assign (x, avar, &ilist);
7971 		    if ((GOMP_MAP_COPY_FROM_P (map_kind)
7972 			 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
7973 			&& !TYPE_READONLY (TREE_TYPE (var)))
7974 		      {
7975 			x = unshare_expr (x);
7976 			x = build_simple_mem_ref (x);
7977 			gimplify_assign (var, x, &olist);
7978 		      }
7979 		  }
7980 		else
7981 		  {
7982 		    var = build_fold_addr_expr (var);
7983 		    gimplify_assign (x, var, &ilist);
7984 		  }
7985 	      }
7986 	    s = NULL_TREE;
7987 	    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7988 	      {
7989 		gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
7990 		s = TREE_TYPE (ovar);
7991 		if (TREE_CODE (s) == REFERENCE_TYPE)
7992 		  s = TREE_TYPE (s);
7993 		s = TYPE_SIZE_UNIT (s);
7994 	      }
7995 	    else
7996 	      s = OMP_CLAUSE_SIZE (c);
7997 	    if (s == NULL_TREE)
7998 	      s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
7999 	    s = fold_convert (size_type_node, s);
8000 	    purpose = size_int (map_idx++);
8001 	    CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
8002 	    if (TREE_CODE (s) != INTEGER_CST)
8003 	      TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
8004 
8005 	    unsigned HOST_WIDE_INT tkind, tkind_zero;
8006 	    switch (OMP_CLAUSE_CODE (c))
8007 	      {
8008 	      case OMP_CLAUSE_MAP:
8009 		tkind = OMP_CLAUSE_MAP_KIND (c);
8010 		tkind_zero = tkind;
8011 		if (OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c))
8012 		  switch (tkind)
8013 		    {
8014 		    case GOMP_MAP_ALLOC:
8015 		    case GOMP_MAP_TO:
8016 		    case GOMP_MAP_FROM:
8017 		    case GOMP_MAP_TOFROM:
8018 		    case GOMP_MAP_ALWAYS_TO:
8019 		    case GOMP_MAP_ALWAYS_FROM:
8020 		    case GOMP_MAP_ALWAYS_TOFROM:
8021 		    case GOMP_MAP_RELEASE:
8022 		    case GOMP_MAP_FORCE_TO:
8023 		    case GOMP_MAP_FORCE_FROM:
8024 		    case GOMP_MAP_FORCE_TOFROM:
8025 		    case GOMP_MAP_FORCE_PRESENT:
8026 		      tkind_zero = GOMP_MAP_ZERO_LEN_ARRAY_SECTION;
8027 		      break;
8028 		    case GOMP_MAP_DELETE:
8029 		      tkind_zero = GOMP_MAP_DELETE_ZERO_LEN_ARRAY_SECTION;
8030 		    default:
8031 		      break;
8032 		    }
8033 		if (tkind_zero != tkind)
8034 		  {
8035 		    if (integer_zerop (s))
8036 		      tkind = tkind_zero;
8037 		    else if (integer_nonzerop (s))
8038 		      tkind_zero = tkind;
8039 		  }
8040 		break;
8041 	      case OMP_CLAUSE_FIRSTPRIVATE:
8042 		gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
8043 		tkind = GOMP_MAP_TO;
8044 		tkind_zero = tkind;
8045 		break;
8046 	      case OMP_CLAUSE_TO:
8047 		tkind = GOMP_MAP_TO;
8048 		tkind_zero = tkind;
8049 		break;
8050 	      case OMP_CLAUSE_FROM:
8051 		tkind = GOMP_MAP_FROM;
8052 		tkind_zero = tkind;
8053 		break;
8054 	      default:
8055 		gcc_unreachable ();
8056 	      }
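	    /* Pack the map kind into the low talign_shift (8) bits of the
	       kinds array entry and the log2 alignment above them.  */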
8057 	    gcc_checking_assert (tkind
8058 				 < (HOST_WIDE_INT_C (1U) << talign_shift));
8059 	    gcc_checking_assert (tkind_zero
8060 				 < (HOST_WIDE_INT_C (1U) << talign_shift));
8061 	    talign = ceil_log2 (talign);
8062 	    tkind |= talign << talign_shift;
8063 	    tkind_zero |= talign << talign_shift;
8064 	    gcc_checking_assert (tkind
8065 				 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
8066 	    gcc_checking_assert (tkind_zero
8067 				 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
8068 	    if (tkind == tkind_zero)
8069 	      x = build_int_cstu (tkind_type, tkind);
8070 	    else
8071 	      {
8072 		TREE_STATIC (TREE_VEC_ELT (t, 2)) = 0;
8073 		x = build3 (COND_EXPR, tkind_type,
8074 			    fold_build2 (EQ_EXPR, boolean_type_node,
8075 					 unshare_expr (s), size_zero_node),
8076 			    build_int_cstu (tkind_type, tkind_zero),
8077 			    build_int_cstu (tkind_type, tkind));
8078 	      }
8079 	    CONSTRUCTOR_APPEND_ELT (vkind, purpose, x);
8080 	    if (nc && nc != c)
8081 	      c = nc;
8082 	    break;
8083 
8084 	  case OMP_CLAUSE_FIRSTPRIVATE:
8085 	    if (is_oacc_parallel (ctx))
8086 	      goto oacc_firstprivate_map;
8087 	    ovar = OMP_CLAUSE_DECL (c);
8088 	    if (omp_is_reference (ovar))
8089 	      talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
8090 	    else
8091 	      talign = DECL_ALIGN_UNIT (ovar);
8092 	    var = lookup_decl_in_outer_ctx (ovar, ctx);
8093 	    x = build_sender_ref (ovar, ctx);
8094 	    tkind = GOMP_MAP_FIRSTPRIVATE;
8095 	    type = TREE_TYPE (ovar);
8096 	    if (omp_is_reference (ovar))
8097 	      type = TREE_TYPE (type);
8098 	    if ((INTEGRAL_TYPE_P (type)
8099 		 && TYPE_PRECISION (type) <= POINTER_SIZE)
8100 		|| TREE_CODE (type) == POINTER_TYPE)
8101 	      {
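		/* Small integral and pointer firstprivates are passed by
		   value in the pointer-sized slot itself rather than through
		   an address, so nothing extra needs to be mapped.  */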
8102 		tkind = GOMP_MAP_FIRSTPRIVATE_INT;
8103 		tree t = var;
8104 		if (omp_is_reference (var))
8105 		  t = build_simple_mem_ref (var);
8106 		else if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
8107 		  TREE_NO_WARNING (var) = 1;
8108 		if (TREE_CODE (type) != POINTER_TYPE)
8109 		  t = fold_convert (pointer_sized_int_node, t);
8110 		t = fold_convert (TREE_TYPE (x), t);
8111 		gimplify_assign (x, t, &ilist);
8112 	      }
8113 	    else if (omp_is_reference (var))
8114 	      gimplify_assign (x, var, &ilist);
8115 	    else if (is_gimple_reg (var))
8116 	      {
8117 		tree avar = create_tmp_var (TREE_TYPE (var));
8118 		mark_addressable (avar);
8119 		if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
8120 		  TREE_NO_WARNING (var) = 1;
8121 		gimplify_assign (avar, var, &ilist);
8122 		avar = build_fold_addr_expr (avar);
8123 		gimplify_assign (x, avar, &ilist);
8124 	      }
8125 	    else
8126 	      {
8127 		var = build_fold_addr_expr (var);
8128 		gimplify_assign (x, var, &ilist);
8129 	      }
8130 	    if (tkind == GOMP_MAP_FIRSTPRIVATE_INT)
8131 	      s = size_int (0);
8132 	    else if (omp_is_reference (ovar))
8133 	      s = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
8134 	    else
8135 	      s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
8136 	    s = fold_convert (size_type_node, s);
8137 	    purpose = size_int (map_idx++);
8138 	    CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
8139 	    if (TREE_CODE (s) != INTEGER_CST)
8140 	      TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
8141 
8142 	    gcc_checking_assert (tkind
8143 				 < (HOST_WIDE_INT_C (1U) << talign_shift));
8144 	    talign = ceil_log2 (talign);
8145 	    tkind |= talign << talign_shift;
8146 	    gcc_checking_assert (tkind
8147 				 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
8148 	    CONSTRUCTOR_APPEND_ELT (vkind, purpose,
8149 				    build_int_cstu (tkind_type, tkind));
8150 	    break;
8151 
8152 	  case OMP_CLAUSE_USE_DEVICE_PTR:
8153 	  case OMP_CLAUSE_IS_DEVICE_PTR:
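	    /* E.g. for a hypothetical use_device_ptr (p) on a target data
	       construct, the host value of P is sent and the runtime
	       substitutes the corresponding device pointer.  */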
8154 	    ovar = OMP_CLAUSE_DECL (c);
8155 	    var = lookup_decl_in_outer_ctx (ovar, ctx);
8156 	    x = build_sender_ref (ovar, ctx);
8157 	    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR)
8158 	      tkind = GOMP_MAP_USE_DEVICE_PTR;
8159 	    else
8160 	      tkind = GOMP_MAP_FIRSTPRIVATE_INT;
8161 	    type = TREE_TYPE (ovar);
8162 	    if (TREE_CODE (type) == ARRAY_TYPE)
8163 	      var = build_fold_addr_expr (var);
8164 	    else
8165 	      {
8166 		if (omp_is_reference (ovar))
8167 		  {
8168 		    type = TREE_TYPE (type);
8169 		    if (TREE_CODE (type) != ARRAY_TYPE)
8170 		      var = build_simple_mem_ref (var);
8171 		    var = fold_convert (TREE_TYPE (x), var);
8172 		  }
8173 	      }
8174 	    gimplify_assign (x, var, &ilist);
8175 	    s = size_int (0);
8176 	    purpose = size_int (map_idx++);
8177 	    CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
8178 	    gcc_checking_assert (tkind
8179 				 < (HOST_WIDE_INT_C (1U) << talign_shift));
8180 	    gcc_checking_assert (tkind
8181 				 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
8182 	    CONSTRUCTOR_APPEND_ELT (vkind, purpose,
8183 				    build_int_cstu (tkind_type, tkind));
8184 	    break;
8185 	  }
8186 
8187       gcc_assert (map_idx == map_cnt);
8188 
8189       DECL_INITIAL (TREE_VEC_ELT (t, 1))
8190 	= build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 1)), vsize);
8191       DECL_INITIAL (TREE_VEC_ELT (t, 2))
8192 	= build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 2)), vkind);
8193       for (int i = 1; i <= 2; i++)
8194 	if (!TREE_STATIC (TREE_VEC_ELT (t, i)))
8195 	  {
8196 	    gimple_seq initlist = NULL;
8197 	    force_gimple_operand (build1 (DECL_EXPR, void_type_node,
8198 					  TREE_VEC_ELT (t, i)),
8199 				  &initlist, true, NULL_TREE);
8200 	    gimple_seq_add_seq (&ilist, initlist);
8201 
8202 	    tree clobber = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, i)),
8203 					      NULL);
8204 	    TREE_THIS_VOLATILE (clobber) = 1;
8205 	    gimple_seq_add_stmt (&olist,
8206 				 gimple_build_assign (TREE_VEC_ELT (t, i),
8207 						      clobber));
8208 	  }
8209 
8210       tree clobber = build_constructor (ctx->record_type, NULL);
8211       TREE_THIS_VOLATILE (clobber) = 1;
8212       gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
8213 							clobber));
8214     }
8215 
8216   /* Once all the expansions are done, sequence all the different
8217      fragments inside gimple_omp_body.  */
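
  /* A sketch of the resulting body for an offloaded region (illustrative
     pseudo-GIMPLE; the decl names are an assumption):

       .omp_data_i = &.omp_data_arr;	<- receiver_decl setup
       <fplist>				<- firstprivate initializations
       <fork_seq> <tgt_body> <join_seq>	<- OpenACC reduction fork/join
       OMP_RETURN  */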
8218 
8219   new_body = NULL;
8220 
8221   if (offloaded
8222       && ctx->record_type)
8223     {
8224       t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
8225       /* fixup_child_record_type might have changed receiver_decl's type.  */
8226       t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
8227       gimple_seq_add_stmt (&new_body,
8228 	  		   gimple_build_assign (ctx->receiver_decl, t));
8229     }
8230   gimple_seq_add_seq (&new_body, fplist);
8231 
8232   if (offloaded || data_region)
8233     {
8234       tree prev = NULL_TREE;
8235       for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
8236 	switch (OMP_CLAUSE_CODE (c))
8237 	  {
8238 	    tree var, x;
8239 	  default:
8240 	    break;
8241 	  case OMP_CLAUSE_FIRSTPRIVATE:
8242 	    if (is_gimple_omp_oacc (ctx->stmt))
8243 	      break;
8244 	    var = OMP_CLAUSE_DECL (c);
8245 	    if (omp_is_reference (var)
8246 		|| is_gimple_reg_type (TREE_TYPE (var)))
8247 	      {
8248 		tree new_var = lookup_decl (var, ctx);
8249 		tree type;
8250 		type = TREE_TYPE (var);
8251 		if (omp_is_reference (var))
8252 		  type = TREE_TYPE (type);
8253 		if ((INTEGRAL_TYPE_P (type)
8254 		     && TYPE_PRECISION (type) <= POINTER_SIZE)
8255 		    || TREE_CODE (type) == POINTER_TYPE)
8256 		  {
8257 		    x = build_receiver_ref (var, false, ctx);
8258 		    if (TREE_CODE (type) != POINTER_TYPE)
8259 		      x = fold_convert (pointer_sized_int_node, x);
8260 		    x = fold_convert (type, x);
8261 		    gimplify_expr (&x, &new_body, NULL, is_gimple_val,
8262 				   fb_rvalue);
8263 		    if (omp_is_reference (var))
8264 		      {
8265 			tree v = create_tmp_var_raw (type, get_name (var));
8266 			gimple_add_tmp_var (v);
8267 			TREE_ADDRESSABLE (v) = 1;
8268 			gimple_seq_add_stmt (&new_body,
8269 					     gimple_build_assign (v, x));
8270 			x = build_fold_addr_expr (v);
8271 		      }
8272 		    gimple_seq_add_stmt (&new_body,
8273 					 gimple_build_assign (new_var, x));
8274 		  }
8275 		else
8276 		  {
8277 		    x = build_receiver_ref (var, !omp_is_reference (var), ctx);
8278 		    gimplify_expr (&x, &new_body, NULL, is_gimple_val,
8279 				   fb_rvalue);
8280 		    gimple_seq_add_stmt (&new_body,
8281 					 gimple_build_assign (new_var, x));
8282 		  }
8283 	      }
8284 	    else if (is_variable_sized (var))
8285 	      {
8286 		tree pvar = DECL_VALUE_EXPR (var);
8287 		gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
8288 		pvar = TREE_OPERAND (pvar, 0);
8289 		gcc_assert (DECL_P (pvar));
8290 		tree new_var = lookup_decl (pvar, ctx);
8291 		x = build_receiver_ref (var, false, ctx);
8292 		gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8293 		gimple_seq_add_stmt (&new_body,
8294 				     gimple_build_assign (new_var, x));
8295 	      }
8296 	    break;
8297 	  case OMP_CLAUSE_PRIVATE:
8298 	    if (is_gimple_omp_oacc (ctx->stmt))
8299 	      break;
8300 	    var = OMP_CLAUSE_DECL (c);
8301 	    if (omp_is_reference (var))
8302 	      {
8303 		location_t clause_loc = OMP_CLAUSE_LOCATION (c);
8304 		tree new_var = lookup_decl (var, ctx);
8305 		x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
8306 		if (TREE_CONSTANT (x))
8307 		  {
8308 		    x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
8309 					    get_name (var));
8310 		    gimple_add_tmp_var (x);
8311 		    TREE_ADDRESSABLE (x) = 1;
8312 		    x = build_fold_addr_expr_loc (clause_loc, x);
8313 		  }
8314 		else
8315 		  break;
8316 
8317 		x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
8318 		gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8319 		gimple_seq_add_stmt (&new_body,
8320 				     gimple_build_assign (new_var, x));
8321 	      }
8322 	    break;
8323 	  case OMP_CLAUSE_USE_DEVICE_PTR:
8324 	  case OMP_CLAUSE_IS_DEVICE_PTR:
8325 	    var = OMP_CLAUSE_DECL (c);
8326 	    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR)
8327 	      x = build_sender_ref (var, ctx);
8328 	    else
8329 	      x = build_receiver_ref (var, false, ctx);
8330 	    if (is_variable_sized (var))
8331 	      {
8332 		tree pvar = DECL_VALUE_EXPR (var);
8333 		gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
8334 		pvar = TREE_OPERAND (pvar, 0);
8335 		gcc_assert (DECL_P (pvar));
8336 		tree new_var = lookup_decl (pvar, ctx);
8337 		gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8338 		gimple_seq_add_stmt (&new_body,
8339 				     gimple_build_assign (new_var, x));
8340 	      }
8341 	    else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
8342 	      {
8343 		tree new_var = lookup_decl (var, ctx);
8344 		new_var = DECL_VALUE_EXPR (new_var);
8345 		gcc_assert (TREE_CODE (new_var) == MEM_REF);
8346 		new_var = TREE_OPERAND (new_var, 0);
8347 		gcc_assert (DECL_P (new_var));
8348 		gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8349 		gimple_seq_add_stmt (&new_body,
8350 				     gimple_build_assign (new_var, x));
8351 	      }
8352 	    else
8353 	      {
8354 		tree type = TREE_TYPE (var);
8355 		tree new_var = lookup_decl (var, ctx);
8356 		if (omp_is_reference (var))
8357 		  {
8358 		    type = TREE_TYPE (type);
8359 		    if (TREE_CODE (type) != ARRAY_TYPE)
8360 		      {
8361 			tree v = create_tmp_var_raw (type, get_name (var));
8362 			gimple_add_tmp_var (v);
8363 			TREE_ADDRESSABLE (v) = 1;
8364 			x = fold_convert (type, x);
8365 			gimplify_expr (&x, &new_body, NULL, is_gimple_val,
8366 				       fb_rvalue);
8367 			gimple_seq_add_stmt (&new_body,
8368 					     gimple_build_assign (v, x));
8369 			x = build_fold_addr_expr (v);
8370 		      }
8371 		  }
8372 		new_var = DECL_VALUE_EXPR (new_var);
8373 		x = fold_convert (TREE_TYPE (new_var), x);
8374 		gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8375 		gimple_seq_add_stmt (&new_body,
8376 				     gimple_build_assign (new_var, x));
8377 	      }
8378 	    break;
8379 	  }
8380       /* Handle GOMP_MAP_FIRSTPRIVATE_{POINTER,REFERENCE} in a second pass,
8381 	 so that any firstprivate vars needed by OMP_CLAUSE_SIZE have already
8382 	 been handled.  Similarly for OMP_CLAUSE_PRIVATE on VLAs or
8383 	 references to VLAs.  */
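      /* E.g. for a hypothetical map (tofrom: p[10:20]) of a pointer P, the
	 trailing GOMP_MAP_FIRSTPRIVATE_POINTER clause receives the address
	 of the mapped section, and its OMP_CLAUSE_SIZE holds the bias
	 (10 * sizeof *p); the bias is negated and applied below via
	 POINTER_PLUS_EXPR so the private copy of P points BIAS bytes before
	 the mapped section.  */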
8384       for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
8385 	switch (OMP_CLAUSE_CODE (c))
8386 	  {
8387 	    tree var;
8388 	  default:
8389 	    break;
8390 	  case OMP_CLAUSE_MAP:
8391 	    if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
8392 		|| OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
8393 	      {
8394 		location_t clause_loc = OMP_CLAUSE_LOCATION (c);
8395 		poly_int64 offset = 0;
8396 		gcc_assert (prev);
8397 		var = OMP_CLAUSE_DECL (c);
8398 		if (DECL_P (var)
8399 		    && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
8400 		    && is_global_var (maybe_lookup_decl_in_outer_ctx (var,
8401 								      ctx))
8402 		    && varpool_node::get_create (var)->offloadable)
8403 		  break;
8404 		if (TREE_CODE (var) == INDIRECT_REF
8405 		    && TREE_CODE (TREE_OPERAND (var, 0)) == COMPONENT_REF)
8406 		  var = TREE_OPERAND (var, 0);
8407 		if (TREE_CODE (var) == COMPONENT_REF)
8408 		  {
8409 		    var = get_addr_base_and_unit_offset (var, &offset);
8410 		    gcc_assert (var != NULL_TREE && DECL_P (var));
8411 		  }
8412 		else if (DECL_SIZE (var)
8413 			 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
8414 		  {
8415 		    tree var2 = DECL_VALUE_EXPR (var);
8416 		    gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
8417 		    var2 = TREE_OPERAND (var2, 0);
8418 		    gcc_assert (DECL_P (var2));
8419 		    var = var2;
8420 		  }
8421 		tree new_var = lookup_decl (var, ctx), x;
8422 		tree type = TREE_TYPE (new_var);
8423 		bool is_ref;
8424 		if (TREE_CODE (OMP_CLAUSE_DECL (c)) == INDIRECT_REF
8425 		    && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
8426 			== COMPONENT_REF))
8427 		  {
8428 		    type = TREE_TYPE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0));
8429 		    is_ref = true;
8430 		    new_var = build2 (MEM_REF, type,
8431 				      build_fold_addr_expr (new_var),
8432 				      build_int_cst (build_pointer_type (type),
8433 						     offset));
8434 		  }
8435 		else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPONENT_REF)
8436 		  {
8437 		    type = TREE_TYPE (OMP_CLAUSE_DECL (c));
8438 		    is_ref = TREE_CODE (type) == REFERENCE_TYPE;
8439 		    new_var = build2 (MEM_REF, type,
8440 				      build_fold_addr_expr (new_var),
8441 				      build_int_cst (build_pointer_type (type),
8442 						     offset));
8443 		  }
8444 		else
8445 		  is_ref = omp_is_reference (var);
8446 		if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
8447 		  is_ref = false;
8448 		bool ref_to_array = false;
8449 		if (is_ref)
8450 		  {
8451 		    type = TREE_TYPE (type);
8452 		    if (TREE_CODE (type) == ARRAY_TYPE)
8453 		      {
8454 			type = build_pointer_type (type);
8455 			ref_to_array = true;
8456 		      }
8457 		  }
8458 		else if (TREE_CODE (type) == ARRAY_TYPE)
8459 		  {
8460 		    tree decl2 = DECL_VALUE_EXPR (new_var);
8461 		    gcc_assert (TREE_CODE (decl2) == MEM_REF);
8462 		    decl2 = TREE_OPERAND (decl2, 0);
8463 		    gcc_assert (DECL_P (decl2));
8464 		    new_var = decl2;
8465 		    type = TREE_TYPE (new_var);
8466 		  }
8467 		x = build_receiver_ref (OMP_CLAUSE_DECL (prev), false, ctx);
8468 		x = fold_convert_loc (clause_loc, type, x);
8469 		if (!integer_zerop (OMP_CLAUSE_SIZE (c)))
8470 		  {
8471 		    tree bias = OMP_CLAUSE_SIZE (c);
8472 		    if (DECL_P (bias))
8473 		      bias = lookup_decl (bias, ctx);
8474 		    bias = fold_convert_loc (clause_loc, sizetype, bias);
8475 		    bias = fold_build1_loc (clause_loc, NEGATE_EXPR, sizetype,
8476 					    bias);
8477 		    x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
8478 					 TREE_TYPE (x), x, bias);
8479 		  }
8480 		if (ref_to_array)
8481 		  x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
8482 		gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8483 		if (is_ref && !ref_to_array)
8484 		  {
8485 		    tree t = create_tmp_var_raw (type, get_name (var));
8486 		    gimple_add_tmp_var (t);
8487 		    TREE_ADDRESSABLE (t) = 1;
8488 		    gimple_seq_add_stmt (&new_body,
8489 					 gimple_build_assign (t, x));
8490 		    x = build_fold_addr_expr_loc (clause_loc, t);
8491 		  }
8492 		gimple_seq_add_stmt (&new_body,
8493 				     gimple_build_assign (new_var, x));
8494 		prev = NULL_TREE;
8495 	      }
8496 	    else if (OMP_CLAUSE_CHAIN (c)
8497 		     && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c))
8498 			== OMP_CLAUSE_MAP
8499 		     && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
8500 			 == GOMP_MAP_FIRSTPRIVATE_POINTER
8501 			 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
8502 			     == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
8503 	      prev = c;
8504 	    break;
8505 	  case OMP_CLAUSE_PRIVATE:
8506 	    var = OMP_CLAUSE_DECL (c);
8507 	    if (is_variable_sized (var))
8508 	      {
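		/* The private VLA copy lives on the stack; a sketch of what
		   is built below:

		     new_pvar = __builtin_alloca_with_align (size, align);  */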
8509 		location_t clause_loc = OMP_CLAUSE_LOCATION (c);
8510 		tree new_var = lookup_decl (var, ctx);
8511 		tree pvar = DECL_VALUE_EXPR (var);
8512 		gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
8513 		pvar = TREE_OPERAND (pvar, 0);
8514 		gcc_assert (DECL_P (pvar));
8515 		tree new_pvar = lookup_decl (pvar, ctx);
8516 		tree atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
8517 		tree al = size_int (DECL_ALIGN (var));
8518 		tree x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
8519 		x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
8520 		x = fold_convert_loc (clause_loc, TREE_TYPE (new_pvar), x);
8521 		gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8522 		gimple_seq_add_stmt (&new_body,
8523 				     gimple_build_assign (new_pvar, x));
8524 	      }
8525 	    else if (omp_is_reference (var) && !is_gimple_omp_oacc (ctx->stmt))
8526 	      {
8527 		location_t clause_loc = OMP_CLAUSE_LOCATION (c);
8528 		tree new_var = lookup_decl (var, ctx);
8529 		tree x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
8530 		if (TREE_CONSTANT (x))
8531 		  break;
8532 		else
8533 		  {
8534 		    tree atmp
8535 		      = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
8536 		    tree rtype = TREE_TYPE (TREE_TYPE (new_var));
8537 		    tree al = size_int (TYPE_ALIGN (rtype));
8538 		    x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
8539 		  }
8540 
8541 		x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
8542 		gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8543 		gimple_seq_add_stmt (&new_body,
8544 				     gimple_build_assign (new_var, x));
8545 	      }
8546 	    break;
8547 	  }
8548 
8549       gimple_seq fork_seq = NULL;
8550       gimple_seq join_seq = NULL;
8551 
8552       if (is_oacc_parallel (ctx))
8553 	{
8554 	  /* If there are reductions on the offloaded region itself, treat
8555 	     them as a dummy GANG loop.  */
8556 	  tree level = build_int_cst (integer_type_node, GOMP_DIM_GANG);
8557 
8558 	  lower_oacc_reductions (gimple_location (ctx->stmt), clauses, level,
8559 				 false, NULL, NULL, &fork_seq, &join_seq, ctx);
8560 	}
8561 
8562       gimple_seq_add_seq (&new_body, fork_seq);
8563       gimple_seq_add_seq (&new_body, tgt_body);
8564       gimple_seq_add_seq (&new_body, join_seq);
8565 
8566       if (offloaded)
8567 	new_body = maybe_catch_exception (new_body);
8568 
8569       gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
8570       gimple_omp_set_body (stmt, new_body);
8571     }
8572 
8573   bind = gimple_build_bind (NULL, NULL,
8574 			    tgt_bind ? gimple_bind_block (tgt_bind)
8575 				     : NULL_TREE);
8576   gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
8577   gimple_bind_add_seq (bind, ilist);
8578   gimple_bind_add_stmt (bind, stmt);
8579   gimple_bind_add_seq (bind, olist);
8580 
8581   pop_gimplify_context (NULL);
8582 
8583   if (dep_bind)
8584     {
8585       gimple_bind_add_seq (dep_bind, dep_ilist);
8586       gimple_bind_add_stmt (dep_bind, bind);
8587       gimple_bind_add_seq (dep_bind, dep_olist);
8588       pop_gimplify_context (dep_bind);
8589     }
8590 }
8591 
8592 /* Lower code for an OpenMP teams directive.  */
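
/* For example (a sketch; the source below is hypothetical user code):

     #pragma omp teams num_teams (4) thread_limit (16)
       body;

   is lowered to roughly

     GOMP_teams (4, 16);
     body;
     OMP_RETURN  */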
8593 
8594 static void
8595 lower_omp_teams (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8596 {
8597   gomp_teams *teams_stmt = as_a <gomp_teams *> (gsi_stmt (*gsi_p));
8598   push_gimplify_context ();
8599 
8600   tree block = make_node (BLOCK);
8601   gbind *bind = gimple_build_bind (NULL, NULL, block);
8602   gsi_replace (gsi_p, bind, true);
8603   gimple_seq bind_body = NULL;
8604   gimple_seq dlist = NULL;
8605   gimple_seq olist = NULL;
8606 
8607   tree num_teams = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
8608 				    OMP_CLAUSE_NUM_TEAMS);
8609   if (num_teams == NULL_TREE)
8610     num_teams = build_int_cst (unsigned_type_node, 0);
8611   else
8612     {
8613       num_teams = OMP_CLAUSE_NUM_TEAMS_EXPR (num_teams);
8614       num_teams = fold_convert (unsigned_type_node, num_teams);
8615       gimplify_expr (&num_teams, &bind_body, NULL, is_gimple_val, fb_rvalue);
8616     }
8617   tree thread_limit = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
8618 				       OMP_CLAUSE_THREAD_LIMIT);
8619   if (thread_limit == NULL_TREE)
8620     thread_limit = build_int_cst (unsigned_type_node, 0);
8621   else
8622     {
8623       thread_limit = OMP_CLAUSE_THREAD_LIMIT_EXPR (thread_limit);
8624       thread_limit = fold_convert (unsigned_type_node, thread_limit);
8625       gimplify_expr (&thread_limit, &bind_body, NULL, is_gimple_val,
8626 		     fb_rvalue);
8627     }
8628 
8629   lower_rec_input_clauses (gimple_omp_teams_clauses (teams_stmt),
8630 			   &bind_body, &dlist, ctx, NULL);
8631   lower_omp (gimple_omp_body_ptr (teams_stmt), ctx);
8632   lower_reduction_clauses (gimple_omp_teams_clauses (teams_stmt), &olist, ctx);
8633   if (!gimple_omp_teams_grid_phony (teams_stmt))
8634     {
8635       gimple_seq_add_stmt (&bind_body, teams_stmt);
8636       location_t loc = gimple_location (teams_stmt);
8637       tree decl = builtin_decl_explicit (BUILT_IN_GOMP_TEAMS);
8638       gimple *call = gimple_build_call (decl, 2, num_teams, thread_limit);
8639       gimple_set_location (call, loc);
8640       gimple_seq_add_stmt (&bind_body, call);
8641     }
8642 
8643   gimple_seq_add_seq (&bind_body, gimple_omp_body (teams_stmt));
8644   gimple_omp_set_body (teams_stmt, NULL);
8645   gimple_seq_add_seq (&bind_body, olist);
8646   gimple_seq_add_seq (&bind_body, dlist);
8647   if (!gimple_omp_teams_grid_phony (teams_stmt))
8648     gimple_seq_add_stmt (&bind_body, gimple_build_omp_return (true));
8649   gimple_bind_set_body (bind, bind_body);
8650 
8651   pop_gimplify_context (bind);
8652 
8653   gimple_bind_append_vars (bind, ctx->block_vars);
8654   BLOCK_VARS (block) = ctx->block_vars;
8655   if (BLOCK_VARS (block))
8656     TREE_USED (block) = 1;
8657 }
8658 
8659 /* Lower code within an artificial GIMPLE_OMP_GRID_BODY OMP construct.  */
8660 
8661 static void
8662 lower_omp_grid_body (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8663 {
8664   gimple *stmt = gsi_stmt (*gsi_p);
8665   lower_omp (gimple_omp_body_ptr (stmt), ctx);
8666   gimple_seq_add_stmt (gimple_omp_body_ptr (stmt),
8667 		       gimple_build_omp_return (false));
8668 }
8669 
8670 
8671 /* Callback for lower_omp_1.  Return non-NULL if *tp needs to be
8672    regimplified.  If DATA is non-NULL, lower_omp_1 is outside
8673    of OMP context, but with task_shared_vars set.  */
8674 
8675 static tree
8676 lower_omp_regimplify_p (tree *tp, int *walk_subtrees,
8677     			void *data)
8678 {
8679   tree t = *tp;
8680 
8681   /* Any variable with DECL_VALUE_EXPR needs to be regimplified.  */
8682   if (VAR_P (t) && data == NULL && DECL_HAS_VALUE_EXPR_P (t))
8683     return t;
8684 
8685   if (task_shared_vars
8686       && DECL_P (t)
8687       && bitmap_bit_p (task_shared_vars, DECL_UID (t)))
8688     return t;
8689 
8690   /* If a global variable has been privatized, TREE_CONSTANT on
8691      ADDR_EXPR might be wrong.  */
8692   if (data == NULL && TREE_CODE (t) == ADDR_EXPR)
8693     recompute_tree_invariant_for_addr_expr (t);
8694 
8695   *walk_subtrees = !IS_TYPE_OR_DECL_P (t);
8696   return NULL_TREE;
8697 }
8698 
8699 /* Data to be communicated between lower_omp_regimplify_operands and
8700    lower_omp_regimplify_operands_p.  */
8701 
8702 struct lower_omp_regimplify_operands_data
8703 {
8704   omp_context *ctx;
8705   vec<tree> *decls;
8706 };
8707 
8708 /* Helper function for lower_omp_regimplify_operands.  Find
8709    omp_member_access_dummy_var vars and temporarily adjust their
8710    DECL_VALUE_EXPRs if needed.  */
8711 
8712 static tree
8713 lower_omp_regimplify_operands_p (tree *tp, int *walk_subtrees,
8714 				 void *data)
8715 {
8716   tree t = omp_member_access_dummy_var (*tp);
8717   if (t)
8718     {
8719       struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
8720       lower_omp_regimplify_operands_data *ldata
8721 	= (lower_omp_regimplify_operands_data *) wi->info;
8722       tree o = maybe_lookup_decl (t, ldata->ctx);
8723       if (o != t)
8724 	{
8725 	  ldata->decls->safe_push (DECL_VALUE_EXPR (*tp));
8726 	  ldata->decls->safe_push (*tp);
8727 	  tree v = unshare_and_remap (DECL_VALUE_EXPR (*tp), t, o);
8728 	  SET_DECL_VALUE_EXPR (*tp, v);
8729 	}
8730     }
8731   *walk_subtrees = !IS_TYPE_OR_DECL_P (*tp);
8732   return NULL_TREE;
8733 }
8734 
8735 /* Wrapper around gimple_regimplify_operands that adjusts DECL_VALUE_EXPRs
8736    of omp_member_access_dummy_var vars during regimplification.  */
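
/* DECLS holds (saved DECL_VALUE_EXPR, var) pairs pushed by
   lower_omp_regimplify_operands_p, so that the temporary remapping can be
   undone once gimple_regimplify_operands has run.  */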
8737 
8738 static void
8739 lower_omp_regimplify_operands (omp_context *ctx, gimple *stmt,
8740 			       gimple_stmt_iterator *gsi_p)
8741 {
8742   auto_vec<tree, 10> decls;
8743   if (ctx)
8744     {
8745       struct walk_stmt_info wi;
8746       memset (&wi, '\0', sizeof (wi));
8747       struct lower_omp_regimplify_operands_data data;
8748       data.ctx = ctx;
8749       data.decls = &decls;
8750       wi.info = &data;
8751       walk_gimple_op (stmt, lower_omp_regimplify_operands_p, &wi);
8752     }
8753   gimple_regimplify_operands (stmt, gsi_p);
8754   while (!decls.is_empty ())
8755     {
8756       tree t = decls.pop ();
8757       tree v = decls.pop ();
8758       SET_DECL_VALUE_EXPR (t, v);
8759     }
8760 }
8761 
8762 static void
8763 lower_omp_1 (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8764 {
8765   gimple *stmt = gsi_stmt (*gsi_p);
8766   struct walk_stmt_info wi;
8767   gcall *call_stmt;
8768 
8769   if (gimple_has_location (stmt))
8770     input_location = gimple_location (stmt);
8771 
8772   if (task_shared_vars)
8773     memset (&wi, '\0', sizeof (wi));
8774 
8775   /* If we have issued syntax errors, avoid doing any heavy lifting.
8776      Just replace the OMP directives with a NOP to avoid
8777      confusing RTL expansion.  */
8778   if (seen_error () && is_gimple_omp (stmt))
8779     {
8780       gsi_replace (gsi_p, gimple_build_nop (), true);
8781       return;
8782     }
8783 
8784   switch (gimple_code (stmt))
8785     {
8786     case GIMPLE_COND:
8787       {
8788 	gcond *cond_stmt = as_a <gcond *> (stmt);
8789 	if ((ctx || task_shared_vars)
8790 	    && (walk_tree (gimple_cond_lhs_ptr (cond_stmt),
8791 			   lower_omp_regimplify_p,
8792 			   ctx ? NULL : &wi, NULL)
8793 		|| walk_tree (gimple_cond_rhs_ptr (cond_stmt),
8794 			      lower_omp_regimplify_p,
8795 			      ctx ? NULL : &wi, NULL)))
8796 	  lower_omp_regimplify_operands (ctx, cond_stmt, gsi_p);
8797       }
8798       break;
8799     case GIMPLE_CATCH:
8800       lower_omp (gimple_catch_handler_ptr (as_a <gcatch *> (stmt)), ctx);
8801       break;
8802     case GIMPLE_EH_FILTER:
8803       lower_omp (gimple_eh_filter_failure_ptr (stmt), ctx);
8804       break;
8805     case GIMPLE_TRY:
8806       lower_omp (gimple_try_eval_ptr (stmt), ctx);
8807       lower_omp (gimple_try_cleanup_ptr (stmt), ctx);
8808       break;
8809     case GIMPLE_TRANSACTION:
8810       lower_omp (gimple_transaction_body_ptr (as_a <gtransaction *> (stmt)),
8811 		 ctx);
8812       break;
8813     case GIMPLE_BIND:
8814       lower_omp (gimple_bind_body_ptr (as_a <gbind *> (stmt)), ctx);
8815       maybe_remove_omp_member_access_dummy_vars (as_a <gbind *> (stmt));
8816       break;
8817     case GIMPLE_OMP_PARALLEL:
8818     case GIMPLE_OMP_TASK:
8819       ctx = maybe_lookup_ctx (stmt);
8820       gcc_assert (ctx);
8821       if (ctx->cancellable)
8822 	ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
8823       lower_omp_taskreg (gsi_p, ctx);
8824       break;
8825     case GIMPLE_OMP_FOR:
8826       ctx = maybe_lookup_ctx (stmt);
8827       gcc_assert (ctx);
8828       if (ctx->cancellable)
8829 	ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
8830       lower_omp_for (gsi_p, ctx);
8831       break;
8832     case GIMPLE_OMP_SECTIONS:
8833       ctx = maybe_lookup_ctx (stmt);
8834       gcc_assert (ctx);
8835       if (ctx->cancellable)
8836 	ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
8837       lower_omp_sections (gsi_p, ctx);
8838       break;
8839     case GIMPLE_OMP_SINGLE:
8840       ctx = maybe_lookup_ctx (stmt);
8841       gcc_assert (ctx);
8842       lower_omp_single (gsi_p, ctx);
8843       break;
8844     case GIMPLE_OMP_MASTER:
8845       ctx = maybe_lookup_ctx (stmt);
8846       gcc_assert (ctx);
8847       lower_omp_master (gsi_p, ctx);
8848       break;
8849     case GIMPLE_OMP_TASKGROUP:
8850       ctx = maybe_lookup_ctx (stmt);
8851       gcc_assert (ctx);
8852       lower_omp_taskgroup (gsi_p, ctx);
8853       break;
8854     case GIMPLE_OMP_ORDERED:
8855       ctx = maybe_lookup_ctx (stmt);
8856       gcc_assert (ctx);
8857       lower_omp_ordered (gsi_p, ctx);
8858       break;
8859     case GIMPLE_OMP_CRITICAL:
8860       ctx = maybe_lookup_ctx (stmt);
8861       gcc_assert (ctx);
8862       lower_omp_critical (gsi_p, ctx);
8863       break;
8864     case GIMPLE_OMP_ATOMIC_LOAD:
8865       if ((ctx || task_shared_vars)
8866 	  && walk_tree (gimple_omp_atomic_load_rhs_ptr (
8867 			  as_a <gomp_atomic_load *> (stmt)),
8868 			lower_omp_regimplify_p, ctx ? NULL : &wi, NULL))
8869 	lower_omp_regimplify_operands (ctx, stmt, gsi_p);
8870       break;
8871     case GIMPLE_OMP_TARGET:
8872       ctx = maybe_lookup_ctx (stmt);
8873       gcc_assert (ctx);
8874       lower_omp_target (gsi_p, ctx);
8875       break;
8876     case GIMPLE_OMP_TEAMS:
8877       ctx = maybe_lookup_ctx (stmt);
8878       gcc_assert (ctx);
8879       lower_omp_teams (gsi_p, ctx);
8880       break;
8881     case GIMPLE_OMP_GRID_BODY:
8882       ctx = maybe_lookup_ctx (stmt);
8883       gcc_assert (ctx);
8884       lower_omp_grid_body (gsi_p, ctx);
8885       break;
8886     case GIMPLE_CALL:
8887       tree fndecl;
8888       call_stmt = as_a <gcall *> (stmt);
8889       fndecl = gimple_call_fndecl (call_stmt);
8890       if (fndecl
8891 	  && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
8892 	switch (DECL_FUNCTION_CODE (fndecl))
8893 	  {
8894 	  case BUILT_IN_GOMP_BARRIER:
8895 	    if (ctx == NULL)
8896 	      break;
8897 	    /* FALLTHRU */
8898 	  case BUILT_IN_GOMP_CANCEL:
8899 	  case BUILT_IN_GOMP_CANCELLATION_POINT:
8900 	    omp_context *cctx;
8901 	    cctx = ctx;
8902 	    if (gimple_code (cctx->stmt) == GIMPLE_OMP_SECTION)
8903 	      cctx = cctx->outer;
8904 	    gcc_assert (gimple_call_lhs (call_stmt) == NULL_TREE);
8905 	    if (!cctx->cancellable)
8906 	      {
8907 		if (DECL_FUNCTION_CODE (fndecl)
8908 		    == BUILT_IN_GOMP_CANCELLATION_POINT)
8909 		  {
8910 		    stmt = gimple_build_nop ();
8911 		    gsi_replace (gsi_p, stmt, false);
8912 		  }
8913 		break;
8914 	      }
8915 	    if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_GOMP_BARRIER)
8916 	      {
8917 		fndecl = builtin_decl_explicit (BUILT_IN_GOMP_BARRIER_CANCEL);
8918 		gimple_call_set_fndecl (call_stmt, fndecl);
8919 		gimple_call_set_fntype (call_stmt, TREE_TYPE (fndecl));
8920 	      }
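	    /* A sketch of the check built below:

		 lhs = GOMP_cancel (...);
		 if (lhs != 0) goto <cancel_label>; else goto <fallthru>;  */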
8921 	    tree lhs;
8922 	    lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (fndecl)));
8923 	    gimple_call_set_lhs (call_stmt, lhs);
8924 	    tree fallthru_label;
8925 	    fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
8926 	    gimple *g;
8927 	    g = gimple_build_label (fallthru_label);
8928 	    gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
8929 	    g = gimple_build_cond (NE_EXPR, lhs,
8930 				   fold_convert (TREE_TYPE (lhs),
8931 						 boolean_false_node),
8932 				   cctx->cancel_label, fallthru_label);
8933 	    gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
8934 	    break;
8935 	  default:
8936 	    break;
8937 	  }
8938       /* FALLTHRU */
8939     default:
8940       if ((ctx || task_shared_vars)
8941 	  && walk_gimple_op (stmt, lower_omp_regimplify_p,
8942 			     ctx ? NULL : &wi))
8943 	{
8944 	  /* Just remove clobbers.  This should happen only if we have
8945 	     "privatized" local addressable variables in SIMD regions;
8946 	     the clobber isn't needed in that case, and gimplifying the
8947 	     address of the ARRAY_REF into a pointer and creating a MEM_REF
8948 	     based clobber would create worse code than we get with the
8949 	     clobber dropped.  */
8950 	  if (gimple_clobber_p (stmt))
8951 	    {
8952 	      gsi_replace (gsi_p, gimple_build_nop (), true);
8953 	      break;
8954 	    }
8955 	  lower_omp_regimplify_operands (ctx, stmt, gsi_p);
8956 	}
8957       break;
8958     }
8959 }
8960 
8961 static void
8962 lower_omp (gimple_seq *body, omp_context *ctx)
8963 {
8964   location_t saved_location = input_location;
8965   gimple_stmt_iterator gsi;
8966   for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
8967     lower_omp_1 (&gsi, ctx);
8968   /* During gimplification, we haven't folded statements inside offloading
8969      or taskreg regions (gimplify.c:maybe_fold_stmt); do that now.  */
8970   if (target_nesting_level || taskreg_nesting_level)
8971     for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
8972       fold_stmt (&gsi);
8973   input_location = saved_location;
8974 }
8975 
8976 /* Main entry point.  */
8977 
8978 static unsigned int
8979 execute_lower_omp (void)
8980 {
8981   gimple_seq body;
8982   int i;
8983   omp_context *ctx;
8984 
8985   /* This pass always runs, to provide PROP_gimple_lomp.
8986      But often, there is nothing to do.  */
8987   if (flag_openacc == 0 && flag_openmp == 0
8988       && flag_openmp_simd == 0)
8989     return 0;
8990 
8991   all_contexts = splay_tree_new (splay_tree_compare_pointers, 0,
8992 				 delete_omp_context);
8993 
8994   body = gimple_body (current_function_decl);
8995 
8996   if (hsa_gen_requested_p ())
8997     omp_grid_gridify_all_targets (&body);
8998 
8999   scan_omp (&body, NULL);
9000   gcc_assert (taskreg_nesting_level == 0);
9001   FOR_EACH_VEC_ELT (taskreg_contexts, i, ctx)
9002     finish_taskreg_scan (ctx);
9003   taskreg_contexts.release ();
9004 
9005   if (all_contexts->root)
9006     {
9007       if (task_shared_vars)
9008 	push_gimplify_context ();
9009       lower_omp (&body, NULL);
9010       if (task_shared_vars)
9011 	pop_gimplify_context (NULL);
9012     }
9013 
9014   if (all_contexts)
9015     {
9016       splay_tree_delete (all_contexts);
9017       all_contexts = NULL;
9018     }
9019   BITMAP_FREE (task_shared_vars);
9020 
9021   /* If the current function is a method, remove the artificial dummy
9022      VAR_DECLs created for non-static data member privatization; they aren't
9023      needed for debuginfo or anything else, have already been replaced
9024      everywhere in the IL, and cause problems with LTO.  */
9025   if (DECL_ARGUMENTS (current_function_decl)
9026       && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
9027       && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
9028 	  == POINTER_TYPE))
9029     remove_member_access_dummy_vars (DECL_INITIAL (current_function_decl));
9030   return 0;
9031 }
9032 
9033 namespace {
9034 
9035 const pass_data pass_data_lower_omp =
9036 {
9037   GIMPLE_PASS, /* type */
9038   "omplower", /* name */
9039   OPTGROUP_OMP, /* optinfo_flags */
9040   TV_NONE, /* tv_id */
9041   PROP_gimple_any, /* properties_required */
9042   PROP_gimple_lomp | PROP_gimple_lomp_dev, /* properties_provided */
9043   0, /* properties_destroyed */
9044   0, /* todo_flags_start */
9045   0, /* todo_flags_finish */
9046 };
9047 
9048 class pass_lower_omp : public gimple_opt_pass
9049 {
9050 public:
9051   pass_lower_omp (gcc::context *ctxt)
9052     : gimple_opt_pass (pass_data_lower_omp, ctxt)
9053   {}
9054 
9055   /* opt_pass methods: */
9056   virtual unsigned int execute (function *) { return execute_lower_omp (); }
9057 
9058 }; // class pass_lower_omp
9059 
9060 } // anon namespace
9061 
9062 gimple_opt_pass *
9063 make_pass_lower_omp (gcc::context *ctxt)
9064 {
9065   return new pass_lower_omp (ctxt);
9066 }
9067 
9068 /* The following is a utility to diagnose structured block violations.
9069    It is not part of the "omplower" pass, as that's invoked too late.  It
9070    should be invoked by the respective front ends after gimplification.  */
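
/* E.g. (hypothetical user code) a branch out of a structured block:

     #pragma omp parallel
     {
       goto bad;
     }
     bad:;

   is diagnosed below as "invalid branch to/from OpenMP structured block".  */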
9071 
9072 static splay_tree all_labels;
9073 
9074 /* Check for mismatched contexts and generate an error if needed.  Return
9075    true if an error is detected.  */
9076 
9077 static bool
9078 diagnose_sb_0 (gimple_stmt_iterator *gsi_p,
9079 	       gimple *branch_ctx, gimple *label_ctx)
9080 {
9081   gcc_checking_assert (!branch_ctx || is_gimple_omp (branch_ctx));
9082   gcc_checking_assert (!label_ctx || is_gimple_omp (label_ctx));
9083 
9084   if (label_ctx == branch_ctx)
9085     return false;
9086 
9087   const char* kind = NULL;
9088 
9089   if (flag_openacc)
9090     {
9091       if ((branch_ctx && is_gimple_omp_oacc (branch_ctx))
9092 	  || (label_ctx && is_gimple_omp_oacc (label_ctx)))
9093 	{
9094 	  gcc_checking_assert (kind == NULL);
9095 	  kind = "OpenACC";
9096 	}
9097     }
9098   if (kind == NULL)
9099     {
9100       gcc_checking_assert (flag_openmp || flag_openmp_simd);
9101       kind = "OpenMP";
9102     }
9103 
9104   /* Previously we kept track of the label's entire context in diagnose_sb_[12]
9105      so we could traverse it and issue a correct "exit" or "enter" error
9106      message upon a structured block violation.
9107 
9108      We built the context by building a list with tree_cons'ing, but there is
9109      no easy counterpart in gimple tuples.  It seems like far too much work
9110      for issuing exit/enter error messages.  If someone really misses the
9111      distinct error message... patches welcome.  */
9112 
9113 #if 0
9114   /* Try to avoid confusing the user by producing an error message
9115      with correct "exit" or "enter" verbiage.  We prefer "exit"
9116      unless we can show that LABEL_CTX is nested within BRANCH_CTX.  */
9117   if (branch_ctx == NULL)
9118     exit_p = false;
9119   else
9120     {
9121       while (label_ctx)
9122 	{
9123 	  if (TREE_VALUE (label_ctx) == branch_ctx)
9124 	    {
9125 	      exit_p = false;
9126 	      break;
9127 	    }
9128 	  label_ctx = TREE_CHAIN (label_ctx);
9129 	}
9130     }
9131 
9132   if (exit_p)
9133     error ("invalid exit from %s structured block", kind);
9134   else
9135     error ("invalid entry to %s structured block", kind);
9136 #endif
9137 
9138   /* If it's obvious we have an invalid entry, be specific about the error.  */
9139   if (branch_ctx == NULL)
9140     error ("invalid entry to %s structured block", kind);
9141   else
9142     {
9143       /* Otherwise, be vague and lazy, but efficient.  */
9144       error ("invalid branch to/from %s structured block", kind);
9145     }
9146 
9147   gsi_replace (gsi_p, gimple_build_nop (), false);
9148   return true;
9149 }
9150 
9151 /* Pass 1: Create a minimal tree of structured blocks, and record
9152    where each label is found.  */
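
/* E.g. a label defined inside a "#pragma omp parallel" body is recorded in
   ALL_LABELS with the enclosing GIMPLE_OMP_PARALLEL statement as its
   context value.  */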
9153 
9154 static tree
9155 diagnose_sb_1 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
9156     	       struct walk_stmt_info *wi)
9157 {
9158   gimple *context = (gimple *) wi->info;
9159   gimple *inner_context;
9160   gimple *stmt = gsi_stmt (*gsi_p);
9161 
9162   *handled_ops_p = true;
9163 
9164   switch (gimple_code (stmt))
9165     {
9166     WALK_SUBSTMTS;
9167 
9168     case GIMPLE_OMP_PARALLEL:
9169     case GIMPLE_OMP_TASK:
9170     case GIMPLE_OMP_SECTIONS:
9171     case GIMPLE_OMP_SINGLE:
9172     case GIMPLE_OMP_SECTION:
9173     case GIMPLE_OMP_MASTER:
9174     case GIMPLE_OMP_ORDERED:
9175     case GIMPLE_OMP_CRITICAL:
9176     case GIMPLE_OMP_TARGET:
9177     case GIMPLE_OMP_TEAMS:
9178     case GIMPLE_OMP_TASKGROUP:
9179       /* The minimal context here is just the current OMP construct.  */
9180       inner_context = stmt;
9181       wi->info = inner_context;
9182       walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
9183       wi->info = context;
9184       break;
9185 
9186     case GIMPLE_OMP_FOR:
9187       inner_context = stmt;
9188       wi->info = inner_context;
9189       /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
9190 	 walk them.  */
9191       walk_gimple_seq (gimple_omp_for_pre_body (stmt),
9192 	  	       diagnose_sb_1, NULL, wi);
9193       walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
9194       wi->info = context;
9195       break;
9196 
9197     case GIMPLE_LABEL:
9198       splay_tree_insert (all_labels,
9199 			 (splay_tree_key) gimple_label_label (
9200 					    as_a <glabel *> (stmt)),
9201 			 (splay_tree_value) context);
9202       break;
9203 
9204     default:
9205       break;
9206     }
9207 
9208   return NULL_TREE;
9209 }
9210 
9211 /* Pass 2: Check each branch and see if its context differs from that of
9212    the destination label's context.  */
9213 
9214 static tree
9215 diagnose_sb_2 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
9216     	       struct walk_stmt_info *wi)
9217 {
9218   gimple *context = (gimple *) wi->info;
9219   splay_tree_node n;
9220   gimple *stmt = gsi_stmt (*gsi_p);
9221 
9222   *handled_ops_p = true;
9223 
9224   switch (gimple_code (stmt))
9225     {
9226     WALK_SUBSTMTS;
9227 
9228     case GIMPLE_OMP_PARALLEL:
9229     case GIMPLE_OMP_TASK:
9230     case GIMPLE_OMP_SECTIONS:
9231     case GIMPLE_OMP_SINGLE:
9232     case GIMPLE_OMP_SECTION:
9233     case GIMPLE_OMP_MASTER:
9234     case GIMPLE_OMP_ORDERED:
9235     case GIMPLE_OMP_CRITICAL:
9236     case GIMPLE_OMP_TARGET:
9237     case GIMPLE_OMP_TEAMS:
9238     case GIMPLE_OMP_TASKGROUP:
9239       wi->info = stmt;
9240       walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
9241       wi->info = context;
9242       break;
9243 
9244     case GIMPLE_OMP_FOR:
9245       wi->info = stmt;
9246       /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
9247 	 walk them.  */
9248       walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt),
9249 			   diagnose_sb_2, NULL, wi);
9250       walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
9251       wi->info = context;
9252       break;
9253 
9254     case GIMPLE_COND:
9255 	{
9256 	  gcond *cond_stmt = as_a <gcond *> (stmt);
9257 	  tree lab = gimple_cond_true_label (cond_stmt);
9258 	  if (lab)
9259 	    {
9260 	      n = splay_tree_lookup (all_labels,
9261 				     (splay_tree_key) lab);
9262 	      diagnose_sb_0 (gsi_p, context,
9263 			     n ? (gimple *) n->value : NULL);
9264 	    }
9265 	  lab = gimple_cond_false_label (cond_stmt);
9266 	  if (lab)
9267 	    {
9268 	      n = splay_tree_lookup (all_labels,
9269 				     (splay_tree_key) lab);
9270 	      diagnose_sb_0 (gsi_p, context,
9271 			     n ? (gimple *) n->value : NULL);
9272 	    }
9273 	}
9274       break;
9275 
9276     case GIMPLE_GOTO:
9277       {
9278 	tree lab = gimple_goto_dest (stmt);
9279 	if (TREE_CODE (lab) != LABEL_DECL)
9280 	  break;
9281 
9282 	n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
9283 	diagnose_sb_0 (gsi_p, context, n ? (gimple *) n->value : NULL);
9284       }
9285       break;
9286 
9287     case GIMPLE_SWITCH:
9288       {
9289 	gswitch *switch_stmt = as_a <gswitch *> (stmt);
9290 	unsigned int i;
9291 	for (i = 0; i < gimple_switch_num_labels (switch_stmt); ++i)
9292 	  {
9293 	    tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
9294 	    n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
9295 	    if (n && diagnose_sb_0 (gsi_p, context, (gimple *) n->value))
9296 	      break;
9297 	  }
9298       }
9299       break;
9300 
9301     case GIMPLE_RETURN:
9302       diagnose_sb_0 (gsi_p, context, NULL);
9303       break;
9304 
9305     default:
9306       break;
9307     }
9308 
9309   return NULL_TREE;
9310 }
9311 
9312 static unsigned int
9313 diagnose_omp_structured_block_errors (void)
9314 {
9315   struct walk_stmt_info wi;
9316   gimple_seq body = gimple_body (current_function_decl);
9317 
9318   all_labels = splay_tree_new (splay_tree_compare_pointers, 0, 0);
9319 
9320   memset (&wi, 0, sizeof (wi));
9321   walk_gimple_seq (body, diagnose_sb_1, NULL, &wi);
9322 
9323   memset (&wi, 0, sizeof (wi));
9324   wi.want_locations = true;
9325   walk_gimple_seq_mod (&body, diagnose_sb_2, NULL, &wi);
9326 
9327   gimple_set_body (current_function_decl, body);
9328 
9329   splay_tree_delete (all_labels);
9330   all_labels = NULL;
9331 
9332   return 0;
9333 }
9334 
9335 namespace {
9336 
9337 const pass_data pass_data_diagnose_omp_blocks =
9338 {
9339   GIMPLE_PASS, /* type */
9340   "*diagnose_omp_blocks", /* name */
9341   OPTGROUP_OMP, /* optinfo_flags */
9342   TV_NONE, /* tv_id */
9343   PROP_gimple_any, /* properties_required */
9344   0, /* properties_provided */
9345   0, /* properties_destroyed */
9346   0, /* todo_flags_start */
9347   0, /* todo_flags_finish */
9348 };
9349 
9350 class pass_diagnose_omp_blocks : public gimple_opt_pass
9351 {
9352 public:
9353   pass_diagnose_omp_blocks (gcc::context *ctxt)
9354     : gimple_opt_pass (pass_data_diagnose_omp_blocks, ctxt)
9355   {}
9356 
9357   /* opt_pass methods: */
9358   virtual bool gate (function *)
9359   {
9360     return flag_openacc || flag_openmp || flag_openmp_simd;
9361   }
9362   virtual unsigned int execute (function *)
9363     {
9364       return diagnose_omp_structured_block_errors ();
9365     }
9366 
9367 }; // class pass_diagnose_omp_blocks
9368 
9369 } // anon namespace
9370 
9371 gimple_opt_pass *
9372 make_pass_diagnose_omp_blocks (gcc::context *ctxt)
9373 {
9374   return new pass_diagnose_omp_blocks (ctxt);
9375 }
9376 
9377 
9378 #include "gt-omp-low.h"
9379