/* Lowering pass for OMP directives.  Converts OMP directives into explicit
   calls to the runtime library (libgomp), data marshalling to implement data
   sharing and copying clauses, offloading to accelerators, and more.

   Contributed by Diego Novillo <dnovillo@redhat.com>

   Copyright (C) 2005-2018 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pass.h"
#include "ssa.h"
#include "cgraph.h"
#include "pretty-print.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "tree-iterator.h"
#include "tree-inline.h"
#include "langhooks.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "splay-tree.h"
#include "omp-general.h"
#include "omp-low.h"
#include "omp-grid.h"
#include "gimple-low.h"
#include "symbol-summary.h"
#include "tree-nested.h"
#include "context.h"
#include "gomp-constants.h"
#include "gimple-pretty-print.h"
#include "hsa-common.h"
#include "stringpool.h"
#include "attribs.h"

/* Lowering of OMP parallel and workshare constructs proceeds in two
   phases.  The first phase scans the function looking for OMP statements
   and then for variables that must be replaced to satisfy data sharing
   clauses.  The second phase expands code for the constructs, as well as
   re-gimplifying things when variables have been replaced with complex
   expressions.

   Final code generation is done by pass_expand_omp.  The flowgraph is
   scanned for regions which are then moved to a new function, to be
   invoked by the thread library, or offloaded.  */
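
/* For instance (a sketch, not the literal output of this file), lowering

     #pragma omp parallel shared (x)
       x++;

   outlines the body into a child function such as foo._omp_fn.0 and
   replaces the construct with a call into libgomp along the lines of

     GOMP_parallel (foo._omp_fn.0, &.omp_data_o, 0, 0);

   where .omp_data_o is a record holding the communicated variables,
   each passed by value or by reference as decided by
   use_pointer_for_field below.  */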

/* Context structure.  Used to store information about each parallel
   directive in the code.  */

struct omp_context
{
  /* This field must be at the beginning, as we do "inheritance": Some
     callback functions for tree-inline.c (e.g., omp_copy_decl)
     receive a copy_body_data pointer that is up-casted to an
     omp_context pointer.  */
  copy_body_data cb;

  /* The tree of contexts corresponding to the encountered constructs.  */
  struct omp_context *outer;
  gimple *stmt;

  /* Map variables to fields in a structure that allows communication
     between sending and receiving threads.  */
  splay_tree field_map;
  tree record_type;
  tree sender_decl;
  tree receiver_decl;

  /* These are used just by task contexts, if task firstprivate fn is
     needed.  srecord_type is used to communicate from the thread
     that encountered the task construct to task firstprivate fn;
     record_type is allocated by GOMP_task, initialized by task firstprivate
     fn and passed to the task body fn.  */
  splay_tree sfield_map;
  tree srecord_type;

  /* A chain of variables to add to the top-level block surrounding the
     construct.  In the case of a parallel, this is in the child function.  */
  tree block_vars;

  /* Label to which GOMP_cancel{,lation_point} and explicit and implicit
     barriers should jump during the omplower pass.  */
  tree cancel_label;

  /* The sibling GIMPLE_OMP_FOR simd with _simt_ clause or NULL
     otherwise.  */
  gimple *simt_stmt;

  /* Nesting depth of this context.  Used to beautify error messages re
     invalid gotos.  The outermost ctx is depth 1, with depth 0 being
     reserved for the main body of the function.  */
  int depth;

  /* True if this parallel directive is nested within another.  */
  bool is_nested;

  /* True if this construct can be cancelled.  */
  bool cancellable;
};

static splay_tree all_contexts;
static int taskreg_nesting_level;
static int target_nesting_level;
static bitmap task_shared_vars;
static vec<omp_context *> taskreg_contexts;

static void scan_omp (gimple_seq *, omp_context *);
static tree scan_omp_1_op (tree *, int *, void *);

#define WALK_SUBSTMTS  \
    case GIMPLE_BIND: \
    case GIMPLE_TRY: \
    case GIMPLE_CATCH: \
    case GIMPLE_EH_FILTER: \
    case GIMPLE_TRANSACTION: \
      /* The sub-statements for these should be walked.  */ \
      *handled_ops_p = false; \
      break;
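
/* WALK_SUBSTMTS is meant to be spliced into switches over gimple_code
   inside walk_gimple_seq callbacks; see omp_find_combined_for below,
   which handles GIMPLE_OMP_FOR itself and lets these wrapper
   statements be walked through.  */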

/* Return true if CTX corresponds to an oacc parallel region.  */

static bool
is_oacc_parallel (omp_context *ctx)
{
  enum gimple_code outer_type = gimple_code (ctx->stmt);
  return ((outer_type == GIMPLE_OMP_TARGET)
	  && (gimple_omp_target_kind (ctx->stmt)
	      == GF_OMP_TARGET_KIND_OACC_PARALLEL));
}

/* Return true if CTX corresponds to an oacc kernels region.  */

static bool
is_oacc_kernels (omp_context *ctx)
{
  enum gimple_code outer_type = gimple_code (ctx->stmt);
  return ((outer_type == GIMPLE_OMP_TARGET)
	  && (gimple_omp_target_kind (ctx->stmt)
	      == GF_OMP_TARGET_KIND_OACC_KERNELS));
}

/* If DECL is the artificial dummy VAR_DECL created for non-static
   data member privatization, return the underlying "this" parameter,
   otherwise return NULL.  */

tree
omp_member_access_dummy_var (tree decl)
{
  if (!VAR_P (decl)
      || !DECL_ARTIFICIAL (decl)
      || !DECL_IGNORED_P (decl)
      || !DECL_HAS_VALUE_EXPR_P (decl)
      || !lang_hooks.decls.omp_disregard_value_expr (decl, false))
    return NULL_TREE;

  tree v = DECL_VALUE_EXPR (decl);
  if (TREE_CODE (v) != COMPONENT_REF)
    return NULL_TREE;

  while (1)
    switch (TREE_CODE (v))
      {
      case COMPONENT_REF:
      case MEM_REF:
      case INDIRECT_REF:
      CASE_CONVERT:
      case POINTER_PLUS_EXPR:
	v = TREE_OPERAND (v, 0);
	continue;
      case PARM_DECL:
	if (DECL_CONTEXT (v) == current_function_decl
	    && DECL_ARTIFICIAL (v)
	    && TREE_CODE (TREE_TYPE (v)) == POINTER_TYPE)
	  return v;
	return NULL_TREE;
      default:
	return NULL_TREE;
      }
}

/* Helper for unshare_and_remap, called through walk_tree.  */

static tree
unshare_and_remap_1 (tree *tp, int *walk_subtrees, void *data)
{
  tree *pair = (tree *) data;
  if (*tp == pair[0])
    {
      *tp = unshare_expr (pair[1]);
      *walk_subtrees = 0;
    }
  else if (IS_TYPE_OR_DECL_P (*tp))
    *walk_subtrees = 0;
  return NULL_TREE;
}

/* Return unshare_expr (X) with all occurrences of FROM
   replaced with TO.  */

static tree
unshare_and_remap (tree x, tree from, tree to)
{
  tree pair[2] = { from, to };
  x = unshare_expr (x);
  walk_tree (&x, unshare_and_remap_1, pair, NULL);
  return x;
}
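
/* For example, given a tree for FROM->a + 1, unshare_and_remap returns
   a fresh TO->a + 1: each occurrence of the node FROM is replaced by
   an unshared copy of TO, and replaced subtrees are not walked
   further.  */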

/* Convenience function for calling scan_omp_1_op on tree operands.  */

static inline tree
scan_omp_op (tree *tp, omp_context *ctx)
{
  struct walk_stmt_info wi;

  memset (&wi, 0, sizeof (wi));
  wi.info = ctx;
  wi.want_locations = true;

  return walk_tree (tp, scan_omp_1_op, &wi, NULL);
}

static void lower_omp (gimple_seq *, omp_context *);
static tree lookup_decl_in_outer_ctx (tree, omp_context *);
static tree maybe_lookup_decl_in_outer_ctx (tree, omp_context *);

/* Return true if CTX is for an omp parallel.  */

static inline bool
is_parallel_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL;
}


/* Return true if CTX is for an omp task.  */

static inline bool
is_task_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_TASK;
}


/* Return true if CTX is for an omp taskloop.  */

static inline bool
is_taskloop_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
	 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP;
}


/* Return true if CTX is for an omp parallel or omp task.  */

static inline bool
is_taskreg_ctx (omp_context *ctx)
{
  return is_parallel_ctx (ctx) || is_task_ctx (ctx);
}

/* Return true if EXPR is variable sized.  */

static inline bool
is_variable_sized (const_tree expr)
{
  return !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
}

/* Lookup variables.  The "maybe" form allows the variable not to have
   been entered; the plain form asserts that it has been.  */

static inline tree
lookup_decl (tree var, omp_context *ctx)
{
  tree *n = ctx->cb.decl_map->get (var);
  return *n;
}

static inline tree
maybe_lookup_decl (const_tree var, omp_context *ctx)
{
  tree *n = ctx->cb.decl_map->get (const_cast<tree> (var));
  return n ? *n : NULL_TREE;
}

static inline tree
lookup_field (tree var, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->field_map, (splay_tree_key) var);
  return (tree) n->value;
}

static inline tree
lookup_sfield (splay_tree_key key, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->sfield_map
			 ? ctx->sfield_map : ctx->field_map, key);
  return (tree) n->value;
}

static inline tree
lookup_sfield (tree var, omp_context *ctx)
{
  return lookup_sfield ((splay_tree_key) var, ctx);
}

static inline tree
maybe_lookup_field (splay_tree_key key, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->field_map, key);
  return n ? (tree) n->value : NULL_TREE;
}

static inline tree
maybe_lookup_field (tree var, omp_context *ctx)
{
  return maybe_lookup_field ((splay_tree_key) var, ctx);
}
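
/* FIELD_MAP/SFIELD_MAP entries are keyed either off the VAR itself or
   off &DECL_UID (VAR) (see the MASK & 8 case in install_var_field
   below), which is why the splay_tree_key overloads above exist
   alongside the tree ones.  */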

/* Return true if DECL should be copied by pointer.  SHARED_CTX is
   the parallel context if DECL is to be shared.  */

static bool
use_pointer_for_field (tree decl, omp_context *shared_ctx)
{
  if (AGGREGATE_TYPE_P (TREE_TYPE (decl))
      || TYPE_ATOMIC (TREE_TYPE (decl)))
    return true;

  /* We can only use copy-in/copy-out semantics for shared variables
     when we know the value is not accessible from an outer scope.  */
  if (shared_ctx)
    {
      gcc_assert (!is_gimple_omp_oacc (shared_ctx->stmt));

      /* ??? Trivially accessible from anywhere.  But why would we even
	 be passing an address in this case?  Should we simply assert
	 this to be false, or should we have a cleanup pass that removes
	 these from the list of mappings?  */
      if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
	return true;

      /* For variables with DECL_HAS_VALUE_EXPR_P set, we cannot tell
	 without analyzing the expression whether or not its location
	 is accessible to anyone else.  In the case of nested parallel
	 regions it certainly may be.  */
      if (TREE_CODE (decl) != RESULT_DECL && DECL_HAS_VALUE_EXPR_P (decl))
	return true;

      /* Do not use copy-in/copy-out for variables that have their
	 address taken.  */
      if (TREE_ADDRESSABLE (decl))
	return true;

      /* lower_send_shared_vars only uses copy-in, but not copy-out
	 for these.  */
      if (TREE_READONLY (decl)
	  || ((TREE_CODE (decl) == RESULT_DECL
	       || TREE_CODE (decl) == PARM_DECL)
	      && DECL_BY_REFERENCE (decl)))
	return false;

      /* Disallow copy-in/out in nested parallel if
	 decl is shared in outer parallel, otherwise
	 each thread could store the shared variable
	 in its own copy-in location, making the
	 variable no longer really shared.  */
      if (shared_ctx->is_nested)
	{
	  omp_context *up;

	  for (up = shared_ctx->outer; up; up = up->outer)
	    if ((is_taskreg_ctx (up)
		 || (gimple_code (up->stmt) == GIMPLE_OMP_TARGET
		     && is_gimple_omp_offloaded (up->stmt)))
		&& maybe_lookup_decl (decl, up))
	      break;

	  if (up)
	    {
	      tree c;

	      if (gimple_code (up->stmt) == GIMPLE_OMP_TARGET)
		{
		  for (c = gimple_omp_target_clauses (up->stmt);
		       c; c = OMP_CLAUSE_CHAIN (c))
		    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
			&& OMP_CLAUSE_DECL (c) == decl)
		      break;
		}
	      else
		for (c = gimple_omp_taskreg_clauses (up->stmt);
		     c; c = OMP_CLAUSE_CHAIN (c))
		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
		      && OMP_CLAUSE_DECL (c) == decl)
		    break;

	      if (c)
		goto maybe_mark_addressable_and_ret;
	    }
	}

      /* For tasks avoid using copy-in/out.  As tasks can be deferred
	 or executed in a different thread, when GOMP_task returns the
	 task hasn't necessarily terminated.  */
      if (is_task_ctx (shared_ctx))
	{
	  tree outer;
	maybe_mark_addressable_and_ret:
	  outer = maybe_lookup_decl_in_outer_ctx (decl, shared_ctx);
	  if (is_gimple_reg (outer) && !omp_member_access_dummy_var (outer))
	    {
	      /* Taking address of OUTER in lower_send_shared_vars
		 might need regimplification of everything that uses the
		 variable.  */
	      if (!task_shared_vars)
		task_shared_vars = BITMAP_ALLOC (NULL);
	      bitmap_set_bit (task_shared_vars, DECL_UID (outer));
	      TREE_ADDRESSABLE (outer) = 1;
	    }
	  return true;
	}
    }

  return false;
}
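
/* In short: aggregates, atomics, statics/externals, variables with
   value-exprs and addressable variables go by pointer; read-only
   scalars and by-reference RESULT/PARM_DECLs get copy-in only; task
   contexts (and decls shared or mapped in an enclosing construct)
   force pass-by-pointer, marking the outer decl addressable; all
   remaining shared scalars use copy-in/copy-out.  */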

/* Construct a new automatic decl similar to VAR.  */

static tree
omp_copy_decl_2 (tree var, tree name, tree type, omp_context *ctx)
{
  tree copy = copy_var_decl (var, name, type);

  DECL_CONTEXT (copy) = current_function_decl;
  DECL_CHAIN (copy) = ctx->block_vars;
  /* If VAR is listed in task_shared_vars, it means it wasn't
     originally addressable and is addressable only because a task
     needs to take its address.  But we don't need to take the address
     of privatizations from that var.  */
  if (TREE_ADDRESSABLE (var)
      && task_shared_vars
      && bitmap_bit_p (task_shared_vars, DECL_UID (var)))
    TREE_ADDRESSABLE (copy) = 0;
  ctx->block_vars = copy;

  return copy;
}

static tree
omp_copy_decl_1 (tree var, omp_context *ctx)
{
  return omp_copy_decl_2 (var, DECL_NAME (var), TREE_TYPE (var), ctx);
}

/* Build COMPONENT_REF and set TREE_THIS_VOLATILE and TREE_READONLY on it
   as appropriate.  */
static tree
omp_build_component_ref (tree obj, tree field)
{
  tree ret = build3 (COMPONENT_REF, TREE_TYPE (field), obj, field, NULL);
  if (TREE_THIS_VOLATILE (field))
    TREE_THIS_VOLATILE (ret) |= 1;
  if (TREE_READONLY (field))
    TREE_READONLY (ret) |= 1;
  return ret;
}

/* Build tree nodes to access the field for VAR on the receiver side.  */

static tree
build_receiver_ref (tree var, bool by_ref, omp_context *ctx)
{
  tree x, field = lookup_field (var, ctx);

  /* If the receiver record type was remapped in the child function,
     remap the field into the new record type.  */
  x = maybe_lookup_field (field, ctx);
  if (x != NULL)
    field = x;

  x = build_simple_mem_ref (ctx->receiver_decl);
  TREE_THIS_NOTRAP (x) = 1;
  x = omp_build_component_ref (x, field);
  if (by_ref)
    {
      x = build_simple_mem_ref (x);
      TREE_THIS_NOTRAP (x) = 1;
    }

  return x;
}

/* Build tree nodes to access VAR in the scope outer to CTX.  In the case
   of a parallel, this is a component reference; for workshare constructs
   this is some variable.  */

static tree
build_outer_var_ref (tree var, omp_context *ctx,
		     enum omp_clause_code code = OMP_CLAUSE_ERROR)
{
  tree x;

  if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
    x = var;
  else if (is_variable_sized (var))
    {
      x = TREE_OPERAND (DECL_VALUE_EXPR (var), 0);
      x = build_outer_var_ref (x, ctx, code);
      x = build_simple_mem_ref (x);
    }
  else if (is_taskreg_ctx (ctx))
    {
      bool by_ref = use_pointer_for_field (var, NULL);
      x = build_receiver_ref (var, by_ref, ctx);
    }
  else if ((gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
	    && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
	   || (code == OMP_CLAUSE_PRIVATE
	       && (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
		   || gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS
		   || gimple_code (ctx->stmt) == GIMPLE_OMP_SINGLE)))
    {
      /* #pragma omp simd isn't a worksharing construct, and can reference
	 even private vars in its linear etc. clauses.
	 Similarly for OMP_CLAUSE_PRIVATE with outer ref, that can refer
	 to private vars in all worksharing constructs.  */
      x = NULL_TREE;
      if (ctx->outer && is_taskreg_ctx (ctx))
	x = lookup_decl (var, ctx->outer);
      else if (ctx->outer)
	x = maybe_lookup_decl_in_outer_ctx (var, ctx);
      if (x == NULL_TREE)
	x = var;
    }
  else if (code == OMP_CLAUSE_LASTPRIVATE && is_taskloop_ctx (ctx))
    {
      gcc_assert (ctx->outer);
      splay_tree_node n
	= splay_tree_lookup (ctx->outer->field_map,
			     (splay_tree_key) &DECL_UID (var));
      if (n == NULL)
	{
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx->outer)))
	    x = var;
	  else
	    x = lookup_decl (var, ctx->outer);
	}
      else
	{
	  tree field = (tree) n->value;
	  /* If the receiver record type was remapped in the child function,
	     remap the field into the new record type.  */
	  x = maybe_lookup_field (field, ctx->outer);
	  if (x != NULL)
	    field = x;

	  x = build_simple_mem_ref (ctx->outer->receiver_decl);
	  x = omp_build_component_ref (x, field);
	  if (use_pointer_for_field (var, ctx->outer))
	    x = build_simple_mem_ref (x);
	}
    }
  else if (ctx->outer)
    {
      omp_context *outer = ctx->outer;
      if (gimple_code (outer->stmt) == GIMPLE_OMP_GRID_BODY)
	{
	  outer = outer->outer;
	  gcc_assert (outer
		      && gimple_code (outer->stmt) != GIMPLE_OMP_GRID_BODY);
	}
      x = lookup_decl (var, outer);
    }
  else if (omp_is_reference (var))
    /* This can happen with orphaned constructs.  If VAR is a
       reference, it is possibly shared and as such valid.  */
    x = var;
  else if (omp_member_access_dummy_var (var))
    x = var;
  else
    gcc_unreachable ();

  if (x == var)
    {
      tree t = omp_member_access_dummy_var (var);
      if (t)
	{
	  x = DECL_VALUE_EXPR (var);
	  tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
	  if (o != t)
	    x = unshare_and_remap (x, t, o);
	  else
	    x = unshare_expr (x);
	}
    }

  if (omp_is_reference (var))
    x = build_simple_mem_ref (x);

  return x;
}

/* Build tree nodes to access the field for VAR on the sender side.  */

static tree
build_sender_ref (splay_tree_key key, omp_context *ctx)
{
  tree field = lookup_sfield (key, ctx);
  return omp_build_component_ref (ctx->sender_decl, field);
}

static tree
build_sender_ref (tree var, omp_context *ctx)
{
  return build_sender_ref ((splay_tree_key) var, ctx);
}

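/* The MASK argument of install_var_field below encodes, as gleaned
   from its callers and body: bit 1 = enter the field in FIELD_MAP /
   RECORD_TYPE; bit 2 = enter it in SFIELD_MAP / SRECORD_TYPE (3 =
   both); bit 4 = declare the field as pointer-to-pointer (array types
   only); bit 8 = key the maps off &DECL_UID (VAR) instead of VAR
   itself, so one VAR can have two distinct entries.  */
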
/* Add a new field for VAR inside the structure CTX->SENDER_DECL.  If
   BASE_POINTERS_RESTRICT, declare the field with restrict.  */

static void
install_var_field (tree var, bool by_ref, int mask, omp_context *ctx,
		   bool base_pointers_restrict = false)
{
  tree field, type, sfield = NULL_TREE;
  splay_tree_key key = (splay_tree_key) var;

  if ((mask & 8) != 0)
    {
      key = (splay_tree_key) &DECL_UID (var);
      gcc_checking_assert (key != (splay_tree_key) var);
    }
  gcc_assert ((mask & 1) == 0
	      || !splay_tree_lookup (ctx->field_map, key));
  gcc_assert ((mask & 2) == 0 || !ctx->sfield_map
	      || !splay_tree_lookup (ctx->sfield_map, key));
  gcc_assert ((mask & 3) == 3
	      || !is_gimple_omp_oacc (ctx->stmt));

  type = TREE_TYPE (var);
  /* Prevent redeclaring the var in the split-off function with a restrict
     pointer type.  Note that we only clear type itself; restrict qualifiers
     in the pointed-to type will be ignored by points-to analysis.  */
  if (POINTER_TYPE_P (type)
      && TYPE_RESTRICT (type))
    type = build_qualified_type (type, TYPE_QUALS (type) & ~TYPE_QUAL_RESTRICT);

  if (mask & 4)
    {
      gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
      type = build_pointer_type (build_pointer_type (type));
    }
  else if (by_ref)
    {
      type = build_pointer_type (type);
      if (base_pointers_restrict)
	type = build_qualified_type (type, TYPE_QUAL_RESTRICT);
    }
  else if ((mask & 3) == 1 && omp_is_reference (var))
    type = TREE_TYPE (type);

  field = build_decl (DECL_SOURCE_LOCATION (var),
		      FIELD_DECL, DECL_NAME (var), type);

  /* Remember what variable this field was created for.  This does have a
     side effect of making dwarf2out ignore this member, so for helpful
     debugging we clear it later in delete_omp_context.  */
  DECL_ABSTRACT_ORIGIN (field) = var;
  if (type == TREE_TYPE (var))
    {
      SET_DECL_ALIGN (field, DECL_ALIGN (var));
      DECL_USER_ALIGN (field) = DECL_USER_ALIGN (var);
      TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (var);
    }
  else
    SET_DECL_ALIGN (field, TYPE_ALIGN (type));

  if ((mask & 3) == 3)
    {
      insert_field_into_struct (ctx->record_type, field);
      if (ctx->srecord_type)
	{
	  sfield = build_decl (DECL_SOURCE_LOCATION (var),
			       FIELD_DECL, DECL_NAME (var), type);
	  DECL_ABSTRACT_ORIGIN (sfield) = var;
	  SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
	  DECL_USER_ALIGN (sfield) = DECL_USER_ALIGN (field);
	  TREE_THIS_VOLATILE (sfield) = TREE_THIS_VOLATILE (field);
	  insert_field_into_struct (ctx->srecord_type, sfield);
	}
    }
  else
    {
      if (ctx->srecord_type == NULL_TREE)
	{
	  tree t;

	  ctx->srecord_type = lang_hooks.types.make_type (RECORD_TYPE);
	  ctx->sfield_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
	  for (t = TYPE_FIELDS (ctx->record_type); t ; t = TREE_CHAIN (t))
	    {
	      sfield = build_decl (DECL_SOURCE_LOCATION (t),
				   FIELD_DECL, DECL_NAME (t), TREE_TYPE (t));
	      DECL_ABSTRACT_ORIGIN (sfield) = DECL_ABSTRACT_ORIGIN (t);
	      insert_field_into_struct (ctx->srecord_type, sfield);
	      splay_tree_insert (ctx->sfield_map,
				 (splay_tree_key) DECL_ABSTRACT_ORIGIN (t),
				 (splay_tree_value) sfield);
	    }
	}
      sfield = field;
      insert_field_into_struct ((mask & 1) ? ctx->record_type
				: ctx->srecord_type, field);
    }

  if (mask & 1)
    splay_tree_insert (ctx->field_map, key, (splay_tree_value) field);
  if ((mask & 2) && ctx->sfield_map)
    splay_tree_insert (ctx->sfield_map, key, (splay_tree_value) sfield);
}

static tree
install_var_local (tree var, omp_context *ctx)
{
  tree new_var = omp_copy_decl_1 (var, ctx);
  insert_decl_map (&ctx->cb, var, new_var);
  return new_var;
}

/* Adjust the replacement for DECL in CTX for the new context.  This means
   copying the DECL_VALUE_EXPR, and fixing up the type.  */

static void
fixup_remapped_decl (tree decl, omp_context *ctx, bool private_debug)
{
  tree new_decl, size;

  new_decl = lookup_decl (decl, ctx);

  TREE_TYPE (new_decl) = remap_type (TREE_TYPE (decl), &ctx->cb);

  if ((!TREE_CONSTANT (DECL_SIZE (new_decl)) || private_debug)
      && DECL_HAS_VALUE_EXPR_P (decl))
    {
      tree ve = DECL_VALUE_EXPR (decl);
      walk_tree (&ve, copy_tree_body_r, &ctx->cb, NULL);
      SET_DECL_VALUE_EXPR (new_decl, ve);
      DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
    }

  if (!TREE_CONSTANT (DECL_SIZE (new_decl)))
    {
      size = remap_decl (DECL_SIZE (decl), &ctx->cb);
      if (size == error_mark_node)
	size = TYPE_SIZE (TREE_TYPE (new_decl));
      DECL_SIZE (new_decl) = size;

      size = remap_decl (DECL_SIZE_UNIT (decl), &ctx->cb);
      if (size == error_mark_node)
	size = TYPE_SIZE_UNIT (TREE_TYPE (new_decl));
      DECL_SIZE_UNIT (new_decl) = size;
    }
}

/* The callback for remap_decl.  Search all containing contexts for a
   mapping of the variable; this avoids having to duplicate the splay
   tree ahead of time.  We know a mapping doesn't already exist in the
   given context.  Create new mappings to implement default semantics.  */

static tree
omp_copy_decl (tree var, copy_body_data *cb)
{
  omp_context *ctx = (omp_context *) cb;
  tree new_var;

  if (TREE_CODE (var) == LABEL_DECL)
    {
      if (FORCED_LABEL (var) || DECL_NONLOCAL (var))
	return var;
      new_var = create_artificial_label (DECL_SOURCE_LOCATION (var));
      DECL_CONTEXT (new_var) = current_function_decl;
      insert_decl_map (&ctx->cb, var, new_var);
      return new_var;
    }

  while (!is_taskreg_ctx (ctx))
    {
      ctx = ctx->outer;
      if (ctx == NULL)
	return var;
      new_var = maybe_lookup_decl (var, ctx);
      if (new_var)
	return new_var;
    }

  if (is_global_var (var) || decl_function_context (var) != ctx->cb.src_fn)
    return var;

  return error_mark_node;
}

/* Create a new context, with OUTER_CTX being the surrounding context.  */

static omp_context *
new_omp_context (gimple *stmt, omp_context *outer_ctx)
{
  omp_context *ctx = XCNEW (omp_context);

  splay_tree_insert (all_contexts, (splay_tree_key) stmt,
		     (splay_tree_value) ctx);
  ctx->stmt = stmt;

  if (outer_ctx)
    {
      ctx->outer = outer_ctx;
      ctx->cb = outer_ctx->cb;
      ctx->cb.block = NULL;
      ctx->depth = outer_ctx->depth + 1;
    }
  else
    {
      ctx->cb.src_fn = current_function_decl;
      ctx->cb.dst_fn = current_function_decl;
      ctx->cb.src_node = cgraph_node::get (current_function_decl);
      gcc_checking_assert (ctx->cb.src_node);
      ctx->cb.dst_node = ctx->cb.src_node;
      ctx->cb.src_cfun = cfun;
      ctx->cb.copy_decl = omp_copy_decl;
      ctx->cb.eh_lp_nr = 0;
      ctx->cb.transform_call_graph_edges = CB_CGE_MOVE;
      ctx->cb.dont_remap_vla_if_no_change = true;
      ctx->depth = 1;
    }

  ctx->cb.decl_map = new hash_map<tree, tree>;

  return ctx;
}

static gimple_seq maybe_catch_exception (gimple_seq);

/* Finalize task copyfn.  */

static void
finalize_task_copyfn (gomp_task *task_stmt)
{
  struct function *child_cfun;
  tree child_fn;
  gimple_seq seq = NULL, new_seq;
  gbind *bind;

  child_fn = gimple_omp_task_copy_fn (task_stmt);
  if (child_fn == NULL_TREE)
    return;

  child_cfun = DECL_STRUCT_FUNCTION (child_fn);
  DECL_STRUCT_FUNCTION (child_fn)->curr_properties = cfun->curr_properties;

  push_cfun (child_cfun);
  bind = gimplify_body (child_fn, false);
  gimple_seq_add_stmt (&seq, bind);
  new_seq = maybe_catch_exception (seq);
  if (new_seq != seq)
    {
      bind = gimple_build_bind (NULL, new_seq, NULL);
      seq = NULL;
      gimple_seq_add_stmt (&seq, bind);
    }
  gimple_set_body (child_fn, seq);
  pop_cfun ();

  /* Inform the callgraph about the new function.  */
  cgraph_node *node = cgraph_node::get_create (child_fn);
  node->parallelized_function = 1;
  cgraph_node::add_new_function (child_fn, false);
}

/* Destroy an omp_context data structure.  Called through the splay tree
   value delete callback.  */

static void
delete_omp_context (splay_tree_value value)
{
  omp_context *ctx = (omp_context *) value;

  delete ctx->cb.decl_map;

  if (ctx->field_map)
    splay_tree_delete (ctx->field_map);
  if (ctx->sfield_map)
    splay_tree_delete (ctx->sfield_map);

  /* We hijacked DECL_ABSTRACT_ORIGIN earlier.  We need to clear it before
     it produces corrupt debug information.  */
  if (ctx->record_type)
    {
      tree t;
      for (t = TYPE_FIELDS (ctx->record_type); t ; t = DECL_CHAIN (t))
	DECL_ABSTRACT_ORIGIN (t) = NULL;
    }
  if (ctx->srecord_type)
    {
      tree t;
      for (t = TYPE_FIELDS (ctx->srecord_type); t ; t = DECL_CHAIN (t))
	DECL_ABSTRACT_ORIGIN (t) = NULL;
    }

  if (is_task_ctx (ctx))
    finalize_task_copyfn (as_a <gomp_task *> (ctx->stmt));

  XDELETE (ctx);
}

/* Fix up RECEIVER_DECL with a type that has been remapped to the child
   context.  */

static void
fixup_child_record_type (omp_context *ctx)
{
  tree f, type = ctx->record_type;

  if (!ctx->receiver_decl)
    return;
  /* ??? It isn't sufficient to just call remap_type here, because
     variably_modified_type_p doesn't work the way we expect for
     record types.  Testing each field for whether it needs remapping
     and creating a new record by hand works, however.  */
  for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
    if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
      break;
  if (f)
    {
      tree name, new_fields = NULL;

      type = lang_hooks.types.make_type (RECORD_TYPE);
      name = DECL_NAME (TYPE_NAME (ctx->record_type));
      name = build_decl (DECL_SOURCE_LOCATION (ctx->receiver_decl),
			 TYPE_DECL, name, type);
      TYPE_NAME (type) = name;

      for (f = TYPE_FIELDS (ctx->record_type); f ; f = DECL_CHAIN (f))
	{
	  tree new_f = copy_node (f);
	  DECL_CONTEXT (new_f) = type;
	  TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &ctx->cb);
	  DECL_CHAIN (new_f) = new_fields;
	  walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &ctx->cb, NULL);
	  walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r,
		     &ctx->cb, NULL);
	  walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
		     &ctx->cb, NULL);
	  new_fields = new_f;

	  /* Arrange to be able to look up the receiver field
	     given the sender field.  */
	  splay_tree_insert (ctx->field_map, (splay_tree_key) f,
			     (splay_tree_value) new_f);
	}
      TYPE_FIELDS (type) = nreverse (new_fields);
      layout_type (type);
    }

  /* In a target region we never modify any of the pointers in *.omp_data_i,
     so attempt to help the optimizers.  */
  if (is_gimple_omp_offloaded (ctx->stmt))
    type = build_qualified_type (type, TYPE_QUAL_CONST);

  TREE_TYPE (ctx->receiver_decl)
    = build_qualified_type (build_reference_type (type), TYPE_QUAL_RESTRICT);
}

/* Instantiate decls as necessary in CTX to satisfy the data sharing
   specified by CLAUSES.  If BASE_POINTERS_RESTRICT, install var field with
   restrict.  */

static void
scan_sharing_clauses (tree clauses, omp_context *ctx,
		      bool base_pointers_restrict = false)
{
  tree c, decl;
  bool scan_array_reductions = false;

  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      bool by_ref;

      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_PRIVATE:
	  decl = OMP_CLAUSE_DECL (c);
	  if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
	    goto do_private;
	  else if (!is_variable_sized (decl))
	    install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_SHARED:
	  decl = OMP_CLAUSE_DECL (c);
	  /* Ignore shared directives in teams construct.  */
	  if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
	    {
	      /* Global variables don't need to be copied,
		 the receiver side will use them directly.  */
	      tree odecl = maybe_lookup_decl_in_outer_ctx (decl, ctx);
	      if (is_global_var (odecl))
		break;
	      insert_decl_map (&ctx->cb, decl, odecl);
	      break;
	    }
	  gcc_assert (is_taskreg_ctx (ctx));
	  gcc_assert (!COMPLETE_TYPE_P (TREE_TYPE (decl))
		      || !is_variable_sized (decl));
	  /* Global variables don't need to be copied,
	     the receiver side will use them directly.  */
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
	    break;
	  if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	    {
	      use_pointer_for_field (decl, ctx);
	      break;
	    }
	  by_ref = use_pointer_for_field (decl, NULL);
	  if ((! TREE_READONLY (decl) && !OMP_CLAUSE_SHARED_READONLY (c))
	      || TREE_ADDRESSABLE (decl)
	      || by_ref
	      || omp_is_reference (decl))
	    {
	      by_ref = use_pointer_for_field (decl, ctx);
	      install_var_field (decl, by_ref, 3, ctx);
	      install_var_local (decl, ctx);
	      break;
	    }
	  /* We don't need to copy const scalar vars back.  */
	  OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_FIRSTPRIVATE);
	  goto do_private;

	case OMP_CLAUSE_REDUCTION:
	  decl = OMP_CLAUSE_DECL (c);
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	      && TREE_CODE (decl) == MEM_REF)
	    {
	      tree t = TREE_OPERAND (decl, 0);
	      if (TREE_CODE (t) == POINTER_PLUS_EXPR)
		t = TREE_OPERAND (t, 0);
	      if (TREE_CODE (t) == INDIRECT_REF
		  || TREE_CODE (t) == ADDR_EXPR)
		t = TREE_OPERAND (t, 0);
	      install_var_local (t, ctx);
	      if (is_taskreg_ctx (ctx)
		  && !is_global_var (maybe_lookup_decl_in_outer_ctx (t, ctx))
		  && !is_variable_sized (t))
		{
		  by_ref = use_pointer_for_field (t, ctx);
		  install_var_field (t, by_ref, 3, ctx);
		}
	      break;
	    }
	  goto do_private;

	case OMP_CLAUSE_LASTPRIVATE:
	  /* Let the corresponding firstprivate clause create
	     the variable.  */
	  if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
	    break;
	  /* FALLTHRU */

	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_LINEAR:
	  decl = OMP_CLAUSE_DECL (c);
	do_private:
	  if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
	       || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
	      && is_gimple_omp_offloaded (ctx->stmt))
	    {
	      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
		install_var_field (decl, !omp_is_reference (decl), 3, ctx);
	      else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
		install_var_field (decl, true, 3, ctx);
	      else
		install_var_field (decl, false, 3, ctx);
	    }
	  if (is_variable_sized (decl))
	    {
	      if (is_task_ctx (ctx))
		install_var_field (decl, false, 1, ctx);
	      break;
	    }
	  else if (is_taskreg_ctx (ctx))
	    {
	      bool global
		= is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx));
	      by_ref = use_pointer_for_field (decl, NULL);

	      if (is_task_ctx (ctx)
		  && (global || by_ref || omp_is_reference (decl)))
		{
		  install_var_field (decl, false, 1, ctx);
		  if (!global)
		    install_var_field (decl, by_ref, 2, ctx);
		}
	      else if (!global)
		install_var_field (decl, by_ref, 3, ctx);
	    }
	  install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_USE_DEVICE_PTR:
	  decl = OMP_CLAUSE_DECL (c);
	  if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	    install_var_field (decl, true, 3, ctx);
	  else
	    install_var_field (decl, false, 3, ctx);
	  if (DECL_SIZE (decl)
	      && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
	    {
	      tree decl2 = DECL_VALUE_EXPR (decl);
	      gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
	      decl2 = TREE_OPERAND (decl2, 0);
	      gcc_assert (DECL_P (decl2));
	      install_var_local (decl2, ctx);
	    }
	  install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_IS_DEVICE_PTR:
	  decl = OMP_CLAUSE_DECL (c);
	  goto do_private;

	case OMP_CLAUSE__LOOPTEMP_:
	  gcc_assert (is_taskreg_ctx (ctx));
	  decl = OMP_CLAUSE_DECL (c);
	  install_var_field (decl, false, 3, ctx);
	  install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_COPYIN:
	  decl = OMP_CLAUSE_DECL (c);
	  by_ref = use_pointer_for_field (decl, NULL);
	  install_var_field (decl, by_ref, 3, ctx);
	  break;

	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_DIST_SCHEDULE:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE_PRIORITY:
	case OMP_CLAUSE_GRAINSIZE:
	case OMP_CLAUSE_NUM_TASKS:
	case OMP_CLAUSE_NUM_GANGS:
	case OMP_CLAUSE_NUM_WORKERS:
	case OMP_CLAUSE_VECTOR_LENGTH:
	  if (ctx->outer)
	    scan_omp_op (&OMP_CLAUSE_OPERAND (c, 0), ctx->outer);
	  break;

	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	case OMP_CLAUSE_MAP:
	  if (ctx->outer)
	    scan_omp_op (&OMP_CLAUSE_SIZE (c), ctx->outer);
	  decl = OMP_CLAUSE_DECL (c);
	  /* Global variables with the "omp declare target" attribute
	     don't need to be copied; the receiver side will use them
	     directly.  However, global variables with the "omp declare
	     target link" attribute must be copied, as must maps using
	     the ALWAYS modifier.  */
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && DECL_P (decl)
	      && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
		   && (OMP_CLAUSE_MAP_KIND (c)
		       != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
		  || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	      && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_TO
	      && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_FROM
	      && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_TOFROM
	      && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
	      && varpool_node::get_create (decl)->offloadable
	      && !lookup_attribute ("omp declare target link",
				    DECL_ATTRIBUTES (decl)))
	    break;
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER)
	    {
	      /* Ignore GOMP_MAP_POINTER kind for arrays in regions that are
		 not offloaded; there is nothing to map for those.  */
	      if (!is_gimple_omp_offloaded (ctx->stmt)
		  && !POINTER_TYPE_P (TREE_TYPE (decl))
		  && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
		break;
	    }
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
		  || (OMP_CLAUSE_MAP_KIND (c)
		      == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
	    {
	      if (TREE_CODE (decl) == COMPONENT_REF
		  || (TREE_CODE (decl) == INDIRECT_REF
		      && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
		      && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
			  == REFERENCE_TYPE)))
		break;
	      if (DECL_SIZE (decl)
		  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  install_var_local (decl2, ctx);
		}
	      install_var_local (decl, ctx);
	      break;
	    }
	  if (DECL_P (decl))
	    {
	      if (DECL_SIZE (decl)
		  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  install_var_field (decl2, true, 3, ctx);
		  install_var_local (decl2, ctx);
		  install_var_local (decl, ctx);
		}
	      else
		{
		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
		      && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
		      && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
		      && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
		    install_var_field (decl, true, 7, ctx);
		  else
		    install_var_field (decl, true, 3, ctx,
				       base_pointers_restrict);
		  if (is_gimple_omp_offloaded (ctx->stmt)
		      && !OMP_CLAUSE_MAP_IN_REDUCTION (c))
		    install_var_local (decl, ctx);
		}
	    }
	  else
	    {
	      tree base = get_base_address (decl);
	      tree nc = OMP_CLAUSE_CHAIN (c);
	      if (DECL_P (base)
		  && nc != NULL_TREE
		  && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
		  && OMP_CLAUSE_DECL (nc) == base
		  && OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_POINTER
		  && integer_zerop (OMP_CLAUSE_SIZE (nc)))
		{
		  OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c) = 1;
		  OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (nc) = 1;
		}
	      else
		{
		  if (ctx->outer)
		    {
		      scan_omp_op (&OMP_CLAUSE_DECL (c), ctx->outer);
		      decl = OMP_CLAUSE_DECL (c);
		    }
		  gcc_assert (!splay_tree_lookup (ctx->field_map,
						  (splay_tree_key) decl));
		  tree field
		    = build_decl (OMP_CLAUSE_LOCATION (c),
				  FIELD_DECL, NULL_TREE, ptr_type_node);
		  SET_DECL_ALIGN (field, TYPE_ALIGN (ptr_type_node));
		  insert_field_into_struct (ctx->record_type, field);
		  splay_tree_insert (ctx->field_map, (splay_tree_key) decl,
				     (splay_tree_value) field);
		}
	    }
	  break;

	case OMP_CLAUSE__GRIDDIM_:
	  if (ctx->outer)
	    {
	      scan_omp_op (&OMP_CLAUSE__GRIDDIM__SIZE (c), ctx->outer);
	      scan_omp_op (&OMP_CLAUSE__GRIDDIM__GROUP (c), ctx->outer);
	    }
	  break;

	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE_SIMDLEN:
	case OMP_CLAUSE_THREADS:
	case OMP_CLAUSE_SIMD:
	case OMP_CLAUSE_NOGROUP:
	case OMP_CLAUSE_DEFAULTMAP:
	case OMP_CLAUSE_ASYNC:
	case OMP_CLAUSE_WAIT:
	case OMP_CLAUSE_GANG:
	case OMP_CLAUSE_WORKER:
	case OMP_CLAUSE_VECTOR:
	case OMP_CLAUSE_INDEPENDENT:
	case OMP_CLAUSE_AUTO:
	case OMP_CLAUSE_SEQ:
	case OMP_CLAUSE_TILE:
	case OMP_CLAUSE__SIMT_:
	case OMP_CLAUSE_DEFAULT:
	  break;

	case OMP_CLAUSE_ALIGNED:
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_global_var (decl)
	      && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	    install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE__CACHE_:
	default:
	  gcc_unreachable ();
	}
    }

  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_LASTPRIVATE:
	  /* Let the corresponding firstprivate clause create
	     the variable.  */
	  if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
	    scan_array_reductions = true;
	  if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
	    break;
	  /* FALLTHRU */

	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_LINEAR:
	case OMP_CLAUSE_IS_DEVICE_PTR:
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_variable_sized (decl))
	    {
	      if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
		   || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
		  && is_gimple_omp_offloaded (ctx->stmt))
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  install_var_local (decl2, ctx);
		  fixup_remapped_decl (decl2, ctx, false);
		}
	      install_var_local (decl, ctx);
	    }
	  fixup_remapped_decl (decl, ctx,
			       OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
			       && OMP_CLAUSE_PRIVATE_DEBUG (c));
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
	      && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
	    scan_array_reductions = true;
	  break;

	case OMP_CLAUSE_REDUCTION:
	  decl = OMP_CLAUSE_DECL (c);
	  if (TREE_CODE (decl) != MEM_REF)
	    {
	      if (is_variable_sized (decl))
		install_var_local (decl, ctx);
	      fixup_remapped_decl (decl, ctx, false);
	    }
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	    scan_array_reductions = true;
	  break;

	case OMP_CLAUSE_SHARED:
	  /* Ignore shared directives in teams construct.  */
	  if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
	    break;
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
	    break;
	  if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	    {
	      if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl,
								 ctx->outer)))
		break;
	      bool by_ref = use_pointer_for_field (decl, ctx);
	      install_var_field (decl, by_ref, 11, ctx);
	      break;
	    }
	  fixup_remapped_decl (decl, ctx, false);
	  break;

	case OMP_CLAUSE_MAP:
	  if (!is_gimple_omp_offloaded (ctx->stmt))
	    break;
	  decl = OMP_CLAUSE_DECL (c);
	  if (DECL_P (decl)
	      && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
		   && (OMP_CLAUSE_MAP_KIND (c)
		       != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
		  || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	      && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
	      && varpool_node::get_create (decl)->offloadable)
	    break;
	  if (DECL_P (decl))
	    {
	      if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
		   || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
		  && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
		  && !COMPLETE_TYPE_P (TREE_TYPE (decl)))
		{
		  tree new_decl = lookup_decl (decl, ctx);
		  TREE_TYPE (new_decl)
		    = remap_type (TREE_TYPE (decl), &ctx->cb);
		}
	      else if (DECL_SIZE (decl)
		       && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  fixup_remapped_decl (decl2, ctx, false);
		  fixup_remapped_decl (decl, ctx, true);
		}
	      else
		fixup_remapped_decl (decl, ctx, false);
	    }
	  break;

	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_DIST_SCHEDULE:
	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE_SIMDLEN:
	case OMP_CLAUSE_ALIGNED:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE__LOOPTEMP_:
	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	case OMP_CLAUSE_PRIORITY:
	case OMP_CLAUSE_GRAINSIZE:
	case OMP_CLAUSE_NUM_TASKS:
	case OMP_CLAUSE_THREADS:
	case OMP_CLAUSE_SIMD:
	case OMP_CLAUSE_NOGROUP:
	case OMP_CLAUSE_DEFAULTMAP:
	case OMP_CLAUSE_USE_DEVICE_PTR:
	case OMP_CLAUSE_ASYNC:
	case OMP_CLAUSE_WAIT:
	case OMP_CLAUSE_NUM_GANGS:
	case OMP_CLAUSE_NUM_WORKERS:
	case OMP_CLAUSE_VECTOR_LENGTH:
	case OMP_CLAUSE_GANG:
	case OMP_CLAUSE_WORKER:
	case OMP_CLAUSE_VECTOR:
	case OMP_CLAUSE_INDEPENDENT:
	case OMP_CLAUSE_AUTO:
	case OMP_CLAUSE_SEQ:
	case OMP_CLAUSE_TILE:
	case OMP_CLAUSE__GRIDDIM_:
	case OMP_CLAUSE__SIMT_:
	  break;

	case OMP_CLAUSE__CACHE_:
	default:
	  gcc_unreachable ();
	}
    }

  gcc_checking_assert (!scan_array_reductions
		       || !is_gimple_omp_oacc (ctx->stmt));
  if (scan_array_reductions)
    {
      for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	    && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	  {
	    scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c), ctx);
	    scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
	  }
	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
		 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
	  scan_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
		 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
	  scan_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
    }
}

/* Create a new name for omp child function.  Returns an identifier.  */

static tree
create_omp_child_function_name (bool task_copy)
{
  return clone_function_name (current_function_decl,
			      task_copy ? "_omp_cpyfn" : "_omp_fn");
}
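
/* The names produced look like foo._omp_fn.0 or foo._omp_cpyfn.1;
   clone_function_name supplies the trailing counter.  */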

/* Return true if CTX may belong to offloaded code: either if current function
   is offloaded, or any enclosing context corresponds to a target region.  */

static bool
omp_maybe_offloaded_ctx (omp_context *ctx)
{
  if (cgraph_node::get (current_function_decl)->offloadable)
    return true;
  for (; ctx; ctx = ctx->outer)
    if (is_gimple_omp_offloaded (ctx->stmt))
      return true;
  return false;
}
1568 
1569 /* Build a decl for the omp child function.  It'll not contain a body
1570    yet, just the bare decl.  */
1571 
1572 static void
create_omp_child_function(omp_context * ctx,bool task_copy)1573 create_omp_child_function (omp_context *ctx, bool task_copy)
1574 {
1575   tree decl, type, name, t;
1576 
1577   name = create_omp_child_function_name (task_copy);
1578   if (task_copy)
1579     type = build_function_type_list (void_type_node, ptr_type_node,
1580 				     ptr_type_node, NULL_TREE);
1581   else
1582     type = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
1583 
1584   decl = build_decl (gimple_location (ctx->stmt), FUNCTION_DECL, name, type);
1585 
1586   gcc_checking_assert (!is_gimple_omp_oacc (ctx->stmt)
1587 		       || !task_copy);
1588   if (!task_copy)
1589     ctx->cb.dst_fn = decl;
1590   else
1591     gimple_omp_task_set_copy_fn (ctx->stmt, decl);
1592 
1593   TREE_STATIC (decl) = 1;
1594   TREE_USED (decl) = 1;
1595   DECL_ARTIFICIAL (decl) = 1;
1596   DECL_IGNORED_P (decl) = 0;
1597   TREE_PUBLIC (decl) = 0;
1598   DECL_UNINLINABLE (decl) = 1;
1599   DECL_EXTERNAL (decl) = 0;
1600   DECL_CONTEXT (decl) = NULL_TREE;
1601   DECL_INITIAL (decl) = make_node (BLOCK);
1602   BLOCK_SUPERCONTEXT (DECL_INITIAL (decl)) = decl;
1603   DECL_ATTRIBUTES (decl) = DECL_ATTRIBUTES (current_function_decl);
1604   /* Remove omp declare simd attribute from the new attributes.  */
1605   if (tree a = lookup_attribute ("omp declare simd", DECL_ATTRIBUTES (decl)))
1606     {
1607       while (tree a2 = lookup_attribute ("omp declare simd", TREE_CHAIN (a)))
1608 	a = a2;
1609       a = TREE_CHAIN (a);
1610       for (tree *p = &DECL_ATTRIBUTES (decl); *p != a;)
1611 	if (is_attribute_p ("omp declare simd", get_attribute_name (*p)))
1612 	  *p = TREE_CHAIN (*p);
1613 	else
1614 	  {
1615 	    tree chain = TREE_CHAIN (*p);
1616 	    *p = copy_node (*p);
1617 	    p = &TREE_CHAIN (*p);
1618 	    *p = chain;
1619 	  }
1620     }
1621   DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)
1622     = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (current_function_decl);
1623   DECL_FUNCTION_SPECIFIC_TARGET (decl)
1624     = DECL_FUNCTION_SPECIFIC_TARGET (current_function_decl);
1625   DECL_FUNCTION_VERSIONED (decl)
1626     = DECL_FUNCTION_VERSIONED (current_function_decl);
1627 
1628   if (omp_maybe_offloaded_ctx (ctx))
1629     {
1630       cgraph_node::get_create (decl)->offloadable = 1;
1631       if (ENABLE_OFFLOADING)
1632 	g->have_offload = true;
1633     }
1634 
1635   if (cgraph_node::get_create (decl)->offloadable
1636       && !lookup_attribute ("omp declare target",
1637                            DECL_ATTRIBUTES (current_function_decl)))
1638     {
1639       const char *target_attr = (is_gimple_omp_offloaded (ctx->stmt)
1640 				 ? "omp target entrypoint"
1641 				 : "omp declare target");
1642       DECL_ATTRIBUTES (decl)
1643 	= tree_cons (get_identifier (target_attr),
1644 		     NULL_TREE, DECL_ATTRIBUTES (decl));
1645     }
1646 
1647   t = build_decl (DECL_SOURCE_LOCATION (decl),
1648 		  RESULT_DECL, NULL_TREE, void_type_node);
1649   DECL_ARTIFICIAL (t) = 1;
1650   DECL_IGNORED_P (t) = 1;
1651   DECL_CONTEXT (t) = decl;
1652   DECL_RESULT (decl) = t;
1653 
1654   tree data_name = get_identifier (".omp_data_i");
1655   t = build_decl (DECL_SOURCE_LOCATION (decl), PARM_DECL, data_name,
1656 		  ptr_type_node);
1657   DECL_ARTIFICIAL (t) = 1;
1658   DECL_NAMELESS (t) = 1;
1659   DECL_ARG_TYPE (t) = ptr_type_node;
1660   DECL_CONTEXT (t) = current_function_decl;
1661   TREE_USED (t) = 1;
1662   TREE_READONLY (t) = 1;
1663   DECL_ARGUMENTS (decl) = t;
1664   if (!task_copy)
1665     ctx->receiver_decl = t;
1666   else
1667     {
1668       t = build_decl (DECL_SOURCE_LOCATION (decl),
1669 		      PARM_DECL, get_identifier (".omp_data_o"),
1670 		      ptr_type_node);
1671       DECL_ARTIFICIAL (t) = 1;
1672       DECL_NAMELESS (t) = 1;
1673       DECL_ARG_TYPE (t) = ptr_type_node;
1674       DECL_CONTEXT (t) = current_function_decl;
1675       TREE_USED (t) = 1;
1676       TREE_ADDRESSABLE (t) = 1;
1677       DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
1678       DECL_ARGUMENTS (decl) = t;
1679     }
1680 
1681   /* Allocate memory for the function structure.  The call to
1682      allocate_struct_function clobbers CFUN, so we need to restore
1683      it afterward.  */
1684   push_struct_function (decl);
1685   cfun->function_end_locus = gimple_location (ctx->stmt);
1686   init_tree_ssa (cfun);
1687   pop_cfun ();
1688 }
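
/* Illustrative sketch of the decls built above (using the clone naming
   scheme, not literal source): for a parallel region in "foo" we get

     static void foo._omp_fn.0 (void *.omp_data_i);

   and, when TASK_COPY, a task copy function

     static void foo._omp_cpyfn.1 (void *.omp_data_o, void *.omp_data_i);

   with .omp_data_o first because it is chained in front of the
   existing DECL_ARGUMENTS above.  */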
1689 
1690 /* Callback for walk_gimple_seq.  Check whether a combined parallel
1691    contains a GIMPLE_OMP_FOR marked with gimple_omp_for_combined_into_p.  */
1692 
1693 tree
1694 omp_find_combined_for (gimple_stmt_iterator *gsi_p,
1695 		       bool *handled_ops_p,
1696 		       struct walk_stmt_info *wi)
1697 {
1698   gimple *stmt = gsi_stmt (*gsi_p);
1699 
1700   *handled_ops_p = true;
1701   switch (gimple_code (stmt))
1702     {
1703     WALK_SUBSTMTS;
1704 
1705     case GIMPLE_OMP_FOR:
1706       if (gimple_omp_for_combined_into_p (stmt)
1707 	  && gimple_omp_for_kind (stmt)
1708 	     == *(const enum gf_mask *) (wi->info))
1709 	{
1710 	  wi->info = stmt;
1711 	  return integer_zero_node;
1712 	}
1713       break;
1714     default:
1715       break;
1716     }
1717   return NULL;
1718 }
1719 
1720 /* Add _LOOPTEMP_ clauses on OpenMP parallel or task.  */
1721 
1722 static void
1723 add_taskreg_looptemp_clauses (enum gf_mask msk, gimple *stmt,
1724 			      omp_context *outer_ctx)
1725 {
1726   struct walk_stmt_info wi;
1727 
1728   memset (&wi, 0, sizeof (wi));
1729   wi.val_only = true;
1730   wi.info = (void *) &msk;
1731   walk_gimple_seq (gimple_omp_body (stmt), omp_find_combined_for, NULL, &wi);
1732   if (wi.info != (void *) &msk)
1733     {
1734       gomp_for *for_stmt = as_a <gomp_for *> ((gimple *) wi.info);
1735       struct omp_for_data fd;
1736       omp_extract_for_data (for_stmt, &fd, NULL);
1737       /* We need two temporaries with fd.iter_type (istart/iend)
1738 	 and then (fd.collapse - 1) temporaries with the same
1739 	 type for count2 ... countN-1 vars if not constant.  */
1740       size_t count = 2, i;
1741       tree type = fd.iter_type;
1742       if (fd.collapse > 1
1743 	  && TREE_CODE (fd.loop.n2) != INTEGER_CST)
1744 	{
1745 	  count += fd.collapse - 1;
1746 	  /* If there are lastprivate clauses on the inner
1747 	     GIMPLE_OMP_FOR, add one more temporary for the total number
1748 	     of iterations (product of count1 ... countN-1).  */
1749 	  if (omp_find_clause (gimple_omp_for_clauses (for_stmt),
1750 			       OMP_CLAUSE_LASTPRIVATE))
1751 	    count++;
1752 	  else if (msk == GF_OMP_FOR_KIND_FOR
1753 		   && omp_find_clause (gimple_omp_parallel_clauses (stmt),
1754 				       OMP_CLAUSE_LASTPRIVATE))
1755 	    count++;
1756 	}
1757       for (i = 0; i < count; i++)
1758 	{
1759 	  tree temp = create_tmp_var (type);
1760 	  tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
1761 	  insert_decl_map (&outer_ctx->cb, temp, temp);
1762 	  OMP_CLAUSE_DECL (c) = temp;
1763 	  OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
1764 	  gimple_omp_taskreg_set_clauses (stmt, c);
1765 	}
1766     }
1767 }
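
/* As an illustration, for a combined construct such as

     #pragma omp parallel for collapse(2) lastprivate(x)

   with a non-constant iteration count, the loop above adds four
   _LOOPTEMP_ clauses: two for istart/iend, one for count2 and one for
   the total iteration count needed by the lastprivate handling.  */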
1768 
1769 /* Scan an OpenMP parallel directive.  */
1770 
1771 static void
1772 scan_omp_parallel (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
1773 {
1774   omp_context *ctx;
1775   tree name;
1776   gomp_parallel *stmt = as_a <gomp_parallel *> (gsi_stmt (*gsi));
1777 
1778   /* Ignore parallel directives with empty bodies, unless there
1779      are copyin clauses.  */
1780   if (optimize > 0
1781       && empty_body_p (gimple_omp_body (stmt))
1782       && omp_find_clause (gimple_omp_parallel_clauses (stmt),
1783 			  OMP_CLAUSE_COPYIN) == NULL)
1784     {
1785       gsi_replace (gsi, gimple_build_nop (), false);
1786       return;
1787     }
1788 
1789   if (gimple_omp_parallel_combined_p (stmt))
1790     add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_FOR, stmt, outer_ctx);
1791 
1792   ctx = new_omp_context (stmt, outer_ctx);
1793   taskreg_contexts.safe_push (ctx);
1794   if (taskreg_nesting_level > 1)
1795     ctx->is_nested = true;
1796   ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
1797   ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
1798   name = create_tmp_var_name (".omp_data_s");
1799   name = build_decl (gimple_location (stmt),
1800 		     TYPE_DECL, name, ctx->record_type);
1801   DECL_ARTIFICIAL (name) = 1;
1802   DECL_NAMELESS (name) = 1;
1803   TYPE_NAME (ctx->record_type) = name;
1804   TYPE_ARTIFICIAL (ctx->record_type) = 1;
1805   if (!gimple_omp_parallel_grid_phony (stmt))
1806     {
1807       create_omp_child_function (ctx, false);
1808       gimple_omp_parallel_set_child_fn (stmt, ctx->cb.dst_fn);
1809     }
1810 
1811   scan_sharing_clauses (gimple_omp_parallel_clauses (stmt), ctx);
1812   scan_omp (gimple_omp_body_ptr (stmt), ctx);
1813 
1814   if (TYPE_FIELDS (ctx->record_type) == NULL)
1815     ctx->record_type = ctx->receiver_decl = NULL;
1816 }
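
/* Roughly speaking, given

     int i = 0;
     #pragma omp parallel shared(i)
       i++;

   the scan creates a context whose .omp_data_s record receives a field
   for "i" via scan_sharing_clauses; if no clause ends up adding any
   field at all, the record and the receiver decl are discarded again
   just above.  */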
1817 
1818 /* Scan an OpenMP task directive.  */
1819 
1820 static void
1821 scan_omp_task (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
1822 {
1823   omp_context *ctx;
1824   tree name, t;
1825   gomp_task *stmt = as_a <gomp_task *> (gsi_stmt (*gsi));
1826 
1827   /* Ignore task directives with empty bodies, unless they have a
1828      depend clause.  */
1829   if (optimize > 0
1830       && empty_body_p (gimple_omp_body (stmt))
1831       && !omp_find_clause (gimple_omp_task_clauses (stmt), OMP_CLAUSE_DEPEND))
1832     {
1833       gsi_replace (gsi, gimple_build_nop (), false);
1834       return;
1835     }
1836 
1837   if (gimple_omp_task_taskloop_p (stmt))
1838     add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_TASKLOOP, stmt, outer_ctx);
1839 
1840   ctx = new_omp_context (stmt, outer_ctx);
1841   taskreg_contexts.safe_push (ctx);
1842   if (taskreg_nesting_level > 1)
1843     ctx->is_nested = true;
1844   ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
1845   ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
1846   name = create_tmp_var_name (".omp_data_s");
1847   name = build_decl (gimple_location (stmt),
1848 		     TYPE_DECL, name, ctx->record_type);
1849   DECL_ARTIFICIAL (name) = 1;
1850   DECL_NAMELESS (name) = 1;
1851   TYPE_NAME (ctx->record_type) = name;
1852   TYPE_ARTIFICIAL (ctx->record_type) = 1;
1853   create_omp_child_function (ctx, false);
1854   gimple_omp_task_set_child_fn (stmt, ctx->cb.dst_fn);
1855 
1856   scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);
1857 
1858   if (ctx->srecord_type)
1859     {
1860       name = create_tmp_var_name (".omp_data_a");
1861       name = build_decl (gimple_location (stmt),
1862 			 TYPE_DECL, name, ctx->srecord_type);
1863       DECL_ARTIFICIAL (name) = 1;
1864       DECL_NAMELESS (name) = 1;
1865       TYPE_NAME (ctx->srecord_type) = name;
1866       TYPE_ARTIFICIAL (ctx->srecord_type) = 1;
1867       create_omp_child_function (ctx, true);
1868     }
1869 
1870   scan_omp (gimple_omp_body_ptr (stmt), ctx);
1871 
1872   if (TYPE_FIELDS (ctx->record_type) == NULL)
1873     {
1874       ctx->record_type = ctx->receiver_decl = NULL;
1875       t = build_int_cst (long_integer_type_node, 0);
1876       gimple_omp_task_set_arg_size (stmt, t);
1877       t = build_int_cst (long_integer_type_node, 1);
1878       gimple_omp_task_set_arg_align (stmt, t);
1879     }
1880 }
1881 
1882 /* Helper function for finish_taskreg_scan, called through walk_tree.
1883    If maybe_lookup_decl_in_outer_ctx returns non-NULL for some
1884    tree, replace it in the expression.  */
1885 
1886 static tree
1887 finish_taskreg_remap (tree *tp, int *walk_subtrees, void *data)
1888 {
1889   if (VAR_P (*tp))
1890     {
1891       omp_context *ctx = (omp_context *) data;
1892       tree t = maybe_lookup_decl_in_outer_ctx (*tp, ctx);
1893       if (t != *tp)
1894 	{
1895 	  if (DECL_HAS_VALUE_EXPR_P (t))
1896 	    t = unshare_expr (DECL_VALUE_EXPR (t));
1897 	  *tp = t;
1898 	}
1899       *walk_subtrees = 0;
1900     }
1901   else if (IS_TYPE_OR_DECL_P (*tp))
1902     *walk_subtrees = 0;
1903   return NULL_TREE;
1904 }
1905 
1906 /* If any decls have been made addressable during scan_omp,
1907    adjust their fields if needed, and lay out the record types
1908    of parallel/task constructs.  */
1909 
1910 static void
1911 finish_taskreg_scan (omp_context *ctx)
1912 {
1913   if (ctx->record_type == NULL_TREE)
1914     return;
1915 
1916   /* If any task_shared_vars were needed, verify that
1917      use_pointer_for_field hasn't changed for the OMP_CLAUSE_SHARED
1918      clauses on GIMPLE_OMP_{PARALLEL,TASK} statements because of
1919      that.  If it did, update the field types now.  */
1920   if (task_shared_vars)
1921     {
1922       tree c;
1923 
1924       for (c = gimple_omp_taskreg_clauses (ctx->stmt);
1925 	   c; c = OMP_CLAUSE_CHAIN (c))
1926 	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
1927 	    && !OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
1928 	  {
1929 	    tree decl = OMP_CLAUSE_DECL (c);
1930 
1931 	    /* Global variables don't need to be copied,
1932 	       the receiver side will use them directly.  */
1933 	    if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1934 	      continue;
1935 	    if (!bitmap_bit_p (task_shared_vars, DECL_UID (decl))
1936 		|| !use_pointer_for_field (decl, ctx))
1937 	      continue;
1938 	    tree field = lookup_field (decl, ctx);
1939 	    if (TREE_CODE (TREE_TYPE (field)) == POINTER_TYPE
1940 		&& TREE_TYPE (TREE_TYPE (field)) == TREE_TYPE (decl))
1941 	      continue;
1942 	    TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
1943 	    TREE_THIS_VOLATILE (field) = 0;
1944 	    DECL_USER_ALIGN (field) = 0;
1945 	    SET_DECL_ALIGN (field, TYPE_ALIGN (TREE_TYPE (field)));
1946 	    if (TYPE_ALIGN (ctx->record_type) < DECL_ALIGN (field))
1947 	      SET_TYPE_ALIGN (ctx->record_type, DECL_ALIGN (field));
1948 	    if (ctx->srecord_type)
1949 	      {
1950 		tree sfield = lookup_sfield (decl, ctx);
1951 		TREE_TYPE (sfield) = TREE_TYPE (field);
1952 		TREE_THIS_VOLATILE (sfield) = 0;
1953 		DECL_USER_ALIGN (sfield) = 0;
1954 		SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
1955 		if (TYPE_ALIGN (ctx->srecord_type) < DECL_ALIGN (sfield))
1956 		  SET_TYPE_ALIGN (ctx->srecord_type, DECL_ALIGN (sfield));
1957 	      }
1958 	  }
1959     }
1960 
1961   if (gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL)
1962     {
1963       layout_type (ctx->record_type);
1964       fixup_child_record_type (ctx);
1965     }
1966   else
1967     {
1968       location_t loc = gimple_location (ctx->stmt);
1969       tree *p, vla_fields = NULL_TREE, *q = &vla_fields;
1970       /* Move VLA fields to the end.  */
1971       p = &TYPE_FIELDS (ctx->record_type);
1972       while (*p)
1973 	if (!TYPE_SIZE_UNIT (TREE_TYPE (*p))
1974 	    || ! TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (*p))))
1975 	  {
1976 	    *q = *p;
1977 	    *p = TREE_CHAIN (*p);
1978 	    TREE_CHAIN (*q) = NULL_TREE;
1979 	    q = &TREE_CHAIN (*q);
1980 	  }
1981 	else
1982 	  p = &DECL_CHAIN (*p);
1983       *p = vla_fields;
1984       if (gimple_omp_task_taskloop_p (ctx->stmt))
1985 	{
1986 	  /* Move the fields corresponding to the first and second
1987 	     _looptemp_ clauses to the front.  These are filled by
1988 	     GOMP_taskloop and thus need to be in specific positions.  */
1989 	  tree c1 = gimple_omp_task_clauses (ctx->stmt);
1990 	  c1 = omp_find_clause (c1, OMP_CLAUSE__LOOPTEMP_);
1991 	  tree c2 = omp_find_clause (OMP_CLAUSE_CHAIN (c1),
1992 				     OMP_CLAUSE__LOOPTEMP_);
1993 	  tree f1 = lookup_field (OMP_CLAUSE_DECL (c1), ctx);
1994 	  tree f2 = lookup_field (OMP_CLAUSE_DECL (c2), ctx);
1995 	  p = &TYPE_FIELDS (ctx->record_type);
1996 	  while (*p)
1997 	    if (*p == f1 || *p == f2)
1998 	      *p = DECL_CHAIN (*p);
1999 	    else
2000 	      p = &DECL_CHAIN (*p);
2001 	  DECL_CHAIN (f1) = f2;
2002 	  DECL_CHAIN (f2) = TYPE_FIELDS (ctx->record_type);
2003 	  TYPE_FIELDS (ctx->record_type) = f1;
2004 	  if (ctx->srecord_type)
2005 	    {
2006 	      f1 = lookup_sfield (OMP_CLAUSE_DECL (c1), ctx);
2007 	      f2 = lookup_sfield (OMP_CLAUSE_DECL (c2), ctx);
2008 	      p = &TYPE_FIELDS (ctx->srecord_type);
2009 	      while (*p)
2010 		if (*p == f1 || *p == f2)
2011 		  *p = DECL_CHAIN (*p);
2012 		else
2013 		  p = &DECL_CHAIN (*p);
2014 	      DECL_CHAIN (f1) = f2;
2015 	      DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
2016 	      TYPE_FIELDS (ctx->srecord_type) = f1;
2017 	    }
2018 	}
2019       layout_type (ctx->record_type);
2020       fixup_child_record_type (ctx);
2021       if (ctx->srecord_type)
2022 	layout_type (ctx->srecord_type);
2023       tree t = fold_convert_loc (loc, long_integer_type_node,
2024 				 TYPE_SIZE_UNIT (ctx->record_type));
2025       if (TREE_CODE (t) != INTEGER_CST)
2026 	{
2027 	  t = unshare_expr (t);
2028 	  walk_tree (&t, finish_taskreg_remap, ctx, NULL);
2029 	}
2030       gimple_omp_task_set_arg_size (ctx->stmt, t);
2031       t = build_int_cst (long_integer_type_node,
2032 			 TYPE_ALIGN_UNIT (ctx->record_type));
2033       gimple_omp_task_set_arg_align (ctx->stmt, t);
2034     }
2035 }
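
/* For a taskloop, GOMP_taskloop fills the first two slots of the
   argument block with the start and end iteration values, which is why
   the two _looptemp_ fields are forced to the front above, while
   VLA-sized fields, whose offsets cannot be laid out statically, are
   moved to the end.  */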
2036 
2037 /* Find the enclosing offload context.  */
2038 
2039 static omp_context *
2040 enclosing_target_ctx (omp_context *ctx)
2041 {
2042   for (; ctx; ctx = ctx->outer)
2043     if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET)
2044       break;
2045 
2046   return ctx;
2047 }
2048 
2049 /* Return true if ctx is part of an oacc kernels region.  */
2050 
2051 static bool
2052 ctx_in_oacc_kernels_region (omp_context *ctx)
2053 {
2054   for (; ctx != NULL; ctx = ctx->outer)
2055     {
2056       gimple *stmt = ctx->stmt;
2057       if (gimple_code (stmt) == GIMPLE_OMP_TARGET
2058 	  && gimple_omp_target_kind (stmt) == GF_OMP_TARGET_KIND_OACC_KERNELS)
2059 	return true;
2060     }
2061 
2062   return false;
2063 }
2064 
2065 /* Check the parallelism clauses inside a kernels region.
2066    Until kernels handling moves to use the same loop indirection
2067    scheme as parallel, we need to do this checking early.  */
2068 
2069 static unsigned
2070 check_oacc_kernel_gwv (gomp_for *stmt, omp_context *ctx)
2071 {
2072   bool checking = true;
2073   unsigned outer_mask = 0;
2074   unsigned this_mask = 0;
2075   bool has_seq = false, has_auto = false;
2076 
2077   if (ctx->outer)
2078     outer_mask = check_oacc_kernel_gwv (NULL, ctx->outer);
2079   if (!stmt)
2080     {
2081       checking = false;
2082       if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR)
2083 	return outer_mask;
2084       stmt = as_a <gomp_for *> (ctx->stmt);
2085     }
2086 
2087   for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2088     {
2089       switch (OMP_CLAUSE_CODE (c))
2090 	{
2091 	case OMP_CLAUSE_GANG:
2092 	  this_mask |= GOMP_DIM_MASK (GOMP_DIM_GANG);
2093 	  break;
2094 	case OMP_CLAUSE_WORKER:
2095 	  this_mask |= GOMP_DIM_MASK (GOMP_DIM_WORKER);
2096 	  break;
2097 	case OMP_CLAUSE_VECTOR:
2098 	  this_mask |= GOMP_DIM_MASK (GOMP_DIM_VECTOR);
2099 	  break;
2100 	case OMP_CLAUSE_SEQ:
2101 	  has_seq = true;
2102 	  break;
2103 	case OMP_CLAUSE_AUTO:
2104 	  has_auto = true;
2105 	  break;
2106 	default:
2107 	  break;
2108 	}
2109     }
2110 
2111   if (checking)
2112     {
2113       if (has_seq && (this_mask || has_auto))
2114 	error_at (gimple_location (stmt), "%<seq%> overrides other"
2115 		  " OpenACC loop specifiers");
2116       else if (has_auto && this_mask)
2117 	error_at (gimple_location (stmt), "%<auto%> conflicts with other"
2118 		  " OpenACC loop specifiers");
2119 
2120       if (this_mask & outer_mask)
2121 	error_at (gimple_location (stmt), "inner loop uses same"
2122 		  " OpenACC parallelism as containing loop");
2123     }
2124 
2125   return outer_mask | this_mask;
2126 }
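
/* For example, in a made-up kernels region

     #pragma acc kernels
     #pragma acc loop gang
     for (...)
       #pragma acc loop gang
       for (...)

   the recursive walk accumulates GOMP_DIM_MASK (GOMP_DIM_GANG) into
   OUTER_MASK, so the inner gang clause triggers the "inner loop uses
   same OpenACC parallelism as containing loop" error above.  */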
2127 
2128 /* Scan a GIMPLE_OMP_FOR.  */
2129 
2130 static omp_context *
2131 scan_omp_for (gomp_for *stmt, omp_context *outer_ctx)
2132 {
2133   omp_context *ctx;
2134   size_t i;
2135   tree clauses = gimple_omp_for_clauses (stmt);
2136 
2137   ctx = new_omp_context (stmt, outer_ctx);
2138 
2139   if (is_gimple_omp_oacc (stmt))
2140     {
2141       omp_context *tgt = enclosing_target_ctx (outer_ctx);
2142 
2143       if (!tgt || is_oacc_parallel (tgt))
2144 	for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
2145 	  {
2146 	    char const *check = NULL;
2147 
2148 	    switch (OMP_CLAUSE_CODE (c))
2149 	      {
2150 	      case OMP_CLAUSE_GANG:
2151 		check = "gang";
2152 		break;
2153 
2154 	      case OMP_CLAUSE_WORKER:
2155 		check = "worker";
2156 		break;
2157 
2158 	      case OMP_CLAUSE_VECTOR:
2159 		check = "vector";
2160 		break;
2161 
2162 	      default:
2163 		break;
2164 	      }
2165 
2166 	    if (check && OMP_CLAUSE_OPERAND (c, 0))
2167 	      error_at (gimple_location (stmt),
2168 			"argument not permitted on %qs clause in"
2169 			" OpenACC %<parallel%>", check);
2170 	  }
2171 
2172       if (tgt && is_oacc_kernels (tgt))
2173 	{
2174 	  /* Strip out reductions, as they are not handled yet.  */
2175 	  tree *prev_ptr = &clauses;
2176 
2177 	  while (tree probe = *prev_ptr)
2178 	    {
2179 	      tree *next_ptr = &OMP_CLAUSE_CHAIN (probe);
2180 
2181 	      if (OMP_CLAUSE_CODE (probe) == OMP_CLAUSE_REDUCTION)
2182 		*prev_ptr = *next_ptr;
2183 	      else
2184 		prev_ptr = next_ptr;
2185 	    }
2186 
2187 	  gimple_omp_for_set_clauses (stmt, clauses);
2188 	  check_oacc_kernel_gwv (stmt, ctx);
2189 	}
2190     }
2191 
2192   scan_sharing_clauses (clauses, ctx);
2193 
2194   scan_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
2195   for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
2196     {
2197       scan_omp_op (gimple_omp_for_index_ptr (stmt, i), ctx);
2198       scan_omp_op (gimple_omp_for_initial_ptr (stmt, i), ctx);
2199       scan_omp_op (gimple_omp_for_final_ptr (stmt, i), ctx);
2200       scan_omp_op (gimple_omp_for_incr_ptr (stmt, i), ctx);
2201     }
2202   scan_omp (gimple_omp_body_ptr (stmt), ctx);
2203   return ctx;
2204 }
2205 
2206 /* Duplicate a #pragma omp simd construct, creating one copy for SIMT and another for SIMD.  */
2207 
2208 static void
2209 scan_omp_simd (gimple_stmt_iterator *gsi, gomp_for *stmt,
2210 	       omp_context *outer_ctx)
2211 {
2212   gbind *bind = gimple_build_bind (NULL, NULL, NULL);
2213   gsi_replace (gsi, bind, false);
2214   gimple_seq seq = NULL;
2215   gimple *g = gimple_build_call_internal (IFN_GOMP_USE_SIMT, 0);
2216   tree cond = create_tmp_var_raw (integer_type_node);
2217   DECL_CONTEXT (cond) = current_function_decl;
2218   DECL_SEEN_IN_BIND_EXPR_P (cond) = 1;
2219   gimple_bind_set_vars (bind, cond);
2220   gimple_call_set_lhs (g, cond);
2221   gimple_seq_add_stmt (&seq, g);
2222   tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
2223   tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
2224   tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
2225   g = gimple_build_cond (NE_EXPR, cond, integer_zero_node, lab1, lab2);
2226   gimple_seq_add_stmt (&seq, g);
2227   g = gimple_build_label (lab1);
2228   gimple_seq_add_stmt (&seq, g);
2229   gimple_seq new_seq = copy_gimple_seq_and_replace_locals (stmt);
2230   gomp_for *new_stmt = as_a <gomp_for *> (new_seq);
2231   tree clause = build_omp_clause (gimple_location (stmt), OMP_CLAUSE__SIMT_);
2232   OMP_CLAUSE_CHAIN (clause) = gimple_omp_for_clauses (new_stmt);
2233   gimple_omp_for_set_clauses (new_stmt, clause);
2234   gimple_seq_add_stmt (&seq, new_stmt);
2235   g = gimple_build_goto (lab3);
2236   gimple_seq_add_stmt (&seq, g);
2237   g = gimple_build_label (lab2);
2238   gimple_seq_add_stmt (&seq, g);
2239   gimple_seq_add_stmt (&seq, stmt);
2240   g = gimple_build_label (lab3);
2241   gimple_seq_add_stmt (&seq, g);
2242   gimple_bind_set_body (bind, seq);
2243   update_stmt (bind);
2244   scan_omp_for (new_stmt, outer_ctx);
2245   scan_omp_for (stmt, outer_ctx)->simt_stmt = new_stmt;
2246 }
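
/* Schematically, the sequence built above is

     cond = IFN_GOMP_USE_SIMT ();
     if (cond != 0) goto lab1; else goto lab2;
     lab1:
       <copy of the loop, with an added _simt_ clause>
       goto lab3;
     lab2:
       <original loop>
     lab3:

   so a single source loop is scanned twice and the SIMT/SIMD choice is
   deferred until the variant for the actual target is expanded.  */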
2247 
2248 /* Scan an OpenMP sections directive.  */
2249 
2250 static void
2251 scan_omp_sections (gomp_sections *stmt, omp_context *outer_ctx)
2252 {
2253   omp_context *ctx;
2254 
2255   ctx = new_omp_context (stmt, outer_ctx);
2256   scan_sharing_clauses (gimple_omp_sections_clauses (stmt), ctx);
2257   scan_omp (gimple_omp_body_ptr (stmt), ctx);
2258 }
2259 
2260 /* Scan an OpenMP single directive.  */
2261 
2262 static void
2263 scan_omp_single (gomp_single *stmt, omp_context *outer_ctx)
2264 {
2265   omp_context *ctx;
2266   tree name;
2267 
2268   ctx = new_omp_context (stmt, outer_ctx);
2269   ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2270   ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2271   name = create_tmp_var_name (".omp_copy_s");
2272   name = build_decl (gimple_location (stmt),
2273 		     TYPE_DECL, name, ctx->record_type);
2274   TYPE_NAME (ctx->record_type) = name;
2275 
2276   scan_sharing_clauses (gimple_omp_single_clauses (stmt), ctx);
2277   scan_omp (gimple_omp_body_ptr (stmt), ctx);
2278 
2279   if (TYPE_FIELDS (ctx->record_type) == NULL)
2280     ctx->record_type = NULL;
2281   else
2282     layout_type (ctx->record_type);
2283 }
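
/* The .omp_copy_s record built here is only populated for
   copyprivate; e.g. given

     #pragma omp single copyprivate(x)

   scan_sharing_clauses adds a field for "x" so the value can be
   broadcast from the thread that executed the single region to the
   other threads.  */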
2284 
2285 /* Return true if the CLAUSES of an omp target guarantee that the base pointers
2286    used in the corresponding offloaded function are restrict.  */
2287 
2288 static bool
2289 omp_target_base_pointers_restrict_p (tree clauses)
2290 {
2291   /* The analysis relies on the GOMP_MAP_FORCE_* mapping kinds, which are only
2292      used by OpenACC.  */
2293   if (flag_openacc == 0)
2294     return false;
2295 
2296   /* I.  Basic example:
2297 
2298        void foo (void)
2299        {
2300 	 unsigned int a[2], b[2];
2301 
2302 	 #pragma acc kernels \
2303 	   copyout (a) \
2304 	   copyout (b)
2305 	 {
2306 	   a[0] = 0;
2307 	   b[0] = 1;
2308 	 }
2309        }
2310 
2311      After gimplification, we have:
2312 
2313        #pragma omp target oacc_kernels \
2314 	 map(force_from:a [len: 8]) \
2315 	 map(force_from:b [len: 8])
2316        {
2317 	 a[0] = 0;
2318 	 b[0] = 1;
2319        }
2320 
2321      Because both mappings have the force prefix, we know that they will be
2322      allocated when calling the corresponding offloaded function, which means we
2323      can mark the base pointers for a and b in the offloaded function as
2324      restrict.  */
2325 
2326   tree c;
2327   for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
2328     {
2329       if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP)
2330 	return false;
2331 
2332       switch (OMP_CLAUSE_MAP_KIND (c))
2333 	{
2334 	case GOMP_MAP_FORCE_ALLOC:
2335 	case GOMP_MAP_FORCE_TO:
2336 	case GOMP_MAP_FORCE_FROM:
2337 	case GOMP_MAP_FORCE_TOFROM:
2338 	  break;
2339 	default:
2340 	  return false;
2341 	}
2342     }
2343 
2344   return true;
2345 }
2346 
2347 /* Scan a GIMPLE_OMP_TARGET.  */
2348 
2349 static void
2350 scan_omp_target (gomp_target *stmt, omp_context *outer_ctx)
2351 {
2352   omp_context *ctx;
2353   tree name;
2354   bool offloaded = is_gimple_omp_offloaded (stmt);
2355   tree clauses = gimple_omp_target_clauses (stmt);
2356 
2357   ctx = new_omp_context (stmt, outer_ctx);
2358   ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2359   ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2360   name = create_tmp_var_name (".omp_data_t");
2361   name = build_decl (gimple_location (stmt),
2362 		     TYPE_DECL, name, ctx->record_type);
2363   DECL_ARTIFICIAL (name) = 1;
2364   DECL_NAMELESS (name) = 1;
2365   TYPE_NAME (ctx->record_type) = name;
2366   TYPE_ARTIFICIAL (ctx->record_type) = 1;
2367 
2368   bool base_pointers_restrict = false;
2369   if (offloaded)
2370     {
2371       create_omp_child_function (ctx, false);
2372       gimple_omp_target_set_child_fn (stmt, ctx->cb.dst_fn);
2373 
2374       base_pointers_restrict = omp_target_base_pointers_restrict_p (clauses);
2375       if (base_pointers_restrict
2376 	  && dump_file && (dump_flags & TDF_DETAILS))
2377 	fprintf (dump_file,
2378 		 "Base pointers in offloaded function are restrict\n");
2379     }
2380 
2381   scan_sharing_clauses (clauses, ctx, base_pointers_restrict);
2382   scan_omp (gimple_omp_body_ptr (stmt), ctx);
2383 
2384   if (TYPE_FIELDS (ctx->record_type) == NULL)
2385     ctx->record_type = ctx->receiver_decl = NULL;
2386   else
2387     {
2388       TYPE_FIELDS (ctx->record_type)
2389 	= nreverse (TYPE_FIELDS (ctx->record_type));
2390       if (flag_checking)
2391 	{
2392 	  unsigned int align = DECL_ALIGN (TYPE_FIELDS (ctx->record_type));
2393 	  for (tree field = TYPE_FIELDS (ctx->record_type);
2394 	       field;
2395 	       field = DECL_CHAIN (field))
2396 	    gcc_assert (DECL_ALIGN (field) == align);
2397 	}
2398       layout_type (ctx->record_type);
2399       if (offloaded)
2400 	fixup_child_record_type (ctx);
2401     }
2402 }
2403 
2404 /* Scan an OpenMP teams directive.  */
2405 
2406 static void
2407 scan_omp_teams (gomp_teams *stmt, omp_context *outer_ctx)
2408 {
2409   omp_context *ctx = new_omp_context (stmt, outer_ctx);
2410   scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
2411   scan_omp (gimple_omp_body_ptr (stmt), ctx);
2412 }
2413 
2414 /* Check nesting restrictions.  */
2415 static bool
2416 check_omp_nesting_restrictions (gimple *stmt, omp_context *ctx)
2417 {
2418   tree c;
2419 
2420   if (ctx && gimple_code (ctx->stmt) == GIMPLE_OMP_GRID_BODY)
2421     /* GRID_BODY is an artificial construct, nesting rules will be checked in
2422        the original copy of its contents.  */
2423     return true;
2424 
2425   /* No nesting of non-OpenACC STMT (that is, an OpenMP one, or a GOMP builtin)
2426      inside an OpenACC CTX.  */
2427   if (!(is_gimple_omp (stmt)
2428 	&& is_gimple_omp_oacc (stmt))
2429       /* Except for atomic codes that we share with OpenMP.  */
2430       && !(gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
2431 	   || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE))
2432     {
2433       if (oacc_get_fn_attrib (cfun->decl) != NULL)
2434 	{
2435 	  error_at (gimple_location (stmt),
2436 		    "non-OpenACC construct inside of OpenACC routine");
2437 	  return false;
2438 	}
2439       else
2440 	for (omp_context *octx = ctx; octx != NULL; octx = octx->outer)
2441 	  if (is_gimple_omp (octx->stmt)
2442 	      && is_gimple_omp_oacc (octx->stmt))
2443 	    {
2444 	      error_at (gimple_location (stmt),
2445 			"non-OpenACC construct inside of OpenACC region");
2446 	      return false;
2447 	    }
2448     }
2449 
2450   if (ctx != NULL)
2451     {
2452       if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
2453 	  && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
2454 	{
2455 	  c = NULL_TREE;
2456 	  if (gimple_code (stmt) == GIMPLE_OMP_ORDERED)
2457 	    {
2458 	      c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
2459 	      if (omp_find_clause (c, OMP_CLAUSE_SIMD))
2460 		{
2461 		  if (omp_find_clause (c, OMP_CLAUSE_THREADS)
2462 		      && (ctx->outer == NULL
2463 			  || !gimple_omp_for_combined_into_p (ctx->stmt)
2464 			  || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR
2465 			  || (gimple_omp_for_kind (ctx->outer->stmt)
2466 			      != GF_OMP_FOR_KIND_FOR)
2467 			  || !gimple_omp_for_combined_p (ctx->outer->stmt)))
2468 		    {
2469 		      error_at (gimple_location (stmt),
2470 				"%<ordered simd threads%> must be closely "
2471 				"nested inside of %<for simd%> region");
2472 		      return false;
2473 		    }
2474 		  return true;
2475 		}
2476 	    }
2477 	  error_at (gimple_location (stmt),
2478 		    "OpenMP constructs other than %<#pragma omp ordered simd%>"
2479 		    " may not be nested inside %<simd%> region");
2480 	  return false;
2481 	}
2482       else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
2483 	{
2484 	  if ((gimple_code (stmt) != GIMPLE_OMP_FOR
2485 	       || ((gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_DISTRIBUTE)
2486 		   && (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_GRID_LOOP)))
2487 	      && gimple_code (stmt) != GIMPLE_OMP_PARALLEL)
2488 	    {
2489 	      error_at (gimple_location (stmt),
2490 			"only %<distribute%> or %<parallel%> regions are "
2491 			"allowed to be strictly nested inside %<teams%> "
2492 			"region");
2493 	      return false;
2494 	    }
2495 	}
2496     }
2497   switch (gimple_code (stmt))
2498     {
2499     case GIMPLE_OMP_FOR:
2500       if (gimple_omp_for_kind (stmt) & GF_OMP_FOR_SIMD)
2501 	return true;
2502       if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_DISTRIBUTE)
2503 	{
2504 	  if (ctx != NULL && gimple_code (ctx->stmt) != GIMPLE_OMP_TEAMS)
2505 	    {
2506 	      error_at (gimple_location (stmt),
2507 			"%<distribute%> region must be strictly nested "
2508 			"inside %<teams%> construct");
2509 	      return false;
2510 	    }
2511 	  return true;
2512 	}
2513       /* We split taskloop into a task construct with a nested taskloop inside it.  */
2514       if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP)
2515 	return true;
2516       if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_OACC_LOOP)
2517 	{
2518 	  bool ok = false;
2519 
2520 	  if (ctx)
2521 	    switch (gimple_code (ctx->stmt))
2522 	      {
2523 	      case GIMPLE_OMP_FOR:
2524 		ok = (gimple_omp_for_kind (ctx->stmt)
2525 		      == GF_OMP_FOR_KIND_OACC_LOOP);
2526 		break;
2527 
2528 	      case GIMPLE_OMP_TARGET:
2529 		switch (gimple_omp_target_kind (ctx->stmt))
2530 		  {
2531 		  case GF_OMP_TARGET_KIND_OACC_PARALLEL:
2532 		  case GF_OMP_TARGET_KIND_OACC_KERNELS:
2533 		    ok = true;
2534 		    break;
2535 
2536 		  default:
2537 		    break;
2538 		  }
2539 
2540 	      default:
2541 		break;
2542 	      }
2543 	  else if (oacc_get_fn_attrib (current_function_decl))
2544 	    ok = true;
2545 	  if (!ok)
2546 	    {
2547 	      error_at (gimple_location (stmt),
2548 			"OpenACC loop directive must be associated with"
2549 			" an OpenACC compute region");
2550 	      return false;
2551 	    }
2552 	}
2553       /* FALLTHRU */
2554     case GIMPLE_CALL:
2555       if (is_gimple_call (stmt)
2556 	  && (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2557 	      == BUILT_IN_GOMP_CANCEL
2558 	      || DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2559 		 == BUILT_IN_GOMP_CANCELLATION_POINT))
2560 	{
2561 	  const char *bad = NULL;
2562 	  const char *kind = NULL;
2563 	  const char *construct
2564 	    = (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2565 	       == BUILT_IN_GOMP_CANCEL)
2566 	      ? "#pragma omp cancel"
2567 	      : "#pragma omp cancellation point";
2568 	  if (ctx == NULL)
2569 	    {
2570 	      error_at (gimple_location (stmt), "orphaned %qs construct",
2571 			construct);
2572 	      return false;
2573 	    }
2574 	  switch (tree_fits_shwi_p (gimple_call_arg (stmt, 0))
2575 		  ? tree_to_shwi (gimple_call_arg (stmt, 0))
2576 		  : 0)
2577 	    {
2578 	    case 1:
2579 	      if (gimple_code (ctx->stmt) != GIMPLE_OMP_PARALLEL)
2580 		bad = "#pragma omp parallel";
2581 	      else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2582 		       == BUILT_IN_GOMP_CANCEL
2583 		       && !integer_zerop (gimple_call_arg (stmt, 1)))
2584 		ctx->cancellable = true;
2585 	      kind = "parallel";
2586 	      break;
2587 	    case 2:
2588 	      if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
2589 		  || gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR)
2590 		bad = "#pragma omp for";
2591 	      else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2592 		       == BUILT_IN_GOMP_CANCEL
2593 		       && !integer_zerop (gimple_call_arg (stmt, 1)))
2594 		{
2595 		  ctx->cancellable = true;
2596 		  if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2597 				       OMP_CLAUSE_NOWAIT))
2598 		    warning_at (gimple_location (stmt), 0,
2599 				"%<#pragma omp cancel for%> inside "
2600 				"%<nowait%> for construct");
2601 		  if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2602 				       OMP_CLAUSE_ORDERED))
2603 		    warning_at (gimple_location (stmt), 0,
2604 				"%<#pragma omp cancel for%> inside "
2605 				"%<ordered%> for construct");
2606 		}
2607 	      kind = "for";
2608 	      break;
2609 	    case 4:
2610 	      if (gimple_code (ctx->stmt) != GIMPLE_OMP_SECTIONS
2611 		  && gimple_code (ctx->stmt) != GIMPLE_OMP_SECTION)
2612 		bad = "#pragma omp sections";
2613 	      else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2614 		       == BUILT_IN_GOMP_CANCEL
2615 		       && !integer_zerop (gimple_call_arg (stmt, 1)))
2616 		{
2617 		  if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
2618 		    {
2619 		      ctx->cancellable = true;
2620 		      if (omp_find_clause (gimple_omp_sections_clauses
2621 								(ctx->stmt),
2622 					   OMP_CLAUSE_NOWAIT))
2623 			warning_at (gimple_location (stmt), 0,
2624 				    "%<#pragma omp cancel sections%> inside "
2625 				    "%<nowait%> sections construct");
2626 		    }
2627 		  else
2628 		    {
2629 		      gcc_assert (ctx->outer
2630 				  && gimple_code (ctx->outer->stmt)
2631 				     == GIMPLE_OMP_SECTIONS);
2632 		      ctx->outer->cancellable = true;
2633 		      if (omp_find_clause (gimple_omp_sections_clauses
2634 							(ctx->outer->stmt),
2635 					   OMP_CLAUSE_NOWAIT))
2636 			warning_at (gimple_location (stmt), 0,
2637 				    "%<#pragma omp cancel sections%> inside "
2638 				    "%<nowait%> sections construct");
2639 		    }
2640 		}
2641 	      kind = "sections";
2642 	      break;
2643 	    case 8:
2644 	      if (gimple_code (ctx->stmt) != GIMPLE_OMP_TASK)
2645 		bad = "#pragma omp task";
2646 	      else
2647 		{
2648 		  for (omp_context *octx = ctx->outer;
2649 		       octx; octx = octx->outer)
2650 		    {
2651 		      switch (gimple_code (octx->stmt))
2652 			{
2653 			case GIMPLE_OMP_TASKGROUP:
2654 			  break;
2655 			case GIMPLE_OMP_TARGET:
2656 			  if (gimple_omp_target_kind (octx->stmt)
2657 			      != GF_OMP_TARGET_KIND_REGION)
2658 			    continue;
2659 			  /* FALLTHRU */
2660 			case GIMPLE_OMP_PARALLEL:
2661 			case GIMPLE_OMP_TEAMS:
2662 			  error_at (gimple_location (stmt),
2663 				    "%<%s taskgroup%> construct not closely "
2664 				    "nested inside of %<taskgroup%> region",
2665 				    construct);
2666 			  return false;
2667 			default:
2668 			  continue;
2669 			}
2670 		      break;
2671 		    }
2672 		  ctx->cancellable = true;
2673 		}
2674 	      kind = "taskgroup";
2675 	      break;
2676 	    default:
2677 	      error_at (gimple_location (stmt), "invalid arguments");
2678 	      return false;
2679 	    }
2680 	  if (bad)
2681 	    {
2682 	      error_at (gimple_location (stmt),
2683 			"%<%s %s%> construct not closely nested inside of %qs",
2684 			construct, kind, bad);
2685 	      return false;
2686 	    }
2687 	}
2688       /* FALLTHRU */
2689     case GIMPLE_OMP_SECTIONS:
2690     case GIMPLE_OMP_SINGLE:
2691       for (; ctx != NULL; ctx = ctx->outer)
2692 	switch (gimple_code (ctx->stmt))
2693 	  {
2694 	  case GIMPLE_OMP_FOR:
2695 	    if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
2696 		&& gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
2697 	      break;
2698 	    /* FALLTHRU */
2699 	  case GIMPLE_OMP_SECTIONS:
2700 	  case GIMPLE_OMP_SINGLE:
2701 	  case GIMPLE_OMP_ORDERED:
2702 	  case GIMPLE_OMP_MASTER:
2703 	  case GIMPLE_OMP_TASK:
2704 	  case GIMPLE_OMP_CRITICAL:
2705 	    if (is_gimple_call (stmt))
2706 	      {
2707 		if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2708 		    != BUILT_IN_GOMP_BARRIER)
2709 		  return true;
2710 		error_at (gimple_location (stmt),
2711 			  "barrier region may not be closely nested inside "
2712 			  "of work-sharing, %<critical%>, %<ordered%>, "
2713 			  "%<master%>, explicit %<task%> or %<taskloop%> "
2714 			  "region");
2715 		return false;
2716 	      }
2717 	    error_at (gimple_location (stmt),
2718 		      "work-sharing region may not be closely nested inside "
2719 		      "of work-sharing, %<critical%>, %<ordered%>, "
2720 		      "%<master%>, explicit %<task%> or %<taskloop%> region");
2721 	    return false;
2722 	  case GIMPLE_OMP_PARALLEL:
2723 	  case GIMPLE_OMP_TEAMS:
2724 	    return true;
2725 	  case GIMPLE_OMP_TARGET:
2726 	    if (gimple_omp_target_kind (ctx->stmt)
2727 		== GF_OMP_TARGET_KIND_REGION)
2728 	      return true;
2729 	    break;
2730 	  default:
2731 	    break;
2732 	  }
2733       break;
2734     case GIMPLE_OMP_MASTER:
2735       for (; ctx != NULL; ctx = ctx->outer)
2736 	switch (gimple_code (ctx->stmt))
2737 	  {
2738 	  case GIMPLE_OMP_FOR:
2739 	    if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
2740 		&& gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
2741 	      break;
2742 	    /* FALLTHRU */
2743 	  case GIMPLE_OMP_SECTIONS:
2744 	  case GIMPLE_OMP_SINGLE:
2745 	  case GIMPLE_OMP_TASK:
2746 	    error_at (gimple_location (stmt),
2747 		      "%<master%> region may not be closely nested inside "
2748 		      "of work-sharing, explicit %<task%> or %<taskloop%> "
2749 		      "region");
2750 	    return false;
2751 	  case GIMPLE_OMP_PARALLEL:
2752 	  case GIMPLE_OMP_TEAMS:
2753 	    return true;
2754 	  case GIMPLE_OMP_TARGET:
2755 	    if (gimple_omp_target_kind (ctx->stmt)
2756 		== GF_OMP_TARGET_KIND_REGION)
2757 	      return true;
2758 	    break;
2759 	  default:
2760 	    break;
2761 	  }
2762       break;
2763     case GIMPLE_OMP_TASK:
2764       for (c = gimple_omp_task_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2765 	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
2766 	    && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
2767 		|| OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
2768 	  {
2769 	    enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
2770 	    error_at (OMP_CLAUSE_LOCATION (c),
2771 		      "%<depend(%s)%> is only allowed in %<omp ordered%>",
2772 		      kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
2773 	    return false;
2774 	  }
2775       break;
2776     case GIMPLE_OMP_ORDERED:
2777       for (c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
2778 	   c; c = OMP_CLAUSE_CHAIN (c))
2779 	{
2780 	  if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
2781 	    {
2782 	      gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREADS
2783 			  || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SIMD);
2784 	      continue;
2785 	    }
2786 	  enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
2787 	  if (kind == OMP_CLAUSE_DEPEND_SOURCE
2788 	      || kind == OMP_CLAUSE_DEPEND_SINK)
2789 	    {
2790 	      tree oclause;
2791 	      /* Look for containing ordered(N) loop.  */
2792 	      if (ctx == NULL
2793 		  || gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
2794 		  || (oclause
2795 			= omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2796 					   OMP_CLAUSE_ORDERED)) == NULL_TREE)
2797 		{
2798 		  error_at (OMP_CLAUSE_LOCATION (c),
2799 			    "%<ordered%> construct with %<depend%> clause "
2800 			    "must be closely nested inside an %<ordered%> "
2801 			    "loop");
2802 		  return false;
2803 		}
2804 	      else if (OMP_CLAUSE_ORDERED_EXPR (oclause) == NULL_TREE)
2805 		{
2806 		  error_at (OMP_CLAUSE_LOCATION (c),
2807 			    "%<ordered%> construct with %<depend%> clause "
2808 			    "must be closely nested inside a loop with "
2809 			    "%<ordered%> clause with a parameter");
2810 		  return false;
2811 		}
2812 	    }
2813 	  else
2814 	    {
2815 	      error_at (OMP_CLAUSE_LOCATION (c),
2816 			"invalid depend kind in omp %<ordered%> %<depend%>");
2817 	      return false;
2818 	    }
2819 	}
2820       c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
2821       if (omp_find_clause (c, OMP_CLAUSE_SIMD))
2822 	{
2823 	  /* ordered simd must be closely nested inside of a simd region,
2824 	     and a simd region must not encounter constructs other than
2825 	     ordered simd, therefore ordered simd may be either orphaned,
2826 	     or ctx->stmt must be simd.  The latter case is already
2827 	     handled earlier.  */
2828 	  if (ctx != NULL)
2829 	    {
2830 	      error_at (gimple_location (stmt),
2831 			"%<ordered%> %<simd%> must be closely nested inside "
2832 			"%<simd%> region");
2833 	      return false;
2834 	    }
2835 	}
2836       for (; ctx != NULL; ctx = ctx->outer)
2837 	switch (gimple_code (ctx->stmt))
2838 	  {
2839 	  case GIMPLE_OMP_CRITICAL:
2840 	  case GIMPLE_OMP_TASK:
2841 	  case GIMPLE_OMP_ORDERED:
2842 	  ordered_in_taskloop:
2843 	    error_at (gimple_location (stmt),
2844 		      "%<ordered%> region may not be closely nested inside "
2845 		      "of %<critical%>, %<ordered%>, explicit %<task%> or "
2846 		      "%<taskloop%> region");
2847 	    return false;
2848 	  case GIMPLE_OMP_FOR:
2849 	    if (gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP)
2850 	      goto ordered_in_taskloop;
2851 	    tree o;
2852 	    o = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2853 				 OMP_CLAUSE_ORDERED);
2854 	    if (o == NULL)
2855 	      {
2856 		error_at (gimple_location (stmt),
2857 			  "%<ordered%> region must be closely nested inside "
2858 			  "a loop region with an %<ordered%> clause");
2859 		return false;
2860 	      }
2861 	    if (OMP_CLAUSE_ORDERED_EXPR (o) != NULL_TREE
2862 		&& omp_find_clause (c, OMP_CLAUSE_DEPEND) == NULL_TREE)
2863 	      {
2864 		error_at (gimple_location (stmt),
2865 			  "%<ordered%> region without %<depend%> clause may "
2866 			  "not be closely nested inside a loop region with "
2867 			  "an %<ordered%> clause with a parameter");
2868 		return false;
2869 	      }
2870 	    return true;
2871 	  case GIMPLE_OMP_TARGET:
2872 	    if (gimple_omp_target_kind (ctx->stmt)
2873 		!= GF_OMP_TARGET_KIND_REGION)
2874 	      break;
2875 	    /* FALLTHRU */
2876 	  case GIMPLE_OMP_PARALLEL:
2877 	  case GIMPLE_OMP_TEAMS:
2878 	    error_at (gimple_location (stmt),
2879 		      "%<ordered%> region must be closely nested inside "
2880 		      "a loop region with an %<ordered%> clause");
2881 	    return false;
2882 	  default:
2883 	    break;
2884 	  }
2885       break;
2886     case GIMPLE_OMP_CRITICAL:
2887       {
2888 	tree this_stmt_name
2889 	  = gimple_omp_critical_name (as_a <gomp_critical *> (stmt));
2890 	for (; ctx != NULL; ctx = ctx->outer)
2891 	  if (gomp_critical *other_crit
2892 	        = dyn_cast <gomp_critical *> (ctx->stmt))
2893 	    if (this_stmt_name == gimple_omp_critical_name (other_crit))
2894 	      {
2895 		error_at (gimple_location (stmt),
2896 			  "%<critical%> region may not be nested inside "
2897 			   "a %<critical%> region with the same name");
2898 		return false;
2899 	      }
2900       }
2901       break;
2902     case GIMPLE_OMP_TEAMS:
2903       if (ctx == NULL
2904 	  || gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET
2905 	  || gimple_omp_target_kind (ctx->stmt) != GF_OMP_TARGET_KIND_REGION)
2906 	{
2907 	  error_at (gimple_location (stmt),
2908 		    "%<teams%> construct not closely nested inside of "
2909 		    "%<target%> construct");
2910 	  return false;
2911 	}
2912       break;
2913     case GIMPLE_OMP_TARGET:
2914       for (c = gimple_omp_target_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2915 	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
2916 	    && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
2917 		|| OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
2918 	  {
2919 	    enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
2920 	    error_at (OMP_CLAUSE_LOCATION (c),
2921 		      "%<depend(%s)%> is only allowed in %<omp ordered%>",
2922 		      kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
2923 	    return false;
2924 	  }
2925       if (is_gimple_omp_offloaded (stmt)
2926 	  && oacc_get_fn_attrib (cfun->decl) != NULL)
2927 	{
2928 	  error_at (gimple_location (stmt),
2929 		    "OpenACC region inside of OpenACC routine, nested "
2930 		    "parallelism not supported yet");
2931 	  return false;
2932 	}
2933       for (; ctx != NULL; ctx = ctx->outer)
2934 	{
2935 	  if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET)
2936 	    {
2937 	      if (is_gimple_omp (stmt)
2938 		  && is_gimple_omp_oacc (stmt)
2939 		  && is_gimple_omp (ctx->stmt))
2940 		{
2941 		  error_at (gimple_location (stmt),
2942 			    "OpenACC construct inside of non-OpenACC region");
2943 		  return false;
2944 		}
2945 	      continue;
2946 	    }
2947 
2948 	  const char *stmt_name, *ctx_stmt_name;
2949 	  switch (gimple_omp_target_kind (stmt))
2950 	    {
2951 	    case GF_OMP_TARGET_KIND_REGION: stmt_name = "target"; break;
2952 	    case GF_OMP_TARGET_KIND_DATA: stmt_name = "target data"; break;
2953 	    case GF_OMP_TARGET_KIND_UPDATE: stmt_name = "target update"; break;
2954 	    case GF_OMP_TARGET_KIND_ENTER_DATA:
2955 	      stmt_name = "target enter data"; break;
2956 	    case GF_OMP_TARGET_KIND_EXIT_DATA:
2957 	      stmt_name = "target exit data"; break;
2958 	    case GF_OMP_TARGET_KIND_OACC_PARALLEL: stmt_name = "parallel"; break;
2959 	    case GF_OMP_TARGET_KIND_OACC_KERNELS: stmt_name = "kernels"; break;
2960 	    case GF_OMP_TARGET_KIND_OACC_DATA: stmt_name = "data"; break;
2961 	    case GF_OMP_TARGET_KIND_OACC_UPDATE: stmt_name = "update"; break;
2962 	    case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA:
2963 	      stmt_name = "enter/exit data"; break;
2964 	    case GF_OMP_TARGET_KIND_OACC_HOST_DATA: stmt_name = "host_data";
2965 	      break;
2966 	    default: gcc_unreachable ();
2967 	    }
2968 	  switch (gimple_omp_target_kind (ctx->stmt))
2969 	    {
2970 	    case GF_OMP_TARGET_KIND_REGION: ctx_stmt_name = "target"; break;
2971 	    case GF_OMP_TARGET_KIND_DATA: ctx_stmt_name = "target data"; break;
2972 	    case GF_OMP_TARGET_KIND_OACC_PARALLEL:
2973 	      ctx_stmt_name = "parallel"; break;
2974 	    case GF_OMP_TARGET_KIND_OACC_KERNELS:
2975 	      ctx_stmt_name = "kernels"; break;
2976 	    case GF_OMP_TARGET_KIND_OACC_DATA: ctx_stmt_name = "data"; break;
2977 	    case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
2978 	      ctx_stmt_name = "host_data"; break;
2979 	    default: gcc_unreachable ();
2980 	    }
2981 
2982 	  /* OpenACC/OpenMP mismatch?  */
2983 	  if (is_gimple_omp_oacc (stmt)
2984 	      != is_gimple_omp_oacc (ctx->stmt))
2985 	    {
2986 	      error_at (gimple_location (stmt),
2987 			"%s %qs construct inside of %s %qs region",
2988 			(is_gimple_omp_oacc (stmt)
2989 			 ? "OpenACC" : "OpenMP"), stmt_name,
2990 			(is_gimple_omp_oacc (ctx->stmt)
2991 			 ? "OpenACC" : "OpenMP"), ctx_stmt_name);
2992 	      return false;
2993 	    }
2994 	  if (is_gimple_omp_offloaded (ctx->stmt))
2995 	    {
2996 	      /* No GIMPLE_OMP_TARGET inside offloaded OpenACC CTX.  */
2997 	      if (is_gimple_omp_oacc (ctx->stmt))
2998 		{
2999 		  error_at (gimple_location (stmt),
3000 			    "%qs construct inside of %qs region",
3001 			    stmt_name, ctx_stmt_name);
3002 		  return false;
3003 		}
3004 	      else
3005 		{
3006 		  warning_at (gimple_location (stmt), 0,
3007 			      "%qs construct inside of %qs region",
3008 			      stmt_name, ctx_stmt_name);
3009 		}
3010 	    }
3011 	}
3012       break;
3013     default:
3014       break;
3015     }
3016   return true;
3017 }
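
/* Two made-up examples of nestings rejected above:

     #pragma omp teams
     #pragma omp single	     (only distribute or parallel regions
			      may be strictly nested in teams)

     #pragma omp simd
     for (...)
       #pragma omp parallel  (nothing but ordered simd may appear
			      inside a simd region)  */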
3018 
3019 
3020 /* Helper function for scan_omp.
3021 
3022    Callback for walk_tree or operators in walk_gimple_stmt used to
3023    scan for OMP directives in TP.  */
3024 
3025 static tree
3026 scan_omp_1_op (tree *tp, int *walk_subtrees, void *data)
3027 {
3028   struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
3029   omp_context *ctx = (omp_context *) wi->info;
3030   tree t = *tp;
3031 
3032   switch (TREE_CODE (t))
3033     {
3034     case VAR_DECL:
3035     case PARM_DECL:
3036     case LABEL_DECL:
3037     case RESULT_DECL:
3038       if (ctx)
3039 	{
3040 	  tree repl = remap_decl (t, &ctx->cb);
3041 	  gcc_checking_assert (TREE_CODE (repl) != ERROR_MARK);
3042 	  *tp = repl;
3043 	}
3044       break;
3045 
3046     default:
3047       if (ctx && TYPE_P (t))
3048 	*tp = remap_type (t, &ctx->cb);
3049       else if (!DECL_P (t))
3050 	{
3051 	  *walk_subtrees = 1;
3052 	  if (ctx)
3053 	    {
3054 	      tree tem = remap_type (TREE_TYPE (t), &ctx->cb);
3055 	      if (tem != TREE_TYPE (t))
3056 		{
3057 		  if (TREE_CODE (t) == INTEGER_CST)
3058 		    *tp = wide_int_to_tree (tem, wi::to_wide (t));
3059 		  else
3060 		    TREE_TYPE (t) = tem;
3061 		}
3062 	    }
3063 	}
3064       break;
3065     }
3066 
3067   return NULL_TREE;
3068 }
3069 
3070 /* Return true if FNDECL is a setjmp or a longjmp.  */
3071 
3072 static bool
3073 setjmp_or_longjmp_p (const_tree fndecl)
3074 {
3075   if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
3076       && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_SETJMP
3077 	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_LONGJMP))
3078     return true;
3079 
3080   tree declname = DECL_NAME (fndecl);
3081   if (!declname)
3082     return false;
3083   const char *name = IDENTIFIER_POINTER (declname);
3084   return !strcmp (name, "setjmp") || !strcmp (name, "longjmp");
3085 }
3086 
3087 
3088 /* Helper function for scan_omp.
3089 
3090    Callback for walk_gimple_stmt used to scan for OMP directives in
3091    the current statement in GSI.  */
3092 
3093 static tree
3094 scan_omp_1_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
3095 		 struct walk_stmt_info *wi)
3096 {
3097   gimple *stmt = gsi_stmt (*gsi);
3098   omp_context *ctx = (omp_context *) wi->info;
3099 
3100   if (gimple_has_location (stmt))
3101     input_location = gimple_location (stmt);
3102 
3103   /* Check the nesting restrictions.  */
3104   bool remove = false;
3105   if (is_gimple_omp (stmt))
3106     remove = !check_omp_nesting_restrictions (stmt, ctx);
3107   else if (is_gimple_call (stmt))
3108     {
3109       tree fndecl = gimple_call_fndecl (stmt);
3110       if (fndecl)
3111 	{
3112 	  if (setjmp_or_longjmp_p (fndecl)
3113 	      && ctx
3114 	      && gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
3115 	      && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
3116 	    {
3117 	      remove = true;
3118 	      error_at (gimple_location (stmt),
3119 			"setjmp/longjmp inside simd construct");
3120 	    }
3121 	  else if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3122 	    switch (DECL_FUNCTION_CODE (fndecl))
3123 	      {
3124 	      case BUILT_IN_GOMP_BARRIER:
3125 	      case BUILT_IN_GOMP_CANCEL:
3126 	      case BUILT_IN_GOMP_CANCELLATION_POINT:
3127 	      case BUILT_IN_GOMP_TASKYIELD:
3128 	      case BUILT_IN_GOMP_TASKWAIT:
3129 	      case BUILT_IN_GOMP_TASKGROUP_START:
3130 	      case BUILT_IN_GOMP_TASKGROUP_END:
3131 		remove = !check_omp_nesting_restrictions (stmt, ctx);
3132 		break;
3133 	      default:
3134 		break;
3135 	      }
3136 	}
3137     }
3138   if (remove)
3139     {
3140       stmt = gimple_build_nop ();
3141       gsi_replace (gsi, stmt, false);
3142     }
3143 
3144   *handled_ops_p = true;
3145 
3146   switch (gimple_code (stmt))
3147     {
3148     case GIMPLE_OMP_PARALLEL:
3149       taskreg_nesting_level++;
3150       scan_omp_parallel (gsi, ctx);
3151       taskreg_nesting_level--;
3152       break;
3153 
3154     case GIMPLE_OMP_TASK:
3155       taskreg_nesting_level++;
3156       scan_omp_task (gsi, ctx);
3157       taskreg_nesting_level--;
3158       break;
3159 
3160     case GIMPLE_OMP_FOR:
3161       if (((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
3162 	    & GF_OMP_FOR_KIND_MASK) == GF_OMP_FOR_KIND_SIMD)
3163 	  && omp_maybe_offloaded_ctx (ctx)
3164 	  && omp_max_simt_vf ())
3165 	scan_omp_simd (gsi, as_a <gomp_for *> (stmt), ctx);
3166       else
3167 	scan_omp_for (as_a <gomp_for *> (stmt), ctx);
3168       break;
3169 
3170     case GIMPLE_OMP_SECTIONS:
3171       scan_omp_sections (as_a <gomp_sections *> (stmt), ctx);
3172       break;
3173 
3174     case GIMPLE_OMP_SINGLE:
3175       scan_omp_single (as_a <gomp_single *> (stmt), ctx);
3176       break;
3177 
3178     case GIMPLE_OMP_SECTION:
3179     case GIMPLE_OMP_MASTER:
3180     case GIMPLE_OMP_TASKGROUP:
3181     case GIMPLE_OMP_ORDERED:
3182     case GIMPLE_OMP_CRITICAL:
3183     case GIMPLE_OMP_GRID_BODY:
3184       ctx = new_omp_context (stmt, ctx);
3185       scan_omp (gimple_omp_body_ptr (stmt), ctx);
3186       break;
3187 
3188     case GIMPLE_OMP_TARGET:
3189       if (is_gimple_omp_offloaded (stmt))
3190 	{
3191 	  taskreg_nesting_level++;
3192 	  scan_omp_target (as_a <gomp_target *> (stmt), ctx);
3193 	  taskreg_nesting_level--;
3194 	}
3195       else
3196 	scan_omp_target (as_a <gomp_target *> (stmt), ctx);
3197       break;
3198 
3199     case GIMPLE_OMP_TEAMS:
3200       scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
3201       break;
3202 
3203     case GIMPLE_BIND:
3204       {
3205 	tree var;
3206 
3207 	*handled_ops_p = false;
3208 	if (ctx)
3209 	  for (var = gimple_bind_vars (as_a <gbind *> (stmt));
3210 	       var ;
3211 	       var = DECL_CHAIN (var))
3212 	    insert_decl_map (&ctx->cb, var, var);
3213       }
3214       break;
3215     default:
3216       *handled_ops_p = false;
3217       break;
3218     }
3219 
3220   return NULL_TREE;
3221 }
3222 
3223 
3224 /* Scan all the statements starting at the current statement.  CTX
3225    contains context information about the OMP directives and
3226    clauses found during the scan.  */
3227 
3228 static void
3229 scan_omp (gimple_seq *body_p, omp_context *ctx)
3230 {
3231   location_t saved_location;
3232   struct walk_stmt_info wi;
3233 
3234   memset (&wi, 0, sizeof (wi));
3235   wi.info = ctx;
3236   wi.want_locations = true;
3237 
3238   saved_location = input_location;
3239   walk_gimple_seq_mod (body_p, scan_omp_1_stmt, scan_omp_1_op, &wi);
3240   input_location = saved_location;
3241 }
3242 
3243 /* Re-gimplification and code generation routines.  */
3244 
3245 /* Remove omp_member_access_dummy_var variables from gimple_bind_vars
3246    of BIND if in a method.  */
3247 
3248 static void
3249 maybe_remove_omp_member_access_dummy_vars (gbind *bind)
3250 {
3251   if (DECL_ARGUMENTS (current_function_decl)
3252       && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
3253       && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
3254 	  == POINTER_TYPE))
3255     {
3256       tree vars = gimple_bind_vars (bind);
3257       for (tree *pvar = &vars; *pvar; )
3258 	if (omp_member_access_dummy_var (*pvar))
3259 	  *pvar = DECL_CHAIN (*pvar);
3260 	else
3261 	  pvar = &DECL_CHAIN (*pvar);
3262       gimple_bind_set_vars (bind, vars);
3263     }
3264 }
3265 
3266 /* Remove omp_member_access_dummy_var variables from BLOCK_VARS of
3267    block and its subblocks.  */
3268 
3269 static void
3270 remove_member_access_dummy_vars (tree block)
3271 {
3272   for (tree *pvar = &BLOCK_VARS (block); *pvar; )
3273     if (omp_member_access_dummy_var (*pvar))
3274       *pvar = DECL_CHAIN (*pvar);
3275     else
3276       pvar = &DECL_CHAIN (*pvar);
3277 
3278   for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
3279     remove_member_access_dummy_vars (block);
3280 }
3281 
3282 /* If a context was created for STMT when it was scanned, return it.  */
3283 
3284 static omp_context *
3285 maybe_lookup_ctx (gimple *stmt)
3286 {
3287   splay_tree_node n;
3288   n = splay_tree_lookup (all_contexts, (splay_tree_key) stmt);
3289   return n ? (omp_context *) n->value : NULL;
3290 }
3291 
3292 
3293 /* Find the mapping for DECL in CTX or the immediately enclosing
3294    context that has a mapping for DECL.
3295 
3296    If CTX is a nested parallel directive, we may have to use the decl
3297    mappings created in CTX's parent context.  Suppose that we have the
3298    following parallel nesting (variable UIDs shown for clarity):
3299 
3300 	iD.1562 = 0;
3301      	#omp parallel shared(iD.1562)		-> outer parallel
3302 	  iD.1562 = iD.1562 + 1;
3303 
3304 	  #omp parallel shared (iD.1562)	-> inner parallel
3305 	     iD.1562 = iD.1562 - 1;
3306 
3307    Each parallel structure will create a distinct .omp_data_s structure
3308    for copying iD.1562 in/out of the directive:
3309 
3310   	outer parallel		.omp_data_s.1.i -> iD.1562
3311 	inner parallel		.omp_data_s.2.i -> iD.1562
3312 
3313    A shared variable mapping will produce a copy-out operation before
3314    the parallel directive and a copy-in operation after it.  So, in
3315    this case we would have:
3316 
3317   	iD.1562 = 0;
3318 	.omp_data_o.1.i = iD.1562;
3319 	#omp parallel shared(iD.1562)		-> outer parallel
3320 	  .omp_data_i.1 = &.omp_data_o.1
3321 	  .omp_data_i.1->i = .omp_data_i.1->i + 1;
3322 
3323 	  .omp_data_o.2.i = iD.1562;		-> **
3324 	  #omp parallel shared(iD.1562)		-> inner parallel
3325 	    .omp_data_i.2 = &.omp_data_o.2
3326 	    .omp_data_i.2->i = .omp_data_i.2->i - 1;
3327 
3328 
3329     ** This is a problem.  The symbol iD.1562 cannot be referenced
3330        inside the body of the outer parallel region.  But since we are
3331        emitting this copy operation while expanding the inner parallel
3332        directive, we need to access the CTX structure of the outer
3333        parallel directive to get the correct mapping:
3334 
3335 	  .omp_data_o.2.i = .omp_data_i.1->i
3336 
3337     Since there may be other workshare or parallel directives enclosing
3338     the parallel directive, it may be necessary to walk up the context
3339     parent chain.  This is not a problem in general because nested
3340     parallelism happens only rarely.  */
3341 
3342 static tree
3343 lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
3344 {
3345   tree t;
3346   omp_context *up;
3347 
3348   for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
3349     t = maybe_lookup_decl (decl, up);
3350 
3351   gcc_assert (!ctx->is_nested || t || is_global_var (decl));
3352 
3353   return t ? t : decl;
3354 }
3355 
3356 
3357 /* Similar to lookup_decl_in_outer_ctx, but return DECL if not found
3358    in outer contexts.  */
3359 
3360 static tree
3361 maybe_lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
3362 {
3363   tree t = NULL;
3364   omp_context *up;
3365 
3366   for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
3367     t = maybe_lookup_decl (decl, up);
3368 
3369   return t ? t : decl;
3370 }
3371 
3372 
3373 /* Construct the initialization value for reduction operation OP.  */
3374 
3375 tree
3376 omp_reduction_init_op (location_t loc, enum tree_code op, tree type)
3377 {
3378   switch (op)
3379     {
3380     case PLUS_EXPR:
3381     case MINUS_EXPR:
3382     case BIT_IOR_EXPR:
3383     case BIT_XOR_EXPR:
3384     case TRUTH_OR_EXPR:
3385     case TRUTH_ORIF_EXPR:
3386     case TRUTH_XOR_EXPR:
3387     case NE_EXPR:
3388       return build_zero_cst (type);
3389 
3390     case MULT_EXPR:
3391     case TRUTH_AND_EXPR:
3392     case TRUTH_ANDIF_EXPR:
3393     case EQ_EXPR:
3394       return fold_convert_loc (loc, type, integer_one_node);
3395 
3396     case BIT_AND_EXPR:
3397       return fold_convert_loc (loc, type, integer_minus_one_node);
3398 
3399     case MAX_EXPR:
3400       if (SCALAR_FLOAT_TYPE_P (type))
3401 	{
3402 	  REAL_VALUE_TYPE max, min;
3403 	  if (HONOR_INFINITIES (type))
3404 	    {
3405 	      real_inf (&max);
3406 	      real_arithmetic (&min, NEGATE_EXPR, &max, NULL);
3407 	    }
3408 	  else
3409 	    real_maxval (&min, 1, TYPE_MODE (type));
3410 	  return build_real (type, min);
3411 	}
3412       else if (POINTER_TYPE_P (type))
3413 	{
3414 	  wide_int min
3415 	    = wi::min_value (TYPE_PRECISION (type), TYPE_SIGN (type));
3416 	  return wide_int_to_tree (type, min);
3417 	}
3418       else
3419 	{
3420 	  gcc_assert (INTEGRAL_TYPE_P (type));
3421 	  return TYPE_MIN_VALUE (type);
3422 	}
3423 
3424     case MIN_EXPR:
3425       if (SCALAR_FLOAT_TYPE_P (type))
3426 	{
3427 	  REAL_VALUE_TYPE max;
3428 	  if (HONOR_INFINITIES (type))
3429 	    real_inf (&max);
3430 	  else
3431 	    real_maxval (&max, 0, TYPE_MODE (type));
3432 	  return build_real (type, max);
3433 	}
3434       else if (POINTER_TYPE_P (type))
3435 	{
3436 	  wide_int max
3437 	    = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
3438 	  return wide_int_to_tree (type, max);
3439 	}
3440       else
3441 	{
3442 	  gcc_assert (INTEGRAL_TYPE_P (type));
3443 	  return TYPE_MAX_VALUE (type);
3444 	}
3445 
3446     default:
3447       gcc_unreachable ();
3448     }
3449 }
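
/* For illustration, a sketch of what the switch above yields for common
   reduction clauses on an int X -- in each case the identity element of
   the operation, so combining any partial result with it is a no-op:

	reduction (+:x)   ->  0		reduction (*:x)   ->  1
	reduction (|:x)   ->  0		reduction (&:x)   ->  -1
	reduction (max:x) ->  INT_MIN	reduction (min:x) ->  INT_MAX  */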
3450 
3451 /* Construct the initialization value for reduction CLAUSE.  */
3452 
3453 tree
3454 omp_reduction_init (tree clause, tree type)
3455 {
3456   return omp_reduction_init_op (OMP_CLAUSE_LOCATION (clause),
3457 				OMP_CLAUSE_REDUCTION_CODE (clause), type);
3458 }
3459 
3460 /* Return alignment to be assumed for var in CLAUSE, which should be
3461    OMP_CLAUSE_ALIGNED.  */
3462 
3463 static tree
3464 omp_clause_aligned_alignment (tree clause)
3465 {
3466   if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
3467     return OMP_CLAUSE_ALIGNED_ALIGNMENT (clause);
3468 
3469   /* Otherwise return implementation defined alignment.  */
3470   unsigned int al = 1;
3471   opt_scalar_mode mode_iter;
3472   auto_vector_sizes sizes;
3473   targetm.vectorize.autovectorize_vector_sizes (&sizes);
3474   poly_uint64 vs = 0;
3475   for (unsigned int i = 0; i < sizes.length (); ++i)
3476     vs = ordered_max (vs, sizes[i]);
3477   static enum mode_class classes[]
3478     = { MODE_INT, MODE_VECTOR_INT, MODE_FLOAT, MODE_VECTOR_FLOAT };
3479   for (int i = 0; i < 4; i += 2)
3480     /* The for loop above dictates that we only walk through scalar classes.  */
3481     FOR_EACH_MODE_IN_CLASS (mode_iter, classes[i])
3482       {
3483 	scalar_mode mode = mode_iter.require ();
3484 	machine_mode vmode = targetm.vectorize.preferred_simd_mode (mode);
3485 	if (GET_MODE_CLASS (vmode) != classes[i + 1])
3486 	  continue;
3487 	while (maybe_ne (vs, 0U)
3488 	       && known_lt (GET_MODE_SIZE (vmode), vs)
3489 	       && GET_MODE_2XWIDER_MODE (vmode).exists ())
3490 	  vmode = GET_MODE_2XWIDER_MODE (vmode).require ();
3491 
3492 	tree type = lang_hooks.types.type_for_mode (mode, 1);
3493 	if (type == NULL_TREE || TYPE_MODE (type) != mode)
3494 	  continue;
3495 	poly_uint64 nelts = exact_div (GET_MODE_SIZE (vmode),
3496 				       GET_MODE_SIZE (mode));
3497 	type = build_vector_type (type, nelts);
3498 	if (TYPE_MODE (type) != vmode)
3499 	  continue;
3500 	if (TYPE_ALIGN_UNIT (type) > al)
3501 	  al = TYPE_ALIGN_UNIT (type);
3502       }
3503   return build_int_cst (integer_type_node, al);
3504 }
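
/* A sketch of how the value computed above is consumed: for
   "#pragma omp simd aligned (p)" with no explicit alignment,
   lower_rec_input_clauses emits roughly

	p = (T *) __builtin_assume_aligned (p, <al>);

   where <al> is the implementation-defined alignment chosen here (the
   unit alignment of the widest preferred SIMD vector type).  */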
3505 
3506 
3507 /* This structure is part of the interface between lower_rec_simd_input_clauses
3508    and lower_rec_input_clauses.  */
3509 
3510 struct omplow_simd_context {
3511   omplow_simd_context () { memset (this, 0, sizeof (*this)); }
3512   tree idx;
3513   tree lane;
3514   vec<tree, va_heap> simt_eargs;
3515   gimple_seq simt_dlist;
3516   poly_uint64_pod max_vf;
3517   bool is_simt;
3518 };
3519 
3520 /* Helper function of lower_rec_input_clauses, used for #pragma omp simd
3521    privatization.  */
3522 
3523 static bool
3524 lower_rec_simd_input_clauses (tree new_var, omp_context *ctx,
3525 			      omplow_simd_context *sctx, tree &ivar, tree &lvar)
3526 {
3527   if (known_eq (sctx->max_vf, 0U))
3528     {
3529       sctx->max_vf = sctx->is_simt ? omp_max_simt_vf () : omp_max_vf ();
3530       if (maybe_gt (sctx->max_vf, 1U))
3531 	{
3532 	  tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3533 				    OMP_CLAUSE_SAFELEN);
3534 	  if (c)
3535 	    {
3536 	      poly_uint64 safe_len;
3537 	      if (!poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
3538 		  || maybe_lt (safe_len, 1U))
3539 		sctx->max_vf = 1;
3540 	      else
3541 		sctx->max_vf = lower_bound (sctx->max_vf, safe_len);
3542 	    }
3543 	}
3544       if (maybe_gt (sctx->max_vf, 1U))
3545 	{
3546 	  sctx->idx = create_tmp_var (unsigned_type_node);
3547 	  sctx->lane = create_tmp_var (unsigned_type_node);
3548 	}
3549     }
3550   if (known_eq (sctx->max_vf, 1U))
3551     return false;
3552 
3553   if (sctx->is_simt)
3554     {
3555       if (is_gimple_reg (new_var))
3556 	{
3557 	  ivar = lvar = new_var;
3558 	  return true;
3559 	}
3560       tree type = TREE_TYPE (new_var), ptype = build_pointer_type (type);
3561       ivar = lvar = create_tmp_var (type);
3562       TREE_ADDRESSABLE (ivar) = 1;
3563       DECL_ATTRIBUTES (ivar) = tree_cons (get_identifier ("omp simt private"),
3564 					  NULL, DECL_ATTRIBUTES (ivar));
3565       sctx->simt_eargs.safe_push (build1 (ADDR_EXPR, ptype, ivar));
3566       tree clobber = build_constructor (type, NULL);
3567       TREE_THIS_VOLATILE (clobber) = 1;
3568       gimple *g = gimple_build_assign (ivar, clobber);
3569       gimple_seq_add_stmt (&sctx->simt_dlist, g);
3570     }
3571   else
3572     {
3573       tree atype = build_array_type_nelts (TREE_TYPE (new_var), sctx->max_vf);
3574       tree avar = create_tmp_var_raw (atype);
3575       if (TREE_ADDRESSABLE (new_var))
3576 	TREE_ADDRESSABLE (avar) = 1;
3577       DECL_ATTRIBUTES (avar)
3578 	= tree_cons (get_identifier ("omp simd array"), NULL,
3579 		     DECL_ATTRIBUTES (avar));
3580       gimple_add_tmp_var (avar);
3581       ivar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, sctx->idx,
3582 		     NULL_TREE, NULL_TREE);
3583       lvar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, sctx->lane,
3584 		     NULL_TREE, NULL_TREE);
3585     }
3586   if (DECL_P (new_var))
3587     {
3588       SET_DECL_VALUE_EXPR (new_var, lvar);
3589       DECL_HAS_VALUE_EXPR_P (new_var) = 1;
3590     }
3591   return true;
3592 }
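
/* E.g. for a privatized scalar D in a non-SIMT simd loop, the code above
   produces, as a sketch:

	T D.simdarray[max_vf];		(marked with "omp simd array")
	IVAR = D.simdarray[sctx->idx];
	LVAR = D.simdarray[sctx->lane];

   and D's DECL_VALUE_EXPR is set to LVAR, so each SIMD lane works on its
   own array element while existing references to D stay unchanged.
   (D.simdarray is an illustrative name for the temporary AVAR.)  */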
3593 
3594 /* Helper function of lower_rec_input_clauses.  For a reference in a
3595    simd reduction, allocate an underlying variable for it to reference.  */
3596 
3597 static void
3598 handle_simd_reference (location_t loc, tree new_vard, gimple_seq *ilist)
3599 {
3600   tree z = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_vard)));
3601   if (TREE_CONSTANT (z))
3602     {
3603       z = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_vard)),
3604 			      get_name (new_vard));
3605       gimple_add_tmp_var (z);
3606       TREE_ADDRESSABLE (z) = 1;
3607       z = build_fold_addr_expr_loc (loc, z);
3608       gimplify_assign (new_vard, z, ilist);
3609     }
3610 }
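
/* For example, if NEW_VARD is the privatized pointer behind a reference
   VAR whose pointee type has constant size, the code above amounts to the
   sketch "T tmp; new_vard = &tmp;" -- fresh addressable backing storage,
   so that later dereferences of NEW_VARD no longer touch the original.  */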
3611 
3612 /* Generate code to implement the input clauses, FIRSTPRIVATE and COPYIN,
3613    from the receiver (aka child) side and initializers for REFERENCE_TYPE
3614    private variables.  Initialization statements go in ILIST, while calls
3615    to destructors go in DLIST.  */
3616 
3617 static void
3618 lower_rec_input_clauses (tree clauses, gimple_seq *ilist, gimple_seq *dlist,
3619 			 omp_context *ctx, struct omp_for_data *fd)
3620 {
3621   tree c, dtor, copyin_seq, x, ptr;
3622   bool copyin_by_ref = false;
3623   bool lastprivate_firstprivate = false;
3624   bool reduction_omp_orig_ref = false;
3625   int pass;
3626   bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
3627 		  && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD);
3628   omplow_simd_context sctx = omplow_simd_context ();
3629   tree simt_lane = NULL_TREE, simtrec = NULL_TREE;
3630   tree ivar = NULL_TREE, lvar = NULL_TREE, uid = NULL_TREE;
3631   gimple_seq llist[3] = { };
3632 
3633   copyin_seq = NULL;
3634   sctx.is_simt = is_simd && omp_find_clause (clauses, OMP_CLAUSE__SIMT_);
3635 
3636   /* Set max_vf=1 (which will later enforce safelen=1) in simd loops
3637      with data sharing clauses referencing variable sized vars.  That
3638      is unnecessarily hard to support and very unlikely to result in
3639      vectorized code anyway.  */
3640   if (is_simd)
3641     for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
3642       switch (OMP_CLAUSE_CODE (c))
3643 	{
3644 	case OMP_CLAUSE_LINEAR:
3645 	  if (OMP_CLAUSE_LINEAR_ARRAY (c))
3646 	    sctx.max_vf = 1;
3647 	  /* FALLTHRU */
3648 	case OMP_CLAUSE_PRIVATE:
3649 	case OMP_CLAUSE_FIRSTPRIVATE:
3650 	case OMP_CLAUSE_LASTPRIVATE:
3651 	  if (is_variable_sized (OMP_CLAUSE_DECL (c)))
3652 	    sctx.max_vf = 1;
3653 	  break;
3654 	case OMP_CLAUSE_REDUCTION:
3655 	  if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
3656 	      || is_variable_sized (OMP_CLAUSE_DECL (c)))
3657 	    sctx.max_vf = 1;
3658 	  break;
3659 	default:
3660 	  continue;
3661 	}
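
  /* E.g. "#pragma omp simd private (vla)" where VLA is a C99 variable
     length array hits the is_variable_sized check above and forces
     sctx.max_vf to 1, which in turn disables the SIMD-array privatization
     below.  */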
3662 
3663   /* Add a placeholder for simduid.  */
3664   if (sctx.is_simt && maybe_ne (sctx.max_vf, 1U))
3665     sctx.simt_eargs.safe_push (NULL_TREE);
3666 
3667   /* Do all the fixed sized types in the first pass, and the variable sized
3668      types in the second pass.  This makes sure that the scalar arguments to
3669      the variable sized types are processed before we use them in the
3670      variable sized operations.  */
3671   for (pass = 0; pass < 2; ++pass)
3672     {
3673       for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
3674 	{
3675 	  enum omp_clause_code c_kind = OMP_CLAUSE_CODE (c);
3676 	  tree var, new_var;
3677 	  bool by_ref;
3678 	  location_t clause_loc = OMP_CLAUSE_LOCATION (c);
3679 
3680 	  switch (c_kind)
3681 	    {
3682 	    case OMP_CLAUSE_PRIVATE:
3683 	      if (OMP_CLAUSE_PRIVATE_DEBUG (c))
3684 		continue;
3685 	      break;
3686 	    case OMP_CLAUSE_SHARED:
3687 	      /* Ignore shared directives in teams construct.  */
3688 	      if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
3689 		continue;
3690 	      if (maybe_lookup_decl (OMP_CLAUSE_DECL (c), ctx) == NULL)
3691 		{
3692 		  gcc_assert (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c)
3693 			      || is_global_var (OMP_CLAUSE_DECL (c)));
3694 		  continue;
3695 		}
3696 	    case OMP_CLAUSE_FIRSTPRIVATE:
3697 	    case OMP_CLAUSE_COPYIN:
3698 	      break;
3699 	    case OMP_CLAUSE_LINEAR:
3700 	      if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
3701 		  && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
3702 		lastprivate_firstprivate = true;
3703 	      break;
3704 	    case OMP_CLAUSE_REDUCTION:
3705 	      if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
3706 		reduction_omp_orig_ref = true;
3707 	      break;
3708 	    case OMP_CLAUSE__LOOPTEMP_:
3709 	      /* Handle _looptemp_ clauses only on parallel/task.  */
3710 	      if (fd)
3711 		continue;
3712 	      break;
3713 	    case OMP_CLAUSE_LASTPRIVATE:
3714 	      if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
3715 		{
3716 		  lastprivate_firstprivate = true;
3717 		  if (pass != 0 || is_taskloop_ctx (ctx))
3718 		    continue;
3719 		}
3720 	      /* Even without corresponding firstprivate, if
3721 		 decl is Fortran allocatable, it needs outer var
3722 		 reference.  */
3723 	      else if (pass == 0
3724 		       && lang_hooks.decls.omp_private_outer_ref
3725 							(OMP_CLAUSE_DECL (c)))
3726 		lastprivate_firstprivate = true;
3727 	      break;
3728 	    case OMP_CLAUSE_ALIGNED:
3729 	      if (pass == 0)
3730 		continue;
3731 	      var = OMP_CLAUSE_DECL (c);
3732 	      if (TREE_CODE (TREE_TYPE (var)) == POINTER_TYPE
3733 		  && !is_global_var (var))
3734 		{
3735 		  new_var = maybe_lookup_decl (var, ctx);
3736 		  if (new_var == NULL_TREE)
3737 		    new_var = maybe_lookup_decl_in_outer_ctx (var, ctx);
3738 		  x = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
3739 		  tree alarg = omp_clause_aligned_alignment (c);
3740 		  alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
3741 		  x = build_call_expr_loc (clause_loc, x, 2, new_var, alarg);
3742 		  x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
3743 		  x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
3744 		  gimplify_and_add (x, ilist);
3745 		}
3746 	      else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
3747 		       && is_global_var (var))
3748 		{
3749 		  tree ptype = build_pointer_type (TREE_TYPE (var)), t, t2;
3750 		  new_var = lookup_decl (var, ctx);
3751 		  t = maybe_lookup_decl_in_outer_ctx (var, ctx);
3752 		  t = build_fold_addr_expr_loc (clause_loc, t);
3753 		  t2 = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
3754 		  tree alarg = omp_clause_aligned_alignment (c);
3755 		  alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
3756 		  t = build_call_expr_loc (clause_loc, t2, 2, t, alarg);
3757 		  t = fold_convert_loc (clause_loc, ptype, t);
3758 		  x = create_tmp_var (ptype);
3759 		  t = build2 (MODIFY_EXPR, ptype, x, t);
3760 		  gimplify_and_add (t, ilist);
3761 		  t = build_simple_mem_ref_loc (clause_loc, x);
3762 		  SET_DECL_VALUE_EXPR (new_var, t);
3763 		  DECL_HAS_VALUE_EXPR_P (new_var) = 1;
3764 		}
3765 	      continue;
3766 	    default:
3767 	      continue;
3768 	    }
3769 
3770 	  new_var = var = OMP_CLAUSE_DECL (c);
3771 	  if (c_kind == OMP_CLAUSE_REDUCTION && TREE_CODE (var) == MEM_REF)
3772 	    {
3773 	      var = TREE_OPERAND (var, 0);
3774 	      if (TREE_CODE (var) == POINTER_PLUS_EXPR)
3775 		var = TREE_OPERAND (var, 0);
3776 	      if (TREE_CODE (var) == INDIRECT_REF
3777 		  || TREE_CODE (var) == ADDR_EXPR)
3778 		var = TREE_OPERAND (var, 0);
3779 	      if (is_variable_sized (var))
3780 		{
3781 		  gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
3782 		  var = DECL_VALUE_EXPR (var);
3783 		  gcc_assert (TREE_CODE (var) == INDIRECT_REF);
3784 		  var = TREE_OPERAND (var, 0);
3785 		  gcc_assert (DECL_P (var));
3786 		}
3787 	      new_var = var;
3788 	    }
3789 	  if (c_kind != OMP_CLAUSE_COPYIN)
3790 	    new_var = lookup_decl (var, ctx);
3791 
3792 	  if (c_kind == OMP_CLAUSE_SHARED || c_kind == OMP_CLAUSE_COPYIN)
3793 	    {
3794 	      if (pass != 0)
3795 		continue;
3796 	    }
3797 	  /* C/C++ array section reductions.  */
3798 	  else if (c_kind == OMP_CLAUSE_REDUCTION
3799 		   && var != OMP_CLAUSE_DECL (c))
3800 	    {
3801 	      if (pass == 0)
3802 		continue;
3803 
3804 	      tree bias = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
3805 	      tree orig_var = TREE_OPERAND (OMP_CLAUSE_DECL (c), 0);
3806 	      if (TREE_CODE (orig_var) == POINTER_PLUS_EXPR)
3807 		{
3808 		  tree b = TREE_OPERAND (orig_var, 1);
3809 		  b = maybe_lookup_decl (b, ctx);
3810 		  if (b == NULL)
3811 		    {
3812 		      b = TREE_OPERAND (orig_var, 1);
3813 		      b = maybe_lookup_decl_in_outer_ctx (b, ctx);
3814 		    }
3815 		  if (integer_zerop (bias))
3816 		    bias = b;
3817 		  else
3818 		    {
3819 		      bias = fold_convert_loc (clause_loc,
3820 					       TREE_TYPE (b), bias);
3821 		      bias = fold_build2_loc (clause_loc, PLUS_EXPR,
3822 					      TREE_TYPE (b), b, bias);
3823 		    }
3824 		  orig_var = TREE_OPERAND (orig_var, 0);
3825 		}
3826 	      if (TREE_CODE (orig_var) == INDIRECT_REF
3827 		  || TREE_CODE (orig_var) == ADDR_EXPR)
3828 		orig_var = TREE_OPERAND (orig_var, 0);
3829 	      tree d = OMP_CLAUSE_DECL (c);
3830 	      tree type = TREE_TYPE (d);
3831 	      gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
3832 	      tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
3833 	      const char *name = get_name (orig_var);
3834 	      if (TREE_CONSTANT (v))
3835 		{
3836 		  x = create_tmp_var_raw (type, name);
3837 		  gimple_add_tmp_var (x);
3838 		  TREE_ADDRESSABLE (x) = 1;
3839 		  x = build_fold_addr_expr_loc (clause_loc, x);
3840 		}
3841 	      else
3842 		{
3843 		  tree atmp
3844 		    = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
3845 		  tree t = maybe_lookup_decl (v, ctx);
3846 		  if (t)
3847 		    v = t;
3848 		  else
3849 		    v = maybe_lookup_decl_in_outer_ctx (v, ctx);
3850 		  gimplify_expr (&v, ilist, NULL, is_gimple_val, fb_rvalue);
3851 		  t = fold_build2_loc (clause_loc, PLUS_EXPR,
3852 				       TREE_TYPE (v), v,
3853 				       build_int_cst (TREE_TYPE (v), 1));
3854 		  t = fold_build2_loc (clause_loc, MULT_EXPR,
3855 				       TREE_TYPE (v), t,
3856 				       TYPE_SIZE_UNIT (TREE_TYPE (type)));
3857 		  tree al = size_int (TYPE_ALIGN (TREE_TYPE (type)));
3858 		  x = build_call_expr_loc (clause_loc, atmp, 2, t, al);
3859 		}
3860 
3861 	      tree ptype = build_pointer_type (TREE_TYPE (type));
3862 	      x = fold_convert_loc (clause_loc, ptype, x);
3863 	      tree y = create_tmp_var (ptype, name);
3864 	      gimplify_assign (y, x, ilist);
3865 	      x = y;
3866 	      tree yb = y;
3867 
3868 	      if (!integer_zerop (bias))
3869 		{
3870 		  bias = fold_convert_loc (clause_loc, pointer_sized_int_node,
3871 					   bias);
3872 		  yb = fold_convert_loc (clause_loc, pointer_sized_int_node,
3873 					 x);
3874 		  yb = fold_build2_loc (clause_loc, MINUS_EXPR,
3875 					pointer_sized_int_node, yb, bias);
3876 		  x = fold_convert_loc (clause_loc, TREE_TYPE (x), yb);
3877 		  yb = create_tmp_var (ptype, name);
3878 		  gimplify_assign (yb, x, ilist);
3879 		  x = yb;
3880 		}
3881 
3882 	      d = TREE_OPERAND (d, 0);
3883 	      if (TREE_CODE (d) == POINTER_PLUS_EXPR)
3884 		d = TREE_OPERAND (d, 0);
3885 	      if (TREE_CODE (d) == ADDR_EXPR)
3886 		{
3887 		  if (orig_var != var)
3888 		    {
3889 		      gcc_assert (is_variable_sized (orig_var));
3890 		      x = fold_convert_loc (clause_loc, TREE_TYPE (new_var),
3891 					    x);
3892 		      gimplify_assign (new_var, x, ilist);
3893 		      tree new_orig_var = lookup_decl (orig_var, ctx);
3894 		      tree t = build_fold_indirect_ref (new_var);
3895 		      DECL_IGNORED_P (new_var) = 0;
3896 		      TREE_THIS_NOTRAP (t) = 1;
3897 		      SET_DECL_VALUE_EXPR (new_orig_var, t);
3898 		      DECL_HAS_VALUE_EXPR_P (new_orig_var) = 1;
3899 		    }
3900 		  else
3901 		    {
3902 		      x = build2 (MEM_REF, TREE_TYPE (new_var), x,
3903 				  build_int_cst (ptype, 0));
3904 		      SET_DECL_VALUE_EXPR (new_var, x);
3905 		      DECL_HAS_VALUE_EXPR_P (new_var) = 1;
3906 		    }
3907 		}
3908 	      else
3909 		{
3910 		  gcc_assert (orig_var == var);
3911 		  if (TREE_CODE (d) == INDIRECT_REF)
3912 		    {
3913 		      x = create_tmp_var (ptype, name);
3914 		      TREE_ADDRESSABLE (x) = 1;
3915 		      gimplify_assign (x, yb, ilist);
3916 		      x = build_fold_addr_expr_loc (clause_loc, x);
3917 		    }
3918 		  x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
3919 		  gimplify_assign (new_var, x, ilist);
3920 		}
3921 	      tree y1 = create_tmp_var (ptype, NULL);
3922 	      gimplify_assign (y1, y, ilist);
3923 	      tree i2 = NULL_TREE, y2 = NULL_TREE;
3924 	      tree body2 = NULL_TREE, end2 = NULL_TREE;
3925 	      tree y3 = NULL_TREE, y4 = NULL_TREE;
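	      /* Of the temporaries below: y1 (and y3, walking the original
		 array when the UDR initializer refers to omp_orig) advance
		 through ILIST; y2 and y4 (the private and outer arrays for
		 the merge) advance through DLIST.  */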
3926 	      if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) || is_simd)
3927 		{
3928 		  y2 = create_tmp_var (ptype, NULL);
3929 		  gimplify_assign (y2, y, ilist);
3930 		  tree ref = build_outer_var_ref (var, ctx);
3931 		  /* For ref build_outer_var_ref already performs this.  */
3932 		  if (TREE_CODE (d) == INDIRECT_REF)
3933 		    gcc_assert (omp_is_reference (var));
3934 		  else if (TREE_CODE (d) == ADDR_EXPR)
3935 		    ref = build_fold_addr_expr (ref);
3936 		  else if (omp_is_reference (var))
3937 		    ref = build_fold_addr_expr (ref);
3938 		  ref = fold_convert_loc (clause_loc, ptype, ref);
3939 		  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
3940 		      && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
3941 		    {
3942 		      y3 = create_tmp_var (ptype, NULL);
3943 		      gimplify_assign (y3, unshare_expr (ref), ilist);
3944 		    }
3945 		  if (is_simd)
3946 		    {
3947 		      y4 = create_tmp_var (ptype, NULL);
3948 		      gimplify_assign (y4, ref, dlist);
3949 		    }
3950 		}
3951 	      tree i = create_tmp_var (TREE_TYPE (v), NULL);
3952 	      gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), ilist);
3953 	      tree body = create_artificial_label (UNKNOWN_LOCATION);
3954 	      tree end = create_artificial_label (UNKNOWN_LOCATION);
3955 	      gimple_seq_add_stmt (ilist, gimple_build_label (body));
3956 	      if (y2)
3957 		{
3958 		  i2 = create_tmp_var (TREE_TYPE (v), NULL);
3959 		  gimplify_assign (i2, build_int_cst (TREE_TYPE (v), 0), dlist);
3960 		  body2 = create_artificial_label (UNKNOWN_LOCATION);
3961 		  end2 = create_artificial_label (UNKNOWN_LOCATION);
3962 		  gimple_seq_add_stmt (dlist, gimple_build_label (body2));
3963 		}
3964 	      if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
3965 		{
3966 		  tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
3967 		  tree decl_placeholder
3968 		    = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
3969 		  SET_DECL_VALUE_EXPR (decl_placeholder,
3970 				       build_simple_mem_ref (y1));
3971 		  DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
3972 		  SET_DECL_VALUE_EXPR (placeholder,
3973 				       y3 ? build_simple_mem_ref (y3)
3974 				       : error_mark_node);
3975 		  DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
3976 		  x = lang_hooks.decls.omp_clause_default_ctor
3977 				(c, build_simple_mem_ref (y1),
3978 				 y3 ? build_simple_mem_ref (y3) : NULL_TREE);
3979 		  if (x)
3980 		    gimplify_and_add (x, ilist);
3981 		  if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
3982 		    {
3983 		      gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
3984 		      lower_omp (&tseq, ctx);
3985 		      gimple_seq_add_seq (ilist, tseq);
3986 		    }
3987 		  OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
3988 		  if (is_simd)
3989 		    {
3990 		      SET_DECL_VALUE_EXPR (decl_placeholder,
3991 					   build_simple_mem_ref (y2));
3992 		      SET_DECL_VALUE_EXPR (placeholder,
3993 					   build_simple_mem_ref (y4));
3994 		      gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
3995 		      lower_omp (&tseq, ctx);
3996 		      gimple_seq_add_seq (dlist, tseq);
3997 		      OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
3998 		    }
3999 		  DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
4000 		  DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 0;
4001 		  x = lang_hooks.decls.omp_clause_dtor
4002 					(c, build_simple_mem_ref (y2));
4003 		  if (x)
4004 		    {
4005 		      gimple_seq tseq = NULL;
4006 		      dtor = x;
4007 		      gimplify_stmt (&dtor, &tseq);
4008 		      gimple_seq_add_seq (dlist, tseq);
4009 		    }
4010 		}
4011 	      else
4012 		{
4013 		  x = omp_reduction_init (c, TREE_TYPE (type));
4014 		  enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
4015 
4016 		  /* reduction(-:var) sums up the partial results, so it
4017 		     acts identically to reduction(+:var).  */
4018 		  if (code == MINUS_EXPR)
4019 		    code = PLUS_EXPR;
4020 
4021 		  gimplify_assign (build_simple_mem_ref (y1), x, ilist);
4022 		  if (is_simd)
4023 		    {
4024 		      x = build2 (code, TREE_TYPE (type),
4025 				  build_simple_mem_ref (y4),
4026 				  build_simple_mem_ref (y2));
4027 		      gimplify_assign (build_simple_mem_ref (y4), x, dlist);
4028 		    }
4029 		}
4030 	      gimple *g
4031 		= gimple_build_assign (y1, POINTER_PLUS_EXPR, y1,
4032 				       TYPE_SIZE_UNIT (TREE_TYPE (type)));
4033 	      gimple_seq_add_stmt (ilist, g);
4034 	      if (y3)
4035 		{
4036 		  g = gimple_build_assign (y3, POINTER_PLUS_EXPR, y3,
4037 					   TYPE_SIZE_UNIT (TREE_TYPE (type)));
4038 		  gimple_seq_add_stmt (ilist, g);
4039 		}
4040 	      g = gimple_build_assign (i, PLUS_EXPR, i,
4041 				       build_int_cst (TREE_TYPE (i), 1));
4042 	      gimple_seq_add_stmt (ilist, g);
4043 	      g = gimple_build_cond (LE_EXPR, i, v, body, end);
4044 	      gimple_seq_add_stmt (ilist, g);
4045 	      gimple_seq_add_stmt (ilist, gimple_build_label (end));
4046 	      if (y2)
4047 		{
4048 		  g = gimple_build_assign (y2, POINTER_PLUS_EXPR, y2,
4049 					   TYPE_SIZE_UNIT (TREE_TYPE (type)));
4050 		  gimple_seq_add_stmt (dlist, g);
4051 		  if (y4)
4052 		    {
4053 		      g = gimple_build_assign
4054 					(y4, POINTER_PLUS_EXPR, y4,
4055 					 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4056 		      gimple_seq_add_stmt (dlist, g);
4057 		    }
4058 		  g = gimple_build_assign (i2, PLUS_EXPR, i2,
4059 					   build_int_cst (TREE_TYPE (i2), 1));
4060 		  gimple_seq_add_stmt (dlist, g);
4061 		  g = gimple_build_cond (LE_EXPR, i2, v, body2, end2);
4062 		  gimple_seq_add_stmt (dlist, g);
4063 		  gimple_seq_add_stmt (dlist, gimple_build_label (end2));
4064 		}
4065 	      continue;
4066 	    }
4067 	  else if (is_variable_sized (var))
4068 	    {
4069 	      /* For variable sized types, we need to allocate the
4070 		 actual storage here.  Call alloca and store the
4071 		 result in the pointer decl that we created elsewhere.  */
4072 	      if (pass == 0)
4073 		continue;
4074 
4075 	      if (c_kind != OMP_CLAUSE_FIRSTPRIVATE || !is_task_ctx (ctx))
4076 		{
4077 		  gcall *stmt;
4078 		  tree tmp, atmp;
4079 
4080 		  ptr = DECL_VALUE_EXPR (new_var);
4081 		  gcc_assert (TREE_CODE (ptr) == INDIRECT_REF);
4082 		  ptr = TREE_OPERAND (ptr, 0);
4083 		  gcc_assert (DECL_P (ptr));
4084 		  x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
4085 
4086 		  /* void *tmp = __builtin_alloca_with_align (x, DECL_ALIGN (var));  */
4087 		  atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
4088 		  stmt = gimple_build_call (atmp, 2, x,
4089 					    size_int (DECL_ALIGN (var)));
4090 		  tmp = create_tmp_var_raw (ptr_type_node);
4091 		  gimple_add_tmp_var (tmp);
4092 		  gimple_call_set_lhs (stmt, tmp);
4093 
4094 		  gimple_seq_add_stmt (ilist, stmt);
4095 
4096 		  x = fold_convert_loc (clause_loc, TREE_TYPE (ptr), tmp);
4097 		  gimplify_assign (ptr, x, ilist);
4098 		}
4099 	    }
4100 	  else if (omp_is_reference (var))
4101 	    {
4102 	      /* For references that are being privatized for Fortran,
4103 		 allocate new backing storage for the new pointer
4104 		 variable.  This allows us to avoid changing all the
4105 		 code that expects a pointer to something that expects
4106 		 a direct variable.  */
4107 	      if (pass == 0)
4108 		continue;
4109 
4110 	      x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
4111 	      if (c_kind == OMP_CLAUSE_FIRSTPRIVATE && is_task_ctx (ctx))
4112 		{
4113 		  x = build_receiver_ref (var, false, ctx);
4114 		  x = build_fold_addr_expr_loc (clause_loc, x);
4115 		}
4116 	      else if (TREE_CONSTANT (x))
4117 		{
4118 		  /* For a reduction in a SIMD loop, defer adding the
4119 		     initialization of the reference, because if we decide
4120 		     to use a SIMD array for it, the initialization could
4121 		     cause an expansion ICE.  */
4122 		  if (c_kind == OMP_CLAUSE_REDUCTION && is_simd)
4123 		    x = NULL_TREE;
4124 		  else
4125 		    {
4126 		      x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
4127 					      get_name (var));
4128 		      gimple_add_tmp_var (x);
4129 		      TREE_ADDRESSABLE (x) = 1;
4130 		      x = build_fold_addr_expr_loc (clause_loc, x);
4131 		    }
4132 		}
4133 	      else
4134 		{
4135 		  tree atmp
4136 		    = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
4137 		  tree rtype = TREE_TYPE (TREE_TYPE (new_var));
4138 		  tree al = size_int (TYPE_ALIGN (rtype));
4139 		  x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
4140 		}
4141 
4142 	      if (x)
4143 		{
4144 		  x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
4145 		  gimplify_assign (new_var, x, ilist);
4146 		}
4147 
4148 	      new_var = build_simple_mem_ref_loc (clause_loc, new_var);
4149 	    }
4150 	  else if (c_kind == OMP_CLAUSE_REDUCTION
4151 		   && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
4152 	    {
4153 	      if (pass == 0)
4154 		continue;
4155 	    }
4156 	  else if (pass != 0)
4157 	    continue;
4158 
4159 	  switch (OMP_CLAUSE_CODE (c))
4160 	    {
4161 	    case OMP_CLAUSE_SHARED:
4162 	      /* Ignore shared directives in teams construct.  */
4163 	      if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
4164 		continue;
4165 	      /* Shared global vars are just accessed directly.  */
4166 	      if (is_global_var (new_var))
4167 		break;
4168 	      /* For taskloop firstprivate/lastprivate, represented
4169 		 as firstprivate and shared clause on the task, new_var
4170 		 is the firstprivate var.  */
4171 	      if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
4172 		break;
4173 	      /* Set up the DECL_VALUE_EXPR for shared variables now.  This
4174 		 needs to be delayed until after fixup_child_record_type so
4175 		 that we get the correct type during the dereference.  */
4176 	      by_ref = use_pointer_for_field (var, ctx);
4177 	      x = build_receiver_ref (var, by_ref, ctx);
4178 	      SET_DECL_VALUE_EXPR (new_var, x);
4179 	      DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4180 
4181 	      /* ??? If VAR is not passed by reference, and the variable
4182 		 hasn't been initialized yet, then we'll get a warning for
4183 		 the store into the omp_data_s structure.  Ideally, we'd be
4184 		 able to notice this and not store anything at all, but
4185 		 we're generating code too early.  Suppress the warning.  */
4186 	      if (!by_ref)
4187 		TREE_NO_WARNING (var) = 1;
4188 	      break;
4189 
4190 	    case OMP_CLAUSE_LASTPRIVATE:
4191 	      if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
4192 		break;
4193 	      /* FALLTHRU */
4194 
4195 	    case OMP_CLAUSE_PRIVATE:
4196 	      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE)
4197 		x = build_outer_var_ref (var, ctx);
4198 	      else if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
4199 		{
4200 		  if (is_task_ctx (ctx))
4201 		    x = build_receiver_ref (var, false, ctx);
4202 		  else
4203 		    x = build_outer_var_ref (var, ctx, OMP_CLAUSE_PRIVATE);
4204 		}
4205 	      else
4206 		x = NULL;
4207 	    do_private:
4208 	      tree nx;
4209 	      nx = lang_hooks.decls.omp_clause_default_ctor
4210 						(c, unshare_expr (new_var), x);
4211 	      if (is_simd)
4212 		{
4213 		  tree y = lang_hooks.decls.omp_clause_dtor (c, new_var);
4214 		  if ((TREE_ADDRESSABLE (new_var) || nx || y
4215 		       || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
4216 		      && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4217 						       ivar, lvar))
4218 		    {
4219 		      if (nx)
4220 			x = lang_hooks.decls.omp_clause_default_ctor
4221 						(c, unshare_expr (ivar), x);
4222 		      if (nx && x)
4223 			gimplify_and_add (x, &llist[0]);
4224 		      if (y)
4225 			{
4226 			  y = lang_hooks.decls.omp_clause_dtor (c, ivar);
4227 			  if (y)
4228 			    {
4229 			      gimple_seq tseq = NULL;
4230 
4231 			      dtor = y;
4232 			      gimplify_stmt (&dtor, &tseq);
4233 			      gimple_seq_add_seq (&llist[1], tseq);
4234 			    }
4235 			}
4236 		      break;
4237 		    }
4238 		}
4239 	      if (nx)
4240 		gimplify_and_add (nx, ilist);
4241 	      /* FALLTHRU */
4242 
4243 	    do_dtor:
4244 	      x = lang_hooks.decls.omp_clause_dtor (c, new_var);
4245 	      if (x)
4246 		{
4247 		  gimple_seq tseq = NULL;
4248 
4249 		  dtor = x;
4250 		  gimplify_stmt (&dtor, &tseq);
4251 		  gimple_seq_add_seq (dlist, tseq);
4252 		}
4253 	      break;
4254 
4255 	    case OMP_CLAUSE_LINEAR:
4256 	      if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
4257 		goto do_firstprivate;
4258 	      if (OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
4259 		x = NULL;
4260 	      else
4261 		x = build_outer_var_ref (var, ctx);
4262 	      goto do_private;
4263 
4264 	    case OMP_CLAUSE_FIRSTPRIVATE:
4265 	      if (is_task_ctx (ctx))
4266 		{
4267 		  if (omp_is_reference (var) || is_variable_sized (var))
4268 		    goto do_dtor;
4269 		  else if (is_global_var (maybe_lookup_decl_in_outer_ctx (var,
4270 									  ctx))
4271 			   || use_pointer_for_field (var, NULL))
4272 		    {
4273 		      x = build_receiver_ref (var, false, ctx);
4274 		      SET_DECL_VALUE_EXPR (new_var, x);
4275 		      DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4276 		      goto do_dtor;
4277 		    }
4278 		}
4279 	    do_firstprivate:
4280 	      x = build_outer_var_ref (var, ctx);
4281 	      if (is_simd)
4282 		{
4283 		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
4284 		      && gimple_omp_for_combined_into_p (ctx->stmt))
4285 		    {
4286 		      tree t = OMP_CLAUSE_LINEAR_STEP (c);
4287 		      tree stept = TREE_TYPE (t);
4288 		      tree ct = omp_find_clause (clauses,
4289 						 OMP_CLAUSE__LOOPTEMP_);
4290 		      gcc_assert (ct);
4291 		      tree l = OMP_CLAUSE_DECL (ct);
4292 		      tree n1 = fd->loop.n1;
4293 		      tree step = fd->loop.step;
4294 		      tree itype = TREE_TYPE (l);
4295 		      if (POINTER_TYPE_P (itype))
4296 			itype = signed_type_for (itype);
4297 		      l = fold_build2 (MINUS_EXPR, itype, l, n1);
4298 		      if (TYPE_UNSIGNED (itype)
4299 			  && fd->loop.cond_code == GT_EXPR)
4300 			l = fold_build2 (TRUNC_DIV_EXPR, itype,
4301 					 fold_build1 (NEGATE_EXPR, itype, l),
4302 					 fold_build1 (NEGATE_EXPR,
4303 						      itype, step));
4304 		      else
4305 			l = fold_build2 (TRUNC_DIV_EXPR, itype, l, step);
4306 		      t = fold_build2 (MULT_EXPR, stept,
4307 				       fold_convert (stept, l), t);
4308 
4309 		      if (OMP_CLAUSE_LINEAR_ARRAY (c))
4310 			{
4311 			  x = lang_hooks.decls.omp_clause_linear_ctor
4312 							(c, new_var, x, t);
4313 			  gimplify_and_add (x, ilist);
4314 			  goto do_dtor;
4315 			}
4316 
4317 		      if (POINTER_TYPE_P (TREE_TYPE (x)))
4318 			x = fold_build2 (POINTER_PLUS_EXPR,
4319 					 TREE_TYPE (x), x, t);
4320 		      else
4321 			x = fold_build2 (PLUS_EXPR, TREE_TYPE (x), x, t);
4322 		    }
4323 
4324 		  if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_LINEAR
4325 		       || TREE_ADDRESSABLE (new_var))
4326 		      && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4327 						       ivar, lvar))
4328 		    {
4329 		      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR)
4330 			{
4331 			  tree iv = create_tmp_var (TREE_TYPE (new_var));
4332 			  x = lang_hooks.decls.omp_clause_copy_ctor (c, iv, x);
4333 			  gimplify_and_add (x, ilist);
4334 			  gimple_stmt_iterator gsi
4335 			    = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
4336 			  gassign *g
4337 			    = gimple_build_assign (unshare_expr (lvar), iv);
4338 			  gsi_insert_before_without_update (&gsi, g,
4339 							    GSI_SAME_STMT);
4340 			  tree t = OMP_CLAUSE_LINEAR_STEP (c);
4341 			  enum tree_code code = PLUS_EXPR;
4342 			  if (POINTER_TYPE_P (TREE_TYPE (new_var)))
4343 			    code = POINTER_PLUS_EXPR;
4344 			  g = gimple_build_assign (iv, code, iv, t);
4345 			  gsi_insert_before_without_update (&gsi, g,
4346 							    GSI_SAME_STMT);
4347 			  break;
4348 			}
4349 		      x = lang_hooks.decls.omp_clause_copy_ctor
4350 						(c, unshare_expr (ivar), x);
4351 		      gimplify_and_add (x, &llist[0]);
4352 		      x = lang_hooks.decls.omp_clause_dtor (c, ivar);
4353 		      if (x)
4354 			{
4355 			  gimple_seq tseq = NULL;
4356 
4357 			  dtor = x;
4358 			  gimplify_stmt (&dtor, &tseq);
4359 			  gimple_seq_add_seq (&llist[1], tseq);
4360 			}
4361 		      break;
4362 		    }
4363 		}
4364 	      x = lang_hooks.decls.omp_clause_copy_ctor
4365 						(c, unshare_expr (new_var), x);
4366 	      gimplify_and_add (x, ilist);
4367 	      goto do_dtor;
4368 
4369 	    case OMP_CLAUSE__LOOPTEMP_:
4370 	      gcc_assert (is_taskreg_ctx (ctx));
4371 	      x = build_outer_var_ref (var, ctx);
4372 	      x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
4373 	      gimplify_and_add (x, ilist);
4374 	      break;
4375 
4376 	    case OMP_CLAUSE_COPYIN:
4377 	      by_ref = use_pointer_for_field (var, NULL);
4378 	      x = build_receiver_ref (var, by_ref, ctx);
4379 	      x = lang_hooks.decls.omp_clause_assign_op (c, new_var, x);
4380 	      append_to_statement_list (x, &copyin_seq);
4381 	      copyin_by_ref |= by_ref;
4382 	      break;
4383 
4384 	    case OMP_CLAUSE_REDUCTION:
4385 	      /* OpenACC reductions are initialized using the
4386 		 GOACC_REDUCTION internal function.  */
4387 	      if (is_gimple_omp_oacc (ctx->stmt))
4388 		break;
4389 	      if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
4390 		{
4391 		  tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
4392 		  gimple *tseq;
4393 		  x = build_outer_var_ref (var, ctx);
4394 
4395 		  if (omp_is_reference (var)
4396 		      && !useless_type_conversion_p (TREE_TYPE (placeholder),
4397 						     TREE_TYPE (x)))
4398 		    x = build_fold_addr_expr_loc (clause_loc, x);
4399 		  SET_DECL_VALUE_EXPR (placeholder, x);
4400 		  DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
4401 		  tree new_vard = new_var;
4402 		  if (omp_is_reference (var))
4403 		    {
4404 		      gcc_assert (TREE_CODE (new_var) == MEM_REF);
4405 		      new_vard = TREE_OPERAND (new_var, 0);
4406 		      gcc_assert (DECL_P (new_vard));
4407 		    }
4408 		  if (is_simd
4409 		      && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4410 						       ivar, lvar))
4411 		    {
4412 		      if (new_vard == new_var)
4413 			{
4414 			  gcc_assert (DECL_VALUE_EXPR (new_var) == lvar);
4415 			  SET_DECL_VALUE_EXPR (new_var, ivar);
4416 			}
4417 		      else
4418 			{
4419 			  SET_DECL_VALUE_EXPR (new_vard,
4420 					       build_fold_addr_expr (ivar));
4421 			  DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
4422 			}
4423 		      x = lang_hooks.decls.omp_clause_default_ctor
4424 				(c, unshare_expr (ivar),
4425 				 build_outer_var_ref (var, ctx));
4426 		      if (x)
4427 			gimplify_and_add (x, &llist[0]);
4428 		      if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
4429 			{
4430 			  tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
4431 			  lower_omp (&tseq, ctx);
4432 			  gimple_seq_add_seq (&llist[0], tseq);
4433 			}
4434 		      OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
4435 		      tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
4436 		      lower_omp (&tseq, ctx);
4437 		      gimple_seq_add_seq (&llist[1], tseq);
4438 		      OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
4439 		      DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
4440 		      if (new_vard == new_var)
4441 			SET_DECL_VALUE_EXPR (new_var, lvar);
4442 		      else
4443 			SET_DECL_VALUE_EXPR (new_vard,
4444 					     build_fold_addr_expr (lvar));
4445 		      x = lang_hooks.decls.omp_clause_dtor (c, ivar);
4446 		      if (x)
4447 			{
4448 			  tseq = NULL;
4449 			  dtor = x;
4450 			  gimplify_stmt (&dtor, &tseq);
4451 			  gimple_seq_add_seq (&llist[1], tseq);
4452 			}
4453 		      break;
4454 		    }
4455 		  /* If this is a reference to constant size reduction var
4456 		     with placeholder, we haven't emitted the initializer
4457 		     for it because it is undesirable if SIMD arrays are used.
4458 		     But if they aren't used, we need to emit the deferred
4459 		     initialization now.  */
4460 		  else if (omp_is_reference (var) && is_simd)
4461 		    handle_simd_reference (clause_loc, new_vard, ilist);
4462 		  x = lang_hooks.decls.omp_clause_default_ctor
4463 				(c, unshare_expr (new_var),
4464 				 build_outer_var_ref (var, ctx));
4465 		  if (x)
4466 		    gimplify_and_add (x, ilist);
4467 		  if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
4468 		    {
4469 		      tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
4470 		      lower_omp (&tseq, ctx);
4471 		      gimple_seq_add_seq (ilist, tseq);
4472 		    }
4473 		  OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
4474 		  if (is_simd)
4475 		    {
4476 		      tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
4477 		      lower_omp (&tseq, ctx);
4478 		      gimple_seq_add_seq (dlist, tseq);
4479 		      OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
4480 		    }
4481 		  DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
4482 		  goto do_dtor;
4483 		}
4484 	      else
4485 		{
4486 		  x = omp_reduction_init (c, TREE_TYPE (new_var));
4487 		  gcc_assert (TREE_CODE (TREE_TYPE (new_var)) != ARRAY_TYPE);
4488 		  enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
4489 
4490 		  /* reduction(-:var) sums up the partial results, so it
4491 		     acts identically to reduction(+:var).  */
4492 		  if (code == MINUS_EXPR)
4493 		    code = PLUS_EXPR;
4494 
4495 		  tree new_vard = new_var;
4496 		  if (is_simd && omp_is_reference (var))
4497 		    {
4498 		      gcc_assert (TREE_CODE (new_var) == MEM_REF);
4499 		      new_vard = TREE_OPERAND (new_var, 0);
4500 		      gcc_assert (DECL_P (new_vard));
4501 		    }
4502 		  if (is_simd
4503 		      && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4504 						       ivar, lvar))
4505 		    {
4506 		      tree ref = build_outer_var_ref (var, ctx);
4507 
4508 		      gimplify_assign (unshare_expr (ivar), x, &llist[0]);
4509 
4510 		      if (sctx.is_simt)
4511 			{
4512 			  if (!simt_lane)
4513 			    simt_lane = create_tmp_var (unsigned_type_node);
4514 			  x = build_call_expr_internal_loc
4515 			    (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_BFLY,
4516 			     TREE_TYPE (ivar), 2, ivar, simt_lane);
4517 			  x = build2 (code, TREE_TYPE (ivar), ivar, x);
4518 			  gimplify_assign (ivar, x, &llist[2]);
4519 			}
4520 		      x = build2 (code, TREE_TYPE (ref), ref, ivar);
4521 		      ref = build_outer_var_ref (var, ctx);
4522 		      gimplify_assign (ref, x, &llist[1]);
4523 
4524 		      if (new_vard != new_var)
4525 			{
4526 			  SET_DECL_VALUE_EXPR (new_vard,
4527 					       build_fold_addr_expr (lvar));
4528 			  DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
4529 			}
4530 		    }
4531 		  else
4532 		    {
4533 		      if (omp_is_reference (var) && is_simd)
4534 			handle_simd_reference (clause_loc, new_vard, ilist);
4535 		      gimplify_assign (new_var, x, ilist);
4536 		      if (is_simd)
4537 			{
4538 			  tree ref = build_outer_var_ref (var, ctx);
4539 
4540 			  x = build2 (code, TREE_TYPE (ref), ref, new_var);
4541 			  ref = build_outer_var_ref (var, ctx);
4542 			  gimplify_assign (ref, x, dlist);
4543 			}
4544 		    }
4545 		}
4546 	      break;
4547 
4548 	    default:
4549 	      gcc_unreachable ();
4550 	    }
4551 	}
4552     }
4553 
4554   if (known_eq (sctx.max_vf, 1U))
4555     sctx.is_simt = false;
4556 
4557   if (sctx.lane || sctx.is_simt)
4558     {
4559       uid = create_tmp_var (ptr_type_node, "simduid");
4560       /* Don't want uninit warnings on simduid; it is always uninitialized,
4561 	 as we use it only for its DECL_UID, never for its value.  */
4562       TREE_NO_WARNING (uid) = 1;
4563       c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SIMDUID_);
4564       OMP_CLAUSE__SIMDUID__DECL (c) = uid;
4565       OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
4566       gimple_omp_for_set_clauses (ctx->stmt, c);
4567     }
4568   /* Emit calls denoting privatized variables and initializing a pointer to
4569      the structure that holds private variables as fields, after the ompdevlow pass.  */
4570   if (sctx.is_simt)
4571     {
4572       sctx.simt_eargs[0] = uid;
4573       gimple *g
4574 	= gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, sctx.simt_eargs);
4575       gimple_call_set_lhs (g, uid);
4576       gimple_seq_add_stmt (ilist, g);
4577       sctx.simt_eargs.release ();
4578 
4579       simtrec = create_tmp_var (ptr_type_node, ".omp_simt");
4580       g = gimple_build_call_internal (IFN_GOMP_SIMT_ENTER_ALLOC, 1, uid);
4581       gimple_call_set_lhs (g, simtrec);
4582       gimple_seq_add_stmt (ilist, g);
4583     }
4584   if (sctx.lane)
4585     {
4586       gimple *g
4587 	= gimple_build_call_internal (IFN_GOMP_SIMD_LANE, 1, uid);
4588       gimple_call_set_lhs (g, sctx.lane);
4589       gimple_stmt_iterator gsi = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
4590       gsi_insert_before_without_update (&gsi, g, GSI_SAME_STMT);
4591       g = gimple_build_assign (sctx.lane, INTEGER_CST,
4592 			       build_int_cst (unsigned_type_node, 0));
4593       gimple_seq_add_stmt (ilist, g);
4594       /* Emit reductions across SIMT lanes in log_2(simt_vf) steps.  */
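      /* (Each pass combines a lane's partial value with that of the lane
	 simt_lane away via GOMP_SIMT_XCHG_BFLY -- see llist[2] above --
	 doubling simt_lane 1, 2, 4, ... while it stays below the SIMT
	 vectorization factor: a butterfly reduction.)  */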
4595       if (llist[2])
4596 	{
4597 	  tree simt_vf = create_tmp_var (unsigned_type_node);
4598 	  g = gimple_build_call_internal (IFN_GOMP_SIMT_VF, 0);
4599 	  gimple_call_set_lhs (g, simt_vf);
4600 	  gimple_seq_add_stmt (dlist, g);
4601 
4602 	  tree t = build_int_cst (unsigned_type_node, 1);
4603 	  g = gimple_build_assign (simt_lane, INTEGER_CST, t);
4604 	  gimple_seq_add_stmt (dlist, g);
4605 
4606 	  t = build_int_cst (unsigned_type_node, 0);
4607 	  g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
4608 	  gimple_seq_add_stmt (dlist, g);
4609 
4610 	  tree body = create_artificial_label (UNKNOWN_LOCATION);
4611 	  tree header = create_artificial_label (UNKNOWN_LOCATION);
4612 	  tree end = create_artificial_label (UNKNOWN_LOCATION);
4613 	  gimple_seq_add_stmt (dlist, gimple_build_goto (header));
4614 	  gimple_seq_add_stmt (dlist, gimple_build_label (body));
4615 
4616 	  gimple_seq_add_seq (dlist, llist[2]);
4617 
4618 	  g = gimple_build_assign (simt_lane, LSHIFT_EXPR, simt_lane, integer_one_node);
4619 	  gimple_seq_add_stmt (dlist, g);
4620 
4621 	  gimple_seq_add_stmt (dlist, gimple_build_label (header));
4622 	  g = gimple_build_cond (LT_EXPR, simt_lane, simt_vf, body, end);
4623 	  gimple_seq_add_stmt (dlist, g);
4624 
4625 	  gimple_seq_add_stmt (dlist, gimple_build_label (end));
4626 	}
4627       for (int i = 0; i < 2; i++)
4628 	if (llist[i])
4629 	  {
4630 	    tree vf = create_tmp_var (unsigned_type_node);
4631 	    g = gimple_build_call_internal (IFN_GOMP_SIMD_VF, 1, uid);
4632 	    gimple_call_set_lhs (g, vf);
4633 	    gimple_seq *seq = i == 0 ? ilist : dlist;
4634 	    gimple_seq_add_stmt (seq, g);
4635 	    tree t = build_int_cst (unsigned_type_node, 0);
4636 	    g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
4637 	    gimple_seq_add_stmt (seq, g);
4638 	    tree body = create_artificial_label (UNKNOWN_LOCATION);
4639 	    tree header = create_artificial_label (UNKNOWN_LOCATION);
4640 	    tree end = create_artificial_label (UNKNOWN_LOCATION);
4641 	    gimple_seq_add_stmt (seq, gimple_build_goto (header));
4642 	    gimple_seq_add_stmt (seq, gimple_build_label (body));
4643 	    gimple_seq_add_seq (seq, llist[i]);
4644 	    t = build_int_cst (unsigned_type_node, 1);
4645 	    g = gimple_build_assign (sctx.idx, PLUS_EXPR, sctx.idx, t);
4646 	    gimple_seq_add_stmt (seq, g);
4647 	    gimple_seq_add_stmt (seq, gimple_build_label (header));
4648 	    g = gimple_build_cond (LT_EXPR, sctx.idx, vf, body, end);
4649 	    gimple_seq_add_stmt (seq, g);
4650 	    gimple_seq_add_stmt (seq, gimple_build_label (end));
4651 	  }
4652     }
4653   if (sctx.is_simt)
4654     {
4655       gimple_seq_add_seq (dlist, sctx.simt_dlist);
4656       gimple *g
4657 	= gimple_build_call_internal (IFN_GOMP_SIMT_EXIT, 1, simtrec);
4658       gimple_seq_add_stmt (dlist, g);
4659     }
4660 
4661   /* The copyin sequence is not to be executed by the main thread, since
4662      that would result in self-copies.  Perhaps not visible to scalars,
4663      but it certainly is to C++ operator=.  */
4664   if (copyin_seq)
4665     {
4666       x = build_call_expr (builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM),
4667 			   0);
4668       x = build2 (NE_EXPR, boolean_type_node, x,
4669 		  build_int_cst (TREE_TYPE (x), 0));
4670       x = build3 (COND_EXPR, void_type_node, x, copyin_seq, NULL);
4671       gimplify_and_add (x, ilist);
4672     }
4673 
4674   /* If any copyin variable is passed by reference, we must ensure the
4675      master thread doesn't modify it before it is copied over in all
4676      threads.  Similarly for variables in both firstprivate and
4677      lastprivate clauses we need to ensure the lastprivate copying
4678      happens after firstprivate copying in all threads.  And similarly
4679      for UDRs if initializer expression refers to omp_orig.  */
4680   if (copyin_by_ref || lastprivate_firstprivate || reduction_omp_orig_ref)
4681     {
4682       /* Don't add any barrier for #pragma omp simd or
4683 	 #pragma omp distribute.  */
4684       if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
4685 	  || gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_FOR)
4686 	gimple_seq_add_stmt (ilist, omp_build_barrier (NULL_TREE));
4687     }
4688 
4689   /* If max_vf is non-zero, then we can use only a vectorization factor
4690      up to the max_vf we chose.  So stick it into the safelen clause.  */
4691   if (maybe_ne (sctx.max_vf, 0U))
4692     {
4693       tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
4694 				OMP_CLAUSE_SAFELEN);
4695       poly_uint64 safe_len;
4696       if (c == NULL_TREE
4697 	  || (poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
4698 	      && maybe_gt (safe_len, sctx.max_vf)))
4699 	{
4700 	  c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_SAFELEN);
4701 	  OMP_CLAUSE_SAFELEN_EXPR (c) = build_int_cst (integer_type_node,
4702 						       sctx.max_vf);
4703 	  OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
4704 	  gimple_omp_for_set_clauses (ctx->stmt, c);
4705 	}
4706     }
4707 }
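
/* As a concrete sketch of the above: for "#pragma omp parallel
   firstprivate (x)" the privatized x is initialized in ILIST from the
   outer/receiver reference via the language's copy-constructor hook,
   while "private (y)" with a C++ class type puts the default-constructor
   call into ILIST and the matching destructor call into DLIST.  */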
4708 
4709 
4710 /* Generate code to implement the LASTPRIVATE clauses.  This is used for
4711    both parallel and workshare constructs.  PREDICATE may be NULL if it's
4712    always true.   */
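
/* E.g. for "#pragma omp for lastprivate (x)", PREDICATE compares the
   iteration variable against the value it has on the sequentially last
   iteration; the code below guards the copy-back from the privatized
   copy to X with that predicate, so only the thread that ran the last
   iteration stores back.  */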
4713 
4714 static void
4715 lower_lastprivate_clauses (tree clauses, tree predicate, gimple_seq *stmt_list,
4716 			   omp_context *ctx)
4717 {
4718   tree x, c, label = NULL, orig_clauses = clauses;
4719   bool par_clauses = false;
4720   tree simduid = NULL, lastlane = NULL, simtcond = NULL, simtlast = NULL;
4721 
4722   /* Early exit if there are no lastprivate or linear clauses.  */
4723   for (; clauses ; clauses = OMP_CLAUSE_CHAIN (clauses))
4724     if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LASTPRIVATE
4725 	|| (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LINEAR
4726 	    && !OMP_CLAUSE_LINEAR_NO_COPYOUT (clauses)))
4727       break;
4728   if (clauses == NULL)
4729     {
4730       /* If this was a workshare clause, see if it had been combined
4731 	 with its parallel.  In that case, look for the clauses on the
4732 	 parallel statement itself.  */
4733       if (is_parallel_ctx (ctx))
4734 	return;
4735 
4736       ctx = ctx->outer;
4737       if (ctx == NULL || !is_parallel_ctx (ctx))
4738 	return;
4739 
4740       clauses = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
4741 				 OMP_CLAUSE_LASTPRIVATE);
4742       if (clauses == NULL)
4743 	return;
4744       par_clauses = true;
4745     }
4746 
4747   bool maybe_simt = false;
4748   if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
4749       && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
4750     {
4751       maybe_simt = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMT_);
4752       simduid = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMDUID_);
4753       if (simduid)
4754 	simduid = OMP_CLAUSE__SIMDUID__DECL (simduid);
4755     }
4756 
4757   if (predicate)
4758     {
4759       gcond *stmt;
4760       tree label_true, arm1, arm2;
4761       enum tree_code pred_code = TREE_CODE (predicate);
4762 
4763       label = create_artificial_label (UNKNOWN_LOCATION);
4764       label_true = create_artificial_label (UNKNOWN_LOCATION);
4765       if (TREE_CODE_CLASS (pred_code) == tcc_comparison)
4766 	{
4767 	  arm1 = TREE_OPERAND (predicate, 0);
4768 	  arm2 = TREE_OPERAND (predicate, 1);
4769 	  gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
4770 	  gimplify_expr (&arm2, stmt_list, NULL, is_gimple_val, fb_rvalue);
4771 	}
4772       else
4773 	{
4774 	  arm1 = predicate;
4775 	  gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
4776 	  arm2 = boolean_false_node;
4777 	  pred_code = NE_EXPR;
4778 	}
4779       if (maybe_simt)
4780 	{
4781 	  c = build2 (pred_code, boolean_type_node, arm1, arm2);
4782 	  c = fold_convert (integer_type_node, c);
4783 	  simtcond = create_tmp_var (integer_type_node);
4784 	  gimplify_assign (simtcond, c, stmt_list);
4785 	  gcall *g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY,
4786 						 1, simtcond);
4787 	  c = create_tmp_var (integer_type_node);
4788 	  gimple_call_set_lhs (g, c);
4789 	  gimple_seq_add_stmt (stmt_list, g);
4790 	  stmt = gimple_build_cond (NE_EXPR, c, integer_zero_node,
4791 				    label_true, label);
4792 	}
4793       else
4794 	stmt = gimple_build_cond (pred_code, arm1, arm2, label_true, label);
4795       gimple_seq_add_stmt (stmt_list, stmt);
4796       gimple_seq_add_stmt (stmt_list, gimple_build_label (label_true));
4797     }
4798 
4799   for (c = clauses; c ;)
4800     {
4801       tree var, new_var;
4802       location_t clause_loc = OMP_CLAUSE_LOCATION (c);
4803 
4804       if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4805 	  || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
4806 	      && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
4807 	{
4808 	  var = OMP_CLAUSE_DECL (c);
4809 	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4810 	      && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
4811 	      && is_taskloop_ctx (ctx))
4812 	    {
4813 	      gcc_checking_assert (ctx->outer && is_task_ctx (ctx->outer));
4814 	      new_var = lookup_decl (var, ctx->outer);
4815 	    }
4816 	  else
4817 	    {
4818 	      new_var = lookup_decl (var, ctx);
4819 	      /* Avoid uninitialized warnings for lastprivate and
4820 		 for linear iterators.  */
4821 	      if (predicate
4822 		  && (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4823 		      || OMP_CLAUSE_LINEAR_NO_COPYIN (c)))
4824 		TREE_NO_WARNING (new_var) = 1;
4825 	    }
4826 
4827 	  if (!maybe_simt && simduid && DECL_HAS_VALUE_EXPR_P (new_var))
4828 	    {
4829 	      tree val = DECL_VALUE_EXPR (new_var);
4830 	      if (TREE_CODE (val) == ARRAY_REF
4831 		  && VAR_P (TREE_OPERAND (val, 0))
4832 		  && lookup_attribute ("omp simd array",
4833 				       DECL_ATTRIBUTES (TREE_OPERAND (val,
4834 								      0))))
4835 		{
4836 		  if (lastlane == NULL)
4837 		    {
4838 		      lastlane = create_tmp_var (unsigned_type_node);
4839 		      gcall *g
4840 			= gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
4841 						      2, simduid,
4842 						      TREE_OPERAND (val, 1));
4843 		      gimple_call_set_lhs (g, lastlane);
4844 		      gimple_seq_add_stmt (stmt_list, g);
4845 		    }
4846 		  new_var = build4 (ARRAY_REF, TREE_TYPE (val),
4847 				    TREE_OPERAND (val, 0), lastlane,
4848 				    NULL_TREE, NULL_TREE);
4849 		}
4850 	    }
4851 	  else if (maybe_simt)
4852 	    {
4853 	      tree val = (DECL_HAS_VALUE_EXPR_P (new_var)
4854 			  ? DECL_VALUE_EXPR (new_var)
4855 			  : new_var);
4856 	      if (simtlast == NULL)
4857 		{
4858 		  simtlast = create_tmp_var (unsigned_type_node);
4859 		  gcall *g = gimple_build_call_internal
4860 		    (IFN_GOMP_SIMT_LAST_LANE, 1, simtcond);
4861 		  gimple_call_set_lhs (g, simtlast);
4862 		  gimple_seq_add_stmt (stmt_list, g);
4863 		}
4864 	      x = build_call_expr_internal_loc
4865 		(UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_IDX,
4866 		 TREE_TYPE (val), 2, val, simtlast);
4867 	      new_var = unshare_expr (new_var);
4868 	      gimplify_assign (new_var, x, stmt_list);
4869 	      new_var = unshare_expr (new_var);
4870 	    }
4871 
4872 	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4873 	      && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
4874 	    {
4875 	      lower_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
4876 	      gimple_seq_add_seq (stmt_list,
4877 				  OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
4878 	      OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) = NULL;
4879 	    }
4880 	  else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
4881 		   && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
4882 	    {
4883 	      lower_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
4884 	      gimple_seq_add_seq (stmt_list,
4885 				  OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
4886 	      OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) = NULL;
4887 	    }
4888 
4889 	  x = NULL_TREE;
4890 	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4891 	      && OMP_CLAUSE_LASTPRIVATE_TASKLOOP_IV (c))
4892 	    {
4893 	      gcc_checking_assert (is_taskloop_ctx (ctx));
4894 	      tree ovar = maybe_lookup_decl_in_outer_ctx (var,
4895 							  ctx->outer->outer);
4896 	      if (is_global_var (ovar))
4897 		x = ovar;
4898 	    }
4899 	  if (!x)
4900 	    x = build_outer_var_ref (var, ctx, OMP_CLAUSE_LASTPRIVATE);
4901 	  if (omp_is_reference (var))
4902 	    new_var = build_simple_mem_ref_loc (clause_loc, new_var);
4903 	  x = lang_hooks.decls.omp_clause_assign_op (c, x, new_var);
4904 	  gimplify_and_add (x, stmt_list);
4905 	}
4906       c = OMP_CLAUSE_CHAIN (c);
4907       if (c == NULL && !par_clauses)
4908 	{
4909 	  /* If this was a workshare clause, see if it had been combined
4910 	     with its parallel.  In that case, continue looking for the
4911 	     clauses also on the parallel statement itself.  */
4912 	  if (is_parallel_ctx (ctx))
4913 	    break;
4914 
4915 	  ctx = ctx->outer;
4916 	  if (ctx == NULL || !is_parallel_ctx (ctx))
4917 	    break;
4918 
4919 	  c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
4920 			       OMP_CLAUSE_LASTPRIVATE);
4921 	  par_clauses = true;
4922 	}
4923     }
4924 
4925   if (label)
4926     gimple_seq_add_stmt (stmt_list, gimple_build_label (label));
4927 }
4928 
4929 /* Lower the OpenACC reductions of CLAUSES for compute axis LEVEL
4930    (which might be a placeholder).  INNER is true if this is an inner
4931    axis of a multi-axis loop.  FORK and JOIN are (optional) fork and
4932    join markers.  Generate the before-loop forking sequence in
4933    FORK_SEQ and the after-loop joining sequence to JOIN_SEQ.  The
4934    FORK_SEQ and the after-loop joining sequence in JOIN_SEQ.  The
4935 
4936      GOACC_REDUCTION_SETUP
4937      GOACC_FORK
4938      GOACC_REDUCTION_INIT
4939      ...
4940      GOACC_REDUCTION_FINI
4941      GOACC_JOIN
4942      GOACC_REDUCTION_TEARDOWN.  */
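/* For a hypothetical "#pragma acc loop reduction (+:sum)", each
   reduction clause contributes roughly (a sketch, not exact GIMPLE):

	v1 = GOACC_REDUCTION (SETUP, ref_to_res, sum, level, +, offset);
	GOACC_FORK
	v2 = GOACC_REDUCTION (INIT, ref_to_res, v1, level, +, offset);
	... loop body updates the private copy ...
	v3 = GOACC_REDUCTION (FINI, ref_to_res, v2, level, +, offset);
	GOACC_JOIN
	sum = GOACC_REDUCTION (TEARDOWN, ref_to_res, v3, level, +, offset);  */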
4943 
4944 static void
4945 lower_oacc_reductions (location_t loc, tree clauses, tree level, bool inner,
4946 		       gcall *fork, gcall *join, gimple_seq *fork_seq,
4947 		       gimple_seq *join_seq, omp_context *ctx)
4948 {
4949   gimple_seq before_fork = NULL;
4950   gimple_seq after_fork = NULL;
4951   gimple_seq before_join = NULL;
4952   gimple_seq after_join = NULL;
4953   tree init_code = NULL_TREE, fini_code = NULL_TREE,
4954     setup_code = NULL_TREE, teardown_code = NULL_TREE;
4955   unsigned offset = 0;
4956 
4957   for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
4958     if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
4959       {
4960 	tree orig = OMP_CLAUSE_DECL (c);
4961 	tree var = maybe_lookup_decl (orig, ctx);
4962 	tree ref_to_res = NULL_TREE;
4963 	tree incoming, outgoing, v1, v2, v3;
4964 	bool is_private = false;
4965 
4966 	enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
4967 	if (rcode == MINUS_EXPR)
4968 	  rcode = PLUS_EXPR;
4969 	else if (rcode == TRUTH_ANDIF_EXPR)
4970 	  rcode = BIT_AND_EXPR;
4971 	else if (rcode == TRUTH_ORIF_EXPR)
4972 	  rcode = BIT_IOR_EXPR;
4973 	tree op = build_int_cst (unsigned_type_node, rcode);
4974 
4975 	if (!var)
4976 	  var = orig;
4977 
4978 	incoming = outgoing = var;
4979 
4980 	if (!inner)
4981 	  {
4982 	    /* See if an outer construct also reduces this variable.  */
4983 	    omp_context *outer = ctx;
4984 
4985 	    while (omp_context *probe = outer->outer)
4986 	      {
4987 		enum gimple_code type = gimple_code (probe->stmt);
4988 		tree cls;
4989 
4990 		switch (type)
4991 		  {
4992 		  case GIMPLE_OMP_FOR:
4993 		    cls = gimple_omp_for_clauses (probe->stmt);
4994 		    break;
4995 
4996 		  case GIMPLE_OMP_TARGET:
4997 		    if (gimple_omp_target_kind (probe->stmt)
4998 			!= GF_OMP_TARGET_KIND_OACC_PARALLEL)
4999 		      goto do_lookup;
5000 
5001 		    cls = gimple_omp_target_clauses (probe->stmt);
5002 		    break;
5003 
5004 		  default:
5005 		    goto do_lookup;
5006 		  }
5007 
5008 		outer = probe;
5009 		for (; cls;  cls = OMP_CLAUSE_CHAIN (cls))
5010 		  if (OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_REDUCTION
5011 		      && orig == OMP_CLAUSE_DECL (cls))
5012 		    {
5013 		      incoming = outgoing = lookup_decl (orig, probe);
5014 		      goto has_outer_reduction;
5015 		    }
5016 		  else if ((OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_FIRSTPRIVATE
5017 			    || OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_PRIVATE)
5018 			   && orig == OMP_CLAUSE_DECL (cls))
5019 		    {
5020 		      is_private = true;
5021 		      goto do_lookup;
5022 		    }
5023 	      }
5024 
5025 	  do_lookup:
5026 	    /* This is the outermost construct with this reduction,
5027 	       see if there's a mapping for it.  */
5028 	    if (gimple_code (outer->stmt) == GIMPLE_OMP_TARGET
5029 		&& maybe_lookup_field (orig, outer) && !is_private)
5030 	      {
5031 		ref_to_res = build_receiver_ref (orig, false, outer);
5032 		if (omp_is_reference (orig))
5033 		  ref_to_res = build_simple_mem_ref (ref_to_res);
5034 
5035 		tree type = TREE_TYPE (var);
5036 		if (POINTER_TYPE_P (type))
5037 		  type = TREE_TYPE (type);
5038 
5039 		outgoing = var;
5040 		incoming = omp_reduction_init_op (loc, rcode, type);
5041 	      }
5042 	    else
5043 	      {
5044 		/* Try to look at enclosing contexts for the reduction var;
5045 		   use the original if no mapping is found.  */
5046 		tree t = NULL_TREE;
5047 		omp_context *c = ctx->outer;
5048 		while (c && !t)
5049 		  {
5050 		    t = maybe_lookup_decl (orig, c);
5051 		    c = c->outer;
5052 		  }
5053 		incoming = outgoing = (t ? t : orig);
5054 	      }
5055 
5056 	  has_outer_reduction:;
5057 	  }
5058 
5059 	if (!ref_to_res)
5060 	  ref_to_res = integer_zero_node;
5061 
5062 	if (omp_is_reference (orig))
5063 	  {
5064 	    tree type = TREE_TYPE (var);
5065 	    const char *id = IDENTIFIER_POINTER (DECL_NAME (var));
5066 
5067 	    if (!inner)
5068 	      {
5069 		tree x = create_tmp_var (TREE_TYPE (type), id);
5070 		gimplify_assign (var, build_fold_addr_expr (x), fork_seq);
5071 	      }
5072 
5073 	    v1 = create_tmp_var (type, id);
5074 	    v2 = create_tmp_var (type, id);
5075 	    v3 = create_tmp_var (type, id);
5076 
5077 	    gimplify_assign (v1, var, fork_seq);
5078 	    gimplify_assign (v2, var, fork_seq);
5079 	    gimplify_assign (v3, var, fork_seq);
5080 
5081 	    var = build_simple_mem_ref (var);
5082 	    v1 = build_simple_mem_ref (v1);
5083 	    v2 = build_simple_mem_ref (v2);
5084 	    v3 = build_simple_mem_ref (v3);
5085 	    outgoing = build_simple_mem_ref (outgoing);
5086 
5087 	    if (!TREE_CONSTANT (incoming))
5088 	      incoming = build_simple_mem_ref (incoming);
5089 	  }
5090 	else
5091 	  v1 = v2 = v3 = var;
5092 
5093 	/* Determine position in reduction buffer, which may be used
5094 	   by the target.  The parser has ensured that this is not a
5095 	   variable-sized type.  */
5096 	fixed_size_mode mode
5097 	  = as_a <fixed_size_mode> (TYPE_MODE (TREE_TYPE (var)));
5098 	unsigned align = GET_MODE_ALIGNMENT (mode) /  BITS_PER_UNIT;
5099 	offset = (offset + align - 1) & ~(align - 1);
5100 	tree off = build_int_cst (sizetype, offset);
5101 	offset += GET_MODE_SIZE (mode);
5102 
5103 	if (!init_code)
5104 	  {
5105 	    init_code = build_int_cst (integer_type_node,
5106 				       IFN_GOACC_REDUCTION_INIT);
5107 	    fini_code = build_int_cst (integer_type_node,
5108 				       IFN_GOACC_REDUCTION_FINI);
5109 	    setup_code = build_int_cst (integer_type_node,
5110 					IFN_GOACC_REDUCTION_SETUP);
5111 	    teardown_code = build_int_cst (integer_type_node,
5112 					   IFN_GOACC_REDUCTION_TEARDOWN);
5113 	  }
5114 
5115 	tree setup_call
5116 	  = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5117 					  TREE_TYPE (var), 6, setup_code,
5118 					  unshare_expr (ref_to_res),
5119 					  incoming, level, op, off);
5120 	tree init_call
5121 	  = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5122 					  TREE_TYPE (var), 6, init_code,
5123 					  unshare_expr (ref_to_res),
5124 					  v1, level, op, off);
5125 	tree fini_call
5126 	  = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5127 					  TREE_TYPE (var), 6, fini_code,
5128 					  unshare_expr (ref_to_res),
5129 					  v2, level, op, off);
5130 	tree teardown_call
5131 	  = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5132 					  TREE_TYPE (var), 6, teardown_code,
5133 					  ref_to_res, v3, level, op, off);
5134 
5135 	gimplify_assign (v1, setup_call, &before_fork);
5136 	gimplify_assign (v2, init_call, &after_fork);
5137 	gimplify_assign (v3, fini_call, &before_join);
5138 	gimplify_assign (outgoing, teardown_call, &after_join);
5139       }
5140 
5141   /* Now stitch things together.  */
5142   gimple_seq_add_seq (fork_seq, before_fork);
5143   if (fork)
5144     gimple_seq_add_stmt (fork_seq, fork);
5145   gimple_seq_add_seq (fork_seq, after_fork);
5146 
5147   gimple_seq_add_seq (join_seq, before_join);
5148   if (join)
5149     gimple_seq_add_stmt (join_seq, join);
5150   gimple_seq_add_seq (join_seq, after_join);
5151 }
5152 
5153 /* Generate code to implement the REDUCTION clauses.  */
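/* For example, a lone "reduction (+:s)" merges the private copy back
   with an atomic update, roughly (a sketch):

	#pragma omp atomic
	s = s + s_priv;

   whereas with two or more reduction clauses, UDRs, or array sections,
   the merge statements are instead bracketed by GOMP_atomic_start ()
   and GOMP_atomic_end () calls.  */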
5154 
5155 static void
5156 lower_reduction_clauses (tree clauses, gimple_seq *stmt_seqp, omp_context *ctx)
5157 {
5158   gimple_seq sub_seq = NULL;
5159   gimple *stmt;
5160   tree x, c;
5161   int count = 0;
5162 
5163   /* OpenACC loop reductions are handled elsewhere.  */
5164   if (is_gimple_omp_oacc (ctx->stmt))
5165     return;
5166 
5167   /* SIMD reductions are handled in lower_rec_input_clauses.  */
5168   if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
5169       && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
5170     return;
5171 
5172   /* First see if there is exactly one reduction clause.  Use OMP_ATOMIC
5173      update in that case, otherwise use a lock.  */
5174   for (c = clauses; c && count < 2; c = OMP_CLAUSE_CHAIN (c))
5175     if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
5176       {
5177 	if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
5178 	    || TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
5179 	  {
5180 	    /* Never use OMP_ATOMIC for array reductions or UDRs.  */
5181 	    count = -1;
5182 	    break;
5183 	  }
5184 	count++;
5185       }
5186 
5187   if (count == 0)
5188     return;
5189 
5190   for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
5191     {
5192       tree var, ref, new_var, orig_var;
5193       enum tree_code code;
5194       location_t clause_loc = OMP_CLAUSE_LOCATION (c);
5195 
5196       if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5197 	continue;
5198 
5199       enum omp_clause_code ccode = OMP_CLAUSE_REDUCTION;
5200       orig_var = var = OMP_CLAUSE_DECL (c);
5201       if (TREE_CODE (var) == MEM_REF)
5202 	{
5203 	  var = TREE_OPERAND (var, 0);
5204 	  if (TREE_CODE (var) == POINTER_PLUS_EXPR)
5205 	    var = TREE_OPERAND (var, 0);
5206 	  if (TREE_CODE (var) == ADDR_EXPR)
5207 	    var = TREE_OPERAND (var, 0);
5208 	  else
5209 	    {
5210 	      /* If this is a pointer- or reference-based array
5211 		 section, the var could be private in the outer
5212 		 context, e.g. on an orphaned loop construct.  Pretend
5213 		 this is the private variable's outer reference.  */
5214 	      ccode = OMP_CLAUSE_PRIVATE;
5215 	      if (TREE_CODE (var) == INDIRECT_REF)
5216 		var = TREE_OPERAND (var, 0);
5217 	    }
5218 	  orig_var = var;
5219 	  if (is_variable_sized (var))
5220 	    {
5221 	      gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
5222 	      var = DECL_VALUE_EXPR (var);
5223 	      gcc_assert (TREE_CODE (var) == INDIRECT_REF);
5224 	      var = TREE_OPERAND (var, 0);
5225 	      gcc_assert (DECL_P (var));
5226 	    }
5227 	}
5228       new_var = lookup_decl (var, ctx);
5229       if (var == OMP_CLAUSE_DECL (c) && omp_is_reference (var))
5230 	new_var = build_simple_mem_ref_loc (clause_loc, new_var);
5231       ref = build_outer_var_ref (var, ctx, ccode);
5232       code = OMP_CLAUSE_REDUCTION_CODE (c);
5233 
5234       /* reduction(-:var) sums up the partial results, so it acts
5235 	 identically to reduction(+:var).  */
5236       if (code == MINUS_EXPR)
5237         code = PLUS_EXPR;
5238 
5239       if (count == 1)
5240 	{
5241 	  tree addr = build_fold_addr_expr_loc (clause_loc, ref);
5242 
5243 	  addr = save_expr (addr);
5244 	  ref = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (addr)), addr);
5245 	  x = fold_build2_loc (clause_loc, code, TREE_TYPE (ref), ref, new_var);
5246 	  x = build2 (OMP_ATOMIC, void_type_node, addr, x);
5247 	  gimplify_and_add (x, stmt_seqp);
5248 	  return;
5249 	}
5250       else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
5251 	{
5252 	  tree d = OMP_CLAUSE_DECL (c);
5253 	  tree type = TREE_TYPE (d);
5254 	  tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
5255 	  tree i = create_tmp_var (TREE_TYPE (v), NULL);
5256 	  tree ptype = build_pointer_type (TREE_TYPE (type));
5257 	  tree bias = TREE_OPERAND (d, 1);
5258 	  d = TREE_OPERAND (d, 0);
5259 	  if (TREE_CODE (d) == POINTER_PLUS_EXPR)
5260 	    {
5261 	      tree b = TREE_OPERAND (d, 1);
5262 	      b = maybe_lookup_decl (b, ctx);
5263 	      if (b == NULL)
5264 		{
5265 		  b = TREE_OPERAND (d, 1);
5266 		  b = maybe_lookup_decl_in_outer_ctx (b, ctx);
5267 		}
5268 	      if (integer_zerop (bias))
5269 		bias = b;
5270 	      else
5271 		{
5272 		  bias = fold_convert_loc (clause_loc, TREE_TYPE (b), bias);
5273 		  bias = fold_build2_loc (clause_loc, PLUS_EXPR,
5274 					  TREE_TYPE (b), b, bias);
5275 		}
5276 	      d = TREE_OPERAND (d, 0);
5277 	    }
5278 	  /* For ref, build_outer_var_ref already performs this, so
5279 	     only new_var needs a dereference.  */
5280 	  if (TREE_CODE (d) == INDIRECT_REF)
5281 	    {
5282 	      new_var = build_simple_mem_ref_loc (clause_loc, new_var);
5283 	      gcc_assert (omp_is_reference (var) && var == orig_var);
5284 	    }
5285 	  else if (TREE_CODE (d) == ADDR_EXPR)
5286 	    {
5287 	      if (orig_var == var)
5288 		{
5289 		  new_var = build_fold_addr_expr (new_var);
5290 		  ref = build_fold_addr_expr (ref);
5291 		}
5292 	    }
5293 	  else
5294 	    {
5295 	      gcc_assert (orig_var == var);
5296 	      if (omp_is_reference (var))
5297 		ref = build_fold_addr_expr (ref);
5298 	    }
5299 	  if (DECL_P (v))
5300 	    {
5301 	      tree t = maybe_lookup_decl (v, ctx);
5302 	      if (t)
5303 		v = t;
5304 	      else
5305 		v = maybe_lookup_decl_in_outer_ctx (v, ctx);
5306 	      gimplify_expr (&v, stmt_seqp, NULL, is_gimple_val, fb_rvalue);
5307 	    }
5308 	  if (!integer_zerop (bias))
5309 	    {
5310 	      bias = fold_convert_loc (clause_loc, sizetype, bias);
5311 	      new_var = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
5312 					 TREE_TYPE (new_var), new_var,
5313 					 unshare_expr (bias));
5314 	      ref = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
5315 					 TREE_TYPE (ref), ref, bias);
5316 	    }
5317 	  new_var = fold_convert_loc (clause_loc, ptype, new_var);
5318 	  ref = fold_convert_loc (clause_loc, ptype, ref);
5319 	  tree m = create_tmp_var (ptype, NULL);
5320 	  gimplify_assign (m, new_var, stmt_seqp);
5321 	  new_var = m;
5322 	  m = create_tmp_var (ptype, NULL);
5323 	  gimplify_assign (m, ref, stmt_seqp);
5324 	  ref = m;
5325 	  gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), stmt_seqp);
5326 	  tree body = create_artificial_label (UNKNOWN_LOCATION);
5327 	  tree end = create_artificial_label (UNKNOWN_LOCATION);
5328 	  gimple_seq_add_stmt (&sub_seq, gimple_build_label (body));
5329 	  tree priv = build_simple_mem_ref_loc (clause_loc, new_var);
5330 	  tree out = build_simple_mem_ref_loc (clause_loc, ref);
5331 	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5332 	    {
5333 	      tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
5334 	      tree decl_placeholder
5335 		= OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
5336 	      SET_DECL_VALUE_EXPR (placeholder, out);
5337 	      DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
5338 	      SET_DECL_VALUE_EXPR (decl_placeholder, priv);
5339 	      DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
5340 	      lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
5341 	      gimple_seq_add_seq (&sub_seq,
5342 				  OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
5343 	      OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5344 	      OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
5345 	      OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
5346 	    }
5347 	  else
5348 	    {
5349 	      x = build2 (code, TREE_TYPE (out), out, priv);
5350 	      out = unshare_expr (out);
5351 	      gimplify_assign (out, x, &sub_seq);
5352 	    }
5353 	  gimple *g = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
5354 					   TYPE_SIZE_UNIT (TREE_TYPE (type)));
5355 	  gimple_seq_add_stmt (&sub_seq, g);
5356 	  g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
5357 				   TYPE_SIZE_UNIT (TREE_TYPE (type)));
5358 	  gimple_seq_add_stmt (&sub_seq, g);
5359 	  g = gimple_build_assign (i, PLUS_EXPR, i,
5360 				   build_int_cst (TREE_TYPE (i), 1));
5361 	  gimple_seq_add_stmt (&sub_seq, g);
5362 	  g = gimple_build_cond (LE_EXPR, i, v, body, end);
5363 	  gimple_seq_add_stmt (&sub_seq, g);
5364 	  gimple_seq_add_stmt (&sub_seq, gimple_build_label (end));
5365 	}
5366       else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5367 	{
5368 	  tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
5369 
5370 	  if (omp_is_reference (var)
5371 	      && !useless_type_conversion_p (TREE_TYPE (placeholder),
5372 					     TREE_TYPE (ref)))
5373 	    ref = build_fold_addr_expr_loc (clause_loc, ref);
5374 	  SET_DECL_VALUE_EXPR (placeholder, ref);
5375 	  DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
5376 	  lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
5377 	  gimple_seq_add_seq (&sub_seq, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
5378 	  OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5379 	  OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
5380 	}
5381       else
5382 	{
5383 	  x = build2 (code, TREE_TYPE (ref), ref, new_var);
5384 	  ref = build_outer_var_ref (var, ctx);
5385 	  gimplify_assign (ref, x, &sub_seq);
5386 	}
5387     }
5388 
5389   stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START),
5390 			    0);
5391   gimple_seq_add_stmt (stmt_seqp, stmt);
5392 
5393   gimple_seq_add_seq (stmt_seqp, sub_seq);
5394 
5395   stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END),
5396 			    0);
5397   gimple_seq_add_stmt (stmt_seqp, stmt);
5398 }
5399 
5400 
5401 /* Generate code to implement the COPYPRIVATE clauses.  */
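/* For "copyprivate (x)" the sender side stores into the copyout
   record and the receiver side reads it back, roughly (a sketch):

	.omp_copy_o.x = x;	(or = &x when passed by reference)
	...
	x = .omp_copy_i->x;

   lower_omp_single_copy wraps these in the surrounding control
   flow.  */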
5402 
5403 static void
5404 lower_copyprivate_clauses (tree clauses, gimple_seq *slist, gimple_seq *rlist,
5405 			    omp_context *ctx)
5406 {
5407   tree c;
5408 
5409   for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
5410     {
5411       tree var, new_var, ref, x;
5412       bool by_ref;
5413       location_t clause_loc = OMP_CLAUSE_LOCATION (c);
5414 
5415       if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYPRIVATE)
5416 	continue;
5417 
5418       var = OMP_CLAUSE_DECL (c);
5419       by_ref = use_pointer_for_field (var, NULL);
5420 
5421       ref = build_sender_ref (var, ctx);
5422       x = new_var = lookup_decl_in_outer_ctx (var, ctx);
5423       if (by_ref)
5424 	{
5425 	  x = build_fold_addr_expr_loc (clause_loc, new_var);
5426 	  x = fold_convert_loc (clause_loc, TREE_TYPE (ref), x);
5427 	}
5428       gimplify_assign (ref, x, slist);
5429 
5430       ref = build_receiver_ref (var, false, ctx);
5431       if (by_ref)
5432 	{
5433 	  ref = fold_convert_loc (clause_loc,
5434 				  build_pointer_type (TREE_TYPE (new_var)),
5435 				  ref);
5436 	  ref = build_fold_indirect_ref_loc (clause_loc, ref);
5437 	}
5438       if (omp_is_reference (var))
5439 	{
5440 	  ref = fold_convert_loc (clause_loc, TREE_TYPE (new_var), ref);
5441 	  ref = build_simple_mem_ref_loc (clause_loc, ref);
5442 	  new_var = build_simple_mem_ref_loc (clause_loc, new_var);
5443 	}
5444       x = lang_hooks.decls.omp_clause_assign_op (c, new_var, ref);
5445       gimplify_and_add (x, rlist);
5446     }
5447 }
5448 
5449 
5450 /* Generate code to implement the clauses, FIRSTPRIVATE, COPYIN, LASTPRIVATE,
5451    and REDUCTION from the sender (aka parent) side.  */
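/* For example, "firstprivate (a)" on a parallel makes the parent
   store into the data-sharing record before spawning the region, and
   a by-value lastprivate adds the reverse copy afterwards, roughly
   (a sketch):

	.omp_data_o.a = a;	(or = &a when passed by reference)
	...region executes...
	a = .omp_data_o.a;	(lastprivate copy-out)  */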
5452 
5453 static void
5454 lower_send_clauses (tree clauses, gimple_seq *ilist, gimple_seq *olist,
5455     		    omp_context *ctx)
5456 {
5457   tree c, t;
5458   int ignored_looptemp = 0;
5459   bool is_taskloop = false;
5460 
5461   /* For taskloop, ignore the first two _looptemp_ clauses; those are initialized
5462      by GOMP_taskloop.  */
5463   if (is_task_ctx (ctx) && gimple_omp_task_taskloop_p (ctx->stmt))
5464     {
5465       ignored_looptemp = 2;
5466       is_taskloop = true;
5467     }
5468 
5469   for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
5470     {
5471       tree val, ref, x, var;
5472       bool by_ref, do_in = false, do_out = false;
5473       location_t clause_loc = OMP_CLAUSE_LOCATION (c);
5474 
5475       switch (OMP_CLAUSE_CODE (c))
5476 	{
5477 	case OMP_CLAUSE_PRIVATE:
5478 	  if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
5479 	    break;
5480 	  continue;
5481 	case OMP_CLAUSE_FIRSTPRIVATE:
5482 	case OMP_CLAUSE_COPYIN:
5483 	case OMP_CLAUSE_LASTPRIVATE:
5484 	case OMP_CLAUSE_REDUCTION:
5485 	  break;
5486 	case OMP_CLAUSE_SHARED:
5487 	  if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
5488 	    break;
5489 	  continue;
5490 	case OMP_CLAUSE__LOOPTEMP_:
5491 	  if (ignored_looptemp)
5492 	    {
5493 	      ignored_looptemp--;
5494 	      continue;
5495 	    }
5496 	  break;
5497 	default:
5498 	  continue;
5499 	}
5500 
5501       val = OMP_CLAUSE_DECL (c);
5502       if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
5503 	  && TREE_CODE (val) == MEM_REF)
5504 	{
5505 	  val = TREE_OPERAND (val, 0);
5506 	  if (TREE_CODE (val) == POINTER_PLUS_EXPR)
5507 	    val = TREE_OPERAND (val, 0);
5508 	  if (TREE_CODE (val) == INDIRECT_REF
5509 	      || TREE_CODE (val) == ADDR_EXPR)
5510 	    val = TREE_OPERAND (val, 0);
5511 	  if (is_variable_sized (val))
5512 	    continue;
5513 	}
5514 
5515       /* For OMP_CLAUSE_SHARED_FIRSTPRIVATE, look beyond the
5516 	 outer taskloop region.  */
5517       omp_context *ctx_for_o = ctx;
5518       if (is_taskloop
5519 	  && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
5520 	  && OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
5521 	ctx_for_o = ctx->outer;
5522 
5523       var = lookup_decl_in_outer_ctx (val, ctx_for_o);
5524 
5525       if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYIN
5526 	  && is_global_var (var))
5527 	continue;
5528 
5529       t = omp_member_access_dummy_var (var);
5530       if (t)
5531 	{
5532 	  var = DECL_VALUE_EXPR (var);
5533 	  tree o = maybe_lookup_decl_in_outer_ctx (t, ctx_for_o);
5534 	  if (o != t)
5535 	    var = unshare_and_remap (var, t, o);
5536 	  else
5537 	    var = unshare_expr (var);
5538 	}
5539 
5540       if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
5541 	{
5542 	  /* Handle taskloop firstprivate/lastprivate, where the
5543 	     lastprivate on GIMPLE_OMP_TASK is represented as
5544 	     OMP_CLAUSE_SHARED_FIRSTPRIVATE.  */
5545 	  tree f = lookup_sfield ((splay_tree_key) &DECL_UID (val), ctx);
5546 	  x = omp_build_component_ref (ctx->sender_decl, f);
5547 	  if (use_pointer_for_field (val, ctx))
5548 	    var = build_fold_addr_expr (var);
5549 	  gimplify_assign (x, var, ilist);
5550 	  DECL_ABSTRACT_ORIGIN (f) = NULL;
5551 	  continue;
5552 	}
5553 
5554       if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
5555 	   || val == OMP_CLAUSE_DECL (c))
5556 	  && is_variable_sized (val))
5557 	continue;
5558       by_ref = use_pointer_for_field (val, NULL);
5559 
5560       switch (OMP_CLAUSE_CODE (c))
5561 	{
5562 	case OMP_CLAUSE_FIRSTPRIVATE:
5563 	  if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c)
5564 	      && !by_ref
5565 	      && is_task_ctx (ctx))
5566 	    TREE_NO_WARNING (var) = 1;
5567 	  do_in = true;
5568 	  break;
5569 
5570 	case OMP_CLAUSE_PRIVATE:
5571 	case OMP_CLAUSE_COPYIN:
5572 	case OMP_CLAUSE__LOOPTEMP_:
5573 	  do_in = true;
5574 	  break;
5575 
5576 	case OMP_CLAUSE_LASTPRIVATE:
5577 	  if (by_ref || omp_is_reference (val))
5578 	    {
5579 	      if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
5580 		continue;
5581 	      do_in = true;
5582 	    }
5583 	  else
5584 	    {
5585 	      do_out = true;
5586 	      if (lang_hooks.decls.omp_private_outer_ref (val))
5587 		do_in = true;
5588 	    }
5589 	  break;
5590 
5591 	case OMP_CLAUSE_REDUCTION:
5592 	  do_in = true;
5593 	  if (val == OMP_CLAUSE_DECL (c))
5594 	    do_out = !(by_ref || omp_is_reference (val));
5595 	  else
5596 	    by_ref = TREE_CODE (TREE_TYPE (val)) == ARRAY_TYPE;
5597 	  break;
5598 
5599 	default:
5600 	  gcc_unreachable ();
5601 	}
5602 
5603       if (do_in)
5604 	{
5605 	  ref = build_sender_ref (val, ctx);
5606 	  x = by_ref ? build_fold_addr_expr_loc (clause_loc, var) : var;
5607 	  gimplify_assign (ref, x, ilist);
5608 	  if (is_task_ctx (ctx))
5609 	    DECL_ABSTRACT_ORIGIN (TREE_OPERAND (ref, 1)) = NULL;
5610 	}
5611 
5612       if (do_out)
5613 	{
5614 	  ref = build_sender_ref (val, ctx);
5615 	  gimplify_assign (var, ref, olist);
5616 	}
5617     }
5618 }
5619 
5620 /* Generate code to implement SHARED from the sender (aka parent)
5621    side.  This is trickier, since GIMPLE_OMP_PARALLEL_CLAUSES doesn't
5622    list things that got automatically shared.  */
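/* For example, a local VAR that became implicitly shared is sent as

	.omp_data_o.var = var;

   and, unless it is read-only or passed by reference, copied back
   with "var = .omp_data_o.var;" after the region (a sketch).  */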
5623 
5624 static void
5625 lower_send_shared_vars (gimple_seq *ilist, gimple_seq *olist, omp_context *ctx)
5626 {
5627   tree var, ovar, nvar, t, f, x, record_type;
5628 
5629   if (ctx->record_type == NULL)
5630     return;
5631 
5632   record_type = ctx->srecord_type ? ctx->srecord_type : ctx->record_type;
5633   for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
5634     {
5635       ovar = DECL_ABSTRACT_ORIGIN (f);
5636       if (!ovar || TREE_CODE (ovar) == FIELD_DECL)
5637 	continue;
5638 
5639       nvar = maybe_lookup_decl (ovar, ctx);
5640       if (!nvar || !DECL_HAS_VALUE_EXPR_P (nvar))
5641 	continue;
5642 
5643       /* If CTX is a nested parallel directive, find the immediately
5644 	 enclosing parallel or workshare construct that contains a
5645 	 mapping for OVAR.  */
5646       var = lookup_decl_in_outer_ctx (ovar, ctx);
5647 
5648       t = omp_member_access_dummy_var (var);
5649       if (t)
5650 	{
5651 	  var = DECL_VALUE_EXPR (var);
5652 	  tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
5653 	  if (o != t)
5654 	    var = unshare_and_remap (var, t, o);
5655 	  else
5656 	    var = unshare_expr (var);
5657 	}
5658 
5659       if (use_pointer_for_field (ovar, ctx))
5660 	{
5661 	  x = build_sender_ref (ovar, ctx);
5662 	  var = build_fold_addr_expr (var);
5663 	  gimplify_assign (x, var, ilist);
5664 	}
5665       else
5666 	{
5667 	  x = build_sender_ref (ovar, ctx);
5668 	  gimplify_assign (x, var, ilist);
5669 
5670 	  if (!TREE_READONLY (var)
5671 	      /* We don't need to receive a new reference to a result
5672 	         or parm decl.  In fact we may not store to it as we will
5673 		 invalidate any pending RSO and generate wrong gimple
5674 		 during inlining.  */
5675 	      && !((TREE_CODE (var) == RESULT_DECL
5676 		    || TREE_CODE (var) == PARM_DECL)
5677 		   && DECL_BY_REFERENCE (var)))
5678 	    {
5679 	      x = build_sender_ref (ovar, ctx);
5680 	      gimplify_assign (var, x, olist);
5681 	    }
5682 	}
5683     }
5684 }
5685 
5686 /* Emit an OpenACC head marker call, encapsulating the partitioning and
5687    other information that must be processed by the target compiler.
5688    Return the maximum number of dimensions the associated loop might
5689    be partitioned over.  */
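/* The marker call emitted below has the rough shape (a sketch):

	ddvar = IFN_UNIQUE (OACC_HEAD_MARK, ddvar, levels, tag
			    [, gang_static]);  */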
5690 
5691 static unsigned
5692 lower_oacc_head_mark (location_t loc, tree ddvar, tree clauses,
5693 		      gimple_seq *seq, omp_context *ctx)
5694 {
5695   unsigned levels = 0;
5696   unsigned tag = 0;
5697   tree gang_static = NULL_TREE;
5698   auto_vec<tree, 5> args;
5699 
5700   args.quick_push (build_int_cst
5701 		   (integer_type_node, IFN_UNIQUE_OACC_HEAD_MARK));
5702   args.quick_push (ddvar);
5703   for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
5704     {
5705       switch (OMP_CLAUSE_CODE (c))
5706 	{
5707 	case OMP_CLAUSE_GANG:
5708 	  tag |= OLF_DIM_GANG;
5709 	  gang_static = OMP_CLAUSE_GANG_STATIC_EXPR (c);
5710 	  /* static:* is represented by -1, and we can ignore it, as
5711 	     scheduling is always static.  */
5712 	  if (gang_static && integer_minus_onep (gang_static))
5713 	    gang_static = NULL_TREE;
5714 	  levels++;
5715 	  break;
5716 
5717 	case OMP_CLAUSE_WORKER:
5718 	  tag |= OLF_DIM_WORKER;
5719 	  levels++;
5720 	  break;
5721 
5722 	case OMP_CLAUSE_VECTOR:
5723 	  tag |= OLF_DIM_VECTOR;
5724 	  levels++;
5725 	  break;
5726 
5727 	case OMP_CLAUSE_SEQ:
5728 	  tag |= OLF_SEQ;
5729 	  break;
5730 
5731 	case OMP_CLAUSE_AUTO:
5732 	  tag |= OLF_AUTO;
5733 	  break;
5734 
5735 	case OMP_CLAUSE_INDEPENDENT:
5736 	  tag |= OLF_INDEPENDENT;
5737 	  break;
5738 
5739 	case OMP_CLAUSE_TILE:
5740 	  tag |= OLF_TILE;
5741 	  break;
5742 
5743 	default:
5744 	  continue;
5745 	}
5746     }
5747 
5748   if (gang_static)
5749     {
5750       if (DECL_P (gang_static))
5751 	gang_static = build_outer_var_ref (gang_static, ctx);
5752       tag |= OLF_GANG_STATIC;
5753     }
5754 
5755   /* In a parallel region, loops are implicitly INDEPENDENT.  */
5756   omp_context *tgt = enclosing_target_ctx (ctx);
5757   if (!tgt || is_oacc_parallel (tgt))
5758     tag |= OLF_INDEPENDENT;
5759 
5760   if (tag & OLF_TILE)
5761     /* Tiling could use all 3 levels.  */
5762     levels = 3;
5763   else
5764     {
5765       /* A loop lacking SEQ, GANG, WORKER and/or VECTOR could be AUTO.
5766 	 Ensure at least one level, or 2 for possible auto
5767 	 partitioning.  */
5768       bool maybe_auto = !(tag & (((GOMP_DIM_MASK (GOMP_DIM_MAX) - 1)
5769 				  << OLF_DIM_BASE) | OLF_SEQ));
5770 
5771       if (levels < 1u + maybe_auto)
5772 	levels = 1u + maybe_auto;
5773     }
5774 
5775   args.quick_push (build_int_cst (integer_type_node, levels));
5776   args.quick_push (build_int_cst (integer_type_node, tag));
5777   if (gang_static)
5778     args.quick_push (gang_static);
5779 
5780   gcall *call = gimple_build_call_internal_vec (IFN_UNIQUE, args);
5781   gimple_set_location (call, loc);
5782   gimple_set_lhs (call, ddvar);
5783   gimple_seq_add_stmt (seq, call);
5784 
5785   return levels;
5786 }
5787 
5788 /* Emit an OpenACC loop head or tail marker to SEQ.  TOFOLLOW is the
5789    partitioning level of the enclosed region.  */
5790 
5791 static void
5792 lower_oacc_loop_marker (location_t loc, tree ddvar, bool head,
5793 			tree tofollow, gimple_seq *seq)
5794 {
5795   int marker_kind = (head ? IFN_UNIQUE_OACC_HEAD_MARK
5796 		     : IFN_UNIQUE_OACC_TAIL_MARK);
5797   tree marker = build_int_cst (integer_type_node, marker_kind);
5798   int nargs = 2 + (tofollow != NULL_TREE);
5799   gcall *call = gimple_build_call_internal (IFN_UNIQUE, nargs,
5800 					    marker, ddvar, tofollow);
5801   gimple_set_location (call, loc);
5802   gimple_set_lhs (call, ddvar);
5803   gimple_seq_add_stmt (seq, call);
5804 }
5805 
5806 /* Generate the before and after OpenACC loop sequences.  CLAUSES are
5807    the loop clauses, from which we extract reductions.  Initialize
5808    HEAD and TAIL.  */
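/* For example, a loop partitioned over two levels nests as follows,
   once the per-level sequences are stitched together (a sketch):

	HEAD: head-mark, fork(1), fork(2), head-mark
	  <loop body>
	TAIL: tail-mark, join(2), join(1), tail-mark

   with the reduction setup/init and fini/teardown sequences from
   lower_oacc_reductions wrapped around each fork and join.  */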
5809 
5810 static void
5811 lower_oacc_head_tail (location_t loc, tree clauses,
5812 		      gimple_seq *head, gimple_seq *tail, omp_context *ctx)
5813 {
5814   bool inner = false;
5815   tree ddvar = create_tmp_var (integer_type_node, ".data_dep");
5816   gimple_seq_add_stmt (head, gimple_build_assign (ddvar, integer_zero_node));
5817 
5818   unsigned count = lower_oacc_head_mark (loc, ddvar, clauses, head, ctx);
5819   tree fork_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_FORK);
5820   tree join_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_JOIN);
5821 
5822   gcc_assert (count);
5823   for (unsigned done = 1; count; count--, done++)
5824     {
5825       gimple_seq fork_seq = NULL;
5826       gimple_seq join_seq = NULL;
5827 
5828       tree place = build_int_cst (integer_type_node, -1);
5829       gcall *fork = gimple_build_call_internal (IFN_UNIQUE, 3,
5830 						fork_kind, ddvar, place);
5831       gimple_set_location (fork, loc);
5832       gimple_set_lhs (fork, ddvar);
5833 
5834       gcall *join = gimple_build_call_internal (IFN_UNIQUE, 3,
5835 						join_kind, ddvar, place);
5836       gimple_set_location (join, loc);
5837       gimple_set_lhs (join, ddvar);
5838 
5839       /* Mark the beginning of this level sequence.  */
5840       if (inner)
5841 	lower_oacc_loop_marker (loc, ddvar, true,
5842 				build_int_cst (integer_type_node, count),
5843 				&fork_seq);
5844       lower_oacc_loop_marker (loc, ddvar, false,
5845 			      build_int_cst (integer_type_node, done),
5846 			      &join_seq);
5847 
5848       lower_oacc_reductions (loc, clauses, place, inner,
5849 			     fork, join, &fork_seq, &join_seq,  ctx);
5850 
5851       /* Append this level to head.  */
5852       gimple_seq_add_seq (head, fork_seq);
5853       /* Prepend it to tail.  */
5854       gimple_seq_add_seq (&join_seq, *tail);
5855       *tail = join_seq;
5856 
5857       inner = true;
5858     }
5859 
5860   /* Mark the end of the sequence.  */
5861   lower_oacc_loop_marker (loc, ddvar, true, NULL_TREE, head);
5862   lower_oacc_loop_marker (loc, ddvar, false, NULL_TREE, tail);
5863 }
5864 
5865 /* If exceptions are enabled, wrap the statements in BODY in a MUST_NOT_THROW
5866    catch handler and return it.  This prevents programs from violating the
5867    structured block semantics with throws.  */
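/* I.e. BODY is wrapped as (a sketch):

	try { BODY }
	catch { MUST_NOT_THROW (terminate or __builtin_trap); }  */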
5868 
5869 static gimple_seq
5870 maybe_catch_exception (gimple_seq body)
5871 {
5872   gimple *g;
5873   tree decl;
5874 
5875   if (!flag_exceptions)
5876     return body;
5877 
5878   if (lang_hooks.eh_protect_cleanup_actions != NULL)
5879     decl = lang_hooks.eh_protect_cleanup_actions ();
5880   else
5881     decl = builtin_decl_explicit (BUILT_IN_TRAP);
5882 
5883   g = gimple_build_eh_must_not_throw (decl);
5884   g = gimple_build_try (body, gimple_seq_alloc_with_stmt (g),
5885       			GIMPLE_TRY_CATCH);
5886 
5887   return gimple_seq_alloc_with_stmt (g);
5888 }
5889 
5890 
5891 /* Routines to lower OMP directives into OMP-GIMPLE.  */
5892 
5893 /* If CTX is a worksharing context inside of a cancellable parallel
5894    region and it isn't nowait, add a LHS to its GIMPLE_OMP_RETURN
5895    and a conditional branch to the parallel's cancel_label to handle
5896    cancellation in the implicit barrier.  */
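/* The barrier then takes the rough shape (a sketch):

	lhs = GIMPLE_OMP_RETURN;	(cancel-aware implicit barrier)
	if (lhs != false) goto <cancel_label>; else goto <fallthru>;
      <fallthru>:  */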
5897 
5898 static void
5899 maybe_add_implicit_barrier_cancel (omp_context *ctx, gimple_seq *body)
5900 {
5901   gimple *omp_return = gimple_seq_last_stmt (*body);
5902   gcc_assert (gimple_code (omp_return) == GIMPLE_OMP_RETURN);
5903   if (gimple_omp_return_nowait_p (omp_return))
5904     return;
5905   if (ctx->outer
5906       && gimple_code (ctx->outer->stmt) == GIMPLE_OMP_PARALLEL
5907       && ctx->outer->cancellable)
5908     {
5909       tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_CANCEL);
5910       tree c_bool_type = TREE_TYPE (TREE_TYPE (fndecl));
5911       tree lhs = create_tmp_var (c_bool_type);
5912       gimple_omp_return_set_lhs (omp_return, lhs);
5913       tree fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
5914       gimple *g = gimple_build_cond (NE_EXPR, lhs,
5915 				    fold_convert (c_bool_type,
5916 						  boolean_false_node),
5917 				    ctx->outer->cancel_label, fallthru_label);
5918       gimple_seq_add_stmt (body, g);
5919       gimple_seq_add_stmt (body, gimple_build_label (fallthru_label));
5920     }
5921 }
5922 
5923 /* Lower the OpenMP sections directive in the current statement in GSI_P.
5924    CTX is the enclosing OMP context for the current statement.  */
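/* The replacement built below has the rough shape (a sketch):

	<ilist: privatization setup>
	GIMPLE_OMP_SECTIONS <control var>
	GIMPLE_OMP_SECTIONS_SWITCH
	bind { <lowered sections, each ending in GIMPLE_OMP_RETURN> }
	GIMPLE_OMP_CONTINUE <control, control>
	<olist: reductions>  <dlist: destructors>
	GIMPLE_OMP_RETURN [nowait]  */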
5925 
5926 static void
5927 lower_omp_sections (gimple_stmt_iterator *gsi_p, omp_context *ctx)
5928 {
5929   tree block, control;
5930   gimple_stmt_iterator tgsi;
5931   gomp_sections *stmt;
5932   gimple *t;
5933   gbind *new_stmt, *bind;
5934   gimple_seq ilist, dlist, olist, new_body;
5935 
5936   stmt = as_a <gomp_sections *> (gsi_stmt (*gsi_p));
5937 
5938   push_gimplify_context ();
5939 
5940   dlist = NULL;
5941   ilist = NULL;
5942   lower_rec_input_clauses (gimple_omp_sections_clauses (stmt),
5943       			   &ilist, &dlist, ctx, NULL);
5944 
5945   new_body = gimple_omp_body (stmt);
5946   gimple_omp_set_body (stmt, NULL);
5947   tgsi = gsi_start (new_body);
5948   for (; !gsi_end_p (tgsi); gsi_next (&tgsi))
5949     {
5950       omp_context *sctx;
5951       gimple *sec_start;
5952 
5953       sec_start = gsi_stmt (tgsi);
5954       sctx = maybe_lookup_ctx (sec_start);
5955       gcc_assert (sctx);
5956 
5957       lower_omp (gimple_omp_body_ptr (sec_start), sctx);
5958       gsi_insert_seq_after (&tgsi, gimple_omp_body (sec_start),
5959 			    GSI_CONTINUE_LINKING);
5960       gimple_omp_set_body (sec_start, NULL);
5961 
5962       if (gsi_one_before_end_p (tgsi))
5963 	{
5964 	  gimple_seq l = NULL;
5965 	  lower_lastprivate_clauses (gimple_omp_sections_clauses (stmt), NULL,
5966 				     &l, ctx);
5967 	  gsi_insert_seq_after (&tgsi, l, GSI_CONTINUE_LINKING);
5968 	  gimple_omp_section_set_last (sec_start);
5969 	}
5970 
5971       gsi_insert_after (&tgsi, gimple_build_omp_return (false),
5972 			GSI_CONTINUE_LINKING);
5973     }
5974 
5975   block = make_node (BLOCK);
5976   bind = gimple_build_bind (NULL, new_body, block);
5977 
5978   olist = NULL;
5979   lower_reduction_clauses (gimple_omp_sections_clauses (stmt), &olist, ctx);
5980 
5981   block = make_node (BLOCK);
5982   new_stmt = gimple_build_bind (NULL, NULL, block);
5983   gsi_replace (gsi_p, new_stmt, true);
5984 
5985   pop_gimplify_context (new_stmt);
5986   gimple_bind_append_vars (new_stmt, ctx->block_vars);
5987   BLOCK_VARS (block) = gimple_bind_vars (bind);
5988   if (BLOCK_VARS (block))
5989     TREE_USED (block) = 1;
5990 
5991   new_body = NULL;
5992   gimple_seq_add_seq (&new_body, ilist);
5993   gimple_seq_add_stmt (&new_body, stmt);
5994   gimple_seq_add_stmt (&new_body, gimple_build_omp_sections_switch ());
5995   gimple_seq_add_stmt (&new_body, bind);
5996 
5997   control = create_tmp_var (unsigned_type_node, ".section");
5998   t = gimple_build_omp_continue (control, control);
5999   gimple_omp_sections_set_control (stmt, control);
6000   gimple_seq_add_stmt (&new_body, t);
6001 
6002   gimple_seq_add_seq (&new_body, olist);
6003   if (ctx->cancellable)
6004     gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
6005   gimple_seq_add_seq (&new_body, dlist);
6006 
6007   new_body = maybe_catch_exception (new_body);
6008 
6009   bool nowait = omp_find_clause (gimple_omp_sections_clauses (stmt),
6010 				 OMP_CLAUSE_NOWAIT) != NULL_TREE;
6011   t = gimple_build_omp_return (nowait);
6012   gimple_seq_add_stmt (&new_body, t);
6013   maybe_add_implicit_barrier_cancel (ctx, &new_body);
6014 
6015   gimple_bind_set_body (new_stmt, new_body);
6016 }
6017 
6018 
6019 /* A subroutine of lower_omp_single.  Expand the simple form of
6020    a GIMPLE_OMP_SINGLE, without a copyprivate clause:
6021 
6022      	if (GOMP_single_start ())
6023 	  BODY;
6024 	[ GOMP_barrier (); ]	-> unless 'nowait' is present.
6025 
6026   FIXME.  It may be better to delay expanding the logic of this until
6027   pass_expand_omp.  The expanded logic may make the job more difficult
6028   for a synchronization analysis pass.  */
6029 
6030 static void
6031 lower_omp_single_simple (gomp_single *single_stmt, gimple_seq *pre_p)
6032 {
6033   location_t loc = gimple_location (single_stmt);
6034   tree tlabel = create_artificial_label (loc);
6035   tree flabel = create_artificial_label (loc);
6036   gimple *call, *cond;
6037   tree lhs, decl;
6038 
6039   decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_START);
6040   lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (decl)));
6041   call = gimple_build_call (decl, 0);
6042   gimple_call_set_lhs (call, lhs);
6043   gimple_seq_add_stmt (pre_p, call);
6044 
6045   cond = gimple_build_cond (EQ_EXPR, lhs,
6046 			    fold_convert_loc (loc, TREE_TYPE (lhs),
6047 					      boolean_true_node),
6048 			    tlabel, flabel);
6049   gimple_seq_add_stmt (pre_p, cond);
6050   gimple_seq_add_stmt (pre_p, gimple_build_label (tlabel));
6051   gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
6052   gimple_seq_add_stmt (pre_p, gimple_build_label (flabel));
6053 }
6054 
6055 
6056 /* A subroutine of lower_omp_single.  Expand the form of
6057    a GIMPLE_OMP_SINGLE that has a copyprivate clause:
6058 
6059 	#pragma omp single copyprivate (a, b, c)
6060 
6061    Create a new structure to hold copies of 'a', 'b' and 'c' and emit:
6062 
6063       {
6064 	if ((copyout_p = GOMP_single_copy_start ()) == NULL)
6065 	  {
6066 	    BODY;
6067 	    copyout.a = a;
6068 	    copyout.b = b;
6069 	    copyout.c = c;
6070 	    GOMP_single_copy_end (&copyout);
6071 	  }
6072 	else
6073 	  {
6074 	    a = copyout_p->a;
6075 	    b = copyout_p->b;
6076 	    c = copyout_p->c;
6077 	  }
6078 	GOMP_barrier ();
6079       }
6080 
6081   FIXME.  It may be better to delay expanding the logic of this until
6082   pass_expand_omp.  The expanded logic may make the job more difficult
6083   for a synchronization analysis pass.  */
6084 
6085 static void
6086 lower_omp_single_copy (gomp_single *single_stmt, gimple_seq *pre_p,
6087 		       omp_context *ctx)
6088 {
6089   tree ptr_type, t, l0, l1, l2, bfn_decl;
6090   gimple_seq copyin_seq;
6091   location_t loc = gimple_location (single_stmt);
6092 
6093   ctx->sender_decl = create_tmp_var (ctx->record_type, ".omp_copy_o");
6094 
6095   ptr_type = build_pointer_type (ctx->record_type);
6096   ctx->receiver_decl = create_tmp_var (ptr_type, ".omp_copy_i");
6097 
6098   l0 = create_artificial_label (loc);
6099   l1 = create_artificial_label (loc);
6100   l2 = create_artificial_label (loc);
6101 
6102   bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_START);
6103   t = build_call_expr_loc (loc, bfn_decl, 0);
6104   t = fold_convert_loc (loc, ptr_type, t);
6105   gimplify_assign (ctx->receiver_decl, t, pre_p);
6106 
6107   t = build2 (EQ_EXPR, boolean_type_node, ctx->receiver_decl,
6108 	      build_int_cst (ptr_type, 0));
6109   t = build3 (COND_EXPR, void_type_node, t,
6110 	      build_and_jump (&l0), build_and_jump (&l1));
6111   gimplify_and_add (t, pre_p);
6112 
6113   gimple_seq_add_stmt (pre_p, gimple_build_label (l0));
6114 
6115   gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
6116 
6117   copyin_seq = NULL;
6118   lower_copyprivate_clauses (gimple_omp_single_clauses (single_stmt), pre_p,
6119 			      &copyin_seq, ctx);
6120 
6121   t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
6122   bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_END);
6123   t = build_call_expr_loc (loc, bfn_decl, 1, t);
6124   gimplify_and_add (t, pre_p);
6125 
6126   t = build_and_jump (&l2);
6127   gimplify_and_add (t, pre_p);
6128 
6129   gimple_seq_add_stmt (pre_p, gimple_build_label (l1));
6130 
6131   gimple_seq_add_seq (pre_p, copyin_seq);
6132 
6133   gimple_seq_add_stmt (pre_p, gimple_build_label (l2));
6134 }
6135 
6136 
6137 /* Expand code for an OpenMP single directive.  */
6138 
6139 static void
6140 lower_omp_single (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6141 {
6142   tree block;
6143   gomp_single *single_stmt = as_a <gomp_single *> (gsi_stmt (*gsi_p));
6144   gbind *bind;
6145   gimple_seq bind_body, bind_body_tail = NULL, dlist;
6146 
6147   push_gimplify_context ();
6148 
6149   block = make_node (BLOCK);
6150   bind = gimple_build_bind (NULL, NULL, block);
6151   gsi_replace (gsi_p, bind, true);
6152   bind_body = NULL;
6153   dlist = NULL;
6154   lower_rec_input_clauses (gimple_omp_single_clauses (single_stmt),
6155 			   &bind_body, &dlist, ctx, NULL);
6156   lower_omp (gimple_omp_body_ptr (single_stmt), ctx);
6157 
6158   gimple_seq_add_stmt (&bind_body, single_stmt);
6159 
6160   if (ctx->record_type)
6161     lower_omp_single_copy (single_stmt, &bind_body, ctx);
6162   else
6163     lower_omp_single_simple (single_stmt, &bind_body);
6164 
6165   gimple_omp_set_body (single_stmt, NULL);
6166 
6167   gimple_seq_add_seq (&bind_body, dlist);
6168 
6169   bind_body = maybe_catch_exception (bind_body);
6170 
6171   bool nowait = omp_find_clause (gimple_omp_single_clauses (single_stmt),
6172 				 OMP_CLAUSE_NOWAIT) != NULL_TREE;
6173   gimple *g = gimple_build_omp_return (nowait);
6174   gimple_seq_add_stmt (&bind_body_tail, g);
6175   maybe_add_implicit_barrier_cancel (ctx, &bind_body_tail);
6176   if (ctx->record_type)
6177     {
6178       gimple_stmt_iterator gsi = gsi_start (bind_body_tail);
6179       tree clobber = build_constructor (ctx->record_type, NULL);
6180       TREE_THIS_VOLATILE (clobber) = 1;
6181       gsi_insert_after (&gsi, gimple_build_assign (ctx->sender_decl,
6182 						   clobber), GSI_SAME_STMT);
6183     }
6184   gimple_seq_add_seq (&bind_body, bind_body_tail);
6185   gimple_bind_set_body (bind, bind_body);
6186 
6187   pop_gimplify_context (bind);
6188 
6189   gimple_bind_append_vars (bind, ctx->block_vars);
6190   BLOCK_VARS (block) = ctx->block_vars;
6191   if (BLOCK_VARS (block))
6192     TREE_USED (block) = 1;
6193 }
6194 
6195 
6196 /* Expand code for an OpenMP master directive.  */
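/* I.e. the master body is guarded as (a sketch):

	if (omp_get_thread_num () != 0) goto lab;
	BODY;
      lab:  */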
6197 
6198 static void
6199 lower_omp_master (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6200 {
6201   tree block, lab = NULL, x, bfn_decl;
6202   gimple *stmt = gsi_stmt (*gsi_p);
6203   gbind *bind;
6204   location_t loc = gimple_location (stmt);
6205   gimple_seq tseq;
6206 
6207   push_gimplify_context ();
6208 
6209   block = make_node (BLOCK);
6210   bind = gimple_build_bind (NULL, NULL, block);
6211   gsi_replace (gsi_p, bind, true);
6212   gimple_bind_add_stmt (bind, stmt);
6213 
6214   bfn_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
6215   x = build_call_expr_loc (loc, bfn_decl, 0);
6216   x = build2 (EQ_EXPR, boolean_type_node, x, integer_zero_node);
6217   x = build3 (COND_EXPR, void_type_node, x, NULL, build_and_jump (&lab));
6218   tseq = NULL;
6219   gimplify_and_add (x, &tseq);
6220   gimple_bind_add_seq (bind, tseq);
6221 
6222   lower_omp (gimple_omp_body_ptr (stmt), ctx);
6223   gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
6224   gimple_bind_add_seq (bind, gimple_omp_body (stmt));
6225   gimple_omp_set_body (stmt, NULL);
6226 
6227   gimple_bind_add_stmt (bind, gimple_build_label (lab));
6228 
6229   gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
6230 
6231   pop_gimplify_context (bind);
6232 
6233   gimple_bind_append_vars (bind, ctx->block_vars);
6234   BLOCK_VARS (block) = ctx->block_vars;
6235 }
6236 
6237 
6238 /* Expand code for an OpenMP taskgroup directive.  */
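/* I.e. (a sketch):

	GOMP_taskgroup_start ();
	BODY;
	GIMPLE_OMP_RETURN;

   where the matching GOMP_taskgroup_end () call is presumably
   introduced when the region end is expanded later.  */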
6239 
6240 static void
6241 lower_omp_taskgroup (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6242 {
6243   gimple *stmt = gsi_stmt (*gsi_p);
6244   gcall *x;
6245   gbind *bind;
6246   tree block = make_node (BLOCK);
6247 
6248   bind = gimple_build_bind (NULL, NULL, block);
6249   gsi_replace (gsi_p, bind, true);
6250   gimple_bind_add_stmt (bind, stmt);
6251 
6252   x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_START),
6253 			 0);
6254   gimple_bind_add_stmt (bind, x);
6255 
6256   lower_omp (gimple_omp_body_ptr (stmt), ctx);
6257   gimple_bind_add_seq (bind, gimple_omp_body (stmt));
6258   gimple_omp_set_body (stmt, NULL);
6259 
6260   gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
6261 
6262   gimple_bind_append_vars (bind, ctx->block_vars);
6263   BLOCK_VARS (block) = ctx->block_vars;
6264 }
6265 
6266 
6267 /* Fold the OMP_ORDERED_CLAUSES for the OMP_ORDERED in ORD_STMT if possible.  */
6268 
6269 static void
6270 lower_omp_ordered_clauses (gimple_stmt_iterator *gsi_p, gomp_ordered *ord_stmt,
6271 			   omp_context *ctx)
6272 {
6273   struct omp_for_data fd;
6274   if (!ctx->outer || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR)
6275     return;
6276 
6277   unsigned int len = gimple_omp_for_collapse (ctx->outer->stmt);
6278   struct omp_for_data_loop *loops = XALLOCAVEC (struct omp_for_data_loop, len);
6279   omp_extract_for_data (as_a <gomp_for *> (ctx->outer->stmt), &fd, loops);
6280   if (!fd.ordered)
6281     return;
6282 
6283   tree *list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
6284   tree c = gimple_omp_ordered_clauses (ord_stmt);
6285   if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
6286       && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
6287     {
6288       /* Merge depend clauses from multiple adjacent
6289 	 #pragma omp ordered depend(sink:...) constructs
6290 	 into one #pragma omp ordered depend(sink:...), so that
6291 	 we can optimize them together.  */
6292       gimple_stmt_iterator gsi = *gsi_p;
6293       gsi_next (&gsi);
6294       while (!gsi_end_p (gsi))
6295 	{
6296 	  gimple *stmt = gsi_stmt (gsi);
6297 	  if (is_gimple_debug (stmt)
6298 	      || gimple_code (stmt) == GIMPLE_NOP)
6299 	    {
6300 	      gsi_next (&gsi);
6301 	      continue;
6302 	    }
6303 	  if (gimple_code (stmt) != GIMPLE_OMP_ORDERED)
6304 	    break;
6305 	  gomp_ordered *ord_stmt2 = as_a <gomp_ordered *> (stmt);
6306 	  c = gimple_omp_ordered_clauses (ord_stmt2);
6307 	  if (c == NULL_TREE
6308 	      || OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND
6309 	      || OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
6310 	    break;
6311 	  while (*list_p)
6312 	    list_p = &OMP_CLAUSE_CHAIN (*list_p);
6313 	  *list_p = c;
6314 	  gsi_remove (&gsi, true);
6315 	}
6316     }
6317 
6318   /* Canonicalize sink dependence clauses into one folded clause if
6319      possible.
6320 
6321      The basic algorithm is to create a sink vector whose first
6322      element is the GCD of all the first elements, and whose remaining
6323      elements are the minimum of the subsequent columns.
6324 
6325      We ignore dependence vectors whose first element is zero because
6326      such dependencies are known to be executed by the same thread.
6327 
6328      We take into account the direction of the loop, so a minimum
6329      becomes a maximum if the loop is iterating forwards.  We also
6330      ignore sink clauses where the loop direction is unknown, or where
6331      the offsets are clearly invalid because they are not a multiple
6332      of the loop increment.
6333 
6334      For example:
6335 
6336 	#pragma omp for ordered(2)
6337 	for (i=0; i < N; ++i)
6338 	  for (j=0; j < M; ++j)
6339 	    {
6340 	      #pragma omp ordered \
6341 		depend(sink:i-8,j-2) \
6342 		depend(sink:i,j-1) \	// Completely ignored because i+0.
6343 		depend(sink:i-4,j-3) \
6344 		depend(sink:i-6,j-4)
6345 	      #pragma omp ordered depend(source)
6346 	    }
6347 
6348      Folded clause is:
6349 
6350 	depend(sink:-gcd(8,4,6),-min(2,3,4))
6351 	  -or-
6352 	depend(sink:-2,-2)
6353   */
6354 
6355   /* FIXME: Computing GCD's where the first element is zero is
6356      non-trivial in the presence of collapsed loops.  Do this later.  */
6357   if (fd.collapse > 1)
6358     return;
6359 
6360   wide_int *folded_deps = XALLOCAVEC (wide_int, 2 * len - 1);
6361 
6362   /* wide_int is not a POD so it must be default-constructed.  */
6363   for (unsigned i = 0; i != 2 * len - 1; ++i)
6364     new (static_cast<void*>(folded_deps + i)) wide_int ();
6365 
6366   tree folded_dep = NULL_TREE;
6367   /* TRUE if the first dimension's offset is negative.  */
6368   bool neg_offset_p = false;
6369 
6370   list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
6371   unsigned int i;
6372   while ((c = *list_p) != NULL)
6373     {
6374       bool remove = false;
6375 
6376       gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND);
6377       if (OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
6378 	goto next_ordered_clause;
6379 
6380       tree vec;
6381       for (vec = OMP_CLAUSE_DECL (c), i = 0;
6382 	   vec && TREE_CODE (vec) == TREE_LIST;
6383 	   vec = TREE_CHAIN (vec), ++i)
6384 	{
6385 	  gcc_assert (i < len);
6386 
6387 	  /* omp_extract_for_data has canonicalized the condition.  */
6388 	  gcc_assert (fd.loops[i].cond_code == LT_EXPR
6389 		      || fd.loops[i].cond_code == GT_EXPR);
6390 	  bool forward = fd.loops[i].cond_code == LT_EXPR;
6391 	  bool maybe_lexically_later = true;
6392 
6393 	  /* While the committee makes up its mind, bail if we have any
6394 	     non-constant steps.  */
6395 	  if (TREE_CODE (fd.loops[i].step) != INTEGER_CST)
6396 	    goto lower_omp_ordered_ret;
6397 
6398 	  tree itype = TREE_TYPE (TREE_VALUE (vec));
6399 	  if (POINTER_TYPE_P (itype))
6400 	    itype = sizetype;
6401 	  wide_int offset = wide_int::from (wi::to_wide (TREE_PURPOSE (vec)),
6402 					    TYPE_PRECISION (itype),
6403 					    TYPE_SIGN (itype));
6404 
6405 	  /* Ignore invalid offsets that are not multiples of the step.  */
6406 	  if (!wi::multiple_of_p (wi::abs (offset),
6407 				  wi::abs (wi::to_wide (fd.loops[i].step)),
6408 				  UNSIGNED))
6409 	    {
6410 	      warning_at (OMP_CLAUSE_LOCATION (c), 0,
6411 			  "ignoring sink clause with offset that is not "
6412 			  "a multiple of the loop step");
6413 	      remove = true;
6414 	      goto next_ordered_clause;
6415 	    }
6416 
6417 	  /* Calculate the first dimension.  The first dimension of
6418 	     the folded dependency vector is the GCD of the first
6419 	     elements, while ignoring any first elements whose offset
6420 	     is 0.  */
6421 	  if (i == 0)
6422 	    {
6423 	      /* Ignore dependence vectors whose first dimension is 0.  */
6424 	      if (offset == 0)
6425 		{
6426 		  remove = true;
6427 		  goto next_ordered_clause;
6428 		}
6429 	      else
6430 		{
6431 		  if (!TYPE_UNSIGNED (itype) && (forward ^ wi::neg_p (offset)))
6432 		    {
6433 		      error_at (OMP_CLAUSE_LOCATION (c),
6434 				"first offset must be in opposite direction "
6435 				"of loop iterations");
6436 		      goto lower_omp_ordered_ret;
6437 		    }
6438 		  if (forward)
6439 		    offset = -offset;
6440 		  neg_offset_p = forward;
6441 		  /* Initialize the first time around.  */
6442 		  if (folded_dep == NULL_TREE)
6443 		    {
6444 		      folded_dep = c;
6445 		      folded_deps[0] = offset;
6446 		    }
6447 		  else
6448 		    folded_deps[0] = wi::gcd (folded_deps[0],
6449 					      offset, UNSIGNED);
6450 		}
6451 	    }
6452 	  /* Calculate minimum for the remaining dimensions.  */
6453 	  else
6454 	    {
6455 	      folded_deps[len + i - 1] = offset;
6456 	      if (folded_dep == c)
6457 		folded_deps[i] = offset;
6458 	      else if (maybe_lexically_later
6459 		       && !wi::eq_p (folded_deps[i], offset))
6460 		{
6461 		  if (forward ^ wi::gts_p (folded_deps[i], offset))
6462 		    {
6463 		      unsigned int j;
6464 		      folded_dep = c;
6465 		      for (j = 1; j <= i; j++)
6466 			folded_deps[j] = folded_deps[len + j - 1];
6467 		    }
6468 		  else
6469 		    maybe_lexically_later = false;
6470 		}
6471 	    }
6472 	}
6473       gcc_assert (i == len);
6474 
6475       remove = true;
6476 
6477     next_ordered_clause:
6478       if (remove)
6479 	*list_p = OMP_CLAUSE_CHAIN (c);
6480       else
6481 	list_p = &OMP_CLAUSE_CHAIN (c);
6482     }
6483 
6484   if (folded_dep)
6485     {
6486       if (neg_offset_p)
6487 	folded_deps[0] = -folded_deps[0];
6488 
6489       tree itype = TREE_TYPE (TREE_VALUE (OMP_CLAUSE_DECL (folded_dep)));
6490       if (POINTER_TYPE_P (itype))
6491 	itype = sizetype;
6492 
6493       TREE_PURPOSE (OMP_CLAUSE_DECL (folded_dep))
6494 	= wide_int_to_tree (itype, folded_deps[0]);
6495       OMP_CLAUSE_CHAIN (folded_dep) = gimple_omp_ordered_clauses (ord_stmt);
6496       *gimple_omp_ordered_clauses_ptr (ord_stmt) = folded_dep;
6497     }
6498 
6499  lower_omp_ordered_ret:
6500 
6501   /* Ordered without clauses is equivalent to #pragma omp ordered threads,
6502      while we want a nop instead if we remove all clauses.  */
6503   if (gimple_omp_ordered_clauses (ord_stmt) == NULL_TREE)
6504     gsi_replace (gsi_p, gimple_build_nop (), true);
6505 }
6506 
6507 
6508 /* Lower code for an OpenMP ordered directive.  */
6509 
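/* A sketch of the plain (non-SIMD) lowering done below: the body is
   simply bracketed by runtime calls,

     GOMP_ordered_start ();
     <body>
     GOMP_ordered_end ();

   while for "ordered simd" the internal functions
   IFN_GOMP_SIMD_ORDERED_START and IFN_GOMP_SIMD_ORDERED_END mark the
   region instead, to be resolved against the enclosing simd loop.  */
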
6510 static void
6511 lower_omp_ordered (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6512 {
6513   tree block;
6514   gimple *stmt = gsi_stmt (*gsi_p), *g;
6515   gomp_ordered *ord_stmt = as_a <gomp_ordered *> (stmt);
6516   gcall *x;
6517   gbind *bind;
6518   bool simd = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
6519 			       OMP_CLAUSE_SIMD);
6520   /* FIXME: this should check presence of OMP_CLAUSE__SIMT_ on the enclosing
6521      loop.  */
6522   bool maybe_simt
6523     = simd && omp_maybe_offloaded_ctx (ctx) && omp_max_simt_vf () > 1;
6524   bool threads = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
6525 				  OMP_CLAUSE_THREADS);
6526 
6527   if (omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
6528 		       OMP_CLAUSE_DEPEND))
6529     {
6530       /* FIXME: This needs to be moved to the expansion pass, to verify
6531	 various conditions only testable on a CFG with dominators computed;
6532	 also, all the depend clauses still to be merged might need to remain
6533	 available for the runtime checks.  */
6534       if (0)
6535 	lower_omp_ordered_clauses (gsi_p, ord_stmt, ctx);
6536       return;
6537     }
6538 
6539   push_gimplify_context ();
6540 
6541   block = make_node (BLOCK);
6542   bind = gimple_build_bind (NULL, NULL, block);
6543   gsi_replace (gsi_p, bind, true);
6544   gimple_bind_add_stmt (bind, stmt);
6545 
6546   if (simd)
6547     {
6548       x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_START, 1,
6549 				      build_int_cst (NULL_TREE, threads));
6550       cfun->has_simduid_loops = true;
6551     }
6552   else
6553     x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_START),
6554 			   0);
6555   gimple_bind_add_stmt (bind, x);
6556 
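  /* A sketch (pseudo-GIMPLE, internal-function names abbreviated) of the
     SIMT code built below: the lanes take turns executing the ordered
     body, in lane order:

       counter = GOMP_SIMT_LANE ();
     body:
       if (GOMP_SIMT_ORDERED_PRED (counter) == 0)
	 <ordered body>
     test:
       counter = counter - 1;
       if (GOMP_SIMT_VOTE_ANY (counter >= 0))
	 goto body;  */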
6557   tree counter = NULL_TREE, test = NULL_TREE, body = NULL_TREE;
6558   if (maybe_simt)
6559     {
6560       counter = create_tmp_var (integer_type_node);
6561       g = gimple_build_call_internal (IFN_GOMP_SIMT_LANE, 0);
6562       gimple_call_set_lhs (g, counter);
6563       gimple_bind_add_stmt (bind, g);
6564 
6565       body = create_artificial_label (UNKNOWN_LOCATION);
6566       test = create_artificial_label (UNKNOWN_LOCATION);
6567       gimple_bind_add_stmt (bind, gimple_build_label (body));
6568 
6569       tree simt_pred = create_tmp_var (integer_type_node);
6570       g = gimple_build_call_internal (IFN_GOMP_SIMT_ORDERED_PRED, 1, counter);
6571       gimple_call_set_lhs (g, simt_pred);
6572       gimple_bind_add_stmt (bind, g);
6573 
6574       tree t = create_artificial_label (UNKNOWN_LOCATION);
6575       g = gimple_build_cond (EQ_EXPR, simt_pred, integer_zero_node, t, test);
6576       gimple_bind_add_stmt (bind, g);
6577 
6578       gimple_bind_add_stmt (bind, gimple_build_label (t));
6579     }
6580   lower_omp (gimple_omp_body_ptr (stmt), ctx);
6581   gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
6582   gimple_bind_add_seq (bind, gimple_omp_body (stmt));
6583   gimple_omp_set_body (stmt, NULL);
6584 
6585   if (maybe_simt)
6586     {
6587       gimple_bind_add_stmt (bind, gimple_build_label (test));
6588       g = gimple_build_assign (counter, MINUS_EXPR, counter, integer_one_node);
6589       gimple_bind_add_stmt (bind, g);
6590 
6591       tree c = build2 (GE_EXPR, boolean_type_node, counter, integer_zero_node);
6592       tree nonneg = create_tmp_var (integer_type_node);
6593       gimple_seq tseq = NULL;
6594       gimplify_assign (nonneg, fold_convert (integer_type_node, c), &tseq);
6595       gimple_bind_add_seq (bind, tseq);
6596 
6597       g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY, 1, nonneg);
6598       gimple_call_set_lhs (g, nonneg);
6599       gimple_bind_add_stmt (bind, g);
6600 
6601       tree end = create_artificial_label (UNKNOWN_LOCATION);
6602       g = gimple_build_cond (NE_EXPR, nonneg, integer_zero_node, body, end);
6603       gimple_bind_add_stmt (bind, g);
6604 
6605       gimple_bind_add_stmt (bind, gimple_build_label (end));
6606     }
6607   if (simd)
6608     x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_END, 1,
6609 				    build_int_cst (NULL_TREE, threads));
6610   else
6611     x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_END),
6612 			   0);
6613   gimple_bind_add_stmt (bind, x);
6614 
6615   gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
6616 
6617   pop_gimplify_context (bind);
6618 
6619   gimple_bind_append_vars (bind, ctx->block_vars);
6620   BLOCK_VARS (block) = gimple_bind_vars (bind);
6621 }
6622 
6623 
6624 /* Lower a GIMPLE_OMP_CRITICAL statement.  This is a relatively simple
6625    substitution of a couple of function calls.  But in the NAMED case,
6626    it requires that the front ends coordinate on a symbol name.  It is
6627    therefore best put here in common code.  */
6628 
6629 static GTY(()) hash_map<tree, tree> *critical_name_mutexes;
6630 
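/* A sketch of the lowering: "#pragma omp critical (foo)" brackets its
   body with calls through the shared mutex created below,

     GOMP_critical_name_start (&.gomp_critical_user_foo);
     <body>
     GOMP_critical_name_end (&.gomp_critical_user_foo);

   while the unnamed form uses GOMP_critical_start/GOMP_critical_end.  */
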
6631 static void
6632 lower_omp_critical (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6633 {
6634   tree block;
6635   tree name, lock, unlock;
6636   gomp_critical *stmt = as_a <gomp_critical *> (gsi_stmt (*gsi_p));
6637   gbind *bind;
6638   location_t loc = gimple_location (stmt);
6639   gimple_seq tbody;
6640 
6641   name = gimple_omp_critical_name (stmt);
6642   if (name)
6643     {
6644       tree decl;
6645 
6646       if (!critical_name_mutexes)
6647 	critical_name_mutexes = hash_map<tree, tree>::create_ggc (10);
6648 
6649       tree *n = critical_name_mutexes->get (name);
6650       if (n == NULL)
6651 	{
6652 	  char *new_str;
6653 
6654 	  decl = create_tmp_var_raw (ptr_type_node);
6655 
6656 	  new_str = ACONCAT ((".gomp_critical_user_",
6657 			      IDENTIFIER_POINTER (name), NULL));
6658 	  DECL_NAME (decl) = get_identifier (new_str);
6659 	  TREE_PUBLIC (decl) = 1;
6660 	  TREE_STATIC (decl) = 1;
6661 	  DECL_COMMON (decl) = 1;
6662 	  DECL_ARTIFICIAL (decl) = 1;
6663 	  DECL_IGNORED_P (decl) = 1;
6664 
6665 	  varpool_node::finalize_decl (decl);
6666 
6667 	  critical_name_mutexes->put (name, decl);
6668 	}
6669       else
6670 	decl = *n;
6671 
6672       /* If '#pragma omp critical' is inside offloaded region or
6673 	 inside function marked as offloadable, the symbol must be
6674 	 marked as offloadable too.  */
6675       omp_context *octx;
6676       if (cgraph_node::get (current_function_decl)->offloadable)
6677 	varpool_node::get_create (decl)->offloadable = 1;
6678       else
6679 	for (octx = ctx->outer; octx; octx = octx->outer)
6680 	  if (is_gimple_omp_offloaded (octx->stmt))
6681 	    {
6682 	      varpool_node::get_create (decl)->offloadable = 1;
6683 	      break;
6684 	    }
6685 
6686       lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_START);
6687       lock = build_call_expr_loc (loc, lock, 1,
6688 				  build_fold_addr_expr_loc (loc, decl));
6689 
6690       unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_END);
6691       unlock = build_call_expr_loc (loc, unlock, 1,
6692 				build_fold_addr_expr_loc (loc, decl));
6693     }
6694   else
6695     {
6696       lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_START);
6697       lock = build_call_expr_loc (loc, lock, 0);
6698 
6699       unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_END);
6700       unlock = build_call_expr_loc (loc, unlock, 0);
6701     }
6702 
6703   push_gimplify_context ();
6704 
6705   block = make_node (BLOCK);
6706   bind = gimple_build_bind (NULL, NULL, block);
6707   gsi_replace (gsi_p, bind, true);
6708   gimple_bind_add_stmt (bind, stmt);
6709 
6710   tbody = gimple_bind_body (bind);
6711   gimplify_and_add (lock, &tbody);
6712   gimple_bind_set_body (bind, tbody);
6713 
6714   lower_omp (gimple_omp_body_ptr (stmt), ctx);
6715   gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
6716   gimple_bind_add_seq (bind, gimple_omp_body (stmt));
6717   gimple_omp_set_body (stmt, NULL);
6718 
6719   tbody = gimple_bind_body (bind);
6720   gimplify_and_add (unlock, &tbody);
6721   gimple_bind_set_body (bind, tbody);
6722 
6723   gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
6724 
6725   pop_gimplify_context (bind);
6726   gimple_bind_append_vars (bind, ctx->block_vars);
6727   BLOCK_VARS (block) = gimple_bind_vars (bind);
6728 }
6729 
6730 /* A subroutine of lower_omp_for.  Generate code to emit the predicate
6731    for a lastprivate clause.  Given a loop control predicate of (V
6732    cond N2), we gate the clause on (!(V cond N2)).  The lowered form
6733    is appended to *DLIST, iterator initialization is appended to
6734    *BODY_P.  */
6735 
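/* For example (a sketch), given "for (V = 0; V < N2; V++)" with step 1,
   the copy-out code produced by lower_lastprivate_clauses is gated as

     if (V == N2)	// EQ_EXPR form, valid because the step is +-1
       <copy private values back to the original list items>

   and *BODY_P receives an initialization of V so that threads which
   execute no iterations never satisfy the predicate by accident.  */
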
6736 static void
6737 lower_omp_for_lastprivate (struct omp_for_data *fd, gimple_seq *body_p,
6738 			   gimple_seq *dlist, struct omp_context *ctx)
6739 {
6740   tree clauses, cond, vinit;
6741   enum tree_code cond_code;
6742   gimple_seq stmts;
6743 
6744   cond_code = fd->loop.cond_code;
6745   cond_code = cond_code == LT_EXPR ? GE_EXPR : LE_EXPR;
6746 
6747   /* When possible, use a strict equality expression.  This can let VRP
6748      type optimizations deduce the value and remove a copy.  */
6749   if (tree_fits_shwi_p (fd->loop.step))
6750     {
6751       HOST_WIDE_INT step = tree_to_shwi (fd->loop.step);
6752       if (step == 1 || step == -1)
6753 	cond_code = EQ_EXPR;
6754     }
6755 
6756   if (gimple_omp_for_kind (fd->for_stmt) == GF_OMP_FOR_KIND_GRID_LOOP
6757       || gimple_omp_for_grid_phony (fd->for_stmt))
6758     cond = omp_grid_lastprivate_predicate (fd);
6759   else
6760     {
6761       tree n2 = fd->loop.n2;
6762       if (fd->collapse > 1
6763 	  && TREE_CODE (n2) != INTEGER_CST
6764 	  && gimple_omp_for_combined_into_p (fd->for_stmt))
6765 	{
6766 	  struct omp_context *taskreg_ctx = NULL;
6767 	  if (gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
6768 	    {
6769 	      gomp_for *gfor = as_a <gomp_for *> (ctx->outer->stmt);
6770 	      if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_FOR
6771 		  || gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_DISTRIBUTE)
6772 		{
6773 		  if (gimple_omp_for_combined_into_p (gfor))
6774 		    {
6775 		      gcc_assert (ctx->outer->outer
6776 				  && is_parallel_ctx (ctx->outer->outer));
6777 		      taskreg_ctx = ctx->outer->outer;
6778 		    }
6779 		  else
6780 		    {
6781 		      struct omp_for_data outer_fd;
6782 		      omp_extract_for_data (gfor, &outer_fd, NULL);
6783 		      n2 = fold_convert (TREE_TYPE (n2), outer_fd.loop.n2);
6784 		    }
6785 		}
6786 	      else if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_TASKLOOP)
6787 		taskreg_ctx = ctx->outer->outer;
6788 	    }
6789 	  else if (is_taskreg_ctx (ctx->outer))
6790 	    taskreg_ctx = ctx->outer;
6791 	  if (taskreg_ctx)
6792 	    {
6793 	      int i;
6794 	      tree taskreg_clauses
6795 		= gimple_omp_taskreg_clauses (taskreg_ctx->stmt);
6796 	      tree innerc = omp_find_clause (taskreg_clauses,
6797 					     OMP_CLAUSE__LOOPTEMP_);
6798 	      gcc_assert (innerc);
6799 	      for (i = 0; i < fd->collapse; i++)
6800 		{
6801 		  innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
6802 					    OMP_CLAUSE__LOOPTEMP_);
6803 		  gcc_assert (innerc);
6804 		}
6805 	      innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
6806 					OMP_CLAUSE__LOOPTEMP_);
6807 	      if (innerc)
6808 		n2 = fold_convert (TREE_TYPE (n2),
6809 				   lookup_decl (OMP_CLAUSE_DECL (innerc),
6810 						taskreg_ctx));
6811 	    }
6812 	}
6813       cond = build2 (cond_code, boolean_type_node, fd->loop.v, n2);
6814     }
6815 
6816   clauses = gimple_omp_for_clauses (fd->for_stmt);
6817   stmts = NULL;
6818   lower_lastprivate_clauses (clauses, cond, &stmts, ctx);
6819   if (!gimple_seq_empty_p (stmts))
6820     {
6821       gimple_seq_add_seq (&stmts, *dlist);
6822       *dlist = stmts;
6823 
6824       /* Optimize: v = 0; is usually cheaper than v = some_other_constant.  */
6825       vinit = fd->loop.n1;
6826       if (cond_code == EQ_EXPR
6827 	  && tree_fits_shwi_p (fd->loop.n2)
6828 	  && ! integer_zerop (fd->loop.n2))
6829 	vinit = build_int_cst (TREE_TYPE (fd->loop.v), 0);
6830       else
6831 	vinit = unshare_expr (vinit);
6832 
6833       /* Initialize the iterator variable, so that threads that don't execute
6834 	 any iterations don't execute the lastprivate clauses by accident.  */
6835       gimplify_assign (fd->loop.v, vinit, body_p);
6836     }
6837 }
6838 
6839 
6840 /* Lower code for an OMP loop directive.  */
6841 
6842 static void
6843 lower_omp_for (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6844 {
6845   tree *rhs_p, block;
6846   struct omp_for_data fd, *fdp = NULL;
6847   gomp_for *stmt = as_a <gomp_for *> (gsi_stmt (*gsi_p));
6848   gbind *new_stmt;
6849   gimple_seq omp_for_body, body, dlist;
6850   gimple_seq oacc_head = NULL, oacc_tail = NULL;
6851   size_t i;
6852 
6853   push_gimplify_context ();
6854 
6855   lower_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
6856 
6857   block = make_node (BLOCK);
6858   new_stmt = gimple_build_bind (NULL, NULL, block);
6859   /* Replace at gsi right away, so that 'stmt' is no longer a member
6860      of a sequence, as we're going to add it to a different
6861      one below.  */
6862   gsi_replace (gsi_p, new_stmt, true);
6863 
6864   /* Move declaration of temporaries in the loop body before we make
6865      it go away.  */
6866   omp_for_body = gimple_omp_body (stmt);
6867   if (!gimple_seq_empty_p (omp_for_body)
6868       && gimple_code (gimple_seq_first_stmt (omp_for_body)) == GIMPLE_BIND)
6869     {
6870       gbind *inner_bind
6871 	= as_a <gbind *> (gimple_seq_first_stmt (omp_for_body));
6872       tree vars = gimple_bind_vars (inner_bind);
6873       gimple_bind_append_vars (new_stmt, vars);
6874       /* bind_vars/BLOCK_VARS are being moved to new_stmt/block, don't
6875	 keep them on the inner_bind and its block.  */
6876       gimple_bind_set_vars (inner_bind, NULL_TREE);
6877       if (gimple_bind_block (inner_bind))
6878 	BLOCK_VARS (gimple_bind_block (inner_bind)) = NULL_TREE;
6879     }
6880 
6881   if (gimple_omp_for_combined_into_p (stmt))
6882     {
6883       omp_extract_for_data (stmt, &fd, NULL);
6884       fdp = &fd;
6885 
6886       /* We need two temporaries of fd.iter_type (istart/iend)
6887	 and then (fd.collapse - 1) temporaries of the same
6888	 type for the count2 ... countN-1 vars if not constant.  */
6889       size_t count = 2;
6890       tree type = fd.iter_type;
6891       if (fd.collapse > 1
6892 	  && TREE_CODE (fd.loop.n2) != INTEGER_CST)
6893 	count += fd.collapse - 1;
6894       bool taskreg_for
6895 	= (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR
6896 	   || gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP);
6897       tree outerc = NULL, *pc = gimple_omp_for_clauses_ptr (stmt);
6898       tree simtc = NULL;
6899       tree clauses = *pc;
6900       if (taskreg_for)
6901 	outerc
6902 	  = omp_find_clause (gimple_omp_taskreg_clauses (ctx->outer->stmt),
6903 			     OMP_CLAUSE__LOOPTEMP_);
6904       if (ctx->simt_stmt)
6905 	simtc = omp_find_clause (gimple_omp_for_clauses (ctx->simt_stmt),
6906 				 OMP_CLAUSE__LOOPTEMP_);
6907       for (i = 0; i < count; i++)
6908 	{
6909 	  tree temp;
6910 	  if (taskreg_for)
6911 	    {
6912 	      gcc_assert (outerc);
6913 	      temp = lookup_decl (OMP_CLAUSE_DECL (outerc), ctx->outer);
6914 	      outerc = omp_find_clause (OMP_CLAUSE_CHAIN (outerc),
6915 					OMP_CLAUSE__LOOPTEMP_);
6916 	    }
6917 	  else
6918 	    {
6919 	      /* If there are 2 adjacent SIMD stmts, one with _simt_
6920 		 clause, another without, make sure they have the same
6921 		 decls in _looptemp_ clauses, because the outer stmt
6922 		 they are combined into will look up just one inner_stmt.  */
6923 	      if (ctx->simt_stmt)
6924 		temp = OMP_CLAUSE_DECL (simtc);
6925 	      else
6926 		temp = create_tmp_var (type);
6927 	      insert_decl_map (&ctx->outer->cb, temp, temp);
6928 	    }
6929 	  *pc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
6930 	  OMP_CLAUSE_DECL (*pc) = temp;
6931 	  pc = &OMP_CLAUSE_CHAIN (*pc);
6932 	  if (ctx->simt_stmt)
6933 	    simtc = omp_find_clause (OMP_CLAUSE_CHAIN (simtc),
6934 				     OMP_CLAUSE__LOOPTEMP_);
6935 	}
6936       *pc = clauses;
6937     }
6938 
6939   /* The pre-body and input clauses go before the lowered GIMPLE_OMP_FOR.  */
6940   dlist = NULL;
6941   body = NULL;
6942   lower_rec_input_clauses (gimple_omp_for_clauses (stmt), &body, &dlist, ctx,
6943 			   fdp);
6944   gimple_seq_add_seq (&body, gimple_omp_for_pre_body (stmt));
6945 
6946   lower_omp (gimple_omp_body_ptr (stmt), ctx);
6947 
6948   /* Lower the header expressions.  At this point, we can assume that
6949      the header is of the form:
6950 
6951      	#pragma omp for (V = VAL1; V {<|>|<=|>=} VAL2; V = V [+-] VAL3)
6952 
6953      We just need to make sure that VAL1, VAL2 and VAL3 are lowered
6954      using the .omp_data_s mapping, if needed.  */
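  /* E.g. (a sketch) a non-invariant bound becomes a formal temporary
     evaluated ahead of the directive:

       #pragma omp for (V = 0; V < n * step; V++)
     is rewritten as
       D.tmp = n * step;
       #pragma omp for (V = 0; V < D.tmp; V++)

     where D.tmp stands for the temporary created by get_formal_tmp_var.  */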
6955   for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
6956     {
6957       rhs_p = gimple_omp_for_initial_ptr (stmt, i);
6958       if (!is_gimple_min_invariant (*rhs_p))
6959 	*rhs_p = get_formal_tmp_var (*rhs_p, &body);
6960       else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
6961 	recompute_tree_invariant_for_addr_expr (*rhs_p);
6962 
6963       rhs_p = gimple_omp_for_final_ptr (stmt, i);
6964       if (!is_gimple_min_invariant (*rhs_p))
6965 	*rhs_p = get_formal_tmp_var (*rhs_p, &body);
6966       else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
6967 	recompute_tree_invariant_for_addr_expr (*rhs_p);
6968 
6969       rhs_p = &TREE_OPERAND (gimple_omp_for_incr (stmt, i), 1);
6970       if (!is_gimple_min_invariant (*rhs_p))
6971 	*rhs_p = get_formal_tmp_var (*rhs_p, &body);
6972     }
6973 
6974   /* Once lowered, extract the bounds and clauses.  */
6975   omp_extract_for_data (stmt, &fd, NULL);
6976 
6977   if (is_gimple_omp_oacc (ctx->stmt)
6978       && !ctx_in_oacc_kernels_region (ctx))
6979     lower_oacc_head_tail (gimple_location (stmt),
6980 			  gimple_omp_for_clauses (stmt),
6981 			  &oacc_head, &oacc_tail, ctx);
6982 
6983   /* Add OpenACC partitioning and reduction markers just before the loop.  */
6984   if (oacc_head)
6985     gimple_seq_add_seq (&body, oacc_head);
6986 
6987   lower_omp_for_lastprivate (&fd, &body, &dlist, ctx);
6988 
6989   if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
6990     for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
6991       if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
6992 	  && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
6993 	{
6994 	  OMP_CLAUSE_DECL (c) = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
6995 	  if (DECL_P (OMP_CLAUSE_LINEAR_STEP (c)))
6996 	    OMP_CLAUSE_LINEAR_STEP (c)
6997 	      = maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_LINEAR_STEP (c),
6998 						ctx);
6999 	}
7000 
7001   bool phony_loop = (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_GRID_LOOP
7002 		     && gimple_omp_for_grid_phony (stmt));
7003   if (!phony_loop)
7004     gimple_seq_add_stmt (&body, stmt);
7005   gimple_seq_add_seq (&body, gimple_omp_body (stmt));
7006 
7007   if (!phony_loop)
7008     gimple_seq_add_stmt (&body, gimple_build_omp_continue (fd.loop.v,
7009 							   fd.loop.v));
7010 
7011   /* After the loop, add exit clauses.  */
7012   lower_reduction_clauses (gimple_omp_for_clauses (stmt), &body, ctx);
7013 
7014   if (ctx->cancellable)
7015     gimple_seq_add_stmt (&body, gimple_build_label (ctx->cancel_label));
7016 
7017   gimple_seq_add_seq (&body, dlist);
7018 
7019   body = maybe_catch_exception (body);
7020 
7021   if (!phony_loop)
7022     {
7023       /* Region exit marker goes at the end of the loop body.  */
7024       gimple_seq_add_stmt (&body, gimple_build_omp_return (fd.have_nowait));
7025       maybe_add_implicit_barrier_cancel (ctx, &body);
7026     }
7027 
7028   /* Add OpenACC joining and reduction markers just after the loop.  */
7029   if (oacc_tail)
7030     gimple_seq_add_seq (&body, oacc_tail);
7031 
7032   pop_gimplify_context (new_stmt);
7033 
7034   gimple_bind_append_vars (new_stmt, ctx->block_vars);
7035   maybe_remove_omp_member_access_dummy_vars (new_stmt);
7036   BLOCK_VARS (block) = gimple_bind_vars (new_stmt);
7037   if (BLOCK_VARS (block))
7038     TREE_USED (block) = 1;
7039 
7040   gimple_bind_set_body (new_stmt, body);
7041   gimple_omp_set_body (stmt, NULL);
7042   gimple_omp_for_set_pre_body (stmt, NULL);
7043 }
7044 
7045 /* Callback for walk_stmts.  Check that the walked statements contain
7046    exactly one GIMPLE_OMP_FOR or GIMPLE_OMP_SECTIONS and nothing else.  */
7047 
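/* For example (a sketch of the intent): a parallel whose body is just

     #pragma omp for
     for (...) ...

   is marked combined by lower_omp_taskreg below, which later permits
   expansion through the combined GOMP_parallel_loop_* runtime entry
   points.  */
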
7048 static tree
7049 check_combined_parallel (gimple_stmt_iterator *gsi_p,
7050 			 bool *handled_ops_p,
7051 			 struct walk_stmt_info *wi)
7052 {
7053   int *info = (int *) wi->info;
7054   gimple *stmt = gsi_stmt (*gsi_p);
7055 
7056   *handled_ops_p = true;
7057   switch (gimple_code (stmt))
7058     {
7059     WALK_SUBSTMTS;
7060 
7061     case GIMPLE_DEBUG:
7062       break;
7063     case GIMPLE_OMP_FOR:
7064     case GIMPLE_OMP_SECTIONS:
7065       *info = *info == 0 ? 1 : -1;
7066       break;
7067     default:
7068       *info = -1;
7069       break;
7070     }
7071   return NULL;
7072 }
7073 
7074 struct omp_taskcopy_context
7075 {
7076   /* This field must be at the beginning, as we do "inheritance": Some
7077      callback functions for tree-inline.c (e.g., omp_copy_decl)
7078      receive a copy_body_data pointer that is up-casted to an
7079      omp_context pointer.  */
7080   copy_body_data cb;
7081   omp_context *ctx;
7082 };
7083 
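/* copy_body_data callback for create_task_copyfn: variables that have a
   field in the task's srecord (i.e. are communicated through the
   argument block) get a fresh temporary in the copyfn; all others are
   used as-is.  */
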
7084 static tree
7085 task_copyfn_copy_decl (tree var, copy_body_data *cb)
7086 {
7087   struct omp_taskcopy_context *tcctx = (struct omp_taskcopy_context *) cb;
7088 
7089   if (splay_tree_lookup (tcctx->ctx->sfield_map, (splay_tree_key) var))
7090     return create_tmp_var (TREE_TYPE (var));
7091 
7092   return var;
7093 }
7094 
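/* Remap the field types of ORIG_TYPE through TCCTX, building a new
   RECORD_TYPE for the copyfn in which variably modified types (and the
   trees for field sizes and offsets) refer to the copyfn's own
   temporaries.  */
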
7095 static tree
7096 task_copyfn_remap_type (struct omp_taskcopy_context *tcctx, tree orig_type)
7097 {
7098   tree name, new_fields = NULL, type, f;
7099 
7100   type = lang_hooks.types.make_type (RECORD_TYPE);
7101   name = DECL_NAME (TYPE_NAME (orig_type));
7102   name = build_decl (gimple_location (tcctx->ctx->stmt),
7103 		     TYPE_DECL, name, type);
7104   TYPE_NAME (type) = name;
7105 
7106   for (f = TYPE_FIELDS (orig_type); f ; f = TREE_CHAIN (f))
7107     {
7108       tree new_f = copy_node (f);
7109       DECL_CONTEXT (new_f) = type;
7110       TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &tcctx->cb);
7111       TREE_CHAIN (new_f) = new_fields;
7112       walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &tcctx->cb, NULL);
7113       walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r, &tcctx->cb, NULL);
7114       walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
7115 		 &tcctx->cb, NULL);
7116       new_fields = new_f;
7117       tcctx->cb.decl_map->put (f, new_f);
7118     }
7119   TYPE_FIELDS (type) = nreverse (new_fields);
7120   layout_type (type);
7121   return type;
7122 }
7123 
7124 /* Create task copyfn.  */
7125 
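/* A sketch of the copyfn's shape for "#pragma omp task shared(s)
   firstprivate(x)" (struct and function names illustrative only):

     void .omp_cpyfn (struct .omp_data_t *arg, struct .omp_data_s *sarg)
     {
       arg->s = sarg->s;	// shared: copy the pointer
       arg->x = sarg->x;	// firstprivate: copy-construct the value
     }

   ARG points to the task's own record, SARG to the sender record filled
   in by the thread that encountered the task construct.  */
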
7126 static void
7127 create_task_copyfn (gomp_task *task_stmt, omp_context *ctx)
7128 {
7129   struct function *child_cfun;
7130   tree child_fn, t, c, src, dst, f, sf, arg, sarg, decl;
7131   tree record_type, srecord_type, bind, list;
7132   bool record_needs_remap = false, srecord_needs_remap = false;
7133   splay_tree_node n;
7134   struct omp_taskcopy_context tcctx;
7135   location_t loc = gimple_location (task_stmt);
7136   size_t looptempno = 0;
7137 
7138   child_fn = gimple_omp_task_copy_fn (task_stmt);
7139   child_cfun = DECL_STRUCT_FUNCTION (child_fn);
7140   gcc_assert (child_cfun->cfg == NULL);
7141   DECL_SAVED_TREE (child_fn) = alloc_stmt_list ();
7142 
7143   /* Reset DECL_CONTEXT on function arguments.  */
7144   for (t = DECL_ARGUMENTS (child_fn); t; t = DECL_CHAIN (t))
7145     DECL_CONTEXT (t) = child_fn;
7146 
7147   /* Populate the function.  */
7148   push_gimplify_context ();
7149   push_cfun (child_cfun);
7150 
7151   bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
7152   TREE_SIDE_EFFECTS (bind) = 1;
7153   list = NULL;
7154   DECL_SAVED_TREE (child_fn) = bind;
7155   DECL_SOURCE_LOCATION (child_fn) = gimple_location (task_stmt);
7156 
7157   /* Remap src and dst argument types if needed.  */
7158   record_type = ctx->record_type;
7159   srecord_type = ctx->srecord_type;
7160   for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
7161     if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
7162       {
7163 	record_needs_remap = true;
7164 	break;
7165       }
7166   for (f = TYPE_FIELDS (srecord_type); f ; f = DECL_CHAIN (f))
7167     if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
7168       {
7169 	srecord_needs_remap = true;
7170 	break;
7171       }
7172 
7173   if (record_needs_remap || srecord_needs_remap)
7174     {
7175       memset (&tcctx, '\0', sizeof (tcctx));
7176       tcctx.cb.src_fn = ctx->cb.src_fn;
7177       tcctx.cb.dst_fn = child_fn;
7178       tcctx.cb.src_node = cgraph_node::get (tcctx.cb.src_fn);
7179       gcc_checking_assert (tcctx.cb.src_node);
7180       tcctx.cb.dst_node = tcctx.cb.src_node;
7181       tcctx.cb.src_cfun = ctx->cb.src_cfun;
7182       tcctx.cb.copy_decl = task_copyfn_copy_decl;
7183       tcctx.cb.eh_lp_nr = 0;
7184       tcctx.cb.transform_call_graph_edges = CB_CGE_MOVE;
7185       tcctx.cb.decl_map = new hash_map<tree, tree>;
7186       tcctx.ctx = ctx;
7187 
7188       if (record_needs_remap)
7189 	record_type = task_copyfn_remap_type (&tcctx, record_type);
7190       if (srecord_needs_remap)
7191 	srecord_type = task_copyfn_remap_type (&tcctx, srecord_type);
7192     }
7193   else
7194     tcctx.cb.decl_map = NULL;
7195 
7196   arg = DECL_ARGUMENTS (child_fn);
7197   TREE_TYPE (arg) = build_pointer_type (record_type);
7198   sarg = DECL_CHAIN (arg);
7199   TREE_TYPE (sarg) = build_pointer_type (srecord_type);
7200 
7201   /* First pass: initialize temporaries used in record_type and srecord_type
7202      sizes and field offsets.  */
7203   if (tcctx.cb.decl_map)
7204     for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
7205       if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7206 	{
7207 	  tree *p;
7208 
7209 	  decl = OMP_CLAUSE_DECL (c);
7210 	  p = tcctx.cb.decl_map->get (decl);
7211 	  if (p == NULL)
7212 	    continue;
7213 	  n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
7214 	  sf = (tree) n->value;
7215 	  sf = *tcctx.cb.decl_map->get (sf);
7216 	  src = build_simple_mem_ref_loc (loc, sarg);
7217 	  src = omp_build_component_ref (src, sf);
7218 	  t = build2 (MODIFY_EXPR, TREE_TYPE (*p), *p, src);
7219 	  append_to_statement_list (t, &list);
7220 	}
7221 
7222   /* Second pass: copy shared var pointers and copy construct non-VLA
7223      firstprivate vars.  */
7224   for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
7225     switch (OMP_CLAUSE_CODE (c))
7226       {
7227 	splay_tree_key key;
7228       case OMP_CLAUSE_SHARED:
7229 	decl = OMP_CLAUSE_DECL (c);
7230 	key = (splay_tree_key) decl;
7231 	if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
7232 	  key = (splay_tree_key) &DECL_UID (decl);
7233 	n = splay_tree_lookup (ctx->field_map, key);
7234 	if (n == NULL)
7235 	  break;
7236 	f = (tree) n->value;
7237 	if (tcctx.cb.decl_map)
7238 	  f = *tcctx.cb.decl_map->get (f);
7239 	n = splay_tree_lookup (ctx->sfield_map, key);
7240 	sf = (tree) n->value;
7241 	if (tcctx.cb.decl_map)
7242 	  sf = *tcctx.cb.decl_map->get (sf);
7243 	src = build_simple_mem_ref_loc (loc, sarg);
7244 	src = omp_build_component_ref (src, sf);
7245 	dst = build_simple_mem_ref_loc (loc, arg);
7246 	dst = omp_build_component_ref (dst, f);
7247 	t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
7248 	append_to_statement_list (t, &list);
7249 	break;
7250       case OMP_CLAUSE__LOOPTEMP_:
7251 	/* Fields for the first two _looptemp_ clauses are initialized by
7252 	   GOMP_taskloop*; the rest are handled like firstprivate.  */
7253         if (looptempno < 2)
7254 	  {
7255 	    looptempno++;
7256 	    break;
7257 	  }
7258 	/* FALLTHRU */
7259       case OMP_CLAUSE_FIRSTPRIVATE:
7260 	decl = OMP_CLAUSE_DECL (c);
7261 	if (is_variable_sized (decl))
7262 	  break;
7263 	n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
7264 	if (n == NULL)
7265 	  break;
7266 	f = (tree) n->value;
7267 	if (tcctx.cb.decl_map)
7268 	  f = *tcctx.cb.decl_map->get (f);
7269 	n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
7270 	if (n != NULL)
7271 	  {
7272 	    sf = (tree) n->value;
7273 	    if (tcctx.cb.decl_map)
7274 	      sf = *tcctx.cb.decl_map->get (sf);
7275 	    src = build_simple_mem_ref_loc (loc, sarg);
7276 	    src = omp_build_component_ref (src, sf);
7277 	    if (use_pointer_for_field (decl, NULL) || omp_is_reference (decl))
7278 	      src = build_simple_mem_ref_loc (loc, src);
7279 	  }
7280 	else
7281 	  src = decl;
7282 	dst = build_simple_mem_ref_loc (loc, arg);
7283 	dst = omp_build_component_ref (dst, f);
7284 	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE__LOOPTEMP_)
7285 	  t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
7286 	else
7287 	  t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
7288 	append_to_statement_list (t, &list);
7289 	break;
7290       case OMP_CLAUSE_PRIVATE:
7291 	if (! OMP_CLAUSE_PRIVATE_OUTER_REF (c))
7292 	  break;
7293 	decl = OMP_CLAUSE_DECL (c);
7294 	n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
7295 	f = (tree) n->value;
7296 	if (tcctx.cb.decl_map)
7297 	  f = *tcctx.cb.decl_map->get (f);
7298 	n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
7299 	if (n != NULL)
7300 	  {
7301 	    sf = (tree) n->value;
7302 	    if (tcctx.cb.decl_map)
7303 	      sf = *tcctx.cb.decl_map->get (sf);
7304 	    src = build_simple_mem_ref_loc (loc, sarg);
7305 	    src = omp_build_component_ref (src, sf);
7306 	    if (use_pointer_for_field (decl, NULL))
7307 	      src = build_simple_mem_ref_loc (loc, src);
7308 	  }
7309 	else
7310 	  src = decl;
7311 	dst = build_simple_mem_ref_loc (loc, arg);
7312 	dst = omp_build_component_ref (dst, f);
7313 	t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
7314 	append_to_statement_list (t, &list);
7315 	break;
7316       default:
7317 	break;
7318       }
7319 
7320   /* Last pass: handle VLA firstprivates.  */
7321   if (tcctx.cb.decl_map)
7322     for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
7323       if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7324 	{
7325 	  tree ind, ptr, df;
7326 
7327 	  decl = OMP_CLAUSE_DECL (c);
7328 	  if (!is_variable_sized (decl))
7329 	    continue;
7330 	  n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
7331 	  if (n == NULL)
7332 	    continue;
7333 	  f = (tree) n->value;
7334 	  f = *tcctx.cb.decl_map->get (f);
7335 	  gcc_assert (DECL_HAS_VALUE_EXPR_P (decl));
7336 	  ind = DECL_VALUE_EXPR (decl);
7337 	  gcc_assert (TREE_CODE (ind) == INDIRECT_REF);
7338 	  gcc_assert (DECL_P (TREE_OPERAND (ind, 0)));
7339 	  n = splay_tree_lookup (ctx->sfield_map,
7340 				 (splay_tree_key) TREE_OPERAND (ind, 0));
7341 	  sf = (tree) n->value;
7342 	  sf = *tcctx.cb.decl_map->get (sf);
7343 	  src = build_simple_mem_ref_loc (loc, sarg);
7344 	  src = omp_build_component_ref (src, sf);
7345 	  src = build_simple_mem_ref_loc (loc, src);
7346 	  dst = build_simple_mem_ref_loc (loc, arg);
7347 	  dst = omp_build_component_ref (dst, f);
7348 	  t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
7349 	  append_to_statement_list (t, &list);
7350 	  n = splay_tree_lookup (ctx->field_map,
7351 				 (splay_tree_key) TREE_OPERAND (ind, 0));
7352 	  df = (tree) n->value;
7353 	  df = *tcctx.cb.decl_map->get (df);
7354 	  ptr = build_simple_mem_ref_loc (loc, arg);
7355 	  ptr = omp_build_component_ref (ptr, df);
7356 	  t = build2 (MODIFY_EXPR, TREE_TYPE (ptr), ptr,
7357 		      build_fold_addr_expr_loc (loc, dst));
7358 	  append_to_statement_list (t, &list);
7359 	}
7360 
7361   t = build1 (RETURN_EXPR, void_type_node, NULL);
7362   append_to_statement_list (t, &list);
7363 
7364   if (tcctx.cb.decl_map)
7365     delete tcctx.cb.decl_map;
7366   pop_gimplify_context (NULL);
7367   BIND_EXPR_BODY (bind) = list;
7368   pop_cfun ();
7369 }
7370 
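/* Lower the DEPEND clauses in *PCLAUSES into a flat address array handed
   to the runtime: element 0 holds the total number of addresses, element
   1 the number of out/inout addresses, followed by the out/inout
   addresses and then the in addresses.  E.g. (a sketch) for
   "depend(out: b) depend(inout: c) depend(in: a)" the array is

     { (void *) 3, (void *) 2, &b, &c, &a }

   Initialization of the array goes to *ISEQ; a clobber releasing it
   goes to *OSEQ.  */
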
7371 static void
7372 lower_depend_clauses (tree *pclauses, gimple_seq *iseq, gimple_seq *oseq)
7373 {
7374   tree c, clauses;
7375   gimple *g;
7376   size_t n_in = 0, n_out = 0, idx = 2, i;
7377 
7378   clauses = omp_find_clause (*pclauses, OMP_CLAUSE_DEPEND);
7379   gcc_assert (clauses);
7380   for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
7381     if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
7382       switch (OMP_CLAUSE_DEPEND_KIND (c))
7383 	{
7384 	case OMP_CLAUSE_DEPEND_IN:
7385 	  n_in++;
7386 	  break;
7387 	case OMP_CLAUSE_DEPEND_OUT:
7388 	case OMP_CLAUSE_DEPEND_INOUT:
7389 	  n_out++;
7390 	  break;
7391 	case OMP_CLAUSE_DEPEND_SOURCE:
7392 	case OMP_CLAUSE_DEPEND_SINK:
7393 	  /* FALLTHRU */
7394 	default:
7395 	  gcc_unreachable ();
7396 	}
7397   tree type = build_array_type_nelts (ptr_type_node, n_in + n_out + 2);
7398   tree array = create_tmp_var (type);
7399   TREE_ADDRESSABLE (array) = 1;
7400   tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
7401 		   NULL_TREE);
7402   g = gimple_build_assign (r, build_int_cst (ptr_type_node, n_in + n_out));
7403   gimple_seq_add_stmt (iseq, g);
7404   r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
7405 	      NULL_TREE);
7406   g = gimple_build_assign (r, build_int_cst (ptr_type_node, n_out));
7407   gimple_seq_add_stmt (iseq, g);
7408   for (i = 0; i < 2; i++)
7409     {
7410       if ((i ? n_in : n_out) == 0)
7411 	continue;
7412       for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
7413 	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
7414 	    && ((OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_IN) ^ i))
7415 	  {
7416 	    tree t = OMP_CLAUSE_DECL (c);
7417 	    t = fold_convert (ptr_type_node, t);
7418 	    gimplify_expr (&t, iseq, NULL, is_gimple_val, fb_rvalue);
7419 	    r = build4 (ARRAY_REF, ptr_type_node, array, size_int (idx++),
7420 			NULL_TREE, NULL_TREE);
7421 	    g = gimple_build_assign (r, t);
7422 	    gimple_seq_add_stmt (iseq, g);
7423 	  }
7424     }
7425   c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
7426   OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
7427   OMP_CLAUSE_CHAIN (c) = *pclauses;
7428   *pclauses = c;
7429   tree clobber = build_constructor (type, NULL);
7430   TREE_THIS_VOLATILE (clobber) = 1;
7431   g = gimple_build_assign (array, clobber);
7432   gimple_seq_add_stmt (oseq, g);
7433 }
7434 
7435 /* Lower the OpenMP parallel or task directive in the current statement
7436    in GSI_P.  CTX holds context information for the directive.  */
7437 
7438 static void
7439 lower_omp_taskreg (gimple_stmt_iterator *gsi_p, omp_context *ctx)
7440 {
7441   tree clauses;
7442   tree child_fn, t;
7443   gimple *stmt = gsi_stmt (*gsi_p);
7444   gbind *par_bind, *bind, *dep_bind = NULL;
7445   gimple_seq par_body, olist, ilist, par_olist, par_rlist, par_ilist, new_body;
7446   location_t loc = gimple_location (stmt);
7447 
7448   clauses = gimple_omp_taskreg_clauses (stmt);
7449   par_bind
7450     = as_a <gbind *> (gimple_seq_first_stmt (gimple_omp_body (stmt)));
7451   par_body = gimple_bind_body (par_bind);
7452   child_fn = ctx->cb.dst_fn;
7453   if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL
7454       && !gimple_omp_parallel_combined_p (stmt))
7455     {
7456       struct walk_stmt_info wi;
7457       int ws_num = 0;
7458 
7459       memset (&wi, 0, sizeof (wi));
7460       wi.info = &ws_num;
7461       wi.val_only = true;
7462       walk_gimple_seq (par_body, check_combined_parallel, NULL, &wi);
7463       if (ws_num == 1)
7464 	gimple_omp_parallel_set_combined_p (stmt, true);
7465     }
7466   gimple_seq dep_ilist = NULL;
7467   gimple_seq dep_olist = NULL;
7468   if (gimple_code (stmt) == GIMPLE_OMP_TASK
7469       && omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
7470     {
7471       push_gimplify_context ();
7472       dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
7473       lower_depend_clauses (gimple_omp_task_clauses_ptr (stmt),
7474 			    &dep_ilist, &dep_olist);
7475     }
7476 
7477   if (ctx->srecord_type)
7478     create_task_copyfn (as_a <gomp_task *> (stmt), ctx);
7479 
7480   push_gimplify_context ();
7481 
7482   par_olist = NULL;
7483   par_ilist = NULL;
7484   par_rlist = NULL;
7485   bool phony_construct = gimple_code (stmt) == GIMPLE_OMP_PARALLEL
7486     && gimple_omp_parallel_grid_phony (as_a <gomp_parallel *> (stmt));
7487   if (phony_construct && ctx->record_type)
7488     {
7489       gcc_checking_assert (!ctx->receiver_decl);
7490       ctx->receiver_decl = create_tmp_var
7491 	(build_reference_type (ctx->record_type), ".omp_rec");
7492     }
7493   lower_rec_input_clauses (clauses, &par_ilist, &par_olist, ctx, NULL);
7494   lower_omp (&par_body, ctx);
7495   if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL)
7496     lower_reduction_clauses (clauses, &par_rlist, ctx);
7497 
7498   /* Declare all the variables created by mapping and the variables
7499      declared in the scope of the parallel body.  */
7500   record_vars_into (ctx->block_vars, child_fn);
7501   maybe_remove_omp_member_access_dummy_vars (par_bind);
7502   record_vars_into (gimple_bind_vars (par_bind), child_fn);
7503 
7504   if (ctx->record_type)
7505     {
7506       ctx->sender_decl
7507 	= create_tmp_var (ctx->srecord_type ? ctx->srecord_type
7508 			  : ctx->record_type, ".omp_data_o");
7509       DECL_NAMELESS (ctx->sender_decl) = 1;
7510       TREE_ADDRESSABLE (ctx->sender_decl) = 1;
7511       gimple_omp_taskreg_set_data_arg (stmt, ctx->sender_decl);
7512     }
7513 
7514   olist = NULL;
7515   ilist = NULL;
7516   lower_send_clauses (clauses, &ilist, &olist, ctx);
7517   lower_send_shared_vars (&ilist, &olist, ctx);
7518 
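  /* Clobber the sender record once the region no longer needs it; the
     clobber marks the end of the variable's lifetime so its storage can
     be reused.  */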
7519   if (ctx->record_type)
7520     {
7521       tree clobber = build_constructor (TREE_TYPE (ctx->sender_decl), NULL);
7522       TREE_THIS_VOLATILE (clobber) = 1;
7523       gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
7524 							clobber));
7525     }
7526 
7527   /* Once all the expansions are done, sequence all the different
7528      fragments inside gimple_omp_body.  */
7529 
7530   new_body = NULL;
7531 
7532   if (ctx->record_type)
7533     {
7534       t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
7535       /* fixup_child_record_type might have changed receiver_decl's type.  */
7536       t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
7537       gimple_seq_add_stmt (&new_body,
7538 	  		   gimple_build_assign (ctx->receiver_decl, t));
7539     }
7540 
7541   gimple_seq_add_seq (&new_body, par_ilist);
7542   gimple_seq_add_seq (&new_body, par_body);
7543   gimple_seq_add_seq (&new_body, par_rlist);
7544   if (ctx->cancellable)
7545     gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
7546   gimple_seq_add_seq (&new_body, par_olist);
7547   new_body = maybe_catch_exception (new_body);
7548   if (gimple_code (stmt) == GIMPLE_OMP_TASK)
7549     gimple_seq_add_stmt (&new_body,
7550 			 gimple_build_omp_continue (integer_zero_node,
7551 						    integer_zero_node));
7552   if (!phony_construct)
7553     {
7554       gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
7555       gimple_omp_set_body (stmt, new_body);
7556     }
7557 
7558   bind = gimple_build_bind (NULL, NULL, gimple_bind_block (par_bind));
7559   gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
7560   gimple_bind_add_seq (bind, ilist);
7561   if (!phony_construct)
7562     gimple_bind_add_stmt (bind, stmt);
7563   else
7564     gimple_bind_add_seq (bind, new_body);
7565   gimple_bind_add_seq (bind, olist);
7566 
7567   pop_gimplify_context (NULL);
7568 
7569   if (dep_bind)
7570     {
7571       gimple_bind_add_seq (dep_bind, dep_ilist);
7572       gimple_bind_add_stmt (dep_bind, bind);
7573       gimple_bind_add_seq (dep_bind, dep_olist);
7574       pop_gimplify_context (dep_bind);
7575     }
7576 }
7577 
7578 /* Lower the GIMPLE_OMP_TARGET in the current statement
7579    in GSI_P.  CTX holds context information for the directive.  */
7580 
7581 static void
7582 lower_omp_target (gimple_stmt_iterator *gsi_p, omp_context *ctx)
7583 {
7584   tree clauses;
7585   tree child_fn, t, c;
7586   gomp_target *stmt = as_a <gomp_target *> (gsi_stmt (*gsi_p));
7587   gbind *tgt_bind, *bind, *dep_bind = NULL;
7588   gimple_seq tgt_body, olist, ilist, fplist, new_body;
7589   location_t loc = gimple_location (stmt);
7590   bool offloaded, data_region;
7591   unsigned int map_cnt = 0;
7592 
7593   offloaded = is_gimple_omp_offloaded (stmt);
7594   switch (gimple_omp_target_kind (stmt))
7595     {
7596     case GF_OMP_TARGET_KIND_REGION:
7597     case GF_OMP_TARGET_KIND_UPDATE:
7598     case GF_OMP_TARGET_KIND_ENTER_DATA:
7599     case GF_OMP_TARGET_KIND_EXIT_DATA:
7600     case GF_OMP_TARGET_KIND_OACC_PARALLEL:
7601     case GF_OMP_TARGET_KIND_OACC_KERNELS:
7602     case GF_OMP_TARGET_KIND_OACC_UPDATE:
7603     case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA:
7604     case GF_OMP_TARGET_KIND_OACC_DECLARE:
7605       data_region = false;
7606       break;
7607     case GF_OMP_TARGET_KIND_DATA:
7608     case GF_OMP_TARGET_KIND_OACC_DATA:
7609     case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
7610       data_region = true;
7611       break;
7612     default:
7613       gcc_unreachable ();
7614     }
7615 
7616   clauses = gimple_omp_target_clauses (stmt);
7617 
7618   gimple_seq dep_ilist = NULL;
7619   gimple_seq dep_olist = NULL;
7620   if (omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
7621     {
7622       push_gimplify_context ();
7623       dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
7624       lower_depend_clauses (gimple_omp_target_clauses_ptr (stmt),
7625 			    &dep_ilist, &dep_olist);
7626     }
7627 
7628   tgt_bind = NULL;
7629   tgt_body = NULL;
7630   if (offloaded)
7631     {
7632       tgt_bind = gimple_seq_first_stmt_as_a_bind (gimple_omp_body (stmt));
7633       tgt_body = gimple_bind_body (tgt_bind);
7634     }
7635   else if (data_region)
7636     tgt_body = gimple_omp_body (stmt);
7637   child_fn = ctx->cb.dst_fn;
7638 
7639   push_gimplify_context ();
7640   fplist = NULL;
7641 
7642   for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7643     switch (OMP_CLAUSE_CODE (c))
7644       {
7645 	tree var, x;
7646 
7647       default:
7648 	break;
7649       case OMP_CLAUSE_MAP:
7650 #if CHECKING_P
7651 	/* First check what we're prepared to handle in the following.  */
7652 	switch (OMP_CLAUSE_MAP_KIND (c))
7653 	  {
7654 	  case GOMP_MAP_ALLOC:
7655 	  case GOMP_MAP_TO:
7656 	  case GOMP_MAP_FROM:
7657 	  case GOMP_MAP_TOFROM:
7658 	  case GOMP_MAP_POINTER:
7659 	  case GOMP_MAP_TO_PSET:
7660 	  case GOMP_MAP_DELETE:
7661 	  case GOMP_MAP_RELEASE:
7662 	  case GOMP_MAP_ALWAYS_TO:
7663 	  case GOMP_MAP_ALWAYS_FROM:
7664 	  case GOMP_MAP_ALWAYS_TOFROM:
7665 	  case GOMP_MAP_FIRSTPRIVATE_POINTER:
7666 	  case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
7667 	  case GOMP_MAP_STRUCT:
7668 	  case GOMP_MAP_ALWAYS_POINTER:
7669 	    break;
7670 	  case GOMP_MAP_FORCE_ALLOC:
7671 	  case GOMP_MAP_FORCE_TO:
7672 	  case GOMP_MAP_FORCE_FROM:
7673 	  case GOMP_MAP_FORCE_TOFROM:
7674 	  case GOMP_MAP_FORCE_PRESENT:
7675 	  case GOMP_MAP_FORCE_DEVICEPTR:
7676 	  case GOMP_MAP_DEVICE_RESIDENT:
7677 	  case GOMP_MAP_LINK:
7678 	    gcc_assert (is_gimple_omp_oacc (stmt));
7679 	    break;
7680 	  default:
7681 	    gcc_unreachable ();
7682 	  }
7683 #endif
7684 	  /* FALLTHRU */
7685       case OMP_CLAUSE_TO:
7686       case OMP_CLAUSE_FROM:
7687       oacc_firstprivate:
7688 	var = OMP_CLAUSE_DECL (c);
7689 	if (!DECL_P (var))
7690 	  {
7691 	    if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP
7692 		|| (!OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
7693 		    && (OMP_CLAUSE_MAP_KIND (c)
7694 			!= GOMP_MAP_FIRSTPRIVATE_POINTER)))
7695 	      map_cnt++;
7696 	    continue;
7697 	  }
7698 
7699 	if (DECL_SIZE (var)
7700 	    && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
7701 	  {
7702 	    tree var2 = DECL_VALUE_EXPR (var);
7703 	    gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
7704 	    var2 = TREE_OPERAND (var2, 0);
7705 	    gcc_assert (DECL_P (var2));
7706 	    var = var2;
7707 	  }
7708 
7709 	if (offloaded
7710 	    && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7711 	    && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
7712 		|| OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
7713 	  {
7714 	    if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
7715 	      {
7716 		if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx))
7717 		    && varpool_node::get_create (var)->offloadable)
7718 		  continue;
7719 
7720 		tree type = build_pointer_type (TREE_TYPE (var));
7721 		tree new_var = lookup_decl (var, ctx);
7722 		x = create_tmp_var_raw (type, get_name (new_var));
7723 		gimple_add_tmp_var (x);
7724 		x = build_simple_mem_ref (x);
7725 		SET_DECL_VALUE_EXPR (new_var, x);
7726 		DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7727 	      }
7728 	    continue;
7729 	  }
7730 
7731 	if (!maybe_lookup_field (var, ctx))
7732 	  continue;
7733 
7734 	/* Don't remap oacc parallel reduction variables, because the
7735 	   intermediate result must be local to each gang.  */
7736 	if (offloaded && !(OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7737 			   && OMP_CLAUSE_MAP_IN_REDUCTION (c)))
7738 	  {
7739 	    x = build_receiver_ref (var, true, ctx);
7740 	    tree new_var = lookup_decl (var, ctx);
7741 
7742 	    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7743 		&& OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
7744 		&& !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
7745 		&& TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
7746 	      x = build_simple_mem_ref (x);
7747 	    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7748 	      {
7749 		gcc_assert (is_gimple_omp_oacc (ctx->stmt));
7750 		if (omp_is_reference (new_var))
7751 		  {
7752 		    /* Create a local object to hold the instance
7753 		       value.  */
7754 		    tree type = TREE_TYPE (TREE_TYPE (new_var));
7755 		    const char *id = IDENTIFIER_POINTER (DECL_NAME (new_var));
7756 		    tree inst = create_tmp_var (type, id);
7757 		    gimplify_assign (inst, fold_indirect_ref (x), &fplist);
7758 		    x = build_fold_addr_expr (inst);
7759 		  }
7760 		gimplify_assign (new_var, x, &fplist);
7761 	      }
7762 	    else if (DECL_P (new_var))
7763 	      {
7764 		SET_DECL_VALUE_EXPR (new_var, x);
7765 		DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7766 	      }
7767 	    else
7768 	      gcc_unreachable ();
7769 	  }
7770 	map_cnt++;
7771 	break;
7772 
7773       case OMP_CLAUSE_FIRSTPRIVATE:
7774 	if (is_oacc_parallel (ctx))
7775 	  goto oacc_firstprivate;
7776 	map_cnt++;
7777 	var = OMP_CLAUSE_DECL (c);
7778 	if (!omp_is_reference (var)
7779 	    && !is_gimple_reg_type (TREE_TYPE (var)))
7780 	  {
7781 	    tree new_var = lookup_decl (var, ctx);
7782 	    if (is_variable_sized (var))
7783 	      {
7784 		tree pvar = DECL_VALUE_EXPR (var);
7785 		gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
7786 		pvar = TREE_OPERAND (pvar, 0);
7787 		gcc_assert (DECL_P (pvar));
7788 		tree new_pvar = lookup_decl (pvar, ctx);
7789 		x = build_fold_indirect_ref (new_pvar);
7790 		TREE_THIS_NOTRAP (x) = 1;
7791 	      }
7792 	    else
7793 	      x = build_receiver_ref (var, true, ctx);
7794 	    SET_DECL_VALUE_EXPR (new_var, x);
7795 	    DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7796 	  }
7797 	break;
7798 
7799       case OMP_CLAUSE_PRIVATE:
7800 	if (is_gimple_omp_oacc (ctx->stmt))
7801 	  break;
7802 	var = OMP_CLAUSE_DECL (c);
7803 	if (is_variable_sized (var))
7804 	  {
7805 	    tree new_var = lookup_decl (var, ctx);
7806 	    tree pvar = DECL_VALUE_EXPR (var);
7807 	    gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
7808 	    pvar = TREE_OPERAND (pvar, 0);
7809 	    gcc_assert (DECL_P (pvar));
7810 	    tree new_pvar = lookup_decl (pvar, ctx);
7811 	    x = build_fold_indirect_ref (new_pvar);
7812 	    TREE_THIS_NOTRAP (x) = 1;
7813 	    SET_DECL_VALUE_EXPR (new_var, x);
7814 	    DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7815 	  }
7816 	break;
7817 
7818       case OMP_CLAUSE_USE_DEVICE_PTR:
7819       case OMP_CLAUSE_IS_DEVICE_PTR:
7820 	var = OMP_CLAUSE_DECL (c);
7821 	map_cnt++;
7822 	if (is_variable_sized (var))
7823 	  {
7824 	    tree new_var = lookup_decl (var, ctx);
7825 	    tree pvar = DECL_VALUE_EXPR (var);
7826 	    gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
7827 	    pvar = TREE_OPERAND (pvar, 0);
7828 	    gcc_assert (DECL_P (pvar));
7829 	    tree new_pvar = lookup_decl (pvar, ctx);
7830 	    x = build_fold_indirect_ref (new_pvar);
7831 	    TREE_THIS_NOTRAP (x) = 1;
7832 	    SET_DECL_VALUE_EXPR (new_var, x);
7833 	    DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7834 	  }
7835 	else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
7836 	  {
7837 	    tree new_var = lookup_decl (var, ctx);
7838 	    tree type = build_pointer_type (TREE_TYPE (var));
7839 	    x = create_tmp_var_raw (type, get_name (new_var));
7840 	    gimple_add_tmp_var (x);
7841 	    x = build_simple_mem_ref (x);
7842 	    SET_DECL_VALUE_EXPR (new_var, x);
7843 	    DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7844 	  }
7845 	else
7846 	  {
7847 	    tree new_var = lookup_decl (var, ctx);
7848 	    x = create_tmp_var_raw (TREE_TYPE (new_var), get_name (new_var));
7849 	    gimple_add_tmp_var (x);
7850 	    SET_DECL_VALUE_EXPR (new_var, x);
7851 	    DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7852 	  }
7853 	break;
7854       }
7855 
7856   if (offloaded)
7857     {
7858       target_nesting_level++;
7859       lower_omp (&tgt_body, ctx);
7860       target_nesting_level--;
7861     }
7862   else if (data_region)
7863     lower_omp (&tgt_body, ctx);
7864 
7865   if (offloaded)
7866     {
7867       /* Declare all the variables created by mapping and the variables
7868 	 declared in the scope of the target body.  */
7869       record_vars_into (ctx->block_vars, child_fn);
7870       maybe_remove_omp_member_access_dummy_vars (tgt_bind);
7871       record_vars_into (gimple_bind_vars (tgt_bind), child_fn);
7872     }
7873 
7874   olist = NULL;
7875   ilist = NULL;
7876   if (ctx->record_type)
7877     {
7878       ctx->sender_decl
7879 	= create_tmp_var (ctx->record_type, ".omp_data_arr");
7880       DECL_NAMELESS (ctx->sender_decl) = 1;
7881       TREE_ADDRESSABLE (ctx->sender_decl) = 1;
7882       t = make_tree_vec (3);
7883       TREE_VEC_ELT (t, 0) = ctx->sender_decl;
7884       TREE_VEC_ELT (t, 1)
7885 	= create_tmp_var (build_array_type_nelts (size_type_node, map_cnt),
7886 			  ".omp_data_sizes");
7887       DECL_NAMELESS (TREE_VEC_ELT (t, 1)) = 1;
7888       TREE_ADDRESSABLE (TREE_VEC_ELT (t, 1)) = 1;
7889       TREE_STATIC (TREE_VEC_ELT (t, 1)) = 1;
7890       tree tkind_type = short_unsigned_type_node;
7891       int talign_shift = 8;
7892       TREE_VEC_ELT (t, 2)
7893 	= create_tmp_var (build_array_type_nelts (tkind_type, map_cnt),
7894 			  ".omp_data_kinds");
7895       DECL_NAMELESS (TREE_VEC_ELT (t, 2)) = 1;
7896       TREE_ADDRESSABLE (TREE_VEC_ELT (t, 2)) = 1;
7897       TREE_STATIC (TREE_VEC_ELT (t, 2)) = 1;
7898       gimple_omp_target_set_data_arg (stmt, t);
7899 
7900       vec<constructor_elt, va_gc> *vsize;
7901       vec<constructor_elt, va_gc> *vkind;
7902       vec_alloc (vsize, map_cnt);
7903       vec_alloc (vkind, map_cnt);
7904       unsigned int map_idx = 0;
7905 
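      /* Walk the clauses a second time to fill in the sender record and
	 to build one size and one kind constructor element per mapped
	 entity.  */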
7906       for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7907 	switch (OMP_CLAUSE_CODE (c))
7908 	  {
7909 	    tree ovar, nc, s, purpose, var, x, type;
7910 	    unsigned int talign;
7911 
7912 	  default:
7913 	    break;
7914 
7915 	  case OMP_CLAUSE_MAP:
7916 	  case OMP_CLAUSE_TO:
7917 	  case OMP_CLAUSE_FROM:
7918 	  oacc_firstprivate_map:
7919 	    nc = c;
7920 	    ovar = OMP_CLAUSE_DECL (c);
7921 	    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7922 		&& (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
7923 		    || (OMP_CLAUSE_MAP_KIND (c)
7924 			== GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
7925 	      break;
7926 	    if (!DECL_P (ovar))
7927 	      {
7928 		if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7929 		    && OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
7930 		  {
7931 		    gcc_checking_assert (OMP_CLAUSE_DECL (OMP_CLAUSE_CHAIN (c))
7932 					 == get_base_address (ovar));
7933 		    nc = OMP_CLAUSE_CHAIN (c);
7934 		    ovar = OMP_CLAUSE_DECL (nc);
7935 		  }
7936 		else
7937 		  {
7938 		    tree x = build_sender_ref (ovar, ctx);
7939 		    tree v
7940 		      = build_fold_addr_expr_with_type (ovar, ptr_type_node);
7941 		    gimplify_assign (x, v, &ilist);
7942 		    nc = NULL_TREE;
7943 		  }
7944 	      }
7945 	    else
7946 	      {
7947 		if (DECL_SIZE (ovar)
7948 		    && TREE_CODE (DECL_SIZE (ovar)) != INTEGER_CST)
7949 		  {
7950 		    tree ovar2 = DECL_VALUE_EXPR (ovar);
7951 		    gcc_assert (TREE_CODE (ovar2) == INDIRECT_REF);
7952 		    ovar2 = TREE_OPERAND (ovar2, 0);
7953 		    gcc_assert (DECL_P (ovar2));
7954 		    ovar = ovar2;
7955 		  }
7956 		if (!maybe_lookup_field (ovar, ctx))
7957 		  continue;
7958 	      }
7959 
7960 	    talign = TYPE_ALIGN_UNIT (TREE_TYPE (ovar));
7961 	    if (DECL_P (ovar) && DECL_ALIGN_UNIT (ovar) > talign)
7962 	      talign = DECL_ALIGN_UNIT (ovar);
7963 	    if (nc)
7964 	      {
7965 		var = lookup_decl_in_outer_ctx (ovar, ctx);
7966 		x = build_sender_ref (ovar, ctx);
7967 
7968 		if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7969 		    && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
7970 		    && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
7971 		    && TREE_CODE (TREE_TYPE (ovar)) == ARRAY_TYPE)
7972 		  {
7973 		    gcc_assert (offloaded);
7974 		    tree avar
7975 		      = create_tmp_var (TREE_TYPE (TREE_TYPE (x)));
7976 		    mark_addressable (avar);
7977 		    gimplify_assign (avar, build_fold_addr_expr (var), &ilist);
7978 		    talign = DECL_ALIGN_UNIT (avar);
7979 		    avar = build_fold_addr_expr (avar);
7980 		    gimplify_assign (x, avar, &ilist);
7981 		  }
7982 		else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7983 		  {
7984 		    gcc_assert (is_gimple_omp_oacc (ctx->stmt));
7985 		    if (!omp_is_reference (var))
7986 		      {
7987 			if (is_gimple_reg (var)
7988 			    && OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
7989 			  TREE_NO_WARNING (var) = 1;
7990 			var = build_fold_addr_expr (var);
7991 		      }
7992 		    else
7993 		      talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
7994 		    gimplify_assign (x, var, &ilist);
7995 		  }
7996 		else if (is_gimple_reg (var))
7997 		  {
7998 		    gcc_assert (offloaded);
7999 		    tree avar = create_tmp_var (TREE_TYPE (var));
8000 		    mark_addressable (avar);
8001 		    enum gomp_map_kind map_kind = OMP_CLAUSE_MAP_KIND (c);
8002 		    if (GOMP_MAP_COPY_TO_P (map_kind)
8003 			|| map_kind == GOMP_MAP_POINTER
8004 			|| map_kind == GOMP_MAP_TO_PSET
8005 			|| map_kind == GOMP_MAP_FORCE_DEVICEPTR)
8006 		      {
8007 			/* If we need to initialize a temporary
8008 			   with VAR because it is not addressable, and
8009 			   the variable hasn't been initialized yet, then
8010 			   we'll get a warning for the store to avar.
8011 			   Don't warn in that case, the mapping might
8012 			   be implicit.  */
8013 			TREE_NO_WARNING (var) = 1;
8014 			gimplify_assign (avar, var, &ilist);
8015 		      }
8016 		    avar = build_fold_addr_expr (avar);
8017 		    gimplify_assign (x, avar, &ilist);
8018 		    if ((GOMP_MAP_COPY_FROM_P (map_kind)
8019 			 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
8020 			&& !TYPE_READONLY (TREE_TYPE (var)))
8021 		      {
8022 			x = unshare_expr (x);
8023 			x = build_simple_mem_ref (x);
8024 			gimplify_assign (var, x, &olist);
8025 		      }
8026 		  }
8027 		else
8028 		  {
8029 		    var = build_fold_addr_expr (var);
8030 		    gimplify_assign (x, var, &ilist);
8031 		  }
8032 	      }
8033 	    s = NULL_TREE;
8034 	    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
8035 	      {
8036 		gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
8037 		s = TREE_TYPE (ovar);
8038 		if (TREE_CODE (s) == REFERENCE_TYPE)
8039 		  s = TREE_TYPE (s);
8040 		s = TYPE_SIZE_UNIT (s);
8041 	      }
8042 	    else
8043 	      s = OMP_CLAUSE_SIZE (c);
8044 	    if (s == NULL_TREE)
8045 	      s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
8046 	    s = fold_convert (size_type_node, s);
8047 	    purpose = size_int (map_idx++);
8048 	    CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
8049 	    if (TREE_CODE (s) != INTEGER_CST)
8050 	      TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
8051 
8052 	    unsigned HOST_WIDE_INT tkind, tkind_zero;
8053 	    switch (OMP_CLAUSE_CODE (c))
8054 	      {
8055 	      case OMP_CLAUSE_MAP:
8056 		tkind = OMP_CLAUSE_MAP_KIND (c);
8057 		tkind_zero = tkind;
8058 		if (OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c))
8059 		  switch (tkind)
8060 		    {
8061 		    case GOMP_MAP_ALLOC:
8062 		    case GOMP_MAP_TO:
8063 		    case GOMP_MAP_FROM:
8064 		    case GOMP_MAP_TOFROM:
8065 		    case GOMP_MAP_ALWAYS_TO:
8066 		    case GOMP_MAP_ALWAYS_FROM:
8067 		    case GOMP_MAP_ALWAYS_TOFROM:
8068 		    case GOMP_MAP_RELEASE:
8069 		    case GOMP_MAP_FORCE_TO:
8070 		    case GOMP_MAP_FORCE_FROM:
8071 		    case GOMP_MAP_FORCE_TOFROM:
8072 		    case GOMP_MAP_FORCE_PRESENT:
8073 		      tkind_zero = GOMP_MAP_ZERO_LEN_ARRAY_SECTION;
8074 		      break;
8075 		    case GOMP_MAP_DELETE:
8076 		      tkind_zero = GOMP_MAP_DELETE_ZERO_LEN_ARRAY_SECTION;
8077 		    default:
8078 		      break;
8079 		    }
8080 		if (tkind_zero != tkind)
8081 		  {
8082 		    if (integer_zerop (s))
8083 		      tkind = tkind_zero;
8084 		    else if (integer_nonzerop (s))
8085 		      tkind_zero = tkind;
8086 		  }
8087 		break;
8088 	      case OMP_CLAUSE_FIRSTPRIVATE:
8089 		gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
8090 		tkind = GOMP_MAP_TO;
8091 		tkind_zero = tkind;
8092 		break;
8093 	      case OMP_CLAUSE_TO:
8094 		tkind = GOMP_MAP_TO;
8095 		tkind_zero = tkind;
8096 		break;
8097 	      case OMP_CLAUSE_FROM:
8098 		tkind = GOMP_MAP_FROM;
8099 		tkind_zero = tkind;
8100 		break;
8101 	      default:
8102 		gcc_unreachable ();
8103 	      }
8104 	    gcc_checking_assert (tkind
8105 				 < (HOST_WIDE_INT_C (1U) << talign_shift));
8106 	    gcc_checking_assert (tkind_zero
8107 				 < (HOST_WIDE_INT_C (1U) << talign_shift));
8108 	    talign = ceil_log2 (talign);
8109 	    tkind |= talign << talign_shift;
8110 	    tkind_zero |= talign << talign_shift;
8111 	    gcc_checking_assert (tkind
8112 				 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
8113 	    gcc_checking_assert (tkind_zero
8114 				 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
8115 	    if (tkind == tkind_zero)
8116 	      x = build_int_cstu (tkind_type, tkind);
8117 	    else
8118 	      {
8119 		TREE_STATIC (TREE_VEC_ELT (t, 2)) = 0;
8120 		x = build3 (COND_EXPR, tkind_type,
8121 			    fold_build2 (EQ_EXPR, boolean_type_node,
8122 					 unshare_expr (s), size_zero_node),
8123 			    build_int_cstu (tkind_type, tkind_zero),
8124 			    build_int_cstu (tkind_type, tkind));
8125 	      }
8126 	    CONSTRUCTOR_APPEND_ELT (vkind, purpose, x);
8127 	    if (nc && nc != c)
8128 	      c = nc;
8129 	    break;
8130 
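	  /* On non-OpenACC targets, firstprivate scalars whose value fits
	     into a pointer are passed by value via
	     GOMP_MAP_FIRSTPRIVATE_INT; everything else is passed by
	     address via GOMP_MAP_FIRSTPRIVATE.  */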
8131 	  case OMP_CLAUSE_FIRSTPRIVATE:
8132 	    if (is_oacc_parallel (ctx))
8133 	      goto oacc_firstprivate_map;
8134 	    ovar = OMP_CLAUSE_DECL (c);
8135 	    if (omp_is_reference (ovar))
8136 	      talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
8137 	    else
8138 	      talign = DECL_ALIGN_UNIT (ovar);
8139 	    var = lookup_decl_in_outer_ctx (ovar, ctx);
8140 	    x = build_sender_ref (ovar, ctx);
8141 	    tkind = GOMP_MAP_FIRSTPRIVATE;
8142 	    type = TREE_TYPE (ovar);
8143 	    if (omp_is_reference (ovar))
8144 	      type = TREE_TYPE (type);
8145 	    if ((INTEGRAL_TYPE_P (type)
8146 		 && TYPE_PRECISION (type) <= POINTER_SIZE)
8147 		|| TREE_CODE (type) == POINTER_TYPE)
8148 	      {
8149 		tkind = GOMP_MAP_FIRSTPRIVATE_INT;
8150 		tree t = var;
8151 		if (omp_is_reference (var))
8152 		  t = build_simple_mem_ref (var);
8153 		else if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
8154 		  TREE_NO_WARNING (var) = 1;
8155 		if (TREE_CODE (type) != POINTER_TYPE)
8156 		  t = fold_convert (pointer_sized_int_node, t);
8157 		t = fold_convert (TREE_TYPE (x), t);
8158 		gimplify_assign (x, t, &ilist);
8159 	      }
8160 	    else if (omp_is_reference (var))
8161 	      gimplify_assign (x, var, &ilist);
8162 	    else if (is_gimple_reg (var))
8163 	      {
8164 		tree avar = create_tmp_var (TREE_TYPE (var));
8165 		mark_addressable (avar);
8166 		if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
8167 		  TREE_NO_WARNING (var) = 1;
8168 		gimplify_assign (avar, var, &ilist);
8169 		avar = build_fold_addr_expr (avar);
8170 		gimplify_assign (x, avar, &ilist);
8171 	      }
8172 	    else
8173 	      {
8174 		var = build_fold_addr_expr (var);
8175 		gimplify_assign (x, var, &ilist);
8176 	      }
8177 	    if (tkind == GOMP_MAP_FIRSTPRIVATE_INT)
8178 	      s = size_int (0);
8179 	    else if (omp_is_reference (ovar))
8180 	      s = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
8181 	    else
8182 	      s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
8183 	    s = fold_convert (size_type_node, s);
8184 	    purpose = size_int (map_idx++);
8185 	    CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
8186 	    if (TREE_CODE (s) != INTEGER_CST)
8187 	      TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
8188 
8189 	    gcc_checking_assert (tkind
8190 				 < (HOST_WIDE_INT_C (1U) << talign_shift));
8191 	    talign = ceil_log2 (talign);
8192 	    tkind |= talign << talign_shift;
8193 	    gcc_checking_assert (tkind
8194 				 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
8195 	    CONSTRUCTOR_APPEND_ELT (vkind, purpose,
8196 				    build_int_cstu (tkind_type, tkind));
8197 	    break;
8198 
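	  /* use_device_ptr and is_device_ptr pass only the host address;
	     the size slot is unused, so record zero.  */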
8199 	  case OMP_CLAUSE_USE_DEVICE_PTR:
8200 	  case OMP_CLAUSE_IS_DEVICE_PTR:
8201 	    ovar = OMP_CLAUSE_DECL (c);
8202 	    var = lookup_decl_in_outer_ctx (ovar, ctx);
8203 	    x = build_sender_ref (ovar, ctx);
8204 	    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR)
8205 	      tkind = GOMP_MAP_USE_DEVICE_PTR;
8206 	    else
8207 	      tkind = GOMP_MAP_FIRSTPRIVATE_INT;
8208 	    type = TREE_TYPE (ovar);
8209 	    if (TREE_CODE (type) == ARRAY_TYPE)
8210 	      var = build_fold_addr_expr (var);
8211 	    else
8212 	      {
8213 		if (omp_is_reference (ovar))
8214 		  {
8215 		    type = TREE_TYPE (type);
8216 		    if (TREE_CODE (type) != ARRAY_TYPE)
8217 		      var = build_simple_mem_ref (var);
8218 		    var = fold_convert (TREE_TYPE (x), var);
8219 		  }
8220 	      }
8221 	    gimplify_assign (x, var, &ilist);
8222 	    s = size_int (0);
8223 	    purpose = size_int (map_idx++);
8224 	    CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
8225 	    gcc_checking_assert (tkind
8226 				 < (HOST_WIDE_INT_C (1U) << talign_shift));
8227 	    gcc_checking_assert (tkind
8228 				 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
8229 	    CONSTRUCTOR_APPEND_ELT (vkind, purpose,
8230 				    build_int_cstu (tkind_type, tkind));
8231 	    break;
8232 	  }
8233 
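      /* Every mapped entity must have contributed exactly one size/kind
	 pair.  */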
8234       gcc_assert (map_idx == map_cnt);
8235 
8236       DECL_INITIAL (TREE_VEC_ELT (t, 1))
8237 	= build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 1)), vsize);
8238       DECL_INITIAL (TREE_VEC_ELT (t, 2))
8239 	= build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 2)), vkind);
8240       for (int i = 1; i <= 2; i++)
8241 	if (!TREE_STATIC (TREE_VEC_ELT (t, i)))
8242 	  {
8243 	    gimple_seq initlist = NULL;
8244 	    force_gimple_operand (build1 (DECL_EXPR, void_type_node,
8245 					  TREE_VEC_ELT (t, i)),
8246 				  &initlist, true, NULL_TREE);
8247 	    gimple_seq_add_seq (&ilist, initlist);
8248 
8249 	    tree clobber = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, i)),
8250 					      NULL);
8251 	    TREE_THIS_VOLATILE (clobber) = 1;
8252 	    gimple_seq_add_stmt (&olist,
8253 				 gimple_build_assign (TREE_VEC_ELT (t, i),
8254 						      clobber));
8255 	  }
8256 
8257       tree clobber = build_constructor (ctx->record_type, NULL);
8258       TREE_THIS_VOLATILE (clobber) = 1;
8259       gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
8260 							clobber));
8261     }
8262 
8263   /* Once all the expansions are done, sequence all the different
8264      fragments inside gimple_omp_body.  */
8265 
8266   new_body = NULL;
8267 
8268   if (offloaded
8269       && ctx->record_type)
8270     {
8271       t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
8272       /* fixup_child_record_type might have changed receiver_decl's type.  */
8273       t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
8274       gimple_seq_add_stmt (&new_body,
8275 	  		   gimple_build_assign (ctx->receiver_decl, t));
8276     }
8277   gimple_seq_add_seq (&new_body, fplist);
8278 
8279   if (offloaded || data_region)
8280     {
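      /* PREV tracks a map clause whose receiver reference supplies the
	 base address for a following GOMP_MAP_FIRSTPRIVATE_POINTER or
	 GOMP_MAP_FIRSTPRIVATE_REFERENCE clause handled in the second
	 pass below.  */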
8281       tree prev = NULL_TREE;
8282       for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
8283 	switch (OMP_CLAUSE_CODE (c))
8284 	  {
8285 	    tree var, x;
8286 	  default:
8287 	    break;
8288 	  case OMP_CLAUSE_FIRSTPRIVATE:
8289 	    if (is_gimple_omp_oacc (ctx->stmt))
8290 	      break;
8291 	    var = OMP_CLAUSE_DECL (c);
8292 	    if (omp_is_reference (var)
8293 		|| is_gimple_reg_type (TREE_TYPE (var)))
8294 	      {
8295 		tree new_var = lookup_decl (var, ctx);
8296 		tree type;
8297 		type = TREE_TYPE (var);
8298 		if (omp_is_reference (var))
8299 		  type = TREE_TYPE (type);
8300 		if ((INTEGRAL_TYPE_P (type)
8301 		     && TYPE_PRECISION (type) <= POINTER_SIZE)
8302 		    || TREE_CODE (type) == POINTER_TYPE)
8303 		  {
8304 		    x = build_receiver_ref (var, false, ctx);
8305 		    if (TREE_CODE (type) != POINTER_TYPE)
8306 		      x = fold_convert (pointer_sized_int_node, x);
8307 		    x = fold_convert (type, x);
8308 		    gimplify_expr (&x, &new_body, NULL, is_gimple_val,
8309 				   fb_rvalue);
8310 		    if (omp_is_reference (var))
8311 		      {
8312 			tree v = create_tmp_var_raw (type, get_name (var));
8313 			gimple_add_tmp_var (v);
8314 			TREE_ADDRESSABLE (v) = 1;
8315 			gimple_seq_add_stmt (&new_body,
8316 					     gimple_build_assign (v, x));
8317 			x = build_fold_addr_expr (v);
8318 		      }
8319 		    gimple_seq_add_stmt (&new_body,
8320 					 gimple_build_assign (new_var, x));
8321 		  }
8322 		else
8323 		  {
8324 		    x = build_receiver_ref (var, !omp_is_reference (var), ctx);
8325 		    gimplify_expr (&x, &new_body, NULL, is_gimple_val,
8326 				   fb_rvalue);
8327 		    gimple_seq_add_stmt (&new_body,
8328 					 gimple_build_assign (new_var, x));
8329 		  }
8330 	      }
8331 	    else if (is_variable_sized (var))
8332 	      {
8333 		tree pvar = DECL_VALUE_EXPR (var);
8334 		gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
8335 		pvar = TREE_OPERAND (pvar, 0);
8336 		gcc_assert (DECL_P (pvar));
8337 		tree new_var = lookup_decl (pvar, ctx);
8338 		x = build_receiver_ref (var, false, ctx);
8339 		gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8340 		gimple_seq_add_stmt (&new_body,
8341 				     gimple_build_assign (new_var, x));
8342 	      }
8343 	    break;
8344 	  case OMP_CLAUSE_PRIVATE:
8345 	    if (is_gimple_omp_oacc (ctx->stmt))
8346 	      break;
8347 	    var = OMP_CLAUSE_DECL (c);
8348 	    if (omp_is_reference (var))
8349 	      {
8350 		location_t clause_loc = OMP_CLAUSE_LOCATION (c);
8351 		tree new_var = lookup_decl (var, ctx);
8352 		x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
8353 		if (TREE_CONSTANT (x))
8354 		  {
8355 		    x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
8356 					    get_name (var));
8357 		    gimple_add_tmp_var (x);
8358 		    TREE_ADDRESSABLE (x) = 1;
8359 		    x = build_fold_addr_expr_loc (clause_loc, x);
8360 		  }
8361 		else
8362 		  break;
8363 
8364 		x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
8365 		gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8366 		gimple_seq_add_stmt (&new_body,
8367 				     gimple_build_assign (new_var, x));
8368 	      }
8369 	    break;
8370 	  case OMP_CLAUSE_USE_DEVICE_PTR:
8371 	  case OMP_CLAUSE_IS_DEVICE_PTR:
8372 	    var = OMP_CLAUSE_DECL (c);
8373 	    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR)
8374 	      x = build_sender_ref (var, ctx);
8375 	    else
8376 	      x = build_receiver_ref (var, false, ctx);
8377 	    if (is_variable_sized (var))
8378 	      {
8379 		tree pvar = DECL_VALUE_EXPR (var);
8380 		gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
8381 		pvar = TREE_OPERAND (pvar, 0);
8382 		gcc_assert (DECL_P (pvar));
8383 		tree new_var = lookup_decl (pvar, ctx);
8384 		gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8385 		gimple_seq_add_stmt (&new_body,
8386 				     gimple_build_assign (new_var, x));
8387 	      }
8388 	    else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
8389 	      {
8390 		tree new_var = lookup_decl (var, ctx);
8391 		new_var = DECL_VALUE_EXPR (new_var);
8392 		gcc_assert (TREE_CODE (new_var) == MEM_REF);
8393 		new_var = TREE_OPERAND (new_var, 0);
8394 		gcc_assert (DECL_P (new_var));
8395 		gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8396 		gimple_seq_add_stmt (&new_body,
8397 				     gimple_build_assign (new_var, x));
8398 	      }
8399 	    else
8400 	      {
8401 		tree type = TREE_TYPE (var);
8402 		tree new_var = lookup_decl (var, ctx);
8403 		if (omp_is_reference (var))
8404 		  {
8405 		    type = TREE_TYPE (type);
8406 		    if (TREE_CODE (type) != ARRAY_TYPE)
8407 		      {
8408 			tree v = create_tmp_var_raw (type, get_name (var));
8409 			gimple_add_tmp_var (v);
8410 			TREE_ADDRESSABLE (v) = 1;
8411 			x = fold_convert (type, x);
8412 			gimplify_expr (&x, &new_body, NULL, is_gimple_val,
8413 				       fb_rvalue);
8414 			gimple_seq_add_stmt (&new_body,
8415 					     gimple_build_assign (v, x));
8416 			x = build_fold_addr_expr (v);
8417 		      }
8418 		  }
8419 		new_var = DECL_VALUE_EXPR (new_var);
8420 		x = fold_convert (TREE_TYPE (new_var), x);
8421 		gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8422 		gimple_seq_add_stmt (&new_body,
8423 				     gimple_build_assign (new_var, x));
8424 	      }
8425 	    break;
8426 	  }
8427       /* Handle GOMP_MAP_FIRSTPRIVATE_{POINTER,REFERENCE} in a second
8428 	 pass, so that firstprivate vars holding OMP_CLAUSE_SIZE, if
8429 	 needed, are already handled.  Likewise OMP_CLAUSE_PRIVATE for
8430 	 VLAs or references to VLAs.  */
8431       for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
8432 	switch (OMP_CLAUSE_CODE (c))
8433 	  {
8434 	    tree var;
8435 	  default:
8436 	    break;
8437 	  case OMP_CLAUSE_MAP:
8438 	    if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
8439 		|| OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
8440 	      {
8441 		location_t clause_loc = OMP_CLAUSE_LOCATION (c);
8442 		poly_int64 offset = 0;
8443 		gcc_assert (prev);
8444 		var = OMP_CLAUSE_DECL (c);
8445 		if (DECL_P (var)
8446 		    && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
8447 		    && is_global_var (maybe_lookup_decl_in_outer_ctx (var,
8448 								      ctx))
8449 		    && varpool_node::get_create (var)->offloadable)
8450 		  break;
8451 		if (TREE_CODE (var) == INDIRECT_REF
8452 		    && TREE_CODE (TREE_OPERAND (var, 0)) == COMPONENT_REF)
8453 		  var = TREE_OPERAND (var, 0);
8454 		if (TREE_CODE (var) == COMPONENT_REF)
8455 		  {
8456 		    var = get_addr_base_and_unit_offset (var, &offset);
8457 		    gcc_assert (var != NULL_TREE && DECL_P (var));
8458 		  }
8459 		else if (DECL_SIZE (var)
8460 			 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
8461 		  {
8462 		    tree var2 = DECL_VALUE_EXPR (var);
8463 		    gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
8464 		    var2 = TREE_OPERAND (var2, 0);
8465 		    gcc_assert (DECL_P (var2));
8466 		    var = var2;
8467 		  }
8468 		tree new_var = lookup_decl (var, ctx), x;
8469 		tree type = TREE_TYPE (new_var);
8470 		bool is_ref;
8471 		if (TREE_CODE (OMP_CLAUSE_DECL (c)) == INDIRECT_REF
8472 		    && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
8473 			== COMPONENT_REF))
8474 		  {
8475 		    type = TREE_TYPE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0));
8476 		    is_ref = true;
8477 		    new_var = build2 (MEM_REF, type,
8478 				      build_fold_addr_expr (new_var),
8479 				      build_int_cst (build_pointer_type (type),
8480 						     offset));
8481 		  }
8482 		else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPONENT_REF)
8483 		  {
8484 		    type = TREE_TYPE (OMP_CLAUSE_DECL (c));
8485 		    is_ref = TREE_CODE (type) == REFERENCE_TYPE;
8486 		    new_var = build2 (MEM_REF, type,
8487 				      build_fold_addr_expr (new_var),
8488 				      build_int_cst (build_pointer_type (type),
8489 						     offset));
8490 		  }
8491 		else
8492 		  is_ref = omp_is_reference (var);
8493 		if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
8494 		  is_ref = false;
8495 		bool ref_to_array = false;
8496 		if (is_ref)
8497 		  {
8498 		    type = TREE_TYPE (type);
8499 		    if (TREE_CODE (type) == ARRAY_TYPE)
8500 		      {
8501 			type = build_pointer_type (type);
8502 			ref_to_array = true;
8503 		      }
8504 		  }
8505 		else if (TREE_CODE (type) == ARRAY_TYPE)
8506 		  {
8507 		    tree decl2 = DECL_VALUE_EXPR (new_var);
8508 		    gcc_assert (TREE_CODE (decl2) == MEM_REF);
8509 		    decl2 = TREE_OPERAND (decl2, 0);
8510 		    gcc_assert (DECL_P (decl2));
8511 		    new_var = decl2;
8512 		    type = TREE_TYPE (new_var);
8513 		  }
8514 		x = build_receiver_ref (OMP_CLAUSE_DECL (prev), false, ctx);
8515 		x = fold_convert_loc (clause_loc, type, x);
8516 		if (!integer_zerop (OMP_CLAUSE_SIZE (c)))
8517 		  {
8518 		    tree bias = OMP_CLAUSE_SIZE (c);
8519 		    if (DECL_P (bias))
8520 		      bias = lookup_decl (bias, ctx);
8521 		    bias = fold_convert_loc (clause_loc, sizetype, bias);
8522 		    bias = fold_build1_loc (clause_loc, NEGATE_EXPR, sizetype,
8523 					    bias);
8524 		    x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
8525 					 TREE_TYPE (x), x, bias);
8526 		  }
8527 		if (ref_to_array)
8528 		  x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
8529 		gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8530 		if (is_ref && !ref_to_array)
8531 		  {
8532 		    tree t = create_tmp_var_raw (type, get_name (var));
8533 		    gimple_add_tmp_var (t);
8534 		    TREE_ADDRESSABLE (t) = 1;
8535 		    gimple_seq_add_stmt (&new_body,
8536 					 gimple_build_assign (t, x));
8537 		    x = build_fold_addr_expr_loc (clause_loc, t);
8538 		  }
8539 		gimple_seq_add_stmt (&new_body,
8540 				     gimple_build_assign (new_var, x));
8541 		prev = NULL_TREE;
8542 	      }
8543 	    else if (OMP_CLAUSE_CHAIN (c)
8544 		     && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c))
8545 			== OMP_CLAUSE_MAP
8546 		     && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
8547 			 == GOMP_MAP_FIRSTPRIVATE_POINTER
8548 			 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
8549 			     == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
8550 	      prev = c;
8551 	    break;
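	  /* Private VLAs, and references whose pointed-to size is not
	     constant, get their backing storage from alloca so that it
	     lives for the duration of the region.  */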
8552 	  case OMP_CLAUSE_PRIVATE:
8553 	    var = OMP_CLAUSE_DECL (c);
8554 	    if (is_variable_sized (var))
8555 	      {
8556 		location_t clause_loc = OMP_CLAUSE_LOCATION (c);
8557 		tree new_var = lookup_decl (var, ctx);
8558 		tree pvar = DECL_VALUE_EXPR (var);
8559 		gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
8560 		pvar = TREE_OPERAND (pvar, 0);
8561 		gcc_assert (DECL_P (pvar));
8562 		tree new_pvar = lookup_decl (pvar, ctx);
8563 		tree atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
8564 		tree al = size_int (DECL_ALIGN (var));
8565 		tree x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
8566 		x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
8567 		x = fold_convert_loc (clause_loc, TREE_TYPE (new_pvar), x);
8568 		gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8569 		gimple_seq_add_stmt (&new_body,
8570 				     gimple_build_assign (new_pvar, x));
8571 	      }
8572 	    else if (omp_is_reference (var) && !is_gimple_omp_oacc (ctx->stmt))
8573 	      {
8574 		location_t clause_loc = OMP_CLAUSE_LOCATION (c);
8575 		tree new_var = lookup_decl (var, ctx);
8576 		tree x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
8577 		if (TREE_CONSTANT (x))
8578 		  break;
8579 		else
8580 		  {
8581 		    tree atmp
8582 		      = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
8583 		    tree rtype = TREE_TYPE (TREE_TYPE (new_var));
8584 		    tree al = size_int (TYPE_ALIGN (rtype));
8585 		    x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
8586 		  }
8587 
8588 		x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
8589 		gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
8590 		gimple_seq_add_stmt (&new_body,
8591 				     gimple_build_assign (new_var, x));
8592 	      }
8593 	    break;
8594 	  }
8595 
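      /* Sequence the lowered body, wrapped in any OpenACC reduction
	 fork/join sequences.  */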
8596       gimple_seq fork_seq = NULL;
8597       gimple_seq join_seq = NULL;
8598 
8599       if (is_oacc_parallel (ctx))
8600 	{
8601 	  /* If there are reductions on the offloaded region itself, treat
8602 	     them as a dummy GANG loop.  */
8603 	  tree level = build_int_cst (integer_type_node, GOMP_DIM_GANG);
8604 
8605 	  lower_oacc_reductions (gimple_location (ctx->stmt), clauses, level,
8606 				 false, NULL, NULL, &fork_seq, &join_seq, ctx);
8607 	}
8608 
8609       gimple_seq_add_seq (&new_body, fork_seq);
8610       gimple_seq_add_seq (&new_body, tgt_body);
8611       gimple_seq_add_seq (&new_body, join_seq);
8612 
8613       if (offloaded)
8614 	new_body = maybe_catch_exception (new_body);
8615 
8616       gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
8617       gimple_omp_set_body (stmt, new_body);
8618     }
8619 
8620   bind = gimple_build_bind (NULL, NULL,
8621 			    tgt_bind ? gimple_bind_block (tgt_bind)
8622 				     : NULL_TREE);
8623   gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
8624   gimple_bind_add_seq (bind, ilist);
8625   gimple_bind_add_stmt (bind, stmt);
8626   gimple_bind_add_seq (bind, olist);
8627 
8628   pop_gimplify_context (NULL);
8629 
8630   if (dep_bind)
8631     {
8632       gimple_bind_add_seq (dep_bind, dep_ilist);
8633       gimple_bind_add_stmt (dep_bind, bind);
8634       gimple_bind_add_seq (dep_bind, dep_olist);
8635       pop_gimplify_context (dep_bind);
8636     }
8637 }
8638 
8639 /* Expand code for an OpenMP teams directive.  */
8640 
8641 static void
8642 lower_omp_teams (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8643 {
8644   gomp_teams *teams_stmt = as_a <gomp_teams *> (gsi_stmt (*gsi_p));
8645   push_gimplify_context ();
8646 
8647   tree block = make_node (BLOCK);
8648   gbind *bind = gimple_build_bind (NULL, NULL, block);
8649   gsi_replace (gsi_p, bind, true);
8650   gimple_seq bind_body = NULL;
8651   gimple_seq dlist = NULL;
8652   gimple_seq olist = NULL;
8653 
8654   tree num_teams = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
8655 				    OMP_CLAUSE_NUM_TEAMS);
8656   if (num_teams == NULL_TREE)
8657     num_teams = build_int_cst (unsigned_type_node, 0);
8658   else
8659     {
8660       num_teams = OMP_CLAUSE_NUM_TEAMS_EXPR (num_teams);
8661       num_teams = fold_convert (unsigned_type_node, num_teams);
8662       gimplify_expr (&num_teams, &bind_body, NULL, is_gimple_val, fb_rvalue);
8663     }
8664   tree thread_limit = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
8665 				       OMP_CLAUSE_THREAD_LIMIT);
8666   if (thread_limit == NULL_TREE)
8667     thread_limit = build_int_cst (unsigned_type_node, 0);
8668   else
8669     {
8670       thread_limit = OMP_CLAUSE_THREAD_LIMIT_EXPR (thread_limit);
8671       thread_limit = fold_convert (unsigned_type_node, thread_limit);
8672       gimplify_expr (&thread_limit, &bind_body, NULL, is_gimple_val,
8673 		     fb_rvalue);
8674     }
8675 
8676   lower_rec_input_clauses (gimple_omp_teams_clauses (teams_stmt),
8677 			   &bind_body, &dlist, ctx, NULL);
8678   lower_omp (gimple_omp_body_ptr (teams_stmt), ctx);
8679   lower_reduction_clauses (gimple_omp_teams_clauses (teams_stmt), &olist, ctx);
8680   if (!gimple_omp_teams_grid_phony (teams_stmt))
8681     {
8682       gimple_seq_add_stmt (&bind_body, teams_stmt);
8683       location_t loc = gimple_location (teams_stmt);
8684       tree decl = builtin_decl_explicit (BUILT_IN_GOMP_TEAMS);
8685       gimple *call = gimple_build_call (decl, 2, num_teams, thread_limit);
8686       gimple_set_location (call, loc);
8687       gimple_seq_add_stmt (&bind_body, call);
8688     }
8689 
8690   gimple_seq_add_seq (&bind_body, gimple_omp_body (teams_stmt));
8691   gimple_omp_set_body (teams_stmt, NULL);
8692   gimple_seq_add_seq (&bind_body, olist);
8693   gimple_seq_add_seq (&bind_body, dlist);
8694   if (!gimple_omp_teams_grid_phony (teams_stmt))
8695     gimple_seq_add_stmt (&bind_body, gimple_build_omp_return (true));
8696   gimple_bind_set_body (bind, bind_body);
8697 
8698   pop_gimplify_context (bind);
8699 
8700   gimple_bind_append_vars (bind, ctx->block_vars);
8701   BLOCK_VARS (block) = ctx->block_vars;
8702   if (BLOCK_VARS (block))
8703     TREE_USED (block) = 1;
8704 }
8705 
8706 /* Expand code within an artificial GIMPLE_OMP_GRID_BODY OMP construct.  */
8707 
8708 static void
8709 lower_omp_grid_body (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8710 {
8711   gimple *stmt = gsi_stmt (*gsi_p);
8712   lower_omp (gimple_omp_body_ptr (stmt), ctx);
8713   gimple_seq_add_stmt (gimple_omp_body_ptr (stmt),
8714 		       gimple_build_omp_return (false));
8715 }
8716 
8717 
8718 /* Callback for lower_omp_1.  Return non-NULL if *tp needs to be
8719    regimplified.  If DATA is non-NULL, lower_omp_1 is being invoked
8720    outside of an OMP context, but with task_shared_vars set.  */
8721 
8722 static tree
8723 lower_omp_regimplify_p (tree *tp, int *walk_subtrees,
8724     			void *data)
8725 {
8726   tree t = *tp;
8727 
8728   /* Any variable with DECL_VALUE_EXPR needs to be regimplified.  */
8729   if (VAR_P (t) && data == NULL && DECL_HAS_VALUE_EXPR_P (t))
8730     return t;
8731 
8732   if (task_shared_vars
8733       && DECL_P (t)
8734       && bitmap_bit_p (task_shared_vars, DECL_UID (t)))
8735     return t;
8736 
8737   /* If a global variable has been privatized, TREE_CONSTANT on
8738      ADDR_EXPR might be wrong.  */
8739   if (data == NULL && TREE_CODE (t) == ADDR_EXPR)
8740     recompute_tree_invariant_for_addr_expr (t);
8741 
8742   *walk_subtrees = !IS_TYPE_OR_DECL_P (t);
8743   return NULL_TREE;
8744 }
8745 
8746 /* Data to be communicated between lower_omp_regimplify_operands and
8747    lower_omp_regimplify_operands_p.  */
8748 
8749 struct lower_omp_regimplify_operands_data
8750 {
8751   omp_context *ctx;
8752   vec<tree> *decls;
8753 };
8754 
8755 /* Helper function for lower_omp_regimplify_operands.  Find
8756    omp_member_access_dummy_var vars and temporarily adjust their
8757    DECL_VALUE_EXPRs if needed.  */
8758 
8759 static tree
8760 lower_omp_regimplify_operands_p (tree *tp, int *walk_subtrees,
8761 				 void *data)
8762 {
8763   tree t = omp_member_access_dummy_var (*tp);
8764   if (t)
8765     {
8766       struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
8767       lower_omp_regimplify_operands_data *ldata
8768 	= (lower_omp_regimplify_operands_data *) wi->info;
8769       tree o = maybe_lookup_decl (t, ldata->ctx);
8770       if (o != t)
8771 	{
8772 	  ldata->decls->safe_push (DECL_VALUE_EXPR (*tp));
8773 	  ldata->decls->safe_push (*tp);
8774 	  tree v = unshare_and_remap (DECL_VALUE_EXPR (*tp), t, o);
8775 	  SET_DECL_VALUE_EXPR (*tp, v);
8776 	}
8777     }
8778   *walk_subtrees = !IS_TYPE_OR_DECL_P (*tp);
8779   return NULL_TREE;
8780 }
8781 
8782 /* Wrapper around gimple_regimplify_operands that adjusts DECL_VALUE_EXPRs
8783    of omp_member_access_dummy_var vars during regimplification.  */
8784 
8785 static void
8786 lower_omp_regimplify_operands (omp_context *ctx, gimple *stmt,
8787 			       gimple_stmt_iterator *gsi_p)
8788 {
8789   auto_vec<tree, 10> decls;
8790   if (ctx)
8791     {
8792       struct walk_stmt_info wi;
8793       memset (&wi, '\0', sizeof (wi));
8794       struct lower_omp_regimplify_operands_data data;
8795       data.ctx = ctx;
8796       data.decls = &decls;
8797       wi.info = &data;
8798       walk_gimple_op (stmt, lower_omp_regimplify_operands_p, &wi);
8799     }
8800   gimple_regimplify_operands (stmt, gsi_p);
8801   while (!decls.is_empty ())
8802     {
8803       tree t = decls.pop ();
8804       tree v = decls.pop ();
8805       SET_DECL_VALUE_EXPR (t, v);
8806     }
8807 }
8808 
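/* Lower the statement at *GSI_P within context CTX: dispatch OMP
   constructs to the lowering routines above and regimplify operands
   that refer to remapped variables.  */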
8809 static void
8810 lower_omp_1 (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8811 {
8812   gimple *stmt = gsi_stmt (*gsi_p);
8813   struct walk_stmt_info wi;
8814   gcall *call_stmt;
8815 
8816   if (gimple_has_location (stmt))
8817     input_location = gimple_location (stmt);
8818 
8819   if (task_shared_vars)
8820     memset (&wi, '\0', sizeof (wi));
8821 
8822   /* If we have issued syntax errors, avoid doing any heavy lifting.
8823      Just replace the OMP directives with a NOP to avoid
8824      confusing RTL expansion.  */
8825   if (seen_error () && is_gimple_omp (stmt))
8826     {
8827       gsi_replace (gsi_p, gimple_build_nop (), true);
8828       return;
8829     }
8830 
8831   switch (gimple_code (stmt))
8832     {
8833     case GIMPLE_COND:
8834       {
8835 	gcond *cond_stmt = as_a <gcond *> (stmt);
8836 	if ((ctx || task_shared_vars)
8837 	    && (walk_tree (gimple_cond_lhs_ptr (cond_stmt),
8838 			   lower_omp_regimplify_p,
8839 			   ctx ? NULL : &wi, NULL)
8840 		|| walk_tree (gimple_cond_rhs_ptr (cond_stmt),
8841 			      lower_omp_regimplify_p,
8842 			      ctx ? NULL : &wi, NULL)))
8843 	  lower_omp_regimplify_operands (ctx, cond_stmt, gsi_p);
8844       }
8845       break;
8846     case GIMPLE_CATCH:
8847       lower_omp (gimple_catch_handler_ptr (as_a <gcatch *> (stmt)), ctx);
8848       break;
8849     case GIMPLE_EH_FILTER:
8850       lower_omp (gimple_eh_filter_failure_ptr (stmt), ctx);
8851       break;
8852     case GIMPLE_TRY:
8853       lower_omp (gimple_try_eval_ptr (stmt), ctx);
8854       lower_omp (gimple_try_cleanup_ptr (stmt), ctx);
8855       break;
8856     case GIMPLE_TRANSACTION:
8857       lower_omp (gimple_transaction_body_ptr (as_a <gtransaction *> (stmt)),
8858 		 ctx);
8859       break;
8860     case GIMPLE_BIND:
8861       lower_omp (gimple_bind_body_ptr (as_a <gbind *> (stmt)), ctx);
8862       maybe_remove_omp_member_access_dummy_vars (as_a <gbind *> (stmt));
8863       break;
8864     case GIMPLE_OMP_PARALLEL:
8865     case GIMPLE_OMP_TASK:
8866       ctx = maybe_lookup_ctx (stmt);
8867       gcc_assert (ctx);
8868       if (ctx->cancellable)
8869 	ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
8870       lower_omp_taskreg (gsi_p, ctx);
8871       break;
8872     case GIMPLE_OMP_FOR:
8873       ctx = maybe_lookup_ctx (stmt);
8874       gcc_assert (ctx);
8875       if (ctx->cancellable)
8876 	ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
8877       lower_omp_for (gsi_p, ctx);
8878       break;
8879     case GIMPLE_OMP_SECTIONS:
8880       ctx = maybe_lookup_ctx (stmt);
8881       gcc_assert (ctx);
8882       if (ctx->cancellable)
8883 	ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
8884       lower_omp_sections (gsi_p, ctx);
8885       break;
8886     case GIMPLE_OMP_SINGLE:
8887       ctx = maybe_lookup_ctx (stmt);
8888       gcc_assert (ctx);
8889       lower_omp_single (gsi_p, ctx);
8890       break;
8891     case GIMPLE_OMP_MASTER:
8892       ctx = maybe_lookup_ctx (stmt);
8893       gcc_assert (ctx);
8894       lower_omp_master (gsi_p, ctx);
8895       break;
8896     case GIMPLE_OMP_TASKGROUP:
8897       ctx = maybe_lookup_ctx (stmt);
8898       gcc_assert (ctx);
8899       lower_omp_taskgroup (gsi_p, ctx);
8900       break;
8901     case GIMPLE_OMP_ORDERED:
8902       ctx = maybe_lookup_ctx (stmt);
8903       gcc_assert (ctx);
8904       lower_omp_ordered (gsi_p, ctx);
8905       break;
8906     case GIMPLE_OMP_CRITICAL:
8907       ctx = maybe_lookup_ctx (stmt);
8908       gcc_assert (ctx);
8909       lower_omp_critical (gsi_p, ctx);
8910       break;
8911     case GIMPLE_OMP_ATOMIC_LOAD:
8912       if ((ctx || task_shared_vars)
8913 	  && walk_tree (gimple_omp_atomic_load_rhs_ptr (
8914 			  as_a <gomp_atomic_load *> (stmt)),
8915 			lower_omp_regimplify_p, ctx ? NULL : &wi, NULL))
8916 	lower_omp_regimplify_operands (ctx, stmt, gsi_p);
8917       break;
8918     case GIMPLE_OMP_TARGET:
8919       ctx = maybe_lookup_ctx (stmt);
8920       gcc_assert (ctx);
8921       lower_omp_target (gsi_p, ctx);
8922       break;
8923     case GIMPLE_OMP_TEAMS:
8924       ctx = maybe_lookup_ctx (stmt);
8925       gcc_assert (ctx);
8926       lower_omp_teams (gsi_p, ctx);
8927       break;
8928     case GIMPLE_OMP_GRID_BODY:
8929       ctx = maybe_lookup_ctx (stmt);
8930       gcc_assert (ctx);
8931       lower_omp_grid_body (gsi_p, ctx);
8932       break;
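    /* Inside cancellable constructs, GOMP_barrier is replaced by
       GOMP_barrier_cancel, and the results of GOMP_cancel,
       GOMP_cancellation_point and GOMP_barrier_cancel calls are tested
       to branch to the cancellation label.  */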
8933     case GIMPLE_CALL:
8934       tree fndecl;
8935       call_stmt = as_a <gcall *> (stmt);
8936       fndecl = gimple_call_fndecl (call_stmt);
8937       if (fndecl
8938 	  && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
8939 	switch (DECL_FUNCTION_CODE (fndecl))
8940 	  {
8941 	  case BUILT_IN_GOMP_BARRIER:
8942 	    if (ctx == NULL)
8943 	      break;
8944 	    /* FALLTHRU */
8945 	  case BUILT_IN_GOMP_CANCEL:
8946 	  case BUILT_IN_GOMP_CANCELLATION_POINT:
8947 	    omp_context *cctx;
8948 	    cctx = ctx;
8949 	    if (gimple_code (cctx->stmt) == GIMPLE_OMP_SECTION)
8950 	      cctx = cctx->outer;
8951 	    gcc_assert (gimple_call_lhs (call_stmt) == NULL_TREE);
8952 	    if (!cctx->cancellable)
8953 	      {
8954 		if (DECL_FUNCTION_CODE (fndecl)
8955 		    == BUILT_IN_GOMP_CANCELLATION_POINT)
8956 		  {
8957 		    stmt = gimple_build_nop ();
8958 		    gsi_replace (gsi_p, stmt, false);
8959 		  }
8960 		break;
8961 	      }
8962 	    if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_GOMP_BARRIER)
8963 	      {
8964 		fndecl = builtin_decl_explicit (BUILT_IN_GOMP_BARRIER_CANCEL);
8965 		gimple_call_set_fndecl (call_stmt, fndecl);
8966 		gimple_call_set_fntype (call_stmt, TREE_TYPE (fndecl));
8967 	      }
8968 	    tree lhs;
8969 	    lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (fndecl)));
8970 	    gimple_call_set_lhs (call_stmt, lhs);
8971 	    tree fallthru_label;
8972 	    fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
8973 	    gimple *g;
8974 	    g = gimple_build_label (fallthru_label);
8975 	    gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
8976 	    g = gimple_build_cond (NE_EXPR, lhs,
8977 				   fold_convert (TREE_TYPE (lhs),
8978 						 boolean_false_node),
8979 				   cctx->cancel_label, fallthru_label);
8980 	    gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
8981 	    break;
8982 	  default:
8983 	    break;
8984 	  }
8985       /* FALLTHRU */
8986     default:
8987       if ((ctx || task_shared_vars)
8988 	  && walk_gimple_op (stmt, lower_omp_regimplify_p,
8989 			     ctx ? NULL : &wi))
8990 	{
8991 	  /* Just remove clobbers, this should happen only if we have
8992 	     "privatized" local addressable variables in SIMD regions,
8993 	     the clobber isn't needed in that case and gimplifying address
8994 	     of the ARRAY_REF into a pointer and creating MEM_REF based
8995 	     clobber would create worse code than we get with the clobber
8996 	     dropped.  */
8997 	  if (gimple_clobber_p (stmt))
8998 	    {
8999 	      gsi_replace (gsi_p, gimple_build_nop (), true);
9000 	      break;
9001 	    }
9002 	  lower_omp_regimplify_operands (ctx, stmt, gsi_p);
9003 	}
9004       break;
9005     }
9006 }
9007 
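/* Lower all OMP statements in the sequence *BODY, using context CTX
   (NULL when outside any OMP region).  */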
9008 static void
9009 lower_omp (gimple_seq *body, omp_context *ctx)
9010 {
9011   location_t saved_location = input_location;
9012   gimple_stmt_iterator gsi;
9013   for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
9014     lower_omp_1 (&gsi, ctx);
9015   /* During gimplification, we haven't folded statements inside offloading
9016      or taskreg regions (gimplify.c:maybe_fold_stmt); do that now.  */
9017   if (target_nesting_level || taskreg_nesting_level)
9018     for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
9019       fold_stmt (&gsi);
9020   input_location = saved_location;
9021 }
9022 
9023 /* Main entry point.  */
9024 
9025 static unsigned int
9026 execute_lower_omp (void)
9027 {
9028   gimple_seq body;
9029   int i;
9030   omp_context *ctx;
9031 
9032   /* This pass always runs, to provide PROP_gimple_lomp.
9033      But often, there is nothing to do.  */
9034   if (flag_openacc == 0 && flag_openmp == 0
9035       && flag_openmp_simd == 0)
9036     return 0;
9037 
9038   all_contexts = splay_tree_new (splay_tree_compare_pointers, 0,
9039 				 delete_omp_context);
9040 
9041   body = gimple_body (current_function_decl);
9042 
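  /* If HSA code generation was requested, rewrite suitable target
     regions into grid form before scanning them.  */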
9043   if (hsa_gen_requested_p ())
9044     omp_grid_gridify_all_targets (&body);
9045 
9046   scan_omp (&body, NULL);
9047   gcc_assert (taskreg_nesting_level == 0);
9048   FOR_EACH_VEC_ELT (taskreg_contexts, i, ctx)
9049     finish_taskreg_scan (ctx);
9050   taskreg_contexts.release ();
9051 
9052   if (all_contexts->root)
9053     {
9054       if (task_shared_vars)
9055 	push_gimplify_context ();
9056       lower_omp (&body, NULL);
9057       if (task_shared_vars)
9058 	pop_gimplify_context (NULL);
9059     }
9060 
9061   if (all_contexts)
9062     {
9063       splay_tree_delete (all_contexts);
9064       all_contexts = NULL;
9065     }
9066   BITMAP_FREE (task_shared_vars);
9067 
9068   /* If the current function is a method, remove the artificial dummy
9069      VAR_DECLs created for non-static data member privatization; they aren't
9070      needed for debug info or anything else, have already been replaced
9071      everywhere in the IL, and cause problems with LTO.  */
9072   if (DECL_ARGUMENTS (current_function_decl)
9073       && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
9074       && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
9075 	  == POINTER_TYPE))
9076     remove_member_access_dummy_vars (DECL_INITIAL (current_function_decl));
9077   return 0;
9078 }
9079 
9080 namespace {
9081 
9082 const pass_data pass_data_lower_omp =
9083 {
9084   GIMPLE_PASS, /* type */
9085   "omplower", /* name */
9086   OPTGROUP_OMP, /* optinfo_flags */
9087   TV_NONE, /* tv_id */
9088   PROP_gimple_any, /* properties_required */
9089   PROP_gimple_lomp | PROP_gimple_lomp_dev, /* properties_provided */
9090   0, /* properties_destroyed */
9091   0, /* todo_flags_start */
9092   0, /* todo_flags_finish */
9093 };
9094 
9095 class pass_lower_omp : public gimple_opt_pass
9096 {
9097 public:
9098   pass_lower_omp (gcc::context *ctxt)
9099     : gimple_opt_pass (pass_data_lower_omp, ctxt)
9100   {}
9101 
9102   /* opt_pass methods: */
9103   virtual unsigned int execute (function *) { return execute_lower_omp (); }
9104 
9105 }; // class pass_lower_omp
9106 
9107 } // anon namespace
9108 
9109 gimple_opt_pass *
9110 make_pass_lower_omp (gcc::context *ctxt)
9111 {
9112   return new pass_lower_omp (ctxt);
9113 }
9114 
9115 /* The following is a utility to diagnose structured block violations.
9116    It is not part of the "omplower" pass, as that's invoked too late.  It
9117    should be invoked by the respective front ends after gimplification.  */
9118 
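/* Map from each label to the innermost OMP construct containing it,
   built by diagnose_sb_1 and consulted by diagnose_sb_2.  */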
9119 static splay_tree all_labels;
9120 
9121 /* Check for mismatched contexts and generate an error if needed.  Return
9122    true if an error is detected.  */
9123 
9124 static bool
9125 diagnose_sb_0 (gimple_stmt_iterator *gsi_p,
9126 	       gimple *branch_ctx, gimple *label_ctx)
9127 {
9128   gcc_checking_assert (!branch_ctx || is_gimple_omp (branch_ctx));
9129   gcc_checking_assert (!label_ctx || is_gimple_omp (label_ctx));
9130 
9131   if (label_ctx == branch_ctx)
9132     return false;
9133 
9134   const char* kind = NULL;
9135 
9136   if (flag_openacc)
9137     {
9138       if ((branch_ctx && is_gimple_omp_oacc (branch_ctx))
9139 	  || (label_ctx && is_gimple_omp_oacc (label_ctx)))
9140 	{
9141 	  gcc_checking_assert (kind == NULL);
9142 	  kind = "OpenACC";
9143 	}
9144     }
9145   if (kind == NULL)
9146     {
9147       gcc_checking_assert (flag_openmp || flag_openmp_simd);
9148       kind = "OpenMP";
9149     }
9150 
9151   /* Previously we kept track of the label's entire context in diagnose_sb_[12]
9152      so we could traverse it and issue a correct "exit" or "enter" error
9153      message upon a structured block violation.
9154 
9155      We built the context by building a list with tree_cons'ing, but there is
9156      no easy counterpart in gimple tuples.  It seems like far too much work
9157      for issuing exit/enter error messages.  If someone really misses the
9158      distinct error message... patches welcome.  */
9159 
9160 #if 0
9161   /* Try to avoid confusing the user by producing an error message
9162      with correct "exit" or "enter" verbiage.  We prefer "exit"
9163      unless we can show that LABEL_CTX is nested within BRANCH_CTX.  */
9164   if (branch_ctx == NULL)
9165     exit_p = false;
9166   else
9167     {
9168       while (label_ctx)
9169 	{
9170 	  if (TREE_VALUE (label_ctx) == branch_ctx)
9171 	    {
9172 	      exit_p = false;
9173 	      break;
9174 	    }
9175 	  label_ctx = TREE_CHAIN (label_ctx);
9176 	}
9177     }
9178 
9179   if (exit_p)
9180     error ("invalid exit from %s structured block", kind);
9181   else
9182     error ("invalid entry to %s structured block", kind);
9183 #endif
9184 
9185   /* If it's obvious we have an invalid entry, be specific about the error.  */
9186   if (branch_ctx == NULL)
9187     error ("invalid entry to %s structured block", kind);
9188   else
9189     {
9190       /* Otherwise, be vague and lazy, but efficient.  */
9191       error ("invalid branch to/from %s structured block", kind);
9192     }
9193 
9194   gsi_replace (gsi_p, gimple_build_nop (), false);
9195   return true;
9196 }
9197 
9198 /* Pass 1: Create a minimal tree of structured blocks, and record
9199    where each label is found.  */
9200 
9201 static tree
9202 diagnose_sb_1 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
9203     	       struct walk_stmt_info *wi)
9204 {
9205   gimple *context = (gimple *) wi->info;
9206   gimple *inner_context;
9207   gimple *stmt = gsi_stmt (*gsi_p);
9208 
9209   *handled_ops_p = true;
9210 
9211   switch (gimple_code (stmt))
9212     {
9213     WALK_SUBSTMTS;
9214 
9215     case GIMPLE_OMP_PARALLEL:
9216     case GIMPLE_OMP_TASK:
9217     case GIMPLE_OMP_SECTIONS:
9218     case GIMPLE_OMP_SINGLE:
9219     case GIMPLE_OMP_SECTION:
9220     case GIMPLE_OMP_MASTER:
9221     case GIMPLE_OMP_ORDERED:
9222     case GIMPLE_OMP_CRITICAL:
9223     case GIMPLE_OMP_TARGET:
9224     case GIMPLE_OMP_TEAMS:
9225     case GIMPLE_OMP_TASKGROUP:
9226       /* The minimal context here is just the current OMP construct.  */
9227       inner_context = stmt;
9228       wi->info = inner_context;
9229       walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
9230       wi->info = context;
9231       break;
9232 
9233     case GIMPLE_OMP_FOR:
9234       inner_context = stmt;
9235       wi->info = inner_context;
9236       /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
9237 	 walk them.  */
9238       walk_gimple_seq (gimple_omp_for_pre_body (stmt),
9239 	  	       diagnose_sb_1, NULL, wi);
9240       walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
9241       wi->info = context;
9242       break;
9243 
9244     case GIMPLE_LABEL:
9245       splay_tree_insert (all_labels,
9246 			 (splay_tree_key) gimple_label_label (
9247 					    as_a <glabel *> (stmt)),
9248 			 (splay_tree_value) context);
9249       break;
9250 
9251     default:
9252       break;
9253     }
9254 
9255   return NULL_TREE;
9256 }
9257 
9258 /* Pass 2: Check each branch and see if its context differs from that of
9259    the destination label's context.  */
9260 
9261 static tree
9262 diagnose_sb_2 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
9263     	       struct walk_stmt_info *wi)
9264 {
9265   gimple *context = (gimple *) wi->info;
9266   splay_tree_node n;
9267   gimple *stmt = gsi_stmt (*gsi_p);
9268 
9269   *handled_ops_p = true;
9270 
9271   switch (gimple_code (stmt))
9272     {
9273     WALK_SUBSTMTS;
9274 
9275     case GIMPLE_OMP_PARALLEL:
9276     case GIMPLE_OMP_TASK:
9277     case GIMPLE_OMP_SECTIONS:
9278     case GIMPLE_OMP_SINGLE:
9279     case GIMPLE_OMP_SECTION:
9280     case GIMPLE_OMP_MASTER:
9281     case GIMPLE_OMP_ORDERED:
9282     case GIMPLE_OMP_CRITICAL:
9283     case GIMPLE_OMP_TARGET:
9284     case GIMPLE_OMP_TEAMS:
9285     case GIMPLE_OMP_TASKGROUP:
9286       wi->info = stmt;
9287       walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
9288       wi->info = context;
9289       break;
9290 
9291     case GIMPLE_OMP_FOR:
9292       wi->info = stmt;
9293       /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
9294 	 walk them.  */
9295       walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt),
9296 			   diagnose_sb_2, NULL, wi);
9297       walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
9298       wi->info = context;
9299       break;
9300 
9301     case GIMPLE_COND:
9302 	{
9303 	  gcond *cond_stmt = as_a <gcond *> (stmt);
9304 	  tree lab = gimple_cond_true_label (cond_stmt);
9305 	  if (lab)
9306 	    {
9307 	      n = splay_tree_lookup (all_labels,
9308 				     (splay_tree_key) lab);
9309 	      diagnose_sb_0 (gsi_p, context,
9310 			     n ? (gimple *) n->value : NULL);
9311 	    }
9312 	  lab = gimple_cond_false_label (cond_stmt);
9313 	  if (lab)
9314 	    {
9315 	      n = splay_tree_lookup (all_labels,
9316 				     (splay_tree_key) lab);
9317 	      diagnose_sb_0 (gsi_p, context,
9318 			     n ? (gimple *) n->value : NULL);
9319 	    }
9320 	}
9321       break;
9322 
9323     case GIMPLE_GOTO:
9324       {
9325 	tree lab = gimple_goto_dest (stmt);
9326 	if (TREE_CODE (lab) != LABEL_DECL)
9327 	  break;
9328 
9329 	n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
9330 	diagnose_sb_0 (gsi_p, context, n ? (gimple *) n->value : NULL);
9331       }
9332       break;
9333 
9334     case GIMPLE_SWITCH:
9335       {
9336 	gswitch *switch_stmt = as_a <gswitch *> (stmt);
9337 	unsigned int i;
9338 	for (i = 0; i < gimple_switch_num_labels (switch_stmt); ++i)
9339 	  {
9340 	    tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
9341 	    n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
9342 	    if (n && diagnose_sb_0 (gsi_p, context, (gimple *) n->value))
9343 	      break;
9344 	  }
9345       }
9346       break;
9347 
9348     case GIMPLE_RETURN:
9349       diagnose_sb_0 (gsi_p, context, NULL);
9350       break;
9351 
9352     default:
9353       break;
9354     }
9355 
9356   return NULL_TREE;
9357 }
9358 
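/* Run the two walks above over the current function: first record the
   context of every label, then check each branch against the context
   of its destination label.  */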
9359 static unsigned int
9360 diagnose_omp_structured_block_errors (void)
9361 {
9362   struct walk_stmt_info wi;
9363   gimple_seq body = gimple_body (current_function_decl);
9364 
9365   all_labels = splay_tree_new (splay_tree_compare_pointers, 0, 0);
9366 
9367   memset (&wi, 0, sizeof (wi));
9368   walk_gimple_seq (body, diagnose_sb_1, NULL, &wi);
9369 
9370   memset (&wi, 0, sizeof (wi));
9371   wi.want_locations = true;
9372   walk_gimple_seq_mod (&body, diagnose_sb_2, NULL, &wi);
9373 
9374   gimple_set_body (current_function_decl, body);
9375 
9376   splay_tree_delete (all_labels);
9377   all_labels = NULL;
9378 
9379   return 0;
9380 }
9381 
9382 namespace {
9383 
9384 const pass_data pass_data_diagnose_omp_blocks =
9385 {
9386   GIMPLE_PASS, /* type */
9387   "*diagnose_omp_blocks", /* name */
9388   OPTGROUP_OMP, /* optinfo_flags */
9389   TV_NONE, /* tv_id */
9390   PROP_gimple_any, /* properties_required */
9391   0, /* properties_provided */
9392   0, /* properties_destroyed */
9393   0, /* todo_flags_start */
9394   0, /* todo_flags_finish */
9395 };
9396 
9397 class pass_diagnose_omp_blocks : public gimple_opt_pass
9398 {
9399 public:
9400   pass_diagnose_omp_blocks (gcc::context *ctxt)
9401     : gimple_opt_pass (pass_data_diagnose_omp_blocks, ctxt)
9402   {}
9403 
9404   /* opt_pass methods: */
9405   virtual bool gate (function *)
9406   {
9407     return flag_openacc || flag_openmp || flag_openmp_simd;
9408   }
9409   virtual unsigned int execute (function *)
9410     {
9411       return diagnose_omp_structured_block_errors ();
9412     }
9413 
9414 }; // class pass_diagnose_omp_blocks
9415 
9416 } // anon namespace
9417 
9418 gimple_opt_pass *
9419 make_pass_diagnose_omp_blocks (gcc::context *ctxt)
9420 {
9421   return new pass_diagnose_omp_blocks (ctxt);
9422 }
9423 
9424 
9425 #include "gt-omp-low.h"
9426