/* Lowering pass for OMP directives.  Converts OMP directives into explicit
   calls to the runtime library (libgomp), data marshalling to implement data
   sharing and copying clauses, offloading to accelerators, and more.

   Contributed by Diego Novillo <dnovillo@redhat.com>

   Copyright (C) 2005-2018 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pass.h"
#include "ssa.h"
#include "cgraph.h"
#include "pretty-print.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "tree-iterator.h"
#include "tree-inline.h"
#include "langhooks.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "splay-tree.h"
#include "omp-general.h"
#include "omp-low.h"
#include "omp-grid.h"
#include "gimple-low.h"
#include "symbol-summary.h"
#include "tree-nested.h"
#include "context.h"
#include "gomp-constants.h"
#include "gimple-pretty-print.h"
#include "hsa-common.h"
#include "stringpool.h"
#include "attribs.h"

/* Lowering of OMP parallel and workshare constructs proceeds in two
   phases.  The first phase scans the function looking for OMP statements
   and then for variables that must be replaced to satisfy data sharing
   clauses.  The second phase expands code for the constructs, as well as
   re-gimplifying things when variables have been replaced with complex
   expressions.

   Final code generation is done by pass_expand_omp.  The flowgraph is
   scanned for regions which are then moved to a new
   function, to be invoked by the thread library, or offloaded.  */
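
/* As a rough illustration of the end result (a sketch only; the child
   function name, the .omp_data_* record, and the exact libgomp call
   follow the conventions used later in this file, but real generated
   code differs in detail):

	#pragma omp parallel shared(x)
	  x++;

   becomes, after lowering and expansion, roughly

	void foo._omp_fn.0 (struct .omp_data_s *.omp_data_i)
	{
	  .omp_data_i->x++;
	}

	...
	.omp_data_o.x = x;
	GOMP_parallel (foo._omp_fn.0, &.omp_data_o, 0, 0);
	x = .omp_data_o.x;
	...  */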

/* Context structure.  Used to store information about each parallel
   directive in the code.  */

struct omp_context
{
  /* This field must be at the beginning, as we do "inheritance": Some
     callback functions for tree-inline.c (e.g., omp_copy_decl)
     receive a copy_body_data pointer that is up-casted to an
     omp_context pointer.  */
  copy_body_data cb;

  /* The tree of contexts corresponding to the encountered constructs.  */
  struct omp_context *outer;
  gimple *stmt;

  /* Map variables to fields in a structure that allows communication
     between sending and receiving threads.  */
  splay_tree field_map;
  tree record_type;
  tree sender_decl;
  tree receiver_decl;

  /* These are used just by task contexts, if task firstprivate fn is
     needed.  srecord_type is used to communicate from the thread
     that encountered the task construct to the task firstprivate fn;
     record_type is allocated by GOMP_task, initialized by the task
     firstprivate fn, and passed to the task body fn.  */
  splay_tree sfield_map;
  tree srecord_type;

  /* A chain of variables to add to the top-level block surrounding the
     construct.  In the case of a parallel, this is in the child function.  */
  tree block_vars;

  /* Label to which GOMP_cancel{,lation_point} and explicit and implicit
     barriers should jump during the omplower pass.  */
  tree cancel_label;

  /* The sibling GIMPLE_OMP_FOR simd with _simt_ clause or NULL
     otherwise.  */
  gimple *simt_stmt;

  /* Nesting depth of this context.  Used to beautify error messages
     regarding invalid gotos.  The outermost ctx is depth 1, with depth 0
     being reserved for the main body of the function.  */
  int depth;

  /* True if this parallel directive is nested within another.  */
  bool is_nested;

  /* True if this construct can be cancelled.  */
  bool cancellable;
};

static splay_tree all_contexts;
static int taskreg_nesting_level;
static int target_nesting_level;
static bitmap task_shared_vars;
static vec<omp_context *> taskreg_contexts;

static void scan_omp (gimple_seq *, omp_context *);
static tree scan_omp_1_op (tree *, int *, void *);

#define WALK_SUBSTMTS  \
    case GIMPLE_BIND: \
    case GIMPLE_TRY: \
    case GIMPLE_CATCH: \
    case GIMPLE_EH_FILTER: \
    case GIMPLE_TRANSACTION: \
      /* The sub-statements for these should be walked.  */ \
      *handled_ops_p = false; \
      break;

/* Return true if CTX corresponds to an oacc parallel region.  */

static bool
is_oacc_parallel (omp_context *ctx)
{
  enum gimple_code outer_type = gimple_code (ctx->stmt);
  return ((outer_type == GIMPLE_OMP_TARGET)
	  && (gimple_omp_target_kind (ctx->stmt)
	      == GF_OMP_TARGET_KIND_OACC_PARALLEL));
}

/* Return true if CTX corresponds to an oacc kernels region.  */

static bool
is_oacc_kernels (omp_context *ctx)
{
  enum gimple_code outer_type = gimple_code (ctx->stmt);
  return ((outer_type == GIMPLE_OMP_TARGET)
	  && (gimple_omp_target_kind (ctx->stmt)
	      == GF_OMP_TARGET_KIND_OACC_KERNELS));
}

/* If DECL is the artificial dummy VAR_DECL created for non-static
   data member privatization, return the underlying "this" parameter,
   otherwise return NULL.  */

tree
omp_member_access_dummy_var (tree decl)
{
  if (!VAR_P (decl)
      || !DECL_ARTIFICIAL (decl)
      || !DECL_IGNORED_P (decl)
      || !DECL_HAS_VALUE_EXPR_P (decl)
      || !lang_hooks.decls.omp_disregard_value_expr (decl, false))
    return NULL_TREE;

  tree v = DECL_VALUE_EXPR (decl);
  if (TREE_CODE (v) != COMPONENT_REF)
    return NULL_TREE;

  while (1)
    switch (TREE_CODE (v))
      {
      case COMPONENT_REF:
      case MEM_REF:
      case INDIRECT_REF:
      CASE_CONVERT:
      case POINTER_PLUS_EXPR:
	v = TREE_OPERAND (v, 0);
	continue;
      case PARM_DECL:
	if (DECL_CONTEXT (v) == current_function_decl
	    && DECL_ARTIFICIAL (v)
	    && TREE_CODE (TREE_TYPE (v)) == POINTER_TYPE)
	  return v;
	return NULL_TREE;
      default:
	return NULL_TREE;
      }
}
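
/* As an illustrative sketch (the exact trees are produced by the C++
   front end; details here are simplified):

	struct S {
	  int n;
	  void f ()
	  {
	    #pragma omp parallel private(n)
	    ...
	  }
	};

   privatizing the non-static member N makes the front end create an
   artificial, ignored VAR_DECL whose DECL_VALUE_EXPR is this->n; for
   such a decl the function above walks the value expression down to
   the artificial "this" PARM_DECL and returns it.  */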

/* Helper for unshare_and_remap, called through walk_tree.  */

static tree
unshare_and_remap_1 (tree *tp, int *walk_subtrees, void *data)
{
  tree *pair = (tree *) data;
  if (*tp == pair[0])
    {
      *tp = unshare_expr (pair[1]);
      *walk_subtrees = 0;
    }
  else if (IS_TYPE_OR_DECL_P (*tp))
    *walk_subtrees = 0;
  return NULL_TREE;
}

/* Return unshare_expr (X) with all occurrences of FROM
   replaced with TO.  */

static tree
unshare_and_remap (tree x, tree from, tree to)
{
  tree pair[2] = { from, to };
  x = unshare_expr (x);
  walk_tree (&x, unshare_and_remap_1, pair, NULL);
  return x;
}

/* Convenience function for calling scan_omp_1_op on tree operands.  */

static inline tree
scan_omp_op (tree *tp, omp_context *ctx)
{
  struct walk_stmt_info wi;

  memset (&wi, 0, sizeof (wi));
  wi.info = ctx;
  wi.want_locations = true;

  return walk_tree (tp, scan_omp_1_op, &wi, NULL);
}

static void lower_omp (gimple_seq *, omp_context *);
static tree lookup_decl_in_outer_ctx (tree, omp_context *);
static tree maybe_lookup_decl_in_outer_ctx (tree, omp_context *);

/* Return true if CTX is for an omp parallel.  */

static inline bool
is_parallel_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL;
}


/* Return true if CTX is for an omp task.  */

static inline bool
is_task_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_TASK;
}


/* Return true if CTX is for an omp taskloop.  */

static inline bool
is_taskloop_ctx (omp_context *ctx)
{
  return gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
	 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP;
}


/* Return true if CTX is for an omp parallel or omp task.  */

static inline bool
is_taskreg_ctx (omp_context *ctx)
{
  return is_parallel_ctx (ctx) || is_task_ctx (ctx);
}

/* Return true if EXPR is variable sized.  */

static inline bool
is_variable_sized (const_tree expr)
{
  return !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
}
/* Lookup variables.  The "maybe" form allows the variable not to have
   been entered; otherwise we assert that the variable must have been
   entered.  */

static inline tree
lookup_decl (tree var, omp_context *ctx)
{
  tree *n = ctx->cb.decl_map->get (var);
  return *n;
}

static inline tree
maybe_lookup_decl (const_tree var, omp_context *ctx)
{
  tree *n = ctx->cb.decl_map->get (const_cast<tree> (var));
  return n ? *n : NULL_TREE;
}

static inline tree
lookup_field (tree var, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->field_map, (splay_tree_key) var);
  return (tree) n->value;
}

static inline tree
lookup_sfield (splay_tree_key key, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->sfield_map
			 ? ctx->sfield_map : ctx->field_map, key);
  return (tree) n->value;
}

static inline tree
lookup_sfield (tree var, omp_context *ctx)
{
  return lookup_sfield ((splay_tree_key) var, ctx);
}

static inline tree
maybe_lookup_field (splay_tree_key key, omp_context *ctx)
{
  splay_tree_node n;
  n = splay_tree_lookup (ctx->field_map, key);
  return n ? (tree) n->value : NULL_TREE;
}

static inline tree
maybe_lookup_field (tree var, omp_context *ctx)
{
  return maybe_lookup_field ((splay_tree_key) var, ctx);
}

/* Return true if DECL should be copied by pointer.  SHARED_CTX is
   the parallel context if DECL is to be shared.  */

static bool
use_pointer_for_field (tree decl, omp_context *shared_ctx)
{
  if (AGGREGATE_TYPE_P (TREE_TYPE (decl))
      || TYPE_ATOMIC (TREE_TYPE (decl)))
    return true;

  /* We can only use copy-in/copy-out semantics for shared variables
     when we know the value is not accessible from an outer scope.  */
  if (shared_ctx)
    {
      gcc_assert (!is_gimple_omp_oacc (shared_ctx->stmt));

      /* ??? Trivially accessible from anywhere.  But why would we even
	 be passing an address in this case?  Should we simply assert
	 this to be false, or should we have a cleanup pass that removes
	 these from the list of mappings?  */
      if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
	return true;

      /* For variables with DECL_HAS_VALUE_EXPR_P set, we cannot tell
	 without analyzing the expression whether or not its location
	 is accessible to anyone else.  In the case of nested parallel
	 regions it certainly may be.  */
      if (TREE_CODE (decl) != RESULT_DECL && DECL_HAS_VALUE_EXPR_P (decl))
	return true;

      /* Do not use copy-in/copy-out for variables that have their
	 address taken.  */
      if (TREE_ADDRESSABLE (decl))
	return true;

      /* lower_send_shared_vars only uses copy-in, but not copy-out
	 for these.  */
      if (TREE_READONLY (decl)
	  || ((TREE_CODE (decl) == RESULT_DECL
	       || TREE_CODE (decl) == PARM_DECL)
	      && DECL_BY_REFERENCE (decl)))
	return false;

      /* Disallow copy-in/out in nested parallel if
	 decl is shared in outer parallel, otherwise
	 each thread could store the shared variable
	 in its own copy-in location, making the
	 variable no longer really shared.  */
      if (shared_ctx->is_nested)
	{
	  omp_context *up;

	  for (up = shared_ctx->outer; up; up = up->outer)
	    if ((is_taskreg_ctx (up)
		 || (gimple_code (up->stmt) == GIMPLE_OMP_TARGET
		     && is_gimple_omp_offloaded (up->stmt)))
		&& maybe_lookup_decl (decl, up))
	      break;

	  if (up)
	    {
	      tree c;

	      if (gimple_code (up->stmt) == GIMPLE_OMP_TARGET)
		{
		  for (c = gimple_omp_target_clauses (up->stmt);
		       c; c = OMP_CLAUSE_CHAIN (c))
		    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
			&& OMP_CLAUSE_DECL (c) == decl)
		      break;
		}
	      else
		for (c = gimple_omp_taskreg_clauses (up->stmt);
		     c; c = OMP_CLAUSE_CHAIN (c))
		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
		      && OMP_CLAUSE_DECL (c) == decl)
		    break;

	      if (c)
		goto maybe_mark_addressable_and_ret;
	    }
	}

      /* For tasks avoid using copy-in/out.  As tasks can be
	 deferred or executed in a different thread, the task hasn't
	 necessarily terminated when GOMP_task returns.  */
      if (is_task_ctx (shared_ctx))
	{
	  tree outer;
	maybe_mark_addressable_and_ret:
	  outer = maybe_lookup_decl_in_outer_ctx (decl, shared_ctx);
	  if (is_gimple_reg (outer) && !omp_member_access_dummy_var (outer))
	    {
	      /* Taking address of OUTER in lower_send_shared_vars
		 might need regimplification of everything that uses the
		 variable.  */
	      if (!task_shared_vars)
		task_shared_vars = BITMAP_ALLOC (NULL);
	      bitmap_set_bit (task_shared_vars, DECL_UID (outer));
	      TREE_ADDRESSABLE (outer) = 1;
	    }
	  return true;
	}
    }

  return false;
}
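
/* For example (illustration only):

	int x = 0;
	#pragma omp parallel shared(x)
	  x = 1;

   if X is addressable, aggregate, atomic, or shared by an outer
   construct as checked above, the sender stores &x in the
   communication record and the child dereferences it; otherwise X is
   eligible for copy-in/copy-out by value.  */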

/* Construct a new automatic decl similar to VAR.  */

static tree
omp_copy_decl_2 (tree var, tree name, tree type, omp_context *ctx)
{
  tree copy = copy_var_decl (var, name, type);

  DECL_CONTEXT (copy) = current_function_decl;
  DECL_CHAIN (copy) = ctx->block_vars;
  /* If VAR is listed in task_shared_vars, it means it wasn't
     originally addressable and is addressable only because the task
     needs to take its address.  But we don't need to take the address
     of privatized copies of that var.  */
  if (TREE_ADDRESSABLE (var)
      && task_shared_vars
      && bitmap_bit_p (task_shared_vars, DECL_UID (var)))
    TREE_ADDRESSABLE (copy) = 0;
  ctx->block_vars = copy;

  return copy;
}

static tree
omp_copy_decl_1 (tree var, omp_context *ctx)
{
  return omp_copy_decl_2 (var, DECL_NAME (var), TREE_TYPE (var), ctx);
}

/* Build COMPONENT_REF and set TREE_THIS_VOLATILE and TREE_READONLY on it
   as appropriate.  */
static tree
omp_build_component_ref (tree obj, tree field)
{
  tree ret = build3 (COMPONENT_REF, TREE_TYPE (field), obj, field, NULL);
  if (TREE_THIS_VOLATILE (field))
    TREE_THIS_VOLATILE (ret) |= 1;
  if (TREE_READONLY (field))
    TREE_READONLY (ret) |= 1;
  return ret;
}

/* Build tree nodes to access the field for VAR on the receiver side.  */

static tree
build_receiver_ref (tree var, bool by_ref, omp_context *ctx)
{
  tree x, field = lookup_field (var, ctx);

  /* If the receiver record type was remapped in the child function,
     remap the field into the new record type.  */
  x = maybe_lookup_field (field, ctx);
  if (x != NULL)
    field = x;

  x = build_simple_mem_ref (ctx->receiver_decl);
  TREE_THIS_NOTRAP (x) = 1;
  x = omp_build_component_ref (x, field);
  if (by_ref)
    {
      x = build_simple_mem_ref (x);
      TREE_THIS_NOTRAP (x) = 1;
    }

  return x;
}

/* Build tree nodes to access VAR in the scope outer to CTX.  In the case
   of a parallel, this is a component reference; for workshare constructs
   this is some variable.  */

static tree
build_outer_var_ref (tree var, omp_context *ctx,
		     enum omp_clause_code code = OMP_CLAUSE_ERROR)
{
  tree x;

  if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
    x = var;
  else if (is_variable_sized (var))
    {
      x = TREE_OPERAND (DECL_VALUE_EXPR (var), 0);
      x = build_outer_var_ref (x, ctx, code);
      x = build_simple_mem_ref (x);
    }
  else if (is_taskreg_ctx (ctx))
    {
      bool by_ref = use_pointer_for_field (var, NULL);
      x = build_receiver_ref (var, by_ref, ctx);
    }
  else if ((gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
	    && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
	   || (code == OMP_CLAUSE_PRIVATE
	       && (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
		   || gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS
		   || gimple_code (ctx->stmt) == GIMPLE_OMP_SINGLE)))
    {
      /* #pragma omp simd isn't a worksharing construct, and can reference
	 even private vars in its linear etc. clauses.
	 Similarly for OMP_CLAUSE_PRIVATE with outer ref, that can refer
	 to private vars in all worksharing constructs.  */
      x = NULL_TREE;
      if (ctx->outer && is_taskreg_ctx (ctx))
	x = lookup_decl (var, ctx->outer);
      else if (ctx->outer)
	x = maybe_lookup_decl_in_outer_ctx (var, ctx);
      if (x == NULL_TREE)
	x = var;
    }
  else if (code == OMP_CLAUSE_LASTPRIVATE && is_taskloop_ctx (ctx))
    {
      gcc_assert (ctx->outer);
      splay_tree_node n
	= splay_tree_lookup (ctx->outer->field_map,
			     (splay_tree_key) &DECL_UID (var));
      if (n == NULL)
	{
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx->outer)))
	    x = var;
	  else
	    x = lookup_decl (var, ctx->outer);
	}
      else
	{
	  tree field = (tree) n->value;
	  /* If the receiver record type was remapped in the child function,
	     remap the field into the new record type.  */
	  x = maybe_lookup_field (field, ctx->outer);
	  if (x != NULL)
	    field = x;

	  x = build_simple_mem_ref (ctx->outer->receiver_decl);
	  x = omp_build_component_ref (x, field);
	  if (use_pointer_for_field (var, ctx->outer))
	    x = build_simple_mem_ref (x);
	}
    }
  else if (ctx->outer)
    {
      omp_context *outer = ctx->outer;
      if (gimple_code (outer->stmt) == GIMPLE_OMP_GRID_BODY)
	{
	  outer = outer->outer;
	  gcc_assert (outer
		      && gimple_code (outer->stmt) != GIMPLE_OMP_GRID_BODY);
	}
      x = lookup_decl (var, outer);
    }
  else if (omp_is_reference (var))
    /* This can happen with orphaned constructs.  If var is reference, it is
       possible it is shared and as such valid.  */
    x = var;
  else if (omp_member_access_dummy_var (var))
    x = var;
  else
    gcc_unreachable ();

  if (x == var)
    {
      tree t = omp_member_access_dummy_var (var);
      if (t)
	{
	  x = DECL_VALUE_EXPR (var);
	  tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
	  if (o != t)
	    x = unshare_and_remap (x, t, o);
	  else
	    x = unshare_expr (x);
	}
    }

  if (omp_is_reference (var))
    x = build_simple_mem_ref (x);

  return x;
}

/* Build tree nodes to access the field for VAR on the sender side.  */

static tree
build_sender_ref (splay_tree_key key, omp_context *ctx)
{
  tree field = lookup_sfield (key, ctx);
  return omp_build_component_ref (ctx->sender_decl, field);
}

static tree
build_sender_ref (tree var, omp_context *ctx)
{
  return build_sender_ref ((splay_tree_key) var, ctx);
}

/* Add a new field for VAR inside the structure CTX->SENDER_DECL.  If
   BASE_POINTERS_RESTRICT, declare the field with restrict.  */
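/* The bits of MASK describe where and how the field is installed, as
   can be read off the code below:
     mask & 1 - enter the field into CTX->FIELD_MAP (receiver record);
     mask & 2 - enter the field into CTX->SFIELD_MAP (sender record);
     mask & 4 - VAR is an array accessed via a pointer to a pointer;
     mask & 8 - key the splay trees by &DECL_UID (VAR) rather than by
		VAR itself.  */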

static void
install_var_field (tree var, bool by_ref, int mask, omp_context *ctx,
		   bool base_pointers_restrict = false)
{
  tree field, type, sfield = NULL_TREE;
  splay_tree_key key = (splay_tree_key) var;

  if ((mask & 8) != 0)
    {
      key = (splay_tree_key) &DECL_UID (var);
      gcc_checking_assert (key != (splay_tree_key) var);
    }
  gcc_assert ((mask & 1) == 0
	      || !splay_tree_lookup (ctx->field_map, key));
  gcc_assert ((mask & 2) == 0 || !ctx->sfield_map
	      || !splay_tree_lookup (ctx->sfield_map, key));
  gcc_assert ((mask & 3) == 3
	      || !is_gimple_omp_oacc (ctx->stmt));

  type = TREE_TYPE (var);
  /* Prevent redeclaring the var in the split-off function with a restrict
     pointer type.  Note that we only clear type itself, restrict qualifiers in
     the pointed-to type will be ignored by points-to analysis.  */
  if (POINTER_TYPE_P (type)
      && TYPE_RESTRICT (type))
    type = build_qualified_type (type, TYPE_QUALS (type) & ~TYPE_QUAL_RESTRICT);

  if (mask & 4)
    {
      gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
      type = build_pointer_type (build_pointer_type (type));
    }
  else if (by_ref)
    {
      type = build_pointer_type (type);
      if (base_pointers_restrict)
	type = build_qualified_type (type, TYPE_QUAL_RESTRICT);
    }
  else if ((mask & 3) == 1 && omp_is_reference (var))
    type = TREE_TYPE (type);

  field = build_decl (DECL_SOURCE_LOCATION (var),
		      FIELD_DECL, DECL_NAME (var), type);

  /* Remember what variable this field was created for.  This does have a
     side effect of making dwarf2out ignore this member, so for helpful
     debugging we clear it later in delete_omp_context.  */
  DECL_ABSTRACT_ORIGIN (field) = var;
  if (type == TREE_TYPE (var))
    {
      SET_DECL_ALIGN (field, DECL_ALIGN (var));
      DECL_USER_ALIGN (field) = DECL_USER_ALIGN (var);
      TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (var);
    }
  else
    SET_DECL_ALIGN (field, TYPE_ALIGN (type));

  if ((mask & 3) == 3)
    {
      insert_field_into_struct (ctx->record_type, field);
      if (ctx->srecord_type)
	{
	  sfield = build_decl (DECL_SOURCE_LOCATION (var),
			       FIELD_DECL, DECL_NAME (var), type);
	  DECL_ABSTRACT_ORIGIN (sfield) = var;
	  SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
	  DECL_USER_ALIGN (sfield) = DECL_USER_ALIGN (field);
	  TREE_THIS_VOLATILE (sfield) = TREE_THIS_VOLATILE (field);
	  insert_field_into_struct (ctx->srecord_type, sfield);
	}
    }
  else
    {
      if (ctx->srecord_type == NULL_TREE)
	{
	  tree t;

	  ctx->srecord_type = lang_hooks.types.make_type (RECORD_TYPE);
	  ctx->sfield_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
	  for (t = TYPE_FIELDS (ctx->record_type); t ; t = TREE_CHAIN (t))
	    {
	      sfield = build_decl (DECL_SOURCE_LOCATION (t),
				   FIELD_DECL, DECL_NAME (t), TREE_TYPE (t));
	      DECL_ABSTRACT_ORIGIN (sfield) = DECL_ABSTRACT_ORIGIN (t);
	      insert_field_into_struct (ctx->srecord_type, sfield);
	      splay_tree_insert (ctx->sfield_map,
				 (splay_tree_key) DECL_ABSTRACT_ORIGIN (t),
				 (splay_tree_value) sfield);
	    }
	}
      sfield = field;
      insert_field_into_struct ((mask & 1) ? ctx->record_type
				: ctx->srecord_type, field);
    }

  if (mask & 1)
    splay_tree_insert (ctx->field_map, key, (splay_tree_value) field);
  if ((mask & 2) && ctx->sfield_map)
    splay_tree_insert (ctx->sfield_map, key, (splay_tree_value) sfield);
}

static tree
install_var_local (tree var, omp_context *ctx)
{
  tree new_var = omp_copy_decl_1 (var, ctx);
  insert_decl_map (&ctx->cb, var, new_var);
  return new_var;
}

/* Adjust the replacement for DECL in CTX for the new context.  This means
   copying the DECL_VALUE_EXPR, and fixing up the type.  */

static void
fixup_remapped_decl (tree decl, omp_context *ctx, bool private_debug)
{
  tree new_decl, size;

  new_decl = lookup_decl (decl, ctx);

  TREE_TYPE (new_decl) = remap_type (TREE_TYPE (decl), &ctx->cb);

  if ((!TREE_CONSTANT (DECL_SIZE (new_decl)) || private_debug)
      && DECL_HAS_VALUE_EXPR_P (decl))
    {
      tree ve = DECL_VALUE_EXPR (decl);
      walk_tree (&ve, copy_tree_body_r, &ctx->cb, NULL);
      SET_DECL_VALUE_EXPR (new_decl, ve);
      DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
    }

  if (!TREE_CONSTANT (DECL_SIZE (new_decl)))
    {
      size = remap_decl (DECL_SIZE (decl), &ctx->cb);
      if (size == error_mark_node)
	size = TYPE_SIZE (TREE_TYPE (new_decl));
      DECL_SIZE (new_decl) = size;

      size = remap_decl (DECL_SIZE_UNIT (decl), &ctx->cb);
      if (size == error_mark_node)
	size = TYPE_SIZE_UNIT (TREE_TYPE (new_decl));
      DECL_SIZE_UNIT (new_decl) = size;
    }
}

/* The callback for remap_decl.  Search all containing contexts for a
   mapping of the variable; this avoids having to duplicate the splay
   tree ahead of time.  We know a mapping doesn't already exist in the
   given context.  Create new mappings to implement default semantics.  */

static tree
omp_copy_decl (tree var, copy_body_data *cb)
{
  omp_context *ctx = (omp_context *) cb;
  tree new_var;

  if (TREE_CODE (var) == LABEL_DECL)
    {
      if (FORCED_LABEL (var) || DECL_NONLOCAL (var))
	return var;
      new_var = create_artificial_label (DECL_SOURCE_LOCATION (var));
      DECL_CONTEXT (new_var) = current_function_decl;
      insert_decl_map (&ctx->cb, var, new_var);
      return new_var;
    }

  while (!is_taskreg_ctx (ctx))
    {
      ctx = ctx->outer;
      if (ctx == NULL)
	return var;
      new_var = maybe_lookup_decl (var, ctx);
      if (new_var)
	return new_var;
    }

  if (is_global_var (var) || decl_function_context (var) != ctx->cb.src_fn)
    return var;

  return error_mark_node;
}

/* Create a new context, with OUTER_CTX being the surrounding context.  */

static omp_context *
new_omp_context (gimple *stmt, omp_context *outer_ctx)
{
  omp_context *ctx = XCNEW (omp_context);

  splay_tree_insert (all_contexts, (splay_tree_key) stmt,
		     (splay_tree_value) ctx);
  ctx->stmt = stmt;

  if (outer_ctx)
    {
      ctx->outer = outer_ctx;
      ctx->cb = outer_ctx->cb;
      ctx->cb.block = NULL;
      ctx->depth = outer_ctx->depth + 1;
    }
  else
    {
      ctx->cb.src_fn = current_function_decl;
      ctx->cb.dst_fn = current_function_decl;
      ctx->cb.src_node = cgraph_node::get (current_function_decl);
      gcc_checking_assert (ctx->cb.src_node);
      ctx->cb.dst_node = ctx->cb.src_node;
      ctx->cb.src_cfun = cfun;
      ctx->cb.copy_decl = omp_copy_decl;
      ctx->cb.eh_lp_nr = 0;
      ctx->cb.transform_call_graph_edges = CB_CGE_MOVE;
      ctx->cb.dont_remap_vla_if_no_change = true;
      ctx->depth = 1;
    }

  ctx->cb.decl_map = new hash_map<tree, tree>;

  return ctx;
}

static gimple_seq maybe_catch_exception (gimple_seq);

/* Finalize task copyfn.  */

static void
finalize_task_copyfn (gomp_task *task_stmt)
{
  struct function *child_cfun;
  tree child_fn;
  gimple_seq seq = NULL, new_seq;
  gbind *bind;

  child_fn = gimple_omp_task_copy_fn (task_stmt);
  if (child_fn == NULL_TREE)
    return;

  child_cfun = DECL_STRUCT_FUNCTION (child_fn);
  DECL_STRUCT_FUNCTION (child_fn)->curr_properties = cfun->curr_properties;

  push_cfun (child_cfun);
  bind = gimplify_body (child_fn, false);
  gimple_seq_add_stmt (&seq, bind);
  new_seq = maybe_catch_exception (seq);
  if (new_seq != seq)
    {
      bind = gimple_build_bind (NULL, new_seq, NULL);
      seq = NULL;
      gimple_seq_add_stmt (&seq, bind);
    }
  gimple_set_body (child_fn, seq);
  pop_cfun ();

  /* Inform the callgraph about the new function.  */
  cgraph_node *node = cgraph_node::get_create (child_fn);
  node->parallelized_function = 1;
  cgraph_node::add_new_function (child_fn, false);
}

/* Destroy an omp_context data structure.  Called through the splay tree
   value delete callback.  */

static void
delete_omp_context (splay_tree_value value)
{
  omp_context *ctx = (omp_context *) value;

  delete ctx->cb.decl_map;

  if (ctx->field_map)
    splay_tree_delete (ctx->field_map);
  if (ctx->sfield_map)
    splay_tree_delete (ctx->sfield_map);

  /* We hijacked DECL_ABSTRACT_ORIGIN earlier.  We need to clear it before
     it produces corrupt debug information.  */
  if (ctx->record_type)
    {
      tree t;
      for (t = TYPE_FIELDS (ctx->record_type); t ; t = DECL_CHAIN (t))
	DECL_ABSTRACT_ORIGIN (t) = NULL;
    }
  if (ctx->srecord_type)
    {
      tree t;
      for (t = TYPE_FIELDS (ctx->srecord_type); t ; t = DECL_CHAIN (t))
	DECL_ABSTRACT_ORIGIN (t) = NULL;
    }

  if (is_task_ctx (ctx))
    finalize_task_copyfn (as_a <gomp_task *> (ctx->stmt));

  XDELETE (ctx);
}

/* Fix up RECEIVER_DECL with a type that has been remapped to the child
   context.  */

static void
fixup_child_record_type (omp_context *ctx)
{
  tree f, type = ctx->record_type;

  if (!ctx->receiver_decl)
    return;
  /* ??? It isn't sufficient to just call remap_type here, because
     variably_modified_type_p doesn't work the way we expect for
     record types.  Testing each field for whether it needs remapping
     and creating a new record by hand works, however.  */
  for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
    if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
      break;
  if (f)
    {
      tree name, new_fields = NULL;

      type = lang_hooks.types.make_type (RECORD_TYPE);
      name = DECL_NAME (TYPE_NAME (ctx->record_type));
      name = build_decl (DECL_SOURCE_LOCATION (ctx->receiver_decl),
			 TYPE_DECL, name, type);
      TYPE_NAME (type) = name;

      for (f = TYPE_FIELDS (ctx->record_type); f ; f = DECL_CHAIN (f))
	{
	  tree new_f = copy_node (f);
	  DECL_CONTEXT (new_f) = type;
	  TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &ctx->cb);
	  DECL_CHAIN (new_f) = new_fields;
	  walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &ctx->cb, NULL);
	  walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r,
		     &ctx->cb, NULL);
	  walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
		     &ctx->cb, NULL);
	  new_fields = new_f;

	  /* Arrange to be able to look up the receiver field
	     given the sender field.  */
	  splay_tree_insert (ctx->field_map, (splay_tree_key) f,
			     (splay_tree_value) new_f);
	}
      TYPE_FIELDS (type) = nreverse (new_fields);
      layout_type (type);
    }

  /* In a target region we never modify any of the pointers in *.omp_data_i,
     so attempt to help the optimizers.  */
  if (is_gimple_omp_offloaded (ctx->stmt))
    type = build_qualified_type (type, TYPE_QUAL_CONST);

  TREE_TYPE (ctx->receiver_decl)
    = build_qualified_type (build_reference_type (type), TYPE_QUAL_RESTRICT);
}

/* Instantiate decls as necessary in CTX to satisfy the data sharing
   specified by CLAUSES.  If BASE_POINTERS_RESTRICT, install var field with
   restrict.  */

static void
scan_sharing_clauses (tree clauses, omp_context *ctx,
		      bool base_pointers_restrict = false)
{
  tree c, decl;
  bool scan_array_reductions = false;

  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      bool by_ref;

      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_PRIVATE:
	  decl = OMP_CLAUSE_DECL (c);
	  if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
	    goto do_private;
	  else if (!is_variable_sized (decl))
	    install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_SHARED:
	  decl = OMP_CLAUSE_DECL (c);
	  /* Ignore shared directives in teams construct.  */
	  if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
	    {
	      /* Global variables don't need to be copied,
		 the receiver side will use them directly.  */
	      tree odecl = maybe_lookup_decl_in_outer_ctx (decl, ctx);
	      if (is_global_var (odecl))
		break;
	      insert_decl_map (&ctx->cb, decl, odecl);
	      break;
	    }
	  gcc_assert (is_taskreg_ctx (ctx));
	  gcc_assert (!COMPLETE_TYPE_P (TREE_TYPE (decl))
		      || !is_variable_sized (decl));
	  /* Global variables don't need to be copied,
	     the receiver side will use them directly.  */
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
	    break;
	  if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	    {
	      use_pointer_for_field (decl, ctx);
	      break;
	    }
	  by_ref = use_pointer_for_field (decl, NULL);
	  if ((! TREE_READONLY (decl) && !OMP_CLAUSE_SHARED_READONLY (c))
	      || TREE_ADDRESSABLE (decl)
	      || by_ref
	      || omp_is_reference (decl))
	    {
	      by_ref = use_pointer_for_field (decl, ctx);
	      install_var_field (decl, by_ref, 3, ctx);
	      install_var_local (decl, ctx);
	      break;
	    }
	  /* We don't need to copy const scalar vars back.  */
	  OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_FIRSTPRIVATE);
	  goto do_private;

	case OMP_CLAUSE_REDUCTION:
	  decl = OMP_CLAUSE_DECL (c);
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	      && TREE_CODE (decl) == MEM_REF)
	    {
	      tree t = TREE_OPERAND (decl, 0);
	      if (TREE_CODE (t) == POINTER_PLUS_EXPR)
		t = TREE_OPERAND (t, 0);
	      if (TREE_CODE (t) == INDIRECT_REF
		  || TREE_CODE (t) == ADDR_EXPR)
		t = TREE_OPERAND (t, 0);
	      install_var_local (t, ctx);
	      if (is_taskreg_ctx (ctx)
		  && !is_global_var (maybe_lookup_decl_in_outer_ctx (t, ctx))
		  && !is_variable_sized (t))
		{
		  by_ref = use_pointer_for_field (t, ctx);
		  install_var_field (t, by_ref, 3, ctx);
		}
	      break;
	    }
	  goto do_private;

	case OMP_CLAUSE_LASTPRIVATE:
	  /* Let the corresponding firstprivate clause create
	     the variable.  */
	  if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
	    break;
	  /* FALLTHRU */

	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_LINEAR:
	  decl = OMP_CLAUSE_DECL (c);
	do_private:
	  if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
	       || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
	      && is_gimple_omp_offloaded (ctx->stmt))
	    {
	      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
		install_var_field (decl, !omp_is_reference (decl), 3, ctx);
	      else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
		install_var_field (decl, true, 3, ctx);
	      else
		install_var_field (decl, false, 3, ctx);
	    }
	  if (is_variable_sized (decl))
	    {
	      if (is_task_ctx (ctx))
		install_var_field (decl, false, 1, ctx);
	      break;
	    }
	  else if (is_taskreg_ctx (ctx))
	    {
	      bool global
		= is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx));
	      by_ref = use_pointer_for_field (decl, NULL);

	      if (is_task_ctx (ctx)
		  && (global || by_ref || omp_is_reference (decl)))
		{
		  install_var_field (decl, false, 1, ctx);
		  if (!global)
		    install_var_field (decl, by_ref, 2, ctx);
		}
	      else if (!global)
		install_var_field (decl, by_ref, 3, ctx);
	    }
	  install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_USE_DEVICE_PTR:
	  decl = OMP_CLAUSE_DECL (c);
	  if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	    install_var_field (decl, true, 3, ctx);
	  else
	    install_var_field (decl, false, 3, ctx);
	  if (DECL_SIZE (decl)
	      && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
	    {
	      tree decl2 = DECL_VALUE_EXPR (decl);
	      gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
	      decl2 = TREE_OPERAND (decl2, 0);
	      gcc_assert (DECL_P (decl2));
	      install_var_local (decl2, ctx);
	    }
	  install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_IS_DEVICE_PTR:
	  decl = OMP_CLAUSE_DECL (c);
	  goto do_private;

	case OMP_CLAUSE__LOOPTEMP_:
	  gcc_assert (is_taskreg_ctx (ctx));
	  decl = OMP_CLAUSE_DECL (c);
	  install_var_field (decl, false, 3, ctx);
	  install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_COPYIN:
	  decl = OMP_CLAUSE_DECL (c);
	  by_ref = use_pointer_for_field (decl, NULL);
	  install_var_field (decl, by_ref, 3, ctx);
	  break;

	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_DIST_SCHEDULE:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE_PRIORITY:
	case OMP_CLAUSE_GRAINSIZE:
	case OMP_CLAUSE_NUM_TASKS:
	case OMP_CLAUSE_NUM_GANGS:
	case OMP_CLAUSE_NUM_WORKERS:
	case OMP_CLAUSE_VECTOR_LENGTH:
	  if (ctx->outer)
	    scan_omp_op (&OMP_CLAUSE_OPERAND (c, 0), ctx->outer);
	  break;

	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	case OMP_CLAUSE_MAP:
	  if (ctx->outer)
	    scan_omp_op (&OMP_CLAUSE_SIZE (c), ctx->outer);
	  decl = OMP_CLAUSE_DECL (c);
	  /* Global variables with the "omp declare target" attribute
	     don't need to be copied, the receiver side will use them
	     directly.  However, global variables with the "omp declare
	     target link" attribute need to be copied, and so do maps
	     using an ALWAYS modifier.  */
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && DECL_P (decl)
	      && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
		   && (OMP_CLAUSE_MAP_KIND (c)
		       != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
		  || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	      && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_TO
	      && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_FROM
	      && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_TOFROM
	      && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
	      && varpool_node::get_create (decl)->offloadable
	      && !lookup_attribute ("omp declare target link",
				    DECL_ATTRIBUTES (decl)))
	    break;
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER)
	    {
	      /* Ignore GOMP_MAP_POINTER kind for arrays in regions that are
		 not offloaded; there is nothing to map for those.  */
	      if (!is_gimple_omp_offloaded (ctx->stmt)
		  && !POINTER_TYPE_P (TREE_TYPE (decl))
		  && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
		break;
	    }
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
		  || (OMP_CLAUSE_MAP_KIND (c)
		      == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
	    {
	      if (TREE_CODE (decl) == COMPONENT_REF
		  || (TREE_CODE (decl) == INDIRECT_REF
		      && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
		      && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
			  == REFERENCE_TYPE)))
		break;
	      if (DECL_SIZE (decl)
		  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  install_var_local (decl2, ctx);
		}
	      install_var_local (decl, ctx);
	      break;
	    }
	  if (DECL_P (decl))
	    {
	      if (DECL_SIZE (decl)
		  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  install_var_field (decl2, true, 3, ctx);
		  install_var_local (decl2, ctx);
		  install_var_local (decl, ctx);
		}
	      else
		{
		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
		      && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
		      && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
		      && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
		    install_var_field (decl, true, 7, ctx);
		  else
		    install_var_field (decl, true, 3, ctx,
				       base_pointers_restrict);
		  if (is_gimple_omp_offloaded (ctx->stmt)
		      && !OMP_CLAUSE_MAP_IN_REDUCTION (c))
		    install_var_local (decl, ctx);
		}
	    }
	  else
	    {
	      tree base = get_base_address (decl);
	      tree nc = OMP_CLAUSE_CHAIN (c);
	      if (DECL_P (base)
		  && nc != NULL_TREE
		  && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
		  && OMP_CLAUSE_DECL (nc) == base
		  && OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_POINTER
		  && integer_zerop (OMP_CLAUSE_SIZE (nc)))
		{
		  OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c) = 1;
		  OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (nc) = 1;
		}
	      else
		{
		  if (ctx->outer)
		    {
		      scan_omp_op (&OMP_CLAUSE_DECL (c), ctx->outer);
		      decl = OMP_CLAUSE_DECL (c);
		    }
		  gcc_assert (!splay_tree_lookup (ctx->field_map,
						  (splay_tree_key) decl));
		  tree field
		    = build_decl (OMP_CLAUSE_LOCATION (c),
				  FIELD_DECL, NULL_TREE, ptr_type_node);
		  SET_DECL_ALIGN (field, TYPE_ALIGN (ptr_type_node));
		  insert_field_into_struct (ctx->record_type, field);
		  splay_tree_insert (ctx->field_map, (splay_tree_key) decl,
				     (splay_tree_value) field);
		}
	    }
	  break;

	case OMP_CLAUSE__GRIDDIM_:
	  if (ctx->outer)
	    {
	      scan_omp_op (&OMP_CLAUSE__GRIDDIM__SIZE (c), ctx->outer);
	      scan_omp_op (&OMP_CLAUSE__GRIDDIM__GROUP (c), ctx->outer);
	    }
	  break;

	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE_SIMDLEN:
	case OMP_CLAUSE_THREADS:
	case OMP_CLAUSE_SIMD:
	case OMP_CLAUSE_NOGROUP:
	case OMP_CLAUSE_DEFAULTMAP:
	case OMP_CLAUSE_ASYNC:
	case OMP_CLAUSE_WAIT:
	case OMP_CLAUSE_GANG:
	case OMP_CLAUSE_WORKER:
	case OMP_CLAUSE_VECTOR:
	case OMP_CLAUSE_INDEPENDENT:
	case OMP_CLAUSE_AUTO:
	case OMP_CLAUSE_SEQ:
	case OMP_CLAUSE_TILE:
	case OMP_CLAUSE__SIMT_:
	case OMP_CLAUSE_DEFAULT:
	  break;

	case OMP_CLAUSE_ALIGNED:
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_global_var (decl)
	      && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	    install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE__CACHE_:
	default:
	  gcc_unreachable ();
	}
    }

  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_LASTPRIVATE:
	  /* Let the corresponding firstprivate clause create
	     the variable.  */
	  if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
	    scan_array_reductions = true;
	  if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
	    break;
	  /* FALLTHRU */

	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_LINEAR:
	case OMP_CLAUSE_IS_DEVICE_PTR:
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_variable_sized (decl))
	    {
	      if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
		   || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
		  && is_gimple_omp_offloaded (ctx->stmt))
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  install_var_local (decl2, ctx);
		  fixup_remapped_decl (decl2, ctx, false);
		}
	      install_var_local (decl, ctx);
	    }
	  fixup_remapped_decl (decl, ctx,
			       OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
			       && OMP_CLAUSE_PRIVATE_DEBUG (c));
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
	      && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
	    scan_array_reductions = true;
	  break;

	case OMP_CLAUSE_REDUCTION:
	  decl = OMP_CLAUSE_DECL (c);
	  if (TREE_CODE (decl) != MEM_REF)
	    {
	      if (is_variable_sized (decl))
		install_var_local (decl, ctx);
	      fixup_remapped_decl (decl, ctx, false);
	    }
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	    scan_array_reductions = true;
	  break;

	case OMP_CLAUSE_SHARED:
	  /* Ignore shared directives in teams construct.  */
	  if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
	    break;
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
	    break;
	  if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	    {
	      if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl,
								 ctx->outer)))
		break;
	      bool by_ref = use_pointer_for_field (decl, ctx);
	      install_var_field (decl, by_ref, 11, ctx);
	      break;
	    }
	  fixup_remapped_decl (decl, ctx, false);
	  break;

	case OMP_CLAUSE_MAP:
	  if (!is_gimple_omp_offloaded (ctx->stmt))
	    break;
	  decl = OMP_CLAUSE_DECL (c);
	  if (DECL_P (decl)
	      && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
		   && (OMP_CLAUSE_MAP_KIND (c)
		       != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
		  || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	      && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
	      && varpool_node::get_create (decl)->offloadable)
	    break;
	  if (DECL_P (decl))
	    {
	      if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
		   || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
		  && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
		  && !COMPLETE_TYPE_P (TREE_TYPE (decl)))
		{
		  tree new_decl = lookup_decl (decl, ctx);
		  TREE_TYPE (new_decl)
		    = remap_type (TREE_TYPE (decl), &ctx->cb);
		}
	      else if (DECL_SIZE (decl)
		       && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  fixup_remapped_decl (decl2, ctx, false);
		  fixup_remapped_decl (decl, ctx, true);
		}
	      else
		fixup_remapped_decl (decl, ctx, false);
	    }
	  break;

	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_DIST_SCHEDULE:
	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE_SIMDLEN:
	case OMP_CLAUSE_ALIGNED:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE__LOOPTEMP_:
	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	case OMP_CLAUSE_PRIORITY:
	case OMP_CLAUSE_GRAINSIZE:
	case OMP_CLAUSE_NUM_TASKS:
	case OMP_CLAUSE_THREADS:
	case OMP_CLAUSE_SIMD:
	case OMP_CLAUSE_NOGROUP:
	case OMP_CLAUSE_DEFAULTMAP:
	case OMP_CLAUSE_USE_DEVICE_PTR:
	case OMP_CLAUSE_ASYNC:
	case OMP_CLAUSE_WAIT:
	case OMP_CLAUSE_NUM_GANGS:
	case OMP_CLAUSE_NUM_WORKERS:
	case OMP_CLAUSE_VECTOR_LENGTH:
	case OMP_CLAUSE_GANG:
	case OMP_CLAUSE_WORKER:
	case OMP_CLAUSE_VECTOR:
	case OMP_CLAUSE_INDEPENDENT:
	case OMP_CLAUSE_AUTO:
	case OMP_CLAUSE_SEQ:
	case OMP_CLAUSE_TILE:
	case OMP_CLAUSE__GRIDDIM_:
	case OMP_CLAUSE__SIMT_:
	  break;

	case OMP_CLAUSE__CACHE_:
	default:
	  gcc_unreachable ();
	}
    }

  gcc_checking_assert (!scan_array_reductions
		       || !is_gimple_omp_oacc (ctx->stmt));
  if (scan_array_reductions)
    {
      for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	    && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	  {
	    scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c), ctx);
	    scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
	  }
	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
		 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
	  scan_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
		 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
	  scan_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
    }
}

/* Create a new name for the omp child function.  Returns an identifier.  */

static tree
create_omp_child_function_name (bool task_copy)
{
  return clone_function_name (current_function_decl,
			      task_copy ? "_omp_cpyfn" : "_omp_fn");
}
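
/* For a function FOO, the names produced by the above are typically of
   the form FOO._omp_fn.N (or FOO._omp_cpyfn.N for task copy functions),
   with N a per-function counter appended by clone_function_name.  */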
1554 
1555 /* Return true if CTX may belong to offloaded code: either if current function
1556    is offloaded, or any enclosing context corresponds to a target region.  */
1557 
1558 static bool
omp_maybe_offloaded_ctx(omp_context * ctx)1559 omp_maybe_offloaded_ctx (omp_context *ctx)
1560 {
1561   if (cgraph_node::get (current_function_decl)->offloadable)
1562     return true;
1563   for (; ctx; ctx = ctx->outer)
1564     if (is_gimple_omp_offloaded (ctx->stmt))
1565       return true;
1566   return false;
1567 }
1568 
1569 /* Build a decl for the omp child function.  It'll not contain a body
1570    yet, just the bare decl.  */
1571 
1572 static void
create_omp_child_function(omp_context * ctx,bool task_copy)1573 create_omp_child_function (omp_context *ctx, bool task_copy)
1574 {
1575   tree decl, type, name, t;
1576 
1577   name = create_omp_child_function_name (task_copy);
1578   if (task_copy)
1579     type = build_function_type_list (void_type_node, ptr_type_node,
1580 				     ptr_type_node, NULL_TREE);
1581   else
1582     type = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);
1583 
1584   decl = build_decl (gimple_location (ctx->stmt), FUNCTION_DECL, name, type);
1585 
1586   gcc_checking_assert (!is_gimple_omp_oacc (ctx->stmt)
1587 		       || !task_copy);
1588   if (!task_copy)
1589     ctx->cb.dst_fn = decl;
1590   else
1591     gimple_omp_task_set_copy_fn (ctx->stmt, decl);
1592 
1593   TREE_STATIC (decl) = 1;
1594   TREE_USED (decl) = 1;
1595   DECL_ARTIFICIAL (decl) = 1;
1596   DECL_IGNORED_P (decl) = 0;
1597   TREE_PUBLIC (decl) = 0;
1598   DECL_UNINLINABLE (decl) = 1;
1599   DECL_EXTERNAL (decl) = 0;
1600   DECL_CONTEXT (decl) = NULL_TREE;
1601   DECL_INITIAL (decl) = make_node (BLOCK);
1602   BLOCK_SUPERCONTEXT (DECL_INITIAL (decl)) = decl;
1603   DECL_ATTRIBUTES (decl) = DECL_ATTRIBUTES (current_function_decl);
1604   /* Remove omp declare simd attribute from the new attributes.  */
1605   if (tree a = lookup_attribute ("omp declare simd", DECL_ATTRIBUTES (decl)))
1606     {
1607       while (tree a2 = lookup_attribute ("omp declare simd", TREE_CHAIN (a)))
1608 	a = a2;
1609       a = TREE_CHAIN (a);
1610       for (tree *p = &DECL_ATTRIBUTES (decl); *p != a;)
1611 	if (is_attribute_p ("omp declare simd", get_attribute_name (*p)))
1612 	  *p = TREE_CHAIN (*p);
1613 	else
1614 	  {
1615 	    tree chain = TREE_CHAIN (*p);
1616 	    *p = copy_node (*p);
1617 	    p = &TREE_CHAIN (*p);
1618 	    *p = chain;
1619 	  }
1620     }
1621   DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)
1622     = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (current_function_decl);
1623   DECL_FUNCTION_SPECIFIC_TARGET (decl)
1624     = DECL_FUNCTION_SPECIFIC_TARGET (current_function_decl);
1625   DECL_FUNCTION_VERSIONED (decl)
1626     = DECL_FUNCTION_VERSIONED (current_function_decl);
1627 
1628   if (omp_maybe_offloaded_ctx (ctx))
1629     {
1630       cgraph_node::get_create (decl)->offloadable = 1;
1631       if (ENABLE_OFFLOADING)
1632 	g->have_offload = true;
1633     }
1634 
1635   if (cgraph_node::get_create (decl)->offloadable
1636       && !lookup_attribute ("omp declare target",
1637                            DECL_ATTRIBUTES (current_function_decl)))
1638     {
1639       const char *target_attr = (is_gimple_omp_offloaded (ctx->stmt)
1640 				 ? "omp target entrypoint"
1641 				 : "omp declare target");
1642       DECL_ATTRIBUTES (decl)
1643 	= tree_cons (get_identifier (target_attr),
1644 		     NULL_TREE, DECL_ATTRIBUTES (decl));
1645     }
1646 
1647   t = build_decl (DECL_SOURCE_LOCATION (decl),
1648 		  RESULT_DECL, NULL_TREE, void_type_node);
1649   DECL_ARTIFICIAL (t) = 1;
1650   DECL_IGNORED_P (t) = 1;
1651   DECL_CONTEXT (t) = decl;
1652   DECL_RESULT (decl) = t;
1653 
1654   tree data_name = get_identifier (".omp_data_i");
1655   t = build_decl (DECL_SOURCE_LOCATION (decl), PARM_DECL, data_name,
1656 		  ptr_type_node);
1657   DECL_ARTIFICIAL (t) = 1;
1658   DECL_NAMELESS (t) = 1;
1659   DECL_ARG_TYPE (t) = ptr_type_node;
1660   DECL_CONTEXT (t) = current_function_decl;
1661   TREE_USED (t) = 1;
1662   TREE_READONLY (t) = 1;
1663   DECL_ARGUMENTS (decl) = t;
1664   if (!task_copy)
1665     ctx->receiver_decl = t;
1666   else
1667     {
1668       t = build_decl (DECL_SOURCE_LOCATION (decl),
1669 		      PARM_DECL, get_identifier (".omp_data_o"),
1670 		      ptr_type_node);
1671       DECL_ARTIFICIAL (t) = 1;
1672       DECL_NAMELESS (t) = 1;
1673       DECL_ARG_TYPE (t) = ptr_type_node;
1674       DECL_CONTEXT (t) = current_function_decl;
1675       TREE_USED (t) = 1;
1676       TREE_ADDRESSABLE (t) = 1;
1677       DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
1678       DECL_ARGUMENTS (decl) = t;
1679     }
1680 
1681   /* Allocate memory for the function structure.  The call to
1682      allocate_struct_function clobbers CFUN, so we need to restore
1683      it afterward.  */
1684   push_struct_function (decl);
1685   cfun->function_end_locus = gimple_location (ctx->stmt);
1686   init_tree_ssa (cfun);
1687   pop_cfun ();
1688 }
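
/* As a rough sketch (assuming a containing function "foo"), the decls
   built above amount to:

     void foo._omp_fn.0 (void *.omp_data_i);                        !task_copy
     void foo._omp_cpyfn.1 (void *.omp_data_o, void *.omp_data_i);  task_copy

   The receiver parameter is later retyped by fixup_child_record_type to
   point to the .omp_data_s record laid out for the construct.  */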
1689 
1690 /* Callback for walk_gimple_seq.  Check whether a combined parallel
1691    contains an OMP_FOR for which gimple_omp_for_combined_into_p is true.  */
1692 
1693 tree
1694 omp_find_combined_for (gimple_stmt_iterator *gsi_p,
1695 		       bool *handled_ops_p,
1696 		       struct walk_stmt_info *wi)
1697 {
1698   gimple *stmt = gsi_stmt (*gsi_p);
1699 
1700   *handled_ops_p = true;
1701   switch (gimple_code (stmt))
1702     {
1703     WALK_SUBSTMTS;
1704 
1705     case GIMPLE_OMP_FOR:
1706       if (gimple_omp_for_combined_into_p (stmt)
1707 	  && gimple_omp_for_kind (stmt)
1708 	     == *(const enum gf_mask *) (wi->info))
1709 	{
1710 	  wi->info = stmt;
1711 	  return integer_zero_node;
1712 	}
1713       break;
1714     default:
1715       break;
1716     }
1717   return NULL;
1718 }
1719 
1720 /* Add _LOOPTEMP_ clauses on OpenMP parallel or task.  */
1721 
1722 static void
1723 add_taskreg_looptemp_clauses (enum gf_mask msk, gimple *stmt,
1724 			      omp_context *outer_ctx)
1725 {
1726   struct walk_stmt_info wi;
1727 
1728   memset (&wi, 0, sizeof (wi));
1729   wi.val_only = true;
1730   wi.info = (void *) &msk;
1731   walk_gimple_seq (gimple_omp_body (stmt), omp_find_combined_for, NULL, &wi);
1732   if (wi.info != (void *) &msk)
1733     {
1734       gomp_for *for_stmt = as_a <gomp_for *> ((gimple *) wi.info);
1735       struct omp_for_data fd;
1736       omp_extract_for_data (for_stmt, &fd, NULL);
1737       /* We need two temporaries with fd.iter_type (istart/iend)
1738 	 and then (fd.collapse - 1) temporaries with the same
1739 	 type for count2 ... countN-1 vars if not constant.  */
1740       size_t count = 2, i;
1741       tree type = fd.iter_type;
1742       if (fd.collapse > 1
1743 	  && TREE_CODE (fd.loop.n2) != INTEGER_CST)
1744 	{
1745 	  count += fd.collapse - 1;
1746 	  /* If there are lastprivate clauses on the inner
1747 	     GIMPLE_OMP_FOR, add one more temporary for the total number
1748 	     of iterations (product of count1 ... countN-1).  */
1749 	  if (omp_find_clause (gimple_omp_for_clauses (for_stmt),
1750 			       OMP_CLAUSE_LASTPRIVATE))
1751 	    count++;
1752 	  else if (msk == GF_OMP_FOR_KIND_FOR
1753 		   && omp_find_clause (gimple_omp_parallel_clauses (stmt),
1754 				       OMP_CLAUSE_LASTPRIVATE))
1755 	    count++;
1756 	}
1757       for (i = 0; i < count; i++)
1758 	{
1759 	  tree temp = create_tmp_var (type);
1760 	  tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
1761 	  insert_decl_map (&outer_ctx->cb, temp, temp);
1762 	  OMP_CLAUSE_DECL (c) = temp;
1763 	  OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
1764 	  gimple_omp_taskreg_set_clauses (stmt, c);
1765 	}
1766     }
1767 }
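
/* Worked example (a sketch): for a combined construct such as

     #pragma omp parallel for collapse(2) lastprivate(x)

   with a non-constant outermost bound, the logic above adds
   2 (istart/iend) + (collapse - 1) + 1 (lastprivate) = 4 _looptemp_
   clauses to the enclosing parallel.  */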
1768 
1769 /* Scan an OpenMP parallel directive.  */
1770 
1771 static void
1772 scan_omp_parallel (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
1773 {
1774   omp_context *ctx;
1775   tree name;
1776   gomp_parallel *stmt = as_a <gomp_parallel *> (gsi_stmt (*gsi));
1777 
1778   /* Ignore parallel directives with empty bodies, unless there
1779      are copyin clauses.  */
1780   if (optimize > 0
1781       && empty_body_p (gimple_omp_body (stmt))
1782       && omp_find_clause (gimple_omp_parallel_clauses (stmt),
1783 			  OMP_CLAUSE_COPYIN) == NULL)
1784     {
1785       gsi_replace (gsi, gimple_build_nop (), false);
1786       return;
1787     }
1788 
1789   if (gimple_omp_parallel_combined_p (stmt))
1790     add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_FOR, stmt, outer_ctx);
1791 
1792   ctx = new_omp_context (stmt, outer_ctx);
1793   taskreg_contexts.safe_push (ctx);
1794   if (taskreg_nesting_level > 1)
1795     ctx->is_nested = true;
1796   ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
1797   ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
1798   name = create_tmp_var_name (".omp_data_s");
1799   name = build_decl (gimple_location (stmt),
1800 		     TYPE_DECL, name, ctx->record_type);
1801   DECL_ARTIFICIAL (name) = 1;
1802   DECL_NAMELESS (name) = 1;
1803   TYPE_NAME (ctx->record_type) = name;
1804   TYPE_ARTIFICIAL (ctx->record_type) = 1;
1805   if (!gimple_omp_parallel_grid_phony (stmt))
1806     {
1807       create_omp_child_function (ctx, false);
1808       gimple_omp_parallel_set_child_fn (stmt, ctx->cb.dst_fn);
1809     }
1810 
1811   scan_sharing_clauses (gimple_omp_parallel_clauses (stmt), ctx);
1812   scan_omp (gimple_omp_body_ptr (stmt), ctx);
1813 
1814   if (TYPE_FIELDS (ctx->record_type) == NULL)
1815     ctx->record_type = ctx->receiver_decl = NULL;
1816 }
1817 
1818 /* Scan an OpenMP task directive.  */
1819 
1820 static void
1821 scan_omp_task (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
1822 {
1823   omp_context *ctx;
1824   tree name, t;
1825   gomp_task *stmt = as_a <gomp_task *> (gsi_stmt (*gsi));
1826 
1827   /* Ignore task directives with empty bodies, unless they have a depend
1828      clause.  */
1829   if (optimize > 0
1830       && empty_body_p (gimple_omp_body (stmt))
1831       && !omp_find_clause (gimple_omp_task_clauses (stmt), OMP_CLAUSE_DEPEND))
1832     {
1833       gsi_replace (gsi, gimple_build_nop (), false);
1834       return;
1835     }
1836 
1837   if (gimple_omp_task_taskloop_p (stmt))
1838     add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_TASKLOOP, stmt, outer_ctx);
1839 
1840   ctx = new_omp_context (stmt, outer_ctx);
1841   taskreg_contexts.safe_push (ctx);
1842   if (taskreg_nesting_level > 1)
1843     ctx->is_nested = true;
1844   ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
1845   ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
1846   name = create_tmp_var_name (".omp_data_s");
1847   name = build_decl (gimple_location (stmt),
1848 		     TYPE_DECL, name, ctx->record_type);
1849   DECL_ARTIFICIAL (name) = 1;
1850   DECL_NAMELESS (name) = 1;
1851   TYPE_NAME (ctx->record_type) = name;
1852   TYPE_ARTIFICIAL (ctx->record_type) = 1;
1853   create_omp_child_function (ctx, false);
1854   gimple_omp_task_set_child_fn (stmt, ctx->cb.dst_fn);
1855 
1856   scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);
1857 
1858   if (ctx->srecord_type)
1859     {
1860       name = create_tmp_var_name (".omp_data_a");
1861       name = build_decl (gimple_location (stmt),
1862 			 TYPE_DECL, name, ctx->srecord_type);
1863       DECL_ARTIFICIAL (name) = 1;
1864       DECL_NAMELESS (name) = 1;
1865       TYPE_NAME (ctx->srecord_type) = name;
1866       TYPE_ARTIFICIAL (ctx->srecord_type) = 1;
1867       create_omp_child_function (ctx, true);
1868     }
1869 
1870   scan_omp (gimple_omp_body_ptr (stmt), ctx);
1871 
1872   if (TYPE_FIELDS (ctx->record_type) == NULL)
1873     {
1874       ctx->record_type = ctx->receiver_decl = NULL;
1875       t = build_int_cst (long_integer_type_node, 0);
1876       gimple_omp_task_set_arg_size (stmt, t);
1877       t = build_int_cst (long_integer_type_node, 1);
1878       gimple_omp_task_set_arg_align (stmt, t);
1879     }
1880 }
1881 
1882 /* Helper function for finish_taskreg_scan, called through walk_tree.
1883    If maybe_lookup_decl_in_outer_ctx returns a different decl for some
1884    tree, replace it in the expression.  */
1885 
1886 static tree
1887 finish_taskreg_remap (tree *tp, int *walk_subtrees, void *data)
1888 {
1889   if (VAR_P (*tp))
1890     {
1891       omp_context *ctx = (omp_context *) data;
1892       tree t = maybe_lookup_decl_in_outer_ctx (*tp, ctx);
1893       if (t != *tp)
1894 	{
1895 	  if (DECL_HAS_VALUE_EXPR_P (t))
1896 	    t = unshare_expr (DECL_VALUE_EXPR (t));
1897 	  *tp = t;
1898 	}
1899       *walk_subtrees = 0;
1900     }
1901   else if (IS_TYPE_OR_DECL_P (*tp))
1902     *walk_subtrees = 0;
1903   return NULL_TREE;
1904 }
1905 
1906 /* If any decls have been made addressable during scan_omp,
1907    adjust their fields if needed, and layout record types
1908    of parallel/task constructs.  */
1909 
1910 static void
1911 finish_taskreg_scan (omp_context *ctx)
1912 {
1913   if (ctx->record_type == NULL_TREE)
1914     return;
1915 
1916   /* If any task_shared_vars were needed, verify for all
1917      OMP_CLAUSE_SHARED clauses on GIMPLE_OMP_{PARALLEL,TASK}
1918      statements whether use_pointer_for_field has changed
1919      because of that.  If it did, update the field types now.  */
1920   if (task_shared_vars)
1921     {
1922       tree c;
1923 
1924       for (c = gimple_omp_taskreg_clauses (ctx->stmt);
1925 	   c; c = OMP_CLAUSE_CHAIN (c))
1926 	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
1927 	    && !OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
1928 	  {
1929 	    tree decl = OMP_CLAUSE_DECL (c);
1930 
1931 	    /* Global variables don't need to be copied,
1932 	       the receiver side will use them directly.  */
1933 	    if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1934 	      continue;
1935 	    if (!bitmap_bit_p (task_shared_vars, DECL_UID (decl))
1936 		|| !use_pointer_for_field (decl, ctx))
1937 	      continue;
1938 	    tree field = lookup_field (decl, ctx);
1939 	    if (TREE_CODE (TREE_TYPE (field)) == POINTER_TYPE
1940 		&& TREE_TYPE (TREE_TYPE (field)) == TREE_TYPE (decl))
1941 	      continue;
1942 	    TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
1943 	    TREE_THIS_VOLATILE (field) = 0;
1944 	    DECL_USER_ALIGN (field) = 0;
1945 	    SET_DECL_ALIGN (field, TYPE_ALIGN (TREE_TYPE (field)));
1946 	    if (TYPE_ALIGN (ctx->record_type) < DECL_ALIGN (field))
1947 	      SET_TYPE_ALIGN (ctx->record_type, DECL_ALIGN (field));
1948 	    if (ctx->srecord_type)
1949 	      {
1950 		tree sfield = lookup_sfield (decl, ctx);
1951 		TREE_TYPE (sfield) = TREE_TYPE (field);
1952 		TREE_THIS_VOLATILE (sfield) = 0;
1953 		DECL_USER_ALIGN (sfield) = 0;
1954 		SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
1955 		if (TYPE_ALIGN (ctx->srecord_type) < DECL_ALIGN (sfield))
1956 		  SET_TYPE_ALIGN (ctx->srecord_type, DECL_ALIGN (sfield));
1957 	      }
1958 	  }
1959     }
1960 
1961   if (gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL)
1962     {
1963       layout_type (ctx->record_type);
1964       fixup_child_record_type (ctx);
1965     }
1966   else
1967     {
1968       location_t loc = gimple_location (ctx->stmt);
1969       tree *p, vla_fields = NULL_TREE, *q = &vla_fields;
1970       /* Move VLA fields to the end.  */
1971       p = &TYPE_FIELDS (ctx->record_type);
1972       while (*p)
1973 	if (!TYPE_SIZE_UNIT (TREE_TYPE (*p))
1974 	    || ! TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (*p))))
1975 	  {
1976 	    *q = *p;
1977 	    *p = TREE_CHAIN (*p);
1978 	    TREE_CHAIN (*q) = NULL_TREE;
1979 	    q = &TREE_CHAIN (*q);
1980 	  }
1981 	else
1982 	  p = &DECL_CHAIN (*p);
1983       *p = vla_fields;
1984       if (gimple_omp_task_taskloop_p (ctx->stmt))
1985 	{
1986 	  /* Move the fields corresponding to the first and second _looptemp_
1987 	     clauses to the front.  These are filled in by GOMP_taskloop
1988 	     and thus need to be in specific positions.  */
1989 	  tree c1 = gimple_omp_task_clauses (ctx->stmt);
1990 	  c1 = omp_find_clause (c1, OMP_CLAUSE__LOOPTEMP_);
1991 	  tree c2 = omp_find_clause (OMP_CLAUSE_CHAIN (c1),
1992 				     OMP_CLAUSE__LOOPTEMP_);
1993 	  tree f1 = lookup_field (OMP_CLAUSE_DECL (c1), ctx);
1994 	  tree f2 = lookup_field (OMP_CLAUSE_DECL (c2), ctx);
1995 	  p = &TYPE_FIELDS (ctx->record_type);
1996 	  while (*p)
1997 	    if (*p == f1 || *p == f2)
1998 	      *p = DECL_CHAIN (*p);
1999 	    else
2000 	      p = &DECL_CHAIN (*p);
2001 	  DECL_CHAIN (f1) = f2;
2002 	  DECL_CHAIN (f2) = TYPE_FIELDS (ctx->record_type);
2003 	  TYPE_FIELDS (ctx->record_type) = f1;
2004 	  if (ctx->srecord_type)
2005 	    {
2006 	      f1 = lookup_sfield (OMP_CLAUSE_DECL (c1), ctx);
2007 	      f2 = lookup_sfield (OMP_CLAUSE_DECL (c2), ctx);
2008 	      p = &TYPE_FIELDS (ctx->srecord_type);
2009 	      while (*p)
2010 		if (*p == f1 || *p == f2)
2011 		  *p = DECL_CHAIN (*p);
2012 		else
2013 		  p = &DECL_CHAIN (*p);
2014 	      DECL_CHAIN (f1) = f2;
2015 	      DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
2016 	      TYPE_FIELDS (ctx->srecord_type) = f1;
2017 	    }
2018 	}
2019       layout_type (ctx->record_type);
2020       fixup_child_record_type (ctx);
2021       if (ctx->srecord_type)
2022 	layout_type (ctx->srecord_type);
2023       tree t = fold_convert_loc (loc, long_integer_type_node,
2024 				 TYPE_SIZE_UNIT (ctx->record_type));
2025       if (TREE_CODE (t) != INTEGER_CST)
2026 	{
2027 	  t = unshare_expr (t);
2028 	  walk_tree (&t, finish_taskreg_remap, ctx, NULL);
2029 	}
2030       gimple_omp_task_set_arg_size (ctx->stmt, t);
2031       t = build_int_cst (long_integer_type_node,
2032 			 TYPE_ALIGN_UNIT (ctx->record_type));
2033       gimple_omp_task_set_arg_align (ctx->stmt, t);
2034     }
2035 }
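
/* Resulting layout (a sketch): for a taskloop the record built above ends
   up roughly as

     struct .omp_data_s { <looptemp1>; <looptemp2>; <fixed-size ...>; <VLA ...>; };

   with the two _looptemp_ fields first, because GOMP_taskloop stores the
   start/end iteration values there, and VLA fields moved to the end.  */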
2036 
2037 /* Find the enclosing offload context.  */
2038 
2039 static omp_context *
2040 enclosing_target_ctx (omp_context *ctx)
2041 {
2042   for (; ctx; ctx = ctx->outer)
2043     if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET)
2044       break;
2045 
2046   return ctx;
2047 }
2048 
2049 /* Return true if CTX is part of an oacc kernels region.  */
2050 
2051 static bool
2052 ctx_in_oacc_kernels_region (omp_context *ctx)
2053 {
2054   for (; ctx != NULL; ctx = ctx->outer)
2055     {
2056       gimple *stmt = ctx->stmt;
2057       if (gimple_code (stmt) == GIMPLE_OMP_TARGET
2058 	  && gimple_omp_target_kind (stmt) == GF_OMP_TARGET_KIND_OACC_KERNELS)
2059 	return true;
2060     }
2061 
2062   return false;
2063 }
2064 
2065 /* Check the parallelism clauses inside a kernels region.
2066    Until kernels handling moves to use the same loop indirection
2067    scheme as parallel, we need to do this checking early.  */
2068 
2069 static unsigned
2070 check_oacc_kernel_gwv (gomp_for *stmt, omp_context *ctx)
2071 {
2072   bool checking = true;
2073   unsigned outer_mask = 0;
2074   unsigned this_mask = 0;
2075   bool has_seq = false, has_auto = false;
2076 
2077   if (ctx->outer)
2078     outer_mask = check_oacc_kernel_gwv (NULL, ctx->outer);
2079   if (!stmt)
2080     {
2081       checking = false;
2082       if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR)
2083 	return outer_mask;
2084       stmt = as_a <gomp_for *> (ctx->stmt);
2085     }
2086 
2087   for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2088     {
2089       switch (OMP_CLAUSE_CODE (c))
2090 	{
2091 	case OMP_CLAUSE_GANG:
2092 	  this_mask |= GOMP_DIM_MASK (GOMP_DIM_GANG);
2093 	  break;
2094 	case OMP_CLAUSE_WORKER:
2095 	  this_mask |= GOMP_DIM_MASK (GOMP_DIM_WORKER);
2096 	  break;
2097 	case OMP_CLAUSE_VECTOR:
2098 	  this_mask |= GOMP_DIM_MASK (GOMP_DIM_VECTOR);
2099 	  break;
2100 	case OMP_CLAUSE_SEQ:
2101 	  has_seq = true;
2102 	  break;
2103 	case OMP_CLAUSE_AUTO:
2104 	  has_auto = true;
2105 	  break;
2106 	default:
2107 	  break;
2108 	}
2109     }
2110 
2111   if (checking)
2112     {
2113       if (has_seq && (this_mask || has_auto))
2114 	error_at (gimple_location (stmt), "%<seq%> overrides other"
2115 		  " OpenACC loop specifiers");
2116       else if (has_auto && this_mask)
2117 	error_at (gimple_location (stmt), "%<auto%> conflicts with other"
2118 		  " OpenACC loop specifiers");
2119 
2120       if (this_mask & outer_mask)
2121 	error_at (gimple_location (stmt), "inner loop uses same"
2122 		  " OpenACC parallelism as containing loop");
2123     }
2124 
2125   return outer_mask | this_mask;
2126 }
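
/* For example (a sketch), inside a kernels region the checks above reject
   repeating a parallelism level on nested loops:

     #pragma acc loop gang
     for (...)
       #pragma acc loop gang
       for (...)
	 ...

   Here the inner loop triggers "inner loop uses same OpenACC parallelism
   as containing loop".  */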
2127 
2128 /* Scan a GIMPLE_OMP_FOR.  */
2129 
2130 static omp_context *
2131 scan_omp_for (gomp_for *stmt, omp_context *outer_ctx)
2132 {
2133   omp_context *ctx;
2134   size_t i;
2135   tree clauses = gimple_omp_for_clauses (stmt);
2136 
2137   ctx = new_omp_context (stmt, outer_ctx);
2138 
2139   if (is_gimple_omp_oacc (stmt))
2140     {
2141       omp_context *tgt = enclosing_target_ctx (outer_ctx);
2142 
2143       if (!tgt || is_oacc_parallel (tgt))
2144 	for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
2145 	  {
2146 	    char const *check = NULL;
2147 
2148 	    switch (OMP_CLAUSE_CODE (c))
2149 	      {
2150 	      case OMP_CLAUSE_GANG:
2151 		check = "gang";
2152 		break;
2153 
2154 	      case OMP_CLAUSE_WORKER:
2155 		check = "worker";
2156 		break;
2157 
2158 	      case OMP_CLAUSE_VECTOR:
2159 		check = "vector";
2160 		break;
2161 
2162 	      default:
2163 		break;
2164 	      }
2165 
2166 	    if (check && OMP_CLAUSE_OPERAND (c, 0))
2167 	      error_at (gimple_location (stmt),
2168 			"argument not permitted on %qs clause in"
2169 			" OpenACC %<parallel%>", check);
2170 	  }
2171 
2172       if (tgt && is_oacc_kernels (tgt))
2173 	{
2174 	  /* Strip out reductions, as they are not handled yet.  */
2175 	  tree *prev_ptr = &clauses;
2176 
2177 	  while (tree probe = *prev_ptr)
2178 	    {
2179 	      tree *next_ptr = &OMP_CLAUSE_CHAIN (probe);
2180 
2181 	      if (OMP_CLAUSE_CODE (probe) == OMP_CLAUSE_REDUCTION)
2182 		*prev_ptr = *next_ptr;
2183 	      else
2184 		prev_ptr = next_ptr;
2185 	    }
2186 
2187 	  gimple_omp_for_set_clauses (stmt, clauses);
2188 	  check_oacc_kernel_gwv (stmt, ctx);
2189 	}
2190     }
2191 
2192   scan_sharing_clauses (clauses, ctx);
2193 
2194   scan_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
2195   for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
2196     {
2197       scan_omp_op (gimple_omp_for_index_ptr (stmt, i), ctx);
2198       scan_omp_op (gimple_omp_for_initial_ptr (stmt, i), ctx);
2199       scan_omp_op (gimple_omp_for_final_ptr (stmt, i), ctx);
2200       scan_omp_op (gimple_omp_for_incr_ptr (stmt, i), ctx);
2201     }
2202   scan_omp (gimple_omp_body_ptr (stmt), ctx);
2203   return ctx;
2204 }
2205 
2206 /* Duplicate #pragma omp simd: one copy for SIMT, another one for SIMD.  */
2207 
2208 static void
2209 scan_omp_simd (gimple_stmt_iterator *gsi, gomp_for *stmt,
2210 	       omp_context *outer_ctx)
2211 {
2212   gbind *bind = gimple_build_bind (NULL, NULL, NULL);
2213   gsi_replace (gsi, bind, false);
2214   gimple_seq seq = NULL;
2215   gimple *g = gimple_build_call_internal (IFN_GOMP_USE_SIMT, 0);
2216   tree cond = create_tmp_var_raw (integer_type_node);
2217   DECL_CONTEXT (cond) = current_function_decl;
2218   DECL_SEEN_IN_BIND_EXPR_P (cond) = 1;
2219   gimple_bind_set_vars (bind, cond);
2220   gimple_call_set_lhs (g, cond);
2221   gimple_seq_add_stmt (&seq, g);
2222   tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
2223   tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
2224   tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
2225   g = gimple_build_cond (NE_EXPR, cond, integer_zero_node, lab1, lab2);
2226   gimple_seq_add_stmt (&seq, g);
2227   g = gimple_build_label (lab1);
2228   gimple_seq_add_stmt (&seq, g);
2229   gimple_seq new_seq = copy_gimple_seq_and_replace_locals (stmt);
2230   gomp_for *new_stmt = as_a <gomp_for *> (new_seq);
2231   tree clause = build_omp_clause (gimple_location (stmt), OMP_CLAUSE__SIMT_);
2232   OMP_CLAUSE_CHAIN (clause) = gimple_omp_for_clauses (new_stmt);
2233   gimple_omp_for_set_clauses (new_stmt, clause);
2234   gimple_seq_add_stmt (&seq, new_stmt);
2235   g = gimple_build_goto (lab3);
2236   gimple_seq_add_stmt (&seq, g);
2237   g = gimple_build_label (lab2);
2238   gimple_seq_add_stmt (&seq, g);
2239   gimple_seq_add_stmt (&seq, stmt);
2240   g = gimple_build_label (lab3);
2241   gimple_seq_add_stmt (&seq, g);
2242   gimple_bind_set_body (bind, seq);
2243   update_stmt (bind);
2244   scan_omp_for (new_stmt, outer_ctx);
2245   scan_omp_for (stmt, outer_ctx)->simt_stmt = new_stmt;
2246 }
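
/* The dispatch generated above looks roughly like (a sketch):

     cond = IFN_GOMP_USE_SIMT ();
     if (cond != 0) goto lab1; else goto lab2;
     lab1: <copy of the simd loop, with an extra _simt_ clause>
	   goto lab3;
     lab2: <original simd loop>
     lab3:

   so a single source loop yields both a SIMT and a SIMD variant, and the
   selection is resolved once the offload target is known.  */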
2247 
2248 /* Scan an OpenMP sections directive.  */
2249 
2250 static void
2251 scan_omp_sections (gomp_sections *stmt, omp_context *outer_ctx)
2252 {
2253   omp_context *ctx;
2254 
2255   ctx = new_omp_context (stmt, outer_ctx);
2256   scan_sharing_clauses (gimple_omp_sections_clauses (stmt), ctx);
2257   scan_omp (gimple_omp_body_ptr (stmt), ctx);
2258 }
2259 
2260 /* Scan an OpenMP single directive.  */
2261 
2262 static void
2263 scan_omp_single (gomp_single *stmt, omp_context *outer_ctx)
2264 {
2265   omp_context *ctx;
2266   tree name;
2267 
2268   ctx = new_omp_context (stmt, outer_ctx);
2269   ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2270   ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2271   name = create_tmp_var_name (".omp_copy_s");
2272   name = build_decl (gimple_location (stmt),
2273 		     TYPE_DECL, name, ctx->record_type);
2274   TYPE_NAME (ctx->record_type) = name;
2275 
2276   scan_sharing_clauses (gimple_omp_single_clauses (stmt), ctx);
2277   scan_omp (gimple_omp_body_ptr (stmt), ctx);
2278 
2279   if (TYPE_FIELDS (ctx->record_type) == NULL)
2280     ctx->record_type = NULL;
2281   else
2282     layout_type (ctx->record_type);
2283 }
2284 
2285 /* Return true if the CLAUSES of an omp target guarantee that the base pointers
2286    used in the corresponding offloaded function are restrict.  */
2287 
2288 static bool
2289 omp_target_base_pointers_restrict_p (tree clauses)
2290 {
2291   /* The analysis relies on the GOMP_MAP_FORCE_* mapping kinds, which are only
2292      used by OpenACC.  */
2293   if (flag_openacc == 0)
2294     return false;
2295 
2296   /* I.  Basic example:
2297 
2298        void foo (void)
2299        {
2300 	 unsigned int a[2], b[2];
2301 
2302 	 #pragma acc kernels \
2303 	   copyout (a) \
2304 	   copyout (b)
2305 	 {
2306 	   a[0] = 0;
2307 	   b[0] = 1;
2308 	 }
2309        }
2310 
2311      After gimplification, we have:
2312 
2313        #pragma omp target oacc_kernels \
2314 	 map(force_from:a [len: 8]) \
2315 	 map(force_from:b [len: 8])
2316        {
2317 	 a[0] = 0;
2318 	 b[0] = 1;
2319        }
2320 
2321      Because both mappings have the force prefix, we know that they will be
2322      allocated when calling the corresponding offloaded function, which means we
2323      can mark the base pointers for a and b in the offloaded function as
2324      restrict.  */
2325 
2326   tree c;
2327   for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
2328     {
2329       if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP)
2330 	return false;
2331 
2332       switch (OMP_CLAUSE_MAP_KIND (c))
2333 	{
2334 	case GOMP_MAP_FORCE_ALLOC:
2335 	case GOMP_MAP_FORCE_TO:
2336 	case GOMP_MAP_FORCE_FROM:
2337 	case GOMP_MAP_FORCE_TOFROM:
2338 	  break;
2339 	default:
2340 	  return false;
2341 	}
2342     }
2343 
2344   return true;
2345 }
2346 
2347 /* Scan a GIMPLE_OMP_TARGET.  */
2348 
2349 static void
2350 scan_omp_target (gomp_target *stmt, omp_context *outer_ctx)
2351 {
2352   omp_context *ctx;
2353   tree name;
2354   bool offloaded = is_gimple_omp_offloaded (stmt);
2355   tree clauses = gimple_omp_target_clauses (stmt);
2356 
2357   ctx = new_omp_context (stmt, outer_ctx);
2358   ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2359   ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2360   name = create_tmp_var_name (".omp_data_t");
2361   name = build_decl (gimple_location (stmt),
2362 		     TYPE_DECL, name, ctx->record_type);
2363   DECL_ARTIFICIAL (name) = 1;
2364   DECL_NAMELESS (name) = 1;
2365   TYPE_NAME (ctx->record_type) = name;
2366   TYPE_ARTIFICIAL (ctx->record_type) = 1;
2367 
2368   bool base_pointers_restrict = false;
2369   if (offloaded)
2370     {
2371       create_omp_child_function (ctx, false);
2372       gimple_omp_target_set_child_fn (stmt, ctx->cb.dst_fn);
2373 
2374       base_pointers_restrict = omp_target_base_pointers_restrict_p (clauses);
2375       if (base_pointers_restrict
2376 	  && dump_file && (dump_flags & TDF_DETAILS))
2377 	fprintf (dump_file,
2378 		 "Base pointers in offloaded function are restrict\n");
2379     }
2380 
2381   scan_sharing_clauses (clauses, ctx, base_pointers_restrict);
2382   scan_omp (gimple_omp_body_ptr (stmt), ctx);
2383 
2384   if (TYPE_FIELDS (ctx->record_type) == NULL)
2385     ctx->record_type = ctx->receiver_decl = NULL;
2386   else
2387     {
2388       TYPE_FIELDS (ctx->record_type)
2389 	= nreverse (TYPE_FIELDS (ctx->record_type));
2390       if (flag_checking)
2391 	{
2392 	  unsigned int align = DECL_ALIGN (TYPE_FIELDS (ctx->record_type));
2393 	  for (tree field = TYPE_FIELDS (ctx->record_type);
2394 	       field;
2395 	       field = DECL_CHAIN (field))
2396 	    gcc_assert (DECL_ALIGN (field) == align);
2397 	}
2398       layout_type (ctx->record_type);
2399       if (offloaded)
2400 	fixup_child_record_type (ctx);
2401     }
2402 }
2403 
2404 /* Scan an OpenMP teams directive.  */
2405 
2406 static void
2407 scan_omp_teams (gomp_teams *stmt, omp_context *outer_ctx)
2408 {
2409   omp_context *ctx = new_omp_context (stmt, outer_ctx);
2410   scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
2411   scan_omp (gimple_omp_body_ptr (stmt), ctx);
2412 }
2413 
2414 /* Check nesting restrictions.  */
2415 static bool
2416 check_omp_nesting_restrictions (gimple *stmt, omp_context *ctx)
2417 {
2418   tree c;
2419 
2420   if (ctx && gimple_code (ctx->stmt) == GIMPLE_OMP_GRID_BODY)
2421     /* GRID_BODY is an artificial construct, nesting rules will be checked in
2422        the original copy of its contents.  */
2423     return true;
2424 
2425   /* No nesting of non-OpenACC STMT (that is, an OpenMP one, or a GOMP builtin)
2426      inside an OpenACC CTX.  */
2427   if (!(is_gimple_omp (stmt)
2428 	&& is_gimple_omp_oacc (stmt))
2429       /* Except for atomic codes that we share with OpenMP.  */
2430       && !(gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
2431 	   || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE))
2432     {
2433       if (oacc_get_fn_attrib (cfun->decl) != NULL)
2434 	{
2435 	  error_at (gimple_location (stmt),
2436 		    "non-OpenACC construct inside of OpenACC routine");
2437 	  return false;
2438 	}
2439       else
2440 	for (omp_context *octx = ctx; octx != NULL; octx = octx->outer)
2441 	  if (is_gimple_omp (octx->stmt)
2442 	      && is_gimple_omp_oacc (octx->stmt))
2443 	    {
2444 	      error_at (gimple_location (stmt),
2445 			"non-OpenACC construct inside of OpenACC region");
2446 	      return false;
2447 	    }
2448     }
2449 
2450   if (ctx != NULL)
2451     {
2452       if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
2453 	  && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
2454 	{
2455 	  c = NULL_TREE;
2456 	  if (gimple_code (stmt) == GIMPLE_OMP_ORDERED)
2457 	    {
2458 	      c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
2459 	      if (omp_find_clause (c, OMP_CLAUSE_SIMD))
2460 		{
2461 		  if (omp_find_clause (c, OMP_CLAUSE_THREADS)
2462 		      && (ctx->outer == NULL
2463 			  || !gimple_omp_for_combined_into_p (ctx->stmt)
2464 			  || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR
2465 			  || (gimple_omp_for_kind (ctx->outer->stmt)
2466 			      != GF_OMP_FOR_KIND_FOR)
2467 			  || !gimple_omp_for_combined_p (ctx->outer->stmt)))
2468 		    {
2469 		      error_at (gimple_location (stmt),
2470 				"%<ordered simd threads%> must be closely "
2471 				"nested inside of %<for simd%> region");
2472 		      return false;
2473 		    }
2474 		  return true;
2475 		}
2476 	    }
2477 	  error_at (gimple_location (stmt),
2478 		    "OpenMP constructs other than %<#pragma omp ordered simd%>"
2479 		    " may not be nested inside %<simd%> region");
2480 	  return false;
2481 	}
2482       else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
2483 	{
2484 	  if ((gimple_code (stmt) != GIMPLE_OMP_FOR
2485 	       || ((gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_DISTRIBUTE)
2486 		   && (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_GRID_LOOP)))
2487 	      && gimple_code (stmt) != GIMPLE_OMP_PARALLEL)
2488 	    {
2489 	      error_at (gimple_location (stmt),
2490 			"only %<distribute%> or %<parallel%> regions are "
2491 			"allowed to be strictly nested inside %<teams%> "
2492 			"region");
2493 	      return false;
2494 	    }
2495 	}
2496     }
2497   switch (gimple_code (stmt))
2498     {
2499     case GIMPLE_OMP_FOR:
2500       if (gimple_omp_for_kind (stmt) & GF_OMP_FOR_SIMD)
2501 	return true;
2502       if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_DISTRIBUTE)
2503 	{
2504 	  if (ctx != NULL && gimple_code (ctx->stmt) != GIMPLE_OMP_TEAMS)
2505 	    {
2506 	      error_at (gimple_location (stmt),
2507 			"%<distribute%> region must be strictly nested "
2508 			"inside %<teams%> construct");
2509 	      return false;
2510 	    }
2511 	  return true;
2512 	}
2513       /* We split a taskloop into a task with a nested taskloop in it.  */
2514       if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP)
2515 	return true;
2516       if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_OACC_LOOP)
2517 	{
2518 	  bool ok = false;
2519 
2520 	  if (ctx)
2521 	    switch (gimple_code (ctx->stmt))
2522 	      {
2523 	      case GIMPLE_OMP_FOR:
2524 		ok = (gimple_omp_for_kind (ctx->stmt)
2525 		      == GF_OMP_FOR_KIND_OACC_LOOP);
2526 		break;
2527 
2528 	      case GIMPLE_OMP_TARGET:
2529 		switch (gimple_omp_target_kind (ctx->stmt))
2530 		  {
2531 		  case GF_OMP_TARGET_KIND_OACC_PARALLEL:
2532 		  case GF_OMP_TARGET_KIND_OACC_KERNELS:
2533 		    ok = true;
2534 		    break;
2535 
2536 		  default:
2537 		    break;
2538 		  }
2539 
2540 	      default:
2541 		break;
2542 	      }
2543 	  else if (oacc_get_fn_attrib (current_function_decl))
2544 	    ok = true;
2545 	  if (!ok)
2546 	    {
2547 	      error_at (gimple_location (stmt),
2548 			"OpenACC loop directive must be associated with"
2549 			" an OpenACC compute region");
2550 	      return false;
2551 	    }
2552 	}
2553       /* FALLTHRU */
2554     case GIMPLE_CALL:
2555       if (is_gimple_call (stmt)
2556 	  && (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2557 	      == BUILT_IN_GOMP_CANCEL
2558 	      || DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2559 		 == BUILT_IN_GOMP_CANCELLATION_POINT))
2560 	{
2561 	  const char *bad = NULL;
2562 	  const char *kind = NULL;
2563 	  const char *construct
2564 	    = (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2565 	       == BUILT_IN_GOMP_CANCEL)
2566 	      ? "#pragma omp cancel"
2567 	      : "#pragma omp cancellation point";
2568 	  if (ctx == NULL)
2569 	    {
2570 	      error_at (gimple_location (stmt), "orphaned %qs construct",
2571 			construct);
2572 	      return false;
2573 	    }
2574 	  switch (tree_fits_shwi_p (gimple_call_arg (stmt, 0))
2575 		  ? tree_to_shwi (gimple_call_arg (stmt, 0))
2576 		  : 0)
2577 	    {
2578 	    case 1:
2579 	      if (gimple_code (ctx->stmt) != GIMPLE_OMP_PARALLEL)
2580 		bad = "#pragma omp parallel";
2581 	      else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2582 		       == BUILT_IN_GOMP_CANCEL
2583 		       && !integer_zerop (gimple_call_arg (stmt, 1)))
2584 		ctx->cancellable = true;
2585 	      kind = "parallel";
2586 	      break;
2587 	    case 2:
2588 	      if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
2589 		  || gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR)
2590 		bad = "#pragma omp for";
2591 	      else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2592 		       == BUILT_IN_GOMP_CANCEL
2593 		       && !integer_zerop (gimple_call_arg (stmt, 1)))
2594 		{
2595 		  ctx->cancellable = true;
2596 		  if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2597 				       OMP_CLAUSE_NOWAIT))
2598 		    warning_at (gimple_location (stmt), 0,
2599 				"%<#pragma omp cancel for%> inside "
2600 				"%<nowait%> for construct");
2601 		  if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2602 				       OMP_CLAUSE_ORDERED))
2603 		    warning_at (gimple_location (stmt), 0,
2604 				"%<#pragma omp cancel for%> inside "
2605 				"%<ordered%> for construct");
2606 		}
2607 	      kind = "for";
2608 	      break;
2609 	    case 4:
2610 	      if (gimple_code (ctx->stmt) != GIMPLE_OMP_SECTIONS
2611 		  && gimple_code (ctx->stmt) != GIMPLE_OMP_SECTION)
2612 		bad = "#pragma omp sections";
2613 	      else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2614 		       == BUILT_IN_GOMP_CANCEL
2615 		       && !integer_zerop (gimple_call_arg (stmt, 1)))
2616 		{
2617 		  if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
2618 		    {
2619 		      ctx->cancellable = true;
2620 		      if (omp_find_clause (gimple_omp_sections_clauses
2621 								(ctx->stmt),
2622 					   OMP_CLAUSE_NOWAIT))
2623 			warning_at (gimple_location (stmt), 0,
2624 				    "%<#pragma omp cancel sections%> inside "
2625 				    "%<nowait%> sections construct");
2626 		    }
2627 		  else
2628 		    {
2629 		      gcc_assert (ctx->outer
2630 				  && gimple_code (ctx->outer->stmt)
2631 				     == GIMPLE_OMP_SECTIONS);
2632 		      ctx->outer->cancellable = true;
2633 		      if (omp_find_clause (gimple_omp_sections_clauses
2634 							(ctx->outer->stmt),
2635 					   OMP_CLAUSE_NOWAIT))
2636 			warning_at (gimple_location (stmt), 0,
2637 				    "%<#pragma omp cancel sections%> inside "
2638 				    "%<nowait%> sections construct");
2639 		    }
2640 		}
2641 	      kind = "sections";
2642 	      break;
2643 	    case 8:
2644 	      if (gimple_code (ctx->stmt) != GIMPLE_OMP_TASK)
2645 		bad = "#pragma omp task";
2646 	      else
2647 		{
2648 		  for (omp_context *octx = ctx->outer;
2649 		       octx; octx = octx->outer)
2650 		    {
2651 		      switch (gimple_code (octx->stmt))
2652 			{
2653 			case GIMPLE_OMP_TASKGROUP:
2654 			  break;
2655 			case GIMPLE_OMP_TARGET:
2656 			  if (gimple_omp_target_kind (octx->stmt)
2657 			      != GF_OMP_TARGET_KIND_REGION)
2658 			    continue;
2659 			  /* FALLTHRU */
2660 			case GIMPLE_OMP_PARALLEL:
2661 			case GIMPLE_OMP_TEAMS:
2662 			  error_at (gimple_location (stmt),
2663 				    "%<%s taskgroup%> construct not closely "
2664 				    "nested inside of %<taskgroup%> region",
2665 				    construct);
2666 			  return false;
2667 			default:
2668 			  continue;
2669 			}
2670 		      break;
2671 		    }
2672 		  ctx->cancellable = true;
2673 		}
2674 	      kind = "taskgroup";
2675 	      break;
2676 	    default:
2677 	      error_at (gimple_location (stmt), "invalid arguments");
2678 	      return false;
2679 	    }
2680 	  if (bad)
2681 	    {
2682 	      error_at (gimple_location (stmt),
2683 			"%<%s %s%> construct not closely nested inside of %qs",
2684 			construct, kind, bad);
2685 	      return false;
2686 	    }
2687 	}
2688       /* FALLTHRU */
2689     case GIMPLE_OMP_SECTIONS:
2690     case GIMPLE_OMP_SINGLE:
2691       for (; ctx != NULL; ctx = ctx->outer)
2692 	switch (gimple_code (ctx->stmt))
2693 	  {
2694 	  case GIMPLE_OMP_FOR:
2695 	    if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
2696 		&& gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
2697 	      break;
2698 	    /* FALLTHRU */
2699 	  case GIMPLE_OMP_SECTIONS:
2700 	  case GIMPLE_OMP_SINGLE:
2701 	  case GIMPLE_OMP_ORDERED:
2702 	  case GIMPLE_OMP_MASTER:
2703 	  case GIMPLE_OMP_TASK:
2704 	  case GIMPLE_OMP_CRITICAL:
2705 	    if (is_gimple_call (stmt))
2706 	      {
2707 		if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
2708 		    != BUILT_IN_GOMP_BARRIER)
2709 		  return true;
2710 		error_at (gimple_location (stmt),
2711 			  "barrier region may not be closely nested inside "
2712 			  "of work-sharing, %<critical%>, %<ordered%>, "
2713 			  "%<master%>, explicit %<task%> or %<taskloop%> "
2714 			  "region");
2715 		return false;
2716 	      }
2717 	    error_at (gimple_location (stmt),
2718 		      "work-sharing region may not be closely nested inside "
2719 		      "of work-sharing, %<critical%>, %<ordered%>, "
2720 		      "%<master%>, explicit %<task%> or %<taskloop%> region");
2721 	    return false;
2722 	  case GIMPLE_OMP_PARALLEL:
2723 	  case GIMPLE_OMP_TEAMS:
2724 	    return true;
2725 	  case GIMPLE_OMP_TARGET:
2726 	    if (gimple_omp_target_kind (ctx->stmt)
2727 		== GF_OMP_TARGET_KIND_REGION)
2728 	      return true;
2729 	    break;
2730 	  default:
2731 	    break;
2732 	  }
2733       break;
2734     case GIMPLE_OMP_MASTER:
2735       for (; ctx != NULL; ctx = ctx->outer)
2736 	switch (gimple_code (ctx->stmt))
2737 	  {
2738 	  case GIMPLE_OMP_FOR:
2739 	    if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
2740 		&& gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
2741 	      break;
2742 	    /* FALLTHRU */
2743 	  case GIMPLE_OMP_SECTIONS:
2744 	  case GIMPLE_OMP_SINGLE:
2745 	  case GIMPLE_OMP_TASK:
2746 	    error_at (gimple_location (stmt),
2747 		      "%<master%> region may not be closely nested inside "
2748 		      "of work-sharing, explicit %<task%> or %<taskloop%> "
2749 		      "region");
2750 	    return false;
2751 	  case GIMPLE_OMP_PARALLEL:
2752 	  case GIMPLE_OMP_TEAMS:
2753 	    return true;
2754 	  case GIMPLE_OMP_TARGET:
2755 	    if (gimple_omp_target_kind (ctx->stmt)
2756 		== GF_OMP_TARGET_KIND_REGION)
2757 	      return true;
2758 	    break;
2759 	  default:
2760 	    break;
2761 	  }
2762       break;
2763     case GIMPLE_OMP_TASK:
2764       for (c = gimple_omp_task_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2765 	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
2766 	    && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
2767 		|| OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
2768 	  {
2769 	    enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
2770 	    error_at (OMP_CLAUSE_LOCATION (c),
2771 		      "%<depend(%s)%> is only allowed in %<omp ordered%>",
2772 		      kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
2773 	    return false;
2774 	  }
2775       break;
2776     case GIMPLE_OMP_ORDERED:
2777       for (c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
2778 	   c; c = OMP_CLAUSE_CHAIN (c))
2779 	{
2780 	  if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
2781 	    {
2782 	      gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREADS
2783 			  || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SIMD);
2784 	      continue;
2785 	    }
2786 	  enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
2787 	  if (kind == OMP_CLAUSE_DEPEND_SOURCE
2788 	      || kind == OMP_CLAUSE_DEPEND_SINK)
2789 	    {
2790 	      tree oclause;
2791 	      /* Look for containing ordered(N) loop.  */
2792 	      if (ctx == NULL
2793 		  || gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
2794 		  || (oclause
2795 			= omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2796 					   OMP_CLAUSE_ORDERED)) == NULL_TREE)
2797 		{
2798 		  error_at (OMP_CLAUSE_LOCATION (c),
2799 			    "%<ordered%> construct with %<depend%> clause "
2800 			    "must be closely nested inside an %<ordered%> "
2801 			    "loop");
2802 		  return false;
2803 		}
2804 	      else if (OMP_CLAUSE_ORDERED_EXPR (oclause) == NULL_TREE)
2805 		{
2806 		  error_at (OMP_CLAUSE_LOCATION (c),
2807 			    "%<ordered%> construct with %<depend%> clause "
2808 			    "must be closely nested inside a loop with "
2809 			    "%<ordered%> clause with a parameter");
2810 		  return false;
2811 		}
2812 	    }
2813 	  else
2814 	    {
2815 	      error_at (OMP_CLAUSE_LOCATION (c),
2816 			"invalid depend kind in omp %<ordered%> %<depend%>");
2817 	      return false;
2818 	    }
2819 	}
2820       c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
2821       if (omp_find_clause (c, OMP_CLAUSE_SIMD))
2822 	{
2823 	  /* ordered simd must be closely nested inside of simd region,
2824 	     and simd region must not encounter constructs other than
2825 	     ordered simd, therefore ordered simd may be either orphaned,
2826 	     or ctx->stmt must be simd.  The latter case is handled already
2827 	     earlier.  */
2828 	  if (ctx != NULL)
2829 	    {
2830 	      error_at (gimple_location (stmt),
2831 			"%<ordered%> %<simd%> must be closely nested inside "
2832 			"%<simd%> region");
2833 	      return false;
2834 	    }
2835 	}
2836       for (; ctx != NULL; ctx = ctx->outer)
2837 	switch (gimple_code (ctx->stmt))
2838 	  {
2839 	  case GIMPLE_OMP_CRITICAL:
2840 	  case GIMPLE_OMP_TASK:
2841 	  case GIMPLE_OMP_ORDERED:
2842 	  ordered_in_taskloop:
2843 	    error_at (gimple_location (stmt),
2844 		      "%<ordered%> region may not be closely nested inside "
2845 		      "of %<critical%>, %<ordered%>, explicit %<task%> or "
2846 		      "%<taskloop%> region");
2847 	    return false;
2848 	  case GIMPLE_OMP_FOR:
2849 	    if (gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP)
2850 	      goto ordered_in_taskloop;
2851 	    tree o;
2852 	    o = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
2853 				 OMP_CLAUSE_ORDERED);
2854 	    if (o == NULL)
2855 	      {
2856 		error_at (gimple_location (stmt),
2857 			  "%<ordered%> region must be closely nested inside "
2858 			  "a loop region with an %<ordered%> clause");
2859 		return false;
2860 	      }
2861 	    if (OMP_CLAUSE_ORDERED_EXPR (o) != NULL_TREE
2862 		&& omp_find_clause (c, OMP_CLAUSE_DEPEND) == NULL_TREE)
2863 	      {
2864 		error_at (gimple_location (stmt),
2865 			  "%<ordered%> region without %<depend%> clause may "
2866 			  "not be closely nested inside a loop region with "
2867 			  "an %<ordered%> clause with a parameter");
2868 		return false;
2869 	      }
2870 	    return true;
2871 	  case GIMPLE_OMP_TARGET:
2872 	    if (gimple_omp_target_kind (ctx->stmt)
2873 		!= GF_OMP_TARGET_KIND_REGION)
2874 	      break;
2875 	    /* FALLTHRU */
2876 	  case GIMPLE_OMP_PARALLEL:
2877 	  case GIMPLE_OMP_TEAMS:
2878 	    error_at (gimple_location (stmt),
2879 		      "%<ordered%> region must be closely nested inside "
2880 		      "a loop region with an %<ordered%> clause");
2881 	    return false;
2882 	  default:
2883 	    break;
2884 	  }
2885       break;
2886     case GIMPLE_OMP_CRITICAL:
2887       {
2888 	tree this_stmt_name
2889 	  = gimple_omp_critical_name (as_a <gomp_critical *> (stmt));
2890 	for (; ctx != NULL; ctx = ctx->outer)
2891 	  if (gomp_critical *other_crit
2892 	        = dyn_cast <gomp_critical *> (ctx->stmt))
2893 	    if (this_stmt_name == gimple_omp_critical_name (other_crit))
2894 	      {
2895 		error_at (gimple_location (stmt),
2896 			  "%<critical%> region may not be nested inside "
2897 			   "a %<critical%> region with the same name");
2898 		return false;
2899 	      }
2900       }
2901       break;
2902     case GIMPLE_OMP_TEAMS:
2903       if (ctx == NULL
2904 	  || gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET
2905 	  || gimple_omp_target_kind (ctx->stmt) != GF_OMP_TARGET_KIND_REGION)
2906 	{
2907 	  error_at (gimple_location (stmt),
2908 		    "%<teams%> construct not closely nested inside of "
2909 		    "%<target%> construct");
2910 	  return false;
2911 	}
2912       break;
2913     case GIMPLE_OMP_TARGET:
2914       for (c = gimple_omp_target_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2915 	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
2916 	    && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
2917 		|| OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
2918 	  {
2919 	    enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
2920 	    error_at (OMP_CLAUSE_LOCATION (c),
2921 		      "%<depend(%s)%> is only allowed in %<omp ordered%>",
2922 		      kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
2923 	    return false;
2924 	  }
2925       if (is_gimple_omp_offloaded (stmt)
2926 	  && oacc_get_fn_attrib (cfun->decl) != NULL)
2927 	{
2928 	  error_at (gimple_location (stmt),
2929 		    "OpenACC region inside of OpenACC routine, nested "
2930 		    "parallelism not supported yet");
2931 	  return false;
2932 	}
2933       for (; ctx != NULL; ctx = ctx->outer)
2934 	{
2935 	  if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET)
2936 	    {
2937 	      if (is_gimple_omp (stmt)
2938 		  && is_gimple_omp_oacc (stmt)
2939 		  && is_gimple_omp (ctx->stmt))
2940 		{
2941 		  error_at (gimple_location (stmt),
2942 			    "OpenACC construct inside of non-OpenACC region");
2943 		  return false;
2944 		}
2945 	      continue;
2946 	    }
2947 
2948 	  const char *stmt_name, *ctx_stmt_name;
2949 	  switch (gimple_omp_target_kind (stmt))
2950 	    {
2951 	    case GF_OMP_TARGET_KIND_REGION: stmt_name = "target"; break;
2952 	    case GF_OMP_TARGET_KIND_DATA: stmt_name = "target data"; break;
2953 	    case GF_OMP_TARGET_KIND_UPDATE: stmt_name = "target update"; break;
2954 	    case GF_OMP_TARGET_KIND_ENTER_DATA:
2955 	      stmt_name = "target enter data"; break;
2956 	    case GF_OMP_TARGET_KIND_EXIT_DATA:
2957 	      stmt_name = "target exit data"; break;
2958 	    case GF_OMP_TARGET_KIND_OACC_PARALLEL: stmt_name = "parallel"; break;
2959 	    case GF_OMP_TARGET_KIND_OACC_KERNELS: stmt_name = "kernels"; break;
2960 	    case GF_OMP_TARGET_KIND_OACC_DATA: stmt_name = "data"; break;
2961 	    case GF_OMP_TARGET_KIND_OACC_UPDATE: stmt_name = "update"; break;
2962 	    case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA:
2963 	      stmt_name = "enter/exit data"; break;
2964 	    case GF_OMP_TARGET_KIND_OACC_HOST_DATA: stmt_name = "host_data";
2965 	      break;
2966 	    default: gcc_unreachable ();
2967 	    }
2968 	  switch (gimple_omp_target_kind (ctx->stmt))
2969 	    {
2970 	    case GF_OMP_TARGET_KIND_REGION: ctx_stmt_name = "target"; break;
2971 	    case GF_OMP_TARGET_KIND_DATA: ctx_stmt_name = "target data"; break;
2972 	    case GF_OMP_TARGET_KIND_OACC_PARALLEL:
2973 	      ctx_stmt_name = "parallel"; break;
2974 	    case GF_OMP_TARGET_KIND_OACC_KERNELS:
2975 	      ctx_stmt_name = "kernels"; break;
2976 	    case GF_OMP_TARGET_KIND_OACC_DATA: ctx_stmt_name = "data"; break;
2977 	    case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
2978 	      ctx_stmt_name = "host_data"; break;
2979 	    default: gcc_unreachable ();
2980 	    }
2981 
2982 	  /* OpenACC/OpenMP mismatch?  */
2983 	  if (is_gimple_omp_oacc (stmt)
2984 	      != is_gimple_omp_oacc (ctx->stmt))
2985 	    {
2986 	      error_at (gimple_location (stmt),
2987 			"%s %qs construct inside of %s %qs region",
2988 			(is_gimple_omp_oacc (stmt)
2989 			 ? "OpenACC" : "OpenMP"), stmt_name,
2990 			(is_gimple_omp_oacc (ctx->stmt)
2991 			 ? "OpenACC" : "OpenMP"), ctx_stmt_name);
2992 	      return false;
2993 	    }
2994 	  if (is_gimple_omp_offloaded (ctx->stmt))
2995 	    {
2996 	      /* No GIMPLE_OMP_TARGET inside offloaded OpenACC CTX.  */
2997 	      if (is_gimple_omp_oacc (ctx->stmt))
2998 		{
2999 		  error_at (gimple_location (stmt),
3000 			    "%qs construct inside of %qs region",
3001 			    stmt_name, ctx_stmt_name);
3002 		  return false;
3003 		}
3004 	      else
3005 		{
3006 		  warning_at (gimple_location (stmt), 0,
3007 			      "%qs construct inside of %qs region",
3008 			      stmt_name, ctx_stmt_name);
3009 		}
3010 	    }
3011 	}
3012       break;
3013     default:
3014       break;
3015     }
3016   return true;
3017 }
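
/* For instance (a sketch), the work-sharing checks above reject a closely
   nested work-sharing region:

     #pragma omp for
     for (...)
       {
	 #pragma omp single
	 ...
       }

   Here the single construct triggers the "work-sharing region may not be
   closely nested inside of work-sharing, ..." diagnostic above.  */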
3018 
3019 
3020 /* Helper function for scan_omp.
3021 
3022    Callback for walk_tree, and for operand walks in walk_gimple_stmt,
3023    used to scan for OMP directives in TP.  */
3024 
3025 static tree
3026 scan_omp_1_op (tree *tp, int *walk_subtrees, void *data)
3027 {
3028   struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
3029   omp_context *ctx = (omp_context *) wi->info;
3030   tree t = *tp;
3031 
3032   switch (TREE_CODE (t))
3033     {
3034     case VAR_DECL:
3035     case PARM_DECL:
3036     case LABEL_DECL:
3037     case RESULT_DECL:
3038       if (ctx)
3039 	{
3040 	  tree repl = remap_decl (t, &ctx->cb);
3041 	  gcc_checking_assert (TREE_CODE (repl) != ERROR_MARK);
3042 	  *tp = repl;
3043 	}
3044       break;
3045 
3046     default:
3047       if (ctx && TYPE_P (t))
3048 	*tp = remap_type (t, &ctx->cb);
3049       else if (!DECL_P (t))
3050 	{
3051 	  *walk_subtrees = 1;
3052 	  if (ctx)
3053 	    {
3054 	      tree tem = remap_type (TREE_TYPE (t), &ctx->cb);
3055 	      if (tem != TREE_TYPE (t))
3056 		{
3057 		  if (TREE_CODE (t) == INTEGER_CST)
3058 		    *tp = wide_int_to_tree (tem, wi::to_wide (t));
3059 		  else
3060 		    TREE_TYPE (t) = tem;
3061 		}
3062 	    }
3063 	}
3064       break;
3065     }
3066 
3067   return NULL_TREE;
3068 }
3069 
3070 /* Return true if FNDECL is a setjmp or a longjmp.  */
3071 
3072 static bool
3073 setjmp_or_longjmp_p (const_tree fndecl)
3074 {
3075   if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
3076       && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_SETJMP
3077 	  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_LONGJMP))
3078     return true;
3079 
3080   tree declname = DECL_NAME (fndecl);
3081   if (!declname)
3082     return false;
3083   const char *name = IDENTIFIER_POINTER (declname);
3084   return !strcmp (name, "setjmp") || !strcmp (name, "longjmp");
3085 }
3086 
3087 
3088 /* Helper function for scan_omp.
3089 
3090    Callback for walk_gimple_stmt used to scan for OMP directives in
3091    the current statement in GSI.  */
3092 
3093 static tree
3094 scan_omp_1_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
3095 		 struct walk_stmt_info *wi)
3096 {
3097   gimple *stmt = gsi_stmt (*gsi);
3098   omp_context *ctx = (omp_context *) wi->info;
3099 
3100   if (gimple_has_location (stmt))
3101     input_location = gimple_location (stmt);
3102 
3103   /* Check the nesting restrictions.  */
3104   bool remove = false;
3105   if (is_gimple_omp (stmt))
3106     remove = !check_omp_nesting_restrictions (stmt, ctx);
3107   else if (is_gimple_call (stmt))
3108     {
3109       tree fndecl = gimple_call_fndecl (stmt);
3110       if (fndecl)
3111 	{
3112 	  if (setjmp_or_longjmp_p (fndecl)
3113 	      && ctx
3114 	      && gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
3115 	      && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
3116 	    {
3117 	      remove = true;
3118 	      error_at (gimple_location (stmt),
3119 			"setjmp/longjmp inside simd construct");
3120 	    }
3121 	  else if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3122 	    switch (DECL_FUNCTION_CODE (fndecl))
3123 	      {
3124 	      case BUILT_IN_GOMP_BARRIER:
3125 	      case BUILT_IN_GOMP_CANCEL:
3126 	      case BUILT_IN_GOMP_CANCELLATION_POINT:
3127 	      case BUILT_IN_GOMP_TASKYIELD:
3128 	      case BUILT_IN_GOMP_TASKWAIT:
3129 	      case BUILT_IN_GOMP_TASKGROUP_START:
3130 	      case BUILT_IN_GOMP_TASKGROUP_END:
3131 		remove = !check_omp_nesting_restrictions (stmt, ctx);
3132 		break;
3133 	      default:
3134 		break;
3135 	      }
3136 	}
3137     }
3138   if (remove)
3139     {
3140       stmt = gimple_build_nop ();
3141       gsi_replace (gsi, stmt, false);
3142     }
3143 
3144   *handled_ops_p = true;
3145 
3146   switch (gimple_code (stmt))
3147     {
3148     case GIMPLE_OMP_PARALLEL:
3149       taskreg_nesting_level++;
3150       scan_omp_parallel (gsi, ctx);
3151       taskreg_nesting_level--;
3152       break;
3153 
3154     case GIMPLE_OMP_TASK:
3155       taskreg_nesting_level++;
3156       scan_omp_task (gsi, ctx);
3157       taskreg_nesting_level--;
3158       break;
3159 
3160     case GIMPLE_OMP_FOR:
3161       if (((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
3162 	    & GF_OMP_FOR_KIND_MASK) == GF_OMP_FOR_KIND_SIMD)
3163 	  && omp_maybe_offloaded_ctx (ctx)
3164 	  && omp_max_simt_vf ())
3165 	scan_omp_simd (gsi, as_a <gomp_for *> (stmt), ctx);
3166       else
3167 	scan_omp_for (as_a <gomp_for *> (stmt), ctx);
3168       break;
3169 
3170     case GIMPLE_OMP_SECTIONS:
3171       scan_omp_sections (as_a <gomp_sections *> (stmt), ctx);
3172       break;
3173 
3174     case GIMPLE_OMP_SINGLE:
3175       scan_omp_single (as_a <gomp_single *> (stmt), ctx);
3176       break;
3177 
3178     case GIMPLE_OMP_SECTION:
3179     case GIMPLE_OMP_MASTER:
3180     case GIMPLE_OMP_TASKGROUP:
3181     case GIMPLE_OMP_ORDERED:
3182     case GIMPLE_OMP_CRITICAL:
3183     case GIMPLE_OMP_GRID_BODY:
3184       ctx = new_omp_context (stmt, ctx);
3185       scan_omp (gimple_omp_body_ptr (stmt), ctx);
3186       break;
3187 
3188     case GIMPLE_OMP_TARGET:
3189       if (is_gimple_omp_offloaded (stmt))
3190 	{
3191 	  taskreg_nesting_level++;
3192 	  scan_omp_target (as_a <gomp_target *> (stmt), ctx);
3193 	  taskreg_nesting_level--;
3194 	}
3195       else
3196 	scan_omp_target (as_a <gomp_target *> (stmt), ctx);
3197       break;
3198 
3199     case GIMPLE_OMP_TEAMS:
3200       scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
3201       break;
3202 
3203     case GIMPLE_BIND:
3204       {
3205 	tree var;
3206 
3207 	*handled_ops_p = false;
3208 	if (ctx)
3209 	  for (var = gimple_bind_vars (as_a <gbind *> (stmt));
3210 	       var ;
3211 	       var = DECL_CHAIN (var))
3212 	    insert_decl_map (&ctx->cb, var, var);
3213       }
3214       break;
3215     default:
3216       *handled_ops_p = false;
3217       break;
3218     }
3219 
3220   return NULL_TREE;
3221 }
3222 
3223 
3224 /* Scan all the statements starting at the current statement.  CTX
3225    contains context information about the OMP directives and
3226    clauses found during the scan.  */
3227 
3228 static void
3229 scan_omp (gimple_seq *body_p, omp_context *ctx)
3230 {
3231   location_t saved_location;
3232   struct walk_stmt_info wi;
3233 
3234   memset (&wi, 0, sizeof (wi));
3235   wi.info = ctx;
3236   wi.want_locations = true;
3237 
3238   saved_location = input_location;
3239   walk_gimple_seq_mod (body_p, scan_omp_1_stmt, scan_omp_1_op, &wi);
3240   input_location = saved_location;
3241 }
3242 
3243 /* Re-gimplification and code generation routines.  */
3244 
3245 /* Remove omp_member_access_dummy_var variables from gimple_bind_vars
3246    of BIND if in a method.  */
3247 
3248 static void
3249 maybe_remove_omp_member_access_dummy_vars (gbind *bind)
3250 {
3251   if (DECL_ARGUMENTS (current_function_decl)
3252       && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
3253       && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
3254 	  == POINTER_TYPE))
3255     {
3256       tree vars = gimple_bind_vars (bind);
3257       for (tree *pvar = &vars; *pvar; )
3258 	if (omp_member_access_dummy_var (*pvar))
3259 	  *pvar = DECL_CHAIN (*pvar);
3260 	else
3261 	  pvar = &DECL_CHAIN (*pvar);
3262       gimple_bind_set_vars (bind, vars);
3263     }
3264 }
3265 
3266 /* Remove omp_member_access_dummy_var variables from BLOCK_VARS of
3267    block and its subblocks.  */
3268 
3269 static void
3270 remove_member_access_dummy_vars (tree block)
3271 {
3272   for (tree *pvar = &BLOCK_VARS (block); *pvar; )
3273     if (omp_member_access_dummy_var (*pvar))
3274       *pvar = DECL_CHAIN (*pvar);
3275     else
3276       pvar = &DECL_CHAIN (*pvar);
3277 
3278   for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
3279     remove_member_access_dummy_vars (block);
3280 }
3281 
3282 /* If a context was created for STMT when it was scanned, return it.  */
3283 
3284 static omp_context *
3285 maybe_lookup_ctx (gimple *stmt)
3286 {
3287   splay_tree_node n;
3288   n = splay_tree_lookup (all_contexts, (splay_tree_key) stmt);
3289   return n ? (omp_context *) n->value : NULL;
3290 }
3291 
3292 
3293 /* Find the mapping for DECL in CTX or the immediately enclosing
3294    context that has a mapping for DECL.
3295 
3296    If CTX is a nested parallel directive, we may have to use the decl
3297    mappings created in CTX's parent context.  Suppose that we have the
3298    following parallel nesting (variable UIDs shown for clarity):
3299 
3300 	iD.1562 = 0;
3301      	#omp parallel shared(iD.1562)		-> outer parallel
3302 	  iD.1562 = iD.1562 + 1;
3303 
3304 	  #omp parallel shared (iD.1562)	-> inner parallel
3305 	     iD.1562 = iD.1562 - 1;
3306 
3307    Each parallel structure will create a distinct .omp_data_s structure
3308    for copying iD.1562 in/out of the directive:
3309 
3310   	outer parallel		.omp_data_s.1.i -> iD.1562
3311 	inner parallel		.omp_data_s.2.i -> iD.1562
3312 
3313    A shared variable mapping will produce a copy-out operation before
3314    the parallel directive and a copy-in operation after it.  So, in
3315    this case we would have:
3316 
3317   	iD.1562 = 0;
3318 	.omp_data_o.1.i = iD.1562;
3319 	#omp parallel shared(iD.1562)		-> outer parallel
3320 	  .omp_data_i.1 = &.omp_data_o.1
3321 	  .omp_data_i.1->i = .omp_data_i.1->i + 1;
3322 
3323 	  .omp_data_o.2.i = iD.1562;		-> **
3324 	  #omp parallel shared(iD.1562)		-> inner parallel
3325 	    .omp_data_i.2 = &.omp_data_o.2
3326 	    .omp_data_i.2->i = .omp_data_i.2->i - 1;
3327 
3328 
3329     ** This is a problem.  The symbol iD.1562 cannot be referenced
3330        inside the body of the outer parallel region.  But since we are
3331        emitting this copy operation while expanding the inner parallel
3332        directive, we need to access the CTX structure of the outer
3333        parallel directive to get the correct mapping:
3334 
3335 	  .omp_data_o.2.i = .omp_data_i.1->i
3336 
3337     Since there may be other workshare or parallel directives enclosing
3338     the parallel directive, it may be necessary to walk up the context
3339     parent chain.  This is not a problem in general because nested
3340     parallelism happens only rarely.  */
3341 
3342 static tree
3343 lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
3344 {
3345   tree t;
3346   omp_context *up;
3347 
3348   for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
3349     t = maybe_lookup_decl (decl, up);
3350 
3351   gcc_assert (!ctx->is_nested || t || is_global_var (decl));
3352 
3353   return t ? t : decl;
3354 }
3355 
3356 
3357 /* Similar to lookup_decl_in_outer_ctx, but return DECL if not found
3358    in outer contexts.  */
3359 
3360 static tree
3361 maybe_lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
3362 {
3363   tree t = NULL;
3364   omp_context *up;
3365 
3366   for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
3367     t = maybe_lookup_decl (decl, up);
3368 
3369   return t ? t : decl;
3370 }
3371 
3372 
3373 /* Construct the initialization value for reduction operation OP.  */
3374 
3375 tree
3376 omp_reduction_init_op (location_t loc, enum tree_code op, tree type)
3377 {
3378   switch (op)
3379     {
3380     case PLUS_EXPR:
3381     case MINUS_EXPR:
3382     case BIT_IOR_EXPR:
3383     case BIT_XOR_EXPR:
3384     case TRUTH_OR_EXPR:
3385     case TRUTH_ORIF_EXPR:
3386     case TRUTH_XOR_EXPR:
3387     case NE_EXPR:
3388       return build_zero_cst (type);
3389 
3390     case MULT_EXPR:
3391     case TRUTH_AND_EXPR:
3392     case TRUTH_ANDIF_EXPR:
3393     case EQ_EXPR:
3394       return fold_convert_loc (loc, type, integer_one_node);
3395 
3396     case BIT_AND_EXPR:
3397       return fold_convert_loc (loc, type, integer_minus_one_node);
3398 
3399     case MAX_EXPR:
3400       if (SCALAR_FLOAT_TYPE_P (type))
3401 	{
3402 	  REAL_VALUE_TYPE max, min;
3403 	  if (HONOR_INFINITIES (type))
3404 	    {
3405 	      real_inf (&max);
3406 	      real_arithmetic (&min, NEGATE_EXPR, &max, NULL);
3407 	    }
3408 	  else
3409 	    real_maxval (&min, 1, TYPE_MODE (type));
3410 	  return build_real (type, min);
3411 	}
3412       else if (POINTER_TYPE_P (type))
3413 	{
3414 	  wide_int min
3415 	    = wi::min_value (TYPE_PRECISION (type), TYPE_SIGN (type));
3416 	  return wide_int_to_tree (type, min);
3417 	}
3418       else
3419 	{
3420 	  gcc_assert (INTEGRAL_TYPE_P (type));
3421 	  return TYPE_MIN_VALUE (type);
3422 	}
3423 
3424     case MIN_EXPR:
3425       if (SCALAR_FLOAT_TYPE_P (type))
3426 	{
3427 	  REAL_VALUE_TYPE max;
3428 	  if (HONOR_INFINITIES (type))
3429 	    real_inf (&max);
3430 	  else
3431 	    real_maxval (&max, 0, TYPE_MODE (type));
3432 	  return build_real (type, max);
3433 	}
3434       else if (POINTER_TYPE_P (type))
3435 	{
3436 	  wide_int max
3437 	    = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
3438 	  return wide_int_to_tree (type, max);
3439 	}
3440       else
3441 	{
3442 	  gcc_assert (INTEGRAL_TYPE_P (type));
3443 	  return TYPE_MAX_VALUE (type);
3444 	}
3445 
3446     default:
3447       gcc_unreachable ();
3448     }
3449 }
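
/* For example, reduction(+:x) seeds each private copy with 0,
   reduction(*:x) with 1, reduction(&:x) with ~0, and reduction(min:x)
   with +Inf for floats honoring infinities (the largest finite value,
   or TYPE_MAX_VALUE for integers, otherwise).  Schematically, for an int:

	reduction(+:x)	 ->  x_priv = 0;
	reduction(max:x) ->  x_priv = INT_MIN;

   Illustrative only: this function just builds and returns the constant,
   it emits no statements.  */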
3450 
3451 /* Construct the initialization value for reduction CLAUSE.  */
3452 
3453 tree
3454 omp_reduction_init (tree clause, tree type)
3455 {
3456   return omp_reduction_init_op (OMP_CLAUSE_LOCATION (clause),
3457 				OMP_CLAUSE_REDUCTION_CODE (clause), type);
3458 }
3459 
3460 /* Return alignment to be assumed for var in CLAUSE, which should be
3461    OMP_CLAUSE_ALIGNED.  */
3462 
3463 static tree
3464 omp_clause_aligned_alignment (tree clause)
3465 {
3466   if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
3467     return OMP_CLAUSE_ALIGNED_ALIGNMENT (clause);
3468 
3469   /* Otherwise return implementation defined alignment.  */
3470   unsigned int al = 1;
3471   opt_scalar_mode mode_iter;
3472   auto_vector_sizes sizes;
3473   targetm.vectorize.autovectorize_vector_sizes (&sizes);
3474   poly_uint64 vs = 0;
3475   for (unsigned int i = 0; i < sizes.length (); ++i)
3476     vs = ordered_max (vs, sizes[i]);
3477   static enum mode_class classes[]
3478     = { MODE_INT, MODE_VECTOR_INT, MODE_FLOAT, MODE_VECTOR_FLOAT };
3479   for (int i = 0; i < 4; i += 2)
3480     /* The for loop above dictates that we only walk through scalar classes.  */
3481     FOR_EACH_MODE_IN_CLASS (mode_iter, classes[i])
3482       {
3483 	scalar_mode mode = mode_iter.require ();
3484 	machine_mode vmode = targetm.vectorize.preferred_simd_mode (mode);
3485 	if (GET_MODE_CLASS (vmode) != classes[i + 1])
3486 	  continue;
3487 	while (maybe_ne (vs, 0U)
3488 	       && known_lt (GET_MODE_SIZE (vmode), vs)
3489 	       && GET_MODE_2XWIDER_MODE (vmode).exists ())
3490 	  vmode = GET_MODE_2XWIDER_MODE (vmode).require ();
3491 
3492 	tree type = lang_hooks.types.type_for_mode (mode, 1);
3493 	if (type == NULL_TREE || TYPE_MODE (type) != mode)
3494 	  continue;
3495 	poly_uint64 nelts = exact_div (GET_MODE_SIZE (vmode),
3496 				       GET_MODE_SIZE (mode));
3497 	type = build_vector_type (type, nelts);
3498 	if (TYPE_MODE (type) != vmode)
3499 	  continue;
3500 	if (TYPE_ALIGN_UNIT (type) > al)
3501 	  al = TYPE_ALIGN_UNIT (type);
3502       }
3503   return build_int_cst (integer_type_node, al);
3504 }
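
/* E.g. for "#pragma omp simd aligned (p)" with no explicit alignment,
   if the widest preferred vector mode on the target is a 32-byte one,
   the loop above yields al == 32, as if the user had written
   aligned (p : 32).  The value is target dependent; 1 is the fallback
   when no suitable vector mode exists.  */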
3505 
3506 
3507 /* This structure is part of the interface between lower_rec_simd_input_clauses
3508    and lower_rec_input_clauses.  */
3509 
3510 struct omplow_simd_context {
3511   omplow_simd_context () { memset (this, 0, sizeof (*this)); }
3512   tree idx;
3513   tree lane;
3514   vec<tree, va_heap> simt_eargs;
3515   gimple_seq simt_dlist;
3516   poly_uint64_pod max_vf;
3517   bool is_simt;
3518 };
3519 
3520 /* Helper function of lower_rec_input_clauses, used for #pragma omp simd
3521    privatization.  */
3522 
3523 static bool
3524 lower_rec_simd_input_clauses (tree new_var, omp_context *ctx,
3525 			      omplow_simd_context *sctx, tree &ivar, tree &lvar)
3526 {
3527   if (known_eq (sctx->max_vf, 0U))
3528     {
3529       sctx->max_vf = sctx->is_simt ? omp_max_simt_vf () : omp_max_vf ();
3530       if (maybe_gt (sctx->max_vf, 1U))
3531 	{
3532 	  tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3533 				    OMP_CLAUSE_SAFELEN);
3534 	  if (c)
3535 	    {
3536 	      poly_uint64 safe_len;
3537 	      if (!poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
3538 		  || maybe_lt (safe_len, 1U))
3539 		sctx->max_vf = 1;
3540 	      else
3541 		sctx->max_vf = lower_bound (sctx->max_vf, safe_len);
3542 	    }
3543 	}
3544       if (maybe_gt (sctx->max_vf, 1U))
3545 	{
3546 	  sctx->idx = create_tmp_var (unsigned_type_node);
3547 	  sctx->lane = create_tmp_var (unsigned_type_node);
3548 	}
3549     }
3550   if (known_eq (sctx->max_vf, 1U))
3551     return false;
3552 
3553   if (sctx->is_simt)
3554     {
3555       if (is_gimple_reg (new_var))
3556 	{
3557 	  ivar = lvar = new_var;
3558 	  return true;
3559 	}
3560       tree type = TREE_TYPE (new_var), ptype = build_pointer_type (type);
3561       ivar = lvar = create_tmp_var (type);
3562       TREE_ADDRESSABLE (ivar) = 1;
3563       DECL_ATTRIBUTES (ivar) = tree_cons (get_identifier ("omp simt private"),
3564 					  NULL, DECL_ATTRIBUTES (ivar));
3565       sctx->simt_eargs.safe_push (build1 (ADDR_EXPR, ptype, ivar));
3566       tree clobber = build_constructor (type, NULL);
3567       TREE_THIS_VOLATILE (clobber) = 1;
3568       gimple *g = gimple_build_assign (ivar, clobber);
3569       gimple_seq_add_stmt (&sctx->simt_dlist, g);
3570     }
3571   else
3572     {
3573       tree atype = build_array_type_nelts (TREE_TYPE (new_var), sctx->max_vf);
3574       tree avar = create_tmp_var_raw (atype);
3575       if (TREE_ADDRESSABLE (new_var))
3576 	TREE_ADDRESSABLE (avar) = 1;
3577       DECL_ATTRIBUTES (avar)
3578 	= tree_cons (get_identifier ("omp simd array"), NULL,
3579 		     DECL_ATTRIBUTES (avar));
3580       gimple_add_tmp_var (avar);
3581       ivar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, sctx->idx,
3582 		     NULL_TREE, NULL_TREE);
3583       lvar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, sctx->lane,
3584 		     NULL_TREE, NULL_TREE);
3585     }
3586   if (DECL_P (new_var))
3587     {
3588       SET_DECL_VALUE_EXPR (new_var, lvar);
3589       DECL_HAS_VALUE_EXPR_P (new_var) = 1;
3590     }
3591   return true;
3592 }
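
/* E.g. in the non-SIMT case, a privatized scalar D.1234 in a simd loop
   is backed by an "omp simd array", roughly:

	float D.1234;	->	float D.1234_arr[max_vf];

   LVAR (D.1234_arr[lane]) becomes the DECL_VALUE_EXPR seen by the loop
   body, while IVAR (D.1234_arr[idx]) is what the caller uses inside the
   per-lane initialization and finalization loops it builds around the
   construct.  Schematic only; the names are illustrative.  */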
3593 
3594 /* Helper function of lower_rec_input_clauses.  For a reference
3595    in simd reduction, add an underlying variable it will reference.  */
3596 
3597 static void
3598 handle_simd_reference (location_t loc, tree new_vard, gimple_seq *ilist)
3599 {
3600   tree z = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_vard)));
3601   if (TREE_CONSTANT (z))
3602     {
3603       z = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_vard)),
3604 			      get_name (new_vard));
3605       gimple_add_tmp_var (z);
3606       TREE_ADDRESSABLE (z) = 1;
3607       z = build_fold_addr_expr_loc (loc, z);
3608       gimplify_assign (new_vard, z, ilist);
3609     }
3610 }
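
/* E.g. if NEW_VARD is a privatized reference (as Fortran produces) whose
   pointee size is constant, this emits, roughly:

	<pointee-type> D.tmp;
	new_vard = &D.tmp;

   so downstream code can keep dereferencing NEW_VARD.  Illustrative.  */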
3611 
3612 /* Generate code to implement the input clauses, FIRSTPRIVATE and COPYIN,
3613    from the receiver (aka child) side and initializers for REFERENCE_TYPE
3614    private variables.  Initialization statements go in ILIST, while calls
3615    to destructors go in DLIST.  */
3616 
3617 static void
3618 lower_rec_input_clauses (tree clauses, gimple_seq *ilist, gimple_seq *dlist,
3619 			 omp_context *ctx, struct omp_for_data *fd)
3620 {
3621   tree c, dtor, copyin_seq, x, ptr;
3622   bool copyin_by_ref = false;
3623   bool lastprivate_firstprivate = false;
3624   bool reduction_omp_orig_ref = false;
3625   int pass;
3626   bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
3627 		  && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD);
3628   omplow_simd_context sctx = omplow_simd_context ();
3629   tree simt_lane = NULL_TREE, simtrec = NULL_TREE;
3630   tree ivar = NULL_TREE, lvar = NULL_TREE, uid = NULL_TREE;
3631   gimple_seq llist[3] = { };
3632 
3633   copyin_seq = NULL;
3634   sctx.is_simt = is_simd && omp_find_clause (clauses, OMP_CLAUSE__SIMT_);
3635 
3636   /* Set max_vf=1 (which will later enforce safelen=1) in simd loops
3637      with data sharing clauses referencing variable sized vars.  That
3638      is unnecessarily hard to support and very unlikely to result in
3639      vectorized code anyway.  */
3640   if (is_simd)
3641     for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
3642       switch (OMP_CLAUSE_CODE (c))
3643 	{
3644 	case OMP_CLAUSE_LINEAR:
3645 	  if (OMP_CLAUSE_LINEAR_ARRAY (c))
3646 	    sctx.max_vf = 1;
3647 	  /* FALLTHRU */
3648 	case OMP_CLAUSE_PRIVATE:
3649 	case OMP_CLAUSE_FIRSTPRIVATE:
3650 	case OMP_CLAUSE_LASTPRIVATE:
3651 	  if (is_variable_sized (OMP_CLAUSE_DECL (c)))
3652 	    sctx.max_vf = 1;
3653 	  break;
3654 	case OMP_CLAUSE_REDUCTION:
3655 	  if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
3656 	      || is_variable_sized (OMP_CLAUSE_DECL (c)))
3657 	    sctx.max_vf = 1;
3658 	  break;
3659 	default:
3660 	  continue;
3661 	}
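
  /* E.g. a clause like "private (vla)" on a simd loop, or a reduction
     over an array section such as "reduction (+:a[0:n])", trips one of
     the cases above; max_vf == 1 is then turned into an explicit
     safelen(1) clause at the end of this function.  */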
3662 
3663   /* Add a placeholder for simduid.  */
3664   if (sctx.is_simt && maybe_ne (sctx.max_vf, 1U))
3665     sctx.simt_eargs.safe_push (NULL_TREE);
3666 
3667   /* Do all the fixed sized types in the first pass, and the variable sized
3668      types in the second pass.  This makes sure that the scalar arguments to
3669      the variable sized types are processed before we use them in the
3670      variable sized operations.  */
3671   for (pass = 0; pass < 2; ++pass)
3672     {
3673       for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
3674 	{
3675 	  enum omp_clause_code c_kind = OMP_CLAUSE_CODE (c);
3676 	  tree var, new_var;
3677 	  bool by_ref;
3678 	  location_t clause_loc = OMP_CLAUSE_LOCATION (c);
3679 
3680 	  switch (c_kind)
3681 	    {
3682 	    case OMP_CLAUSE_PRIVATE:
3683 	      if (OMP_CLAUSE_PRIVATE_DEBUG (c))
3684 		continue;
3685 	      break;
3686 	    case OMP_CLAUSE_SHARED:
3687 	      /* Ignore shared directives in teams construct.  */
3688 	      if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
3689 		continue;
3690 	      if (maybe_lookup_decl (OMP_CLAUSE_DECL (c), ctx) == NULL)
3691 		{
3692 		  gcc_assert (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c)
3693 			      || is_global_var (OMP_CLAUSE_DECL (c)));
3694 		  continue;
3695 		}
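	      /* FALLTHRU */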
3696 	    case OMP_CLAUSE_FIRSTPRIVATE:
3697 	    case OMP_CLAUSE_COPYIN:
3698 	      break;
3699 	    case OMP_CLAUSE_LINEAR:
3700 	      if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
3701 		  && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
3702 		lastprivate_firstprivate = true;
3703 	      break;
3704 	    case OMP_CLAUSE_REDUCTION:
3705 	      if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
3706 		reduction_omp_orig_ref = true;
3707 	      break;
3708 	    case OMP_CLAUSE__LOOPTEMP_:
3709 	      /* Handle _looptemp_ clauses only on parallel/task.  */
3710 	      if (fd)
3711 		continue;
3712 	      break;
3713 	    case OMP_CLAUSE_LASTPRIVATE:
3714 	      if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
3715 		{
3716 		  lastprivate_firstprivate = true;
3717 		  if (pass != 0 || is_taskloop_ctx (ctx))
3718 		    continue;
3719 		}
3720 	      /* Even without corresponding firstprivate, if
3721 		 decl is Fortran allocatable, it needs outer var
3722 		 reference.  */
3723 	      else if (pass == 0
3724 		       && lang_hooks.decls.omp_private_outer_ref
3725 							(OMP_CLAUSE_DECL (c)))
3726 		lastprivate_firstprivate = true;
3727 	      break;
3728 	    case OMP_CLAUSE_ALIGNED:
3729 	      if (pass == 0)
3730 		continue;
3731 	      var = OMP_CLAUSE_DECL (c);
3732 	      if (TREE_CODE (TREE_TYPE (var)) == POINTER_TYPE
3733 		  && !is_global_var (var))
3734 		{
3735 		  new_var = maybe_lookup_decl (var, ctx);
3736 		  if (new_var == NULL_TREE)
3737 		    new_var = maybe_lookup_decl_in_outer_ctx (var, ctx);
3738 		  x = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
3739 		  tree alarg = omp_clause_aligned_alignment (c);
3740 		  alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
3741 		  x = build_call_expr_loc (clause_loc, x, 2, new_var, alarg);
3742 		  x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
3743 		  x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
3744 		  gimplify_and_add (x, ilist);
3745 		}
3746 	      else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
3747 		       && is_global_var (var))
3748 		{
3749 		  tree ptype = build_pointer_type (TREE_TYPE (var)), t, t2;
3750 		  new_var = lookup_decl (var, ctx);
3751 		  t = maybe_lookup_decl_in_outer_ctx (var, ctx);
3752 		  t = build_fold_addr_expr_loc (clause_loc, t);
3753 		  t2 = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
3754 		  tree alarg = omp_clause_aligned_alignment (c);
3755 		  alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
3756 		  t = build_call_expr_loc (clause_loc, t2, 2, t, alarg);
3757 		  t = fold_convert_loc (clause_loc, ptype, t);
3758 		  x = create_tmp_var (ptype);
3759 		  t = build2 (MODIFY_EXPR, ptype, x, t);
3760 		  gimplify_and_add (t, ilist);
3761 		  t = build_simple_mem_ref_loc (clause_loc, x);
3762 		  SET_DECL_VALUE_EXPR (new_var, t);
3763 		  DECL_HAS_VALUE_EXPR_P (new_var) = 1;
3764 		}
3765 	      continue;
3766 	    default:
3767 	      continue;
3768 	    }
3769 
3770 	  new_var = var = OMP_CLAUSE_DECL (c);
3771 	  if (c_kind == OMP_CLAUSE_REDUCTION && TREE_CODE (var) == MEM_REF)
3772 	    {
3773 	      var = TREE_OPERAND (var, 0);
3774 	      if (TREE_CODE (var) == POINTER_PLUS_EXPR)
3775 		var = TREE_OPERAND (var, 0);
3776 	      if (TREE_CODE (var) == INDIRECT_REF
3777 		  || TREE_CODE (var) == ADDR_EXPR)
3778 		var = TREE_OPERAND (var, 0);
3779 	      if (is_variable_sized (var))
3780 		{
3781 		  gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
3782 		  var = DECL_VALUE_EXPR (var);
3783 		  gcc_assert (TREE_CODE (var) == INDIRECT_REF);
3784 		  var = TREE_OPERAND (var, 0);
3785 		  gcc_assert (DECL_P (var));
3786 		}
3787 	      new_var = var;
3788 	    }
3789 	  if (c_kind != OMP_CLAUSE_COPYIN)
3790 	    new_var = lookup_decl (var, ctx);
3791 
3792 	  if (c_kind == OMP_CLAUSE_SHARED || c_kind == OMP_CLAUSE_COPYIN)
3793 	    {
3794 	      if (pass != 0)
3795 		continue;
3796 	    }
3797 	  /* C/C++ array section reductions.  */
3798 	  else if (c_kind == OMP_CLAUSE_REDUCTION
3799 		   && var != OMP_CLAUSE_DECL (c))
3800 	    {
3801 	      if (pass == 0)
3802 		continue;
3803 
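	      /* OMP_CLAUSE_DECL is a MEM_REF here; e.g. for
		 "reduction (+:p[2:10])" it is roughly
		 MEM_REF <p + 2 * sizeof (*p), bias>, so operand 1 carries
		 the bias and the base decl is recovered from operand 0
		 below.  (The shape is illustrative and front-end
		 dependent.)  */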
3804 	      tree bias = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
3805 	      tree orig_var = TREE_OPERAND (OMP_CLAUSE_DECL (c), 0);
3806 	      if (TREE_CODE (orig_var) == POINTER_PLUS_EXPR)
3807 		{
3808 		  tree b = TREE_OPERAND (orig_var, 1);
3809 		  b = maybe_lookup_decl (b, ctx);
3810 		  if (b == NULL)
3811 		    {
3812 		      b = TREE_OPERAND (orig_var, 1);
3813 		      b = maybe_lookup_decl_in_outer_ctx (b, ctx);
3814 		    }
3815 		  if (integer_zerop (bias))
3816 		    bias = b;
3817 		  else
3818 		    {
3819 		      bias = fold_convert_loc (clause_loc,
3820 					       TREE_TYPE (b), bias);
3821 		      bias = fold_build2_loc (clause_loc, PLUS_EXPR,
3822 					      TREE_TYPE (b), b, bias);
3823 		    }
3824 		  orig_var = TREE_OPERAND (orig_var, 0);
3825 		}
3826 	      if (TREE_CODE (orig_var) == INDIRECT_REF
3827 		  || TREE_CODE (orig_var) == ADDR_EXPR)
3828 		orig_var = TREE_OPERAND (orig_var, 0);
3829 	      tree d = OMP_CLAUSE_DECL (c);
3830 	      tree type = TREE_TYPE (d);
3831 	      gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
3832 	      tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
3833 	      const char *name = get_name (orig_var);
3834 	      if (TREE_CONSTANT (v))
3835 		{
3836 		  x = create_tmp_var_raw (type, name);
3837 		  gimple_add_tmp_var (x);
3838 		  TREE_ADDRESSABLE (x) = 1;
3839 		  x = build_fold_addr_expr_loc (clause_loc, x);
3840 		}
3841 	      else
3842 		{
3843 		  tree atmp
3844 		    = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
3845 		  tree t = maybe_lookup_decl (v, ctx);
3846 		  if (t)
3847 		    v = t;
3848 		  else
3849 		    v = maybe_lookup_decl_in_outer_ctx (v, ctx);
3850 		  gimplify_expr (&v, ilist, NULL, is_gimple_val, fb_rvalue);
3851 		  t = fold_build2_loc (clause_loc, PLUS_EXPR,
3852 				       TREE_TYPE (v), v,
3853 				       build_int_cst (TREE_TYPE (v), 1));
3854 		  t = fold_build2_loc (clause_loc, MULT_EXPR,
3855 				       TREE_TYPE (v), t,
3856 				       TYPE_SIZE_UNIT (TREE_TYPE (type)));
3857 		  tree al = size_int (TYPE_ALIGN (TREE_TYPE (type)));
3858 		  x = build_call_expr_loc (clause_loc, atmp, 2, t, al);
3859 		}
3860 
3861 	      tree ptype = build_pointer_type (TREE_TYPE (type));
3862 	      x = fold_convert_loc (clause_loc, ptype, x);
3863 	      tree y = create_tmp_var (ptype, name);
3864 	      gimplify_assign (y, x, ilist);
3865 	      x = y;
3866 	      tree yb = y;
3867 
3868 	      if (!integer_zerop (bias))
3869 		{
3870 		  bias = fold_convert_loc (clause_loc, pointer_sized_int_node,
3871 					   bias);
3872 		  yb = fold_convert_loc (clause_loc, pointer_sized_int_node,
3873 					 x);
3874 		  yb = fold_build2_loc (clause_loc, MINUS_EXPR,
3875 					pointer_sized_int_node, yb, bias);
3876 		  x = fold_convert_loc (clause_loc, TREE_TYPE (x), yb);
3877 		  yb = create_tmp_var (ptype, name);
3878 		  gimplify_assign (yb, x, ilist);
3879 		  x = yb;
3880 		}
3881 
3882 	      d = TREE_OPERAND (d, 0);
3883 	      if (TREE_CODE (d) == POINTER_PLUS_EXPR)
3884 		d = TREE_OPERAND (d, 0);
3885 	      if (TREE_CODE (d) == ADDR_EXPR)
3886 		{
3887 		  if (orig_var != var)
3888 		    {
3889 		      gcc_assert (is_variable_sized (orig_var));
3890 		      x = fold_convert_loc (clause_loc, TREE_TYPE (new_var),
3891 					    x);
3892 		      gimplify_assign (new_var, x, ilist);
3893 		      tree new_orig_var = lookup_decl (orig_var, ctx);
3894 		      tree t = build_fold_indirect_ref (new_var);
3895 		      DECL_IGNORED_P (new_var) = 0;
3896 		      TREE_THIS_NOTRAP (t) = 1;
3897 		      SET_DECL_VALUE_EXPR (new_orig_var, t);
3898 		      DECL_HAS_VALUE_EXPR_P (new_orig_var) = 1;
3899 		    }
3900 		  else
3901 		    {
3902 		      x = build2 (MEM_REF, TREE_TYPE (new_var), x,
3903 				  build_int_cst (ptype, 0));
3904 		      SET_DECL_VALUE_EXPR (new_var, x);
3905 		      DECL_HAS_VALUE_EXPR_P (new_var) = 1;
3906 		    }
3907 		}
3908 	      else
3909 		{
3910 		  gcc_assert (orig_var == var);
3911 		  if (TREE_CODE (d) == INDIRECT_REF)
3912 		    {
3913 		      x = create_tmp_var (ptype, name);
3914 		      TREE_ADDRESSABLE (x) = 1;
3915 		      gimplify_assign (x, yb, ilist);
3916 		      x = build_fold_addr_expr_loc (clause_loc, x);
3917 		    }
3918 		  x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
3919 		  gimplify_assign (new_var, x, ilist);
3920 		}
3921 	      tree y1 = create_tmp_var (ptype, NULL);
3922 	      gimplify_assign (y1, y, ilist);
3923 	      tree i2 = NULL_TREE, y2 = NULL_TREE;
3924 	      tree body2 = NULL_TREE, end2 = NULL_TREE;
3925 	      tree y3 = NULL_TREE, y4 = NULL_TREE;
3926 	      if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) || is_simd)
3927 		{
3928 		  y2 = create_tmp_var (ptype, NULL);
3929 		  gimplify_assign (y2, y, ilist);
3930 		  tree ref = build_outer_var_ref (var, ctx);
3931 		  /* For ref build_outer_var_ref already performs this.  */
3932 		  if (TREE_CODE (d) == INDIRECT_REF)
3933 		    gcc_assert (omp_is_reference (var));
3934 		  else if (TREE_CODE (d) == ADDR_EXPR)
3935 		    ref = build_fold_addr_expr (ref);
3936 		  else if (omp_is_reference (var))
3937 		    ref = build_fold_addr_expr (ref);
3938 		  ref = fold_convert_loc (clause_loc, ptype, ref);
3939 		  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
3940 		      && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
3941 		    {
3942 		      y3 = create_tmp_var (ptype, NULL);
3943 		      gimplify_assign (y3, unshare_expr (ref), ilist);
3944 		    }
3945 		  if (is_simd)
3946 		    {
3947 		      y4 = create_tmp_var (ptype, NULL);
3948 		      gimplify_assign (y4, ref, dlist);
3949 		    }
3950 		}
3951 	      tree i = create_tmp_var (TREE_TYPE (v), NULL);
3952 	      gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), ilist);
3953 	      tree body = create_artificial_label (UNKNOWN_LOCATION);
3954 	      tree end = create_artificial_label (UNKNOWN_LOCATION);
3955 	      gimple_seq_add_stmt (ilist, gimple_build_label (body));
3956 	      if (y2)
3957 		{
3958 		  i2 = create_tmp_var (TREE_TYPE (v), NULL);
3959 		  gimplify_assign (i2, build_int_cst (TREE_TYPE (v), 0), dlist);
3960 		  body2 = create_artificial_label (UNKNOWN_LOCATION);
3961 		  end2 = create_artificial_label (UNKNOWN_LOCATION);
3962 		  gimple_seq_add_stmt (dlist, gimple_build_label (body2));
3963 		}
3964 	      if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
3965 		{
3966 		  tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
3967 		  tree decl_placeholder
3968 		    = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
3969 		  SET_DECL_VALUE_EXPR (decl_placeholder,
3970 				       build_simple_mem_ref (y1));
3971 		  DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
3972 		  SET_DECL_VALUE_EXPR (placeholder,
3973 				       y3 ? build_simple_mem_ref (y3)
3974 				       : error_mark_node);
3975 		  DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
3976 		  x = lang_hooks.decls.omp_clause_default_ctor
3977 				(c, build_simple_mem_ref (y1),
3978 				 y3 ? build_simple_mem_ref (y3) : NULL_TREE);
3979 		  if (x)
3980 		    gimplify_and_add (x, ilist);
3981 		  if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
3982 		    {
3983 		      gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
3984 		      lower_omp (&tseq, ctx);
3985 		      gimple_seq_add_seq (ilist, tseq);
3986 		    }
3987 		  OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
3988 		  if (is_simd)
3989 		    {
3990 		      SET_DECL_VALUE_EXPR (decl_placeholder,
3991 					   build_simple_mem_ref (y2));
3992 		      SET_DECL_VALUE_EXPR (placeholder,
3993 					   build_simple_mem_ref (y4));
3994 		      gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
3995 		      lower_omp (&tseq, ctx);
3996 		      gimple_seq_add_seq (dlist, tseq);
3997 		      OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
3998 		    }
3999 		  DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
4000 		  DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 0;
4001 		  x = lang_hooks.decls.omp_clause_dtor
4002 					(c, build_simple_mem_ref (y2));
4003 		  if (x)
4004 		    {
4005 		      gimple_seq tseq = NULL;
4006 		      dtor = x;
4007 		      gimplify_stmt (&dtor, &tseq);
4008 		      gimple_seq_add_seq (dlist, tseq);
4009 		    }
4010 		}
4011 	      else
4012 		{
4013 		  x = omp_reduction_init (c, TREE_TYPE (type));
4014 		  enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
4015 
4016 		  /* reduction(-:var) sums up the partial results, so it
4017 		     acts identically to reduction(+:var).  */
4018 		  if (code == MINUS_EXPR)
4019 		    code = PLUS_EXPR;
4020 
4021 		  gimplify_assign (build_simple_mem_ref (y1), x, ilist);
4022 		  if (is_simd)
4023 		    {
4024 		      x = build2 (code, TREE_TYPE (type),
4025 				  build_simple_mem_ref (y4),
4026 				  build_simple_mem_ref (y2));
4027 		      gimplify_assign (build_simple_mem_ref (y4), x, dlist);
4028 		    }
4029 		}
4030 	      gimple *g
4031 		= gimple_build_assign (y1, POINTER_PLUS_EXPR, y1,
4032 				       TYPE_SIZE_UNIT (TREE_TYPE (type)));
4033 	      gimple_seq_add_stmt (ilist, g);
4034 	      if (y3)
4035 		{
4036 		  g = gimple_build_assign (y3, POINTER_PLUS_EXPR, y3,
4037 					   TYPE_SIZE_UNIT (TREE_TYPE (type)));
4038 		  gimple_seq_add_stmt (ilist, g);
4039 		}
4040 	      g = gimple_build_assign (i, PLUS_EXPR, i,
4041 				       build_int_cst (TREE_TYPE (i), 1));
4042 	      gimple_seq_add_stmt (ilist, g);
4043 	      g = gimple_build_cond (LE_EXPR, i, v, body, end);
4044 	      gimple_seq_add_stmt (ilist, g);
4045 	      gimple_seq_add_stmt (ilist, gimple_build_label (end));
4046 	      if (y2)
4047 		{
4048 		  g = gimple_build_assign (y2, POINTER_PLUS_EXPR, y2,
4049 					   TYPE_SIZE_UNIT (TREE_TYPE (type)));
4050 		  gimple_seq_add_stmt (dlist, g);
4051 		  if (y4)
4052 		    {
4053 		      g = gimple_build_assign
4054 					(y4, POINTER_PLUS_EXPR, y4,
4055 					 TYPE_SIZE_UNIT (TREE_TYPE (type)));
4056 		      gimple_seq_add_stmt (dlist, g);
4057 		    }
4058 		  g = gimple_build_assign (i2, PLUS_EXPR, i2,
4059 					   build_int_cst (TREE_TYPE (i2), 1));
4060 		  gimple_seq_add_stmt (dlist, g);
4061 		  g = gimple_build_cond (LE_EXPR, i2, v, body2, end2);
4062 		  gimple_seq_add_stmt (dlist, g);
4063 		  gimple_seq_add_stmt (dlist, gimple_build_label (end2));
4064 		}
4065 	      continue;
4066 	    }
4067 	  else if (is_variable_sized (var))
4068 	    {
4069 	      /* For variable sized types, we need to allocate the
4070 		 actual storage here.  Call alloca and store the
4071 		 result in the pointer decl that we created elsewhere.  */
4072 	      if (pass == 0)
4073 		continue;
4074 
4075 	      if (c_kind != OMP_CLAUSE_FIRSTPRIVATE || !is_task_ctx (ctx))
4076 		{
4077 		  gcall *stmt;
4078 		  tree tmp, atmp;
4079 
4080 		  ptr = DECL_VALUE_EXPR (new_var);
4081 		  gcc_assert (TREE_CODE (ptr) == INDIRECT_REF);
4082 		  ptr = TREE_OPERAND (ptr, 0);
4083 		  gcc_assert (DECL_P (ptr));
4084 		  x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
4085 
4086 		  /* void *tmp = __builtin_alloca */
4087 		  atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
4088 		  stmt = gimple_build_call (atmp, 2, x,
4089 					    size_int (DECL_ALIGN (var)));
4090 		  cfun->calls_alloca = 1;
4091 		  tmp = create_tmp_var_raw (ptr_type_node);
4092 		  gimple_add_tmp_var (tmp);
4093 		  gimple_call_set_lhs (stmt, tmp);
4094 
4095 		  gimple_seq_add_stmt (ilist, stmt);
4096 
4097 		  x = fold_convert_loc (clause_loc, TREE_TYPE (ptr), tmp);
4098 		  gimplify_assign (ptr, x, ilist);
4099 		}
4100 	    }
4101 	  else if (omp_is_reference (var))
4102 	    {
4103 	      /* For references that are being privatized for Fortran,
4104 		 allocate new backing storage for the new pointer
4105 		 variable.  This allows us to avoid changing all the
4106 		 code that expects a pointer to something that expects
4107 		 a direct variable.  */
4108 	      if (pass == 0)
4109 		continue;
4110 
4111 	      x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
4112 	      if (c_kind == OMP_CLAUSE_FIRSTPRIVATE && is_task_ctx (ctx))
4113 		{
4114 		  x = build_receiver_ref (var, false, ctx);
4115 		  x = build_fold_addr_expr_loc (clause_loc, x);
4116 		}
4117 	      else if (TREE_CONSTANT (x))
4118 		{
4119 		  /* For reduction in SIMD loop, defer adding the
4120 		     initialization of the reference, because if we decide
4121 		     to use a SIMD array for it, the initialization could
4122 		     cause an expansion ICE.  */
4123 		  if (c_kind == OMP_CLAUSE_REDUCTION && is_simd)
4124 		    x = NULL_TREE;
4125 		  else
4126 		    {
4127 		      x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
4128 					      get_name (var));
4129 		      gimple_add_tmp_var (x);
4130 		      TREE_ADDRESSABLE (x) = 1;
4131 		      x = build_fold_addr_expr_loc (clause_loc, x);
4132 		    }
4133 		}
4134 	      else
4135 		{
4136 		  tree atmp
4137 		    = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
4138 		  tree rtype = TREE_TYPE (TREE_TYPE (new_var));
4139 		  tree al = size_int (TYPE_ALIGN (rtype));
4140 		  x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
4141 		}
4142 
4143 	      if (x)
4144 		{
4145 		  x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
4146 		  gimplify_assign (new_var, x, ilist);
4147 		}
4148 
4149 	      new_var = build_simple_mem_ref_loc (clause_loc, new_var);
4150 	    }
4151 	  else if (c_kind == OMP_CLAUSE_REDUCTION
4152 		   && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
4153 	    {
4154 	      if (pass == 0)
4155 		continue;
4156 	    }
4157 	  else if (pass != 0)
4158 	    continue;
4159 
4160 	  switch (OMP_CLAUSE_CODE (c))
4161 	    {
4162 	    case OMP_CLAUSE_SHARED:
4163 	      /* Ignore shared directives in teams construct.  */
4164 	      if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
4165 		continue;
4166 	      /* Shared global vars are just accessed directly.  */
4167 	      if (is_global_var (new_var))
4168 		break;
4169 	      /* For taskloop firstprivate/lastprivate, represented
4170 		 as firstprivate and shared clause on the task, new_var
4171 		 is the firstprivate var.  */
4172 	      if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
4173 		break;
4174 	      /* Set up the DECL_VALUE_EXPR for shared variables now.  This
4175 		 needs to be delayed until after fixup_child_record_type so
4176 		 that we get the correct type during the dereference.  */
4177 	      by_ref = use_pointer_for_field (var, ctx);
4178 	      x = build_receiver_ref (var, by_ref, ctx);
4179 	      SET_DECL_VALUE_EXPR (new_var, x);
4180 	      DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4181 
4182 	      /* ??? If VAR is not passed by reference, and the variable
4183 		 hasn't been initialized yet, then we'll get a warning for
4184 		 the store into the omp_data_s structure.  Ideally, we'd be
4185 		 able to notice this and not store anything at all, but
4186 		 we're generating code too early.  Suppress the warning.  */
4187 	      if (!by_ref)
4188 		TREE_NO_WARNING (var) = 1;
4189 	      break;
4190 
4191 	    case OMP_CLAUSE_LASTPRIVATE:
4192 	      if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
4193 		break;
4194 	      /* FALLTHRU */
4195 
4196 	    case OMP_CLAUSE_PRIVATE:
4197 	      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE)
4198 		x = build_outer_var_ref (var, ctx);
4199 	      else if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
4200 		{
4201 		  if (is_task_ctx (ctx))
4202 		    x = build_receiver_ref (var, false, ctx);
4203 		  else
4204 		    x = build_outer_var_ref (var, ctx, OMP_CLAUSE_PRIVATE);
4205 		}
4206 	      else
4207 		x = NULL;
4208 	    do_private:
4209 	      tree nx;
4210 	      nx = lang_hooks.decls.omp_clause_default_ctor
4211 						(c, unshare_expr (new_var), x);
4212 	      if (is_simd)
4213 		{
4214 		  tree y = lang_hooks.decls.omp_clause_dtor (c, new_var);
4215 		  if ((TREE_ADDRESSABLE (new_var) || nx || y
4216 		       || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
4217 		      && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4218 						       ivar, lvar))
4219 		    {
4220 		      if (nx)
4221 			x = lang_hooks.decls.omp_clause_default_ctor
4222 						(c, unshare_expr (ivar), x);
4223 		      if (nx && x)
4224 			gimplify_and_add (x, &llist[0]);
4225 		      if (y)
4226 			{
4227 			  y = lang_hooks.decls.omp_clause_dtor (c, ivar);
4228 			  if (y)
4229 			    {
4230 			      gimple_seq tseq = NULL;
4231 
4232 			      dtor = y;
4233 			      gimplify_stmt (&dtor, &tseq);
4234 			      gimple_seq_add_seq (&llist[1], tseq);
4235 			    }
4236 			}
4237 		      break;
4238 		    }
4239 		}
4240 	      if (nx)
4241 		gimplify_and_add (nx, ilist);
4242 	      /* FALLTHRU */
4243 
4244 	    do_dtor:
4245 	      x = lang_hooks.decls.omp_clause_dtor (c, new_var);
4246 	      if (x)
4247 		{
4248 		  gimple_seq tseq = NULL;
4249 
4250 		  dtor = x;
4251 		  gimplify_stmt (&dtor, &tseq);
4252 		  gimple_seq_add_seq (dlist, tseq);
4253 		}
4254 	      break;
4255 
4256 	    case OMP_CLAUSE_LINEAR:
4257 	      if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
4258 		goto do_firstprivate;
4259 	      if (OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
4260 		x = NULL;
4261 	      else
4262 		x = build_outer_var_ref (var, ctx);
4263 	      goto do_private;
4264 
4265 	    case OMP_CLAUSE_FIRSTPRIVATE:
4266 	      if (is_task_ctx (ctx))
4267 		{
4268 		  if (omp_is_reference (var) || is_variable_sized (var))
4269 		    goto do_dtor;
4270 		  else if (is_global_var (maybe_lookup_decl_in_outer_ctx (var,
4271 									  ctx))
4272 			   || use_pointer_for_field (var, NULL))
4273 		    {
4274 		      x = build_receiver_ref (var, false, ctx);
4275 		      SET_DECL_VALUE_EXPR (new_var, x);
4276 		      DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4277 		      goto do_dtor;
4278 		    }
4279 		}
4280 	    do_firstprivate:
4281 	      x = build_outer_var_ref (var, ctx);
4282 	      if (is_simd)
4283 		{
4284 		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
4285 		      && gimple_omp_for_combined_into_p (ctx->stmt))
4286 		    {
4287 		      tree t = OMP_CLAUSE_LINEAR_STEP (c);
4288 		      tree stept = TREE_TYPE (t);
4289 		      tree ct = omp_find_clause (clauses,
4290 						 OMP_CLAUSE__LOOPTEMP_);
4291 		      gcc_assert (ct);
4292 		      tree l = OMP_CLAUSE_DECL (ct);
4293 		      tree n1 = fd->loop.n1;
4294 		      tree step = fd->loop.step;
4295 		      tree itype = TREE_TYPE (l);
4296 		      if (POINTER_TYPE_P (itype))
4297 			itype = signed_type_for (itype);
4298 		      l = fold_build2 (MINUS_EXPR, itype, l, n1);
4299 		      if (TYPE_UNSIGNED (itype)
4300 			  && fd->loop.cond_code == GT_EXPR)
4301 			l = fold_build2 (TRUNC_DIV_EXPR, itype,
4302 					 fold_build1 (NEGATE_EXPR, itype, l),
4303 					 fold_build1 (NEGATE_EXPR,
4304 						      itype, step));
4305 		      else
4306 			l = fold_build2 (TRUNC_DIV_EXPR, itype, l, step);
4307 		      t = fold_build2 (MULT_EXPR, stept,
4308 				       fold_convert (stept, l), t);
4309 
4310 		      if (OMP_CLAUSE_LINEAR_ARRAY (c))
4311 			{
4312 			  x = lang_hooks.decls.omp_clause_linear_ctor
4313 							(c, new_var, x, t);
4314 			  gimplify_and_add (x, ilist);
4315 			  goto do_dtor;
4316 			}
4317 
4318 		      if (POINTER_TYPE_P (TREE_TYPE (x)))
4319 			x = fold_build2 (POINTER_PLUS_EXPR,
4320 					 TREE_TYPE (x), x, t);
4321 		      else
4322 			x = fold_build2 (PLUS_EXPR, TREE_TYPE (x), x, t);
4323 		    }
4324 
4325 		  if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_LINEAR
4326 		       || TREE_ADDRESSABLE (new_var))
4327 		      && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4328 						       ivar, lvar))
4329 		    {
4330 		      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR)
4331 			{
4332 			  tree iv = create_tmp_var (TREE_TYPE (new_var));
4333 			  x = lang_hooks.decls.omp_clause_copy_ctor (c, iv, x);
4334 			  gimplify_and_add (x, ilist);
4335 			  gimple_stmt_iterator gsi
4336 			    = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
4337 			  gassign *g
4338 			    = gimple_build_assign (unshare_expr (lvar), iv);
4339 			  gsi_insert_before_without_update (&gsi, g,
4340 							    GSI_SAME_STMT);
4341 			  tree t = OMP_CLAUSE_LINEAR_STEP (c);
4342 			  enum tree_code code = PLUS_EXPR;
4343 			  if (POINTER_TYPE_P (TREE_TYPE (new_var)))
4344 			    code = POINTER_PLUS_EXPR;
4345 			  g = gimple_build_assign (iv, code, iv, t);
4346 			  gsi_insert_before_without_update (&gsi, g,
4347 							    GSI_SAME_STMT);
4348 			  break;
4349 			}
4350 		      x = lang_hooks.decls.omp_clause_copy_ctor
4351 						(c, unshare_expr (ivar), x);
4352 		      gimplify_and_add (x, &llist[0]);
4353 		      x = lang_hooks.decls.omp_clause_dtor (c, ivar);
4354 		      if (x)
4355 			{
4356 			  gimple_seq tseq = NULL;
4357 
4358 			  dtor = x;
4359 			  gimplify_stmt (&dtor, &tseq);
4360 			  gimple_seq_add_seq (&llist[1], tseq);
4361 			}
4362 		      break;
4363 		    }
4364 		}
4365 	      x = lang_hooks.decls.omp_clause_copy_ctor
4366 						(c, unshare_expr (new_var), x);
4367 	      gimplify_and_add (x, ilist);
4368 	      goto do_dtor;
4369 
4370 	    case OMP_CLAUSE__LOOPTEMP_:
4371 	      gcc_assert (is_taskreg_ctx (ctx));
4372 	      x = build_outer_var_ref (var, ctx);
4373 	      x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
4374 	      gimplify_and_add (x, ilist);
4375 	      break;
4376 
4377 	    case OMP_CLAUSE_COPYIN:
4378 	      by_ref = use_pointer_for_field (var, NULL);
4379 	      x = build_receiver_ref (var, by_ref, ctx);
4380 	      x = lang_hooks.decls.omp_clause_assign_op (c, new_var, x);
4381 	      append_to_statement_list (x, &copyin_seq);
4382 	      copyin_by_ref |= by_ref;
4383 	      break;
4384 
4385 	    case OMP_CLAUSE_REDUCTION:
4386 	      /* OpenACC reductions are initialized using the
4387 		 GOACC_REDUCTION internal function.  */
4388 	      if (is_gimple_omp_oacc (ctx->stmt))
4389 		break;
4390 	      if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
4391 		{
4392 		  tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
4393 		  gimple *tseq;
4394 		  x = build_outer_var_ref (var, ctx);
4395 
4396 		  if (omp_is_reference (var)
4397 		      && !useless_type_conversion_p (TREE_TYPE (placeholder),
4398 						     TREE_TYPE (x)))
4399 		    x = build_fold_addr_expr_loc (clause_loc, x);
4400 		  SET_DECL_VALUE_EXPR (placeholder, x);
4401 		  DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
4402 		  tree new_vard = new_var;
4403 		  if (omp_is_reference (var))
4404 		    {
4405 		      gcc_assert (TREE_CODE (new_var) == MEM_REF);
4406 		      new_vard = TREE_OPERAND (new_var, 0);
4407 		      gcc_assert (DECL_P (new_vard));
4408 		    }
4409 		  if (is_simd
4410 		      && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4411 						       ivar, lvar))
4412 		    {
4413 		      if (new_vard == new_var)
4414 			{
4415 			  gcc_assert (DECL_VALUE_EXPR (new_var) == lvar);
4416 			  SET_DECL_VALUE_EXPR (new_var, ivar);
4417 			}
4418 		      else
4419 			{
4420 			  SET_DECL_VALUE_EXPR (new_vard,
4421 					       build_fold_addr_expr (ivar));
4422 			  DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
4423 			}
4424 		      x = lang_hooks.decls.omp_clause_default_ctor
4425 				(c, unshare_expr (ivar),
4426 				 build_outer_var_ref (var, ctx));
4427 		      if (x)
4428 			gimplify_and_add (x, &llist[0]);
4429 		      if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
4430 			{
4431 			  tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
4432 			  lower_omp (&tseq, ctx);
4433 			  gimple_seq_add_seq (&llist[0], tseq);
4434 			}
4435 		      OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
4436 		      tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
4437 		      lower_omp (&tseq, ctx);
4438 		      gimple_seq_add_seq (&llist[1], tseq);
4439 		      OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
4440 		      DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
4441 		      if (new_vard == new_var)
4442 			SET_DECL_VALUE_EXPR (new_var, lvar);
4443 		      else
4444 			SET_DECL_VALUE_EXPR (new_vard,
4445 					     build_fold_addr_expr (lvar));
4446 		      x = lang_hooks.decls.omp_clause_dtor (c, ivar);
4447 		      if (x)
4448 			{
4449 			  tseq = NULL;
4450 			  dtor = x;
4451 			  gimplify_stmt (&dtor, &tseq);
4452 			  gimple_seq_add_seq (&llist[1], tseq);
4453 			}
4454 		      break;
4455 		    }
4456 		  /* If this is a reference to constant size reduction var
4457 		     with placeholder, we haven't emitted the initializer
4458 		     for it because it is undesirable if SIMD arrays are used.
4459 		     But if they aren't used, we need to emit the deferred
4460 		     initialization now.  */
4461 		  else if (omp_is_reference (var) && is_simd)
4462 		    handle_simd_reference (clause_loc, new_vard, ilist);
4463 		  x = lang_hooks.decls.omp_clause_default_ctor
4464 				(c, unshare_expr (new_var),
4465 				 build_outer_var_ref (var, ctx));
4466 		  if (x)
4467 		    gimplify_and_add (x, ilist);
4468 		  if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
4469 		    {
4470 		      tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
4471 		      lower_omp (&tseq, ctx);
4472 		      gimple_seq_add_seq (ilist, tseq);
4473 		    }
4474 		  OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
4475 		  if (is_simd)
4476 		    {
4477 		      tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
4478 		      lower_omp (&tseq, ctx);
4479 		      gimple_seq_add_seq (dlist, tseq);
4480 		      OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
4481 		    }
4482 		  DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
4483 		  goto do_dtor;
4484 		}
4485 	      else
4486 		{
4487 		  x = omp_reduction_init (c, TREE_TYPE (new_var));
4488 		  gcc_assert (TREE_CODE (TREE_TYPE (new_var)) != ARRAY_TYPE);
4489 		  enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
4490 
4491 		  /* reduction(-:var) sums up the partial results, so it
4492 		     acts identically to reduction(+:var).  */
4493 		  if (code == MINUS_EXPR)
4494 		    code = PLUS_EXPR;
4495 
4496 		  tree new_vard = new_var;
4497 		  if (is_simd && omp_is_reference (var))
4498 		    {
4499 		      gcc_assert (TREE_CODE (new_var) == MEM_REF);
4500 		      new_vard = TREE_OPERAND (new_var, 0);
4501 		      gcc_assert (DECL_P (new_vard));
4502 		    }
4503 		  if (is_simd
4504 		      && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
4505 						       ivar, lvar))
4506 		    {
4507 		      tree ref = build_outer_var_ref (var, ctx);
4508 
4509 		      gimplify_assign (unshare_expr (ivar), x, &llist[0]);
4510 
4511 		      if (sctx.is_simt)
4512 			{
4513 			  if (!simt_lane)
4514 			    simt_lane = create_tmp_var (unsigned_type_node);
4515 			  x = build_call_expr_internal_loc
4516 			    (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_BFLY,
4517 			     TREE_TYPE (ivar), 2, ivar, simt_lane);
4518 			  x = build2 (code, TREE_TYPE (ivar), ivar, x);
4519 			  gimplify_assign (ivar, x, &llist[2]);
4520 			}
4521 		      x = build2 (code, TREE_TYPE (ref), ref, ivar);
4522 		      ref = build_outer_var_ref (var, ctx);
4523 		      gimplify_assign (ref, x, &llist[1]);
4524 
4525 		      if (new_vard != new_var)
4526 			{
4527 			  SET_DECL_VALUE_EXPR (new_vard,
4528 					       build_fold_addr_expr (lvar));
4529 			  DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
4530 			}
4531 		    }
4532 		  else
4533 		    {
4534 		      if (omp_is_reference (var) && is_simd)
4535 			handle_simd_reference (clause_loc, new_vard, ilist);
4536 		      gimplify_assign (new_var, x, ilist);
4537 		      if (is_simd)
4538 			{
4539 			  tree ref = build_outer_var_ref (var, ctx);
4540 
4541 			  x = build2 (code, TREE_TYPE (ref), ref, new_var);
4542 			  ref = build_outer_var_ref (var, ctx);
4543 			  gimplify_assign (ref, x, dlist);
4544 			}
4545 		    }
4546 		}
4547 	      break;
4548 
4549 	    default:
4550 	      gcc_unreachable ();
4551 	    }
4552 	}
4553     }
4554 
4555   if (known_eq (sctx.max_vf, 1U))
4556     sctx.is_simt = false;
4557 
4558   if (sctx.lane || sctx.is_simt)
4559     {
4560       uid = create_tmp_var (ptr_type_node, "simduid");
4561       /* Don't warn about simduid being uninitialized: it always is,
4562 	 since we only use its DECL_UID, never its value.  */
4563       TREE_NO_WARNING (uid) = 1;
4564       c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SIMDUID_);
4565       OMP_CLAUSE__SIMDUID__DECL (c) = uid;
4566       OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
4567       gimple_omp_for_set_clauses (ctx->stmt, c);
4568     }
4569   /* Emit calls denoting privatized variables and initializing a pointer to
4570      structure that holds private variables as fields after ompdevlow pass.  */
4571   if (sctx.is_simt)
4572     {
4573       sctx.simt_eargs[0] = uid;
4574       gimple *g
4575 	= gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, sctx.simt_eargs);
4576       gimple_call_set_lhs (g, uid);
4577       gimple_seq_add_stmt (ilist, g);
4578       sctx.simt_eargs.release ();
4579 
4580       simtrec = create_tmp_var (ptr_type_node, ".omp_simt");
4581       g = gimple_build_call_internal (IFN_GOMP_SIMT_ENTER_ALLOC, 1, uid);
4582       gimple_call_set_lhs (g, simtrec);
4583       gimple_seq_add_stmt (ilist, g);
4584     }
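  /* Schematically, the two calls just emitted are:

	simduid = GOMP_SIMT_ENTER (simduid, &priv_1, ..., &priv_N);
	.omp_simt = GOMP_SIMT_ENTER_ALLOC (simduid);

     with the &priv_i being the "omp simt private" addresses collected
     in sctx.simt_eargs above.  */
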
4585   if (sctx.lane)
4586     {
4587       gimple *g
4588 	= gimple_build_call_internal (IFN_GOMP_SIMD_LANE, 1, uid);
4589       gimple_call_set_lhs (g, sctx.lane);
4590       gimple_stmt_iterator gsi = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
4591       gsi_insert_before_without_update (&gsi, g, GSI_SAME_STMT);
4592       g = gimple_build_assign (sctx.lane, INTEGER_CST,
4593 			       build_int_cst (unsigned_type_node, 0));
4594       gimple_seq_add_stmt (ilist, g);
4595       /* Emit reductions across SIMT lanes in log_2(simt_vf) steps.  */
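      /* Schematically, with OP the reduction operation:

	     simt_vf = GOMP_SIMT_VF ();
	     for (simt_lane = 1; simt_lane < simt_vf; simt_lane <<= 1)
	       ivar = ivar OP GOMP_SIMT_XCHG_BFLY (ivar, simt_lane);

	 a butterfly exchange combining all lanes' partial results.  */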
4596       if (llist[2])
4597 	{
4598 	  tree simt_vf = create_tmp_var (unsigned_type_node);
4599 	  g = gimple_build_call_internal (IFN_GOMP_SIMT_VF, 0);
4600 	  gimple_call_set_lhs (g, simt_vf);
4601 	  gimple_seq_add_stmt (dlist, g);
4602 
4603 	  tree t = build_int_cst (unsigned_type_node, 1);
4604 	  g = gimple_build_assign (simt_lane, INTEGER_CST, t);
4605 	  gimple_seq_add_stmt (dlist, g);
4606 
4607 	  t = build_int_cst (unsigned_type_node, 0);
4608 	  g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
4609 	  gimple_seq_add_stmt (dlist, g);
4610 
4611 	  tree body = create_artificial_label (UNKNOWN_LOCATION);
4612 	  tree header = create_artificial_label (UNKNOWN_LOCATION);
4613 	  tree end = create_artificial_label (UNKNOWN_LOCATION);
4614 	  gimple_seq_add_stmt (dlist, gimple_build_goto (header));
4615 	  gimple_seq_add_stmt (dlist, gimple_build_label (body));
4616 
4617 	  gimple_seq_add_seq (dlist, llist[2]);
4618 
4619 	  g = gimple_build_assign (simt_lane, LSHIFT_EXPR, simt_lane, integer_one_node);
4620 	  gimple_seq_add_stmt (dlist, g);
4621 
4622 	  gimple_seq_add_stmt (dlist, gimple_build_label (header));
4623 	  g = gimple_build_cond (LT_EXPR, simt_lane, simt_vf, body, end);
4624 	  gimple_seq_add_stmt (dlist, g);
4625 
4626 	  gimple_seq_add_stmt (dlist, gimple_build_label (end));
4627 	}
4628       for (int i = 0; i < 2; i++)
4629 	if (llist[i])
4630 	  {
4631 	    tree vf = create_tmp_var (unsigned_type_node);
4632 	    g = gimple_build_call_internal (IFN_GOMP_SIMD_VF, 1, uid);
4633 	    gimple_call_set_lhs (g, vf);
4634 	    gimple_seq *seq = i == 0 ? ilist : dlist;
4635 	    gimple_seq_add_stmt (seq, g);
4636 	    tree t = build_int_cst (unsigned_type_node, 0);
4637 	    g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
4638 	    gimple_seq_add_stmt (seq, g);
4639 	    tree body = create_artificial_label (UNKNOWN_LOCATION);
4640 	    tree header = create_artificial_label (UNKNOWN_LOCATION);
4641 	    tree end = create_artificial_label (UNKNOWN_LOCATION);
4642 	    gimple_seq_add_stmt (seq, gimple_build_goto (header));
4643 	    gimple_seq_add_stmt (seq, gimple_build_label (body));
4644 	    gimple_seq_add_seq (seq, llist[i]);
4645 	    t = build_int_cst (unsigned_type_node, 1);
4646 	    g = gimple_build_assign (sctx.idx, PLUS_EXPR, sctx.idx, t);
4647 	    gimple_seq_add_stmt (seq, g);
4648 	    gimple_seq_add_stmt (seq, gimple_build_label (header));
4649 	    g = gimple_build_cond (LT_EXPR, sctx.idx, vf, body, end);
4650 	    gimple_seq_add_stmt (seq, g);
4651 	    gimple_seq_add_stmt (seq, gimple_build_label (end));
4652 	  }
4653     }
4654   if (sctx.is_simt)
4655     {
4656       gimple_seq_add_seq (dlist, sctx.simt_dlist);
4657       gimple *g
4658 	= gimple_build_call_internal (IFN_GOMP_SIMT_EXIT, 1, simtrec);
4659       gimple_seq_add_stmt (dlist, g);
4660     }
4661 
4662   /* The copyin sequence is not to be executed by the main thread, since
4663      that would result in self-copies.  Perhaps not visible to scalars,
4664      but it certainly is to C++ operator=.  */
4665   if (copyin_seq)
4666     {
4667       x = build_call_expr (builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM),
4668 			   0);
4669       x = build2 (NE_EXPR, boolean_type_node, x,
4670 		  build_int_cst (TREE_TYPE (x), 0));
4671       x = build3 (COND_EXPR, void_type_node, x, copyin_seq, NULL);
4672       gimplify_and_add (x, ilist);
4673     }
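
  /* I.e. the copyin assignments end up guarded as, roughly:

	if (omp_get_thread_num () != 0)
	  <copyin_seq: copy threadprivate vars from the master thread>
   */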
4674 
4675   /* If any copyin variable is passed by reference, we must ensure the
4676      master thread doesn't modify it before it is copied over in all
4677      threads.  Similarly for variables in both firstprivate and
4678      lastprivate clauses we need to ensure the lastprivate copying
4679      happens after firstprivate copying in all threads.  And similarly
4680      for UDRs if initializer expression refers to omp_orig.  */
4681   if (copyin_by_ref || lastprivate_firstprivate || reduction_omp_orig_ref)
4682     {
4683       /* Don't add any barrier for #pragma omp simd or
4684 	 #pragma omp distribute.  */
4685       if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
4686 	  || gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_FOR)
4687 	gimple_seq_add_stmt (ilist, omp_build_barrier (NULL_TREE));
4688     }
4689 
4690   /* If max_vf is non-zero, then we can use only a vectorization factor
4691      up to the max_vf we chose.  So stick it into the safelen clause.  */
4692   if (maybe_ne (sctx.max_vf, 0U))
4693     {
4694       tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
4695 				OMP_CLAUSE_SAFELEN);
4696       poly_uint64 safe_len;
4697       if (c == NULL_TREE
4698 	  || (poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
4699 	      && maybe_gt (safe_len, sctx.max_vf)))
4700 	{
4701 	  c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_SAFELEN);
4702 	  OMP_CLAUSE_SAFELEN_EXPR (c) = build_int_cst (integer_type_node,
4703 						       sctx.max_vf);
4704 	  OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
4705 	  gimple_omp_for_set_clauses (ctx->stmt, c);
4706 	}
4707     }
4708 }
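
/* E.g. if the user wrote safelen(16) but the privatization above settled
   on max_vf == 8, a tighter safelen(8) clause is prepended here (and one
   is added when safelen was absent), so the vectorizer never assumes
   more simultaneous lanes than were provisioned for.  */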
4709 
4710 
4711 /* Generate code to implement the LASTPRIVATE clauses.  This is used for
4712    both parallel and workshare constructs.  PREDICATE may be NULL if it's
4713    always true.   */
4714 
4715 static void
4716 lower_lastprivate_clauses (tree clauses, tree predicate, gimple_seq *stmt_list,
4717 			   omp_context *ctx)
4718 {
4719   tree x, c, label = NULL, orig_clauses = clauses;
4720   bool par_clauses = false;
4721   tree simduid = NULL, lastlane = NULL, simtcond = NULL, simtlast = NULL;
4722 
4723   /* Early exit if there are no lastprivate or linear clauses.  */
4724   for (; clauses ; clauses = OMP_CLAUSE_CHAIN (clauses))
4725     if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LASTPRIVATE
4726 	|| (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LINEAR
4727 	    && !OMP_CLAUSE_LINEAR_NO_COPYOUT (clauses)))
4728       break;
4729   if (clauses == NULL)
4730     {
4731       /* If this was a workshare clause, see if it had been combined
4732 	 with its parallel.  In that case, look for the clauses on the
4733 	 parallel statement itself.  */
4734       if (is_parallel_ctx (ctx))
4735 	return;
4736 
4737       ctx = ctx->outer;
4738       if (ctx == NULL || !is_parallel_ctx (ctx))
4739 	return;
4740 
4741       clauses = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
4742 				 OMP_CLAUSE_LASTPRIVATE);
4743       if (clauses == NULL)
4744 	return;
4745       par_clauses = true;
4746     }
4747 
4748   bool maybe_simt = false;
4749   if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
4750       && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
4751     {
4752       maybe_simt = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMT_);
4753       simduid = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMDUID_);
4754       if (simduid)
4755 	simduid = OMP_CLAUSE__SIMDUID__DECL (simduid);
4756     }
4757 
4758   if (predicate)
4759     {
4760       gcond *stmt;
4761       tree label_true, arm1, arm2;
4762       enum tree_code pred_code = TREE_CODE (predicate);
4763 
4764       label = create_artificial_label (UNKNOWN_LOCATION);
4765       label_true = create_artificial_label (UNKNOWN_LOCATION);
4766       if (TREE_CODE_CLASS (pred_code) == tcc_comparison)
4767 	{
4768 	  arm1 = TREE_OPERAND (predicate, 0);
4769 	  arm2 = TREE_OPERAND (predicate, 1);
4770 	  gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
4771 	  gimplify_expr (&arm2, stmt_list, NULL, is_gimple_val, fb_rvalue);
4772 	}
4773       else
4774 	{
4775 	  arm1 = predicate;
4776 	  gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
4777 	  arm2 = boolean_false_node;
4778 	  pred_code = NE_EXPR;
4779 	}
4780       if (maybe_simt)
4781 	{
4782 	  c = build2 (pred_code, boolean_type_node, arm1, arm2);
4783 	  c = fold_convert (integer_type_node, c);
4784 	  simtcond = create_tmp_var (integer_type_node);
4785 	  gimplify_assign (simtcond, c, stmt_list);
4786 	  gcall *g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY,
4787 						 1, simtcond);
4788 	  c = create_tmp_var (integer_type_node);
4789 	  gimple_call_set_lhs (g, c);
4790 	  gimple_seq_add_stmt (stmt_list, g);
4791 	  stmt = gimple_build_cond (NE_EXPR, c, integer_zero_node,
4792 				    label_true, label);
4793 	}
4794       else
4795 	stmt = gimple_build_cond (pred_code, arm1, arm2, label_true, label);
4796       gimple_seq_add_stmt (stmt_list, stmt);
4797       gimple_seq_add_stmt (stmt_list, gimple_build_label (label_true));
4798     }
4799 
4800   for (c = clauses; c ;)
4801     {
4802       tree var, new_var;
4803       location_t clause_loc = OMP_CLAUSE_LOCATION (c);
4804 
4805       if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4806 	  || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
4807 	      && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
4808 	{
4809 	  var = OMP_CLAUSE_DECL (c);
4810 	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4811 	      && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
4812 	      && is_taskloop_ctx (ctx))
4813 	    {
4814 	      gcc_checking_assert (ctx->outer && is_task_ctx (ctx->outer));
4815 	      new_var = lookup_decl (var, ctx->outer);
4816 	    }
4817 	  else
4818 	    {
4819 	      new_var = lookup_decl (var, ctx);
4820 	      /* Avoid uninitialized warnings for lastprivate and
4821 		 for linear iterators.  */
4822 	      if (predicate
4823 		  && (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4824 		      || OMP_CLAUSE_LINEAR_NO_COPYIN (c)))
4825 		TREE_NO_WARNING (new_var) = 1;
4826 	    }
4827 
4828 	  if (!maybe_simt && simduid && DECL_HAS_VALUE_EXPR_P (new_var))
4829 	    {
4830 	      tree val = DECL_VALUE_EXPR (new_var);
4831 	      if (TREE_CODE (val) == ARRAY_REF
4832 		  && VAR_P (TREE_OPERAND (val, 0))
4833 		  && lookup_attribute ("omp simd array",
4834 				       DECL_ATTRIBUTES (TREE_OPERAND (val,
4835 								      0))))
4836 		{
4837 		  if (lastlane == NULL)
4838 		    {
4839 		      lastlane = create_tmp_var (unsigned_type_node);
4840 		      gcall *g
4841 			= gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
4842 						      2, simduid,
4843 						      TREE_OPERAND (val, 1));
4844 		      gimple_call_set_lhs (g, lastlane);
4845 		      gimple_seq_add_stmt (stmt_list, g);
4846 		    }
4847 		  new_var = build4 (ARRAY_REF, TREE_TYPE (val),
4848 				    TREE_OPERAND (val, 0), lastlane,
4849 				    NULL_TREE, NULL_TREE);
4850 		}
4851 	    }
4852 	  else if (maybe_simt)
4853 	    {
4854 	      tree val = (DECL_HAS_VALUE_EXPR_P (new_var)
4855 			  ? DECL_VALUE_EXPR (new_var)
4856 			  : new_var);
4857 	      if (simtlast == NULL)
4858 		{
4859 		  simtlast = create_tmp_var (unsigned_type_node);
4860 		  gcall *g = gimple_build_call_internal
4861 		    (IFN_GOMP_SIMT_LAST_LANE, 1, simtcond);
4862 		  gimple_call_set_lhs (g, simtlast);
4863 		  gimple_seq_add_stmt (stmt_list, g);
4864 		}
4865 	      x = build_call_expr_internal_loc
4866 		(UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_IDX,
4867 		 TREE_TYPE (val), 2, val, simtlast);
4868 	      new_var = unshare_expr (new_var);
4869 	      gimplify_assign (new_var, x, stmt_list);
4870 	      new_var = unshare_expr (new_var);
4871 	    }
4872 
4873 	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4874 	      && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
4875 	    {
4876 	      lower_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
4877 	      gimple_seq_add_seq (stmt_list,
4878 				  OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
4879 	      OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) = NULL;
4880 	    }
4881 	  else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
4882 		   && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
4883 	    {
4884 	      lower_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
4885 	      gimple_seq_add_seq (stmt_list,
4886 				  OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
4887 	      OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) = NULL;
4888 	    }
4889 
4890 	  x = NULL_TREE;
4891 	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
4892 	      && OMP_CLAUSE_LASTPRIVATE_TASKLOOP_IV (c))
4893 	    {
4894 	      gcc_checking_assert (is_taskloop_ctx (ctx));
4895 	      tree ovar = maybe_lookup_decl_in_outer_ctx (var,
4896 							  ctx->outer->outer);
4897 	      if (is_global_var (ovar))
4898 		x = ovar;
4899 	    }
4900 	  if (!x)
4901 	    x = build_outer_var_ref (var, ctx, OMP_CLAUSE_LASTPRIVATE);
4902 	  if (omp_is_reference (var))
4903 	    new_var = build_simple_mem_ref_loc (clause_loc, new_var);
4904 	  x = lang_hooks.decls.omp_clause_assign_op (c, x, new_var);
4905 	  gimplify_and_add (x, stmt_list);
4906 	}
4907       c = OMP_CLAUSE_CHAIN (c);
4908       if (c == NULL && !par_clauses)
4909 	{
4910 	  /* If this was a workshare clause, see if it had been combined
4911 	     with its parallel.  In that case, continue looking for the
4912 	     clauses also on the parallel statement itself.  */
4913 	  if (is_parallel_ctx (ctx))
4914 	    break;
4915 
4916 	  ctx = ctx->outer;
4917 	  if (ctx == NULL || !is_parallel_ctx (ctx))
4918 	    break;
4919 
4920 	  c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
4921 			       OMP_CLAUSE_LASTPRIVATE);
4922 	  par_clauses = true;
4923 	}
4924     }
4925 
4926   if (label)
4927     gimple_seq_add_stmt (stmt_list, gimple_build_label (label));
4928 }
4929 
4930 /* Lower the OpenACC reductions of CLAUSES for compute axis LEVEL
4931    (which might be a placeholder).  INNER is true if this is an inner
4932    axis of a multi-axis loop.  FORK and JOIN are (optional) fork and
4933    join markers.  Generate the before-loop forking sequence in
4934    FORK_SEQ and the after-loop joining sequence in JOIN_SEQ.  The
4935    general form of these sequences is
4936 
4937      GOACC_REDUCTION_SETUP
4938      GOACC_FORK
4939      GOACC_REDUCTION_INIT
4940      ...
4941      GOACC_REDUCTION_FINI
4942      GOACC_JOIN
4943      GOACC_REDUCTION_TEARDOWN.  */
4944 
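/* As a sketch, each reduction clause on a variable RES contributes,
   via the v1/v2/v3 temporaries created below (not verbatim GIMPLE):

     v1 = GOACC_REDUCTION (SETUP, &RES, incoming, LEVEL, OP, OFFSET);
     GOACC_FORK
     v2 = GOACC_REDUCTION (INIT, &RES, v1, LEVEL, OP, OFFSET);
     ...
     v3 = GOACC_REDUCTION (FINI, &RES, v2, LEVEL, OP, OFFSET);
     GOACC_JOIN
     outgoing = GOACC_REDUCTION (TEARDOWN, &RES, v3, LEVEL, OP, OFFSET);  */
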
4945 static void
4946 lower_oacc_reductions (location_t loc, tree clauses, tree level, bool inner,
4947 		       gcall *fork, gcall *join, gimple_seq *fork_seq,
4948 		       gimple_seq *join_seq, omp_context *ctx)
4949 {
4950   gimple_seq before_fork = NULL;
4951   gimple_seq after_fork = NULL;
4952   gimple_seq before_join = NULL;
4953   gimple_seq after_join = NULL;
4954   tree init_code = NULL_TREE, fini_code = NULL_TREE,
4955     setup_code = NULL_TREE, teardown_code = NULL_TREE;
4956   unsigned offset = 0;
4957 
4958   for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
4959     if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
4960       {
4961 	tree orig = OMP_CLAUSE_DECL (c);
4962 	tree var = maybe_lookup_decl (orig, ctx);
4963 	tree ref_to_res = NULL_TREE;
4964 	tree incoming, outgoing, v1, v2, v3;
4965 	bool is_private = false;
4966 
4967 	enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
4968 	if (rcode == MINUS_EXPR)
4969 	  rcode = PLUS_EXPR;
4970 	else if (rcode == TRUTH_ANDIF_EXPR)
4971 	  rcode = BIT_AND_EXPR;
4972 	else if (rcode == TRUTH_ORIF_EXPR)
4973 	  rcode = BIT_IOR_EXPR;
4974 	tree op = build_int_cst (unsigned_type_node, rcode);
4975 
4976 	if (!var)
4977 	  var = orig;
4978 
4979 	incoming = outgoing = var;
4980 
4981 	if (!inner)
4982 	  {
4983 	    /* See if an outer construct also reduces this variable.  */
4984 	    omp_context *outer = ctx;
4985 
4986 	    while (omp_context *probe = outer->outer)
4987 	      {
4988 		enum gimple_code type = gimple_code (probe->stmt);
4989 		tree cls;
4990 
4991 		switch (type)
4992 		  {
4993 		  case GIMPLE_OMP_FOR:
4994 		    cls = gimple_omp_for_clauses (probe->stmt);
4995 		    break;
4996 
4997 		  case GIMPLE_OMP_TARGET:
4998 		    if (gimple_omp_target_kind (probe->stmt)
4999 			!= GF_OMP_TARGET_KIND_OACC_PARALLEL)
5000 		      goto do_lookup;
5001 
5002 		    cls = gimple_omp_target_clauses (probe->stmt);
5003 		    break;
5004 
5005 		  default:
5006 		    goto do_lookup;
5007 		  }
5008 
5009 		outer = probe;
5010 		for (; cls;  cls = OMP_CLAUSE_CHAIN (cls))
5011 		  if (OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_REDUCTION
5012 		      && orig == OMP_CLAUSE_DECL (cls))
5013 		    {
5014 		      incoming = outgoing = lookup_decl (orig, probe);
5015 		      goto has_outer_reduction;
5016 		    }
5017 		  else if ((OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_FIRSTPRIVATE
5018 			    || OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_PRIVATE)
5019 			   && orig == OMP_CLAUSE_DECL (cls))
5020 		    {
5021 		      is_private = true;
5022 		      goto do_lookup;
5023 		    }
5024 	      }
5025 
5026 	  do_lookup:
5027 	    /* This is the outermost construct with this reduction;
5028 	       see if there's a mapping for it.  */
5029 	    if (gimple_code (outer->stmt) == GIMPLE_OMP_TARGET
5030 		&& maybe_lookup_field (orig, outer) && !is_private)
5031 	      {
5032 		ref_to_res = build_receiver_ref (orig, false, outer);
5033 		if (omp_is_reference (orig))
5034 		  ref_to_res = build_simple_mem_ref (ref_to_res);
5035 
5036 		tree type = TREE_TYPE (var);
5037 		if (POINTER_TYPE_P (type))
5038 		  type = TREE_TYPE (type);
5039 
5040 		outgoing = var;
5041 		incoming = omp_reduction_init_op (loc, rcode, type);
5042 	      }
5043 	    else
5044 	      {
5045 		/* Try to find the reduction variable in enclosing
5046 		   contexts, and use the original if no mapping is found.  */
5047 		tree t = NULL_TREE;
5048 		omp_context *c = ctx->outer;
5049 		while (c && !t)
5050 		  {
5051 		    t = maybe_lookup_decl (orig, c);
5052 		    c = c->outer;
5053 		  }
5054 		incoming = outgoing = (t ? t : orig);
5055 	      }
5056 
5057 	  has_outer_reduction:;
5058 	  }
5059 
5060 	if (!ref_to_res)
5061 	  ref_to_res = integer_zero_node;
5062 
5063 	if (omp_is_reference (orig))
5064 	  {
5065 	    tree type = TREE_TYPE (var);
5066 	    const char *id = IDENTIFIER_POINTER (DECL_NAME (var));
5067 
5068 	    if (!inner)
5069 	      {
5070 		tree x = create_tmp_var (TREE_TYPE (type), id);
5071 		gimplify_assign (var, build_fold_addr_expr (x), fork_seq);
5072 	      }
5073 
5074 	    v1 = create_tmp_var (type, id);
5075 	    v2 = create_tmp_var (type, id);
5076 	    v3 = create_tmp_var (type, id);
5077 
5078 	    gimplify_assign (v1, var, fork_seq);
5079 	    gimplify_assign (v2, var, fork_seq);
5080 	    gimplify_assign (v3, var, fork_seq);
5081 
5082 	    var = build_simple_mem_ref (var);
5083 	    v1 = build_simple_mem_ref (v1);
5084 	    v2 = build_simple_mem_ref (v2);
5085 	    v3 = build_simple_mem_ref (v3);
5086 	    outgoing = build_simple_mem_ref (outgoing);
5087 
5088 	    if (!TREE_CONSTANT (incoming))
5089 	      incoming = build_simple_mem_ref (incoming);
5090 	  }
5091 	else
5092 	  v1 = v2 = v3 = var;
5093 
5094 	/* Determine the position in the reduction buffer, which may be
5095 	   used by the target.  The parser has ensured that this is not
5096 	   a variable-sized type.  */
5097 	fixed_size_mode mode
5098 	  = as_a <fixed_size_mode> (TYPE_MODE (TREE_TYPE (var)));
5099 	unsigned align = GET_MODE_ALIGNMENT (mode) /  BITS_PER_UNIT;
5100 	offset = (offset + align - 1) & ~(align - 1);
5101 	tree off = build_int_cst (sizetype, offset);
5102 	offset += GET_MODE_SIZE (mode);
5103 
5104 	if (!init_code)
5105 	  {
5106 	    init_code = build_int_cst (integer_type_node,
5107 				       IFN_GOACC_REDUCTION_INIT);
5108 	    fini_code = build_int_cst (integer_type_node,
5109 				       IFN_GOACC_REDUCTION_FINI);
5110 	    setup_code = build_int_cst (integer_type_node,
5111 					IFN_GOACC_REDUCTION_SETUP);
5112 	    teardown_code = build_int_cst (integer_type_node,
5113 					   IFN_GOACC_REDUCTION_TEARDOWN);
5114 	  }
5115 
5116 	tree setup_call
5117 	  = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5118 					  TREE_TYPE (var), 6, setup_code,
5119 					  unshare_expr (ref_to_res),
5120 					  incoming, level, op, off);
5121 	tree init_call
5122 	  = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5123 					  TREE_TYPE (var), 6, init_code,
5124 					  unshare_expr (ref_to_res),
5125 					  v1, level, op, off);
5126 	tree fini_call
5127 	  = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5128 					  TREE_TYPE (var), 6, fini_code,
5129 					  unshare_expr (ref_to_res),
5130 					  v2, level, op, off);
5131 	tree teardown_call
5132 	  = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
5133 					  TREE_TYPE (var), 6, teardown_code,
5134 					  ref_to_res, v3, level, op, off);
5135 
5136 	gimplify_assign (v1, setup_call, &before_fork);
5137 	gimplify_assign (v2, init_call, &after_fork);
5138 	gimplify_assign (v3, fini_call, &before_join);
5139 	gimplify_assign (outgoing, teardown_call, &after_join);
5140       }
5141 
5142   /* Now stitch things together.  */
5143   gimple_seq_add_seq (fork_seq, before_fork);
5144   if (fork)
5145     gimple_seq_add_stmt (fork_seq, fork);
5146   gimple_seq_add_seq (fork_seq, after_fork);
5147 
5148   gimple_seq_add_seq (join_seq, before_join);
5149   if (join)
5150     gimple_seq_add_stmt (join_seq, join);
5151   gimple_seq_add_seq (join_seq, after_join);
5152 }
5153 
5154 /* Generate code to implement the REDUCTION clauses.  */
5155 
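/* As a sketch: with exactly one scalar reduction clause such as
   reduction (+:s), the merge is emitted as a single atomic update,

     #pragma omp atomic
     s_outer = s_outer + s_private;

   whereas with several clauses, or with array/UDR reductions, the
   merge statements are bracketed by GOMP_atomic_start () and
   GOMP_atomic_end () calls instead.  */
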
5156 static void
5157 lower_reduction_clauses (tree clauses, gimple_seq *stmt_seqp, omp_context *ctx)
5158 {
5159   gimple_seq sub_seq = NULL;
5160   gimple *stmt;
5161   tree x, c;
5162   int count = 0;
5163 
5164   /* OpenACC loop reductions are handled elsewhere.  */
5165   if (is_gimple_omp_oacc (ctx->stmt))
5166     return;
5167 
5168   /* SIMD reductions are handled in lower_rec_input_clauses.  */
5169   if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
5170       && gimple_omp_for_kind (ctx->stmt) & GF_OMP_FOR_SIMD)
5171     return;
5172 
5173   /* First see if there is exactly one reduction clause.  Use OMP_ATOMIC
5174      update in that case, otherwise use a lock.  */
5175   for (c = clauses; c && count < 2; c = OMP_CLAUSE_CHAIN (c))
5176     if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
5177       {
5178 	if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
5179 	    || TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
5180 	  {
5181 	    /* Never use OMP_ATOMIC for array reductions or UDRs.  */
5182 	    count = -1;
5183 	    break;
5184 	  }
5185 	count++;
5186       }
5187 
5188   if (count == 0)
5189     return;
5190 
5191   for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
5192     {
5193       tree var, ref, new_var, orig_var;
5194       enum tree_code code;
5195       location_t clause_loc = OMP_CLAUSE_LOCATION (c);
5196 
5197       if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5198 	continue;
5199 
5200       enum omp_clause_code ccode = OMP_CLAUSE_REDUCTION;
5201       orig_var = var = OMP_CLAUSE_DECL (c);
5202       if (TREE_CODE (var) == MEM_REF)
5203 	{
5204 	  var = TREE_OPERAND (var, 0);
5205 	  if (TREE_CODE (var) == POINTER_PLUS_EXPR)
5206 	    var = TREE_OPERAND (var, 0);
5207 	  if (TREE_CODE (var) == ADDR_EXPR)
5208 	    var = TREE_OPERAND (var, 0);
5209 	  else
5210 	    {
5211 	      /* If this is a pointer- or reference-based array
5212 		 section, the var could be private in the outer
5213 		 context, e.g. on an orphaned loop construct.  Pretend
5214 		 this is the private variable's outer reference.  */
5215 	      ccode = OMP_CLAUSE_PRIVATE;
5216 	      if (TREE_CODE (var) == INDIRECT_REF)
5217 		var = TREE_OPERAND (var, 0);
5218 	    }
5219 	  orig_var = var;
5220 	  if (is_variable_sized (var))
5221 	    {
5222 	      gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
5223 	      var = DECL_VALUE_EXPR (var);
5224 	      gcc_assert (TREE_CODE (var) == INDIRECT_REF);
5225 	      var = TREE_OPERAND (var, 0);
5226 	      gcc_assert (DECL_P (var));
5227 	    }
5228 	}
5229       new_var = lookup_decl (var, ctx);
5230       if (var == OMP_CLAUSE_DECL (c) && omp_is_reference (var))
5231 	new_var = build_simple_mem_ref_loc (clause_loc, new_var);
5232       ref = build_outer_var_ref (var, ctx, ccode);
5233       code = OMP_CLAUSE_REDUCTION_CODE (c);
5234 
5235       /* reduction(-:var) sums up the partial results, so it acts
5236 	 identically to reduction(+:var).  */
5237       if (code == MINUS_EXPR)
5238         code = PLUS_EXPR;
5239 
5240       if (count == 1)
5241 	{
5242 	  tree addr = build_fold_addr_expr_loc (clause_loc, ref);
5243 
5244 	  addr = save_expr (addr);
5245 	  ref = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (addr)), addr);
5246 	  x = fold_build2_loc (clause_loc, code, TREE_TYPE (ref), ref, new_var);
5247 	  x = build2 (OMP_ATOMIC, void_type_node, addr, x);
5248 	  gimplify_and_add (x, stmt_seqp);
5249 	  return;
5250 	}
5251       else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
5252 	{
5253 	  tree d = OMP_CLAUSE_DECL (c);
5254 	  tree type = TREE_TYPE (d);
5255 	  tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
5256 	  tree i = create_tmp_var (TREE_TYPE (v), NULL);
5257 	  tree ptype = build_pointer_type (TREE_TYPE (type));
5258 	  tree bias = TREE_OPERAND (d, 1);
5259 	  d = TREE_OPERAND (d, 0);
5260 	  if (TREE_CODE (d) == POINTER_PLUS_EXPR)
5261 	    {
5262 	      tree b = TREE_OPERAND (d, 1);
5263 	      b = maybe_lookup_decl (b, ctx);
5264 	      if (b == NULL)
5265 		{
5266 		  b = TREE_OPERAND (d, 1);
5267 		  b = maybe_lookup_decl_in_outer_ctx (b, ctx);
5268 		}
5269 	      if (integer_zerop (bias))
5270 		bias = b;
5271 	      else
5272 		{
5273 		  bias = fold_convert_loc (clause_loc, TREE_TYPE (b), bias);
5274 		  bias = fold_build2_loc (clause_loc, PLUS_EXPR,
5275 					  TREE_TYPE (b), b, bias);
5276 		}
5277 	      d = TREE_OPERAND (d, 0);
5278 	    }
5279 	  /* For REF, build_outer_var_ref has already performed the
5280 	     dereference, so only NEW_VAR needs one.  */
5281 	  if (TREE_CODE (d) == INDIRECT_REF)
5282 	    {
5283 	      new_var = build_simple_mem_ref_loc (clause_loc, new_var);
5284 	      gcc_assert (omp_is_reference (var) && var == orig_var);
5285 	    }
5286 	  else if (TREE_CODE (d) == ADDR_EXPR)
5287 	    {
5288 	      if (orig_var == var)
5289 		{
5290 		  new_var = build_fold_addr_expr (new_var);
5291 		  ref = build_fold_addr_expr (ref);
5292 		}
5293 	    }
5294 	  else
5295 	    {
5296 	      gcc_assert (orig_var == var);
5297 	      if (omp_is_reference (var))
5298 		ref = build_fold_addr_expr (ref);
5299 	    }
5300 	  if (DECL_P (v))
5301 	    {
5302 	      tree t = maybe_lookup_decl (v, ctx);
5303 	      if (t)
5304 		v = t;
5305 	      else
5306 		v = maybe_lookup_decl_in_outer_ctx (v, ctx);
5307 	      gimplify_expr (&v, stmt_seqp, NULL, is_gimple_val, fb_rvalue);
5308 	    }
5309 	  if (!integer_zerop (bias))
5310 	    {
5311 	      bias = fold_convert_loc (clause_loc, sizetype, bias);
5312 	      new_var = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
5313 					 TREE_TYPE (new_var), new_var,
5314 					 unshare_expr (bias));
5315 	      ref = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
5316 					 TREE_TYPE (ref), ref, bias);
5317 	    }
5318 	  new_var = fold_convert_loc (clause_loc, ptype, new_var);
5319 	  ref = fold_convert_loc (clause_loc, ptype, ref);
5320 	  tree m = create_tmp_var (ptype, NULL);
5321 	  gimplify_assign (m, new_var, stmt_seqp);
5322 	  new_var = m;
5323 	  m = create_tmp_var (ptype, NULL);
5324 	  gimplify_assign (m, ref, stmt_seqp);
5325 	  ref = m;
5326 	  gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), stmt_seqp);
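	  /* A sketch of the element-wise merge loop built below
	     (not verbatim GIMPLE):

	       body:
		 *out = *out OP *priv;	/* or the UDR merge sequence  */
		 priv += elt_size;  out += elt_size;  i += 1;
		 if (i <= v) goto body;
	       end:  */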
5327 	  tree body = create_artificial_label (UNKNOWN_LOCATION);
5328 	  tree end = create_artificial_label (UNKNOWN_LOCATION);
5329 	  gimple_seq_add_stmt (&sub_seq, gimple_build_label (body));
5330 	  tree priv = build_simple_mem_ref_loc (clause_loc, new_var);
5331 	  tree out = build_simple_mem_ref_loc (clause_loc, ref);
5332 	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5333 	    {
5334 	      tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
5335 	      tree decl_placeholder
5336 		= OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
5337 	      SET_DECL_VALUE_EXPR (placeholder, out);
5338 	      DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
5339 	      SET_DECL_VALUE_EXPR (decl_placeholder, priv);
5340 	      DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
5341 	      lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
5342 	      gimple_seq_add_seq (&sub_seq,
5343 				  OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
5344 	      OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5345 	      OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
5346 	      OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
5347 	    }
5348 	  else
5349 	    {
5350 	      x = build2 (code, TREE_TYPE (out), out, priv);
5351 	      out = unshare_expr (out);
5352 	      gimplify_assign (out, x, &sub_seq);
5353 	    }
5354 	  gimple *g = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
5355 					   TYPE_SIZE_UNIT (TREE_TYPE (type)));
5356 	  gimple_seq_add_stmt (&sub_seq, g);
5357 	  g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
5358 				   TYPE_SIZE_UNIT (TREE_TYPE (type)));
5359 	  gimple_seq_add_stmt (&sub_seq, g);
5360 	  g = gimple_build_assign (i, PLUS_EXPR, i,
5361 				   build_int_cst (TREE_TYPE (i), 1));
5362 	  gimple_seq_add_stmt (&sub_seq, g);
5363 	  g = gimple_build_cond (LE_EXPR, i, v, body, end);
5364 	  gimple_seq_add_stmt (&sub_seq, g);
5365 	  gimple_seq_add_stmt (&sub_seq, gimple_build_label (end));
5366 	}
5367       else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5368 	{
5369 	  tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
5370 
5371 	  if (omp_is_reference (var)
5372 	      && !useless_type_conversion_p (TREE_TYPE (placeholder),
5373 					     TREE_TYPE (ref)))
5374 	    ref = build_fold_addr_expr_loc (clause_loc, ref);
5375 	  SET_DECL_VALUE_EXPR (placeholder, ref);
5376 	  DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
5377 	  lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
5378 	  gimple_seq_add_seq (&sub_seq, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
5379 	  OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5380 	  OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
5381 	}
5382       else
5383 	{
5384 	  x = build2 (code, TREE_TYPE (ref), ref, new_var);
5385 	  ref = build_outer_var_ref (var, ctx);
5386 	  gimplify_assign (ref, x, &sub_seq);
5387 	}
5388     }
5389 
5390   stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START),
5391 			    0);
5392   gimple_seq_add_stmt (stmt_seqp, stmt);
5393 
5394   gimple_seq_add_seq (stmt_seqp, sub_seq);
5395 
5396   stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END),
5397 			    0);
5398   gimple_seq_add_stmt (stmt_seqp, stmt);
5399 }
5400 
5401 
5402 /* Generate code to implement the COPYPRIVATE clauses.  */
5403 
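/* As a sketch, for copyprivate (a) the sender side (SLIST) stores

     copyout.a = a;	/* or copyout.a = &a when passed by reference  */

   and the receiver side (RLIST) copies back

     a = copyout_p->a;	/* with an extra dereference when by reference  */

   using the record and pointer set up by lower_omp_single_copy.  */
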
5404 static void
5405 lower_copyprivate_clauses (tree clauses, gimple_seq *slist, gimple_seq *rlist,
5406 			    omp_context *ctx)
5407 {
5408   tree c;
5409 
5410   for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
5411     {
5412       tree var, new_var, ref, x;
5413       bool by_ref;
5414       location_t clause_loc = OMP_CLAUSE_LOCATION (c);
5415 
5416       if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYPRIVATE)
5417 	continue;
5418 
5419       var = OMP_CLAUSE_DECL (c);
5420       by_ref = use_pointer_for_field (var, NULL);
5421 
5422       ref = build_sender_ref (var, ctx);
5423       x = new_var = lookup_decl_in_outer_ctx (var, ctx);
5424       if (by_ref)
5425 	{
5426 	  x = build_fold_addr_expr_loc (clause_loc, new_var);
5427 	  x = fold_convert_loc (clause_loc, TREE_TYPE (ref), x);
5428 	}
5429       gimplify_assign (ref, x, slist);
5430 
5431       ref = build_receiver_ref (var, false, ctx);
5432       if (by_ref)
5433 	{
5434 	  ref = fold_convert_loc (clause_loc,
5435 				  build_pointer_type (TREE_TYPE (new_var)),
5436 				  ref);
5437 	  ref = build_fold_indirect_ref_loc (clause_loc, ref);
5438 	}
5439       if (omp_is_reference (var))
5440 	{
5441 	  ref = fold_convert_loc (clause_loc, TREE_TYPE (new_var), ref);
5442 	  ref = build_simple_mem_ref_loc (clause_loc, ref);
5443 	  new_var = build_simple_mem_ref_loc (clause_loc, new_var);
5444 	}
5445       x = lang_hooks.decls.omp_clause_assign_op (c, new_var, ref);
5446       gimplify_and_add (x, rlist);
5447     }
5448 }
5449 
5450 
5451 /* Generate code to implement the clauses, FIRSTPRIVATE, COPYIN, LASTPRIVATE,
5452    and REDUCTION from the sender (aka parent) side.  */
5453 
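/* As a sketch, firstprivate (x) emits into ILIST roughly

     .omp_data_o.x = x;		/* or = &x when passed by reference  */

   while a non-firstprivate lastprivate (x) emits into OLIST

     x = .omp_data_o.x;

   mirroring the receiver-side copies made in lower_rec_input_clauses.  */
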
5454 static void
5455 lower_send_clauses (tree clauses, gimple_seq *ilist, gimple_seq *olist,
5456     		    omp_context *ctx)
5457 {
5458   tree c, t;
5459   int ignored_looptemp = 0;
5460   bool is_taskloop = false;
5461 
5462   /* For taskloop, ignore the first two _looptemp_ clauses; those are
5463      initialized by GOMP_taskloop.  */
5464   if (is_task_ctx (ctx) && gimple_omp_task_taskloop_p (ctx->stmt))
5465     {
5466       ignored_looptemp = 2;
5467       is_taskloop = true;
5468     }
5469 
5470   for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
5471     {
5472       tree val, ref, x, var;
5473       bool by_ref, do_in = false, do_out = false;
5474       location_t clause_loc = OMP_CLAUSE_LOCATION (c);
5475 
5476       switch (OMP_CLAUSE_CODE (c))
5477 	{
5478 	case OMP_CLAUSE_PRIVATE:
5479 	  if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
5480 	    break;
5481 	  continue;
5482 	case OMP_CLAUSE_FIRSTPRIVATE:
5483 	case OMP_CLAUSE_COPYIN:
5484 	case OMP_CLAUSE_LASTPRIVATE:
5485 	case OMP_CLAUSE_REDUCTION:
5486 	  break;
5487 	case OMP_CLAUSE_SHARED:
5488 	  if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
5489 	    break;
5490 	  continue;
5491 	case OMP_CLAUSE__LOOPTEMP_:
5492 	  if (ignored_looptemp)
5493 	    {
5494 	      ignored_looptemp--;
5495 	      continue;
5496 	    }
5497 	  break;
5498 	default:
5499 	  continue;
5500 	}
5501 
5502       val = OMP_CLAUSE_DECL (c);
5503       if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
5504 	  && TREE_CODE (val) == MEM_REF)
5505 	{
5506 	  val = TREE_OPERAND (val, 0);
5507 	  if (TREE_CODE (val) == POINTER_PLUS_EXPR)
5508 	    val = TREE_OPERAND (val, 0);
5509 	  if (TREE_CODE (val) == INDIRECT_REF
5510 	      || TREE_CODE (val) == ADDR_EXPR)
5511 	    val = TREE_OPERAND (val, 0);
5512 	  if (is_variable_sized (val))
5513 	    continue;
5514 	}
5515 
5516       /* For OMP_CLAUSE_SHARED_FIRSTPRIVATE, look beyond the
5517 	 outer taskloop region.  */
5518       omp_context *ctx_for_o = ctx;
5519       if (is_taskloop
5520 	  && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
5521 	  && OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
5522 	ctx_for_o = ctx->outer;
5523 
5524       var = lookup_decl_in_outer_ctx (val, ctx_for_o);
5525 
5526       if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYIN
5527 	  && is_global_var (var))
5528 	continue;
5529 
5530       t = omp_member_access_dummy_var (var);
5531       if (t)
5532 	{
5533 	  var = DECL_VALUE_EXPR (var);
5534 	  tree o = maybe_lookup_decl_in_outer_ctx (t, ctx_for_o);
5535 	  if (o != t)
5536 	    var = unshare_and_remap (var, t, o);
5537 	  else
5538 	    var = unshare_expr (var);
5539 	}
5540 
5541       if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
5542 	{
5543 	  /* Handle taskloop firstprivate/lastprivate, where the
5544 	     lastprivate on GIMPLE_OMP_TASK is represented as
5545 	     OMP_CLAUSE_SHARED_FIRSTPRIVATE.  */
5546 	  tree f = lookup_sfield ((splay_tree_key) &DECL_UID (val), ctx);
5547 	  x = omp_build_component_ref (ctx->sender_decl, f);
5548 	  if (use_pointer_for_field (val, ctx))
5549 	    var = build_fold_addr_expr (var);
5550 	  gimplify_assign (x, var, ilist);
5551 	  DECL_ABSTRACT_ORIGIN (f) = NULL;
5552 	  continue;
5553 	}
5554 
5555       if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
5556 	   || val == OMP_CLAUSE_DECL (c))
5557 	  && is_variable_sized (val))
5558 	continue;
5559       by_ref = use_pointer_for_field (val, NULL);
5560 
5561       switch (OMP_CLAUSE_CODE (c))
5562 	{
5563 	case OMP_CLAUSE_FIRSTPRIVATE:
5564 	  if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c)
5565 	      && !by_ref
5566 	      && is_task_ctx (ctx))
5567 	    TREE_NO_WARNING (var) = 1;
5568 	  do_in = true;
5569 	  break;
5570 
5571 	case OMP_CLAUSE_PRIVATE:
5572 	case OMP_CLAUSE_COPYIN:
5573 	case OMP_CLAUSE__LOOPTEMP_:
5574 	  do_in = true;
5575 	  break;
5576 
5577 	case OMP_CLAUSE_LASTPRIVATE:
5578 	  if (by_ref || omp_is_reference (val))
5579 	    {
5580 	      if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
5581 		continue;
5582 	      do_in = true;
5583 	    }
5584 	  else
5585 	    {
5586 	      do_out = true;
5587 	      if (lang_hooks.decls.omp_private_outer_ref (val))
5588 		do_in = true;
5589 	    }
5590 	  break;
5591 
5592 	case OMP_CLAUSE_REDUCTION:
5593 	  do_in = true;
5594 	  if (val == OMP_CLAUSE_DECL (c))
5595 	    do_out = !(by_ref || omp_is_reference (val));
5596 	  else
5597 	    by_ref = TREE_CODE (TREE_TYPE (val)) == ARRAY_TYPE;
5598 	  break;
5599 
5600 	default:
5601 	  gcc_unreachable ();
5602 	}
5603 
5604       if (do_in)
5605 	{
5606 	  ref = build_sender_ref (val, ctx);
5607 	  x = by_ref ? build_fold_addr_expr_loc (clause_loc, var) : var;
5608 	  gimplify_assign (ref, x, ilist);
5609 	  if (is_task_ctx (ctx))
5610 	    DECL_ABSTRACT_ORIGIN (TREE_OPERAND (ref, 1)) = NULL;
5611 	}
5612 
5613       if (do_out)
5614 	{
5615 	  ref = build_sender_ref (val, ctx);
5616 	  gimplify_assign (var, ref, olist);
5617 	}
5618     }
5619 }
5620 
5621 /* Generate code to implement SHARED from the sender (aka parent)
5622    side.  This is trickier, since GIMPLE_OMP_PARALLEL_CLAUSES doesn't
5623    list things that got automatically shared.  */
5624 
5625 static void
5626 lower_send_shared_vars (gimple_seq *ilist, gimple_seq *olist, omp_context *ctx)
5627 {
5628   tree var, ovar, nvar, t, f, x, record_type;
5629 
5630   if (ctx->record_type == NULL)
5631     return;
5632 
5633   record_type = ctx->srecord_type ? ctx->srecord_type : ctx->record_type;
5634   for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
5635     {
5636       ovar = DECL_ABSTRACT_ORIGIN (f);
5637       if (!ovar || TREE_CODE (ovar) == FIELD_DECL)
5638 	continue;
5639 
5640       nvar = maybe_lookup_decl (ovar, ctx);
5641       if (!nvar || !DECL_HAS_VALUE_EXPR_P (nvar))
5642 	continue;
5643 
5644       /* If CTX is a nested parallel directive, find the immediately
5645 	 enclosing parallel or workshare construct that contains a
5646 	 mapping for OVAR.  */
5647       var = lookup_decl_in_outer_ctx (ovar, ctx);
5648 
5649       t = omp_member_access_dummy_var (var);
5650       if (t)
5651 	{
5652 	  var = DECL_VALUE_EXPR (var);
5653 	  tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
5654 	  if (o != t)
5655 	    var = unshare_and_remap (var, t, o);
5656 	  else
5657 	    var = unshare_expr (var);
5658 	}
5659 
5660       if (use_pointer_for_field (ovar, ctx))
5661 	{
5662 	  x = build_sender_ref (ovar, ctx);
5663 	  var = build_fold_addr_expr (var);
5664 	  gimplify_assign (x, var, ilist);
5665 	}
5666       else
5667 	{
5668 	  x = build_sender_ref (ovar, ctx);
5669 	  gimplify_assign (x, var, ilist);
5670 
5671 	  if (!TREE_READONLY (var)
5672 	      /* We don't need to receive a new reference to a result
5673 	         or parm decl.  In fact we may not store to it as we will
5674 		 invalidate any pending RSO and generate wrong gimple
5675 		 during inlining.  */
5676 	      && !((TREE_CODE (var) == RESULT_DECL
5677 		    || TREE_CODE (var) == PARM_DECL)
5678 		   && DECL_BY_REFERENCE (var)))
5679 	    {
5680 	      x = build_sender_ref (ovar, ctx);
5681 	      gimplify_assign (var, x, olist);
5682 	    }
5683 	}
5684     }
5685 }
5686 
5687 /* Emit an OpenACC head marker call, encapsulating the partitioning and
5688    other information that must be processed by the target compiler.
5689    Return the maximum number of dimensions the associated loop might
5690    be partitioned over.  */
5691 
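/* For instance (a sketch), '#pragma acc loop gang vector' inside an
   OpenACC parallel region yields LEVELS of 2 and a TAG of
   OLF_DIM_GANG | OLF_DIM_VECTOR | OLF_INDEPENDENT, emitted roughly as

     ddvar = .UNIQUE (OACC_HEAD_MARK, ddvar, 2, tag);  */
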
5692 static unsigned
5693 lower_oacc_head_mark (location_t loc, tree ddvar, tree clauses,
5694 		      gimple_seq *seq, omp_context *ctx)
5695 {
5696   unsigned levels = 0;
5697   unsigned tag = 0;
5698   tree gang_static = NULL_TREE;
5699   auto_vec<tree, 5> args;
5700 
5701   args.quick_push (build_int_cst
5702 		   (integer_type_node, IFN_UNIQUE_OACC_HEAD_MARK));
5703   args.quick_push (ddvar);
5704   for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
5705     {
5706       switch (OMP_CLAUSE_CODE (c))
5707 	{
5708 	case OMP_CLAUSE_GANG:
5709 	  tag |= OLF_DIM_GANG;
5710 	  gang_static = OMP_CLAUSE_GANG_STATIC_EXPR (c);
5711 	  /* static:* is represented by -1, and we can ignore it, as
5712 	     scheduling is always static.  */
5713 	  if (gang_static && integer_minus_onep (gang_static))
5714 	    gang_static = NULL_TREE;
5715 	  levels++;
5716 	  break;
5717 
5718 	case OMP_CLAUSE_WORKER:
5719 	  tag |= OLF_DIM_WORKER;
5720 	  levels++;
5721 	  break;
5722 
5723 	case OMP_CLAUSE_VECTOR:
5724 	  tag |= OLF_DIM_VECTOR;
5725 	  levels++;
5726 	  break;
5727 
5728 	case OMP_CLAUSE_SEQ:
5729 	  tag |= OLF_SEQ;
5730 	  break;
5731 
5732 	case OMP_CLAUSE_AUTO:
5733 	  tag |= OLF_AUTO;
5734 	  break;
5735 
5736 	case OMP_CLAUSE_INDEPENDENT:
5737 	  tag |= OLF_INDEPENDENT;
5738 	  break;
5739 
5740 	case OMP_CLAUSE_TILE:
5741 	  tag |= OLF_TILE;
5742 	  break;
5743 
5744 	default:
5745 	  continue;
5746 	}
5747     }
5748 
5749   if (gang_static)
5750     {
5751       if (DECL_P (gang_static))
5752 	gang_static = build_outer_var_ref (gang_static, ctx);
5753       tag |= OLF_GANG_STATIC;
5754     }
5755 
5756   /* In a parallel region, loops are implicitly INDEPENDENT.  */
5757   omp_context *tgt = enclosing_target_ctx (ctx);
5758   if (!tgt || is_oacc_parallel (tgt))
5759     tag |= OLF_INDEPENDENT;
5760 
5761   if (tag & OLF_TILE)
5762     /* Tiling could use all 3 levels.  */
5763     levels = 3;
5764   else
5765     {
5766       /* A loop lacking SEQ, GANG, WORKER and/or VECTOR could be AUTO.
5767 	 Ensure at least one level, or 2 for possible auto
5768 	 partitioning.  */
5769       bool maybe_auto = !(tag & (((GOMP_DIM_MASK (GOMP_DIM_MAX) - 1)
5770 				  << OLF_DIM_BASE) | OLF_SEQ));
5771 
5772       if (levels < 1u + maybe_auto)
5773 	levels = 1u + maybe_auto;
5774     }
5775 
5776   args.quick_push (build_int_cst (integer_type_node, levels));
5777   args.quick_push (build_int_cst (integer_type_node, tag));
5778   if (gang_static)
5779     args.quick_push (gang_static);
5780 
5781   gcall *call = gimple_build_call_internal_vec (IFN_UNIQUE, args);
5782   gimple_set_location (call, loc);
5783   gimple_set_lhs (call, ddvar);
5784   gimple_seq_add_stmt (seq, call);
5785 
5786   return levels;
5787 }
5788 
5789 /* Emit an OpenACC loop head or tail marker to SEQ.  LEVEL is the
5790    partitioning level of the enclosed region.  */
5791 
5792 static void
5793 lower_oacc_loop_marker (location_t loc, tree ddvar, bool head,
5794 			tree tofollow, gimple_seq *seq)
5795 {
5796   int marker_kind = (head ? IFN_UNIQUE_OACC_HEAD_MARK
5797 		     : IFN_UNIQUE_OACC_TAIL_MARK);
5798   tree marker = build_int_cst (integer_type_node, marker_kind);
5799   int nargs = 2 + (tofollow != NULL_TREE);
5800   gcall *call = gimple_build_call_internal (IFN_UNIQUE, nargs,
5801 					    marker, ddvar, tofollow);
5802   gimple_set_location (call, loc);
5803   gimple_set_lhs (call, ddvar);
5804   gimple_seq_add_stmt (seq, call);
5805 }
5806 
5807 /* Generate the before and after OpenACC loop sequences.  CLAUSES are
5808    the loop clauses, from which we extract reductions.  Initialize
5809    HEAD and TAIL.  */
5810 
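/* A sketch for two partitioning levels (loop markers elided):

     HEAD: head-mark; setup1; FORK1; init1; setup2; FORK2; init2;
     TAIL: fini2; JOIN2; teardown2; fini1; JOIN1; teardown1; tail-mark;

   where setup/init/fini/teardown stand for the GOACC_REDUCTION calls
   placed by lower_oacc_reductions.  */
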
5811 static void
5812 lower_oacc_head_tail (location_t loc, tree clauses,
5813 		      gimple_seq *head, gimple_seq *tail, omp_context *ctx)
5814 {
5815   bool inner = false;
5816   tree ddvar = create_tmp_var (integer_type_node, ".data_dep");
5817   gimple_seq_add_stmt (head, gimple_build_assign (ddvar, integer_zero_node));
5818 
5819   unsigned count = lower_oacc_head_mark (loc, ddvar, clauses, head, ctx);
5820   tree fork_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_FORK);
5821   tree join_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_JOIN);
5822 
5823   gcc_assert (count);
5824   for (unsigned done = 1; count; count--, done++)
5825     {
5826       gimple_seq fork_seq = NULL;
5827       gimple_seq join_seq = NULL;
5828 
5829       tree place = build_int_cst (integer_type_node, -1);
5830       gcall *fork = gimple_build_call_internal (IFN_UNIQUE, 3,
5831 						fork_kind, ddvar, place);
5832       gimple_set_location (fork, loc);
5833       gimple_set_lhs (fork, ddvar);
5834 
5835       gcall *join = gimple_build_call_internal (IFN_UNIQUE, 3,
5836 						join_kind, ddvar, place);
5837       gimple_set_location (join, loc);
5838       gimple_set_lhs (join, ddvar);
5839 
5840       /* Mark the beginning of this level sequence.  */
5841       if (inner)
5842 	lower_oacc_loop_marker (loc, ddvar, true,
5843 				build_int_cst (integer_type_node, count),
5844 				&fork_seq);
5845       lower_oacc_loop_marker (loc, ddvar, false,
5846 			      build_int_cst (integer_type_node, done),
5847 			      &join_seq);
5848 
5849       lower_oacc_reductions (loc, clauses, place, inner,
5850 			     fork, join, &fork_seq, &join_seq,  ctx);
5851 
5852       /* Append this level to head.  */
5853       gimple_seq_add_seq (head, fork_seq);
5854       /* Prepend it to tail.  */
5855       gimple_seq_add_seq (&join_seq, *tail);
5856       *tail = join_seq;
5857 
5858       inner = true;
5859     }
5860 
5861   /* Mark the end of the sequence.  */
5862   lower_oacc_loop_marker (loc, ddvar, true, NULL_TREE, head);
5863   lower_oacc_loop_marker (loc, ddvar, false, NULL_TREE, tail);
5864 }
5865 
5866 /* If exceptions are enabled, wrap the statements in BODY in a MUST_NOT_THROW
5867    catch handler and return it.  This prevents programs from violating the
5868    structured block semantics with throws.  */
5869 
5870 static gimple_seq
5871 maybe_catch_exception (gimple_seq body)
5872 {
5873   gimple *g;
5874   tree decl;
5875 
5876   if (!flag_exceptions)
5877     return body;
5878 
5879   if (lang_hooks.eh_protect_cleanup_actions != NULL)
5880     decl = lang_hooks.eh_protect_cleanup_actions ();
5881   else
5882     decl = builtin_decl_explicit (BUILT_IN_TRAP);
5883 
5884   g = gimple_build_eh_must_not_throw (decl);
5885   g = gimple_build_try (body, gimple_seq_alloc_with_stmt (g),
5886       			GIMPLE_TRY_CATCH);
5887 
5888   return gimple_seq_alloc_with_stmt (g);
5889 }
5890 
5891 
5892 /* Routines to lower OMP directives into OMP-GIMPLE.  */
5893 
5894 /* If ctx is a worksharing context inside of a cancellable parallel
5895    region and it isn't nowait, add lhs to its GIMPLE_OMP_RETURN
5896    and conditional branch to parallel's cancel_label to handle
5897    cancellation in the implicit barrier.  */
5898 
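/* A sketch of the check appended after the GIMPLE_OMP_RETURN, whose
   new lhs receives the cancellation flag from the implicit barrier:

     if (lhs != false) goto <cancel_label>; else goto <fallthru>;
     <fallthru>:  */
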
5899 static void
5900 maybe_add_implicit_barrier_cancel (omp_context *ctx, gimple_seq *body)
5901 {
5902   gimple *omp_return = gimple_seq_last_stmt (*body);
5903   gcc_assert (gimple_code (omp_return) == GIMPLE_OMP_RETURN);
5904   if (gimple_omp_return_nowait_p (omp_return))
5905     return;
5906   if (ctx->outer
5907       && gimple_code (ctx->outer->stmt) == GIMPLE_OMP_PARALLEL
5908       && ctx->outer->cancellable)
5909     {
5910       tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_CANCEL);
5911       tree c_bool_type = TREE_TYPE (TREE_TYPE (fndecl));
5912       tree lhs = create_tmp_var (c_bool_type);
5913       gimple_omp_return_set_lhs (omp_return, lhs);
5914       tree fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
5915       gimple *g = gimple_build_cond (NE_EXPR, lhs,
5916 				    fold_convert (c_bool_type,
5917 						  boolean_false_node),
5918 				    ctx->outer->cancel_label, fallthru_label);
5919       gimple_seq_add_stmt (body, g);
5920       gimple_seq_add_stmt (body, gimple_build_label (fallthru_label));
5921     }
5922 }
5923 
5924 /* Lower the OpenMP sections directive in the current statement in GSI_P.
5925    CTX is the enclosing OMP context for the current statement.  */
5926 
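/* A sketch of the bind body assembled at the bottom of this function:

     <ilist>				(privatization setup)
     GIMPLE_OMP_SECTIONS <clauses>
     GIMPLE_OMP_SECTIONS_SWITCH
     <bind holding the lowered section bodies>
     GIMPLE_OMP_CONTINUE (.section, .section)
     <olist> <dlist>			(reductions, destructors)
     GIMPLE_OMP_RETURN [nowait]  */
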
5927 static void
5928 lower_omp_sections (gimple_stmt_iterator *gsi_p, omp_context *ctx)
5929 {
5930   tree block, control;
5931   gimple_stmt_iterator tgsi;
5932   gomp_sections *stmt;
5933   gimple *t;
5934   gbind *new_stmt, *bind;
5935   gimple_seq ilist, dlist, olist, new_body;
5936 
5937   stmt = as_a <gomp_sections *> (gsi_stmt (*gsi_p));
5938 
5939   push_gimplify_context ();
5940 
5941   dlist = NULL;
5942   ilist = NULL;
5943   lower_rec_input_clauses (gimple_omp_sections_clauses (stmt),
5944       			   &ilist, &dlist, ctx, NULL);
5945 
5946   new_body = gimple_omp_body (stmt);
5947   gimple_omp_set_body (stmt, NULL);
5948   tgsi = gsi_start (new_body);
5949   for (; !gsi_end_p (tgsi); gsi_next (&tgsi))
5950     {
5951       omp_context *sctx;
5952       gimple *sec_start;
5953 
5954       sec_start = gsi_stmt (tgsi);
5955       sctx = maybe_lookup_ctx (sec_start);
5956       gcc_assert (sctx);
5957 
5958       lower_omp (gimple_omp_body_ptr (sec_start), sctx);
5959       gsi_insert_seq_after (&tgsi, gimple_omp_body (sec_start),
5960 			    GSI_CONTINUE_LINKING);
5961       gimple_omp_set_body (sec_start, NULL);
5962 
5963       if (gsi_one_before_end_p (tgsi))
5964 	{
5965 	  gimple_seq l = NULL;
5966 	  lower_lastprivate_clauses (gimple_omp_sections_clauses (stmt), NULL,
5967 				     &l, ctx);
5968 	  gsi_insert_seq_after (&tgsi, l, GSI_CONTINUE_LINKING);
5969 	  gimple_omp_section_set_last (sec_start);
5970 	}
5971 
5972       gsi_insert_after (&tgsi, gimple_build_omp_return (false),
5973 			GSI_CONTINUE_LINKING);
5974     }
5975 
5976   block = make_node (BLOCK);
5977   bind = gimple_build_bind (NULL, new_body, block);
5978 
5979   olist = NULL;
5980   lower_reduction_clauses (gimple_omp_sections_clauses (stmt), &olist, ctx);
5981 
5982   block = make_node (BLOCK);
5983   new_stmt = gimple_build_bind (NULL, NULL, block);
5984   gsi_replace (gsi_p, new_stmt, true);
5985 
5986   pop_gimplify_context (new_stmt);
5987   gimple_bind_append_vars (new_stmt, ctx->block_vars);
5988   BLOCK_VARS (block) = gimple_bind_vars (bind);
5989   if (BLOCK_VARS (block))
5990     TREE_USED (block) = 1;
5991 
5992   new_body = NULL;
5993   gimple_seq_add_seq (&new_body, ilist);
5994   gimple_seq_add_stmt (&new_body, stmt);
5995   gimple_seq_add_stmt (&new_body, gimple_build_omp_sections_switch ());
5996   gimple_seq_add_stmt (&new_body, bind);
5997 
5998   control = create_tmp_var (unsigned_type_node, ".section");
5999   t = gimple_build_omp_continue (control, control);
6000   gimple_omp_sections_set_control (stmt, control);
6001   gimple_seq_add_stmt (&new_body, t);
6002 
6003   gimple_seq_add_seq (&new_body, olist);
6004   if (ctx->cancellable)
6005     gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
6006   gimple_seq_add_seq (&new_body, dlist);
6007 
6008   new_body = maybe_catch_exception (new_body);
6009 
6010   bool nowait = omp_find_clause (gimple_omp_sections_clauses (stmt),
6011 				 OMP_CLAUSE_NOWAIT) != NULL_TREE;
6012   t = gimple_build_omp_return (nowait);
6013   gimple_seq_add_stmt (&new_body, t);
6014   maybe_add_implicit_barrier_cancel (ctx, &new_body);
6015 
6016   gimple_bind_set_body (new_stmt, new_body);
6017 }
6018 
6019 
6020 /* A subroutine of lower_omp_single.  Expand the simple form of
6021    a GIMPLE_OMP_SINGLE, without a copyprivate clause:
6022 
6023      	if (GOMP_single_start ())
6024 	  BODY;
6025 	[ GOMP_barrier (); ]	-> unless 'nowait' is present.
6026 
6027   FIXME.  It may be better to delay expanding the logic of this until
6028   pass_expand_omp.  The expanded logic may make the job more difficult
6029   for a synchronization analysis pass.  */
6030 
6031 static void
6032 lower_omp_single_simple (gomp_single *single_stmt, gimple_seq *pre_p)
6033 {
6034   location_t loc = gimple_location (single_stmt);
6035   tree tlabel = create_artificial_label (loc);
6036   tree flabel = create_artificial_label (loc);
6037   gimple *call, *cond;
6038   tree lhs, decl;
6039 
6040   decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_START);
6041   lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (decl)));
6042   call = gimple_build_call (decl, 0);
6043   gimple_call_set_lhs (call, lhs);
6044   gimple_seq_add_stmt (pre_p, call);
6045 
6046   cond = gimple_build_cond (EQ_EXPR, lhs,
6047 			    fold_convert_loc (loc, TREE_TYPE (lhs),
6048 					      boolean_true_node),
6049 			    tlabel, flabel);
6050   gimple_seq_add_stmt (pre_p, cond);
6051   gimple_seq_add_stmt (pre_p, gimple_build_label (tlabel));
6052   gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
6053   gimple_seq_add_stmt (pre_p, gimple_build_label (flabel));
6054 }
6055 
6056 
6057 /* A subroutine of lower_omp_single.  Expand the simple form of
6058    a GIMPLE_OMP_SINGLE, with a copyprivate clause:
6059 
6060 	#pragma omp single copyprivate (a, b, c)
6061 
6062    Create a new structure to hold copies of 'a', 'b' and 'c' and emit:
6063 
6064       {
6065 	if ((copyout_p = GOMP_single_copy_start ()) == NULL)
6066 	  {
6067 	    BODY;
6068 	    copyout.a = a;
6069 	    copyout.b = b;
6070 	    copyout.c = c;
6071 	    GOMP_single_copy_end (&copyout);
6072 	  }
6073 	else
6074 	  {
6075 	    a = copyout_p->a;
6076 	    b = copyout_p->b;
6077 	    c = copyout_p->c;
6078 	  }
6079 	GOMP_barrier ();
6080       }
6081 
6082   FIXME.  It may be better to delay expanding the logic of this until
6083   pass_expand_omp.  The expanded logic may make the job more difficult
6084   for a synchronization analysis pass.  */
6085 
6086 static void
6087 lower_omp_single_copy (gomp_single *single_stmt, gimple_seq *pre_p,
6088 		       omp_context *ctx)
6089 {
6090   tree ptr_type, t, l0, l1, l2, bfn_decl;
6091   gimple_seq copyin_seq;
6092   location_t loc = gimple_location (single_stmt);
6093 
6094   ctx->sender_decl = create_tmp_var (ctx->record_type, ".omp_copy_o");
6095 
6096   ptr_type = build_pointer_type (ctx->record_type);
6097   ctx->receiver_decl = create_tmp_var (ptr_type, ".omp_copy_i");
6098 
6099   l0 = create_artificial_label (loc);
6100   l1 = create_artificial_label (loc);
6101   l2 = create_artificial_label (loc);
6102 
6103   bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_START);
6104   t = build_call_expr_loc (loc, bfn_decl, 0);
6105   t = fold_convert_loc (loc, ptr_type, t);
6106   gimplify_assign (ctx->receiver_decl, t, pre_p);
6107 
6108   t = build2 (EQ_EXPR, boolean_type_node, ctx->receiver_decl,
6109 	      build_int_cst (ptr_type, 0));
6110   t = build3 (COND_EXPR, void_type_node, t,
6111 	      build_and_jump (&l0), build_and_jump (&l1));
6112   gimplify_and_add (t, pre_p);
6113 
6114   gimple_seq_add_stmt (pre_p, gimple_build_label (l0));
6115 
6116   gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
6117 
6118   copyin_seq = NULL;
6119   lower_copyprivate_clauses (gimple_omp_single_clauses (single_stmt), pre_p,
6120 			      &copyin_seq, ctx);
6121 
6122   t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
6123   bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_END);
6124   t = build_call_expr_loc (loc, bfn_decl, 1, t);
6125   gimplify_and_add (t, pre_p);
6126 
6127   t = build_and_jump (&l2);
6128   gimplify_and_add (t, pre_p);
6129 
6130   gimple_seq_add_stmt (pre_p, gimple_build_label (l1));
6131 
6132   gimple_seq_add_seq (pre_p, copyin_seq);
6133 
6134   gimple_seq_add_stmt (pre_p, gimple_build_label (l2));
6135 }
6136 
6137 
6138 /* Expand code for an OpenMP single directive.  */
6139 
6140 static void
6141 lower_omp_single (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6142 {
6143   tree block;
6144   gomp_single *single_stmt = as_a <gomp_single *> (gsi_stmt (*gsi_p));
6145   gbind *bind;
6146   gimple_seq bind_body, bind_body_tail = NULL, dlist;
6147 
6148   push_gimplify_context ();
6149 
6150   block = make_node (BLOCK);
6151   bind = gimple_build_bind (NULL, NULL, block);
6152   gsi_replace (gsi_p, bind, true);
6153   bind_body = NULL;
6154   dlist = NULL;
6155   lower_rec_input_clauses (gimple_omp_single_clauses (single_stmt),
6156 			   &bind_body, &dlist, ctx, NULL);
6157   lower_omp (gimple_omp_body_ptr (single_stmt), ctx);
6158 
6159   gimple_seq_add_stmt (&bind_body, single_stmt);
6160 
6161   if (ctx->record_type)
6162     lower_omp_single_copy (single_stmt, &bind_body, ctx);
6163   else
6164     lower_omp_single_simple (single_stmt, &bind_body);
6165 
6166   gimple_omp_set_body (single_stmt, NULL);
6167 
6168   gimple_seq_add_seq (&bind_body, dlist);
6169 
6170   bind_body = maybe_catch_exception (bind_body);
6171 
6172   bool nowait = omp_find_clause (gimple_omp_single_clauses (single_stmt),
6173 				 OMP_CLAUSE_NOWAIT) != NULL_TREE;
6174   gimple *g = gimple_build_omp_return (nowait);
6175   gimple_seq_add_stmt (&bind_body_tail, g);
6176   maybe_add_implicit_barrier_cancel (ctx, &bind_body_tail);
6177   if (ctx->record_type)
6178     {
6179       gimple_stmt_iterator gsi = gsi_start (bind_body_tail);
6180       tree clobber = build_constructor (ctx->record_type, NULL);
6181       TREE_THIS_VOLATILE (clobber) = 1;
6182       gsi_insert_after (&gsi, gimple_build_assign (ctx->sender_decl,
6183 						   clobber), GSI_SAME_STMT);
6184     }
6185   gimple_seq_add_seq (&bind_body, bind_body_tail);
6186   gimple_bind_set_body (bind, bind_body);
6187 
6188   pop_gimplify_context (bind);
6189 
6190   gimple_bind_append_vars (bind, ctx->block_vars);
6191   BLOCK_VARS (block) = ctx->block_vars;
6192   if (BLOCK_VARS (block))
6193     TREE_USED (block) = 1;
6194 }
6195 
6196 
6197 /* Expand code for an OpenMP master directive.  */
6198 
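/* A sketch of the expansion built below:

     if (omp_get_thread_num () == 0)
       <body>
     lab:
     GIMPLE_OMP_RETURN (nowait)  */
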
6199 static void
6200 lower_omp_master (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6201 {
6202   tree block, lab = NULL, x, bfn_decl;
6203   gimple *stmt = gsi_stmt (*gsi_p);
6204   gbind *bind;
6205   location_t loc = gimple_location (stmt);
6206   gimple_seq tseq;
6207 
6208   push_gimplify_context ();
6209 
6210   block = make_node (BLOCK);
6211   bind = gimple_build_bind (NULL, NULL, block);
6212   gsi_replace (gsi_p, bind, true);
6213   gimple_bind_add_stmt (bind, stmt);
6214 
6215   bfn_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
6216   x = build_call_expr_loc (loc, bfn_decl, 0);
6217   x = build2 (EQ_EXPR, boolean_type_node, x, integer_zero_node);
6218   x = build3 (COND_EXPR, void_type_node, x, NULL, build_and_jump (&lab));
6219   tseq = NULL;
6220   gimplify_and_add (x, &tseq);
6221   gimple_bind_add_seq (bind, tseq);
6222 
6223   lower_omp (gimple_omp_body_ptr (stmt), ctx);
6224   gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
6225   gimple_bind_add_seq (bind, gimple_omp_body (stmt));
6226   gimple_omp_set_body (stmt, NULL);
6227 
6228   gimple_bind_add_stmt (bind, gimple_build_label (lab));
6229 
6230   gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
6231 
6232   pop_gimplify_context (bind);
6233 
6234   gimple_bind_append_vars (bind, ctx->block_vars);
6235   BLOCK_VARS (block) = ctx->block_vars;
6236 }
6237 
6238 
6239 /* Expand code for an OpenMP taskgroup directive.  */
6240 
6241 static void
6242 lower_omp_taskgroup (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6243 {
6244   gimple *stmt = gsi_stmt (*gsi_p);
6245   gcall *x;
6246   gbind *bind;
6247   tree block = make_node (BLOCK);
6248 
6249   bind = gimple_build_bind (NULL, NULL, block);
6250   gsi_replace (gsi_p, bind, true);
6251   gimple_bind_add_stmt (bind, stmt);
6252 
6253   x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_START),
6254 			 0);
6255   gimple_bind_add_stmt (bind, x);
6256 
6257   lower_omp (gimple_omp_body_ptr (stmt), ctx);
6258   gimple_bind_add_seq (bind, gimple_omp_body (stmt));
6259   gimple_omp_set_body (stmt, NULL);
6260 
6261   gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
6262 
6263   gimple_bind_append_vars (bind, ctx->block_vars);
6264   BLOCK_VARS (block) = ctx->block_vars;
6265 }
6266 
6267 
6268 /* Fold the OMP_ORDERED_CLAUSES for the OMP_ORDERED in STMT if possible.  */
6269 
6270 static void
6271 lower_omp_ordered_clauses (gimple_stmt_iterator *gsi_p, gomp_ordered *ord_stmt,
6272 			   omp_context *ctx)
6273 {
6274   struct omp_for_data fd;
6275   if (!ctx->outer || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR)
6276     return;
6277 
6278   unsigned int len = gimple_omp_for_collapse (ctx->outer->stmt);
6279   struct omp_for_data_loop *loops = XALLOCAVEC (struct omp_for_data_loop, len);
6280   omp_extract_for_data (as_a <gomp_for *> (ctx->outer->stmt), &fd, loops);
6281   if (!fd.ordered)
6282     return;
6283 
6284   tree *list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
6285   tree c = gimple_omp_ordered_clauses (ord_stmt);
6286   if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
6287       && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
6288     {
6289       /* Merge depend clauses from multiple adjacent
6290 	 #pragma omp ordered depend(sink:...) constructs
6291 	 into one #pragma omp ordered depend(sink:...), so that
6292 	 we can optimize them together.  */
6293       gimple_stmt_iterator gsi = *gsi_p;
6294       gsi_next (&gsi);
6295       while (!gsi_end_p (gsi))
6296 	{
6297 	  gimple *stmt = gsi_stmt (gsi);
6298 	  if (is_gimple_debug (stmt)
6299 	      || gimple_code (stmt) == GIMPLE_NOP)
6300 	    {
6301 	      gsi_next (&gsi);
6302 	      continue;
6303 	    }
6304 	  if (gimple_code (stmt) != GIMPLE_OMP_ORDERED)
6305 	    break;
6306 	  gomp_ordered *ord_stmt2 = as_a <gomp_ordered *> (stmt);
6307 	  c = gimple_omp_ordered_clauses (ord_stmt2);
6308 	  if (c == NULL_TREE
6309 	      || OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND
6310 	      || OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
6311 	    break;
6312 	  while (*list_p)
6313 	    list_p = &OMP_CLAUSE_CHAIN (*list_p);
6314 	  *list_p = c;
6315 	  gsi_remove (&gsi, true);
6316 	}
6317     }
6318 
6319   /* Canonicalize sink dependence clauses into one folded clause if
6320      possible.
6321 
6322      The basic algorithm is to create a sink vector whose first
6323      element is the GCD of all the first elements, and whose remaining
6324      elements are the minimum of the subsequent columns.
6325 
6326      We ignore dependence vectors whose first element is zero because
6327      such dependencies are known to be executed by the same thread.
6328 
6329      We take into account the direction of the loop, so a minimum
6330      becomes a maximum if the loop is iterating forwards.  We also
6331      ignore sink clauses where the loop direction is unknown, or where
6332      the offsets are clearly invalid because they are not a multiple
6333      of the loop increment.
6334 
6335      For example:
6336 
6337 	#pragma omp for ordered(2)
6338 	for (i=0; i < N; ++i)
6339 	  for (j=0; j < M; ++j)
6340 	    {
6341 	      #pragma omp ordered \
6342 		depend(sink:i-8,j-2) \
6343 		depend(sink:i,j-1) \	// Completely ignored because i+0.
6344 		depend(sink:i-4,j-3) \
6345 		depend(sink:i-6,j-4)
6346 	      #pragma omp ordered depend(source)
6347 	    }
6348 
6349      Folded clause is:
6350 
6351 	depend(sink:-gcd(8,4,6),-min(2,3,4))
6352 	  -or-
6353 	depend(sink:-2,-2)
6354   */
6355 
6356   /* FIXME: Computing GCD's where the first element is zero is
6357      non-trivial in the presence of collapsed loops.  Do this later.  */
6358   if (fd.collapse > 1)
6359     return;
6360 
6361   wide_int *folded_deps = XALLOCAVEC (wide_int, 2 * len - 1);
6362 
6363   /* wide_int is not a POD so it must be default-constructed.  */
6364   for (unsigned i = 0; i != 2 * len - 1; ++i)
6365     new (static_cast<void*>(folded_deps + i)) wide_int ();
6366 
6367   tree folded_dep = NULL_TREE;
6368   /* TRUE if the first dimension's offset is negative.  */
6369   bool neg_offset_p = false;
6370 
6371   list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
6372   unsigned int i;
6373   while ((c = *list_p) != NULL)
6374     {
6375       bool remove = false;
6376 
6377       gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND);
6378       if (OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
6379 	goto next_ordered_clause;
6380 
6381       tree vec;
6382       for (vec = OMP_CLAUSE_DECL (c), i = 0;
6383 	   vec && TREE_CODE (vec) == TREE_LIST;
6384 	   vec = TREE_CHAIN (vec), ++i)
6385 	{
6386 	  gcc_assert (i < len);
6387 
6388 	  /* omp_extract_for_data has canonicalized the condition.  */
6389 	  gcc_assert (fd.loops[i].cond_code == LT_EXPR
6390 		      || fd.loops[i].cond_code == GT_EXPR);
6391 	  bool forward = fd.loops[i].cond_code == LT_EXPR;
6392 	  bool maybe_lexically_later = true;
6393 
6394 	  /* While the committee makes up its mind, bail if we have any
6395 	     non-constant steps.  */
6396 	  if (TREE_CODE (fd.loops[i].step) != INTEGER_CST)
6397 	    goto lower_omp_ordered_ret;
6398 
6399 	  tree itype = TREE_TYPE (TREE_VALUE (vec));
6400 	  if (POINTER_TYPE_P (itype))
6401 	    itype = sizetype;
6402 	  wide_int offset = wide_int::from (wi::to_wide (TREE_PURPOSE (vec)),
6403 					    TYPE_PRECISION (itype),
6404 					    TYPE_SIGN (itype));
6405 
6406 	  /* Ignore invalid offsets that are not multiples of the step.  */
6407 	  if (!wi::multiple_of_p (wi::abs (offset),
6408 				  wi::abs (wi::to_wide (fd.loops[i].step)),
6409 				  UNSIGNED))
6410 	    {
6411 	      warning_at (OMP_CLAUSE_LOCATION (c), 0,
6412 			  "ignoring sink clause with offset that is not "
6413 			  "a multiple of the loop step");
6414 	      remove = true;
6415 	      goto next_ordered_clause;
6416 	    }
6417 
6418 	  /* Calculate the first dimension.  The first dimension of
6419 	     the folded dependency vector is the GCD of the first
6420 	     elements, while ignoring any first elements whose offset
6421 	     is 0.  */
6422 	  if (i == 0)
6423 	    {
6424 	      /* Ignore dependence vectors whose first dimension is 0.  */
6425 	      if (offset == 0)
6426 		{
6427 		  remove = true;
6428 		  goto next_ordered_clause;
6429 		}
6430 	      else
6431 		{
6432 		  if (!TYPE_UNSIGNED (itype) && (forward ^ wi::neg_p (offset)))
6433 		    {
6434 		      error_at (OMP_CLAUSE_LOCATION (c),
6435 				"first offset must be in opposite direction "
6436 				"of loop iterations");
6437 		      goto lower_omp_ordered_ret;
6438 		    }
6439 		  if (forward)
6440 		    offset = -offset;
6441 		  neg_offset_p = forward;
6442 		  /* Initialize the first time around.  */
6443 		  if (folded_dep == NULL_TREE)
6444 		    {
6445 		      folded_dep = c;
6446 		      folded_deps[0] = offset;
6447 		    }
6448 		  else
6449 		    folded_deps[0] = wi::gcd (folded_deps[0],
6450 					      offset, UNSIGNED);
6451 		}
6452 	    }
6453 	  /* Calculate minimum for the remaining dimensions.  */
6454 	  else
6455 	    {
6456 	      folded_deps[len + i - 1] = offset;
6457 	      if (folded_dep == c)
6458 		folded_deps[i] = offset;
6459 	      else if (maybe_lexically_later
6460 		       && !wi::eq_p (folded_deps[i], offset))
6461 		{
6462 		  if (forward ^ wi::gts_p (folded_deps[i], offset))
6463 		    {
6464 		      unsigned int j;
6465 		      folded_dep = c;
6466 		      for (j = 1; j <= i; j++)
6467 			folded_deps[j] = folded_deps[len + j - 1];
6468 		    }
6469 		  else
6470 		    maybe_lexically_later = false;
6471 		}
6472 	    }
6473 	}
6474       gcc_assert (i == len);
6475 
6476       remove = true;
6477 
6478     next_ordered_clause:
6479       if (remove)
6480 	*list_p = OMP_CLAUSE_CHAIN (c);
6481       else
6482 	list_p = &OMP_CLAUSE_CHAIN (c);
6483     }
6484 
6485   if (folded_dep)
6486     {
6487       if (neg_offset_p)
6488 	folded_deps[0] = -folded_deps[0];
6489 
6490       tree itype = TREE_TYPE (TREE_VALUE (OMP_CLAUSE_DECL (folded_dep)));
6491       if (POINTER_TYPE_P (itype))
6492 	itype = sizetype;
6493 
6494       TREE_PURPOSE (OMP_CLAUSE_DECL (folded_dep))
6495 	= wide_int_to_tree (itype, folded_deps[0]);
6496       OMP_CLAUSE_CHAIN (folded_dep) = gimple_omp_ordered_clauses (ord_stmt);
6497       *gimple_omp_ordered_clauses_ptr (ord_stmt) = folded_dep;
6498     }
6499 
6500  lower_omp_ordered_ret:
6501 
6502   /* Ordered without clauses is equivalent to #pragma omp ordered threads,
6503      while we want a nop instead if we remove all clauses.  */
6504   if (gimple_omp_ordered_clauses (ord_stmt) == NULL_TREE)
6505     gsi_replace (gsi_p, gimple_build_nop (), true);
6506 }
6507 
6508 
6509 /* Lower code for an OpenMP ordered directive.  */
6510 
6511 static void
6512 lower_omp_ordered (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6513 {
6514   tree block;
6515   gimple *stmt = gsi_stmt (*gsi_p), *g;
6516   gomp_ordered *ord_stmt = as_a <gomp_ordered *> (stmt);
6517   gcall *x;
6518   gbind *bind;
6519   bool simd = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
6520 			       OMP_CLAUSE_SIMD);
6521   /* FIXME: this should check presence of OMP_CLAUSE__SIMT_ on the enclosing
6522      loop.  */
6523   bool maybe_simt
6524     = simd && omp_maybe_offloaded_ctx (ctx) && omp_max_simt_vf () > 1;
6525   bool threads = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
6526 				  OMP_CLAUSE_THREADS);
6527 
6528   if (omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
6529 		       OMP_CLAUSE_DEPEND))
6530     {
6531       /* FIXME: This needs to be moved to the expansion phase, to verify
6532 	 various conditions that are only testable on a cfg with dominators
6533 	 computed, and also because all the depend clauses to be merged might
6534 	 still need to be available for the runtime checks.  */
6535       if (0)
6536 	lower_omp_ordered_clauses (gsi_p, ord_stmt, ctx);
6537       return;
6538     }
6539 
6540   push_gimplify_context ();
6541 
6542   block = make_node (BLOCK);
6543   bind = gimple_build_bind (NULL, NULL, block);
6544   gsi_replace (gsi_p, bind, true);
6545   gimple_bind_add_stmt (bind, stmt);
6546 
6547   if (simd)
6548     {
6549       x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_START, 1,
6550 				      build_int_cst (NULL_TREE, threads));
6551       cfun->has_simduid_loops = true;
6552     }
6553   else
6554     x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_START),
6555 			   0);
6556   gimple_bind_add_stmt (bind, x);
6557 
6558   tree counter = NULL_TREE, test = NULL_TREE, body = NULL_TREE;
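  /* A sketch of the SIMT serialization emitted below: each lane loads its
     lane number into COUNTER, then loops between the BODY and TEST labels;
     IFN_GOMP_SIMT_ORDERED_PRED lets one lane at a time run the user body,
     and IFN_GOMP_SIMT_VOTE_ANY terminates the loop once no lane's COUNTER
     is still non-negative.  */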
6559   if (maybe_simt)
6560     {
6561       counter = create_tmp_var (integer_type_node);
6562       g = gimple_build_call_internal (IFN_GOMP_SIMT_LANE, 0);
6563       gimple_call_set_lhs (g, counter);
6564       gimple_bind_add_stmt (bind, g);
6565 
6566       body = create_artificial_label (UNKNOWN_LOCATION);
6567       test = create_artificial_label (UNKNOWN_LOCATION);
6568       gimple_bind_add_stmt (bind, gimple_build_label (body));
6569 
6570       tree simt_pred = create_tmp_var (integer_type_node);
6571       g = gimple_build_call_internal (IFN_GOMP_SIMT_ORDERED_PRED, 1, counter);
6572       gimple_call_set_lhs (g, simt_pred);
6573       gimple_bind_add_stmt (bind, g);
6574 
6575       tree t = create_artificial_label (UNKNOWN_LOCATION);
6576       g = gimple_build_cond (EQ_EXPR, simt_pred, integer_zero_node, t, test);
6577       gimple_bind_add_stmt (bind, g);
6578 
6579       gimple_bind_add_stmt (bind, gimple_build_label (t));
6580     }
6581   lower_omp (gimple_omp_body_ptr (stmt), ctx);
6582   gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
6583   gimple_bind_add_seq (bind, gimple_omp_body (stmt));
6584   gimple_omp_set_body (stmt, NULL);
6585 
6586   if (maybe_simt)
6587     {
6588       gimple_bind_add_stmt (bind, gimple_build_label (test));
6589       g = gimple_build_assign (counter, MINUS_EXPR, counter, integer_one_node);
6590       gimple_bind_add_stmt (bind, g);
6591 
6592       tree c = build2 (GE_EXPR, boolean_type_node, counter, integer_zero_node);
6593       tree nonneg = create_tmp_var (integer_type_node);
6594       gimple_seq tseq = NULL;
6595       gimplify_assign (nonneg, fold_convert (integer_type_node, c), &tseq);
6596       gimple_bind_add_seq (bind, tseq);
6597 
6598       g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY, 1, nonneg);
6599       gimple_call_set_lhs (g, nonneg);
6600       gimple_bind_add_stmt (bind, g);
6601 
6602       tree end = create_artificial_label (UNKNOWN_LOCATION);
6603       g = gimple_build_cond (NE_EXPR, nonneg, integer_zero_node, body, end);
6604       gimple_bind_add_stmt (bind, g);
6605 
6606       gimple_bind_add_stmt (bind, gimple_build_label (end));
6607     }
6608   if (simd)
6609     x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_END, 1,
6610 				    build_int_cst (NULL_TREE, threads));
6611   else
6612     x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_END),
6613 			   0);
6614   gimple_bind_add_stmt (bind, x);
6615 
6616   gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
6617 
6618   pop_gimplify_context (bind);
6619 
6620   gimple_bind_append_vars (bind, ctx->block_vars);
6621   BLOCK_VARS (block) = gimple_bind_vars (bind);
6622 }
6623 
6624 
6625 /* Gimplify a GIMPLE_OMP_CRITICAL statement.  This is a relatively simple
6626    substitution of a couple of function calls.  But in the NAMED case,
6627    it requires that languages coordinate a symbol name.  It is therefore
6628    best put here in common code.  */
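
/* For illustration, given a user-level construct

	#pragma omp critical (foo)
	  body;

   the lowered form is roughly

	GOMP_critical_name_start (&.gomp_critical_user_foo);
	body;
	GOMP_critical_name_end (&.gomp_critical_user_foo);

   where .gomp_critical_user_foo is the common symbol created below; the
   unnamed form uses GOMP_critical_start/GOMP_critical_end instead.  */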
6629 
6630 static GTY(()) hash_map<tree, tree> *critical_name_mutexes;
6631 
6632 static void
6633 lower_omp_critical (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6634 {
6635   tree block;
6636   tree name, lock, unlock;
6637   gomp_critical *stmt = as_a <gomp_critical *> (gsi_stmt (*gsi_p));
6638   gbind *bind;
6639   location_t loc = gimple_location (stmt);
6640   gimple_seq tbody;
6641 
6642   name = gimple_omp_critical_name (stmt);
6643   if (name)
6644     {
6645       tree decl;
6646 
6647       if (!critical_name_mutexes)
6648 	critical_name_mutexes = hash_map<tree, tree>::create_ggc (10);
6649 
6650       tree *n = critical_name_mutexes->get (name);
6651       if (n == NULL)
6652 	{
6653 	  char *new_str;
6654 
6655 	  decl = create_tmp_var_raw (ptr_type_node);
6656 
6657 	  new_str = ACONCAT ((".gomp_critical_user_",
6658 			      IDENTIFIER_POINTER (name), NULL));
6659 	  DECL_NAME (decl) = get_identifier (new_str);
6660 	  TREE_PUBLIC (decl) = 1;
6661 	  TREE_STATIC (decl) = 1;
6662 	  DECL_COMMON (decl) = 1;
6663 	  DECL_ARTIFICIAL (decl) = 1;
6664 	  DECL_IGNORED_P (decl) = 1;
6665 
6666 	  varpool_node::finalize_decl (decl);
6667 
6668 	  critical_name_mutexes->put (name, decl);
6669 	}
6670       else
6671 	decl = *n;
6672 
6673       /* If '#pragma omp critical' is inside offloaded region or
6674 	 inside function marked as offloadable, the symbol must be
6675 	 marked as offloadable too.  */
6676       omp_context *octx;
6677       if (cgraph_node::get (current_function_decl)->offloadable)
6678 	varpool_node::get_create (decl)->offloadable = 1;
6679       else
6680 	for (octx = ctx->outer; octx; octx = octx->outer)
6681 	  if (is_gimple_omp_offloaded (octx->stmt))
6682 	    {
6683 	      varpool_node::get_create (decl)->offloadable = 1;
6684 	      break;
6685 	    }
6686 
6687       lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_START);
6688       lock = build_call_expr_loc (loc, lock, 1,
6689 				  build_fold_addr_expr_loc (loc, decl));
6690 
6691       unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_END);
6692       unlock = build_call_expr_loc (loc, unlock, 1,
6693 				build_fold_addr_expr_loc (loc, decl));
6694     }
6695   else
6696     {
6697       lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_START);
6698       lock = build_call_expr_loc (loc, lock, 0);
6699 
6700       unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_END);
6701       unlock = build_call_expr_loc (loc, unlock, 0);
6702     }
6703 
6704   push_gimplify_context ();
6705 
6706   block = make_node (BLOCK);
6707   bind = gimple_build_bind (NULL, NULL, block);
6708   gsi_replace (gsi_p, bind, true);
6709   gimple_bind_add_stmt (bind, stmt);
6710 
6711   tbody = gimple_bind_body (bind);
6712   gimplify_and_add (lock, &tbody);
6713   gimple_bind_set_body (bind, tbody);
6714 
6715   lower_omp (gimple_omp_body_ptr (stmt), ctx);
6716   gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
6717   gimple_bind_add_seq (bind, gimple_omp_body (stmt));
6718   gimple_omp_set_body (stmt, NULL);
6719 
6720   tbody = gimple_bind_body (bind);
6721   gimplify_and_add (unlock, &tbody);
6722   gimple_bind_set_body (bind, tbody);
6723 
6724   gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
6725 
6726   pop_gimplify_context (bind);
6727   gimple_bind_append_vars (bind, ctx->block_vars);
6728   BLOCK_VARS (block) = gimple_bind_vars (bind);
6729 }
6730 
6731 /* A subroutine of lower_omp_for.  Generate code to emit the predicate
6732    for a lastprivate clause.  Given a loop control predicate of (V
6733    cond N2), we gate the clause on (!(V cond N2)).  The lowered form
6734    is appended to *DLIST, iterator initialization is appended to
6735    *BODY_P.  */
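
/* For example, for a loop lowered from

	for (V = N1; V < N2; V++)

   the lastprivate handling is guarded by (V >= N2); when the step is a
   known 1 or -1, the cheaper (V == N2) is used instead, as set up
   below.  */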
6736 
6737 static void
6738 lower_omp_for_lastprivate (struct omp_for_data *fd, gimple_seq *body_p,
6739 			   gimple_seq *dlist, struct omp_context *ctx)
6740 {
6741   tree clauses, cond, vinit;
6742   enum tree_code cond_code;
6743   gimple_seq stmts;
6744 
6745   cond_code = fd->loop.cond_code;
6746   cond_code = cond_code == LT_EXPR ? GE_EXPR : LE_EXPR;
6747 
6748   /* When possible, use a strict equality expression.  This can let VRP
6749      type optimizations deduce the value and remove a copy.  */
6750   if (tree_fits_shwi_p (fd->loop.step))
6751     {
6752       HOST_WIDE_INT step = tree_to_shwi (fd->loop.step);
6753       if (step == 1 || step == -1)
6754 	cond_code = EQ_EXPR;
6755     }
6756 
6757   if (gimple_omp_for_kind (fd->for_stmt) == GF_OMP_FOR_KIND_GRID_LOOP
6758       || gimple_omp_for_grid_phony (fd->for_stmt))
6759     cond = omp_grid_lastprivate_predicate (fd);
6760   else
6761     {
6762       tree n2 = fd->loop.n2;
6763       if (fd->collapse > 1
6764 	  && TREE_CODE (n2) != INTEGER_CST
6765 	  && gimple_omp_for_combined_into_p (fd->for_stmt))
6766 	{
6767 	  struct omp_context *taskreg_ctx = NULL;
6768 	  if (gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
6769 	    {
6770 	      gomp_for *gfor = as_a <gomp_for *> (ctx->outer->stmt);
6771 	      if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_FOR
6772 		  || gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_DISTRIBUTE)
6773 		{
6774 		  if (gimple_omp_for_combined_into_p (gfor))
6775 		    {
6776 		      gcc_assert (ctx->outer->outer
6777 				  && is_parallel_ctx (ctx->outer->outer));
6778 		      taskreg_ctx = ctx->outer->outer;
6779 		    }
6780 		  else
6781 		    {
6782 		      struct omp_for_data outer_fd;
6783 		      omp_extract_for_data (gfor, &outer_fd, NULL);
6784 		      n2 = fold_convert (TREE_TYPE (n2), outer_fd.loop.n2);
6785 		    }
6786 		}
6787 	      else if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_TASKLOOP)
6788 		taskreg_ctx = ctx->outer->outer;
6789 	    }
6790 	  else if (is_taskreg_ctx (ctx->outer))
6791 	    taskreg_ctx = ctx->outer;
6792 	  if (taskreg_ctx)
6793 	    {
6794 	      int i;
6795 	      tree taskreg_clauses
6796 		= gimple_omp_taskreg_clauses (taskreg_ctx->stmt);
6797 	      tree innerc = omp_find_clause (taskreg_clauses,
6798 					     OMP_CLAUSE__LOOPTEMP_);
6799 	      gcc_assert (innerc);
6800 	      for (i = 0; i < fd->collapse; i++)
6801 		{
6802 		  innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
6803 					    OMP_CLAUSE__LOOPTEMP_);
6804 		  gcc_assert (innerc);
6805 		}
6806 	      innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
6807 					OMP_CLAUSE__LOOPTEMP_);
6808 	      if (innerc)
6809 		n2 = fold_convert (TREE_TYPE (n2),
6810 				   lookup_decl (OMP_CLAUSE_DECL (innerc),
6811 						taskreg_ctx));
6812 	    }
6813 	}
6814       cond = build2 (cond_code, boolean_type_node, fd->loop.v, n2);
6815     }
6816 
6817   clauses = gimple_omp_for_clauses (fd->for_stmt);
6818   stmts = NULL;
6819   lower_lastprivate_clauses (clauses, cond, &stmts, ctx);
6820   if (!gimple_seq_empty_p (stmts))
6821     {
6822       gimple_seq_add_seq (&stmts, *dlist);
6823       *dlist = stmts;
6824 
6825       /* Optimize: v = 0; is usually cheaper than v = some_other_constant.  */
6826       vinit = fd->loop.n1;
6827       if (cond_code == EQ_EXPR
6828 	  && tree_fits_shwi_p (fd->loop.n2)
6829 	  && ! integer_zerop (fd->loop.n2))
6830 	vinit = build_int_cst (TREE_TYPE (fd->loop.v), 0);
6831       else
6832 	vinit = unshare_expr (vinit);
6833 
6834       /* Initialize the iterator variable, so that threads that don't execute
6835 	 any iterations don't execute the lastprivate clauses by accident.  */
6836       gimplify_assign (fd->loop.v, vinit, body_p);
6837     }
6838 }
6839 
6840 
6841 /* Lower code for an OMP loop directive.  */
6842 
6843 static void
6844 lower_omp_for (gimple_stmt_iterator *gsi_p, omp_context *ctx)
6845 {
6846   tree *rhs_p, block;
6847   struct omp_for_data fd, *fdp = NULL;
6848   gomp_for *stmt = as_a <gomp_for *> (gsi_stmt (*gsi_p));
6849   gbind *new_stmt;
6850   gimple_seq omp_for_body, body, dlist;
6851   gimple_seq oacc_head = NULL, oacc_tail = NULL;
6852   size_t i;
6853 
6854   push_gimplify_context ();
6855 
6856   lower_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
6857 
6858   block = make_node (BLOCK);
6859   new_stmt = gimple_build_bind (NULL, NULL, block);
6860   /* Replace at gsi right away, so that 'stmt' is no longer a member
6861      of a sequence, as we're going to add it to a different
6862      one below.  */
6863   gsi_replace (gsi_p, new_stmt, true);
6864 
6865   /* Move declaration of temporaries in the loop body before we make
6866      it go away.  */
6867   omp_for_body = gimple_omp_body (stmt);
6868   if (!gimple_seq_empty_p (omp_for_body)
6869       && gimple_code (gimple_seq_first_stmt (omp_for_body)) == GIMPLE_BIND)
6870     {
6871       gbind *inner_bind
6872 	= as_a <gbind *> (gimple_seq_first_stmt (omp_for_body));
6873       tree vars = gimple_bind_vars (inner_bind);
6874       gimple_bind_append_vars (new_stmt, vars);
6875       /* bind_vars/BLOCK_VARS are being moved to new_stmt/block, so don't
6876 	 keep them on the inner_bind and its block.  */
6877       gimple_bind_set_vars (inner_bind, NULL_TREE);
6878       if (gimple_bind_block (inner_bind))
6879 	BLOCK_VARS (gimple_bind_block (inner_bind)) = NULL_TREE;
6880     }
6881 
6882   if (gimple_omp_for_combined_into_p (stmt))
6883     {
6884       omp_extract_for_data (stmt, &fd, NULL);
6885       fdp = &fd;
6886 
6887       /* We need two temporaries with fd.loop.v type (istart/iend)
6888 	 and then (fd.collapse - 1) temporaries with the same
6889 	 type for count2 ... countN-1 vars if not constant.  */
6890       size_t count = 2;
6891       tree type = fd.iter_type;
6892       if (fd.collapse > 1
6893 	  && TREE_CODE (fd.loop.n2) != INTEGER_CST)
6894 	count += fd.collapse - 1;
6895       bool taskreg_for
6896 	= (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR
6897 	   || gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP);
6898       tree outerc = NULL, *pc = gimple_omp_for_clauses_ptr (stmt);
6899       tree simtc = NULL;
6900       tree clauses = *pc;
6901       if (taskreg_for)
6902 	outerc
6903 	  = omp_find_clause (gimple_omp_taskreg_clauses (ctx->outer->stmt),
6904 			     OMP_CLAUSE__LOOPTEMP_);
6905       if (ctx->simt_stmt)
6906 	simtc = omp_find_clause (gimple_omp_for_clauses (ctx->simt_stmt),
6907 				 OMP_CLAUSE__LOOPTEMP_);
6908       for (i = 0; i < count; i++)
6909 	{
6910 	  tree temp;
6911 	  if (taskreg_for)
6912 	    {
6913 	      gcc_assert (outerc);
6914 	      temp = lookup_decl (OMP_CLAUSE_DECL (outerc), ctx->outer);
6915 	      outerc = omp_find_clause (OMP_CLAUSE_CHAIN (outerc),
6916 					OMP_CLAUSE__LOOPTEMP_);
6917 	    }
6918 	  else
6919 	    {
6920 	      /* If there are 2 adjacent SIMD stmts, one with _simt_
6921 		 clause, another without, make sure they have the same
6922 		 decls in _looptemp_ clauses, because the outer stmt
6923 		 they are combined into will look up just one inner_stmt.  */
6924 	      if (ctx->simt_stmt)
6925 		temp = OMP_CLAUSE_DECL (simtc);
6926 	      else
6927 		temp = create_tmp_var (type);
6928 	      insert_decl_map (&ctx->outer->cb, temp, temp);
6929 	    }
6930 	  *pc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
6931 	  OMP_CLAUSE_DECL (*pc) = temp;
6932 	  pc = &OMP_CLAUSE_CHAIN (*pc);
6933 	  if (ctx->simt_stmt)
6934 	    simtc = omp_find_clause (OMP_CLAUSE_CHAIN (simtc),
6935 				     OMP_CLAUSE__LOOPTEMP_);
6936 	}
6937       *pc = clauses;
6938     }
6939 
6940   /* The pre-body and input clauses go before the lowered GIMPLE_OMP_FOR.  */
6941   dlist = NULL;
6942   body = NULL;
6943   lower_rec_input_clauses (gimple_omp_for_clauses (stmt), &body, &dlist, ctx,
6944 			   fdp);
6945   gimple_seq_add_seq (&body, gimple_omp_for_pre_body (stmt));
6946 
6947   lower_omp (gimple_omp_body_ptr (stmt), ctx);
6948 
6949   /* Lower the header expressions.  At this point, we can assume that
6950      the header is of the form:
6951 
6952      	#pragma omp for (V = VAL1; V {<|>|<=|>=} VAL2; V = V [+-] VAL3)
6953 
6954      We just need to make sure that VAL1, VAL2 and VAL3 are lowered
6955      using the .omp_data_s mapping, if needed.  */
6956   for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
6957     {
6958       rhs_p = gimple_omp_for_initial_ptr (stmt, i);
6959       if (!is_gimple_min_invariant (*rhs_p))
6960 	*rhs_p = get_formal_tmp_var (*rhs_p, &body);
6961       else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
6962 	recompute_tree_invariant_for_addr_expr (*rhs_p);
6963 
6964       rhs_p = gimple_omp_for_final_ptr (stmt, i);
6965       if (!is_gimple_min_invariant (*rhs_p))
6966 	*rhs_p = get_formal_tmp_var (*rhs_p, &body);
6967       else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
6968 	recompute_tree_invariant_for_addr_expr (*rhs_p);
6969 
6970       rhs_p = &TREE_OPERAND (gimple_omp_for_incr (stmt, i), 1);
6971       if (!is_gimple_min_invariant (*rhs_p))
6972 	*rhs_p = get_formal_tmp_var (*rhs_p, &body);
6973     }
6974 
6975   /* Once lowered, extract the bounds and clauses.  */
6976   omp_extract_for_data (stmt, &fd, NULL);
6977 
6978   if (is_gimple_omp_oacc (ctx->stmt)
6979       && !ctx_in_oacc_kernels_region (ctx))
6980     lower_oacc_head_tail (gimple_location (stmt),
6981 			  gimple_omp_for_clauses (stmt),
6982 			  &oacc_head, &oacc_tail, ctx);
6983 
6984   /* Add OpenACC partitioning and reduction markers just before the loop.  */
6985   if (oacc_head)
6986     gimple_seq_add_seq (&body, oacc_head);
6987 
6988   lower_omp_for_lastprivate (&fd, &body, &dlist, ctx);
6989 
6990   if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
6991     for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
6992       if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
6993 	  && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
6994 	{
6995 	  OMP_CLAUSE_DECL (c) = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
6996 	  if (DECL_P (OMP_CLAUSE_LINEAR_STEP (c)))
6997 	    OMP_CLAUSE_LINEAR_STEP (c)
6998 	      = maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_LINEAR_STEP (c),
6999 						ctx);
7000 	}
7001 
7002   bool phony_loop = (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_GRID_LOOP
7003 		     && gimple_omp_for_grid_phony (stmt));
7004   if (!phony_loop)
7005     gimple_seq_add_stmt (&body, stmt);
7006   gimple_seq_add_seq (&body, gimple_omp_body (stmt));
7007 
7008   if (!phony_loop)
7009     gimple_seq_add_stmt (&body, gimple_build_omp_continue (fd.loop.v,
7010 							   fd.loop.v));
7011 
7012   /* After the loop, add exit clauses.  */
7013   lower_reduction_clauses (gimple_omp_for_clauses (stmt), &body, ctx);
7014 
7015   if (ctx->cancellable)
7016     gimple_seq_add_stmt (&body, gimple_build_label (ctx->cancel_label));
7017 
7018   gimple_seq_add_seq (&body, dlist);
7019 
7020   body = maybe_catch_exception (body);
7021 
7022   if (!phony_loop)
7023     {
7024       /* Region exit marker goes at the end of the loop body.  */
7025       gimple_seq_add_stmt (&body, gimple_build_omp_return (fd.have_nowait));
7026       maybe_add_implicit_barrier_cancel (ctx, &body);
7027     }
7028 
7029   /* Add OpenACC joining and reduction markers just after the loop.  */
7030   if (oacc_tail)
7031     gimple_seq_add_seq (&body, oacc_tail);
7032 
7033   pop_gimplify_context (new_stmt);
7034 
7035   gimple_bind_append_vars (new_stmt, ctx->block_vars);
7036   maybe_remove_omp_member_access_dummy_vars (new_stmt);
7037   BLOCK_VARS (block) = gimple_bind_vars (new_stmt);
7038   if (BLOCK_VARS (block))
7039     TREE_USED (block) = 1;
7040 
7041   gimple_bind_set_body (new_stmt, body);
7042   gimple_omp_set_body (stmt, NULL);
7043   gimple_omp_for_set_pre_body (stmt, NULL);
7044 }
7045 
7046 /* Callback for walk_stmts.  Check if the current statement only contains
7047    GIMPLE_OMP_FOR or GIMPLE_OMP_SECTIONS.  */
7048 
7049 static tree
7050 check_combined_parallel (gimple_stmt_iterator *gsi_p,
7051     			 bool *handled_ops_p,
7052     			 struct walk_stmt_info *wi)
7053 {
7054   int *info = (int *) wi->info;
7055   gimple *stmt = gsi_stmt (*gsi_p);
7056 
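  /* *INFO tracks what the walk has seen so far: 0 for nothing relevant,
     1 for exactly one workshare statement, -1 once the body can no longer
     be a combined parallel.  */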
7057   *handled_ops_p = true;
7058   switch (gimple_code (stmt))
7059     {
7060     WALK_SUBSTMTS;
7061 
7062     case GIMPLE_DEBUG:
7063       break;
7064     case GIMPLE_OMP_FOR:
7065     case GIMPLE_OMP_SECTIONS:
7066       *info = *info == 0 ? 1 : -1;
7067       break;
7068     default:
7069       *info = -1;
7070       break;
7071     }
7072   return NULL;
7073 }
7074 
7075 struct omp_taskcopy_context
7076 {
7077   /* This field must be at the beginning, as we do "inheritance": Some
7078      callback functions for tree-inline.c (e.g., omp_copy_decl)
7079      receive a copy_body_data pointer that is up-casted to an
7080      omp_context pointer.  */
7081   copy_body_data cb;
7082   omp_context *ctx;
7083 };
7084 
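/* copy_body_data::copy_decl hook for the task copy function: return a fresh
   temporary for VAR if it has a field in the sending record (and thus will
   be initialized from the incoming data), otherwise keep VAR as-is.  */
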
7085 static tree
7086 task_copyfn_copy_decl (tree var, copy_body_data *cb)
7087 {
7088   struct omp_taskcopy_context *tcctx = (struct omp_taskcopy_context *) cb;
7089 
7090   if (splay_tree_lookup (tcctx->ctx->sfield_map, (splay_tree_key) var))
7091     return create_tmp_var (TREE_TYPE (var));
7092 
7093   return var;
7094 }
7095 
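/* Build a copy of the variably modified record ORIG_TYPE for use in the
   task copy function, remapping each field's size and offset trees through
   TCCTX's decl map.  */
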
7096 static tree
7097 task_copyfn_remap_type (struct omp_taskcopy_context *tcctx, tree orig_type)
7098 {
7099   tree name, new_fields = NULL, type, f;
7100 
7101   type = lang_hooks.types.make_type (RECORD_TYPE);
7102   name = DECL_NAME (TYPE_NAME (orig_type));
7103   name = build_decl (gimple_location (tcctx->ctx->stmt),
7104 		     TYPE_DECL, name, type);
7105   TYPE_NAME (type) = name;
7106 
7107   for (f = TYPE_FIELDS (orig_type); f ; f = TREE_CHAIN (f))
7108     {
7109       tree new_f = copy_node (f);
7110       DECL_CONTEXT (new_f) = type;
7111       TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &tcctx->cb);
7112       TREE_CHAIN (new_f) = new_fields;
7113       walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &tcctx->cb, NULL);
7114       walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r, &tcctx->cb, NULL);
7115       walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
7116 		 &tcctx->cb, NULL);
7117       new_fields = new_f;
7118       tcctx->cb.decl_map->put (f, new_f);
7119     }
7120   TYPE_FIELDS (type) = nreverse (new_fields);
7121   layout_type (type);
7122   return type;
7123 }
7124 
7125 /* Create task copyfn, which copies firstprivate and shared data from the
   srecord_type argument into the record_type argument.  */
7126 
7127 static void
7128 create_task_copyfn (gomp_task *task_stmt, omp_context *ctx)
7129 {
7130   struct function *child_cfun;
7131   tree child_fn, t, c, src, dst, f, sf, arg, sarg, decl;
7132   tree record_type, srecord_type, bind, list;
7133   bool record_needs_remap = false, srecord_needs_remap = false;
7134   splay_tree_node n;
7135   struct omp_taskcopy_context tcctx;
7136   location_t loc = gimple_location (task_stmt);
7137   size_t looptempno = 0;
7138 
7139   child_fn = gimple_omp_task_copy_fn (task_stmt);
7140   child_cfun = DECL_STRUCT_FUNCTION (child_fn);
7141   gcc_assert (child_cfun->cfg == NULL);
7142   DECL_SAVED_TREE (child_fn) = alloc_stmt_list ();
7143 
7144   /* Reset DECL_CONTEXT on function arguments.  */
7145   for (t = DECL_ARGUMENTS (child_fn); t; t = DECL_CHAIN (t))
7146     DECL_CONTEXT (t) = child_fn;
7147 
7148   /* Populate the function.  */
7149   push_gimplify_context ();
7150   push_cfun (child_cfun);
7151 
7152   bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
7153   TREE_SIDE_EFFECTS (bind) = 1;
7154   list = NULL;
7155   DECL_SAVED_TREE (child_fn) = bind;
7156   DECL_SOURCE_LOCATION (child_fn) = gimple_location (task_stmt);
7157 
7158   /* Remap src and dst argument types if needed.  */
7159   record_type = ctx->record_type;
7160   srecord_type = ctx->srecord_type;
7161   for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
7162     if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
7163       {
7164 	record_needs_remap = true;
7165 	break;
7166       }
7167   for (f = TYPE_FIELDS (srecord_type); f ; f = DECL_CHAIN (f))
7168     if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
7169       {
7170 	srecord_needs_remap = true;
7171 	break;
7172       }
7173 
7174   if (record_needs_remap || srecord_needs_remap)
7175     {
7176       memset (&tcctx, '\0', sizeof (tcctx));
7177       tcctx.cb.src_fn = ctx->cb.src_fn;
7178       tcctx.cb.dst_fn = child_fn;
7179       tcctx.cb.src_node = cgraph_node::get (tcctx.cb.src_fn);
7180       gcc_checking_assert (tcctx.cb.src_node);
7181       tcctx.cb.dst_node = tcctx.cb.src_node;
7182       tcctx.cb.src_cfun = ctx->cb.src_cfun;
7183       tcctx.cb.copy_decl = task_copyfn_copy_decl;
7184       tcctx.cb.eh_lp_nr = 0;
7185       tcctx.cb.transform_call_graph_edges = CB_CGE_MOVE;
7186       tcctx.cb.decl_map = new hash_map<tree, tree>;
7187       tcctx.ctx = ctx;
7188 
7189       if (record_needs_remap)
7190 	record_type = task_copyfn_remap_type (&tcctx, record_type);
7191       if (srecord_needs_remap)
7192 	srecord_type = task_copyfn_remap_type (&tcctx, srecord_type);
7193     }
7194   else
7195     tcctx.cb.decl_map = NULL;
7196 
7197   arg = DECL_ARGUMENTS (child_fn);
7198   TREE_TYPE (arg) = build_pointer_type (record_type);
7199   sarg = DECL_CHAIN (arg);
7200   TREE_TYPE (sarg) = build_pointer_type (srecord_type);
7201 
7202   /* First pass: initialize temporaries used in record_type and srecord_type
7203      sizes and field offsets.  */
7204   if (tcctx.cb.decl_map)
7205     for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
7206       if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7207 	{
7208 	  tree *p;
7209 
7210 	  decl = OMP_CLAUSE_DECL (c);
7211 	  p = tcctx.cb.decl_map->get (decl);
7212 	  if (p == NULL)
7213 	    continue;
7214 	  n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
7215 	  sf = (tree) n->value;
7216 	  sf = *tcctx.cb.decl_map->get (sf);
7217 	  src = build_simple_mem_ref_loc (loc, sarg);
7218 	  src = omp_build_component_ref (src, sf);
7219 	  t = build2 (MODIFY_EXPR, TREE_TYPE (*p), *p, src);
7220 	  append_to_statement_list (t, &list);
7221 	}
7222 
7223   /* Second pass: copy shared var pointers and copy construct non-VLA
7224      firstprivate vars.  */
7225   for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
7226     switch (OMP_CLAUSE_CODE (c))
7227       {
7228 	splay_tree_key key;
7229       case OMP_CLAUSE_SHARED:
7230 	decl = OMP_CLAUSE_DECL (c);
7231 	key = (splay_tree_key) decl;
7232 	if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
7233 	  key = (splay_tree_key) &DECL_UID (decl);
7234 	n = splay_tree_lookup (ctx->field_map, key);
7235 	if (n == NULL)
7236 	  break;
7237 	f = (tree) n->value;
7238 	if (tcctx.cb.decl_map)
7239 	  f = *tcctx.cb.decl_map->get (f);
7240 	n = splay_tree_lookup (ctx->sfield_map, key);
7241 	sf = (tree) n->value;
7242 	if (tcctx.cb.decl_map)
7243 	  sf = *tcctx.cb.decl_map->get (sf);
7244 	src = build_simple_mem_ref_loc (loc, sarg);
7245 	src = omp_build_component_ref (src, sf);
7246 	dst = build_simple_mem_ref_loc (loc, arg);
7247 	dst = omp_build_component_ref (dst, f);
7248 	t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
7249 	append_to_statement_list (t, &list);
7250 	break;
7251       case OMP_CLAUSE__LOOPTEMP_:
7252 	/* Fields for first two _looptemp_ clauses are initialized by
7253 	   GOMP_taskloop*, the rest are handled like firstprivate.  */
7254         if (looptempno < 2)
7255 	  {
7256 	    looptempno++;
7257 	    break;
7258 	  }
7259 	/* FALLTHRU */
7260       case OMP_CLAUSE_FIRSTPRIVATE:
7261 	decl = OMP_CLAUSE_DECL (c);
7262 	if (is_variable_sized (decl))
7263 	  break;
7264 	n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
7265 	if (n == NULL)
7266 	  break;
7267 	f = (tree) n->value;
7268 	if (tcctx.cb.decl_map)
7269 	  f = *tcctx.cb.decl_map->get (f);
7270 	n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
7271 	if (n != NULL)
7272 	  {
7273 	    sf = (tree) n->value;
7274 	    if (tcctx.cb.decl_map)
7275 	      sf = *tcctx.cb.decl_map->get (sf);
7276 	    src = build_simple_mem_ref_loc (loc, sarg);
7277 	    src = omp_build_component_ref (src, sf);
7278 	    if (use_pointer_for_field (decl, NULL) || omp_is_reference (decl))
7279 	      src = build_simple_mem_ref_loc (loc, src);
7280 	  }
7281 	else
7282 	  src = decl;
7283 	dst = build_simple_mem_ref_loc (loc, arg);
7284 	dst = omp_build_component_ref (dst, f);
7285 	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE__LOOPTEMP_)
7286 	  t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
7287 	else
7288 	  t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
7289 	append_to_statement_list (t, &list);
7290 	break;
7291       case OMP_CLAUSE_PRIVATE:
7292 	if (! OMP_CLAUSE_PRIVATE_OUTER_REF (c))
7293 	  break;
7294 	decl = OMP_CLAUSE_DECL (c);
7295 	n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
7296 	f = (tree) n->value;
7297 	if (tcctx.cb.decl_map)
7298 	  f = *tcctx.cb.decl_map->get (f);
7299 	n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
7300 	if (n != NULL)
7301 	  {
7302 	    sf = (tree) n->value;
7303 	    if (tcctx.cb.decl_map)
7304 	      sf = *tcctx.cb.decl_map->get (sf);
7305 	    src = build_simple_mem_ref_loc (loc, sarg);
7306 	    src = omp_build_component_ref (src, sf);
7307 	    if (use_pointer_for_field (decl, NULL))
7308 	      src = build_simple_mem_ref_loc (loc, src);
7309 	  }
7310 	else
7311 	  src = decl;
7312 	dst = build_simple_mem_ref_loc (loc, arg);
7313 	dst = omp_build_component_ref (dst, f);
7314 	t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
7315 	append_to_statement_list (t, &list);
7316 	break;
7317       default:
7318 	break;
7319       }
7320 
7321   /* Last pass: handle VLA firstprivates.  */
7322   if (tcctx.cb.decl_map)
7323     for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
7324       if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7325 	{
7326 	  tree ind, ptr, df;
7327 
7328 	  decl = OMP_CLAUSE_DECL (c);
7329 	  if (!is_variable_sized (decl))
7330 	    continue;
7331 	  n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
7332 	  if (n == NULL)
7333 	    continue;
7334 	  f = (tree) n->value;
7335 	  f = *tcctx.cb.decl_map->get (f);
7336 	  gcc_assert (DECL_HAS_VALUE_EXPR_P (decl));
7337 	  ind = DECL_VALUE_EXPR (decl);
7338 	  gcc_assert (TREE_CODE (ind) == INDIRECT_REF);
7339 	  gcc_assert (DECL_P (TREE_OPERAND (ind, 0)));
7340 	  n = splay_tree_lookup (ctx->sfield_map,
7341 				 (splay_tree_key) TREE_OPERAND (ind, 0));
7342 	  sf = (tree) n->value;
7343 	  sf = *tcctx.cb.decl_map->get (sf);
7344 	  src = build_simple_mem_ref_loc (loc, sarg);
7345 	  src = omp_build_component_ref (src, sf);
7346 	  src = build_simple_mem_ref_loc (loc, src);
7347 	  dst = build_simple_mem_ref_loc (loc, arg);
7348 	  dst = omp_build_component_ref (dst, f);
7349 	  t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
7350 	  append_to_statement_list (t, &list);
7351 	  n = splay_tree_lookup (ctx->field_map,
7352 				 (splay_tree_key) TREE_OPERAND (ind, 0));
7353 	  df = (tree) n->value;
7354 	  df = *tcctx.cb.decl_map->get (df);
7355 	  ptr = build_simple_mem_ref_loc (loc, arg);
7356 	  ptr = omp_build_component_ref (ptr, df);
7357 	  t = build2 (MODIFY_EXPR, TREE_TYPE (ptr), ptr,
7358 		      build_fold_addr_expr_loc (loc, dst));
7359 	  append_to_statement_list (t, &list);
7360 	}
7361 
7362   t = build1 (RETURN_EXPR, void_type_node, NULL);
7363   append_to_statement_list (t, &list);
7364 
7365   if (tcctx.cb.decl_map)
7366     delete tcctx.cb.decl_map;
7367   pop_gimplify_context (NULL);
7368   BIND_EXPR_BODY (bind) = list;
7369   pop_cfun ();
7370 }
7371 
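/* Lower the depend clauses in *PCLAUSES into the array form the runtime
   expects: element 0 holds the total number of depend addresses, element 1
   the number of out/inout ones, followed by the out/inout addresses and
   then the in addresses.  The array initialization is emitted to *ISEQ and
   a clobber of the array to *OSEQ, and an artificial depend clause pointing
   at the array is prepended to *PCLAUSES.  For example, assuming clauses
   depend(out: x) depend(in: y), the array would be
   { (void *) 2, (void *) 1, &x, &y }.  */
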
7372 static void
7373 lower_depend_clauses (tree *pclauses, gimple_seq *iseq, gimple_seq *oseq)
7374 {
7375   tree c, clauses;
7376   gimple *g;
7377   size_t n_in = 0, n_out = 0, idx = 2, i;
7378 
7379   clauses = omp_find_clause (*pclauses, OMP_CLAUSE_DEPEND);
7380   gcc_assert (clauses);
7381   for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
7382     if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
7383       switch (OMP_CLAUSE_DEPEND_KIND (c))
7384 	{
7385 	case OMP_CLAUSE_DEPEND_IN:
7386 	  n_in++;
7387 	  break;
7388 	case OMP_CLAUSE_DEPEND_OUT:
7389 	case OMP_CLAUSE_DEPEND_INOUT:
7390 	  n_out++;
7391 	  break;
7392 	case OMP_CLAUSE_DEPEND_SOURCE:
7393 	case OMP_CLAUSE_DEPEND_SINK:
7394 	  /* FALLTHRU */
7395 	default:
7396 	  gcc_unreachable ();
7397 	}
7398   tree type = build_array_type_nelts (ptr_type_node, n_in + n_out + 2);
7399   tree array = create_tmp_var (type);
7400   TREE_ADDRESSABLE (array) = 1;
7401   tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
7402 		   NULL_TREE);
7403   g = gimple_build_assign (r, build_int_cst (ptr_type_node, n_in + n_out));
7404   gimple_seq_add_stmt (iseq, g);
7405   r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
7406 	      NULL_TREE);
7407   g = gimple_build_assign (r, build_int_cst (ptr_type_node, n_out));
7408   gimple_seq_add_stmt (iseq, g);
7409   for (i = 0; i < 2; i++)
7410     {
7411       if ((i ? n_in : n_out) == 0)
7412 	continue;
7413       for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
7414 	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
7415 	    && ((OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_IN) ^ i))
7416 	  {
7417 	    tree t = OMP_CLAUSE_DECL (c);
7418 	    t = fold_convert (ptr_type_node, t);
7419 	    gimplify_expr (&t, iseq, NULL, is_gimple_val, fb_rvalue);
7420 	    r = build4 (ARRAY_REF, ptr_type_node, array, size_int (idx++),
7421 			NULL_TREE, NULL_TREE);
7422 	    g = gimple_build_assign (r, t);
7423 	    gimple_seq_add_stmt (iseq, g);
7424 	  }
7425     }
7426   c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
7427   OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
7428   OMP_CLAUSE_CHAIN (c) = *pclauses;
7429   *pclauses = c;
7430   tree clobber = build_constructor (type, NULL);
7431   TREE_THIS_VOLATILE (clobber) = 1;
7432   g = gimple_build_assign (array, clobber);
7433   gimple_seq_add_stmt (oseq, g);
7434 }
7435 
7436 /* Lower the OpenMP parallel or task directive in the current statement
7437    in GSI_P.  CTX holds context information for the directive.  */
7438 
7439 static void
7440 lower_omp_taskreg (gimple_stmt_iterator *gsi_p, omp_context *ctx)
7441 {
7442   tree clauses;
7443   tree child_fn, t;
7444   gimple *stmt = gsi_stmt (*gsi_p);
7445   gbind *par_bind, *bind, *dep_bind = NULL;
7446   gimple_seq par_body, olist, ilist, par_olist, par_rlist, par_ilist, new_body;
7447   location_t loc = gimple_location (stmt);
7448 
7449   clauses = gimple_omp_taskreg_clauses (stmt);
7450   par_bind
7451     = as_a <gbind *> (gimple_seq_first_stmt (gimple_omp_body (stmt)));
7452   par_body = gimple_bind_body (par_bind);
7453   child_fn = ctx->cb.dst_fn;
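  /* Detect a parallel whose body consists of a single worksharing
     construct, so that expansion can use the runtime's combined
     parallel + workshare entry points.  */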
7454   if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL
7455       && !gimple_omp_parallel_combined_p (stmt))
7456     {
7457       struct walk_stmt_info wi;
7458       int ws_num = 0;
7459 
7460       memset (&wi, 0, sizeof (wi));
7461       wi.info = &ws_num;
7462       wi.val_only = true;
7463       walk_gimple_seq (par_body, check_combined_parallel, NULL, &wi);
7464       if (ws_num == 1)
7465 	gimple_omp_parallel_set_combined_p (stmt, true);
7466     }
7467   gimple_seq dep_ilist = NULL;
7468   gimple_seq dep_olist = NULL;
7469   if (gimple_code (stmt) == GIMPLE_OMP_TASK
7470       && omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
7471     {
7472       push_gimplify_context ();
7473       dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
7474       lower_depend_clauses (gimple_omp_task_clauses_ptr (stmt),
7475 			    &dep_ilist, &dep_olist);
7476     }
7477 
7478   if (ctx->srecord_type)
7479     create_task_copyfn (as_a <gomp_task *> (stmt), ctx);
7480 
7481   push_gimplify_context ();
7482 
7483   par_olist = NULL;
7484   par_ilist = NULL;
7485   par_rlist = NULL;
7486   bool phony_construct = gimple_code (stmt) == GIMPLE_OMP_PARALLEL
7487     && gimple_omp_parallel_grid_phony (as_a <gomp_parallel *> (stmt));
7488   if (phony_construct && ctx->record_type)
7489     {
7490       gcc_checking_assert (!ctx->receiver_decl);
7491       ctx->receiver_decl = create_tmp_var
7492 	(build_reference_type (ctx->record_type), ".omp_rec");
7493     }
7494   lower_rec_input_clauses (clauses, &par_ilist, &par_olist, ctx, NULL);
7495   lower_omp (&par_body, ctx);
7496   if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL)
7497     lower_reduction_clauses (clauses, &par_rlist, ctx);
7498 
7499   /* Declare all the variables created by mapping and the variables
7500      declared in the scope of the parallel body.  */
7501   record_vars_into (ctx->block_vars, child_fn);
7502   maybe_remove_omp_member_access_dummy_vars (par_bind);
7503   record_vars_into (gimple_bind_vars (par_bind), child_fn);
7504 
7505   if (ctx->record_type)
7506     {
7507       ctx->sender_decl
7508 	= create_tmp_var (ctx->srecord_type ? ctx->srecord_type
7509 			  : ctx->record_type, ".omp_data_o");
7510       DECL_NAMELESS (ctx->sender_decl) = 1;
7511       TREE_ADDRESSABLE (ctx->sender_decl) = 1;
7512       gimple_omp_taskreg_set_data_arg (stmt, ctx->sender_decl);
7513     }
7514 
7515   olist = NULL;
7516   ilist = NULL;
7517   lower_send_clauses (clauses, &ilist, &olist, ctx);
7518   lower_send_shared_vars (&ilist, &olist, ctx);
7519 
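  /* After the region, clobber the sender record so its stack slot can be
     reused.  */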
7520   if (ctx->record_type)
7521     {
7522       tree clobber = build_constructor (TREE_TYPE (ctx->sender_decl), NULL);
7523       TREE_THIS_VOLATILE (clobber) = 1;
7524       gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
7525 							clobber));
7526     }
7527 
7528   /* Once all the expansions are done, sequence all the different
7529      fragments inside gimple_omp_body.  */
7530 
7531   new_body = NULL;
7532 
7533   if (ctx->record_type)
7534     {
7535       t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
7536       /* fixup_child_record_type might have changed receiver_decl's type.  */
7537       t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
7538       gimple_seq_add_stmt (&new_body,
7539 	  		   gimple_build_assign (ctx->receiver_decl, t));
7540     }
7541 
7542   gimple_seq_add_seq (&new_body, par_ilist);
7543   gimple_seq_add_seq (&new_body, par_body);
7544   gimple_seq_add_seq (&new_body, par_rlist);
7545   if (ctx->cancellable)
7546     gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
7547   gimple_seq_add_seq (&new_body, par_olist);
7548   new_body = maybe_catch_exception (new_body);
7549   if (gimple_code (stmt) == GIMPLE_OMP_TASK)
7550     gimple_seq_add_stmt (&new_body,
7551 			 gimple_build_omp_continue (integer_zero_node,
7552 						    integer_zero_node));
7553   if (!phony_construct)
7554     {
7555       gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
7556       gimple_omp_set_body (stmt, new_body);
7557     }
7558 
7559   bind = gimple_build_bind (NULL, NULL, gimple_bind_block (par_bind));
7560   gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
7561   gimple_bind_add_seq (bind, ilist);
7562   if (!phony_construct)
7563     gimple_bind_add_stmt (bind, stmt);
7564   else
7565     gimple_bind_add_seq (bind, new_body);
7566   gimple_bind_add_seq (bind, olist);
7567 
7568   pop_gimplify_context (NULL);
7569 
7570   if (dep_bind)
7571     {
7572       gimple_bind_add_seq (dep_bind, dep_ilist);
7573       gimple_bind_add_stmt (dep_bind, bind);
7574       gimple_bind_add_seq (dep_bind, dep_olist);
7575       pop_gimplify_context (dep_bind);
7576     }
7577 }
7578 
7579 /* Lower the GIMPLE_OMP_TARGET in the current statement
7580    in GSI_P.  CTX holds context information for the directive.  */
7581 
7582 static void
7583 lower_omp_target (gimple_stmt_iterator *gsi_p, omp_context *ctx)
7584 {
7585   tree clauses;
7586   tree child_fn, t, c;
7587   gomp_target *stmt = as_a <gomp_target *> (gsi_stmt (*gsi_p));
7588   gbind *tgt_bind, *bind, *dep_bind = NULL;
7589   gimple_seq tgt_body, olist, ilist, fplist, new_body;
7590   location_t loc = gimple_location (stmt);
7591   bool offloaded, data_region;
7592   unsigned int map_cnt = 0;
7593 
7594   offloaded = is_gimple_omp_offloaded (stmt);
7595   switch (gimple_omp_target_kind (stmt))
7596     {
7597     case GF_OMP_TARGET_KIND_REGION:
7598     case GF_OMP_TARGET_KIND_UPDATE:
7599     case GF_OMP_TARGET_KIND_ENTER_DATA:
7600     case GF_OMP_TARGET_KIND_EXIT_DATA:
7601     case GF_OMP_TARGET_KIND_OACC_PARALLEL:
7602     case GF_OMP_TARGET_KIND_OACC_KERNELS:
7603     case GF_OMP_TARGET_KIND_OACC_UPDATE:
7604     case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA:
7605     case GF_OMP_TARGET_KIND_OACC_DECLARE:
7606       data_region = false;
7607       break;
7608     case GF_OMP_TARGET_KIND_DATA:
7609     case GF_OMP_TARGET_KIND_OACC_DATA:
7610     case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
7611       data_region = true;
7612       break;
7613     default:
7614       gcc_unreachable ();
7615     }
7616 
7617   clauses = gimple_omp_target_clauses (stmt);
7618 
7619   gimple_seq dep_ilist = NULL;
7620   gimple_seq dep_olist = NULL;
7621   if (omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
7622     {
7623       push_gimplify_context ();
7624       dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
7625       lower_depend_clauses (gimple_omp_target_clauses_ptr (stmt),
7626 			    &dep_ilist, &dep_olist);
7627     }
7628 
7629   tgt_bind = NULL;
7630   tgt_body = NULL;
7631   if (offloaded)
7632     {
7633       tgt_bind = gimple_seq_first_stmt_as_a_bind (gimple_omp_body (stmt));
7634       tgt_body = gimple_bind_body (tgt_bind);
7635     }
7636   else if (data_region)
7637     tgt_body = gimple_omp_body (stmt);
7638   child_fn = ctx->cb.dst_fn;
7639 
7640   push_gimplify_context ();
7641   fplist = NULL;
7642 
7643   for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7644     switch (OMP_CLAUSE_CODE (c))
7645       {
7646 	tree var, x;
7647 
7648       default:
7649 	break;
7650       case OMP_CLAUSE_MAP:
7651 #if CHECKING_P
7652 	/* First check what we're prepared to handle in the following.  */
7653 	switch (OMP_CLAUSE_MAP_KIND (c))
7654 	  {
7655 	  case GOMP_MAP_ALLOC:
7656 	  case GOMP_MAP_TO:
7657 	  case GOMP_MAP_FROM:
7658 	  case GOMP_MAP_TOFROM:
7659 	  case GOMP_MAP_POINTER:
7660 	  case GOMP_MAP_TO_PSET:
7661 	  case GOMP_MAP_DELETE:
7662 	  case GOMP_MAP_RELEASE:
7663 	  case GOMP_MAP_ALWAYS_TO:
7664 	  case GOMP_MAP_ALWAYS_FROM:
7665 	  case GOMP_MAP_ALWAYS_TOFROM:
7666 	  case GOMP_MAP_FIRSTPRIVATE_POINTER:
7667 	  case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
7668 	  case GOMP_MAP_STRUCT:
7669 	  case GOMP_MAP_ALWAYS_POINTER:
7670 	    break;
7671 	  case GOMP_MAP_FORCE_ALLOC:
7672 	  case GOMP_MAP_FORCE_TO:
7673 	  case GOMP_MAP_FORCE_FROM:
7674 	  case GOMP_MAP_FORCE_TOFROM:
7675 	  case GOMP_MAP_FORCE_PRESENT:
7676 	  case GOMP_MAP_FORCE_DEVICEPTR:
7677 	  case GOMP_MAP_DEVICE_RESIDENT:
7678 	  case GOMP_MAP_LINK:
7679 	    gcc_assert (is_gimple_omp_oacc (stmt));
7680 	    break;
7681 	  default:
7682 	    gcc_unreachable ();
7683 	  }
7684 #endif
7685 	  /* FALLTHRU */
7686       case OMP_CLAUSE_TO:
7687       case OMP_CLAUSE_FROM:
7688       oacc_firstprivate:
7689 	var = OMP_CLAUSE_DECL (c);
7690 	if (!DECL_P (var))
7691 	  {
7692 	    if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP
7693 		|| (!OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
7694 		    && (OMP_CLAUSE_MAP_KIND (c)
7695 			!= GOMP_MAP_FIRSTPRIVATE_POINTER)))
7696 	      map_cnt++;
7697 	    continue;
7698 	  }
7699 
7700 	if (DECL_SIZE (var)
7701 	    && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
7702 	  {
7703 	    tree var2 = DECL_VALUE_EXPR (var);
7704 	    gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
7705 	    var2 = TREE_OPERAND (var2, 0);
7706 	    gcc_assert (DECL_P (var2));
7707 	    var = var2;
7708 	  }
7709 
7710 	if (offloaded
7711 	    && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7712 	    && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
7713 		|| OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
7714 	  {
7715 	    if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
7716 	      {
7717 		if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx))
7718 		    && varpool_node::get_create (var)->offloadable)
7719 		  continue;
7720 
7721 		tree type = build_pointer_type (TREE_TYPE (var));
7722 		tree new_var = lookup_decl (var, ctx);
7723 		x = create_tmp_var_raw (type, get_name (new_var));
7724 		gimple_add_tmp_var (x);
7725 		x = build_simple_mem_ref (x);
7726 		SET_DECL_VALUE_EXPR (new_var, x);
7727 		DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7728 	      }
7729 	    continue;
7730 	  }
7731 
7732 	if (!maybe_lookup_field (var, ctx))
7733 	  continue;
7734 
7735 	/* Don't remap oacc parallel reduction variables, because the
7736 	   intermediate result must be local to each gang.  */
7737 	if (offloaded && !(OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7738 			   && OMP_CLAUSE_MAP_IN_REDUCTION (c)))
7739 	  {
7740 	    x = build_receiver_ref (var, true, ctx);
7741 	    tree new_var = lookup_decl (var, ctx);
7742 
7743 	    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
7744 		&& OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
7745 		&& !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
7746 		&& TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
7747 	      x = build_simple_mem_ref (x);
7748 	    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
7749 	      {
7750 		gcc_assert (is_gimple_omp_oacc (ctx->stmt));
7751 		if (omp_is_reference (new_var))
7752 		  {
7753 		    /* Create a local object to hold the instance
7754 		       value.  */
7755 		    tree type = TREE_TYPE (TREE_TYPE (new_var));
7756 		    const char *id = IDENTIFIER_POINTER (DECL_NAME (new_var));
7757 		    tree inst = create_tmp_var (type, id);
7758 		    gimplify_assign (inst, fold_indirect_ref (x), &fplist);
7759 		    x = build_fold_addr_expr (inst);
7760 		  }
7761 		gimplify_assign (new_var, x, &fplist);
7762 	      }
7763 	    else if (DECL_P (new_var))
7764 	      {
7765 		SET_DECL_VALUE_EXPR (new_var, x);
7766 		DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7767 	      }
7768 	    else
7769 	      gcc_unreachable ();
7770 	  }
7771 	map_cnt++;
7772 	break;
7773 
7774       case OMP_CLAUSE_FIRSTPRIVATE:
7775 	if (is_oacc_parallel (ctx))
7776 	  goto oacc_firstprivate;
7777 	map_cnt++;
7778 	var = OMP_CLAUSE_DECL (c);
7779 	if (!omp_is_reference (var)
7780 	    && !is_gimple_reg_type (TREE_TYPE (var)))
7781 	  {
7782 	    tree new_var = lookup_decl (var, ctx);
7783 	    if (is_variable_sized (var))
7784 	      {
7785 		tree pvar = DECL_VALUE_EXPR (var);
7786 		gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
7787 		pvar = TREE_OPERAND (pvar, 0);
7788 		gcc_assert (DECL_P (pvar));
7789 		tree new_pvar = lookup_decl (pvar, ctx);
7790 		x = build_fold_indirect_ref (new_pvar);
7791 		TREE_THIS_NOTRAP (x) = 1;
7792 	      }
7793 	    else
7794 	      x = build_receiver_ref (var, true, ctx);
7795 	    SET_DECL_VALUE_EXPR (new_var, x);
7796 	    DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7797 	  }
7798 	break;
7799 
7800       case OMP_CLAUSE_PRIVATE:
7801 	if (is_gimple_omp_oacc (ctx->stmt))
7802 	  break;
7803 	var = OMP_CLAUSE_DECL (c);
7804 	if (is_variable_sized (var))
7805 	  {
7806 	    tree new_var = lookup_decl (var, ctx);
7807 	    tree pvar = DECL_VALUE_EXPR (var);
7808 	    gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
7809 	    pvar = TREE_OPERAND (pvar, 0);
7810 	    gcc_assert (DECL_P (pvar));
7811 	    tree new_pvar = lookup_decl (pvar, ctx);
7812 	    x = build_fold_indirect_ref (new_pvar);
7813 	    TREE_THIS_NOTRAP (x) = 1;
7814 	    SET_DECL_VALUE_EXPR (new_var, x);
7815 	    DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7816 	  }
7817 	break;
7818 
7819       case OMP_CLAUSE_USE_DEVICE_PTR:
7820       case OMP_CLAUSE_IS_DEVICE_PTR:
7821 	var = OMP_CLAUSE_DECL (c);
7822 	map_cnt++;
7823 	if (is_variable_sized (var))
7824 	  {
7825 	    tree new_var = lookup_decl (var, ctx);
7826 	    tree pvar = DECL_VALUE_EXPR (var);
7827 	    gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
7828 	    pvar = TREE_OPERAND (pvar, 0);
7829 	    gcc_assert (DECL_P (pvar));
7830 	    tree new_pvar = lookup_decl (pvar, ctx);
7831 	    x = build_fold_indirect_ref (new_pvar);
7832 	    TREE_THIS_NOTRAP (x) = 1;
7833 	    SET_DECL_VALUE_EXPR (new_var, x);
7834 	    DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7835 	  }
7836 	else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
7837 	  {
7838 	    tree new_var = lookup_decl (var, ctx);
7839 	    tree type = build_pointer_type (TREE_TYPE (var));
7840 	    x = create_tmp_var_raw (type, get_name (new_var));
7841 	    gimple_add_tmp_var (x);
7842 	    x = build_simple_mem_ref (x);
7843 	    SET_DECL_VALUE_EXPR (new_var, x);
7844 	    DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7845 	  }
7846 	else
7847 	  {
7848 	    tree new_var = lookup_decl (var, ctx);
7849 	    x = create_tmp_var_raw (TREE_TYPE (new_var), get_name (new_var));
7850 	    gimple_add_tmp_var (x);
7851 	    SET_DECL_VALUE_EXPR (new_var, x);
7852 	    DECL_HAS_VALUE_EXPR_P (new_var) = 1;
7853 	  }
7854 	break;
7855       }
7856 
7857   if (offloaded)
7858     {
7859       target_nesting_level++;
7860       lower_omp (&tgt_body, ctx);
7861       target_nesting_level--;
7862     }
7863   else if (data_region)
7864     lower_omp (&tgt_body, ctx);
7865 
7866   if (offloaded)
7867     {
7868       /* Declare all the variables created by mapping and the variables
7869 	 declared in the scope of the target body.  */
7870       record_vars_into (ctx->block_vars, child_fn);
7871       maybe_remove_omp_member_access_dummy_vars (tgt_bind);
7872       record_vars_into (gimple_bind_vars (tgt_bind), child_fn);
7873     }
7874 
7875   olist = NULL;
7876   ilist = NULL;
7877   if (ctx->record_type)
7878     {
7879       ctx->sender_decl
7880 	= create_tmp_var (ctx->record_type, ".omp_data_arr");
7881       DECL_NAMELESS (ctx->sender_decl) = 1;
7882       TREE_ADDRESSABLE (ctx->sender_decl) = 1;
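      /* The data argument for the target region is a TREE_VEC of three
	 parallel arrays: the mapped objects (.omp_data_arr), their sizes
	 (.omp_data_sizes) and their map kinds (.omp_data_kinds), each kind
	 also carrying the alignment in its bits above TALIGN_SHIFT.  */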
7883       t = make_tree_vec (3);
7884       TREE_VEC_ELT (t, 0) = ctx->sender_decl;
7885       TREE_VEC_ELT (t, 1)
7886 	= create_tmp_var (build_array_type_nelts (size_type_node, map_cnt),
7887 			  ".omp_data_sizes");
7888       DECL_NAMELESS (TREE_VEC_ELT (t, 1)) = 1;
7889       TREE_ADDRESSABLE (TREE_VEC_ELT (t, 1)) = 1;
7890       TREE_STATIC (TREE_VEC_ELT (t, 1)) = 1;
7891       tree tkind_type = short_unsigned_type_node;
7892       int talign_shift = 8;
7893       TREE_VEC_ELT (t, 2)
7894 	= create_tmp_var (build_array_type_nelts (tkind_type, map_cnt),
7895 			  ".omp_data_kinds");
7896       DECL_NAMELESS (TREE_VEC_ELT (t, 2)) = 1;
7897       TREE_ADDRESSABLE (TREE_VEC_ELT (t, 2)) = 1;
7898       TREE_STATIC (TREE_VEC_ELT (t, 2)) = 1;
7899       gimple_omp_target_set_data_arg (stmt, t);
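
      /* As an illustrative sketch (not the exact IL), for something like

	   #pragma omp target map(tofrom: a)

	 the triplet built above ends up roughly as

	   .omp_data_arr   = { &a };
	   .omp_data_sizes = { sizeof (a) };
	   .omp_data_kinds = { GOMP_MAP_TOFROM | (log2 align << 8) };

	 with one entry per mapped item, filled in by the loop below.  */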

      vec<constructor_elt, va_gc> *vsize;
      vec<constructor_elt, va_gc> *vkind;
      vec_alloc (vsize, map_cnt);
      vec_alloc (vkind, map_cnt);
      unsigned int map_idx = 0;

      for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
	switch (OMP_CLAUSE_CODE (c))
	  {
	    tree ovar, nc, s, purpose, var, x, type;
	    unsigned int talign;

	  default:
	    break;

	  case OMP_CLAUSE_MAP:
	  case OMP_CLAUSE_TO:
	  case OMP_CLAUSE_FROM:
	  oacc_firstprivate_map:
	    nc = c;
	    ovar = OMP_CLAUSE_DECL (c);
	    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
		&& (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
		    || (OMP_CLAUSE_MAP_KIND (c)
			== GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
	      break;
	    if (!DECL_P (ovar))
	      {
		if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
		    && OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
		  {
		    gcc_checking_assert (OMP_CLAUSE_DECL (OMP_CLAUSE_CHAIN (c))
					 == get_base_address (ovar));
		    nc = OMP_CLAUSE_CHAIN (c);
		    ovar = OMP_CLAUSE_DECL (nc);
		  }
		else
		  {
		    tree x = build_sender_ref (ovar, ctx);
		    tree v
		      = build_fold_addr_expr_with_type (ovar, ptr_type_node);
		    gimplify_assign (x, v, &ilist);
		    nc = NULL_TREE;
		  }
	      }
	    else
	      {
		if (DECL_SIZE (ovar)
		    && TREE_CODE (DECL_SIZE (ovar)) != INTEGER_CST)
		  {
		    tree ovar2 = DECL_VALUE_EXPR (ovar);
		    gcc_assert (TREE_CODE (ovar2) == INDIRECT_REF);
		    ovar2 = TREE_OPERAND (ovar2, 0);
		    gcc_assert (DECL_P (ovar2));
		    ovar = ovar2;
		  }
		if (!maybe_lookup_field (ovar, ctx))
		  continue;
	      }

	    talign = TYPE_ALIGN_UNIT (TREE_TYPE (ovar));
	    if (DECL_P (ovar) && DECL_ALIGN_UNIT (ovar) > talign)
	      talign = DECL_ALIGN_UNIT (ovar);
	    if (nc)
	      {
		var = lookup_decl_in_outer_ctx (ovar, ctx);
		x = build_sender_ref (ovar, ctx);

		if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
		    && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
		    && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
		    && TREE_CODE (TREE_TYPE (ovar)) == ARRAY_TYPE)
		  {
		    gcc_assert (offloaded);
		    tree avar
		      = create_tmp_var (TREE_TYPE (TREE_TYPE (x)));
		    mark_addressable (avar);
		    gimplify_assign (avar, build_fold_addr_expr (var), &ilist);
		    talign = DECL_ALIGN_UNIT (avar);
		    avar = build_fold_addr_expr (avar);
		    gimplify_assign (x, avar, &ilist);
		  }
		else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
		  {
		    gcc_assert (is_gimple_omp_oacc (ctx->stmt));
		    if (!omp_is_reference (var))
		      {
			if (is_gimple_reg (var)
			    && OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
			  TREE_NO_WARNING (var) = 1;
			var = build_fold_addr_expr (var);
		      }
		    else
		      talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
		    gimplify_assign (x, var, &ilist);
		  }
		else if (is_gimple_reg (var))
		  {
		    gcc_assert (offloaded);
		    tree avar = create_tmp_var (TREE_TYPE (var));
		    mark_addressable (avar);
		    enum gomp_map_kind map_kind = OMP_CLAUSE_MAP_KIND (c);
		    if (GOMP_MAP_COPY_TO_P (map_kind)
			|| map_kind == GOMP_MAP_POINTER
			|| map_kind == GOMP_MAP_TO_PSET
			|| map_kind == GOMP_MAP_FORCE_DEVICEPTR)
		      {
			/* If we need to initialize a temporary
			   with VAR because it is not addressable, and
			   the variable hasn't been initialized yet, then
			   we'll get a warning for the store to avar.
			   Don't warn in that case; the mapping might
			   be implicit.  */
			TREE_NO_WARNING (var) = 1;
			gimplify_assign (avar, var, &ilist);
		      }
		    avar = build_fold_addr_expr (avar);
		    gimplify_assign (x, avar, &ilist);
		    if ((GOMP_MAP_COPY_FROM_P (map_kind)
			 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
			&& !TYPE_READONLY (TREE_TYPE (var)))
		      {
			x = unshare_expr (x);
			x = build_simple_mem_ref (x);
			gimplify_assign (var, x, &olist);
		      }
		  }
		else
		  {
		    var = build_fold_addr_expr (var);
		    gimplify_assign (x, var, &ilist);
		  }
	      }
	    s = NULL_TREE;
	    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
	      {
		gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
		s = TREE_TYPE (ovar);
		if (TREE_CODE (s) == REFERENCE_TYPE)
		  s = TREE_TYPE (s);
		s = TYPE_SIZE_UNIT (s);
	      }
	    else
	      s = OMP_CLAUSE_SIZE (c);
	    if (s == NULL_TREE)
	      s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
	    s = fold_convert (size_type_node, s);
	    purpose = size_int (map_idx++);
	    CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
	    if (TREE_CODE (s) != INTEGER_CST)
	      TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;

	    unsigned HOST_WIDE_INT tkind, tkind_zero;
	    switch (OMP_CLAUSE_CODE (c))
	      {
	      case OMP_CLAUSE_MAP:
		tkind = OMP_CLAUSE_MAP_KIND (c);
		tkind_zero = tkind;
		if (OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c))
		  switch (tkind)
		    {
		    case GOMP_MAP_ALLOC:
		    case GOMP_MAP_TO:
		    case GOMP_MAP_FROM:
		    case GOMP_MAP_TOFROM:
		    case GOMP_MAP_ALWAYS_TO:
		    case GOMP_MAP_ALWAYS_FROM:
		    case GOMP_MAP_ALWAYS_TOFROM:
		    case GOMP_MAP_RELEASE:
		    case GOMP_MAP_FORCE_TO:
		    case GOMP_MAP_FORCE_FROM:
		    case GOMP_MAP_FORCE_TOFROM:
		    case GOMP_MAP_FORCE_PRESENT:
		      tkind_zero = GOMP_MAP_ZERO_LEN_ARRAY_SECTION;
		      break;
		    case GOMP_MAP_DELETE:
		      tkind_zero = GOMP_MAP_DELETE_ZERO_LEN_ARRAY_SECTION;
		    default:
		      break;
		    }
		if (tkind_zero != tkind)
		  {
		    if (integer_zerop (s))
		      tkind = tkind_zero;
		    else if (integer_nonzerop (s))
		      tkind_zero = tkind;
		  }
		break;
	      case OMP_CLAUSE_FIRSTPRIVATE:
		gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
		tkind = GOMP_MAP_TO;
		tkind_zero = tkind;
		break;
	      case OMP_CLAUSE_TO:
		tkind = GOMP_MAP_TO;
		tkind_zero = tkind;
		break;
	      case OMP_CLAUSE_FROM:
		tkind = GOMP_MAP_FROM;
		tkind_zero = tkind;
		break;
	      default:
		gcc_unreachable ();
	      }
	    gcc_checking_assert (tkind
				 < (HOST_WIDE_INT_C (1U) << talign_shift));
	    gcc_checking_assert (tkind_zero
				 < (HOST_WIDE_INT_C (1U) << talign_shift));
	    talign = ceil_log2 (talign);
	    tkind |= talign << talign_shift;
	    tkind_zero |= talign << talign_shift;
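	    /* For instance, a GOMP_MAP_TOFROM mapping of an 8-byte
	       aligned object yields roughly
	       GOMP_MAP_TOFROM | (ceil_log2 (8) << 8): the map kind in
	       the low talign_shift bits and the log2 alignment above
	       them.  */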
	    gcc_checking_assert (tkind
				 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
	    gcc_checking_assert (tkind_zero
				 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
	    if (tkind == tkind_zero)
	      x = build_int_cstu (tkind_type, tkind);
	    else
	      {
		TREE_STATIC (TREE_VEC_ELT (t, 2)) = 0;
		x = build3 (COND_EXPR, tkind_type,
			    fold_build2 (EQ_EXPR, boolean_type_node,
					 unshare_expr (s), size_zero_node),
			    build_int_cstu (tkind_type, tkind_zero),
			    build_int_cstu (tkind_type, tkind));
	      }
	    CONSTRUCTOR_APPEND_ELT (vkind, purpose, x);
	    if (nc && nc != c)
	      c = nc;
	    break;

	  case OMP_CLAUSE_FIRSTPRIVATE:
	    if (is_oacc_parallel (ctx))
	      goto oacc_firstprivate_map;
	    ovar = OMP_CLAUSE_DECL (c);
	    if (omp_is_reference (ovar))
	      talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
	    else
	      talign = DECL_ALIGN_UNIT (ovar);
	    var = lookup_decl_in_outer_ctx (ovar, ctx);
	    x = build_sender_ref (ovar, ctx);
	    tkind = GOMP_MAP_FIRSTPRIVATE;
	    type = TREE_TYPE (ovar);
	    if (omp_is_reference (ovar))
	      type = TREE_TYPE (type);
	    if ((INTEGRAL_TYPE_P (type)
		 && TYPE_PRECISION (type) <= POINTER_SIZE)
		|| TREE_CODE (type) == POINTER_TYPE)
	      {
		tkind = GOMP_MAP_FIRSTPRIVATE_INT;
		tree t = var;
		if (omp_is_reference (var))
		  t = build_simple_mem_ref (var);
		else if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
		  TREE_NO_WARNING (var) = 1;
		if (TREE_CODE (type) != POINTER_TYPE)
		  t = fold_convert (pointer_sized_int_node, t);
		t = fold_convert (TREE_TYPE (x), t);
		gimplify_assign (x, t, &ilist);
	      }
	    else if (omp_is_reference (var))
	      gimplify_assign (x, var, &ilist);
	    else if (is_gimple_reg (var))
	      {
		tree avar = create_tmp_var (TREE_TYPE (var));
		mark_addressable (avar);
		if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
		  TREE_NO_WARNING (var) = 1;
		gimplify_assign (avar, var, &ilist);
		avar = build_fold_addr_expr (avar);
		gimplify_assign (x, avar, &ilist);
	      }
	    else
	      {
		var = build_fold_addr_expr (var);
		gimplify_assign (x, var, &ilist);
	      }
	    if (tkind == GOMP_MAP_FIRSTPRIVATE_INT)
	      s = size_int (0);
	    else if (omp_is_reference (ovar))
	      s = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
	    else
	      s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
	    s = fold_convert (size_type_node, s);
	    purpose = size_int (map_idx++);
	    CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
	    if (TREE_CODE (s) != INTEGER_CST)
	      TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;

	    gcc_checking_assert (tkind
				 < (HOST_WIDE_INT_C (1U) << talign_shift));
	    talign = ceil_log2 (talign);
	    tkind |= talign << talign_shift;
	    gcc_checking_assert (tkind
				 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
	    CONSTRUCTOR_APPEND_ELT (vkind, purpose,
				    build_int_cstu (tkind_type, tkind));
	    break;

	  case OMP_CLAUSE_USE_DEVICE_PTR:
	  case OMP_CLAUSE_IS_DEVICE_PTR:
	    ovar = OMP_CLAUSE_DECL (c);
	    var = lookup_decl_in_outer_ctx (ovar, ctx);
	    x = build_sender_ref (ovar, ctx);
	    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR)
	      tkind = GOMP_MAP_USE_DEVICE_PTR;
	    else
	      tkind = GOMP_MAP_FIRSTPRIVATE_INT;
	    type = TREE_TYPE (ovar);
	    if (TREE_CODE (type) == ARRAY_TYPE)
	      var = build_fold_addr_expr (var);
	    else
	      {
		if (omp_is_reference (ovar))
		  {
		    type = TREE_TYPE (type);
		    if (TREE_CODE (type) != ARRAY_TYPE)
		      var = build_simple_mem_ref (var);
		    var = fold_convert (TREE_TYPE (x), var);
		  }
	      }
	    gimplify_assign (x, var, &ilist);
	    s = size_int (0);
	    purpose = size_int (map_idx++);
	    CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
	    gcc_checking_assert (tkind
				 < (HOST_WIDE_INT_C (1U) << talign_shift));
	    gcc_checking_assert (tkind
				 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
	    CONSTRUCTOR_APPEND_ELT (vkind, purpose,
				    build_int_cstu (tkind_type, tkind));
	    break;
	  }

      gcc_assert (map_idx == map_cnt);

      DECL_INITIAL (TREE_VEC_ELT (t, 1))
	= build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 1)), vsize);
      DECL_INITIAL (TREE_VEC_ELT (t, 2))
	= build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 2)), vkind);
      for (int i = 1; i <= 2; i++)
	if (!TREE_STATIC (TREE_VEC_ELT (t, i)))
	  {
	    gimple_seq initlist = NULL;
	    force_gimple_operand (build1 (DECL_EXPR, void_type_node,
					  TREE_VEC_ELT (t, i)),
				  &initlist, true, NULL_TREE);
	    gimple_seq_add_seq (&ilist, initlist);

	    tree clobber = build_constructor (TREE_TYPE (TREE_VEC_ELT (t, i)),
					      NULL);
	    TREE_THIS_VOLATILE (clobber) = 1;
	    gimple_seq_add_stmt (&olist,
				 gimple_build_assign (TREE_VEC_ELT (t, i),
						      clobber));
	  }

      tree clobber = build_constructor (ctx->record_type, NULL);
      TREE_THIS_VOLATILE (clobber) = 1;
      gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
							clobber));
    }

  /* Once all the expansions are done, sequence all the different
     fragments inside gimple_omp_body.  */
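
  /* As a rough sketch, for an offloaded region the body assembled below
     is ordered: the receiver_decl assignment, the firstprivate sequence
     (fplist), fork_seq, the lowered target body, join_seq, and a final
     OMP_RETURN.  */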

  new_body = NULL;

  if (offloaded
      && ctx->record_type)
    {
      t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
      /* fixup_child_record_type might have changed receiver_decl's type.  */
      t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
      gimple_seq_add_stmt (&new_body,
			   gimple_build_assign (ctx->receiver_decl, t));
    }
  gimple_seq_add_seq (&new_body, fplist);

  if (offloaded || data_region)
    {
      tree prev = NULL_TREE;
      for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
	switch (OMP_CLAUSE_CODE (c))
	  {
	    tree var, x;
	  default:
	    break;
	  case OMP_CLAUSE_FIRSTPRIVATE:
	    if (is_gimple_omp_oacc (ctx->stmt))
	      break;
	    var = OMP_CLAUSE_DECL (c);
	    if (omp_is_reference (var)
		|| is_gimple_reg_type (TREE_TYPE (var)))
	      {
		tree new_var = lookup_decl (var, ctx);
		tree type;
		type = TREE_TYPE (var);
		if (omp_is_reference (var))
		  type = TREE_TYPE (type);
		if ((INTEGRAL_TYPE_P (type)
		     && TYPE_PRECISION (type) <= POINTER_SIZE)
		    || TREE_CODE (type) == POINTER_TYPE)
		  {
		    x = build_receiver_ref (var, false, ctx);
		    if (TREE_CODE (type) != POINTER_TYPE)
		      x = fold_convert (pointer_sized_int_node, x);
		    x = fold_convert (type, x);
		    gimplify_expr (&x, &new_body, NULL, is_gimple_val,
				   fb_rvalue);
		    if (omp_is_reference (var))
		      {
			tree v = create_tmp_var_raw (type, get_name (var));
			gimple_add_tmp_var (v);
			TREE_ADDRESSABLE (v) = 1;
			gimple_seq_add_stmt (&new_body,
					     gimple_build_assign (v, x));
			x = build_fold_addr_expr (v);
		      }
		    gimple_seq_add_stmt (&new_body,
					 gimple_build_assign (new_var, x));
		  }
		else
		  {
		    x = build_receiver_ref (var, !omp_is_reference (var), ctx);
		    gimplify_expr (&x, &new_body, NULL, is_gimple_val,
				   fb_rvalue);
		    gimple_seq_add_stmt (&new_body,
					 gimple_build_assign (new_var, x));
		  }
	      }
	    else if (is_variable_sized (var))
	      {
		tree pvar = DECL_VALUE_EXPR (var);
		gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
		pvar = TREE_OPERAND (pvar, 0);
		gcc_assert (DECL_P (pvar));
		tree new_var = lookup_decl (pvar, ctx);
		x = build_receiver_ref (var, false, ctx);
		gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
		gimple_seq_add_stmt (&new_body,
				     gimple_build_assign (new_var, x));
	      }
	    break;
	  case OMP_CLAUSE_PRIVATE:
	    if (is_gimple_omp_oacc (ctx->stmt))
	      break;
	    var = OMP_CLAUSE_DECL (c);
	    if (omp_is_reference (var))
	      {
		location_t clause_loc = OMP_CLAUSE_LOCATION (c);
		tree new_var = lookup_decl (var, ctx);
		x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
		if (TREE_CONSTANT (x))
		  {
		    x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
					    get_name (var));
		    gimple_add_tmp_var (x);
		    TREE_ADDRESSABLE (x) = 1;
		    x = build_fold_addr_expr_loc (clause_loc, x);
		  }
		else
		  break;

		x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
		gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
		gimple_seq_add_stmt (&new_body,
				     gimple_build_assign (new_var, x));
	      }
	    break;
	  case OMP_CLAUSE_USE_DEVICE_PTR:
	  case OMP_CLAUSE_IS_DEVICE_PTR:
	    var = OMP_CLAUSE_DECL (c);
	    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR)
	      x = build_sender_ref (var, ctx);
	    else
	      x = build_receiver_ref (var, false, ctx);
	    if (is_variable_sized (var))
	      {
		tree pvar = DECL_VALUE_EXPR (var);
		gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
		pvar = TREE_OPERAND (pvar, 0);
		gcc_assert (DECL_P (pvar));
		tree new_var = lookup_decl (pvar, ctx);
		gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
		gimple_seq_add_stmt (&new_body,
				     gimple_build_assign (new_var, x));
	      }
	    else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
	      {
		tree new_var = lookup_decl (var, ctx);
		new_var = DECL_VALUE_EXPR (new_var);
		gcc_assert (TREE_CODE (new_var) == MEM_REF);
		new_var = TREE_OPERAND (new_var, 0);
		gcc_assert (DECL_P (new_var));
		gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
		gimple_seq_add_stmt (&new_body,
				     gimple_build_assign (new_var, x));
	      }
	    else
	      {
		tree type = TREE_TYPE (var);
		tree new_var = lookup_decl (var, ctx);
		if (omp_is_reference (var))
		  {
		    type = TREE_TYPE (type);
		    if (TREE_CODE (type) != ARRAY_TYPE)
		      {
			tree v = create_tmp_var_raw (type, get_name (var));
			gimple_add_tmp_var (v);
			TREE_ADDRESSABLE (v) = 1;
			x = fold_convert (type, x);
			gimplify_expr (&x, &new_body, NULL, is_gimple_val,
				       fb_rvalue);
			gimple_seq_add_stmt (&new_body,
					     gimple_build_assign (v, x));
			x = build_fold_addr_expr (v);
		      }
		  }
		new_var = DECL_VALUE_EXPR (new_var);
		x = fold_convert (TREE_TYPE (new_var), x);
		gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
		gimple_seq_add_stmt (&new_body,
				     gimple_build_assign (new_var, x));
	      }
	    break;
	  }
      /* Handle GOMP_MAP_FIRSTPRIVATE_{POINTER,REFERENCE} in a second pass,
	 so that any firstprivate vars needed to hold OMP_CLAUSE_SIZE are
	 already handled.  Similarly for OMP_CLAUSE_PRIVATE on VLAs or
	 references to VLAs.  */
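      /* For instance (illustrative only), for

	   int *p;
	   #pragma omp target map(p[2:n])

	 the array-section map itself was handled in the first pass, and
	 the accompanying GOMP_MAP_FIRSTPRIVATE_POINTER entry handled
	 below rebases the private copy of P onto the mapped section,
	 undoing the bias recorded in OMP_CLAUSE_SIZE.  */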
      for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
	switch (OMP_CLAUSE_CODE (c))
	  {
	    tree var;
	  default:
	    break;
	  case OMP_CLAUSE_MAP:
	    if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
		|| OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
	      {
		location_t clause_loc = OMP_CLAUSE_LOCATION (c);
		poly_int64 offset = 0;
		gcc_assert (prev);
		var = OMP_CLAUSE_DECL (c);
		if (DECL_P (var)
		    && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
		    && is_global_var (maybe_lookup_decl_in_outer_ctx (var,
								      ctx))
		    && varpool_node::get_create (var)->offloadable)
		  break;
		if (TREE_CODE (var) == INDIRECT_REF
		    && TREE_CODE (TREE_OPERAND (var, 0)) == COMPONENT_REF)
		  var = TREE_OPERAND (var, 0);
		if (TREE_CODE (var) == COMPONENT_REF)
		  {
		    var = get_addr_base_and_unit_offset (var, &offset);
		    gcc_assert (var != NULL_TREE && DECL_P (var));
		  }
		else if (DECL_SIZE (var)
			 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
		  {
		    tree var2 = DECL_VALUE_EXPR (var);
		    gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
		    var2 = TREE_OPERAND (var2, 0);
		    gcc_assert (DECL_P (var2));
		    var = var2;
		  }
		tree new_var = lookup_decl (var, ctx), x;
		tree type = TREE_TYPE (new_var);
		bool is_ref;
		if (TREE_CODE (OMP_CLAUSE_DECL (c)) == INDIRECT_REF
		    && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
			== COMPONENT_REF))
		  {
		    type = TREE_TYPE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0));
		    is_ref = true;
		    new_var = build2 (MEM_REF, type,
				      build_fold_addr_expr (new_var),
				      build_int_cst (build_pointer_type (type),
						     offset));
		  }
		else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPONENT_REF)
		  {
		    type = TREE_TYPE (OMP_CLAUSE_DECL (c));
		    is_ref = TREE_CODE (type) == REFERENCE_TYPE;
		    new_var = build2 (MEM_REF, type,
				      build_fold_addr_expr (new_var),
				      build_int_cst (build_pointer_type (type),
						     offset));
		  }
		else
		  is_ref = omp_is_reference (var);
		if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
		  is_ref = false;
		bool ref_to_array = false;
		if (is_ref)
		  {
		    type = TREE_TYPE (type);
		    if (TREE_CODE (type) == ARRAY_TYPE)
		      {
			type = build_pointer_type (type);
			ref_to_array = true;
		      }
		  }
		else if (TREE_CODE (type) == ARRAY_TYPE)
		  {
		    tree decl2 = DECL_VALUE_EXPR (new_var);
		    gcc_assert (TREE_CODE (decl2) == MEM_REF);
		    decl2 = TREE_OPERAND (decl2, 0);
		    gcc_assert (DECL_P (decl2));
		    new_var = decl2;
		    type = TREE_TYPE (new_var);
		  }
		x = build_receiver_ref (OMP_CLAUSE_DECL (prev), false, ctx);
		x = fold_convert_loc (clause_loc, type, x);
		if (!integer_zerop (OMP_CLAUSE_SIZE (c)))
		  {
		    tree bias = OMP_CLAUSE_SIZE (c);
		    if (DECL_P (bias))
		      bias = lookup_decl (bias, ctx);
		    bias = fold_convert_loc (clause_loc, sizetype, bias);
		    bias = fold_build1_loc (clause_loc, NEGATE_EXPR, sizetype,
					    bias);
		    x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
					 TREE_TYPE (x), x, bias);
		  }
		if (ref_to_array)
		  x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
		gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
		if (is_ref && !ref_to_array)
		  {
		    tree t = create_tmp_var_raw (type, get_name (var));
		    gimple_add_tmp_var (t);
		    TREE_ADDRESSABLE (t) = 1;
		    gimple_seq_add_stmt (&new_body,
					 gimple_build_assign (t, x));
		    x = build_fold_addr_expr_loc (clause_loc, t);
		  }
		gimple_seq_add_stmt (&new_body,
				     gimple_build_assign (new_var, x));
		prev = NULL_TREE;
	      }
	    else if (OMP_CLAUSE_CHAIN (c)
		     && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c))
			== OMP_CLAUSE_MAP
		     && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
			 == GOMP_MAP_FIRSTPRIVATE_POINTER
			 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
			     == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
	      prev = c;
	    break;
	  case OMP_CLAUSE_PRIVATE:
	    var = OMP_CLAUSE_DECL (c);
	    if (is_variable_sized (var))
	      {
		location_t clause_loc = OMP_CLAUSE_LOCATION (c);
		tree new_var = lookup_decl (var, ctx);
		tree pvar = DECL_VALUE_EXPR (var);
		gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
		pvar = TREE_OPERAND (pvar, 0);
		gcc_assert (DECL_P (pvar));
		tree new_pvar = lookup_decl (pvar, ctx);
		tree atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
		tree al = size_int (DECL_ALIGN (var));
		tree x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
		x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
		x = fold_convert_loc (clause_loc, TREE_TYPE (new_pvar), x);
		gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
		gimple_seq_add_stmt (&new_body,
				     gimple_build_assign (new_pvar, x));
	      }
	    else if (omp_is_reference (var) && !is_gimple_omp_oacc (ctx->stmt))
	      {
		location_t clause_loc = OMP_CLAUSE_LOCATION (c);
		tree new_var = lookup_decl (var, ctx);
		tree x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
		if (TREE_CONSTANT (x))
		  break;
		else
		  {
		    tree atmp
		      = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
		    tree rtype = TREE_TYPE (TREE_TYPE (new_var));
		    tree al = size_int (TYPE_ALIGN (rtype));
		    x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
		  }

		x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
		gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
		gimple_seq_add_stmt (&new_body,
				     gimple_build_assign (new_var, x));
	      }
	    break;
	  }

      gimple_seq fork_seq = NULL;
      gimple_seq join_seq = NULL;

      if (is_oacc_parallel (ctx))
	{
	  /* If there are reductions on the offloaded region itself, treat
	     them as a dummy GANG loop.  */
	  tree level = build_int_cst (integer_type_node, GOMP_DIM_GANG);

	  lower_oacc_reductions (gimple_location (ctx->stmt), clauses, level,
				 false, NULL, NULL, &fork_seq, &join_seq, ctx);
	}

      gimple_seq_add_seq (&new_body, fork_seq);
      gimple_seq_add_seq (&new_body, tgt_body);
      gimple_seq_add_seq (&new_body, join_seq);

      if (offloaded)
	{
	  new_body = maybe_catch_exception (new_body);
	  gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
	}
      gimple_omp_set_body (stmt, new_body);
    }

  bind = gimple_build_bind (NULL, NULL,
			    tgt_bind ? gimple_bind_block (tgt_bind)
				     : NULL_TREE);
  gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
  gimple_bind_add_seq (bind, ilist);
  gimple_bind_add_stmt (bind, stmt);
  gimple_bind_add_seq (bind, olist);

  pop_gimplify_context (NULL);

  if (dep_bind)
    {
      gimple_bind_add_seq (dep_bind, dep_ilist);
      gimple_bind_add_stmt (dep_bind, bind);
      gimple_bind_add_seq (dep_bind, dep_olist);
      pop_gimplify_context (dep_bind);
    }
}

/* Expand code for an OpenMP teams directive.  */
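
/* As a rough sketch,

     #pragma omp teams num_teams(N) thread_limit(M)
     BODY;

   is lowered into a bind containing approximately

     __builtin_GOMP_teams (N, M);
     BODY;
     OMP_RETURN;

   where an absent clause is passed as 0 and data-sharing clauses are
   lowered around the call as usual.  */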

static void
lower_omp_teams (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  gomp_teams *teams_stmt = as_a <gomp_teams *> (gsi_stmt (*gsi_p));
  push_gimplify_context ();

  tree block = make_node (BLOCK);
  gbind *bind = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, bind, true);
  gimple_seq bind_body = NULL;
  gimple_seq dlist = NULL;
  gimple_seq olist = NULL;

  tree num_teams = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
				    OMP_CLAUSE_NUM_TEAMS);
  if (num_teams == NULL_TREE)
    num_teams = build_int_cst (unsigned_type_node, 0);
  else
    {
      num_teams = OMP_CLAUSE_NUM_TEAMS_EXPR (num_teams);
      num_teams = fold_convert (unsigned_type_node, num_teams);
      gimplify_expr (&num_teams, &bind_body, NULL, is_gimple_val, fb_rvalue);
    }
  tree thread_limit = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
				       OMP_CLAUSE_THREAD_LIMIT);
  if (thread_limit == NULL_TREE)
    thread_limit = build_int_cst (unsigned_type_node, 0);
  else
    {
      thread_limit = OMP_CLAUSE_THREAD_LIMIT_EXPR (thread_limit);
      thread_limit = fold_convert (unsigned_type_node, thread_limit);
      gimplify_expr (&thread_limit, &bind_body, NULL, is_gimple_val,
		     fb_rvalue);
    }

  lower_rec_input_clauses (gimple_omp_teams_clauses (teams_stmt),
			   &bind_body, &dlist, ctx, NULL);
  lower_omp (gimple_omp_body_ptr (teams_stmt), ctx);
  lower_reduction_clauses (gimple_omp_teams_clauses (teams_stmt), &olist, ctx);
  if (!gimple_omp_teams_grid_phony (teams_stmt))
    {
      gimple_seq_add_stmt (&bind_body, teams_stmt);
      location_t loc = gimple_location (teams_stmt);
      tree decl = builtin_decl_explicit (BUILT_IN_GOMP_TEAMS);
      gimple *call = gimple_build_call (decl, 2, num_teams, thread_limit);
      gimple_set_location (call, loc);
      gimple_seq_add_stmt (&bind_body, call);
    }

  gimple_seq_add_seq (&bind_body, gimple_omp_body (teams_stmt));
  gimple_omp_set_body (teams_stmt, NULL);
  gimple_seq_add_seq (&bind_body, olist);
  gimple_seq_add_seq (&bind_body, dlist);
  if (!gimple_omp_teams_grid_phony (teams_stmt))
    gimple_seq_add_stmt (&bind_body, gimple_build_omp_return (true));
  gimple_bind_set_body (bind, bind_body);

  pop_gimplify_context (bind);

  gimple_bind_append_vars (bind, ctx->block_vars);
  BLOCK_VARS (block) = ctx->block_vars;
  if (BLOCK_VARS (block))
    TREE_USED (block) = 1;
}

/* Expand code within an artificial GIMPLE_OMP_GRID_BODY OMP construct.  */

static void
lower_omp_grid_body (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  gimple *stmt = gsi_stmt (*gsi_p);
  lower_omp (gimple_omp_body_ptr (stmt), ctx);
  gimple_seq_add_stmt (gimple_omp_body_ptr (stmt),
		       gimple_build_omp_return (false));
}


/* Callback for lower_omp_1.  Return non-NULL if *tp needs to be
   regimplified.  If DATA is non-NULL, lower_omp_1 is outside
   of an OMP context, but with task_shared_vars set.  */

static tree
lower_omp_regimplify_p (tree *tp, int *walk_subtrees,
			void *data)
{
  tree t = *tp;

  /* Any variable with DECL_VALUE_EXPR needs to be regimplified.  */
  if (VAR_P (t) && data == NULL && DECL_HAS_VALUE_EXPR_P (t))
    return t;

  if (task_shared_vars
      && DECL_P (t)
      && bitmap_bit_p (task_shared_vars, DECL_UID (t)))
    return t;

  /* If a global variable has been privatized, TREE_CONSTANT on
     ADDR_EXPR might be wrong.  */
  if (data == NULL && TREE_CODE (t) == ADDR_EXPR)
    recompute_tree_invariant_for_addr_expr (t);

  *walk_subtrees = !IS_TYPE_OR_DECL_P (t);
  return NULL_TREE;
}

/* Data to be communicated between lower_omp_regimplify_operands and
   lower_omp_regimplify_operands_p.  */

struct lower_omp_regimplify_operands_data
{
  omp_context *ctx;
  vec<tree> *decls;
};

/* Helper function for lower_omp_regimplify_operands.  Find
   omp_member_access_dummy_var vars and adjust temporarily their
   DECL_VALUE_EXPRs if needed.  */

static tree
lower_omp_regimplify_operands_p (tree *tp, int *walk_subtrees,
				 void *data)
{
  tree t = omp_member_access_dummy_var (*tp);
  if (t)
    {
      struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
      lower_omp_regimplify_operands_data *ldata
	= (lower_omp_regimplify_operands_data *) wi->info;
      tree o = maybe_lookup_decl (t, ldata->ctx);
      if (o != t)
	{
	  ldata->decls->safe_push (DECL_VALUE_EXPR (*tp));
	  ldata->decls->safe_push (*tp);
	  tree v = unshare_and_remap (DECL_VALUE_EXPR (*tp), t, o);
	  SET_DECL_VALUE_EXPR (*tp, v);
	}
    }
  *walk_subtrees = !IS_TYPE_OR_DECL_P (*tp);
  return NULL_TREE;
}

/* Wrapper around gimple_regimplify_operands that adjusts DECL_VALUE_EXPRs
   of omp_member_access_dummy_var vars during regimplification.  */

static void
lower_omp_regimplify_operands (omp_context *ctx, gimple *stmt,
			       gimple_stmt_iterator *gsi_p)
{
  auto_vec<tree, 10> decls;
  if (ctx)
    {
      struct walk_stmt_info wi;
      memset (&wi, '\0', sizeof (wi));
      struct lower_omp_regimplify_operands_data data;
      data.ctx = ctx;
      data.decls = &decls;
      wi.info = &data;
      walk_gimple_op (stmt, lower_omp_regimplify_operands_p, &wi);
    }
  gimple_regimplify_operands (stmt, gsi_p);
  while (!decls.is_empty ())
    {
      tree t = decls.pop ();
      tree v = decls.pop ();
      SET_DECL_VALUE_EXPR (t, v);
    }
}

static void
lower_omp_1 (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  gimple *stmt = gsi_stmt (*gsi_p);
  struct walk_stmt_info wi;
  gcall *call_stmt;

  if (gimple_has_location (stmt))
    input_location = gimple_location (stmt);

  if (task_shared_vars)
    memset (&wi, '\0', sizeof (wi));

  /* If we have issued syntax errors, avoid doing any heavy lifting.
     Just replace the OMP directives with a NOP to avoid
     confusing RTL expansion.  */
  if (seen_error () && is_gimple_omp (stmt))
    {
      gsi_replace (gsi_p, gimple_build_nop (), true);
      return;
    }

  switch (gimple_code (stmt))
    {
    case GIMPLE_COND:
      {
	gcond *cond_stmt = as_a <gcond *> (stmt);
	if ((ctx || task_shared_vars)
	    && (walk_tree (gimple_cond_lhs_ptr (cond_stmt),
			   lower_omp_regimplify_p,
			   ctx ? NULL : &wi, NULL)
		|| walk_tree (gimple_cond_rhs_ptr (cond_stmt),
			      lower_omp_regimplify_p,
			      ctx ? NULL : &wi, NULL)))
	  lower_omp_regimplify_operands (ctx, cond_stmt, gsi_p);
      }
      break;
    case GIMPLE_CATCH:
      lower_omp (gimple_catch_handler_ptr (as_a <gcatch *> (stmt)), ctx);
      break;
    case GIMPLE_EH_FILTER:
      lower_omp (gimple_eh_filter_failure_ptr (stmt), ctx);
      break;
    case GIMPLE_TRY:
      lower_omp (gimple_try_eval_ptr (stmt), ctx);
      lower_omp (gimple_try_cleanup_ptr (stmt), ctx);
      break;
    case GIMPLE_TRANSACTION:
      lower_omp (gimple_transaction_body_ptr (as_a <gtransaction *> (stmt)),
		 ctx);
      break;
    case GIMPLE_BIND:
      lower_omp (gimple_bind_body_ptr (as_a <gbind *> (stmt)), ctx);
      maybe_remove_omp_member_access_dummy_vars (as_a <gbind *> (stmt));
      break;
    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      if (ctx->cancellable)
	ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
      lower_omp_taskreg (gsi_p, ctx);
      break;
    case GIMPLE_OMP_FOR:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      if (ctx->cancellable)
	ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
      lower_omp_for (gsi_p, ctx);
      break;
    case GIMPLE_OMP_SECTIONS:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      if (ctx->cancellable)
	ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
      lower_omp_sections (gsi_p, ctx);
      break;
    case GIMPLE_OMP_SINGLE:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_single (gsi_p, ctx);
      break;
    case GIMPLE_OMP_MASTER:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_master (gsi_p, ctx);
      break;
    case GIMPLE_OMP_TASKGROUP:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_taskgroup (gsi_p, ctx);
      break;
    case GIMPLE_OMP_ORDERED:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_ordered (gsi_p, ctx);
      break;
    case GIMPLE_OMP_CRITICAL:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_critical (gsi_p, ctx);
      break;
    case GIMPLE_OMP_ATOMIC_LOAD:
      if ((ctx || task_shared_vars)
	  && walk_tree (gimple_omp_atomic_load_rhs_ptr (
			  as_a <gomp_atomic_load *> (stmt)),
			lower_omp_regimplify_p, ctx ? NULL : &wi, NULL))
	lower_omp_regimplify_operands (ctx, stmt, gsi_p);
      break;
    case GIMPLE_OMP_TARGET:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_target (gsi_p, ctx);
      break;
    case GIMPLE_OMP_TEAMS:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_teams (gsi_p, ctx);
      break;
    case GIMPLE_OMP_GRID_BODY:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_grid_body (gsi_p, ctx);
      break;
    case GIMPLE_CALL:
      tree fndecl;
      call_stmt = as_a <gcall *> (stmt);
      fndecl = gimple_call_fndecl (call_stmt);
      if (fndecl
	  && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
	switch (DECL_FUNCTION_CODE (fndecl))
	  {
	  case BUILT_IN_GOMP_BARRIER:
	    if (ctx == NULL)
	      break;
	    /* FALLTHRU */
	  case BUILT_IN_GOMP_CANCEL:
	  case BUILT_IN_GOMP_CANCELLATION_POINT:
	    omp_context *cctx;
	    cctx = ctx;
	    if (gimple_code (cctx->stmt) == GIMPLE_OMP_SECTION)
	      cctx = cctx->outer;
	    gcc_assert (gimple_call_lhs (call_stmt) == NULL_TREE);
	    if (!cctx->cancellable)
	      {
		if (DECL_FUNCTION_CODE (fndecl)
		    == BUILT_IN_GOMP_CANCELLATION_POINT)
		  {
		    stmt = gimple_build_nop ();
		    gsi_replace (gsi_p, stmt, false);
		  }
		break;
	      }
	    if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_GOMP_BARRIER)
	      {
		fndecl = builtin_decl_explicit (BUILT_IN_GOMP_BARRIER_CANCEL);
		gimple_call_set_fndecl (call_stmt, fndecl);
		gimple_call_set_fntype (call_stmt, TREE_TYPE (fndecl));
	      }
	    tree lhs;
	    lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (fndecl)));
	    gimple_call_set_lhs (call_stmt, lhs);
	    tree fallthru_label;
	    fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
	    gimple *g;
	    g = gimple_build_label (fallthru_label);
	    gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
	    g = gimple_build_cond (NE_EXPR, lhs,
				   fold_convert (TREE_TYPE (lhs),
						 boolean_false_node),
				   cctx->cancel_label, fallthru_label);
	    gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
	    break;
	  default:
	    break;
	  }
      /* FALLTHRU */
    default:
      if ((ctx || task_shared_vars)
	  && walk_gimple_op (stmt, lower_omp_regimplify_p,
			     ctx ? NULL : &wi))
	{
	  /* Just remove clobbers; this should happen only if we have
	     "privatized" local addressable variables in SIMD regions.
	     The clobber isn't needed in that case, and gimplifying the
	     address of the ARRAY_REF into a pointer and creating a
	     MEM_REF based clobber would create worse code than we get
	     with the clobber dropped.  */
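	  /* E.g. (a hypothetical sketch) a "tmp = {CLOBBER};" left over
	     after TMP was replaced by an element of a per-SIMD-lane
	     array is simply dropped here.  */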
	  if (gimple_clobber_p (stmt))
	    {
	      gsi_replace (gsi_p, gimple_build_nop (), true);
	      break;
	    }
	  lower_omp_regimplify_operands (ctx, stmt, gsi_p);
	}
      break;
    }
}

static void
lower_omp (gimple_seq *body, omp_context *ctx)
{
  location_t saved_location = input_location;
  gimple_stmt_iterator gsi;
  for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
    lower_omp_1 (&gsi, ctx);
  /* During gimplification, we haven't folded statements inside offloading
     or taskreg regions (gimplify.c:maybe_fold_stmt); do that now.  */
  if (target_nesting_level || taskreg_nesting_level)
    for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
      fold_stmt (&gsi);
  input_location = saved_location;
}

/* Main entry point.  */

static unsigned int
execute_lower_omp (void)
{
  gimple_seq body;
  int i;
  omp_context *ctx;

  /* This pass always runs, to provide PROP_gimple_lomp.
     But often, there is nothing to do.  */
  if (flag_openacc == 0 && flag_openmp == 0
      && flag_openmp_simd == 0)
    return 0;

  all_contexts = splay_tree_new (splay_tree_compare_pointers, 0,
				 delete_omp_context);

  body = gimple_body (current_function_decl);

  if (hsa_gen_requested_p ())
    omp_grid_gridify_all_targets (&body);

  scan_omp (&body, NULL);
  gcc_assert (taskreg_nesting_level == 0);
  FOR_EACH_VEC_ELT (taskreg_contexts, i, ctx)
    finish_taskreg_scan (ctx);
  taskreg_contexts.release ();

  if (all_contexts->root)
    {
      if (task_shared_vars)
	push_gimplify_context ();
      lower_omp (&body, NULL);
      if (task_shared_vars)
	pop_gimplify_context (NULL);
    }

  if (all_contexts)
    {
      splay_tree_delete (all_contexts);
      all_contexts = NULL;
    }
  BITMAP_FREE (task_shared_vars);

  /* If the current function is a method, remove the artificial dummy
     VAR_DECLs created for non-static data member privatization; they
     aren't needed for debug info or anything else, have already been
     replaced everywhere in the IL, and cause problems with LTO.  */
  if (DECL_ARGUMENTS (current_function_decl)
      && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
      && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
	  == POINTER_TYPE))
    remove_member_access_dummy_vars (DECL_INITIAL (current_function_decl));
  return 0;
}

namespace {

const pass_data pass_data_lower_omp =
{
  GIMPLE_PASS, /* type */
  "omplower", /* name */
  OPTGROUP_OMP, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  PROP_gimple_lomp | PROP_gimple_lomp_dev, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_lower_omp : public gimple_opt_pass
{
public:
  pass_lower_omp (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_lower_omp, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return execute_lower_omp (); }

}; // class pass_lower_omp

} // anon namespace

gimple_opt_pass *
make_pass_lower_omp (gcc::context *ctxt)
{
  return new pass_lower_omp (ctxt);
}

/* The following is a utility to diagnose structured block violations.
   It is not part of the "omplower" pass, as that's invoked too late.  It
   should be invoked by the respective front ends after gimplification.  */
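
/* For instance (purely illustrative), both jumps below are diagnosed,
   the first as an invalid entry and the second as an invalid branch:

     goto lab;
     #pragma omp parallel
     {
       lab: ;
       goto out;
     }
     out: ;

   diagnose_sb_1 records the OMP context of every label, and
   diagnose_sb_2 then compares each branch's context with that of its
   destination label.  */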

static splay_tree all_labels;

/* Check for mismatched contexts and generate an error if needed.  Return
   true if an error is detected.  */

static bool
diagnose_sb_0 (gimple_stmt_iterator *gsi_p,
	       gimple *branch_ctx, gimple *label_ctx)
{
  gcc_checking_assert (!branch_ctx || is_gimple_omp (branch_ctx));
  gcc_checking_assert (!label_ctx || is_gimple_omp (label_ctx));

  if (label_ctx == branch_ctx)
    return false;

  const char* kind = NULL;

  if (flag_openacc)
    {
      if ((branch_ctx && is_gimple_omp_oacc (branch_ctx))
	  || (label_ctx && is_gimple_omp_oacc (label_ctx)))
	{
	  gcc_checking_assert (kind == NULL);
	  kind = "OpenACC";
	}
    }
  if (kind == NULL)
    {
      gcc_checking_assert (flag_openmp || flag_openmp_simd);
      kind = "OpenMP";
    }

  /* Previously we kept track of the label's entire context in diagnose_sb_[12]
     so we could traverse it and issue a correct "exit" or "enter" error
     message upon a structured block violation.

     We built the context by building a list with tree_cons'ing, but there is
     no easy counterpart in gimple tuples.  It seems like far too much work
     for issuing exit/enter error messages.  If someone really misses the
     distinct error message... patches welcome.  */

#if 0
  /* Try to avoid confusing the user by producing an error message
     with correct "exit" or "enter" verbiage.  We prefer "exit"
     unless we can show that LABEL_CTX is nested within BRANCH_CTX.  */
  if (branch_ctx == NULL)
    exit_p = false;
  else
    {
      while (label_ctx)
	{
	  if (TREE_VALUE (label_ctx) == branch_ctx)
	    {
	      exit_p = false;
	      break;
	    }
	  label_ctx = TREE_CHAIN (label_ctx);
	}
    }

  if (exit_p)
    error ("invalid exit from %s structured block", kind);
  else
    error ("invalid entry to %s structured block", kind);
#endif

  /* If it's obvious we have an invalid entry, be specific about the error.  */
  if (branch_ctx == NULL)
    error ("invalid entry to %s structured block", kind);
  else
    {
      /* Otherwise, be vague and lazy, but efficient.  */
      error ("invalid branch to/from %s structured block", kind);
    }

  gsi_replace (gsi_p, gimple_build_nop (), false);
  return true;
}

/* Pass 1: Create a minimal tree of structured blocks, and record
   where each label is found.  */

static tree
diagnose_sb_1 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
	       struct walk_stmt_info *wi)
{
  gimple *context = (gimple *) wi->info;
  gimple *inner_context;
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;

  switch (gimple_code (stmt))
    {
    WALK_SUBSTMTS;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_TARGET:
    case GIMPLE_OMP_TEAMS:
    case GIMPLE_OMP_TASKGROUP:
      /* The minimal context here is just the current OMP construct.  */
      inner_context = stmt;
      wi->info = inner_context;
      walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_OMP_FOR:
      inner_context = stmt;
      wi->info = inner_context;
      /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
	 walk them.  */
      walk_gimple_seq (gimple_omp_for_pre_body (stmt),
		       diagnose_sb_1, NULL, wi);
      walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_LABEL:
      splay_tree_insert (all_labels,
			 (splay_tree_key) gimple_label_label (
					    as_a <glabel *> (stmt)),
			 (splay_tree_value) context);
      break;

    default:
      break;
    }

  return NULL_TREE;
}

/* Pass 2: Check each branch and see if its context differs from that of
   the destination label's context.  */

static tree
diagnose_sb_2 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
	       struct walk_stmt_info *wi)
{
  gimple *context = (gimple *) wi->info;
  splay_tree_node n;
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;

  switch (gimple_code (stmt))
    {
    WALK_SUBSTMTS;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_TARGET:
    case GIMPLE_OMP_TEAMS:
    case GIMPLE_OMP_TASKGROUP:
      wi->info = stmt;
      walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_OMP_FOR:
      wi->info = stmt;
      /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
	 walk them.  */
      walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt),
			   diagnose_sb_2, NULL, wi);
      walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_COND:
	{
	  gcond *cond_stmt = as_a <gcond *> (stmt);
	  tree lab = gimple_cond_true_label (cond_stmt);
	  if (lab)
	    {
	      n = splay_tree_lookup (all_labels,
				     (splay_tree_key) lab);
	      diagnose_sb_0 (gsi_p, context,
			     n ? (gimple *) n->value : NULL);
	    }
	  lab = gimple_cond_false_label (cond_stmt);
	  if (lab)
	    {
	      n = splay_tree_lookup (all_labels,
				     (splay_tree_key) lab);
	      diagnose_sb_0 (gsi_p, context,
			     n ? (gimple *) n->value : NULL);
	    }
	}
      break;

    case GIMPLE_GOTO:
      {
	tree lab = gimple_goto_dest (stmt);
	if (TREE_CODE (lab) != LABEL_DECL)
	  break;

	n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
	diagnose_sb_0 (gsi_p, context, n ? (gimple *) n->value : NULL);
      }
      break;

    case GIMPLE_SWITCH:
      {
	gswitch *switch_stmt = as_a <gswitch *> (stmt);
	unsigned int i;
	for (i = 0; i < gimple_switch_num_labels (switch_stmt); ++i)
	  {
	    tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
	    n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
	    if (n && diagnose_sb_0 (gsi_p, context, (gimple *) n->value))
	      break;
	  }
      }
      break;

    case GIMPLE_RETURN:
      diagnose_sb_0 (gsi_p, context, NULL);
      break;

    default:
      break;
    }

  return NULL_TREE;
}

static unsigned int
diagnose_omp_structured_block_errors (void)
{
  struct walk_stmt_info wi;
  gimple_seq body = gimple_body (current_function_decl);

  all_labels = splay_tree_new (splay_tree_compare_pointers, 0, 0);

  memset (&wi, 0, sizeof (wi));
  walk_gimple_seq (body, diagnose_sb_1, NULL, &wi);

  memset (&wi, 0, sizeof (wi));
  wi.want_locations = true;
  walk_gimple_seq_mod (&body, diagnose_sb_2, NULL, &wi);

  gimple_set_body (current_function_decl, body);

  splay_tree_delete (all_labels);
  all_labels = NULL;

  return 0;
}

namespace {

const pass_data pass_data_diagnose_omp_blocks =
{
  GIMPLE_PASS, /* type */
  "*diagnose_omp_blocks", /* name */
  OPTGROUP_OMP, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_diagnose_omp_blocks : public gimple_opt_pass
{
public:
  pass_diagnose_omp_blocks (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_diagnose_omp_blocks, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
  {
    return flag_openacc || flag_openmp || flag_openmp_simd;
  }
  virtual unsigned int execute (function *)
    {
      return diagnose_omp_structured_block_errors ();
    }

}; // class pass_diagnose_omp_blocks

} // anon namespace

gimple_opt_pass *
make_pass_diagnose_omp_blocks (gcc::context *ctxt)
{
  return new pass_diagnose_omp_blocks (ctxt);
}


#include "gt-omp-low.h"