xref: /netbsd/external/gpl3/gcc/dist/gcc/omp-low.c (revision 48163f69)
1 /* Lowering pass for OMP directives.  Converts OMP directives into explicit
2    calls to the runtime library (libgomp), data marshalling to implement data
3    sharing and copying clauses, offloading to accelerators, and more.
4 
5    Contributed by Diego Novillo <dnovillo@redhat.com>
6 
7    Copyright (C) 2005-2020 Free Software Foundation, Inc.
8 
9 This file is part of GCC.
10 
11 GCC is free software; you can redistribute it and/or modify it under
12 the terms of the GNU General Public License as published by the Free
13 Software Foundation; either version 3, or (at your option) any later
14 version.
15 
16 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
17 WARRANTY; without even the implied warranty of MERCHANTABILITY or
18 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
19 for more details.
20 
21 You should have received a copy of the GNU General Public License
22 along with GCC; see the file COPYING3.  If not see
23 <http://www.gnu.org/licenses/>.  */
24 
25 #include "config.h"
26 #include "system.h"
27 #include "coretypes.h"
28 #include "backend.h"
29 #include "target.h"
30 #include "tree.h"
31 #include "gimple.h"
32 #include "tree-pass.h"
33 #include "ssa.h"
34 #include "cgraph.h"
35 #include "pretty-print.h"
36 #include "diagnostic-core.h"
37 #include "fold-const.h"
38 #include "stor-layout.h"
39 #include "internal-fn.h"
40 #include "gimple-fold.h"
41 #include "gimplify.h"
42 #include "gimple-iterator.h"
43 #include "gimplify-me.h"
44 #include "gimple-walk.h"
45 #include "tree-iterator.h"
46 #include "tree-inline.h"
47 #include "langhooks.h"
48 #include "tree-dfa.h"
49 #include "tree-ssa.h"
50 #include "splay-tree.h"
51 #include "omp-general.h"
52 #include "omp-low.h"
53 #include "omp-grid.h"
54 #include "gimple-low.h"
55 #include "alloc-pool.h"
56 #include "symbol-summary.h"
57 #include "tree-nested.h"
58 #include "context.h"
59 #include "gomp-constants.h"
60 #include "gimple-pretty-print.h"
61 #include "hsa-common.h"
62 #include "stringpool.h"
63 #include "attribs.h"
64 
65 /* Lowering of OMP parallel and workshare constructs proceeds in two
66    phases.  The first phase scans the function looking for OMP statements
67    and then for variables that must be replaced to satisfy data sharing
68    clauses.  The second phase expands code for the constructs, as well as
69    re-gimplifying things when variables have been replaced with complex
70    expressions.
71 
72    Final code generation is done by pass_expand_omp.  The flowgraph is
73    scanned for regions which are then moved to a new
74    function, to be invoked by the thread library, or offloaded.  */
75 
/* Context structure.  Used to store information about each OMP
   directive encountered in the code.  */

struct omp_context
{
  /* This field must be at the beginning, as we do "inheritance": Some
     callback functions for tree-inline.c (e.g., omp_copy_decl)
     receive a copy_body_data pointer that is up-casted to an
     omp_context pointer.  */
  copy_body_data cb;

  /* The tree of contexts corresponding to the encountered constructs.  */
  struct omp_context *outer;
  /* The OMP statement this context was created for.  */
  gimple *stmt;

  /* Map variables to fields in a structure that allows communication
     between sending and receiving threads.  */
  splay_tree field_map;
  tree record_type;
  tree sender_decl;
  tree receiver_decl;

  /* These are used just by task contexts, if task firstprivate fn is
     needed.  srecord_type is used to communicate from the thread
     that encountered the task construct to task firstprivate fn,
     record_type is allocated by GOMP_task, initialized by task firstprivate
     fn and passed to the task body fn.  */
  splay_tree sfield_map;
  tree srecord_type;

  /* A chain of variables to add to the top-level block surrounding the
     construct.  In the case of a parallel, this is in the child function.  */
  tree block_vars;

  /* Label to which GOMP_cancel{,lation_point} and explicit and implicit
     barriers should jump to during omplower pass.  */
  tree cancel_label;

  /* The sibling GIMPLE_OMP_FOR simd with _simt_ clause or NULL
     otherwise.  */
  gimple *simt_stmt;

  /* For task reductions registered in this context, a vector containing
     the length of the private copies block (if constant, otherwise NULL)
     and then offsets (if constant, otherwise NULL) for each entry.  */
  vec<tree> task_reductions;

  /* A hash map from the reduction clauses to the registered array
     elts.  */
  hash_map<tree, unsigned> *task_reduction_map;

  /* And a hash map from the lastprivate(conditional:) variables to their
     corresponding tracking loop iteration variables.  */
  hash_map<tree, tree> *lastprivate_conditional_map;

  /* A tree_list of the reduction clauses in this context.  This is
     only used for checking the consistency of OpenACC reduction
     clauses in scan_omp_for and is not guaranteed to contain a valid
     value outside of this function.  */
  tree local_reduction_clauses;

  /* A tree_list of the reduction clauses in outer contexts.  This is
     only used for checking the consistency of OpenACC reduction
     clauses in scan_omp_for and is not guaranteed to contain a valid
     value outside of this function.  */
  tree outer_reduction_clauses;

  /* Nesting depth of this context.  Used to beautify error messages re
     invalid gotos.  The outermost ctx is depth 1, with depth 0 being
     reserved for the main body of the function.  */
  int depth;

  /* True if this parallel directive is nested within another.  */
  bool is_nested;

  /* True if this construct can be cancelled.  */
  bool cancellable;

  /* True if lower_omp_1 should look up lastprivate conditional in parent
     context.  */
  bool combined_into_simd_safelen1;

  /* True if there is nested scan context with inclusive clause.  */
  bool scan_inclusive;

  /* True if there is nested scan context with exclusive clause.  */
  bool scan_exclusive;

  /* True in the second simd loop of for simd with inscan reductions.  */
  bool for_simd_scan_phase;

  /* True if there is order(concurrent) clause on the construct.  */
  bool order_concurrent;

  /* True if there is bind clause on the construct (i.e. a loop construct).  */
  bool loop_p;
};
173 
/* Map from OMP statements to the omp_context created for them
   (populated by new_omp_context).  */
static splay_tree all_contexts;
/* Current nesting depth of parallel/task regions during scanning;
   maintained elsewhere in this file.  */
static int taskreg_nesting_level;
/* Current nesting depth of target regions during scanning;
   maintained elsewhere in this file.  */
static int target_nesting_level;
/* DECL_UIDs of variables made addressable only because a task needs
   to take their address (see use_pointer_for_field).  */
static bitmap task_shared_vars;
/* DECL_UIDs of global variables first seen as non-addressable; they
   keep that classification for the whole pass.  See PR91216.  */
static bitmap global_nonaddressable_vars;
/* Parallel/task/host-teams contexts collected during scanning for
   later processing elsewhere in this file.  */
static vec<omp_context *> taskreg_contexts;
/* Task statements whose copy functions are produced later in the pass.  */
static vec<gomp_task *> task_cpyfns;

static void scan_omp (gimple_seq *, omp_context *);
static tree scan_omp_1_op (tree *, int *, void *);

/* Case labels for GIMPLE codes whose sub-statements should be walked;
   for use in switches inside walk_gimple_stmt callbacks, where a local
   HANDLED_OPS_P pointer is in scope.  */
#define WALK_SUBSTMTS  \
    case GIMPLE_BIND: \
    case GIMPLE_TRY: \
    case GIMPLE_CATCH: \
    case GIMPLE_EH_FILTER: \
    case GIMPLE_TRANSACTION: \
      /* The sub-statements for these should be walked.  */ \
      *handled_ops_p = false; \
      break;
194 
195 /* Return true if CTX corresponds to an OpenACC 'parallel' or 'serial'
196    region.  */
197 
198 static bool
is_oacc_parallel_or_serial(omp_context * ctx)199 is_oacc_parallel_or_serial (omp_context *ctx)
200 {
201   enum gimple_code outer_type = gimple_code (ctx->stmt);
202   return ((outer_type == GIMPLE_OMP_TARGET)
203 	  && ((gimple_omp_target_kind (ctx->stmt)
204 	       == GF_OMP_TARGET_KIND_OACC_PARALLEL)
205 	      || (gimple_omp_target_kind (ctx->stmt)
206 		  == GF_OMP_TARGET_KIND_OACC_SERIAL)));
207 }
208 
209 /* Return true if CTX corresponds to an oacc kernels region.  */
210 
211 static bool
is_oacc_kernels(omp_context * ctx)212 is_oacc_kernels (omp_context *ctx)
213 {
214   enum gimple_code outer_type = gimple_code (ctx->stmt);
215   return ((outer_type == GIMPLE_OMP_TARGET)
216 	  && (gimple_omp_target_kind (ctx->stmt)
217 	      == GF_OMP_TARGET_KIND_OACC_KERNELS));
218 }
219 
220 /* If DECL is the artificial dummy VAR_DECL created for non-static
221    data member privatization, return the underlying "this" parameter,
222    otherwise return NULL.  */
223 
224 tree
omp_member_access_dummy_var(tree decl)225 omp_member_access_dummy_var (tree decl)
226 {
227   if (!VAR_P (decl)
228       || !DECL_ARTIFICIAL (decl)
229       || !DECL_IGNORED_P (decl)
230       || !DECL_HAS_VALUE_EXPR_P (decl)
231       || !lang_hooks.decls.omp_disregard_value_expr (decl, false))
232     return NULL_TREE;
233 
234   tree v = DECL_VALUE_EXPR (decl);
235   if (TREE_CODE (v) != COMPONENT_REF)
236     return NULL_TREE;
237 
238   while (1)
239     switch (TREE_CODE (v))
240       {
241       case COMPONENT_REF:
242       case MEM_REF:
243       case INDIRECT_REF:
244       CASE_CONVERT:
245       case POINTER_PLUS_EXPR:
246 	v = TREE_OPERAND (v, 0);
247 	continue;
248       case PARM_DECL:
249 	if (DECL_CONTEXT (v) == current_function_decl
250 	    && DECL_ARTIFICIAL (v)
251 	    && TREE_CODE (TREE_TYPE (v)) == POINTER_TYPE)
252 	  return v;
253 	return NULL_TREE;
254       default:
255 	return NULL_TREE;
256       }
257 }
258 
259 /* Helper for unshare_and_remap, called through walk_tree.  */
260 
261 static tree
unshare_and_remap_1(tree * tp,int * walk_subtrees,void * data)262 unshare_and_remap_1 (tree *tp, int *walk_subtrees, void *data)
263 {
264   tree *pair = (tree *) data;
265   if (*tp == pair[0])
266     {
267       *tp = unshare_expr (pair[1]);
268       *walk_subtrees = 0;
269     }
270   else if (IS_TYPE_OR_DECL_P (*tp))
271     *walk_subtrees = 0;
272   return NULL_TREE;
273 }
274 
275 /* Return unshare_expr (X) with all occurrences of FROM
276    replaced with TO.  */
277 
278 static tree
unshare_and_remap(tree x,tree from,tree to)279 unshare_and_remap (tree x, tree from, tree to)
280 {
281   tree pair[2] = { from, to };
282   x = unshare_expr (x);
283   walk_tree (&x, unshare_and_remap_1, pair, NULL);
284   return x;
285 }
286 
287 /* Convenience function for calling scan_omp_1_op on tree operands.  */
288 
289 static inline tree
scan_omp_op(tree * tp,omp_context * ctx)290 scan_omp_op (tree *tp, omp_context *ctx)
291 {
292   struct walk_stmt_info wi;
293 
294   memset (&wi, 0, sizeof (wi));
295   wi.info = ctx;
296   wi.want_locations = true;
297 
298   return walk_tree (tp, scan_omp_1_op, &wi, NULL);
299 }
300 
301 static void lower_omp (gimple_seq *, omp_context *);
302 static tree lookup_decl_in_outer_ctx (tree, omp_context *);
303 static tree maybe_lookup_decl_in_outer_ctx (tree, omp_context *);
304 
305 /* Return true if CTX is for an omp parallel.  */
306 
307 static inline bool
is_parallel_ctx(omp_context * ctx)308 is_parallel_ctx (omp_context *ctx)
309 {
310   return gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL;
311 }
312 
313 
314 /* Return true if CTX is for an omp task.  */
315 
316 static inline bool
is_task_ctx(omp_context * ctx)317 is_task_ctx (omp_context *ctx)
318 {
319   return gimple_code (ctx->stmt) == GIMPLE_OMP_TASK;
320 }
321 
322 
323 /* Return true if CTX is for an omp taskloop.  */
324 
325 static inline bool
is_taskloop_ctx(omp_context * ctx)326 is_taskloop_ctx (omp_context *ctx)
327 {
328   return gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
329 	 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP;
330 }
331 
332 
333 /* Return true if CTX is for a host omp teams.  */
334 
335 static inline bool
is_host_teams_ctx(omp_context * ctx)336 is_host_teams_ctx (omp_context *ctx)
337 {
338   return gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
339 	 && gimple_omp_teams_host (as_a <gomp_teams *> (ctx->stmt));
340 }
341 
342 /* Return true if CTX is for an omp parallel or omp task or host omp teams
343    (the last one is strictly not a task region in OpenMP speak, but we
344    need to treat it similarly).  */
345 
346 static inline bool
is_taskreg_ctx(omp_context * ctx)347 is_taskreg_ctx (omp_context *ctx)
348 {
349   return is_parallel_ctx (ctx) || is_task_ctx (ctx) || is_host_teams_ctx (ctx);
350 }
351 
352 /* Return true if EXPR is variable sized.  */
353 
354 static inline bool
is_variable_sized(const_tree expr)355 is_variable_sized (const_tree expr)
356 {
357   return !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
358 }
359 
360 /* Lookup variables.  The "maybe" form
361    allows for the variable form to not have been entered, otherwise we
362    assert that the variable must have been entered.  */
363 
364 static inline tree
lookup_decl(tree var,omp_context * ctx)365 lookup_decl (tree var, omp_context *ctx)
366 {
367   tree *n = ctx->cb.decl_map->get (var);
368   return *n;
369 }
370 
371 static inline tree
maybe_lookup_decl(const_tree var,omp_context * ctx)372 maybe_lookup_decl (const_tree var, omp_context *ctx)
373 {
374   tree *n = ctx->cb.decl_map->get (const_cast<tree> (var));
375   return n ? *n : NULL_TREE;
376 }
377 
378 static inline tree
lookup_field(tree var,omp_context * ctx)379 lookup_field (tree var, omp_context *ctx)
380 {
381   splay_tree_node n;
382   n = splay_tree_lookup (ctx->field_map, (splay_tree_key) var);
383   return (tree) n->value;
384 }
385 
386 static inline tree
lookup_sfield(splay_tree_key key,omp_context * ctx)387 lookup_sfield (splay_tree_key key, omp_context *ctx)
388 {
389   splay_tree_node n;
390   n = splay_tree_lookup (ctx->sfield_map
391 			 ? ctx->sfield_map : ctx->field_map, key);
392   return (tree) n->value;
393 }
394 
395 static inline tree
lookup_sfield(tree var,omp_context * ctx)396 lookup_sfield (tree var, omp_context *ctx)
397 {
398   return lookup_sfield ((splay_tree_key) var, ctx);
399 }
400 
401 static inline tree
maybe_lookup_field(splay_tree_key key,omp_context * ctx)402 maybe_lookup_field (splay_tree_key key, omp_context *ctx)
403 {
404   splay_tree_node n;
405   n = splay_tree_lookup (ctx->field_map, key);
406   return n ? (tree) n->value : NULL_TREE;
407 }
408 
409 static inline tree
maybe_lookup_field(tree var,omp_context * ctx)410 maybe_lookup_field (tree var, omp_context *ctx)
411 {
412   return maybe_lookup_field ((splay_tree_key) var, ctx);
413 }
414 
/* Return true if DECL should be copied by pointer.  SHARED_CTX is
   the parallel context if DECL is to be shared; when it is NULL only
   the type-based check applies.  */

static bool
use_pointer_for_field (tree decl, omp_context *shared_ctx)
{
  /* Aggregates and atomics are always passed by reference.  */
  if (AGGREGATE_TYPE_P (TREE_TYPE (decl))
      || TYPE_ATOMIC (TREE_TYPE (decl)))
    return true;

  /* We can only use copy-in/copy-out semantics for shared variables
     when we know the value is not accessible from an outer scope.  */
  if (shared_ctx)
    {
      gcc_assert (!is_gimple_omp_oacc (shared_ctx->stmt));

      /* ??? Trivially accessible from anywhere.  But why would we even
	 be passing an address in this case?  Should we simply assert
	 this to be false, or should we have a cleanup pass that removes
	 these from the list of mappings?  */
      if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, shared_ctx)))
	return true;

      /* For variables with DECL_HAS_VALUE_EXPR_P set, we cannot tell
	 without analyzing the expression whether or not its location
	 is accessible to anyone else.  In the case of nested parallel
	 regions it certainly may be.  */
      if (TREE_CODE (decl) != RESULT_DECL && DECL_HAS_VALUE_EXPR_P (decl))
	return true;

      /* Do not use copy-in/copy-out for variables that have their
	 address taken.  */
      if (is_global_var (decl))
	{
	  /* For file scope vars, track whether we've seen them as
	     non-addressable initially and in that case, keep the same
	     answer for the duration of the pass, even when they are made
	     addressable later on e.g. through reduction expansion.  Global
	     variables which weren't addressable before the pass will not
	     have their privatized copies address taken.  See PR91216.  */
	  if (!TREE_ADDRESSABLE (decl))
	    {
	      if (!global_nonaddressable_vars)
		global_nonaddressable_vars = BITMAP_ALLOC (NULL);
	      bitmap_set_bit (global_nonaddressable_vars, DECL_UID (decl));
	    }
	  else if (!global_nonaddressable_vars
		   || !bitmap_bit_p (global_nonaddressable_vars,
				     DECL_UID (decl)))
	    return true;
	}
      else if (TREE_ADDRESSABLE (decl))
	return true;

      /* lower_send_shared_vars only uses copy-in, but not copy-out
	 for these.  */
      if (TREE_READONLY (decl)
	  || ((TREE_CODE (decl) == RESULT_DECL
	       || TREE_CODE (decl) == PARM_DECL)
	      && DECL_BY_REFERENCE (decl)))
	return false;

      /* Disallow copy-in/out in nested parallel if
	 decl is shared in outer parallel, otherwise
	 each thread could store the shared variable
	 in its own copy-in location, making the
	 variable no longer really shared.  */
      if (shared_ctx->is_nested)
	{
	  omp_context *up;

	  /* Find the innermost enclosing task region (or offloaded
	     target) that has a mapping for DECL.  */
	  for (up = shared_ctx->outer; up; up = up->outer)
	    if ((is_taskreg_ctx (up)
		 || (gimple_code (up->stmt) == GIMPLE_OMP_TARGET
		     && is_gimple_omp_offloaded (up->stmt)))
		&& maybe_lookup_decl (decl, up))
	      break;

	  if (up)
	    {
	      tree c;

	      /* Check whether DECL is actually mapped/shared on that
		 enclosing construct's clauses.  */
	      if (gimple_code (up->stmt) == GIMPLE_OMP_TARGET)
		{
		  for (c = gimple_omp_target_clauses (up->stmt);
		       c; c = OMP_CLAUSE_CHAIN (c))
		    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
			&& OMP_CLAUSE_DECL (c) == decl)
		      break;
		}
	      else
		for (c = gimple_omp_taskreg_clauses (up->stmt);
		     c; c = OMP_CLAUSE_CHAIN (c))
		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
		      && OMP_CLAUSE_DECL (c) == decl)
		    break;

	      /* Shared in the outer region: jump into the task branch
		 below to answer "by pointer", possibly marking the outer
		 decl addressable first.  */
	      if (c)
		goto maybe_mark_addressable_and_ret;
	    }
	}

      /* For tasks avoid using copy-in/out.  As tasks can be
	 deferred or executed in different thread, when GOMP_task
	 returns, the task hasn't necessarily terminated.  */
      if (is_task_ctx (shared_ctx))
	{
	  tree outer;
	maybe_mark_addressable_and_ret:
	  outer = maybe_lookup_decl_in_outer_ctx (decl, shared_ctx);
	  if (is_gimple_reg (outer) && !omp_member_access_dummy_var (outer))
	    {
	      /* Taking address of OUTER in lower_send_shared_vars
		 might need regimplification of everything that uses the
		 variable.  */
	      if (!task_shared_vars)
		task_shared_vars = BITMAP_ALLOC (NULL);
	      bitmap_set_bit (task_shared_vars, DECL_UID (outer));
	      TREE_ADDRESSABLE (outer) = 1;
	    }
	  return true;
	}
    }

  return false;
}
541 
542 /* Construct a new automatic decl similar to VAR.  */
543 
544 static tree
omp_copy_decl_2(tree var,tree name,tree type,omp_context * ctx)545 omp_copy_decl_2 (tree var, tree name, tree type, omp_context *ctx)
546 {
547   tree copy = copy_var_decl (var, name, type);
548 
549   DECL_CONTEXT (copy) = current_function_decl;
550   DECL_CHAIN (copy) = ctx->block_vars;
551   /* If VAR is listed in task_shared_vars, it means it wasn't
552      originally addressable and is just because task needs to take
553      it's address.  But we don't need to take address of privatizations
554      from that var.  */
555   if (TREE_ADDRESSABLE (var)
556       && ((task_shared_vars
557 	   && bitmap_bit_p (task_shared_vars, DECL_UID (var)))
558 	  || (global_nonaddressable_vars
559 	      && bitmap_bit_p (global_nonaddressable_vars, DECL_UID (var)))))
560     TREE_ADDRESSABLE (copy) = 0;
561   ctx->block_vars = copy;
562 
563   return copy;
564 }
565 
566 static tree
omp_copy_decl_1(tree var,omp_context * ctx)567 omp_copy_decl_1 (tree var, omp_context *ctx)
568 {
569   return omp_copy_decl_2 (var, DECL_NAME (var), TREE_TYPE (var), ctx);
570 }
571 
572 /* Build COMPONENT_REF and set TREE_THIS_VOLATILE and TREE_READONLY on it
573    as appropriate.  */
574 static tree
omp_build_component_ref(tree obj,tree field)575 omp_build_component_ref (tree obj, tree field)
576 {
577   tree ret = build3 (COMPONENT_REF, TREE_TYPE (field), obj, field, NULL);
578   if (TREE_THIS_VOLATILE (field))
579     TREE_THIS_VOLATILE (ret) |= 1;
580   if (TREE_READONLY (field))
581     TREE_READONLY (ret) |= 1;
582   return ret;
583 }
584 
585 /* Build tree nodes to access the field for VAR on the receiver side.  */
586 
587 static tree
build_receiver_ref(tree var,bool by_ref,omp_context * ctx)588 build_receiver_ref (tree var, bool by_ref, omp_context *ctx)
589 {
590   tree x, field = lookup_field (var, ctx);
591 
592   /* If the receiver record type was remapped in the child function,
593      remap the field into the new record type.  */
594   x = maybe_lookup_field (field, ctx);
595   if (x != NULL)
596     field = x;
597 
598   x = build_simple_mem_ref (ctx->receiver_decl);
599   TREE_THIS_NOTRAP (x) = 1;
600   x = omp_build_component_ref (x, field);
601   if (by_ref)
602     {
603       x = build_simple_mem_ref (x);
604       TREE_THIS_NOTRAP (x) = 1;
605     }
606 
607   return x;
608 }
609 
/* Build tree nodes to access VAR in the scope outer to CTX.  In the case
   of a parallel, this is a component reference; for workshare constructs
   this is some variable.  CODE, when given, is the clause code the
   reference is built for and refines the lookup.  */

static tree
build_outer_var_ref (tree var, omp_context *ctx,
		     enum omp_clause_code code = OMP_CLAUSE_ERROR)
{
  tree x;
  /* Taskgroups don't remap decls; skip them when looking outward.  */
  omp_context *outer = ctx->outer;
  while (outer && gimple_code (outer->stmt) == GIMPLE_OMP_TASKGROUP)
    outer = outer->outer;

  if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
    x = var;
  else if (is_variable_sized (var))
    {
      /* Variable-sized decls are addressed through their value
	 expression; recurse on the underlying pointer.  */
      x = TREE_OPERAND (DECL_VALUE_EXPR (var), 0);
      x = build_outer_var_ref (x, ctx, code);
      x = build_simple_mem_ref (x);
    }
  else if (is_taskreg_ctx (ctx))
    {
      bool by_ref = use_pointer_for_field (var, NULL);
      x = build_receiver_ref (var, by_ref, ctx);
    }
  else if ((gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
	    && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
	   || ctx->loop_p
	   || (code == OMP_CLAUSE_PRIVATE
	       && (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
		   || gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS
		   || gimple_code (ctx->stmt) == GIMPLE_OMP_SINGLE)))
    {
      /* #pragma omp simd isn't a worksharing construct, and can reference
	 even private vars in its linear etc. clauses.
	 Similarly for OMP_CLAUSE_PRIVATE with outer ref, that can refer
	 to private vars in all worksharing constructs.  */
      x = NULL_TREE;
      if (outer && is_taskreg_ctx (outer))
	x = lookup_decl (var, outer);
      else if (outer)
	x = maybe_lookup_decl_in_outer_ctx (var, ctx);
      if (x == NULL_TREE)
	x = var;
    }
  else if (code == OMP_CLAUSE_LASTPRIVATE && is_taskloop_ctx (ctx))
    {
      gcc_assert (outer);
      /* Taskloop fields are keyed by &DECL_UID (see install_var_field
	 with mask bit 8).  */
      splay_tree_node n
	= splay_tree_lookup (outer->field_map,
			     (splay_tree_key) &DECL_UID (var));
      if (n == NULL)
	{
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, outer)))
	    x = var;
	  else
	    x = lookup_decl (var, outer);
	}
      else
	{
	  tree field = (tree) n->value;
	  /* If the receiver record type was remapped in the child function,
	     remap the field into the new record type.  */
	  x = maybe_lookup_field (field, outer);
	  if (x != NULL)
	    field = x;

	  x = build_simple_mem_ref (outer->receiver_decl);
	  x = omp_build_component_ref (x, field);
	  if (use_pointer_for_field (var, outer))
	    x = build_simple_mem_ref (x);
	}
    }
  else if (outer)
    {
      /* Grid body contexts don't carry mappings; use their parent.  */
      if (gimple_code (outer->stmt) == GIMPLE_OMP_GRID_BODY)
	{
	  outer = outer->outer;
	  gcc_assert (outer
		      && gimple_code (outer->stmt) != GIMPLE_OMP_GRID_BODY);
	}
      x = lookup_decl (var, outer);
    }
  else if (omp_is_reference (var))
    /* This can happen with orphaned constructs.  If var is reference, it is
       possible it is shared and as such valid.  */
    x = var;
  else if (omp_member_access_dummy_var (var))
    x = var;
  else
    gcc_unreachable ();

  if (x == var)
    {
      /* For member-access dummies, substitute the value expression with
	 the "this" parameter remapped to the outer scope.  */
      tree t = omp_member_access_dummy_var (var);
      if (t)
	{
	  x = DECL_VALUE_EXPR (var);
	  tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
	  if (o != t)
	    x = unshare_and_remap (x, t, o);
	  else
	    x = unshare_expr (x);
	}
    }

  /* References need a final dereference to reach the object.  */
  if (omp_is_reference (var))
    x = build_simple_mem_ref (x);

  return x;
}
722 
723 /* Build tree nodes to access the field for VAR on the sender side.  */
724 
725 static tree
build_sender_ref(splay_tree_key key,omp_context * ctx)726 build_sender_ref (splay_tree_key key, omp_context *ctx)
727 {
728   tree field = lookup_sfield (key, ctx);
729   return omp_build_component_ref (ctx->sender_decl, field);
730 }
731 
732 static tree
build_sender_ref(tree var,omp_context * ctx)733 build_sender_ref (tree var, omp_context *ctx)
734 {
735   return build_sender_ref ((splay_tree_key) var, ctx);
736 }
737 
/* Add a new field for VAR inside the communication structure
   CTX->RECORD_TYPE (and/or CTX->SRECORD_TYPE) and record it in the
   matching field map(s).  BY_REF requests a pointer field instead of a
   by-value copy.  MASK is a bitmask:
     bit 0 (1)  - enter the field into CTX->FIELD_MAP / record type
     bit 1 (2)  - enter the field into CTX->SFIELD_MAP / srecord type
     bit 2 (4)  - VAR is an array; use a pointer-to-pointer field
     bit 3 (8)  - key the maps by &DECL_UID (var) instead of VAR
     bit 4 (16) - key by &DECL_NAME (var) and use the language hook's
		  array-section data type for the field.  */

static void
install_var_field (tree var, bool by_ref, int mask, omp_context *ctx)
{
  tree field, type, sfield = NULL_TREE;
  splay_tree_key key = (splay_tree_key) var;

  if ((mask & 16) != 0)
    {
      /* Alternate key so the same VAR can own several fields.  */
      key = (splay_tree_key) &DECL_NAME (var);
      gcc_checking_assert (key != (splay_tree_key) var);
    }
  if ((mask & 8) != 0)
    {
      /* Likewise, key by the address of the DECL_UID slot.  */
      key = (splay_tree_key) &DECL_UID (var);
      gcc_checking_assert (key != (splay_tree_key) var);
    }
  /* No duplicate entries, and OpenACC always installs into both maps.  */
  gcc_assert ((mask & 1) == 0
	      || !splay_tree_lookup (ctx->field_map, key));
  gcc_assert ((mask & 2) == 0 || !ctx->sfield_map
	      || !splay_tree_lookup (ctx->sfield_map, key));
  gcc_assert ((mask & 3) == 3
	      || !is_gimple_omp_oacc (ctx->stmt));

  type = TREE_TYPE (var);
  if ((mask & 16) != 0)
    type = lang_hooks.decls.omp_array_data (var, true);

  /* Prevent redeclaring the var in the split-off function with a restrict
     pointer type.  Note that we only clear type itself, restrict qualifiers in
     the pointed-to type will be ignored by points-to analysis.  */
  if (POINTER_TYPE_P (type)
      && TYPE_RESTRICT (type))
    type = build_qualified_type (type, TYPE_QUALS (type) & ~TYPE_QUAL_RESTRICT);

  if (mask & 4)
    {
      gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
      type = build_pointer_type (build_pointer_type (type));
    }
  else if (by_ref)
    type = build_pointer_type (type);
  else if ((mask & 3) == 1 && omp_is_reference (var))
    type = TREE_TYPE (type);

  field = build_decl (DECL_SOURCE_LOCATION (var),
		      FIELD_DECL, DECL_NAME (var), type);

  /* Remember what variable this field was created for.  This does have a
     side effect of making dwarf2out ignore this member, so for helpful
     debugging we clear it later in delete_omp_context.  */
  DECL_ABSTRACT_ORIGIN (field) = var;
  if ((mask & 16) == 0 && type == TREE_TYPE (var))
    {
      /* The field copies VAR's type exactly; carry over its alignment
	 and volatility too.  */
      SET_DECL_ALIGN (field, DECL_ALIGN (var));
      DECL_USER_ALIGN (field) = DECL_USER_ALIGN (var);
      TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (var);
    }
  else
    SET_DECL_ALIGN (field, TYPE_ALIGN (type));

  if ((mask & 3) == 3)
    {
      /* Field belongs in both records; build a twin for the sender
	 record when one exists.  */
      insert_field_into_struct (ctx->record_type, field);
      if (ctx->srecord_type)
	{
	  sfield = build_decl (DECL_SOURCE_LOCATION (var),
			       FIELD_DECL, DECL_NAME (var), type);
	  DECL_ABSTRACT_ORIGIN (sfield) = var;
	  SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
	  DECL_USER_ALIGN (sfield) = DECL_USER_ALIGN (field);
	  TREE_THIS_VOLATILE (sfield) = TREE_THIS_VOLATILE (field);
	  insert_field_into_struct (ctx->srecord_type, sfield);
	}
    }
  else
    {
      if (ctx->srecord_type == NULL_TREE)
	{
	  tree t;

	  /* Lazily create the sender record, mirroring every field
	     already present in the receiver record.  */
	  ctx->srecord_type = lang_hooks.types.make_type (RECORD_TYPE);
	  ctx->sfield_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
	  for (t = TYPE_FIELDS (ctx->record_type); t ; t = TREE_CHAIN (t))
	    {
	      sfield = build_decl (DECL_SOURCE_LOCATION (t),
				   FIELD_DECL, DECL_NAME (t), TREE_TYPE (t));
	      DECL_ABSTRACT_ORIGIN (sfield) = DECL_ABSTRACT_ORIGIN (t);
	      insert_field_into_struct (ctx->srecord_type, sfield);
	      splay_tree_insert (ctx->sfield_map,
				 (splay_tree_key) DECL_ABSTRACT_ORIGIN (t),
				 (splay_tree_value) sfield);
	    }
	}
      sfield = field;
      insert_field_into_struct ((mask & 1) ? ctx->record_type
				: ctx->srecord_type, field);
    }

  if (mask & 1)
    splay_tree_insert (ctx->field_map, key, (splay_tree_value) field);
  if ((mask & 2) && ctx->sfield_map)
    splay_tree_insert (ctx->sfield_map, key, (splay_tree_value) sfield);
}
844 
845 static tree
install_var_local(tree var,omp_context * ctx)846 install_var_local (tree var, omp_context *ctx)
847 {
848   tree new_var = omp_copy_decl_1 (var, ctx);
849   insert_decl_map (&ctx->cb, var, new_var);
850   return new_var;
851 }
852 
/* Adjust the replacement for DECL in CTX for the new context.  This means
   copying the DECL_VALUE_EXPR, and fixing up the type.  PRIVATE_DEBUG
   forces the value expression to be copied as well.  */

static void
fixup_remapped_decl (tree decl, omp_context *ctx, bool private_debug)
{
  tree new_decl, size;

  new_decl = lookup_decl (decl, ctx);

  TREE_TYPE (new_decl) = remap_type (TREE_TYPE (decl), &ctx->cb);

  /* Copy the value expression for variable-sized decls (or debug
     copies), remapping any decls it references into this context.  */
  if ((!TREE_CONSTANT (DECL_SIZE (new_decl)) || private_debug)
      && DECL_HAS_VALUE_EXPR_P (decl))
    {
      tree ve = DECL_VALUE_EXPR (decl);
      walk_tree (&ve, copy_tree_body_r, &ctx->cb, NULL);
      SET_DECL_VALUE_EXPR (new_decl, ve);
      DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
    }

  if (!TREE_CONSTANT (DECL_SIZE (new_decl)))
    {
      /* Remap the non-constant size trees; if remapping fails fall
	 back to the remapped type's size.  */
      size = remap_decl (DECL_SIZE (decl), &ctx->cb);
      if (size == error_mark_node)
	size = TYPE_SIZE (TREE_TYPE (new_decl));
      DECL_SIZE (new_decl) = size;

      size = remap_decl (DECL_SIZE_UNIT (decl), &ctx->cb);
      if (size == error_mark_node)
	size = TYPE_SIZE_UNIT (TREE_TYPE (new_decl));
      DECL_SIZE_UNIT (new_decl) = size;
    }
}
887 
/* The callback for remap_decl.  Search all containing contexts for a
   mapping of the variable; this avoids having to duplicate the splay
   tree ahead of time.  We know a mapping doesn't already exist in the
   given context.  Create new mappings to implement default semantics.  */

static tree
omp_copy_decl (tree var, copy_body_data *cb)
{
  /* CB is really an omp_context; see the comment on omp_context::cb.  */
  omp_context *ctx = (omp_context *) cb;
  tree new_var;

  if (TREE_CODE (var) == LABEL_DECL)
    {
      /* Forced and non-local labels must keep their identity.  */
      if (FORCED_LABEL (var) || DECL_NONLOCAL (var))
	return var;
      new_var = create_artificial_label (DECL_SOURCE_LOCATION (var));
      DECL_CONTEXT (new_var) = current_function_decl;
      insert_decl_map (&ctx->cb, var, new_var);
      return new_var;
    }

  /* Walk outward to the innermost parallel/task/host-teams context,
     reusing any mapping already recorded along the way.  */
  while (!is_taskreg_ctx (ctx))
    {
      ctx = ctx->outer;
      if (ctx == NULL)
	return var;
      new_var = maybe_lookup_decl (var, ctx);
      if (new_var)
	return new_var;
    }

  /* Globals and decls from other functions are never remapped.  */
  if (is_global_var (var) || decl_function_context (var) != ctx->cb.src_fn)
    return var;

  /* Anything else should have been entered during scanning.  */
  return error_mark_node;
}
924 
925 /* Create a new context, with OUTER_CTX being the surrounding context.  */
926 
static omp_context *
new_omp_context (gimple *stmt, omp_context *outer_ctx)
{
  /* Allocate a zero-initialized context and register it in the global
     stmt -> context map.  */
  omp_context *ctx = XCNEW (omp_context);

  splay_tree_insert (all_contexts, (splay_tree_key) stmt,
		     (splay_tree_value) ctx);
  ctx->stmt = stmt;

  if (outer_ctx)
    {
      /* Nested context: inherit the copy_body_data from the enclosing
	 context, but start a fresh BLOCK.  */
      ctx->outer = outer_ctx;
      ctx->cb = outer_ctx->cb;
      ctx->cb.block = NULL;
      ctx->depth = outer_ctx->depth + 1;
    }
  else
    {
      /* Outermost context: set up the copy_body_data from scratch,
	 remapping within the current function via omp_copy_decl.  */
      ctx->cb.src_fn = current_function_decl;
      ctx->cb.dst_fn = current_function_decl;
      ctx->cb.src_node = cgraph_node::get (current_function_decl);
      gcc_checking_assert (ctx->cb.src_node);
      ctx->cb.dst_node = ctx->cb.src_node;
      ctx->cb.src_cfun = cfun;
      ctx->cb.copy_decl = omp_copy_decl;
      ctx->cb.eh_lp_nr = 0;
      ctx->cb.transform_call_graph_edges = CB_CGE_MOVE;
      ctx->cb.adjust_array_error_bounds = true;
      ctx->cb.dont_remap_vla_if_no_change = true;
      ctx->depth = 1;
    }

  /* Each context gets its own decl map, even when nested.  */
  ctx->cb.decl_map = new hash_map<tree, tree>;

  return ctx;
}
963 
964 static gimple_seq maybe_catch_exception (gimple_seq);
965 
966 /* Finalize task copyfn.  */
967 
static void
finalize_task_copyfn (gomp_task *task_stmt)
{
  struct function *child_cfun;
  tree child_fn;
  gimple_seq seq = NULL, new_seq;
  gbind *bind;

  /* Nothing to do for tasks without a copy function.  */
  child_fn = gimple_omp_task_copy_fn (task_stmt);
  if (child_fn == NULL_TREE)
    return;

  child_cfun = DECL_STRUCT_FUNCTION (child_fn);
  DECL_STRUCT_FUNCTION (child_fn)->curr_properties = cfun->curr_properties;

  /* Gimplify the copy function's body within its own cfun.  */
  push_cfun (child_cfun);
  bind = gimplify_body (child_fn, false);
  gimple_seq_add_stmt (&seq, bind);
  new_seq = maybe_catch_exception (seq);
  if (new_seq != seq)
    {
      /* An EH wrapper was added; re-wrap it in a bind so the body
	 remains a single GIMPLE_BIND.  */
      bind = gimple_build_bind (NULL, new_seq, NULL);
      seq = NULL;
      gimple_seq_add_stmt (&seq, bind);
    }
  gimple_set_body (child_fn, seq);
  pop_cfun ();

  /* Inform the callgraph about the new function.  */
  cgraph_node *node = cgraph_node::get_create (child_fn);
  node->parallelized_function = 1;
  cgraph_node::add_new_function (child_fn, false);
}
1001 
/* Destroy an omp_context data structure.  Called through the splay tree
   value delete callback.  */
1004 
1005 static void
delete_omp_context(splay_tree_value value)1006 delete_omp_context (splay_tree_value value)
1007 {
1008   omp_context *ctx = (omp_context *) value;
1009 
1010   delete ctx->cb.decl_map;
1011 
1012   if (ctx->field_map)
1013     splay_tree_delete (ctx->field_map);
1014   if (ctx->sfield_map)
1015     splay_tree_delete (ctx->sfield_map);
1016 
1017   /* We hijacked DECL_ABSTRACT_ORIGIN earlier.  We need to clear it before
1018      it produces corrupt debug information.  */
1019   if (ctx->record_type)
1020     {
1021       tree t;
1022       for (t = TYPE_FIELDS (ctx->record_type); t ; t = DECL_CHAIN (t))
1023 	DECL_ABSTRACT_ORIGIN (t) = NULL;
1024     }
1025   if (ctx->srecord_type)
1026     {
1027       tree t;
1028       for (t = TYPE_FIELDS (ctx->srecord_type); t ; t = DECL_CHAIN (t))
1029 	DECL_ABSTRACT_ORIGIN (t) = NULL;
1030     }
1031 
1032   if (ctx->task_reduction_map)
1033     {
1034       ctx->task_reductions.release ();
1035       delete ctx->task_reduction_map;
1036     }
1037 
1038   delete ctx->lastprivate_conditional_map;
1039 
1040   XDELETE (ctx);
1041 }
1042 
1043 /* Fix up RECEIVER_DECL with a type that has been remapped to the child
1044    context.  */
1045 
static void
fixup_child_record_type (omp_context *ctx)
{
  tree f, type = ctx->record_type;

  if (!ctx->receiver_decl)
    return;
  /* ??? It isn't sufficient to just call remap_type here, because
     variably_modified_type_p doesn't work the way we expect for
     record types.  Testing each field for whether it needs remapping
     and creating a new record by hand works, however.  */
  for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
    if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
      break;
  if (f)
    {
      /* At least one field is variably modified: build a fresh record
	 with every field's type, size, and offset remapped into the
	 child context.  */
      tree name, new_fields = NULL;

      type = lang_hooks.types.make_type (RECORD_TYPE);
      name = DECL_NAME (TYPE_NAME (ctx->record_type));
      name = build_decl (DECL_SOURCE_LOCATION (ctx->receiver_decl),
			 TYPE_DECL, name, type);
      TYPE_NAME (type) = name;

      for (f = TYPE_FIELDS (ctx->record_type); f ; f = DECL_CHAIN (f))
	{
	  tree new_f = copy_node (f);
	  DECL_CONTEXT (new_f) = type;
	  TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &ctx->cb);
	  DECL_CHAIN (new_f) = new_fields;
	  walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &ctx->cb, NULL);
	  walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r,
		     &ctx->cb, NULL);
	  walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
		     &ctx->cb, NULL);
	  new_fields = new_f;

	  /* Arrange to be able to look up the receiver field
	     given the sender field.  */
	  splay_tree_insert (ctx->field_map, (splay_tree_key) f,
			     (splay_tree_value) new_f);
	}
      /* Fields were chained in reverse; restore source order before
	 laying out the record.  */
      TYPE_FIELDS (type) = nreverse (new_fields);
      layout_type (type);
    }

  /* In a target region we never modify any of the pointers in *.omp_data_i,
     so attempt to help the optimizers.  */
  if (is_gimple_omp_offloaded (ctx->stmt))
    type = build_qualified_type (type, TYPE_QUAL_CONST);

  TREE_TYPE (ctx->receiver_decl)
    = build_qualified_type (build_reference_type (type), TYPE_QUAL_RESTRICT);
}
1100 
1101 /* Instantiate decls as necessary in CTX to satisfy the data sharing
1102    specified by CLAUSES.  */
1103 
static void
scan_sharing_clauses (tree clauses, omp_context *ctx)
{
  tree c, decl;
  bool scan_array_reductions = false;

  /* First pass: install record fields and/or local replacement decls
     for each data-sharing clause.  */
  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      bool by_ref;

      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_PRIVATE:
	  decl = OMP_CLAUSE_DECL (c);
	  if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
	    goto do_private;
	  else if (!is_variable_sized (decl))
	    install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_SHARED:
	  decl = OMP_CLAUSE_DECL (c);
	  /* Ignore shared directives in teams construct inside of
	     target construct.  */
	  if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
	      && !is_host_teams_ctx (ctx))
	    {
	      /* Global variables don't need to be copied,
		 the receiver side will use them directly.  */
	      tree odecl = maybe_lookup_decl_in_outer_ctx (decl, ctx);
	      if (is_global_var (odecl))
		break;
	      insert_decl_map (&ctx->cb, decl, odecl);
	      break;
	    }
	  gcc_assert (is_taskreg_ctx (ctx));
	  gcc_assert (!COMPLETE_TYPE_P (TREE_TYPE (decl))
		      || !is_variable_sized (decl));
	  /* Global variables don't need to be copied,
	     the receiver side will use them directly.  */
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
	    break;
	  if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	    {
	      use_pointer_for_field (decl, ctx);
	      break;
	    }
	  by_ref = use_pointer_for_field (decl, NULL);
	  if ((! TREE_READONLY (decl) && !OMP_CLAUSE_SHARED_READONLY (c))
	      || TREE_ADDRESSABLE (decl)
	      || by_ref
	      || omp_is_reference (decl))
	    {
	      by_ref = use_pointer_for_field (decl, ctx);
	      install_var_field (decl, by_ref, 3, ctx);
	      install_var_local (decl, ctx);
	      break;
	    }
	  /* We don't need to copy const scalar vars back.  */
	  OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_FIRSTPRIVATE);
	  goto do_private;

	case OMP_CLAUSE_REDUCTION:
	  /* Collect 'reduction' clauses on OpenACC compute construct.  */
	  if (is_gimple_omp_oacc (ctx->stmt)
	      && is_gimple_omp_offloaded (ctx->stmt))
	    {
	      /* No 'reduction' clauses on OpenACC 'kernels'.  */
	      gcc_checking_assert (!is_oacc_kernels (ctx));

	      ctx->local_reduction_clauses
		= tree_cons (NULL, c, ctx->local_reduction_clauses);
	    }
	  /* FALLTHRU */

	case OMP_CLAUSE_IN_REDUCTION:
	  decl = OMP_CLAUSE_DECL (c);
	  if (TREE_CODE (decl) == MEM_REF)
	    {
	      /* Array-section reduction: strip addressing down to the
		 underlying base decl and install that.  */
	      tree t = TREE_OPERAND (decl, 0);
	      if (TREE_CODE (t) == POINTER_PLUS_EXPR)
		t = TREE_OPERAND (t, 0);
	      if (TREE_CODE (t) == INDIRECT_REF
		  || TREE_CODE (t) == ADDR_EXPR)
		t = TREE_OPERAND (t, 0);
	      install_var_local (t, ctx);
	      if (is_taskreg_ctx (ctx)
		  && (!is_global_var (maybe_lookup_decl_in_outer_ctx (t, ctx))
		      || (is_task_ctx (ctx)
			  && (TREE_CODE (TREE_TYPE (t)) == POINTER_TYPE
			      || (TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE
				  && (TREE_CODE (TREE_TYPE (TREE_TYPE (t)))
				      == POINTER_TYPE)))))
		  && !is_variable_sized (t)
		  && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
		      || (!OMP_CLAUSE_REDUCTION_TASK (c)
			  && !is_task_ctx (ctx))))
		{
		  by_ref = use_pointer_for_field (t, NULL);
		  if (is_task_ctx (ctx)
		      && TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE
		      && TREE_CODE (TREE_TYPE (TREE_TYPE (t))) == POINTER_TYPE)
		    {
		      install_var_field (t, false, 1, ctx);
		      install_var_field (t, by_ref, 2, ctx);
		    }
		  else
		    install_var_field (t, by_ref, 3, ctx);
		}
	      break;
	    }
	  if (is_task_ctx (ctx)
	      || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
		  && OMP_CLAUSE_REDUCTION_TASK (c)
		  && is_parallel_ctx (ctx)))
	    {
	      /* Global variables don't need to be copied,
		 the receiver side will use them directly.  */
	      if (!is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
		{
		  by_ref = use_pointer_for_field (decl, ctx);
		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION)
		    install_var_field (decl, by_ref, 3, ctx);
		}
	      install_var_local (decl, ctx);
	      break;
	    }
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	      && OMP_CLAUSE_REDUCTION_TASK (c))
	    {
	      install_var_local (decl, ctx);
	      break;
	    }
	  goto do_private;

	case OMP_CLAUSE_LASTPRIVATE:
	  /* Let the corresponding firstprivate clause create
	     the variable.  */
	  if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
	    break;
	  /* FALLTHRU */

	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_LINEAR:
	  decl = OMP_CLAUSE_DECL (c);
	do_private:
	  if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
	       || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
	      && is_gimple_omp_offloaded (ctx->stmt))
	    {
	      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
		install_var_field (decl, !omp_is_reference (decl), 3, ctx);
	      else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
		install_var_field (decl, true, 3, ctx);
	      else
		install_var_field (decl, false, 3, ctx);
	    }
	  if (is_variable_sized (decl))
	    {
	      if (is_task_ctx (ctx))
		install_var_field (decl, false, 1, ctx);
	      break;
	    }
	  else if (is_taskreg_ctx (ctx))
	    {
	      bool global
		= is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx));
	      by_ref = use_pointer_for_field (decl, NULL);

	      if (is_task_ctx (ctx)
		  && (global || by_ref || omp_is_reference (decl)))
		{
		  install_var_field (decl, false, 1, ctx);
		  if (!global)
		    install_var_field (decl, by_ref, 2, ctx);
		}
	      else if (!global)
		install_var_field (decl, by_ref, 3, ctx);
	    }
	  install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_USE_DEVICE_PTR:
	case OMP_CLAUSE_USE_DEVICE_ADDR:
	  decl = OMP_CLAUSE_DECL (c);

	  /* Fortran array descriptors.  */
	  if (lang_hooks.decls.omp_array_data (decl, true))
	    install_var_field (decl, false, 19, ctx);
	  else if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
		    && !omp_is_reference (decl)
		    && !omp_is_allocatable_or_ptr (decl))
		   || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	    install_var_field (decl, true, 11, ctx);
	  else
	    install_var_field (decl, false, 11, ctx);
	  if (DECL_SIZE (decl)
	      && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
	    {
	      /* Variable-sized decl: also install the base pointer out
		 of its DECL_VALUE_EXPR.  */
	      tree decl2 = DECL_VALUE_EXPR (decl);
	      gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
	      decl2 = TREE_OPERAND (decl2, 0);
	      gcc_assert (DECL_P (decl2));
	      install_var_local (decl2, ctx);
	    }
	  install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_IS_DEVICE_PTR:
	  decl = OMP_CLAUSE_DECL (c);
	  goto do_private;

	case OMP_CLAUSE__LOOPTEMP_:
	case OMP_CLAUSE__REDUCTEMP_:
	  gcc_assert (is_taskreg_ctx (ctx));
	  decl = OMP_CLAUSE_DECL (c);
	  install_var_field (decl, false, 3, ctx);
	  install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_COPYIN:
	  decl = OMP_CLAUSE_DECL (c);
	  by_ref = use_pointer_for_field (decl, NULL);
	  install_var_field (decl, by_ref, 3, ctx);
	  break;

	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_DIST_SCHEDULE:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE_PRIORITY:
	case OMP_CLAUSE_GRAINSIZE:
	case OMP_CLAUSE_NUM_TASKS:
	case OMP_CLAUSE_NUM_GANGS:
	case OMP_CLAUSE_NUM_WORKERS:
	case OMP_CLAUSE_VECTOR_LENGTH:
	  /* Clauses with an expression operand: the operand is evaluated
	     in the enclosing context, so scan it there.  */
	  if (ctx->outer)
	    scan_omp_op (&OMP_CLAUSE_OPERAND (c, 0), ctx->outer);
	  break;

	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	case OMP_CLAUSE_MAP:
	  if (ctx->outer)
	    scan_omp_op (&OMP_CLAUSE_SIZE (c), ctx->outer);
	  decl = OMP_CLAUSE_DECL (c);
	  /* Global variables with "omp declare target" attribute
	     don't need to be copied, the receiver side will use them
	     directly.  However, global variables with "omp declare target link"
	     attribute need to be copied.  Or when ALWAYS modifier is used.  */
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && DECL_P (decl)
	      && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
		   && (OMP_CLAUSE_MAP_KIND (c)
		       != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
		  || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	      && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_TO
	      && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_FROM
	      && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_TOFROM
	      && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
	      && varpool_node::get_create (decl)->offloadable
	      && !lookup_attribute ("omp declare target link",
				    DECL_ATTRIBUTES (decl)))
	    break;
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER)
	    {
	      /* Ignore GOMP_MAP_POINTER kind for arrays in regions that are
		 not offloaded; there is nothing to map for those.  */
	      if (!is_gimple_omp_offloaded (ctx->stmt)
		  && !POINTER_TYPE_P (TREE_TYPE (decl))
		  && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
		break;
	    }
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
	      && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
		  || (OMP_CLAUSE_MAP_KIND (c)
		      == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
	    {
	      if (TREE_CODE (decl) == COMPONENT_REF
		  || (TREE_CODE (decl) == INDIRECT_REF
		      && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
		      && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
			  == REFERENCE_TYPE)))
		break;
	      if (DECL_SIZE (decl)
		  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  install_var_local (decl2, ctx);
		}
	      install_var_local (decl, ctx);
	      break;
	    }
	  if (DECL_P (decl))
	    {
	      if (DECL_SIZE (decl)
		  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  install_var_field (decl2, true, 3, ctx);
		  install_var_local (decl2, ctx);
		  install_var_local (decl, ctx);
		}
	      else
		{
		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
		      && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
		      && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
		      && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
		    install_var_field (decl, true, 7, ctx);
		  else
		    install_var_field (decl, true, 3, ctx);
		  if (is_gimple_omp_offloaded (ctx->stmt)
		      && !OMP_CLAUSE_MAP_IN_REDUCTION (c))
		    install_var_local (decl, ctx);
		}
	    }
	  else
	    {
	      /* Non-decl map operand (e.g. an array section).  */
	      tree base = get_base_address (decl);
	      tree nc = OMP_CLAUSE_CHAIN (c);
	      if (DECL_P (base)
		  && nc != NULL_TREE
		  && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
		  && OMP_CLAUSE_DECL (nc) == base
		  && OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_POINTER
		  && integer_zerop (OMP_CLAUSE_SIZE (nc)))
		{
		  OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c) = 1;
		  OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (nc) = 1;
		}
	      else
		{
		  if (ctx->outer)
		    {
		      scan_omp_op (&OMP_CLAUSE_DECL (c), ctx->outer);
		      decl = OMP_CLAUSE_DECL (c);
		    }
		  gcc_assert (!splay_tree_lookup (ctx->field_map,
						  (splay_tree_key) decl));
		  tree field
		    = build_decl (OMP_CLAUSE_LOCATION (c),
				  FIELD_DECL, NULL_TREE, ptr_type_node);
		  SET_DECL_ALIGN (field, TYPE_ALIGN (ptr_type_node));
		  insert_field_into_struct (ctx->record_type, field);
		  splay_tree_insert (ctx->field_map, (splay_tree_key) decl,
				     (splay_tree_value) field);
		}
	    }
	  break;

	case OMP_CLAUSE__GRIDDIM_:
	  if (ctx->outer)
	    {
	      scan_omp_op (&OMP_CLAUSE__GRIDDIM__SIZE (c), ctx->outer);
	      scan_omp_op (&OMP_CLAUSE__GRIDDIM__GROUP (c), ctx->outer);
	    }
	  break;

	case OMP_CLAUSE_ORDER:
	  ctx->order_concurrent = true;
	  break;

	case OMP_CLAUSE_BIND:
	  ctx->loop_p = true;
	  break;

	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE_SIMDLEN:
	case OMP_CLAUSE_THREADS:
	case OMP_CLAUSE_SIMD:
	case OMP_CLAUSE_NOGROUP:
	case OMP_CLAUSE_DEFAULTMAP:
	case OMP_CLAUSE_ASYNC:
	case OMP_CLAUSE_WAIT:
	case OMP_CLAUSE_GANG:
	case OMP_CLAUSE_WORKER:
	case OMP_CLAUSE_VECTOR:
	case OMP_CLAUSE_INDEPENDENT:
	case OMP_CLAUSE_AUTO:
	case OMP_CLAUSE_SEQ:
	case OMP_CLAUSE_TILE:
	case OMP_CLAUSE__SIMT_:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_NONTEMPORAL:
	case OMP_CLAUSE_IF_PRESENT:
	case OMP_CLAUSE_FINALIZE:
	case OMP_CLAUSE_TASK_REDUCTION:
	  break;

	case OMP_CLAUSE_ALIGNED:
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_global_var (decl)
	      && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	    install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE__CONDTEMP_:
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_parallel_ctx (ctx))
	    {
	      install_var_field (decl, false, 3, ctx);
	      install_var_local (decl, ctx);
	    }
	  else if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
		   && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD
		   && !OMP_CLAUSE__CONDTEMP__ITER (c))
	    install_var_local (decl, ctx);
	  break;

	case OMP_CLAUSE__CACHE_:
	default:
	  gcc_unreachable ();
	}
    }

  /* Second pass: now that every replacement decl exists, fix up the
     remapped decls (types, sizes, DECL_VALUE_EXPRs).  */
  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_LASTPRIVATE:
	  /* Let the corresponding firstprivate clause create
	     the variable.  */
	  if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
	    scan_array_reductions = true;
	  if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
	    break;
	  /* FALLTHRU */

	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_LINEAR:
	case OMP_CLAUSE_IS_DEVICE_PTR:
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_variable_sized (decl))
	    {
	      if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
		   || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
		  && is_gimple_omp_offloaded (ctx->stmt))
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  install_var_local (decl2, ctx);
		  fixup_remapped_decl (decl2, ctx, false);
		}
	      install_var_local (decl, ctx);
	    }
	  fixup_remapped_decl (decl, ctx,
			       OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
			       && OMP_CLAUSE_PRIVATE_DEBUG (c));
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
	      && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
	    scan_array_reductions = true;
	  break;

	case OMP_CLAUSE_REDUCTION:
	case OMP_CLAUSE_IN_REDUCTION:
	  decl = OMP_CLAUSE_DECL (c);
	  if (TREE_CODE (decl) != MEM_REF)
	    {
	      if (is_variable_sized (decl))
		install_var_local (decl, ctx);
	      fixup_remapped_decl (decl, ctx, false);
	    }
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	    scan_array_reductions = true;
	  break;

	case OMP_CLAUSE_TASK_REDUCTION:
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	    scan_array_reductions = true;
	  break;

	case OMP_CLAUSE_SHARED:
	  /* Ignore shared directives in teams construct inside of
	     target construct.  */
	  if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
	      && !is_host_teams_ctx (ctx))
	    break;
	  decl = OMP_CLAUSE_DECL (c);
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
	    break;
	  if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	    {
	      if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl,
								 ctx->outer)))
		break;
	      bool by_ref = use_pointer_for_field (decl, ctx);
	      install_var_field (decl, by_ref, 11, ctx);
	      break;
	    }
	  fixup_remapped_decl (decl, ctx, false);
	  break;

	case OMP_CLAUSE_MAP:
	  if (!is_gimple_omp_offloaded (ctx->stmt))
	    break;
	  decl = OMP_CLAUSE_DECL (c);
	  if (DECL_P (decl)
	      && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
		   && (OMP_CLAUSE_MAP_KIND (c)
		       != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
		  || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	      && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
	      && varpool_node::get_create (decl)->offloadable)
	    break;
	  if (DECL_P (decl))
	    {
	      if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
		   || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
		  && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
		  && !COMPLETE_TYPE_P (TREE_TYPE (decl)))
		{
		  tree new_decl = lookup_decl (decl, ctx);
		  TREE_TYPE (new_decl)
		    = remap_type (TREE_TYPE (decl), &ctx->cb);
		}
	      else if (DECL_SIZE (decl)
		       && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
		{
		  tree decl2 = DECL_VALUE_EXPR (decl);
		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
		  decl2 = TREE_OPERAND (decl2, 0);
		  gcc_assert (DECL_P (decl2));
		  fixup_remapped_decl (decl2, ctx, false);
		  fixup_remapped_decl (decl, ctx, true);
		}
	      else
		fixup_remapped_decl (decl, ctx, false);
	    }
	  break;

	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_DIST_SCHEDULE:
	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE_SIMDLEN:
	case OMP_CLAUSE_ALIGNED:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE__LOOPTEMP_:
	case OMP_CLAUSE__REDUCTEMP_:
	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	case OMP_CLAUSE_PRIORITY:
	case OMP_CLAUSE_GRAINSIZE:
	case OMP_CLAUSE_NUM_TASKS:
	case OMP_CLAUSE_THREADS:
	case OMP_CLAUSE_SIMD:
	case OMP_CLAUSE_NOGROUP:
	case OMP_CLAUSE_DEFAULTMAP:
	case OMP_CLAUSE_ORDER:
	case OMP_CLAUSE_BIND:
	case OMP_CLAUSE_USE_DEVICE_PTR:
	case OMP_CLAUSE_USE_DEVICE_ADDR:
	case OMP_CLAUSE_NONTEMPORAL:
	case OMP_CLAUSE_ASYNC:
	case OMP_CLAUSE_WAIT:
	case OMP_CLAUSE_NUM_GANGS:
	case OMP_CLAUSE_NUM_WORKERS:
	case OMP_CLAUSE_VECTOR_LENGTH:
	case OMP_CLAUSE_GANG:
	case OMP_CLAUSE_WORKER:
	case OMP_CLAUSE_VECTOR:
	case OMP_CLAUSE_INDEPENDENT:
	case OMP_CLAUSE_AUTO:
	case OMP_CLAUSE_SEQ:
	case OMP_CLAUSE_TILE:
	case OMP_CLAUSE__GRIDDIM_:
	case OMP_CLAUSE__SIMT_:
	case OMP_CLAUSE_IF_PRESENT:
	case OMP_CLAUSE_FINALIZE:
	case OMP_CLAUSE__CONDTEMP_:
	  break;

	case OMP_CLAUSE__CACHE_:
	default:
	  gcc_unreachable ();
	}
    }

  /* Finally, scan any GIMPLE sequences embedded in reduction,
     lastprivate, or linear clauses.  */
  gcc_checking_assert (!scan_array_reductions
		       || !is_gimple_omp_oacc (ctx->stmt));
  if (scan_array_reductions)
    {
      for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
	if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	     || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
	     || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
	    && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	  {
	    scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c), ctx);
	    scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
	  }
	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
		 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
	  scan_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
		 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
	  scan_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
    }
}
1740 
1741 /* Create a new name for omp child function.  Returns an identifier. */
1742 
1743 static tree
create_omp_child_function_name(bool task_copy)1744 create_omp_child_function_name (bool task_copy)
1745 {
1746   return clone_function_name_numbered (current_function_decl,
1747 				       task_copy ? "_omp_cpyfn" : "_omp_fn");
1748 }
1749 
1750 /* Return true if CTX may belong to offloaded code: either if current function
1751    is offloaded, or any enclosing context corresponds to a target region.  */
1752 
1753 static bool
omp_maybe_offloaded_ctx(omp_context * ctx)1754 omp_maybe_offloaded_ctx (omp_context *ctx)
1755 {
1756   if (cgraph_node::get (current_function_decl)->offloadable)
1757     return true;
1758   for (; ctx; ctx = ctx->outer)
1759     if (is_gimple_omp_offloaded (ctx->stmt))
1760       return true;
1761   return false;
1762 }
1763 
1764 /* Build a decl for the omp child function.  It'll not contain a body
1765    yet, just the bare decl.  */
1766 
static void
create_omp_child_function (omp_context *ctx, bool task_copy)
{
  tree decl, type, name, t;

  name = create_omp_child_function_name (task_copy);
  /* A task copy function takes two data-block pointers (destination,
     source); every other child function takes a single data pointer.  */
  if (task_copy)
    type = build_function_type_list (void_type_node, ptr_type_node,
				     ptr_type_node, NULL_TREE);
  else
    type = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);

  decl = build_decl (gimple_location (ctx->stmt), FUNCTION_DECL, name, type);

  /* OpenACC constructs never create a task copy function.  */
  gcc_checking_assert (!is_gimple_omp_oacc (ctx->stmt)
		       || !task_copy);
  if (!task_copy)
    ctx->cb.dst_fn = decl;
  else
    gimple_omp_task_set_copy_fn (ctx->stmt, decl);

  TREE_STATIC (decl) = 1;
  TREE_USED (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  DECL_IGNORED_P (decl) = 0;
  TREE_PUBLIC (decl) = 0;
  DECL_UNINLINABLE (decl) = 1;
  DECL_EXTERNAL (decl) = 0;
  DECL_CONTEXT (decl) = NULL_TREE;
  DECL_INITIAL (decl) = make_node (BLOCK);
  BLOCK_SUPERCONTEXT (DECL_INITIAL (decl)) = decl;
  /* Start from the parent function's attribute list ...  */
  DECL_ATTRIBUTES (decl) = DECL_ATTRIBUTES (current_function_decl);
  /* Remove omp declare simd attribute from the new attributes.  */
  if (tree a = lookup_attribute ("omp declare simd", DECL_ATTRIBUTES (decl)))
    {
      /* Advance A past the last "omp declare simd" entry; everything from
	 there on can be shared with the parent's list unchanged.  */
      while (tree a2 = lookup_attribute ("omp declare simd", TREE_CHAIN (a)))
	a = a2;
      a = TREE_CHAIN (a);
      /* Copy the prefix up to A, dropping "omp declare simd" entries, so
	 the parent's attribute chain itself is never modified.  */
      for (tree *p = &DECL_ATTRIBUTES (decl); *p != a;)
	if (is_attribute_p ("omp declare simd", get_attribute_name (*p)))
	  *p = TREE_CHAIN (*p);
	else
	  {
	    tree chain = TREE_CHAIN (*p);
	    *p = copy_node (*p);
	    p = &TREE_CHAIN (*p);
	    *p = chain;
	  }
    }
  /* Inherit optimization/target options and versioning from the parent.  */
  DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)
    = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (current_function_decl);
  DECL_FUNCTION_SPECIFIC_TARGET (decl)
    = DECL_FUNCTION_SPECIFIC_TARGET (current_function_decl);
  DECL_FUNCTION_VERSIONED (decl)
    = DECL_FUNCTION_VERSIONED (current_function_decl);

  if (omp_maybe_offloaded_ctx (ctx))
    {
      cgraph_node::get_create (decl)->offloadable = 1;
      if (ENABLE_OFFLOADING)
	g->have_offload = true;
    }

  if (cgraph_node::get_create (decl)->offloadable
      && !lookup_attribute ("omp declare target",
                           DECL_ATTRIBUTES (current_function_decl)))
    {
      /* Offloaded regions get the entrypoint attribute; other offloadable
	 child functions are marked declare-target.  */
      const char *target_attr = (is_gimple_omp_offloaded (ctx->stmt)
				 ? "omp target entrypoint"
				 : "omp declare target");
      DECL_ATTRIBUTES (decl)
	= tree_cons (get_identifier (target_attr),
		     NULL_TREE, DECL_ATTRIBUTES (decl));
    }

  /* The child function returns void.  */
  t = build_decl (DECL_SOURCE_LOCATION (decl),
		  RESULT_DECL, NULL_TREE, void_type_node);
  DECL_ARTIFICIAL (t) = 1;
  DECL_IGNORED_P (t) = 1;
  DECL_CONTEXT (t) = decl;
  DECL_RESULT (decl) = t;

  /* Incoming data-block pointer parameter.  */
  tree data_name = get_identifier (".omp_data_i");
  t = build_decl (DECL_SOURCE_LOCATION (decl), PARM_DECL, data_name,
		  ptr_type_node);
  DECL_ARTIFICIAL (t) = 1;
  DECL_NAMELESS (t) = 1;
  DECL_ARG_TYPE (t) = ptr_type_node;
  DECL_CONTEXT (t) = current_function_decl;
  TREE_USED (t) = 1;
  TREE_READONLY (t) = 1;
  DECL_ARGUMENTS (decl) = t;
  if (!task_copy)
    ctx->receiver_decl = t;
  else
    {
      /* The task copy function additionally receives the outgoing
	 data block, chained in front of .omp_data_i.  */
      t = build_decl (DECL_SOURCE_LOCATION (decl),
		      PARM_DECL, get_identifier (".omp_data_o"),
		      ptr_type_node);
      DECL_ARTIFICIAL (t) = 1;
      DECL_NAMELESS (t) = 1;
      DECL_ARG_TYPE (t) = ptr_type_node;
      DECL_CONTEXT (t) = current_function_decl;
      TREE_USED (t) = 1;
      TREE_ADDRESSABLE (t) = 1;
      DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
      DECL_ARGUMENTS (decl) = t;
    }

  /* Allocate memory for the function structure.  The call to
     allocate_struct_function clobbers CFUN, so we need to restore
     it afterward.  */
  push_struct_function (decl);
  cfun->function_end_locus = gimple_location (ctx->stmt);
  init_tree_ssa (cfun);
  pop_cfun ();
}
1884 
1885 /* Callback for walk_gimple_seq.  Check if combined parallel
1886    contains gimple_omp_for_combined_into_p OMP_FOR.  */
1887 
tree
omp_find_combined_for (gimple_stmt_iterator *gsi_p,
		       bool *handled_ops_p,
		       struct walk_stmt_info *wi)
{
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;
  switch (gimple_code (stmt))
    {
    WALK_SUBSTMTS;

    case GIMPLE_OMP_FOR:
      /* On entry WI->INFO points at the gf_mask loop kind being searched
	 for; on a match it is overwritten with the found statement, and a
	 non-NULL return value terminates the walk.  */
      if (gimple_omp_for_combined_into_p (stmt)
	  && gimple_omp_for_kind (stmt)
	     == *(const enum gf_mask *) (wi->info))
	{
	  wi->info = stmt;
	  return integer_zero_node;
	}
      break;
    default:
      break;
    }
  return NULL;
}
1914 
1915 /* Add _LOOPTEMP_/_REDUCTEMP_ clauses on OpenMP parallel or task.  */
1916 
static void
add_taskreg_looptemp_clauses (enum gf_mask msk, gimple *stmt,
			      omp_context *outer_ctx)
{
  struct walk_stmt_info wi;

  /* Search the body for a combined-into GIMPLE_OMP_FOR of kind MSK;
     omp_find_combined_for replaces WI.INFO with the statement when one
     is found, so WI.INFO != &MSK signals success.  */
  memset (&wi, 0, sizeof (wi));
  wi.val_only = true;
  wi.info = (void *) &msk;
  walk_gimple_seq (gimple_omp_body (stmt), omp_find_combined_for, NULL, &wi);
  if (wi.info != (void *) &msk)
    {
      gomp_for *for_stmt = as_a <gomp_for *> ((gimple *) wi.info);
      struct omp_for_data fd;
      omp_extract_for_data (for_stmt, &fd, NULL);
      /* We need two temporaries with fd.loop.v type (istart/iend)
	 and then (fd.collapse - 1) temporaries with the same
	 type for count2 ... countN-1 vars if not constant.  */
      size_t count = 2, i;
      tree type = fd.iter_type;
      if (fd.collapse > 1
	  && TREE_CODE (fd.loop.n2) != INTEGER_CST)
	{
	  count += fd.collapse - 1;
	  /* If there are lastprivate clauses on the inner
	     GIMPLE_OMP_FOR, add one more temporaries for the total number
	     of iterations (product of count1 ... countN-1).  */
	  if (omp_find_clause (gimple_omp_for_clauses (for_stmt),
			       OMP_CLAUSE_LASTPRIVATE))
	    count++;
	  else if (msk == GF_OMP_FOR_KIND_FOR
		   && omp_find_clause (gimple_omp_parallel_clauses (stmt),
				       OMP_CLAUSE_LASTPRIVATE))
	    count++;
	}
      /* Prepend one _looptemp_ clause per temporary to STMT.  */
      for (i = 0; i < count; i++)
	{
	  tree temp = create_tmp_var (type);
	  tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
	  insert_decl_map (&outer_ctx->cb, temp, temp);
	  OMP_CLAUSE_DECL (c) = temp;
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
	  gimple_omp_taskreg_set_clauses (stmt, c);
	}
    }
  /* Taskloops with a reduction clause additionally need one
     _reductemp_ temporary.  */
  if (msk == GF_OMP_FOR_KIND_TASKLOOP
      && omp_find_clause (gimple_omp_task_clauses (stmt),
			  OMP_CLAUSE_REDUCTION))
    {
      tree type = build_pointer_type (pointer_sized_int_node);
      tree temp = create_tmp_var (type);
      tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
      insert_decl_map (&outer_ctx->cb, temp, temp);
      OMP_CLAUSE_DECL (c) = temp;
      OMP_CLAUSE_CHAIN (c) = gimple_omp_task_clauses (stmt);
      gimple_omp_task_set_clauses (stmt, c);
    }
}
1975 
1976 /* Scan an OpenMP parallel directive.  */
1977 
static void
scan_omp_parallel (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
{
  omp_context *ctx;
  tree name;
  gomp_parallel *stmt = as_a <gomp_parallel *> (gsi_stmt (*gsi));

  /* Ignore parallel directives with empty bodies, unless there
     are copyin clauses.  */
  if (optimize > 0
      && empty_body_p (gimple_omp_body (stmt))
      && omp_find_clause (gimple_omp_parallel_clauses (stmt),
			  OMP_CLAUSE_COPYIN) == NULL)
    {
      gsi_replace (gsi, gimple_build_nop (), false);
      return;
    }

  if (gimple_omp_parallel_combined_p (stmt))
    add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_FOR, stmt, outer_ctx);
  /* If any reduction clause is a task reduction, prepend one
     _reductemp_ clause carrying a fresh temporary; only one is
     needed, hence the break after adding it.  */
  for (tree c = omp_find_clause (gimple_omp_parallel_clauses (stmt),
				 OMP_CLAUSE_REDUCTION);
       c; c = omp_find_clause (OMP_CLAUSE_CHAIN (c), OMP_CLAUSE_REDUCTION))
    if (OMP_CLAUSE_REDUCTION_TASK (c))
      {
	tree type = build_pointer_type (pointer_sized_int_node);
	tree temp = create_tmp_var (type);
	tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
	if (outer_ctx)
	  insert_decl_map (&outer_ctx->cb, temp, temp);
	OMP_CLAUSE_DECL (c) = temp;
	OMP_CLAUSE_CHAIN (c) = gimple_omp_parallel_clauses (stmt);
	gimple_omp_parallel_set_clauses (stmt, c);
	break;
      }
    else if (OMP_CLAUSE_CHAIN (c) == NULL_TREE)
      break;

  ctx = new_omp_context (stmt, outer_ctx);
  taskreg_contexts.safe_push (ctx);
  if (taskreg_nesting_level > 1)
    ctx->is_nested = true;
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  /* Build the artificial .omp_data_s record type that will hold the
     data to be passed to the child function.  */
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  name = create_tmp_var_name (".omp_data_s");
  name = build_decl (gimple_location (stmt),
		     TYPE_DECL, name, ctx->record_type);
  DECL_ARTIFICIAL (name) = 1;
  DECL_NAMELESS (name) = 1;
  TYPE_NAME (ctx->record_type) = name;
  TYPE_ARTIFICIAL (ctx->record_type) = 1;
  if (!gimple_omp_parallel_grid_phony (stmt))
    {
      create_omp_child_function (ctx, false);
      gimple_omp_parallel_set_child_fn (stmt, ctx->cb.dst_fn);
    }

  scan_sharing_clauses (gimple_omp_parallel_clauses (stmt), ctx);
  scan_omp (gimple_omp_body_ptr (stmt), ctx);

  /* If scanning added no fields, no data block is needed at all.  */
  if (TYPE_FIELDS (ctx->record_type) == NULL)
    ctx->record_type = ctx->receiver_decl = NULL;
}
2041 
2042 /* Scan an OpenMP task directive.  */
2043 
static void
scan_omp_task (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
{
  omp_context *ctx;
  tree name, t;
  gomp_task *stmt = as_a <gomp_task *> (gsi_stmt (*gsi));

  /* Ignore task directives with empty bodies, unless they have depend
     clause.  */
  if (optimize > 0
      && gimple_omp_body (stmt)
      && empty_body_p (gimple_omp_body (stmt))
      && !omp_find_clause (gimple_omp_task_clauses (stmt), OMP_CLAUSE_DEPEND))
    {
      gsi_replace (gsi, gimple_build_nop (), false);
      return;
    }

  if (gimple_omp_task_taskloop_p (stmt))
    add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_TASKLOOP, stmt, outer_ctx);

  ctx = new_omp_context (stmt, outer_ctx);

  /* A taskwait needs only its clauses scanned; it has no data block
     or child function.  */
  if (gimple_omp_task_taskwait_p (stmt))
    {
      scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);
      return;
    }

  taskreg_contexts.safe_push (ctx);
  if (taskreg_nesting_level > 1)
    ctx->is_nested = true;
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  /* Build the artificial .omp_data_s record type for the task's
     data block, and the child function that receives it.  */
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  name = create_tmp_var_name (".omp_data_s");
  name = build_decl (gimple_location (stmt),
		     TYPE_DECL, name, ctx->record_type);
  DECL_ARTIFICIAL (name) = 1;
  DECL_NAMELESS (name) = 1;
  TYPE_NAME (ctx->record_type) = name;
  TYPE_ARTIFICIAL (ctx->record_type) = 1;
  create_omp_child_function (ctx, false);
  gimple_omp_task_set_child_fn (stmt, ctx->cb.dst_fn);

  scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);

  if (ctx->srecord_type)
    {
      /* A sender-side record was created while scanning clauses;
	 name it and build the task copy function for it.  */
      name = create_tmp_var_name (".omp_data_a");
      name = build_decl (gimple_location (stmt),
			 TYPE_DECL, name, ctx->srecord_type);
      DECL_ARTIFICIAL (name) = 1;
      DECL_NAMELESS (name) = 1;
      TYPE_NAME (ctx->srecord_type) = name;
      TYPE_ARTIFICIAL (ctx->srecord_type) = 1;
      create_omp_child_function (ctx, true);
    }

  scan_omp (gimple_omp_body_ptr (stmt), ctx);

  if (TYPE_FIELDS (ctx->record_type) == NULL)
    {
      /* Empty data block: drop the record and record size 0,
	 alignment 1 on the task statement.  */
      ctx->record_type = ctx->receiver_decl = NULL;
      t = build_int_cst (long_integer_type_node, 0);
      gimple_omp_task_set_arg_size (stmt, t);
      t = build_int_cst (long_integer_type_node, 1);
      gimple_omp_task_set_arg_align (stmt, t);
    }
}
2113 
2114 /* Helper function for finish_taskreg_scan, called through walk_tree.
2115    If maybe_lookup_decl_in_outer_context returns non-NULL for some
2116    tree, replace it in the expression.  */
2117 
2118 static tree
finish_taskreg_remap(tree * tp,int * walk_subtrees,void * data)2119 finish_taskreg_remap (tree *tp, int *walk_subtrees, void *data)
2120 {
2121   if (VAR_P (*tp))
2122     {
2123       omp_context *ctx = (omp_context *) data;
2124       tree t = maybe_lookup_decl_in_outer_ctx (*tp, ctx);
2125       if (t != *tp)
2126 	{
2127 	  if (DECL_HAS_VALUE_EXPR_P (t))
2128 	    t = unshare_expr (DECL_VALUE_EXPR (t));
2129 	  *tp = t;
2130 	}
2131       *walk_subtrees = 0;
2132     }
2133   else if (IS_TYPE_OR_DECL_P (*tp))
2134     *walk_subtrees = 0;
2135   return NULL_TREE;
2136 }
2137 
2138 /* If any decls have been made addressable during scan_omp,
2139    adjust their fields if needed, and layout record types
2140    of parallel/task constructs.  */
2141 
static void
finish_taskreg_scan (omp_context *ctx)
{
  if (ctx->record_type == NULL_TREE)
    return;

  /* If any task_shared_vars were needed, verify all
     OMP_CLAUSE_SHARED clauses on GIMPLE_OMP_{PARALLEL,TASK,TEAMS}
     statements if use_pointer_for_field hasn't changed
     because of that.  If it did, update field types now.  */
  if (task_shared_vars)
    {
      tree c;

      for (c = gimple_omp_taskreg_clauses (ctx->stmt);
	   c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
	    && !OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	  {
	    tree decl = OMP_CLAUSE_DECL (c);

	    /* Global variables don't need to be copied,
	       the receiver side will use them directly.  */
	    if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
	      continue;
	    if (!bitmap_bit_p (task_shared_vars, DECL_UID (decl))
		|| !use_pointer_for_field (decl, ctx))
	      continue;
	    tree field = lookup_field (decl, ctx);
	    /* Field is already a pointer to the decl's type: done.  */
	    if (TREE_CODE (TREE_TYPE (field)) == POINTER_TYPE
		&& TREE_TYPE (TREE_TYPE (field)) == TREE_TYPE (decl))
	      continue;
	    /* Turn the by-value field into a pointer field, resetting
	       the attributes the old type may have imposed.  */
	    TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
	    TREE_THIS_VOLATILE (field) = 0;
	    DECL_USER_ALIGN (field) = 0;
	    SET_DECL_ALIGN (field, TYPE_ALIGN (TREE_TYPE (field)));
	    if (TYPE_ALIGN (ctx->record_type) < DECL_ALIGN (field))
	      SET_TYPE_ALIGN (ctx->record_type, DECL_ALIGN (field));
	    if (ctx->srecord_type)
	      {
		/* Keep the sender record's corresponding field in sync.  */
		tree sfield = lookup_sfield (decl, ctx);
		TREE_TYPE (sfield) = TREE_TYPE (field);
		TREE_THIS_VOLATILE (sfield) = 0;
		DECL_USER_ALIGN (sfield) = 0;
		SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
		if (TYPE_ALIGN (ctx->srecord_type) < DECL_ALIGN (sfield))
		  SET_TYPE_ALIGN (ctx->srecord_type, DECL_ALIGN (sfield));
	      }
	  }
    }

  if (gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL)
    {
      tree clauses = gimple_omp_parallel_clauses (ctx->stmt);
      tree c = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
      if (c)
	{
	  /* Move the _reductemp_ clause first.  GOMP_parallel_reductions
	     expects to find it at the start of data.  */
	  tree f = lookup_field (OMP_CLAUSE_DECL (c), ctx);
	  tree *p = &TYPE_FIELDS (ctx->record_type);
	  while (*p)
	    if (*p == f)
	      {
		*p = DECL_CHAIN (*p);
		break;
	      }
	    else
	      p = &DECL_CHAIN (*p);
	  DECL_CHAIN (f) = TYPE_FIELDS (ctx->record_type);
	  TYPE_FIELDS (ctx->record_type) = f;
	}
      layout_type (ctx->record_type);
      fixup_child_record_type (ctx);
    }
  else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
    {
      layout_type (ctx->record_type);
      fixup_child_record_type (ctx);
    }
  else
    {
      /* Remaining case: a task construct (gimple_omp_task_* accessors
	 are used below).  */
      location_t loc = gimple_location (ctx->stmt);
      tree *p, vla_fields = NULL_TREE, *q = &vla_fields;
      /* Move VLA fields to the end.  */
      p = &TYPE_FIELDS (ctx->record_type);
      while (*p)
	if (!TYPE_SIZE_UNIT (TREE_TYPE (*p))
	    || ! TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (*p))))
	  {
	    *q = *p;
	    *p = TREE_CHAIN (*p);
	    TREE_CHAIN (*q) = NULL_TREE;
	    q = &TREE_CHAIN (*q);
	  }
	else
	  p = &DECL_CHAIN (*p);
      *p = vla_fields;
      if (gimple_omp_task_taskloop_p (ctx->stmt))
	{
	  /* Move fields corresponding to first and second _looptemp_
	     clause first.  There are filled by GOMP_taskloop
	     and thus need to be in specific positions.  */
	  tree clauses = gimple_omp_task_clauses (ctx->stmt);
	  tree c1 = omp_find_clause (clauses, OMP_CLAUSE__LOOPTEMP_);
	  tree c2 = omp_find_clause (OMP_CLAUSE_CHAIN (c1),
				     OMP_CLAUSE__LOOPTEMP_);
	  tree c3 = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
	  tree f1 = lookup_field (OMP_CLAUSE_DECL (c1), ctx);
	  tree f2 = lookup_field (OMP_CLAUSE_DECL (c2), ctx);
	  tree f3 = c3 ? lookup_field (OMP_CLAUSE_DECL (c3), ctx) : NULL_TREE;
	  /* Unlink f1/f2 (and f3 if present) from the field chain, then
	     re-chain them at the front in order f1, f2[, f3].  */
	  p = &TYPE_FIELDS (ctx->record_type);
	  while (*p)
	    if (*p == f1 || *p == f2 || *p == f3)
	      *p = DECL_CHAIN (*p);
	    else
	      p = &DECL_CHAIN (*p);
	  DECL_CHAIN (f1) = f2;
	  if (c3)
	    {
	      DECL_CHAIN (f2) = f3;
	      DECL_CHAIN (f3) = TYPE_FIELDS (ctx->record_type);
	    }
	  else
	    DECL_CHAIN (f2) = TYPE_FIELDS (ctx->record_type);
	  TYPE_FIELDS (ctx->record_type) = f1;
	  if (ctx->srecord_type)
	    {
	      /* Mirror the same reordering in the sender record type.  */
	      f1 = lookup_sfield (OMP_CLAUSE_DECL (c1), ctx);
	      f2 = lookup_sfield (OMP_CLAUSE_DECL (c2), ctx);
	      if (c3)
		f3 = lookup_sfield (OMP_CLAUSE_DECL (c3), ctx);
	      p = &TYPE_FIELDS (ctx->srecord_type);
	      while (*p)
		if (*p == f1 || *p == f2 || *p == f3)
		  *p = DECL_CHAIN (*p);
		else
		  p = &DECL_CHAIN (*p);
	      DECL_CHAIN (f1) = f2;
	      DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
	      if (c3)
		{
		  DECL_CHAIN (f2) = f3;
		  DECL_CHAIN (f3) = TYPE_FIELDS (ctx->srecord_type);
		}
	      else
		DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
	      TYPE_FIELDS (ctx->srecord_type) = f1;
	    }
	}
      layout_type (ctx->record_type);
      fixup_child_record_type (ctx);
      if (ctx->srecord_type)
	layout_type (ctx->srecord_type);
      /* Record the data block's size and alignment on the task stmt;
	 a non-constant size (VLAs) is remapped to outer-context decls.  */
      tree t = fold_convert_loc (loc, long_integer_type_node,
				 TYPE_SIZE_UNIT (ctx->record_type));
      if (TREE_CODE (t) != INTEGER_CST)
	{
	  t = unshare_expr (t);
	  walk_tree (&t, finish_taskreg_remap, ctx, NULL);
	}
      gimple_omp_task_set_arg_size (ctx->stmt, t);
      t = build_int_cst (long_integer_type_node,
			 TYPE_ALIGN_UNIT (ctx->record_type));
      gimple_omp_task_set_arg_align (ctx->stmt, t);
    }
}
2309 
2310 /* Find the enclosing offload context.  */
2311 
2312 static omp_context *
enclosing_target_ctx(omp_context * ctx)2313 enclosing_target_ctx (omp_context *ctx)
2314 {
2315   for (; ctx; ctx = ctx->outer)
2316     if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET)
2317       break;
2318 
2319   return ctx;
2320 }
2321 
2322 /* Return true if ctx is part of an oacc kernels region.  */
2323 
2324 static bool
ctx_in_oacc_kernels_region(omp_context * ctx)2325 ctx_in_oacc_kernels_region (omp_context *ctx)
2326 {
2327   for (;ctx != NULL; ctx = ctx->outer)
2328     {
2329       gimple *stmt = ctx->stmt;
2330       if (gimple_code (stmt) == GIMPLE_OMP_TARGET
2331 	  && gimple_omp_target_kind (stmt) == GF_OMP_TARGET_KIND_OACC_KERNELS)
2332 	return true;
2333     }
2334 
2335   return false;
2336 }
2337 
2338 /* Check the parallelism clauses inside a kernels regions.
2339    Until kernels handling moves to use the same loop indirection
2340    scheme as parallel, we need to do this checking early.  */
2341 
static unsigned
check_oacc_kernel_gwv (gomp_for *stmt, omp_context *ctx)
{
  bool checking = true;
  unsigned outer_mask = 0;
  unsigned this_mask = 0;
  bool has_seq = false, has_auto = false;

  /* First gather the masks of all enclosing contexts.  Recursive calls
     pass STMT == NULL, which turns diagnostics off (CHECKING = false)
     and makes non-loop contexts contribute nothing.  */
  if (ctx->outer)
    outer_mask = check_oacc_kernel_gwv (NULL,  ctx->outer);
  if (!stmt)
    {
      checking = false;
      if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR)
	return outer_mask;
      stmt = as_a <gomp_for *> (ctx->stmt);
    }

  /* Collect this loop's gang/worker/vector mask and note seq/auto.  */
  for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
    {
      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_GANG:
	  this_mask |= GOMP_DIM_MASK (GOMP_DIM_GANG);
	  break;
	case OMP_CLAUSE_WORKER:
	  this_mask |= GOMP_DIM_MASK (GOMP_DIM_WORKER);
	  break;
	case OMP_CLAUSE_VECTOR:
	  this_mask |= GOMP_DIM_MASK (GOMP_DIM_VECTOR);
	  break;
	case OMP_CLAUSE_SEQ:
	  has_seq = true;
	  break;
	case OMP_CLAUSE_AUTO:
	  has_auto = true;
	  break;
	default:
	  break;
	}
    }

  if (checking)
    {
      /* seq excludes everything else; auto excludes explicit g/w/v.  */
      if (has_seq && (this_mask || has_auto))
	error_at (gimple_location (stmt), "%<seq%> overrides other"
		  " OpenACC loop specifiers");
      else if (has_auto && this_mask)
	error_at (gimple_location (stmt), "%<auto%> conflicts with other"
		  " OpenACC loop specifiers");

      /* A loop may not reuse a parallelism level already claimed by an
	 enclosing loop.  */
      if (this_mask & outer_mask)
	error_at (gimple_location (stmt), "inner loop uses same"
		  " OpenACC parallelism as containing loop");
    }

  return outer_mask | this_mask;
}
2400 
2401 /* Scan a GIMPLE_OMP_FOR.  */
2402 
static omp_context *
scan_omp_for (gomp_for *stmt, omp_context *outer_ctx)
{
  omp_context *ctx;
  size_t i;
  tree clauses = gimple_omp_for_clauses (stmt);

  ctx = new_omp_context (stmt, outer_ctx);

  if (is_gimple_omp_oacc (stmt))
    {
      omp_context *tgt = enclosing_target_ctx (outer_ctx);

      /* Outside of OpenACC kernels regions, gang/worker/vector clauses
	 must not carry an argument expression; diagnose any that do.  */
      if (!(tgt && is_oacc_kernels (tgt)))
	for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
	  {
	    tree c_op0;
	    switch (OMP_CLAUSE_CODE (c))
	      {
	      case OMP_CLAUSE_GANG:
		c_op0 = OMP_CLAUSE_GANG_EXPR (c);
		break;

	      case OMP_CLAUSE_WORKER:
		c_op0 = OMP_CLAUSE_WORKER_EXPR (c);
		break;

	      case OMP_CLAUSE_VECTOR:
		c_op0 = OMP_CLAUSE_VECTOR_EXPR (c);
		break;

	      default:
		continue;
	      }

	    if (c_op0)
	      {
		error_at (OMP_CLAUSE_LOCATION (c),
			  "argument not permitted on %qs clause",
			  omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
		if (tgt)
		  inform (gimple_location (tgt->stmt),
			  "enclosing parent compute construct");
		else if (oacc_get_fn_attrib (current_function_decl))
		  inform (DECL_SOURCE_LOCATION (current_function_decl),
			  "enclosing routine");
		else
		  gcc_unreachable ();
	      }
	  }

      if (tgt && is_oacc_kernels (tgt))
	check_oacc_kernel_gwv (stmt, ctx);

      /* Collect all variables named in reductions on this loop.  Ensure
	 that, if this loop has a reduction on some variable v, and there is
	 a reduction on v somewhere in an outer context, then there is a
	 reduction on v on all intervening loops as well.  */
      tree local_reduction_clauses = NULL;
      for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
	{
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
	    local_reduction_clauses
	      = tree_cons (NULL, c, local_reduction_clauses)2;
	}
      if (ctx->outer_reduction_clauses == NULL && ctx->outer != NULL)
	ctx->outer_reduction_clauses
	  = chainon (unshare_expr (ctx->outer->local_reduction_clauses),
		     ctx->outer->outer_reduction_clauses);
      tree outer_reduction_clauses = ctx->outer_reduction_clauses;
      tree local_iter = local_reduction_clauses;
      for (; local_iter; local_iter = TREE_CHAIN (local_iter))
	{
	  tree local_clause = TREE_VALUE (local_iter);
	  tree local_var = OMP_CLAUSE_DECL (local_clause);
	  tree_code local_op = OMP_CLAUSE_REDUCTION_CODE (local_clause);
	  bool have_outer_reduction = false;
	  tree ctx_iter = outer_reduction_clauses;
	  for (; ctx_iter; ctx_iter = TREE_CHAIN (ctx_iter))
	    {
	      tree outer_clause = TREE_VALUE (ctx_iter);
	      tree outer_var = OMP_CLAUSE_DECL (outer_clause);
	      tree_code outer_op = OMP_CLAUSE_REDUCTION_CODE (outer_clause);
	      /* Same variable reduced with a different operation in an
		 outer loop: warn, but keep going.  */
	      if (outer_var == local_var && outer_op != local_op)
		{
		  warning_at (OMP_CLAUSE_LOCATION (local_clause), 0,
			      "conflicting reduction operations for %qE",
			      local_var);
		  inform (OMP_CLAUSE_LOCATION (outer_clause),
			  "location of the previous reduction for %qE",
			  outer_var);
		}
	      if (outer_var == local_var)
		{
		  have_outer_reduction = true;
		  break;
		}
	    }
	  if (have_outer_reduction)
	    {
	      /* There is a reduction on outer_var both on this loop and on
		 some enclosing loop.  Walk up the context tree until such a
		 loop with a reduction on outer_var is found, and complain
		 about all intervening loops that do not have such a
		 reduction.  */
	      struct omp_context *curr_loop = ctx->outer;
	      bool found = false;
	      while (curr_loop != NULL)
		{
		  tree curr_iter = curr_loop->local_reduction_clauses;
		  for (; curr_iter; curr_iter = TREE_CHAIN (curr_iter))
		    {
		      tree curr_clause = TREE_VALUE (curr_iter);
		      tree curr_var = OMP_CLAUSE_DECL (curr_clause);
		      if (curr_var == local_var)
			{
			  found = true;
			  break;
			}
		    }
		  if (!found)
		    warning_at (gimple_location (curr_loop->stmt), 0,
				"nested loop in reduction needs "
				"reduction clause for %qE",
				local_var);
		  else
		    break;
		  curr_loop = curr_loop->outer;
		}
	    }
	}
      /* Publish this loop's reduction clauses for inner loops.  */
      ctx->local_reduction_clauses = local_reduction_clauses;
      ctx->outer_reduction_clauses
	= chainon (unshare_expr (ctx->local_reduction_clauses),
		   ctx->outer_reduction_clauses);

      if (tgt && is_oacc_kernels (tgt))
	{
	  /* Strip out reductions, as they are not handled yet.  */
	  tree *prev_ptr = &clauses;

	  while (tree probe = *prev_ptr)
	    {
	      tree *next_ptr = &OMP_CLAUSE_CHAIN (probe);

	      if (OMP_CLAUSE_CODE (probe) == OMP_CLAUSE_REDUCTION)
		*prev_ptr = *next_ptr;
	      else
		prev_ptr = next_ptr;
	    }

	  gimple_omp_for_set_clauses (stmt, clauses);
	}
    }

  scan_sharing_clauses (clauses, ctx);

  /* Scan the pre-body and each collapsed dimension's control
     expressions, then the loop body.  */
  scan_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
  for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
    {
      scan_omp_op (gimple_omp_for_index_ptr (stmt, i), ctx);
      scan_omp_op (gimple_omp_for_initial_ptr (stmt, i), ctx);
      scan_omp_op (gimple_omp_for_final_ptr (stmt, i), ctx);
      scan_omp_op (gimple_omp_for_incr_ptr (stmt, i), ctx);
    }
  scan_omp (gimple_omp_body_ptr (stmt), ctx);
  return ctx;
}
2571 
2572 /* Duplicate #pragma omp simd, one for SIMT, another one for SIMD.  */
2573 
static void
scan_omp_simd (gimple_stmt_iterator *gsi, gomp_for *stmt,
	       omp_context *outer_ctx)
{
  /* Replace the simd loop with a GIMPLE_BIND of the shape:
       cond = IFN_GOMP_USE_SIMT ();
       if (cond != 0) goto lab1; else goto lab2;
     lab1: <copy of the loop, with a _simt_ clause added>; goto lab3;
     lab2: <the original loop>;
     lab3:  */
  gbind *bind = gimple_build_bind (NULL, NULL, NULL);
  gsi_replace (gsi, bind, false);
  gimple_seq seq = NULL;
  gimple *g = gimple_build_call_internal (IFN_GOMP_USE_SIMT, 0);
  tree cond = create_tmp_var_raw (integer_type_node);
  DECL_CONTEXT (cond) = current_function_decl;
  DECL_SEEN_IN_BIND_EXPR_P (cond) = 1;
  gimple_bind_set_vars (bind, cond);
  gimple_call_set_lhs (g, cond);
  gimple_seq_add_stmt (&seq, g);
  tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
  tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
  tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
  g = gimple_build_cond (NE_EXPR, cond, integer_zero_node, lab1, lab2);
  gimple_seq_add_stmt (&seq, g);
  g = gimple_build_label (lab1);
  gimple_seq_add_stmt (&seq, g);
  gimple_seq new_seq = copy_gimple_seq_and_replace_locals (stmt);
  gomp_for *new_stmt = as_a <gomp_for *> (new_seq);
  /* Mark the copy as the SIMT variant with an artificial _simt_ clause.  */
  tree clause = build_omp_clause (gimple_location (stmt), OMP_CLAUSE__SIMT_);
  OMP_CLAUSE_CHAIN (clause) = gimple_omp_for_clauses (new_stmt);
  gimple_omp_for_set_clauses (new_stmt, clause);
  gimple_seq_add_stmt (&seq, new_stmt);
  g = gimple_build_goto (lab3);
  gimple_seq_add_stmt (&seq, g);
  g = gimple_build_label (lab2);
  gimple_seq_add_stmt (&seq, g);
  gimple_seq_add_stmt (&seq, stmt);
  g = gimple_build_label (lab3);
  gimple_seq_add_stmt (&seq, g);
  gimple_bind_set_body (bind, seq);
  update_stmt (bind);
  /* Scan both variants; remember the SIMT twin on the SIMD context.  */
  scan_omp_for (new_stmt, outer_ctx);
  scan_omp_for (stmt, outer_ctx)->simt_stmt = new_stmt;
}
2613 
2614 static tree omp_find_scan (gimple_stmt_iterator *, bool *,
2615 			   struct walk_stmt_info *);
2616 static omp_context *maybe_lookup_ctx (gimple *);
2617 
2618 /* Duplicate #pragma omp simd, one for the scan input phase loop and one
2619    for scan phase loop.  */
2620 
static void
scan_omp_simd_scan (gimple_stmt_iterator *gsi, gomp_for *stmt,
		    omp_context *outer_ctx)
{
  /* The only change between inclusive and exclusive scan will be
     within the first simd loop, so just use inclusive in the
     worksharing loop.  */
  outer_ctx->scan_inclusive = true;
  tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_INCLUSIVE);
  OMP_CLAUSE_DECL (c) = integer_zero_node;

  /* Wrap the original loop in a GIMPLE_OMP_SCAN (the input phase) and
     insert a second GIMPLE_OMP_SCAN right after it for the scan phase.  */
  gomp_scan *input_stmt = gimple_build_omp_scan (NULL, NULL_TREE);
  gomp_scan *scan_stmt = gimple_build_omp_scan (NULL, c);
  gsi_replace (gsi, input_stmt, false);
  gimple_seq input_body = NULL;
  gimple_seq_add_stmt (&input_body, stmt);
  gsi_insert_after (gsi, scan_stmt, GSI_NEW_STMT);

  /* Locate the inner GIMPLE_OMP_SCAN separator inside the loop body.  */
  gimple_stmt_iterator input1_gsi = gsi_none ();
  struct walk_stmt_info wi;
  memset (&wi, 0, sizeof (wi));
  wi.val_only = true;
  wi.info = (void *) &input1_gsi;
  walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), omp_find_scan, NULL, &wi);
  gcc_assert (!gsi_end_p (input1_gsi));

  gimple *input_stmt1 = gsi_stmt (input1_gsi);
  gsi_next (&input1_gsi);
  gimple *scan_stmt1 = gsi_stmt (input1_gsi);
  gcc_assert (scan_stmt1 && gimple_code (scan_stmt1) == GIMPLE_OMP_SCAN);
  c = gimple_omp_scan_clauses (as_a <gomp_scan *> (scan_stmt1));
  /* For an exclusive scan the two halves trade roles.  */
  if (c && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_EXCLUSIVE)
    std::swap (input_stmt1, scan_stmt1);

  /* Detach the input half's body while copying the loop for the scan
     phase, then restore it and clear the scan half instead.  */
  gimple_seq input_body1 = gimple_omp_body (input_stmt1);
  gimple_omp_set_body (input_stmt1, NULL);

  gimple_seq scan_body = copy_gimple_seq_and_replace_locals (stmt);
  gomp_for *new_stmt = as_a <gomp_for *> (scan_body);

  gimple_omp_set_body (input_stmt1, input_body1);
  gimple_omp_set_body (scan_stmt1, NULL);

  /* Find the separator again, this time inside the copied loop.  */
  gimple_stmt_iterator input2_gsi = gsi_none ();
  memset (&wi, 0, sizeof (wi));
  wi.val_only = true;
  wi.info = (void *) &input2_gsi;
  walk_gimple_seq_mod (gimple_omp_body_ptr (new_stmt), omp_find_scan,
		       NULL, &wi);
  gcc_assert (!gsi_end_p (input2_gsi));

  gimple *input_stmt2 = gsi_stmt (input2_gsi);
  gsi_next (&input2_gsi);
  gimple *scan_stmt2 = gsi_stmt (input2_gsi);
  gcc_assert (scan_stmt2 && gimple_code (scan_stmt2) == GIMPLE_OMP_SCAN);
  if (c && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_EXCLUSIVE)
    std::swap (input_stmt2, scan_stmt2);

  /* The copy keeps only its scan half; drop the input half's body.  */
  gimple_omp_set_body (input_stmt2, NULL);

  gimple_omp_set_body (input_stmt, input_body);
  gimple_omp_set_body (scan_stmt, scan_body);

  /* Scan both wrapper constructs in fresh contexts.  */
  omp_context *ctx = new_omp_context (input_stmt, outer_ctx);
  scan_omp (gimple_omp_body_ptr (input_stmt), ctx);

  ctx = new_omp_context (scan_stmt, outer_ctx);
  scan_omp (gimple_omp_body_ptr (scan_stmt), ctx);

  maybe_lookup_ctx (new_stmt)->for_simd_scan_phase = true;
}
2692 
2693 /* Scan an OpenMP sections directive.  */
2694 
2695 static void
scan_omp_sections(gomp_sections * stmt,omp_context * outer_ctx)2696 scan_omp_sections (gomp_sections *stmt, omp_context *outer_ctx)
2697 {
2698   omp_context *ctx;
2699 
2700   ctx = new_omp_context (stmt, outer_ctx);
2701   scan_sharing_clauses (gimple_omp_sections_clauses (stmt), ctx);
2702   scan_omp (gimple_omp_body_ptr (stmt), ctx);
2703 }
2704 
2705 /* Scan an OpenMP single directive.  */
2706 
2707 static void
scan_omp_single(gomp_single * stmt,omp_context * outer_ctx)2708 scan_omp_single (gomp_single *stmt, omp_context *outer_ctx)
2709 {
2710   omp_context *ctx;
2711   tree name;
2712 
2713   ctx = new_omp_context (stmt, outer_ctx);
2714   ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2715   ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2716   name = create_tmp_var_name (".omp_copy_s");
2717   name = build_decl (gimple_location (stmt),
2718 		     TYPE_DECL, name, ctx->record_type);
2719   TYPE_NAME (ctx->record_type) = name;
2720 
2721   scan_sharing_clauses (gimple_omp_single_clauses (stmt), ctx);
2722   scan_omp (gimple_omp_body_ptr (stmt), ctx);
2723 
2724   if (TYPE_FIELDS (ctx->record_type) == NULL)
2725     ctx->record_type = NULL;
2726   else
2727     layout_type (ctx->record_type);
2728 }
2729 
2730 /* Scan a GIMPLE_OMP_TARGET.  */
2731 
static void
scan_omp_target (gomp_target *stmt, omp_context *outer_ctx)
{
  omp_context *ctx;
  tree name;
  /* Offloaded kinds (target regions proper, OpenACC compute constructs)
     get an outlined child function; data/update kinds do not.  */
  bool offloaded = is_gimple_omp_offloaded (stmt);
  tree clauses = gimple_omp_target_clauses (stmt);

  ctx = new_omp_context (stmt, outer_ctx);
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  /* Record type that describes the data block marshalled to the device.  */
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  name = create_tmp_var_name (".omp_data_t");
  name = build_decl (gimple_location (stmt),
		     TYPE_DECL, name, ctx->record_type);
  DECL_ARTIFICIAL (name) = 1;
  DECL_NAMELESS (name) = 1;
  TYPE_NAME (ctx->record_type) = name;
  TYPE_ARTIFICIAL (ctx->record_type) = 1;

  if (offloaded)
    {
      create_omp_child_function (ctx, false);
      gimple_omp_target_set_child_fn (stmt, ctx->cb.dst_fn);
    }

  scan_sharing_clauses (clauses, ctx);
  scan_omp (gimple_omp_body_ptr (stmt), ctx);

  if (TYPE_FIELDS (ctx->record_type) == NULL)
    /* Nothing to marshal; drop the record and the receiver decl.  */
    ctx->record_type = ctx->receiver_decl = NULL;
  else
    {
      /* Fields were chained in reverse order of creation; restore
	 declaration order before layout.  */
      TYPE_FIELDS (ctx->record_type)
	= nreverse (TYPE_FIELDS (ctx->record_type));
      if (flag_checking)
	{
	  /* Verify every field was given the same alignment.  */
	  unsigned int align = DECL_ALIGN (TYPE_FIELDS (ctx->record_type));
	  for (tree field = TYPE_FIELDS (ctx->record_type);
	       field;
	       field = DECL_CHAIN (field))
	    gcc_assert (DECL_ALIGN (field) == align);
	}
      layout_type (ctx->record_type);
      if (offloaded)
	fixup_child_record_type (ctx);
    }
}
2779 
2780 /* Scan an OpenMP teams directive.  */
2781 
2782 static void
scan_omp_teams(gomp_teams * stmt,omp_context * outer_ctx)2783 scan_omp_teams (gomp_teams *stmt, omp_context *outer_ctx)
2784 {
2785   omp_context *ctx = new_omp_context (stmt, outer_ctx);
2786 
2787   if (!gimple_omp_teams_host (stmt))
2788     {
2789       scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
2790       scan_omp (gimple_omp_body_ptr (stmt), ctx);
2791       return;
2792     }
2793   taskreg_contexts.safe_push (ctx);
2794   gcc_assert (taskreg_nesting_level == 1);
2795   ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2796   ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2797   tree name = create_tmp_var_name (".omp_data_s");
2798   name = build_decl (gimple_location (stmt),
2799 		     TYPE_DECL, name, ctx->record_type);
2800   DECL_ARTIFICIAL (name) = 1;
2801   DECL_NAMELESS (name) = 1;
2802   TYPE_NAME (ctx->record_type) = name;
2803   TYPE_ARTIFICIAL (ctx->record_type) = 1;
2804   create_omp_child_function (ctx, false);
2805   gimple_omp_teams_set_child_fn (stmt, ctx->cb.dst_fn);
2806 
2807   scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
2808   scan_omp (gimple_omp_body_ptr (stmt), ctx);
2809 
2810   if (TYPE_FIELDS (ctx->record_type) == NULL)
2811     ctx->record_type = ctx->receiver_decl = NULL;
2812 }
2813 
/* Check the nesting restrictions that OpenMP/OpenACC impose on STMT,
   which (when CTX is non-NULL) appears inside the region described by
   CTX.  Emit a diagnostic and return false on a violation; return true
   when STMT is allowed in this position.  */
static bool
check_omp_nesting_restrictions (gimple *stmt, omp_context *ctx)
{
  tree c;

  if (ctx && gimple_code (ctx->stmt) == GIMPLE_OMP_GRID_BODY)
    /* GRID_BODY is an artificial construct, nesting rules will be checked in
       the original copy of its contents.  */
    return true;

  /* No nesting of non-OpenACC STMT (that is, an OpenMP one, or a GOMP builtin)
     inside an OpenACC CTX.  */
  if (!(is_gimple_omp (stmt)
	&& is_gimple_omp_oacc (stmt))
      /* Except for atomic codes that we share with OpenMP.  */
      && !(gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
	   || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE))
    {
      if (oacc_get_fn_attrib (cfun->decl) != NULL)
	{
	  error_at (gimple_location (stmt),
		    "non-OpenACC construct inside of OpenACC routine");
	  return false;
	}
      else
	for (omp_context *octx = ctx; octx != NULL; octx = octx->outer)
	  if (is_gimple_omp (octx->stmt)
	      && is_gimple_omp_oacc (octx->stmt))
	    {
	      error_at (gimple_location (stmt),
			"non-OpenACC construct inside of OpenACC region");
	      return false;
	    }
    }

  if (ctx != NULL)
    {
      /* For a scan directly nested in a worksharing loop, apply the
	 checks against the enclosing loop context instead.  */
      if (gimple_code (ctx->stmt) == GIMPLE_OMP_SCAN
	  && ctx->outer
	  && gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
	ctx = ctx->outer;
      if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
	  && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD
	  && !ctx->loop_p)
	{
	  c = NULL_TREE;
	  if (ctx->order_concurrent
	      && (gimple_code (stmt) == GIMPLE_OMP_ORDERED
		  || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
		  || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE))
	    {
	      error_at (gimple_location (stmt),
			"OpenMP constructs other than %<parallel%>, %<loop%>"
			" or %<simd%> may not be nested inside a region with"
			" the %<order(concurrent)%> clause");
	      return false;
	    }
	  if (gimple_code (stmt) == GIMPLE_OMP_ORDERED)
	    {
	      c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
	      if (omp_find_clause (c, OMP_CLAUSE_SIMD))
		{
		  /* ordered simd threads is only valid in a combined
		     for simd context.  */
		  if (omp_find_clause (c, OMP_CLAUSE_THREADS)
		      && (ctx->outer == NULL
			  || !gimple_omp_for_combined_into_p (ctx->stmt)
			  || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR
			  || (gimple_omp_for_kind (ctx->outer->stmt)
			      != GF_OMP_FOR_KIND_FOR)
			  || !gimple_omp_for_combined_p (ctx->outer->stmt)))
		    {
		      error_at (gimple_location (stmt),
				"%<ordered simd threads%> must be closely "
				"nested inside of %<for simd%> region");
		      return false;
		    }
		  return true;
		}
	    }
	  else if (gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
		   || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE
		   || gimple_code (stmt) == GIMPLE_OMP_SCAN)
	    return true;
	  else if (gimple_code (stmt) == GIMPLE_OMP_FOR
		   && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
	    return true;
	  error_at (gimple_location (stmt),
		    "OpenMP constructs other than "
		    "%<ordered simd%>, %<simd%>, %<loop%> or %<atomic%> may "
		    "not be nested inside %<simd%> region");
	  return false;
	}
      else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
	{
	  if ((gimple_code (stmt) != GIMPLE_OMP_FOR
	       || (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_DISTRIBUTE
		   && gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_GRID_LOOP
		   && omp_find_clause (gimple_omp_for_clauses (stmt),
				       OMP_CLAUSE_BIND) == NULL_TREE))
	      && gimple_code (stmt) != GIMPLE_OMP_PARALLEL)
	    {
	      error_at (gimple_location (stmt),
			"only %<distribute%>, %<parallel%> or %<loop%> "
			"regions are allowed to be strictly nested inside "
			"%<teams%> region");
	      return false;
	    }
	}
      else if (ctx->order_concurrent
	       && gimple_code (stmt) != GIMPLE_OMP_PARALLEL
	       && (gimple_code (stmt) != GIMPLE_OMP_FOR
		   || gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_SIMD)
	       && gimple_code (stmt) != GIMPLE_OMP_SCAN)
	{
	  if (ctx->loop_p)
	    error_at (gimple_location (stmt),
		      "OpenMP constructs other than %<parallel%>, %<loop%> or "
		      "%<simd%> may not be nested inside a %<loop%> region");
	  else
	    error_at (gimple_location (stmt),
		      "OpenMP constructs other than %<parallel%>, %<loop%> or "
		      "%<simd%> may not be nested inside a region with "
		      "the %<order(concurrent)%> clause");
	  return false;
	}
    }
  switch (gimple_code (stmt))
    {
    case GIMPLE_OMP_FOR:
      if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_SIMD)
	return true;
      if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_DISTRIBUTE)
	{
	  if (ctx != NULL && gimple_code (ctx->stmt) != GIMPLE_OMP_TEAMS)
	    {
	      error_at (gimple_location (stmt),
			"%<distribute%> region must be strictly nested "
			"inside %<teams%> construct");
	      return false;
	    }
	  return true;
	}
      /* We split taskloop into task and nested taskloop in it.  */
      if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP)
	return true;
      /* For now, hope this will change and loop bind(parallel) will not
	 be allowed in lots of contexts.  */
      if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR
	  && omp_find_clause (gimple_omp_for_clauses (stmt), OMP_CLAUSE_BIND))
	return true;
      if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_OACC_LOOP)
	{
	  bool ok = false;

	  if (ctx)
	    switch (gimple_code (ctx->stmt))
	      {
	      case GIMPLE_OMP_FOR:
		ok = (gimple_omp_for_kind (ctx->stmt)
		      == GF_OMP_FOR_KIND_OACC_LOOP);
		break;

	      case GIMPLE_OMP_TARGET:
		switch (gimple_omp_target_kind (ctx->stmt))
		  {
		  case GF_OMP_TARGET_KIND_OACC_PARALLEL:
		  case GF_OMP_TARGET_KIND_OACC_KERNELS:
		  case GF_OMP_TARGET_KIND_OACC_SERIAL:
		    ok = true;
		    break;

		  default:
		    break;
		  }
		/* FALLTHRU -- falls into the default, which just breaks.  */

	      default:
		break;
	      }
	  else if (oacc_get_fn_attrib (current_function_decl))
	    ok = true;
	  if (!ok)
	    {
	      error_at (gimple_location (stmt),
			"OpenACC loop directive must be associated with"
			" an OpenACC compute region");
	      return false;
	    }
	}
      /* FALLTHRU */
    case GIMPLE_CALL:
      /* NOTE(review): gimple_call_fndecl is dereferenced unguarded here;
	 presumably callers only pass calls with a builtin fndecl --
	 confirm against the call sites of this function.  */
      if (is_gimple_call (stmt)
	  && (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
	      == BUILT_IN_GOMP_CANCEL
	      || DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
		 == BUILT_IN_GOMP_CANCELLATION_POINT))
	{
	  const char *bad = NULL;
	  const char *kind = NULL;
	  const char *construct
	    = (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
	       == BUILT_IN_GOMP_CANCEL)
	      ? "cancel"
	      : "cancellation point";
	  if (ctx == NULL)
	    {
	      error_at (gimple_location (stmt), "orphaned %qs construct",
			construct);
	      return false;
	    }
	  /* The first call argument selects which construct kind is
	     being cancelled.  */
	  switch (tree_fits_shwi_p (gimple_call_arg (stmt, 0))
		  ? tree_to_shwi (gimple_call_arg (stmt, 0))
		  : 0)
	    {
	    case 1:
	      if (gimple_code (ctx->stmt) != GIMPLE_OMP_PARALLEL)
		bad = "parallel";
	      else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
		       == BUILT_IN_GOMP_CANCEL
		       && !integer_zerop (gimple_call_arg (stmt, 1)))
		ctx->cancellable = true;
	      kind = "parallel";
	      break;
	    case 2:
	      if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
		  || gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR)
		bad = "for";
	      else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
		       == BUILT_IN_GOMP_CANCEL
		       && !integer_zerop (gimple_call_arg (stmt, 1)))
		{
		  ctx->cancellable = true;
		  if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
				       OMP_CLAUSE_NOWAIT))
		    warning_at (gimple_location (stmt), 0,
				"%<cancel for%> inside "
				"%<nowait%> for construct");
		  if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
				       OMP_CLAUSE_ORDERED))
		    warning_at (gimple_location (stmt), 0,
				"%<cancel for%> inside "
				"%<ordered%> for construct");
		}
	      kind = "for";
	      break;
	    case 4:
	      if (gimple_code (ctx->stmt) != GIMPLE_OMP_SECTIONS
		  && gimple_code (ctx->stmt) != GIMPLE_OMP_SECTION)
		bad = "sections";
	      else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
		       == BUILT_IN_GOMP_CANCEL
		       && !integer_zerop (gimple_call_arg (stmt, 1)))
		{
		  if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
		    {
		      ctx->cancellable = true;
		      if (omp_find_clause (gimple_omp_sections_clauses
								(ctx->stmt),
					   OMP_CLAUSE_NOWAIT))
			warning_at (gimple_location (stmt), 0,
				    "%<cancel sections%> inside "
				    "%<nowait%> sections construct");
		    }
		  else
		    {
		      /* Inside a SECTION, the enclosing context must be
			 the SECTIONS region.  */
		      gcc_assert (ctx->outer
				  && gimple_code (ctx->outer->stmt)
				     == GIMPLE_OMP_SECTIONS);
		      ctx->outer->cancellable = true;
		      if (omp_find_clause (gimple_omp_sections_clauses
							(ctx->outer->stmt),
					   OMP_CLAUSE_NOWAIT))
			warning_at (gimple_location (stmt), 0,
				    "%<cancel sections%> inside "
				    "%<nowait%> sections construct");
		    }
		}
	      kind = "sections";
	      break;
	    case 8:
	      if (!is_task_ctx (ctx)
		  && (!is_taskloop_ctx (ctx)
		      || ctx->outer == NULL
		      || !is_task_ctx (ctx->outer)))
		bad = "task";
	      else
		{
		  /* Walk outwards looking for the taskgroup that would
		     receive the cancellation; stop with an error at any
		     binding region encountered first.  */
		  for (omp_context *octx = ctx->outer;
		       octx; octx = octx->outer)
		    {
		      switch (gimple_code (octx->stmt))
			{
			case GIMPLE_OMP_TASKGROUP:
			  break;
			case GIMPLE_OMP_TARGET:
			  if (gimple_omp_target_kind (octx->stmt)
			      != GF_OMP_TARGET_KIND_REGION)
			    continue;
			  /* FALLTHRU */
			case GIMPLE_OMP_PARALLEL:
			case GIMPLE_OMP_TEAMS:
			  error_at (gimple_location (stmt),
				    "%<%s taskgroup%> construct not closely "
				    "nested inside of %<taskgroup%> region",
				    construct);
			  return false;
			case GIMPLE_OMP_TASK:
			  if (gimple_omp_task_taskloop_p (octx->stmt)
			      && octx->outer
			      && is_taskloop_ctx (octx->outer))
			    {
			      tree clauses
				= gimple_omp_for_clauses (octx->outer->stmt);
			      if (!omp_find_clause (clauses, OMP_CLAUSE_NOGROUP))
				break;
			    }
			  continue;
			default:
			  continue;
			}
		      break;
		    }
		  ctx->cancellable = true;
		}
	      kind = "taskgroup";
	      break;
	    default:
	      error_at (gimple_location (stmt), "invalid arguments");
	      return false;
	    }
	  if (bad)
	    {
	      error_at (gimple_location (stmt),
			"%<%s %s%> construct not closely nested inside of %qs",
			construct, kind, bad);
	      return false;
	    }
	}
      /* FALLTHRU */
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
      for (; ctx != NULL; ctx = ctx->outer)
	switch (gimple_code (ctx->stmt))
	  {
	  case GIMPLE_OMP_FOR:
	    if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
		&& gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
	      break;
	    /* FALLTHRU */
	  case GIMPLE_OMP_SECTIONS:
	  case GIMPLE_OMP_SINGLE:
	  case GIMPLE_OMP_ORDERED:
	  case GIMPLE_OMP_MASTER:
	  case GIMPLE_OMP_TASK:
	  case GIMPLE_OMP_CRITICAL:
	    if (is_gimple_call (stmt))
	      {
		/* Of the GOMP builtins routed here, only barriers are
		   restricted.  */
		if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
		    != BUILT_IN_GOMP_BARRIER)
		  return true;
		error_at (gimple_location (stmt),
			  "barrier region may not be closely nested inside "
			  "of work-sharing, %<loop%>, %<critical%>, "
			  "%<ordered%>, %<master%>, explicit %<task%> or "
			  "%<taskloop%> region");
		return false;
	      }
	    error_at (gimple_location (stmt),
		      "work-sharing region may not be closely nested inside "
		      "of work-sharing, %<loop%>, %<critical%>, %<ordered%>, "
		      "%<master%>, explicit %<task%> or %<taskloop%> region");
	    return false;
	  case GIMPLE_OMP_PARALLEL:
	  case GIMPLE_OMP_TEAMS:
	    return true;
	  case GIMPLE_OMP_TARGET:
	    if (gimple_omp_target_kind (ctx->stmt)
		== GF_OMP_TARGET_KIND_REGION)
	      return true;
	    break;
	  default:
	    break;
	  }
      break;
    case GIMPLE_OMP_MASTER:
      for (; ctx != NULL; ctx = ctx->outer)
	switch (gimple_code (ctx->stmt))
	  {
	  case GIMPLE_OMP_FOR:
	    if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
		&& gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
	      break;
	    /* FALLTHRU */
	  case GIMPLE_OMP_SECTIONS:
	  case GIMPLE_OMP_SINGLE:
	  case GIMPLE_OMP_TASK:
	    error_at (gimple_location (stmt),
		      "%<master%> region may not be closely nested inside "
		      "of work-sharing, %<loop%>, explicit %<task%> or "
		      "%<taskloop%> region");
	    return false;
	  case GIMPLE_OMP_PARALLEL:
	  case GIMPLE_OMP_TEAMS:
	    return true;
	  case GIMPLE_OMP_TARGET:
	    if (gimple_omp_target_kind (ctx->stmt)
		== GF_OMP_TARGET_KIND_REGION)
	      return true;
	    break;
	  default:
	    break;
	  }
      break;
    case GIMPLE_OMP_TASK:
      /* depend(source)/depend(sink:...) are only meaningful on ordered.  */
      for (c = gimple_omp_task_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
	    && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
		|| OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
	  {
	    enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
	    error_at (OMP_CLAUSE_LOCATION (c),
		      "%<depend(%s)%> is only allowed in %<omp ordered%>",
		      kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
	    return false;
	  }
      break;
    case GIMPLE_OMP_ORDERED:
      for (c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
	   c; c = OMP_CLAUSE_CHAIN (c))
	{
	  if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
	    {
	      gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREADS
			  || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SIMD);
	      continue;
	    }
	  enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
	  if (kind == OMP_CLAUSE_DEPEND_SOURCE
	      || kind == OMP_CLAUSE_DEPEND_SINK)
	    {
	      tree oclause;
	      /* Look for containing ordered(N) loop.  */
	      if (ctx == NULL
		  || gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
		  || (oclause
			= omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
					   OMP_CLAUSE_ORDERED)) == NULL_TREE)
		{
		  error_at (OMP_CLAUSE_LOCATION (c),
			    "%<ordered%> construct with %<depend%> clause "
			    "must be closely nested inside an %<ordered%> "
			    "loop");
		  return false;
		}
	      else if (OMP_CLAUSE_ORDERED_EXPR (oclause) == NULL_TREE)
		{
		  error_at (OMP_CLAUSE_LOCATION (c),
			    "%<ordered%> construct with %<depend%> clause "
			    "must be closely nested inside a loop with "
			    "%<ordered%> clause with a parameter");
		  return false;
		}
	    }
	  else
	    {
	      error_at (OMP_CLAUSE_LOCATION (c),
			"invalid depend kind in omp %<ordered%> %<depend%>");
	      return false;
	    }
	}
      c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
      if (omp_find_clause (c, OMP_CLAUSE_SIMD))
	{
	  /* ordered simd must be closely nested inside of simd region,
	     and simd region must not encounter constructs other than
	     ordered simd, therefore ordered simd may be either orphaned,
	     or ctx->stmt must be simd.  The latter case is handled already
	     earlier.  */
	  if (ctx != NULL)
	    {
	      error_at (gimple_location (stmt),
			"%<ordered%> %<simd%> must be closely nested inside "
			"%<simd%> region");
	      return false;
	    }
	}
      for (; ctx != NULL; ctx = ctx->outer)
	switch (gimple_code (ctx->stmt))
	  {
	  case GIMPLE_OMP_CRITICAL:
	  case GIMPLE_OMP_TASK:
	  case GIMPLE_OMP_ORDERED:
	  ordered_in_taskloop:
	    error_at (gimple_location (stmt),
		      "%<ordered%> region may not be closely nested inside "
		      "of %<critical%>, %<ordered%>, explicit %<task%> or "
		      "%<taskloop%> region");
	    return false;
	  case GIMPLE_OMP_FOR:
	    if (gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP)
	      goto ordered_in_taskloop;
	    tree o;
	    o = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
				 OMP_CLAUSE_ORDERED);
	    if (o == NULL)
	      {
		error_at (gimple_location (stmt),
			  "%<ordered%> region must be closely nested inside "
			  "a loop region with an %<ordered%> clause");
		return false;
	      }
	    if (OMP_CLAUSE_ORDERED_EXPR (o) != NULL_TREE
		&& omp_find_clause (c, OMP_CLAUSE_DEPEND) == NULL_TREE)
	      {
		error_at (gimple_location (stmt),
			  "%<ordered%> region without %<depend%> clause may "
			  "not be closely nested inside a loop region with "
			  "an %<ordered%> clause with a parameter");
		return false;
	      }
	    return true;
	  case GIMPLE_OMP_TARGET:
	    if (gimple_omp_target_kind (ctx->stmt)
		!= GF_OMP_TARGET_KIND_REGION)
	      break;
	    /* FALLTHRU */
	  case GIMPLE_OMP_PARALLEL:
	  case GIMPLE_OMP_TEAMS:
	    error_at (gimple_location (stmt),
		      "%<ordered%> region must be closely nested inside "
		      "a loop region with an %<ordered%> clause");
	    return false;
	  default:
	    break;
	  }
      break;
    case GIMPLE_OMP_CRITICAL:
      {
	/* Critical names are interned identifiers, so pointer equality
	   suffices for the same-name check.  */
	tree this_stmt_name
	  = gimple_omp_critical_name (as_a <gomp_critical *> (stmt));
	for (; ctx != NULL; ctx = ctx->outer)
	  if (gomp_critical *other_crit
	        = dyn_cast <gomp_critical *> (ctx->stmt))
	    if (this_stmt_name == gimple_omp_critical_name (other_crit))
	      {
		error_at (gimple_location (stmt),
			  "%<critical%> region may not be nested inside "
			   "a %<critical%> region with the same name");
		return false;
	      }
      }
      break;
    case GIMPLE_OMP_TEAMS:
      if (ctx == NULL)
	break;
      else if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET
	       || (gimple_omp_target_kind (ctx->stmt)
		   != GF_OMP_TARGET_KIND_REGION))
	{
	  /* Teams construct can appear either strictly nested inside of
	     target construct with no intervening stmts, or can be encountered
	     only by initial task (so must not appear inside any OpenMP
	     construct).  */
	  error_at (gimple_location (stmt),
		    "%<teams%> construct must be closely nested inside of "
		    "%<target%> construct or not nested in any OpenMP "
		    "construct");
	  return false;
	}
      break;
    case GIMPLE_OMP_TARGET:
      for (c = gimple_omp_target_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
	    && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
		|| OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
	  {
	    enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
	    error_at (OMP_CLAUSE_LOCATION (c),
		      "%<depend(%s)%> is only allowed in %<omp ordered%>",
		      kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
	    return false;
	  }
      if (is_gimple_omp_offloaded (stmt)
	  && oacc_get_fn_attrib (cfun->decl) != NULL)
	{
	  error_at (gimple_location (stmt),
		    "OpenACC region inside of OpenACC routine, nested "
		    "parallelism not supported yet");
	  return false;
	}
      for (; ctx != NULL; ctx = ctx->outer)
	{
	  if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET)
	    {
	      if (is_gimple_omp (stmt)
		  && is_gimple_omp_oacc (stmt)
		  && is_gimple_omp (ctx->stmt))
		{
		  error_at (gimple_location (stmt),
			    "OpenACC construct inside of non-OpenACC region");
		  return false;
		}
	      continue;
	    }

	  /* Map both target kinds to user-facing construct names for
	     the diagnostics below.  */
	  const char *stmt_name, *ctx_stmt_name;
	  switch (gimple_omp_target_kind (stmt))
	    {
	    case GF_OMP_TARGET_KIND_REGION: stmt_name = "target"; break;
	    case GF_OMP_TARGET_KIND_DATA: stmt_name = "target data"; break;
	    case GF_OMP_TARGET_KIND_UPDATE: stmt_name = "target update"; break;
	    case GF_OMP_TARGET_KIND_ENTER_DATA:
	      stmt_name = "target enter data"; break;
	    case GF_OMP_TARGET_KIND_EXIT_DATA:
	      stmt_name = "target exit data"; break;
	    case GF_OMP_TARGET_KIND_OACC_PARALLEL: stmt_name = "parallel"; break;
	    case GF_OMP_TARGET_KIND_OACC_KERNELS: stmt_name = "kernels"; break;
	    case GF_OMP_TARGET_KIND_OACC_SERIAL: stmt_name = "serial"; break;
	    case GF_OMP_TARGET_KIND_OACC_DATA: stmt_name = "data"; break;
	    case GF_OMP_TARGET_KIND_OACC_UPDATE: stmt_name = "update"; break;
	    case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA:
	      stmt_name = "enter/exit data"; break;
	    case GF_OMP_TARGET_KIND_OACC_DECLARE: stmt_name = "declare"; break;
	    case GF_OMP_TARGET_KIND_OACC_HOST_DATA: stmt_name = "host_data";
	      break;
	    default: gcc_unreachable ();
	    }
	  switch (gimple_omp_target_kind (ctx->stmt))
	    {
	    case GF_OMP_TARGET_KIND_REGION: ctx_stmt_name = "target"; break;
	    case GF_OMP_TARGET_KIND_DATA: ctx_stmt_name = "target data"; break;
	    case GF_OMP_TARGET_KIND_OACC_PARALLEL:
	      ctx_stmt_name = "parallel"; break;
	    case GF_OMP_TARGET_KIND_OACC_KERNELS:
	      ctx_stmt_name = "kernels"; break;
	    case GF_OMP_TARGET_KIND_OACC_SERIAL:
	      ctx_stmt_name = "serial"; break;
	    case GF_OMP_TARGET_KIND_OACC_DATA: ctx_stmt_name = "data"; break;
	    case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
	      ctx_stmt_name = "host_data"; break;
	    default: gcc_unreachable ();
	    }

	  /* OpenACC/OpenMP mismatch?  */
	  if (is_gimple_omp_oacc (stmt)
	      != is_gimple_omp_oacc (ctx->stmt))
	    {
	      error_at (gimple_location (stmt),
			"%s %qs construct inside of %s %qs region",
			(is_gimple_omp_oacc (stmt)
			 ? "OpenACC" : "OpenMP"), stmt_name,
			(is_gimple_omp_oacc (ctx->stmt)
			 ? "OpenACC" : "OpenMP"), ctx_stmt_name);
	      return false;
	    }
	  if (is_gimple_omp_offloaded (ctx->stmt))
	    {
	      /* No GIMPLE_OMP_TARGET inside offloaded OpenACC CTX.  */
	      if (is_gimple_omp_oacc (ctx->stmt))
		{
		  error_at (gimple_location (stmt),
			    "%qs construct inside of %qs region",
			    stmt_name, ctx_stmt_name);
		  return false;
		}
	      else
		{
		  warning_at (gimple_location (stmt), 0,
			      "%qs construct inside of %qs region",
			      stmt_name, ctx_stmt_name);
		}
	    }
	}
      break;
    default:
      break;
    }
  return true;
}
3492 
3493 
3494 /* Helper function scan_omp.
3495 
3496    Callback for walk_tree or operators in walk_gimple_stmt used to
3497    scan for OMP directives in TP.  */
3498 
static tree
scan_omp_1_op (tree *tp, int *walk_subtrees, void *data)
{
  struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
  omp_context *ctx = (omp_context *) wi->info;
  tree t = *tp;

  switch (TREE_CODE (t))
    {
    case VAR_DECL:
    case PARM_DECL:
    case LABEL_DECL:
    case RESULT_DECL:
      if (ctx)
	{
	  /* Replace the decl with its remapped copy for this context.  */
	  tree repl = remap_decl (t, &ctx->cb);
	  gcc_checking_assert (TREE_CODE (repl) != ERROR_MARK);
	  *tp = repl;
	}
      break;

    default:
      if (ctx && TYPE_P (t))
	*tp = remap_type (t, &ctx->cb);
      else if (!DECL_P (t))
	{
	  *walk_subtrees = 1;
	  if (ctx)
	    {
	      /* If the expression's type remaps, update the node.  For an
		 INTEGER_CST build a fresh constant carrying the remapped
		 type rather than mutating TREE_TYPE of the existing node
		 in place.  */
	      tree tem = remap_type (TREE_TYPE (t), &ctx->cb);
	      if (tem != TREE_TYPE (t))
		{
		  if (TREE_CODE (t) == INTEGER_CST)
		    *tp = wide_int_to_tree (tem, wi::to_wide (t));
		  else
		    TREE_TYPE (t) = tem;
		}
	    }
	}
      break;
    }

  return NULL_TREE;
}
3543 
3544 /* Return true if FNDECL is a setjmp or a longjmp.  */
3545 
3546 static bool
setjmp_or_longjmp_p(const_tree fndecl)3547 setjmp_or_longjmp_p (const_tree fndecl)
3548 {
3549   if (fndecl_built_in_p (fndecl, BUILT_IN_SETJMP)
3550       || fndecl_built_in_p (fndecl, BUILT_IN_LONGJMP))
3551     return true;
3552 
3553   tree declname = DECL_NAME (fndecl);
3554   if (!declname
3555       || (DECL_CONTEXT (fndecl) != NULL_TREE
3556           && TREE_CODE (DECL_CONTEXT (fndecl)) != TRANSLATION_UNIT_DECL)
3557       || !TREE_PUBLIC (fndecl))
3558     return false;
3559 
3560   const char *name = IDENTIFIER_POINTER (declname);
3561   return !strcmp (name, "setjmp") || !strcmp (name, "longjmp");
3562 }
3563 
3564 /* Return true if FNDECL is an omp_* runtime API call.  */
3565 
3566 static bool
omp_runtime_api_call(const_tree fndecl)3567 omp_runtime_api_call (const_tree fndecl)
3568 {
3569   tree declname = DECL_NAME (fndecl);
3570   if (!declname
3571       || (DECL_CONTEXT (fndecl) != NULL_TREE
3572           && TREE_CODE (DECL_CONTEXT (fndecl)) != TRANSLATION_UNIT_DECL)
3573       || !TREE_PUBLIC (fndecl))
3574     return false;
3575 
3576   const char *name = IDENTIFIER_POINTER (declname);
3577   if (strncmp (name, "omp_", 4) != 0)
3578     return false;
3579 
3580   static const char *omp_runtime_apis[] =
3581     {
3582       /* This array has 3 sections.  First omp_* calls that don't
3583 	 have any suffixes.  */
3584       "target_alloc",
3585       "target_associate_ptr",
3586       "target_disassociate_ptr",
3587       "target_free",
3588       "target_is_present",
3589       "target_memcpy",
3590       "target_memcpy_rect",
3591       NULL,
3592       /* Now omp_* calls that are available as omp_* and omp_*_.  */
3593       "capture_affinity",
3594       "destroy_lock",
3595       "destroy_nest_lock",
3596       "display_affinity",
3597       "get_active_level",
3598       "get_affinity_format",
3599       "get_cancellation",
3600       "get_default_device",
3601       "get_dynamic",
3602       "get_initial_device",
3603       "get_level",
3604       "get_max_active_levels",
3605       "get_max_task_priority",
3606       "get_max_threads",
3607       "get_nested",
3608       "get_num_devices",
3609       "get_num_places",
3610       "get_num_procs",
3611       "get_num_teams",
3612       "get_num_threads",
3613       "get_partition_num_places",
3614       "get_place_num",
3615       "get_proc_bind",
3616       "get_team_num",
3617       "get_thread_limit",
3618       "get_thread_num",
3619       "get_wtick",
3620       "get_wtime",
3621       "in_final",
3622       "in_parallel",
3623       "init_lock",
3624       "init_nest_lock",
3625       "is_initial_device",
3626       "pause_resource",
3627       "pause_resource_all",
3628       "set_affinity_format",
3629       "set_lock",
3630       "set_nest_lock",
3631       "test_lock",
3632       "test_nest_lock",
3633       "unset_lock",
3634       "unset_nest_lock",
3635       NULL,
3636       /* And finally calls available as omp_*, omp_*_ and omp_*_8_.  */
3637       "get_ancestor_thread_num",
3638       "get_partition_place_nums",
3639       "get_place_num_procs",
3640       "get_place_proc_ids",
3641       "get_schedule",
3642       "get_team_size",
3643       "set_default_device",
3644       "set_dynamic",
3645       "set_max_active_levels",
3646       "set_nested",
3647       "set_num_threads",
3648       "set_schedule"
3649     };
3650 
3651   int mode = 0;
3652   for (unsigned i = 0; i < ARRAY_SIZE (omp_runtime_apis); i++)
3653     {
3654       if (omp_runtime_apis[i] == NULL)
3655 	{
3656 	  mode++;
3657 	  continue;
3658 	}
3659       size_t len = strlen (omp_runtime_apis[i]);
3660       if (strncmp (name + 4, omp_runtime_apis[i], len) == 0
3661 	  && (name[4 + len] == '\0'
3662 	      || (mode > 0
3663 		  && name[4 + len] == '_'
3664 		  && (name[4 + len + 1] == '\0'
3665 		      || (mode > 1
3666 			  && strcmp (name + 4 + len + 1, "8_") == 0)))))
3667 	return true;
3668     }
3669   return false;
3670 }
3671 
/* Helper function for scan_omp.

   Callback for walk_gimple_stmt used to scan for OMP directives in
   the current statement in GSI.  First enforces nesting restrictions
   (replacing offending statements with a GIMPLE_NOP after diagnosing
   them), then dispatches to the per-construct scan_omp_* routine,
   each of which builds a new omp_context for its region.  Sets
   *HANDLED_OPS_P so the generic walker knows whether to descend.  */

static tree
scan_omp_1_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
		 struct walk_stmt_info *wi)
{
  gimple *stmt = gsi_stmt (*gsi);
  omp_context *ctx = (omp_context *) wi->info;

  /* Point diagnostics emitted below at the statement being scanned.  */
  if (gimple_has_location (stmt))
    input_location = gimple_location (stmt);

  /* Check the nesting restrictions.  */
  bool remove = false;
  if (is_gimple_omp (stmt))
    remove = !check_omp_nesting_restrictions (stmt, ctx);
  else if (is_gimple_call (stmt))
    {
      tree fndecl = gimple_call_fndecl (stmt);
      if (fndecl)
	{
	  /* setjmp/longjmp are rejected inside simd regions;
	     ctx->loop_p exempts simd regions that came from a
	     different construct (see where loop_p is set).  */
	  if (ctx
	      && gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
	      && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD
	      && setjmp_or_longjmp_p (fndecl)
	      && !ctx->loop_p)
	    {
	      remove = true;
	      error_at (gimple_location (stmt),
			"setjmp/longjmp inside %<simd%> construct");
	    }
	  else if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
	    /* GOMP_* builtins that behave like directives must obey
	       the same nesting restrictions as the directives.  */
	    switch (DECL_FUNCTION_CODE (fndecl))
	      {
	      case BUILT_IN_GOMP_BARRIER:
	      case BUILT_IN_GOMP_CANCEL:
	      case BUILT_IN_GOMP_CANCELLATION_POINT:
	      case BUILT_IN_GOMP_TASKYIELD:
	      case BUILT_IN_GOMP_TASKWAIT:
	      case BUILT_IN_GOMP_TASKGROUP_START:
	      case BUILT_IN_GOMP_TASKGROUP_END:
		remove = !check_omp_nesting_restrictions (stmt, ctx);
		break;
	      default:
		break;
	      }
	  else if (ctx)
	    {
	      /* For a GIMPLE_OMP_SCAN context, the order(concurrent)
		 property lives on the enclosing context.  */
	      omp_context *octx = ctx;
	      if (gimple_code (ctx->stmt) == GIMPLE_OMP_SCAN && ctx->outer)
		octx = ctx->outer;
	      if (octx->order_concurrent && omp_runtime_api_call (fndecl))
		{
		  remove = true;
		  error_at (gimple_location (stmt),
			    "OpenMP runtime API call %qD in a region with "
			    "%<order(concurrent)%> clause", fndecl);
		}
	    }
	}
    }
  /* Diagnosed above: drop the offending statement entirely.  */
  if (remove)
    {
      stmt = gimple_build_nop ();
      gsi_replace (gsi, stmt, false);
    }

  *handled_ops_p = true;

  switch (gimple_code (stmt))
    {
    /* Parallel/task regions bump taskreg_nesting_level around their
       scan so nested regions can tell how deep they are.  */
    case GIMPLE_OMP_PARALLEL:
      taskreg_nesting_level++;
      scan_omp_parallel (gsi, ctx);
      taskreg_nesting_level--;
      break;

    case GIMPLE_OMP_TASK:
      taskreg_nesting_level++;
      scan_omp_task (gsi, ctx);
      taskreg_nesting_level--;
      break;

    case GIMPLE_OMP_FOR:
      /* A simd loop combined into a worksharing construct with an
	 inscan reduction gets the dedicated simd-scan treatment.  */
      if ((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
	   == GF_OMP_FOR_KIND_SIMD)
	  && gimple_omp_for_combined_into_p (stmt)
	  && gimple_code (ctx->stmt) != GIMPLE_OMP_SCAN)
	{
	  tree clauses = gimple_omp_for_clauses (as_a <gomp_for *> (stmt));
	  tree c = omp_find_clause (clauses, OMP_CLAUSE_REDUCTION);
	  if (c && OMP_CLAUSE_REDUCTION_INSCAN (c) && !seen_error ())
	    {
	      scan_omp_simd_scan (gsi, as_a <gomp_for *> (stmt), ctx);
	      break;
	    }
	}
      /* A simd loop that may end up offloaded to a SIMT target is
	 scanned specially so both variants can be kept.  */
      if ((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
	   == GF_OMP_FOR_KIND_SIMD)
	  && omp_maybe_offloaded_ctx (ctx)
	  && omp_max_simt_vf ())
	scan_omp_simd (gsi, as_a <gomp_for *> (stmt), ctx);
      else
	scan_omp_for (as_a <gomp_for *> (stmt), ctx);
      break;

    case GIMPLE_OMP_SECTIONS:
      scan_omp_sections (as_a <gomp_sections *> (stmt), ctx);
      break;

    case GIMPLE_OMP_SINGLE:
      scan_omp_single (as_a <gomp_single *> (stmt), ctx);
      break;

    case GIMPLE_OMP_SCAN:
      /* Record on the *enclosing* context whether this scan directive
	 carries an inclusive or exclusive clause.  */
      if (tree clauses = gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt)))
	{
	  if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_INCLUSIVE)
	    ctx->scan_inclusive = true;
	  else if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_EXCLUSIVE)
	    ctx->scan_exclusive = true;
	}
      /* FALLTHRU */
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_GRID_BODY:
      /* Simple regions: just open a fresh context and recurse.  */
      ctx = new_omp_context (stmt, ctx);
      scan_omp (gimple_omp_body_ptr (stmt), ctx);
      break;

    case GIMPLE_OMP_TASKGROUP:
      ctx = new_omp_context (stmt, ctx);
      scan_sharing_clauses (gimple_omp_taskgroup_clauses (stmt), ctx);
      scan_omp (gimple_omp_body_ptr (stmt), ctx);
      break;

    case GIMPLE_OMP_TARGET:
      /* Only offloaded target regions count toward the taskreg
	 nesting level.  */
      if (is_gimple_omp_offloaded (stmt))
	{
	  taskreg_nesting_level++;
	  scan_omp_target (as_a <gomp_target *> (stmt), ctx);
	  taskreg_nesting_level--;
	}
      else
	scan_omp_target (as_a <gomp_target *> (stmt), ctx);
      break;

    case GIMPLE_OMP_TEAMS:
      /* Host teams behave like a task region for nesting purposes.  */
      if (gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
	{
	  taskreg_nesting_level++;
	  scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
	  taskreg_nesting_level--;
	}
      else
	scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
      break;

    case GIMPLE_BIND:
      {
	tree var;

	/* Let the walker descend into the bind body; meanwhile make
	   the bind's locals map to themselves in the context's copy
	   body so later remapping leaves them alone.  */
	*handled_ops_p = false;
	if (ctx)
	  for (var = gimple_bind_vars (as_a <gbind *> (stmt));
	       var ;
	       var = DECL_CHAIN (var))
	    insert_decl_map (&ctx->cb, var, var);
      }
      break;
    default:
      *handled_ops_p = false;
      break;
    }

  return NULL_TREE;
}
3854 
3855 
3856 /* Scan all the statements starting at the current statement.  CTX
3857    contains context information about the OMP directives and
3858    clauses found during the scan.  */
3859 
3860 static void
scan_omp(gimple_seq * body_p,omp_context * ctx)3861 scan_omp (gimple_seq *body_p, omp_context *ctx)
3862 {
3863   location_t saved_location;
3864   struct walk_stmt_info wi;
3865 
3866   memset (&wi, 0, sizeof (wi));
3867   wi.info = ctx;
3868   wi.want_locations = true;
3869 
3870   saved_location = input_location;
3871   walk_gimple_seq_mod (body_p, scan_omp_1_stmt, scan_omp_1_op, &wi);
3872   input_location = saved_location;
3873 }
3874 
3875 /* Re-gimplification and code generation routines.  */
3876 
3877 /* Remove omp_member_access_dummy_var variables from gimple_bind_vars
3878    of BIND if in a method.  */
3879 
3880 static void
maybe_remove_omp_member_access_dummy_vars(gbind * bind)3881 maybe_remove_omp_member_access_dummy_vars (gbind *bind)
3882 {
3883   if (DECL_ARGUMENTS (current_function_decl)
3884       && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
3885       && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
3886 	  == POINTER_TYPE))
3887     {
3888       tree vars = gimple_bind_vars (bind);
3889       for (tree *pvar = &vars; *pvar; )
3890 	if (omp_member_access_dummy_var (*pvar))
3891 	  *pvar = DECL_CHAIN (*pvar);
3892 	else
3893 	  pvar = &DECL_CHAIN (*pvar);
3894       gimple_bind_set_vars (bind, vars);
3895     }
3896 }
3897 
3898 /* Remove omp_member_access_dummy_var variables from BLOCK_VARS of
3899    block and its subblocks.  */
3900 
3901 static void
remove_member_access_dummy_vars(tree block)3902 remove_member_access_dummy_vars (tree block)
3903 {
3904   for (tree *pvar = &BLOCK_VARS (block); *pvar; )
3905     if (omp_member_access_dummy_var (*pvar))
3906       *pvar = DECL_CHAIN (*pvar);
3907     else
3908       pvar = &DECL_CHAIN (*pvar);
3909 
3910   for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
3911     remove_member_access_dummy_vars (block);
3912 }
3913 
3914 /* If a context was created for STMT when it was scanned, return it.  */
3915 
3916 static omp_context *
maybe_lookup_ctx(gimple * stmt)3917 maybe_lookup_ctx (gimple *stmt)
3918 {
3919   splay_tree_node n;
3920   n = splay_tree_lookup (all_contexts, (splay_tree_key) stmt);
3921   return n ? (omp_context *) n->value : NULL;
3922 }
3923 
3924 
3925 /* Find the mapping for DECL in CTX or the immediately enclosing
3926    context that has a mapping for DECL.
3927 
3928    If CTX is a nested parallel directive, we may have to use the decl
3929    mappings created in CTX's parent context.  Suppose that we have the
3930    following parallel nesting (variable UIDs showed for clarity):
3931 
3932 	iD.1562 = 0;
3933      	#omp parallel shared(iD.1562)		-> outer parallel
3934 	  iD.1562 = iD.1562 + 1;
3935 
3936 	  #omp parallel shared (iD.1562)	-> inner parallel
3937 	     iD.1562 = iD.1562 - 1;
3938 
3939    Each parallel structure will create a distinct .omp_data_s structure
3940    for copying iD.1562 in/out of the directive:
3941 
3942   	outer parallel		.omp_data_s.1.i -> iD.1562
3943 	inner parallel		.omp_data_s.2.i -> iD.1562
3944 
3945    A shared variable mapping will produce a copy-out operation before
3946    the parallel directive and a copy-in operation after it.  So, in
3947    this case we would have:
3948 
3949   	iD.1562 = 0;
3950 	.omp_data_o.1.i = iD.1562;
3951 	#omp parallel shared(iD.1562)		-> outer parallel
3952 	  .omp_data_i.1 = &.omp_data_o.1
3953 	  .omp_data_i.1->i = .omp_data_i.1->i + 1;
3954 
3955 	  .omp_data_o.2.i = iD.1562;		-> **
3956 	  #omp parallel shared(iD.1562)		-> inner parallel
3957 	    .omp_data_i.2 = &.omp_data_o.2
3958 	    .omp_data_i.2->i = .omp_data_i.2->i - 1;
3959 
3960 
3961     ** This is a problem.  The symbol iD.1562 cannot be referenced
3962        inside the body of the outer parallel region.  But since we are
3963        emitting this copy operation while expanding the inner parallel
3964        directive, we need to access the CTX structure of the outer
3965        parallel directive to get the correct mapping:
3966 
3967 	  .omp_data_o.2.i = .omp_data_i.1->i
3968 
3969     Since there may be other workshare or parallel directives enclosing
3970     the parallel directive, it may be necessary to walk up the context
3971     parent chain.  This is not a problem in general because nested
3972     parallelism happens only rarely.  */
3973 
3974 static tree
lookup_decl_in_outer_ctx(tree decl,omp_context * ctx)3975 lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
3976 {
3977   tree t;
3978   omp_context *up;
3979 
3980   for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
3981     t = maybe_lookup_decl (decl, up);
3982 
3983   gcc_assert (!ctx->is_nested || t || is_global_var (decl));
3984 
3985   return t ? t : decl;
3986 }
3987 
3988 
3989 /* Similar to lookup_decl_in_outer_ctx, but return DECL if not found
3990    in outer contexts.  */
3991 
3992 static tree
maybe_lookup_decl_in_outer_ctx(tree decl,omp_context * ctx)3993 maybe_lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
3994 {
3995   tree t = NULL;
3996   omp_context *up;
3997 
3998   for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
3999     t = maybe_lookup_decl (decl, up);
4000 
4001   return t ? t : decl;
4002 }
4003 
4004 
/* Construct the initialization value for reduction operation OP.
   The value returned is the identity element of OP over TYPE, i.e.
   the value private reduction copies start from so that combining
   them with OP reproduces the reduced result.  */

tree
omp_reduction_init_op (location_t loc, enum tree_code op, tree type)
{
  switch (op)
    {
    /* Operations whose identity element is zero (or "false").  */
    case PLUS_EXPR:
    case MINUS_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_ORIF_EXPR:
    case TRUTH_XOR_EXPR:
    case NE_EXPR:
      return build_zero_cst (type);

    /* Operations whose identity element is one (or "true").  */
    case MULT_EXPR:
    case TRUTH_AND_EXPR:
    case TRUTH_ANDIF_EXPR:
    case EQ_EXPR:
      return fold_convert_loc (loc, type, integer_one_node);

    /* Bitwise AND: identity is the all-ones pattern.  */
    case BIT_AND_EXPR:
      return fold_convert_loc (loc, type, integer_minus_one_node);

    case MAX_EXPR:
      /* Identity for MAX is the smallest value of TYPE.  */
      if (SCALAR_FLOAT_TYPE_P (type))
	{
	  REAL_VALUE_TYPE max, min;
	  if (HONOR_INFINITIES (type))
	    {
	      /* -Inf when infinities are honored ...  */
	      real_inf (&max);
	      real_arithmetic (&min, NEGATE_EXPR, &max, NULL);
	    }
	  else
	    /* ... otherwise the most negative finite value.  */
	    real_maxval (&min, 1, TYPE_MODE (type));
	  return build_real (type, min);
	}
      else if (POINTER_TYPE_P (type))
	{
	  wide_int min
	    = wi::min_value (TYPE_PRECISION (type), TYPE_SIGN (type));
	  return wide_int_to_tree (type, min);
	}
      else
	{
	  gcc_assert (INTEGRAL_TYPE_P (type));
	  return TYPE_MIN_VALUE (type);
	}

    case MIN_EXPR:
      /* Identity for MIN is the largest value of TYPE.  */
      if (SCALAR_FLOAT_TYPE_P (type))
	{
	  REAL_VALUE_TYPE max;
	  if (HONOR_INFINITIES (type))
	    real_inf (&max);
	  else
	    /* The largest finite value when infinities are not honored.  */
	    real_maxval (&max, 0, TYPE_MODE (type));
	  return build_real (type, max);
	}
      else if (POINTER_TYPE_P (type))
	{
	  wide_int max
	    = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
	  return wide_int_to_tree (type, max);
	}
      else
	{
	  gcc_assert (INTEGRAL_TYPE_P (type));
	  return TYPE_MAX_VALUE (type);
	}

    default:
      /* Callers only pass reduction codes handled above.  */
      gcc_unreachable ();
    }
}
4082 
4083 /* Construct the initialization value for reduction CLAUSE.  */
4084 
4085 tree
omp_reduction_init(tree clause,tree type)4086 omp_reduction_init (tree clause, tree type)
4087 {
4088   return omp_reduction_init_op (OMP_CLAUSE_LOCATION (clause),
4089 				OMP_CLAUSE_REDUCTION_CODE (clause), type);
4090 }
4091 
/* Return alignment to be assumed for var in CLAUSE, which should be
   OMP_CLAUSE_ALIGNED.  */

static tree
omp_clause_aligned_alignment (tree clause)
{
  /* An alignment spelled out on the clause wins.  */
  if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
    return OMP_CLAUSE_ALIGNED_ALIGNMENT (clause);

  /* Otherwise return implementation defined alignment.  */
  unsigned int al = 1;
  opt_scalar_mode mode_iter;
  auto_vector_modes modes;
  targetm.vectorize.autovectorize_vector_modes (&modes, true);
  /* Scalar mode classes at even indices, each paired with the vector
     class a preferred SIMD mode for it must belong to.  */
  static enum mode_class classes[]
    = { MODE_INT, MODE_VECTOR_INT, MODE_FLOAT, MODE_VECTOR_FLOAT };
  for (int i = 0; i < 4; i += 2)
    /* The for loop above dictates that we only walk through scalar classes.  */
    FOR_EACH_MODE_IN_CLASS (mode_iter, classes[i])
      {
	scalar_mode mode = mode_iter.require ();
	/* Skip scalar modes whose preferred SIMD mode is not in the
	   matching vector class.  */
	machine_mode vmode = targetm.vectorize.preferred_simd_mode (mode);
	if (GET_MODE_CLASS (vmode) != classes[i + 1])
	  continue;
	/* Prefer the widest related autovectorization mode, if any.  */
	machine_mode alt_vmode;
	for (unsigned int j = 0; j < modes.length (); ++j)
	  if (related_vector_mode (modes[j], mode).exists (&alt_vmode)
	      && known_ge (GET_MODE_SIZE (alt_vmode), GET_MODE_SIZE (vmode)))
	    vmode = alt_vmode;

	/* Build the corresponding vector type; bail if the frontend or
	   target cannot represent it with exactly these modes.  */
	tree type = lang_hooks.types.type_for_mode (mode, 1);
	if (type == NULL_TREE || TYPE_MODE (type) != mode)
	  continue;
	type = build_vector_type_for_mode (type, vmode);
	if (TYPE_MODE (type) != vmode)
	  continue;
	/* Track the largest unit alignment over all such vector types.  */
	if (TYPE_ALIGN_UNIT (type) > al)
	  al = TYPE_ALIGN_UNIT (type);
      }
  return build_int_cst (integer_type_node, al);
}
4133 
4134 
/* This structure is part of the interface between lower_rec_simd_input_clauses
   and lower_rec_input_clauses.  */

class omplow_simd_context {
public:
  /* Zero all members up front; fields are filled in lazily by
     lower_rec_simd_input_clauses.  */
  omplow_simd_context () { memset (this, 0, sizeof (*this)); }
  tree idx;			/* Index var used to subscript the
				   "omp simd array" temporaries.  */
  tree lane;			/* Lane var used for the lvar subscript.  */
  tree lastlane;		/* Lane used for inscan reduction reads.  */
  vec<tree, va_heap> simt_eargs; /* For SIMT: addresses of privatized
				    vars (plus a simduid placeholder)
				    passed as extra entry args.  */
  gimple_seq simt_dlist;	/* For SIMT: clobber stmts for the
				   privatized vars.  */
  poly_uint64_pod max_vf;	/* Maximum vectorization factor; 0 until
				   computed, 1 disables privatization.  */
  bool is_simt;			/* Whether the region uses SIMT.  */
};
4149 
/* Helper function of lower_rec_input_clauses, used for #pragma omp simd
   privatization.

   Arrange per-SIMD-lane private storage for NEW_VAR in context CTX,
   using the bookkeeping in SCTX.  On success sets IVAR to the
   per-iteration reference and LVAR to the per-lane reference of the
   privatized variable and returns true.  Returns false when
   max_vf == 1, i.e. when per-lane privatization is pointless.
   RVAR/RVAR2, when non-NULL, receive additional references used for
   inscan reductions (see below).  */

static bool
lower_rec_simd_input_clauses (tree new_var, omp_context *ctx,
			      omplow_simd_context *sctx, tree &ivar,
			      tree &lvar, tree *rvar = NULL,
			      tree *rvar2 = NULL)
{
  /* First call: compute max_vf, clamped by any safelen clause, and
     create the shared idx/lane index variables.  */
  if (known_eq (sctx->max_vf, 0U))
    {
      sctx->max_vf = sctx->is_simt ? omp_max_simt_vf () : omp_max_vf ();
      if (maybe_gt (sctx->max_vf, 1U))
	{
	  tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
				    OMP_CLAUSE_SAFELEN);
	  if (c)
	    {
	      poly_uint64 safe_len;
	      /* A non-constant or sub-1 safelen forces max_vf = 1.  */
	      if (!poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
		  || maybe_lt (safe_len, 1U))
		sctx->max_vf = 1;
	      else
		sctx->max_vf = lower_bound (sctx->max_vf, safe_len);
	    }
	}
      if (maybe_gt (sctx->max_vf, 1U))
	{
	  sctx->idx = create_tmp_var (unsigned_type_node);
	  sctx->lane = create_tmp_var (unsigned_type_node);
	}
    }
  if (known_eq (sctx->max_vf, 1U))
    return false;

  if (sctx->is_simt)
    {
      /* SIMT: registers are already per-lane; only addressable vars
	 need a marked private temporary passed by address to the
	 SIMT region and clobbered on entry.  */
      if (is_gimple_reg (new_var))
	{
	  ivar = lvar = new_var;
	  return true;
	}
      tree type = TREE_TYPE (new_var), ptype = build_pointer_type (type);
      ivar = lvar = create_tmp_var (type);
      TREE_ADDRESSABLE (ivar) = 1;
      DECL_ATTRIBUTES (ivar) = tree_cons (get_identifier ("omp simt private"),
					  NULL, DECL_ATTRIBUTES (ivar));
      sctx->simt_eargs.safe_push (build1 (ADDR_EXPR, ptype, ivar));
      tree clobber = build_clobber (type);
      gimple *g = gimple_build_assign (ivar, clobber);
      gimple_seq_add_stmt (&sctx->simt_dlist, g);
    }
  else
    {
      /* Non-SIMT: give each lane its own element of a max_vf-sized
	 "omp simd array" that the vectorizer recognizes.  */
      tree atype = build_array_type_nelts (TREE_TYPE (new_var), sctx->max_vf);
      tree avar = create_tmp_var_raw (atype);
      if (TREE_ADDRESSABLE (new_var))
	TREE_ADDRESSABLE (avar) = 1;
      DECL_ATTRIBUTES (avar)
	= tree_cons (get_identifier ("omp simd array"), NULL,
		     DECL_ATTRIBUTES (avar));
      gimple_add_tmp_var (avar);
      tree iavar = avar;
      if (rvar && !ctx->for_simd_scan_phase)
	{
	  /* For inscan reductions, create another array temporary,
	     which will hold the reduced value.  */
	  iavar = create_tmp_var_raw (atype);
	  if (TREE_ADDRESSABLE (new_var))
	    TREE_ADDRESSABLE (iavar) = 1;
	  DECL_ATTRIBUTES (iavar)
	    = tree_cons (get_identifier ("omp simd array"), NULL,
			 tree_cons (get_identifier ("omp simd inscan"), NULL,
				    DECL_ATTRIBUTES (iavar)));
	  gimple_add_tmp_var (iavar);
	  /* Record the pairing so later phases can find the inscan
	     array from the plain one.  */
	  ctx->cb.decl_map->put (avar, iavar);
	  if (sctx->lastlane == NULL_TREE)
	    sctx->lastlane = create_tmp_var (unsigned_type_node);
	  *rvar = build4 (ARRAY_REF, TREE_TYPE (new_var), iavar,
			  sctx->lastlane, NULL_TREE, NULL_TREE);
	  TREE_THIS_NOTRAP (*rvar) = 1;

	  if (ctx->scan_exclusive)
	    {
	      /* And for exclusive scan yet another one, which will
		 hold the value during the scan phase.  */
	      tree savar = create_tmp_var_raw (atype);
	      if (TREE_ADDRESSABLE (new_var))
		TREE_ADDRESSABLE (savar) = 1;
	      DECL_ATTRIBUTES (savar)
		= tree_cons (get_identifier ("omp simd array"), NULL,
			     tree_cons (get_identifier ("omp simd inscan "
							"exclusive"), NULL,
					DECL_ATTRIBUTES (savar)));
	      gimple_add_tmp_var (savar);
	      ctx->cb.decl_map->put (iavar, savar);
	      *rvar2 = build4 (ARRAY_REF, TREE_TYPE (new_var), savar,
			       sctx->idx, NULL_TREE, NULL_TREE);
	      TREE_THIS_NOTRAP (*rvar2) = 1;
	    }
	}
      /* Per-iteration reference indexes with idx, per-lane with lane.  */
      ivar = build4 (ARRAY_REF, TREE_TYPE (new_var), iavar, sctx->idx,
		     NULL_TREE, NULL_TREE);
      lvar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, sctx->lane,
		     NULL_TREE, NULL_TREE);
      TREE_THIS_NOTRAP (ivar) = 1;
      TREE_THIS_NOTRAP (lvar) = 1;
    }
  /* Make uses of NEW_VAR expand to the per-lane reference.  */
  if (DECL_P (new_var))
    {
      SET_DECL_VALUE_EXPR (new_var, lvar);
      DECL_HAS_VALUE_EXPR_P (new_var) = 1;
    }
  return true;
}
4265 
4266 /* Helper function of lower_rec_input_clauses.  For a reference
4267    in simd reduction, add an underlying variable it will reference.  */
4268 
4269 static void
handle_simd_reference(location_t loc,tree new_vard,gimple_seq * ilist)4270 handle_simd_reference (location_t loc, tree new_vard, gimple_seq *ilist)
4271 {
4272   tree z = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_vard)));
4273   if (TREE_CONSTANT (z))
4274     {
4275       z = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_vard)),
4276 			      get_name (new_vard));
4277       gimple_add_tmp_var (z);
4278       TREE_ADDRESSABLE (z) = 1;
4279       z = build_fold_addr_expr_loc (loc, z);
4280       gimplify_assign (new_vard, z, ilist);
4281     }
4282 }
4283 
4284 /* Helper function for lower_rec_input_clauses.  Emit into ilist sequence
4285    code to emit (type) (tskred_temp[idx]).  */
4286 
4287 static tree
task_reduction_read(gimple_seq * ilist,tree tskred_temp,tree type,unsigned idx)4288 task_reduction_read (gimple_seq *ilist, tree tskred_temp, tree type,
4289 		     unsigned idx)
4290 {
4291   unsigned HOST_WIDE_INT sz
4292     = tree_to_uhwi (TYPE_SIZE_UNIT (pointer_sized_int_node));
4293   tree r = build2 (MEM_REF, pointer_sized_int_node,
4294 		   tskred_temp, build_int_cst (TREE_TYPE (tskred_temp),
4295 					       idx * sz));
4296   tree v = create_tmp_var (pointer_sized_int_node);
4297   gimple *g = gimple_build_assign (v, r);
4298   gimple_seq_add_stmt (ilist, g);
4299   if (!useless_type_conversion_p (type, pointer_sized_int_node))
4300     {
4301       v = create_tmp_var (type);
4302       g = gimple_build_assign (v, NOP_EXPR, gimple_assign_lhs (g));
4303       gimple_seq_add_stmt (ilist, g);
4304     }
4305   return v;
4306 }
4307 
4308 /* Generate code to implement the input clauses, FIRSTPRIVATE and COPYIN,
4309    from the receiver (aka child) side and initializers for REFERENCE_TYPE
4310    private variables.  Initialization statements go in ILIST, while calls
4311    to destructors go in DLIST.  */
4312 
4313 static void
lower_rec_input_clauses(tree clauses,gimple_seq * ilist,gimple_seq * dlist,omp_context * ctx,struct omp_for_data * fd)4314 lower_rec_input_clauses (tree clauses, gimple_seq *ilist, gimple_seq *dlist,
4315 			 omp_context *ctx, struct omp_for_data *fd)
4316 {
4317   tree c, copyin_seq, x, ptr;
4318   bool copyin_by_ref = false;
4319   bool lastprivate_firstprivate = false;
4320   bool reduction_omp_orig_ref = false;
4321   int pass;
4322   bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
4323 		  && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD);
4324   omplow_simd_context sctx = omplow_simd_context ();
4325   tree simt_lane = NULL_TREE, simtrec = NULL_TREE;
4326   tree ivar = NULL_TREE, lvar = NULL_TREE, uid = NULL_TREE;
4327   gimple_seq llist[4] = { };
4328   tree nonconst_simd_if = NULL_TREE;
4329 
4330   copyin_seq = NULL;
4331   sctx.is_simt = is_simd && omp_find_clause (clauses, OMP_CLAUSE__SIMT_);
4332 
4333   /* Set max_vf=1 (which will later enforce safelen=1) in simd loops
4334      with data sharing clauses referencing variable sized vars.  That
4335      is unnecessarily hard to support and very unlikely to result in
4336      vectorized code anyway.  */
4337   if (is_simd)
4338     for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
4339       switch (OMP_CLAUSE_CODE (c))
4340 	{
4341 	case OMP_CLAUSE_LINEAR:
4342 	  if (OMP_CLAUSE_LINEAR_ARRAY (c))
4343 	    sctx.max_vf = 1;
4344 	  /* FALLTHRU */
4345 	case OMP_CLAUSE_PRIVATE:
4346 	case OMP_CLAUSE_FIRSTPRIVATE:
4347 	case OMP_CLAUSE_LASTPRIVATE:
4348 	  if (is_variable_sized (OMP_CLAUSE_DECL (c)))
4349 	    sctx.max_vf = 1;
4350 	  else if (omp_is_reference (OMP_CLAUSE_DECL (c)))
4351 	    {
4352 	      tree rtype = TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c)));
4353 	      if (!TREE_CONSTANT (TYPE_SIZE_UNIT (rtype)))
4354 		sctx.max_vf = 1;
4355 	    }
4356 	  break;
4357 	case OMP_CLAUSE_REDUCTION:
4358 	case OMP_CLAUSE_IN_REDUCTION:
4359 	  if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
4360 	      || is_variable_sized (OMP_CLAUSE_DECL (c)))
4361 	    sctx.max_vf = 1;
4362 	  else if (omp_is_reference (OMP_CLAUSE_DECL (c)))
4363 	    {
4364 	      tree rtype = TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c)));
4365 	      if (!TREE_CONSTANT (TYPE_SIZE_UNIT (rtype)))
4366 		sctx.max_vf = 1;
4367 	    }
4368 	  break;
4369 	case OMP_CLAUSE_IF:
4370 	  if (integer_zerop (OMP_CLAUSE_IF_EXPR (c)))
4371 	    sctx.max_vf = 1;
4372 	  else if (TREE_CODE (OMP_CLAUSE_IF_EXPR (c)) != INTEGER_CST)
4373 	    nonconst_simd_if = OMP_CLAUSE_IF_EXPR (c);
4374 	  break;
4375         case OMP_CLAUSE_SIMDLEN:
4376 	  if (integer_onep (OMP_CLAUSE_SIMDLEN_EXPR (c)))
4377 	    sctx.max_vf = 1;
4378 	  break;
4379 	case OMP_CLAUSE__CONDTEMP_:
4380 	  /* FIXME: lastprivate(conditional:) not handled for SIMT yet.  */
4381 	  if (sctx.is_simt)
4382 	    sctx.max_vf = 1;
4383 	  break;
4384 	default:
4385 	  continue;
4386 	}
4387 
4388   /* Add a placeholder for simduid.  */
4389   if (sctx.is_simt && maybe_ne (sctx.max_vf, 1U))
4390     sctx.simt_eargs.safe_push (NULL_TREE);
4391 
4392   unsigned task_reduction_cnt = 0;
4393   unsigned task_reduction_cntorig = 0;
4394   unsigned task_reduction_cnt_full = 0;
4395   unsigned task_reduction_cntorig_full = 0;
4396   unsigned task_reduction_other_cnt = 0;
4397   tree tskred_atype = NULL_TREE, tskred_avar = NULL_TREE;
4398   tree tskred_base = NULL_TREE, tskred_temp = NULL_TREE;
4399   /* Do all the fixed sized types in the first pass, and the variable sized
4400      types in the second pass.  This makes sure that the scalar arguments to
4401      the variable sized types are processed before we use them in the
4402      variable sized operations.  For task reductions we use 4 passes, in the
4403      first two we ignore them, in the third one gather arguments for
4404      GOMP_task_reduction_remap call and in the last pass actually handle
4405      the task reductions.  */
4406   for (pass = 0; pass < ((task_reduction_cnt || task_reduction_other_cnt)
4407 			 ? 4 : 2); ++pass)
4408     {
4409       if (pass == 2 && task_reduction_cnt)
4410 	{
4411 	  tskred_atype
4412 	    = build_array_type_nelts (ptr_type_node, task_reduction_cnt
4413 						     + task_reduction_cntorig);
4414 	  tskred_avar = create_tmp_var_raw (tskred_atype);
4415 	  gimple_add_tmp_var (tskred_avar);
4416 	  TREE_ADDRESSABLE (tskred_avar) = 1;
4417 	  task_reduction_cnt_full = task_reduction_cnt;
4418 	  task_reduction_cntorig_full = task_reduction_cntorig;
4419 	}
4420       else if (pass == 3 && task_reduction_cnt)
4421 	{
4422 	  x = builtin_decl_explicit (BUILT_IN_GOMP_TASK_REDUCTION_REMAP);
4423 	  gimple *g
4424 	    = gimple_build_call (x, 3, size_int (task_reduction_cnt),
4425 				 size_int (task_reduction_cntorig),
4426 				 build_fold_addr_expr (tskred_avar));
4427 	  gimple_seq_add_stmt (ilist, g);
4428 	}
4429       if (pass == 3 && task_reduction_other_cnt)
4430 	{
4431 	  /* For reduction clauses, build
4432 	     tskred_base = (void *) tskred_temp[2]
4433 			   + omp_get_thread_num () * tskred_temp[1]
4434 	     or if tskred_temp[1] is known to be constant, that constant
4435 	     directly.  This is the start of the private reduction copy block
4436 	     for the current thread.  */
4437 	  tree v = create_tmp_var (integer_type_node);
4438 	  x = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
4439 	  gimple *g = gimple_build_call (x, 0);
4440 	  gimple_call_set_lhs (g, v);
4441 	  gimple_seq_add_stmt (ilist, g);
4442 	  c = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
4443 	  tskred_temp = OMP_CLAUSE_DECL (c);
4444 	  if (is_taskreg_ctx (ctx))
4445 	    tskred_temp = lookup_decl (tskred_temp, ctx);
4446 	  tree v2 = create_tmp_var (sizetype);
4447 	  g = gimple_build_assign (v2, NOP_EXPR, v);
4448 	  gimple_seq_add_stmt (ilist, g);
4449 	  if (ctx->task_reductions[0])
4450 	    v = fold_convert (sizetype, ctx->task_reductions[0]);
4451 	  else
4452 	    v = task_reduction_read (ilist, tskred_temp, sizetype, 1);
4453 	  tree v3 = create_tmp_var (sizetype);
4454 	  g = gimple_build_assign (v3, MULT_EXPR, v2, v);
4455 	  gimple_seq_add_stmt (ilist, g);
4456 	  v = task_reduction_read (ilist, tskred_temp, ptr_type_node, 2);
4457 	  tskred_base = create_tmp_var (ptr_type_node);
4458 	  g = gimple_build_assign (tskred_base, POINTER_PLUS_EXPR, v, v3);
4459 	  gimple_seq_add_stmt (ilist, g);
4460 	}
4461       task_reduction_cnt = 0;
4462       task_reduction_cntorig = 0;
4463       task_reduction_other_cnt = 0;
4464       for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
4465 	{
4466 	  enum omp_clause_code c_kind = OMP_CLAUSE_CODE (c);
4467 	  tree var, new_var;
4468 	  bool by_ref;
4469 	  location_t clause_loc = OMP_CLAUSE_LOCATION (c);
4470 	  bool task_reduction_p = false;
4471 	  bool task_reduction_needs_orig_p = false;
4472 	  tree cond = NULL_TREE;
4473 
4474 	  switch (c_kind)
4475 	    {
4476 	    case OMP_CLAUSE_PRIVATE:
4477 	      if (OMP_CLAUSE_PRIVATE_DEBUG (c))
4478 		continue;
4479 	      break;
4480 	    case OMP_CLAUSE_SHARED:
4481 	      /* Ignore shared directives in teams construct inside
4482 		 of target construct.  */
4483 	      if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
4484 		  && !is_host_teams_ctx (ctx))
4485 		continue;
4486 	      if (maybe_lookup_decl (OMP_CLAUSE_DECL (c), ctx) == NULL)
4487 		{
4488 		  gcc_assert (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c)
4489 			      || is_global_var (OMP_CLAUSE_DECL (c)));
4490 		  continue;
4491 		}
4492 	    case OMP_CLAUSE_FIRSTPRIVATE:
4493 	    case OMP_CLAUSE_COPYIN:
4494 	      break;
4495 	    case OMP_CLAUSE_LINEAR:
4496 	      if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
4497 		  && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
4498 		lastprivate_firstprivate = true;
4499 	      break;
4500 	    case OMP_CLAUSE_REDUCTION:
4501 	    case OMP_CLAUSE_IN_REDUCTION:
4502 	      if (is_task_ctx (ctx) || OMP_CLAUSE_REDUCTION_TASK (c))
4503 		{
4504 		  task_reduction_p = true;
4505 		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
4506 		    {
4507 		      task_reduction_other_cnt++;
4508 		      if (pass == 2)
4509 			continue;
4510 		    }
4511 		  else
4512 		    task_reduction_cnt++;
4513 		  if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
4514 		    {
4515 		      var = OMP_CLAUSE_DECL (c);
4516 		      /* If var is a global variable that isn't privatized
4517 			 in outer contexts, we don't need to look up the
4518 			 original address, it is always the address of the
4519 			 global variable itself.  */
4520 		      if (!DECL_P (var)
4521 			  || omp_is_reference (var)
4522 			  || !is_global_var
4523 				(maybe_lookup_decl_in_outer_ctx (var, ctx)))
4524 			{
4525 			  task_reduction_needs_orig_p = true;
4526 			  if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
4527 			    task_reduction_cntorig++;
4528 			}
4529 		    }
4530 		}
4531 	      else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
4532 		reduction_omp_orig_ref = true;
4533 	      break;
4534 	    case OMP_CLAUSE__REDUCTEMP_:
4535 	      if (!is_taskreg_ctx (ctx))
4536 		continue;
4537 	      /* FALLTHRU */
4538 	    case OMP_CLAUSE__LOOPTEMP_:
4539 	      /* Handle _looptemp_/_reductemp_ clauses only on
4540 		 parallel/task.  */
4541 	      if (fd)
4542 		continue;
4543 	      break;
4544 	    case OMP_CLAUSE_LASTPRIVATE:
4545 	      if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
4546 		{
4547 		  lastprivate_firstprivate = true;
4548 		  if (pass != 0 || is_taskloop_ctx (ctx))
4549 		    continue;
4550 		}
4551 	      /* Even without corresponding firstprivate, if
4552 		 decl is Fortran allocatable, it needs outer var
4553 		 reference.  */
4554 	      else if (pass == 0
4555 		       && lang_hooks.decls.omp_private_outer_ref
4556 							(OMP_CLAUSE_DECL (c)))
4557 		lastprivate_firstprivate = true;
4558 	      break;
4559 	    case OMP_CLAUSE_ALIGNED:
4560 	      if (pass != 1)
4561 		continue;
4562 	      var = OMP_CLAUSE_DECL (c);
4563 	      if (TREE_CODE (TREE_TYPE (var)) == POINTER_TYPE
4564 		  && !is_global_var (var))
4565 		{
4566 		  new_var = maybe_lookup_decl (var, ctx);
4567 		  if (new_var == NULL_TREE)
4568 		    new_var = maybe_lookup_decl_in_outer_ctx (var, ctx);
4569 		  x = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
4570 		  tree alarg = omp_clause_aligned_alignment (c);
4571 		  alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
4572 		  x = build_call_expr_loc (clause_loc, x, 2, new_var, alarg);
4573 		  x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
4574 		  x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
4575 		  gimplify_and_add (x, ilist);
4576 		}
4577 	      else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
4578 		       && is_global_var (var))
4579 		{
4580 		  tree ptype = build_pointer_type (TREE_TYPE (var)), t, t2;
4581 		  new_var = lookup_decl (var, ctx);
4582 		  t = maybe_lookup_decl_in_outer_ctx (var, ctx);
4583 		  t = build_fold_addr_expr_loc (clause_loc, t);
4584 		  t2 = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
4585 		  tree alarg = omp_clause_aligned_alignment (c);
4586 		  alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
4587 		  t = build_call_expr_loc (clause_loc, t2, 2, t, alarg);
4588 		  t = fold_convert_loc (clause_loc, ptype, t);
4589 		  x = create_tmp_var (ptype);
4590 		  t = build2 (MODIFY_EXPR, ptype, x, t);
4591 		  gimplify_and_add (t, ilist);
4592 		  t = build_simple_mem_ref_loc (clause_loc, x);
4593 		  SET_DECL_VALUE_EXPR (new_var, t);
4594 		  DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4595 		}
4596 	      continue;
4597 	    case OMP_CLAUSE__CONDTEMP_:
4598 	      if (is_parallel_ctx (ctx)
4599 		  || (is_simd && !OMP_CLAUSE__CONDTEMP__ITER (c)))
4600 		break;
4601 	      continue;
4602 	    default:
4603 	      continue;
4604 	    }
4605 
4606 	  if (task_reduction_p != (pass >= 2))
4607 	    continue;
4608 
4609 	  new_var = var = OMP_CLAUSE_DECL (c);
4610 	  if ((c_kind == OMP_CLAUSE_REDUCTION
4611 	       || c_kind == OMP_CLAUSE_IN_REDUCTION)
4612 	      && TREE_CODE (var) == MEM_REF)
4613 	    {
4614 	      var = TREE_OPERAND (var, 0);
4615 	      if (TREE_CODE (var) == POINTER_PLUS_EXPR)
4616 		var = TREE_OPERAND (var, 0);
4617 	      if (TREE_CODE (var) == INDIRECT_REF
4618 		  || TREE_CODE (var) == ADDR_EXPR)
4619 		var = TREE_OPERAND (var, 0);
4620 	      if (is_variable_sized (var))
4621 		{
4622 		  gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
4623 		  var = DECL_VALUE_EXPR (var);
4624 		  gcc_assert (TREE_CODE (var) == INDIRECT_REF);
4625 		  var = TREE_OPERAND (var, 0);
4626 		  gcc_assert (DECL_P (var));
4627 		}
4628 	      new_var = var;
4629 	    }
4630 	  if (c_kind != OMP_CLAUSE_COPYIN)
4631 	    new_var = lookup_decl (var, ctx);
4632 
4633 	  if (c_kind == OMP_CLAUSE_SHARED || c_kind == OMP_CLAUSE_COPYIN)
4634 	    {
4635 	      if (pass != 0)
4636 		continue;
4637 	    }
4638 	  /* C/C++ array section reductions.  */
4639 	  else if ((c_kind == OMP_CLAUSE_REDUCTION
4640 		    || c_kind == OMP_CLAUSE_IN_REDUCTION)
4641 		   && var != OMP_CLAUSE_DECL (c))
4642 	    {
4643 	      if (pass == 0)
4644 		continue;
4645 
4646 	      tree bias = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
4647 	      tree orig_var = TREE_OPERAND (OMP_CLAUSE_DECL (c), 0);
4648 
4649 	      if (TREE_CODE (orig_var) == POINTER_PLUS_EXPR)
4650 		{
4651 		  tree b = TREE_OPERAND (orig_var, 1);
4652 		  b = maybe_lookup_decl (b, ctx);
4653 		  if (b == NULL)
4654 		    {
4655 		      b = TREE_OPERAND (orig_var, 1);
4656 		      b = maybe_lookup_decl_in_outer_ctx (b, ctx);
4657 		    }
4658 		  if (integer_zerop (bias))
4659 		    bias = b;
4660 		  else
4661 		    {
4662 		      bias = fold_convert_loc (clause_loc,
4663 					       TREE_TYPE (b), bias);
4664 		      bias = fold_build2_loc (clause_loc, PLUS_EXPR,
4665 					      TREE_TYPE (b), b, bias);
4666 		    }
4667 		  orig_var = TREE_OPERAND (orig_var, 0);
4668 		}
4669 	      if (pass == 2)
4670 		{
4671 		  tree out = maybe_lookup_decl_in_outer_ctx (var, ctx);
4672 		  if (is_global_var (out)
4673 		      && TREE_CODE (TREE_TYPE (out)) != POINTER_TYPE
4674 		      && (TREE_CODE (TREE_TYPE (out)) != REFERENCE_TYPE
4675 			  || (TREE_CODE (TREE_TYPE (TREE_TYPE (out)))
4676 			      != POINTER_TYPE)))
4677 		    x = var;
4678 		  else
4679 		    {
4680 		      bool by_ref = use_pointer_for_field (var, NULL);
4681 		      x = build_receiver_ref (var, by_ref, ctx);
4682 		      if (TREE_CODE (TREE_TYPE (var)) == REFERENCE_TYPE
4683 			  && (TREE_CODE (TREE_TYPE (TREE_TYPE (var)))
4684 			      == POINTER_TYPE))
4685 			x = build_fold_addr_expr (x);
4686 		    }
4687 		  if (TREE_CODE (orig_var) == INDIRECT_REF)
4688 		    x = build_simple_mem_ref (x);
4689 		  else if (TREE_CODE (orig_var) == ADDR_EXPR)
4690 		    {
4691 		      if (var == TREE_OPERAND (orig_var, 0))
4692 			x = build_fold_addr_expr (x);
4693 		    }
4694 		  bias = fold_convert (sizetype, bias);
4695 		  x = fold_convert (ptr_type_node, x);
4696 		  x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
4697 				       TREE_TYPE (x), x, bias);
4698 		  unsigned cnt = task_reduction_cnt - 1;
4699 		  if (!task_reduction_needs_orig_p)
4700 		    cnt += (task_reduction_cntorig_full
4701 			    - task_reduction_cntorig);
4702 		  else
4703 		    cnt = task_reduction_cntorig - 1;
4704 		  tree r = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
4705 				   size_int (cnt), NULL_TREE, NULL_TREE);
4706 		  gimplify_assign (r, x, ilist);
4707 		  continue;
4708 		}
4709 
4710 	      if (TREE_CODE (orig_var) == INDIRECT_REF
4711 		  || TREE_CODE (orig_var) == ADDR_EXPR)
4712 		orig_var = TREE_OPERAND (orig_var, 0);
4713 	      tree d = OMP_CLAUSE_DECL (c);
4714 	      tree type = TREE_TYPE (d);
4715 	      gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
4716 	      tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
4717 	      const char *name = get_name (orig_var);
4718 	      if (pass == 3)
4719 		{
4720 		  tree xv = create_tmp_var (ptr_type_node);
4721 		  if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
4722 		    {
4723 		      unsigned cnt = task_reduction_cnt - 1;
4724 		      if (!task_reduction_needs_orig_p)
4725 			cnt += (task_reduction_cntorig_full
4726 				- task_reduction_cntorig);
4727 		      else
4728 			cnt = task_reduction_cntorig - 1;
4729 		      x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
4730 				  size_int (cnt), NULL_TREE, NULL_TREE);
4731 
4732 		      gimple *g = gimple_build_assign (xv, x);
4733 		      gimple_seq_add_stmt (ilist, g);
4734 		    }
4735 		  else
4736 		    {
4737 		      unsigned int idx = *ctx->task_reduction_map->get (c);
4738 		      tree off;
4739 		      if (ctx->task_reductions[1 + idx])
4740 			off = fold_convert (sizetype,
4741 					    ctx->task_reductions[1 + idx]);
4742 		      else
4743 			off = task_reduction_read (ilist, tskred_temp, sizetype,
4744 						   7 + 3 * idx + 1);
4745 		      gimple *g = gimple_build_assign (xv, POINTER_PLUS_EXPR,
4746 						       tskred_base, off);
4747 		      gimple_seq_add_stmt (ilist, g);
4748 		    }
4749 		  x = fold_convert (build_pointer_type (boolean_type_node),
4750 				    xv);
4751 		  if (TREE_CONSTANT (v))
4752 		    x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (x), x,
4753 				     TYPE_SIZE_UNIT (type));
4754 		  else
4755 		    {
4756 		      tree t = maybe_lookup_decl (v, ctx);
4757 		      if (t)
4758 			v = t;
4759 		      else
4760 			v = maybe_lookup_decl_in_outer_ctx (v, ctx);
4761 		      gimplify_expr (&v, ilist, NULL, is_gimple_val,
4762 				     fb_rvalue);
4763 		      t = fold_build2_loc (clause_loc, PLUS_EXPR,
4764 					   TREE_TYPE (v), v,
4765 					   build_int_cst (TREE_TYPE (v), 1));
4766 		      t = fold_build2_loc (clause_loc, MULT_EXPR,
4767 					   TREE_TYPE (v), t,
4768 					   TYPE_SIZE_UNIT (TREE_TYPE (type)));
4769 		      x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (x), x, t);
4770 		    }
4771 		  cond = create_tmp_var (TREE_TYPE (x));
4772 		  gimplify_assign (cond, x, ilist);
4773 		  x = xv;
4774 		}
4775 	      else if (TREE_CONSTANT (v))
4776 		{
4777 		  x = create_tmp_var_raw (type, name);
4778 		  gimple_add_tmp_var (x);
4779 		  TREE_ADDRESSABLE (x) = 1;
4780 		  x = build_fold_addr_expr_loc (clause_loc, x);
4781 		}
4782 	      else
4783 		{
4784 		  tree atmp
4785 		    = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
4786 		  tree t = maybe_lookup_decl (v, ctx);
4787 		  if (t)
4788 		    v = t;
4789 		  else
4790 		    v = maybe_lookup_decl_in_outer_ctx (v, ctx);
4791 		  gimplify_expr (&v, ilist, NULL, is_gimple_val, fb_rvalue);
4792 		  t = fold_build2_loc (clause_loc, PLUS_EXPR,
4793 				       TREE_TYPE (v), v,
4794 				       build_int_cst (TREE_TYPE (v), 1));
4795 		  t = fold_build2_loc (clause_loc, MULT_EXPR,
4796 				       TREE_TYPE (v), t,
4797 				       TYPE_SIZE_UNIT (TREE_TYPE (type)));
4798 		  tree al = size_int (TYPE_ALIGN (TREE_TYPE (type)));
4799 		  x = build_call_expr_loc (clause_loc, atmp, 2, t, al);
4800 		}
4801 
4802 	      tree ptype = build_pointer_type (TREE_TYPE (type));
4803 	      x = fold_convert_loc (clause_loc, ptype, x);
4804 	      tree y = create_tmp_var (ptype, name);
4805 	      gimplify_assign (y, x, ilist);
4806 	      x = y;
4807 	      tree yb = y;
4808 
4809 	      if (!integer_zerop (bias))
4810 		{
4811 		  bias = fold_convert_loc (clause_loc, pointer_sized_int_node,
4812 					   bias);
4813 		  yb = fold_convert_loc (clause_loc, pointer_sized_int_node,
4814 					 x);
4815 		  yb = fold_build2_loc (clause_loc, MINUS_EXPR,
4816 					pointer_sized_int_node, yb, bias);
4817 		  x = fold_convert_loc (clause_loc, TREE_TYPE (x), yb);
4818 		  yb = create_tmp_var (ptype, name);
4819 		  gimplify_assign (yb, x, ilist);
4820 		  x = yb;
4821 		}
4822 
4823 	      d = TREE_OPERAND (d, 0);
4824 	      if (TREE_CODE (d) == POINTER_PLUS_EXPR)
4825 		d = TREE_OPERAND (d, 0);
4826 	      if (TREE_CODE (d) == ADDR_EXPR)
4827 		{
4828 		  if (orig_var != var)
4829 		    {
4830 		      gcc_assert (is_variable_sized (orig_var));
4831 		      x = fold_convert_loc (clause_loc, TREE_TYPE (new_var),
4832 					    x);
4833 		      gimplify_assign (new_var, x, ilist);
4834 		      tree new_orig_var = lookup_decl (orig_var, ctx);
4835 		      tree t = build_fold_indirect_ref (new_var);
4836 		      DECL_IGNORED_P (new_var) = 0;
4837 		      TREE_THIS_NOTRAP (t) = 1;
4838 		      SET_DECL_VALUE_EXPR (new_orig_var, t);
4839 		      DECL_HAS_VALUE_EXPR_P (new_orig_var) = 1;
4840 		    }
4841 		  else
4842 		    {
4843 		      x = build2 (MEM_REF, TREE_TYPE (new_var), x,
4844 				  build_int_cst (ptype, 0));
4845 		      SET_DECL_VALUE_EXPR (new_var, x);
4846 		      DECL_HAS_VALUE_EXPR_P (new_var) = 1;
4847 		    }
4848 		}
4849 	      else
4850 		{
4851 		  gcc_assert (orig_var == var);
4852 		  if (TREE_CODE (d) == INDIRECT_REF)
4853 		    {
4854 		      x = create_tmp_var (ptype, name);
4855 		      TREE_ADDRESSABLE (x) = 1;
4856 		      gimplify_assign (x, yb, ilist);
4857 		      x = build_fold_addr_expr_loc (clause_loc, x);
4858 		    }
4859 		  x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
4860 		  gimplify_assign (new_var, x, ilist);
4861 		}
4862 	      /* GOMP_taskgroup_reduction_register memsets the whole
4863 		 array to zero.  If the initializer is zero, we don't
4864 		 need to initialize it again, just mark it as ever
4865 		 used unconditionally, i.e. cond = true.  */
4866 	      if (cond
4867 		  && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) == NULL_TREE
4868 		  && initializer_zerop (omp_reduction_init (c,
4869 							    TREE_TYPE (type))))
4870 		{
4871 		  gimple *g = gimple_build_assign (build_simple_mem_ref (cond),
4872 						   boolean_true_node);
4873 		  gimple_seq_add_stmt (ilist, g);
4874 		  continue;
4875 		}
4876 	      tree end = create_artificial_label (UNKNOWN_LOCATION);
4877 	      if (cond)
4878 		{
4879 		  gimple *g;
4880 		  if (!is_parallel_ctx (ctx))
4881 		    {
4882 		      tree condv = create_tmp_var (boolean_type_node);
4883 		      g = gimple_build_assign (condv,
4884 					       build_simple_mem_ref (cond));
4885 		      gimple_seq_add_stmt (ilist, g);
4886 		      tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
4887 		      g = gimple_build_cond (NE_EXPR, condv,
4888 					     boolean_false_node, end, lab1);
4889 		      gimple_seq_add_stmt (ilist, g);
4890 		      gimple_seq_add_stmt (ilist, gimple_build_label (lab1));
4891 		    }
4892 		  g = gimple_build_assign (build_simple_mem_ref (cond),
4893 					   boolean_true_node);
4894 		  gimple_seq_add_stmt (ilist, g);
4895 		}
4896 
4897 	      tree y1 = create_tmp_var (ptype);
4898 	      gimplify_assign (y1, y, ilist);
4899 	      tree i2 = NULL_TREE, y2 = NULL_TREE;
4900 	      tree body2 = NULL_TREE, end2 = NULL_TREE;
4901 	      tree y3 = NULL_TREE, y4 = NULL_TREE;
4902 	      if (task_reduction_needs_orig_p)
4903 		{
4904 		  y3 = create_tmp_var (ptype);
4905 		  tree ref;
4906 		  if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
4907 		    ref = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
4908 				  size_int (task_reduction_cnt_full
4909 					    + task_reduction_cntorig - 1),
4910 				  NULL_TREE, NULL_TREE);
4911 		  else
4912 		    {
4913 		      unsigned int idx = *ctx->task_reduction_map->get (c);
4914 		      ref = task_reduction_read (ilist, tskred_temp, ptype,
4915 						 7 + 3 * idx);
4916 		    }
4917 		  gimplify_assign (y3, ref, ilist);
4918 		}
4919 	      else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) || is_simd)
4920 		{
4921 		  if (pass != 3)
4922 		    {
4923 		      y2 = create_tmp_var (ptype);
4924 		      gimplify_assign (y2, y, ilist);
4925 		    }
4926 		  if (is_simd || OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
4927 		    {
4928 		      tree ref = build_outer_var_ref (var, ctx);
4929 		      /* For ref build_outer_var_ref already performs this.  */
4930 		      if (TREE_CODE (d) == INDIRECT_REF)
4931 			gcc_assert (omp_is_reference (var));
4932 		      else if (TREE_CODE (d) == ADDR_EXPR)
4933 			ref = build_fold_addr_expr (ref);
4934 		      else if (omp_is_reference (var))
4935 			ref = build_fold_addr_expr (ref);
4936 		      ref = fold_convert_loc (clause_loc, ptype, ref);
4937 		      if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
4938 			  && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
4939 			{
4940 			  y3 = create_tmp_var (ptype);
4941 			  gimplify_assign (y3, unshare_expr (ref), ilist);
4942 			}
4943 		      if (is_simd)
4944 			{
4945 			  y4 = create_tmp_var (ptype);
4946 			  gimplify_assign (y4, ref, dlist);
4947 			}
4948 		    }
4949 		}
4950 	      tree i = create_tmp_var (TREE_TYPE (v));
4951 	      gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), ilist);
4952 	      tree body = create_artificial_label (UNKNOWN_LOCATION);
4953 	      gimple_seq_add_stmt (ilist, gimple_build_label (body));
4954 	      if (y2)
4955 		{
4956 		  i2 = create_tmp_var (TREE_TYPE (v));
4957 		  gimplify_assign (i2, build_int_cst (TREE_TYPE (v), 0), dlist);
4958 		  body2 = create_artificial_label (UNKNOWN_LOCATION);
4959 		  end2 = create_artificial_label (UNKNOWN_LOCATION);
4960 		  gimple_seq_add_stmt (dlist, gimple_build_label (body2));
4961 		}
4962 	      if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
4963 		{
4964 		  tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
4965 		  tree decl_placeholder
4966 		    = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
4967 		  SET_DECL_VALUE_EXPR (decl_placeholder,
4968 				       build_simple_mem_ref (y1));
4969 		  DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
4970 		  SET_DECL_VALUE_EXPR (placeholder,
4971 				       y3 ? build_simple_mem_ref (y3)
4972 				       : error_mark_node);
4973 		  DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
4974 		  x = lang_hooks.decls.omp_clause_default_ctor
4975 				(c, build_simple_mem_ref (y1),
4976 				 y3 ? build_simple_mem_ref (y3) : NULL_TREE);
4977 		  if (x)
4978 		    gimplify_and_add (x, ilist);
4979 		  if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
4980 		    {
4981 		      gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
4982 		      lower_omp (&tseq, ctx);
4983 		      gimple_seq_add_seq (ilist, tseq);
4984 		    }
4985 		  OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
4986 		  if (is_simd)
4987 		    {
4988 		      SET_DECL_VALUE_EXPR (decl_placeholder,
4989 					   build_simple_mem_ref (y2));
4990 		      SET_DECL_VALUE_EXPR (placeholder,
4991 					   build_simple_mem_ref (y4));
4992 		      gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
4993 		      lower_omp (&tseq, ctx);
4994 		      gimple_seq_add_seq (dlist, tseq);
4995 		      OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
4996 		    }
4997 		  DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
4998 		  DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 0;
4999 		  if (y2)
5000 		    {
5001 		      x = lang_hooks.decls.omp_clause_dtor
5002 						(c, build_simple_mem_ref (y2));
5003 		      if (x)
5004 			gimplify_and_add (x, dlist);
5005 		    }
5006 		}
5007 	      else
5008 		{
5009 		  x = omp_reduction_init (c, TREE_TYPE (type));
5010 		  enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
5011 
5012 		  /* reduction(-:var) sums up the partial results, so it
5013 		     acts identically to reduction(+:var).  */
5014 		  if (code == MINUS_EXPR)
5015 		    code = PLUS_EXPR;
5016 
5017 		  gimplify_assign (build_simple_mem_ref (y1), x, ilist);
5018 		  if (is_simd)
5019 		    {
5020 		      x = build2 (code, TREE_TYPE (type),
5021 				  build_simple_mem_ref (y4),
5022 				  build_simple_mem_ref (y2));
5023 		      gimplify_assign (build_simple_mem_ref (y4), x, dlist);
5024 		    }
5025 		}
5026 	      gimple *g
5027 		= gimple_build_assign (y1, POINTER_PLUS_EXPR, y1,
5028 				       TYPE_SIZE_UNIT (TREE_TYPE (type)));
5029 	      gimple_seq_add_stmt (ilist, g);
5030 	      if (y3)
5031 		{
5032 		  g = gimple_build_assign (y3, POINTER_PLUS_EXPR, y3,
5033 					   TYPE_SIZE_UNIT (TREE_TYPE (type)));
5034 		  gimple_seq_add_stmt (ilist, g);
5035 		}
5036 	      g = gimple_build_assign (i, PLUS_EXPR, i,
5037 				       build_int_cst (TREE_TYPE (i), 1));
5038 	      gimple_seq_add_stmt (ilist, g);
5039 	      g = gimple_build_cond (LE_EXPR, i, v, body, end);
5040 	      gimple_seq_add_stmt (ilist, g);
5041 	      gimple_seq_add_stmt (ilist, gimple_build_label (end));
5042 	      if (y2)
5043 		{
5044 		  g = gimple_build_assign (y2, POINTER_PLUS_EXPR, y2,
5045 					   TYPE_SIZE_UNIT (TREE_TYPE (type)));
5046 		  gimple_seq_add_stmt (dlist, g);
5047 		  if (y4)
5048 		    {
5049 		      g = gimple_build_assign
5050 					(y4, POINTER_PLUS_EXPR, y4,
5051 					 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5052 		      gimple_seq_add_stmt (dlist, g);
5053 		    }
5054 		  g = gimple_build_assign (i2, PLUS_EXPR, i2,
5055 					   build_int_cst (TREE_TYPE (i2), 1));
5056 		  gimple_seq_add_stmt (dlist, g);
5057 		  g = gimple_build_cond (LE_EXPR, i2, v, body2, end2);
5058 		  gimple_seq_add_stmt (dlist, g);
5059 		  gimple_seq_add_stmt (dlist, gimple_build_label (end2));
5060 		}
5061 	      continue;
5062 	    }
5063 	  else if (pass == 2)
5064 	    {
5065 	      if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
5066 		x = var;
5067 	      else
5068 		{
5069 		  bool by_ref = use_pointer_for_field (var, ctx);
5070 		  x = build_receiver_ref (var, by_ref, ctx);
5071 		}
5072 	      if (!omp_is_reference (var))
5073 		x = build_fold_addr_expr (x);
5074 	      x = fold_convert (ptr_type_node, x);
5075 	      unsigned cnt = task_reduction_cnt - 1;
5076 	      if (!task_reduction_needs_orig_p)
5077 		cnt += task_reduction_cntorig_full - task_reduction_cntorig;
5078 	      else
5079 		cnt = task_reduction_cntorig - 1;
5080 	      tree r = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5081 			       size_int (cnt), NULL_TREE, NULL_TREE);
5082 	      gimplify_assign (r, x, ilist);
5083 	      continue;
5084 	    }
5085 	  else if (pass == 3)
5086 	    {
5087 	      tree type = TREE_TYPE (new_var);
5088 	      if (!omp_is_reference (var))
5089 		type = build_pointer_type (type);
5090 	      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5091 		{
5092 		  unsigned cnt = task_reduction_cnt - 1;
5093 		  if (!task_reduction_needs_orig_p)
5094 		    cnt += (task_reduction_cntorig_full
5095 			    - task_reduction_cntorig);
5096 		  else
5097 		    cnt = task_reduction_cntorig - 1;
5098 		  x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5099 			      size_int (cnt), NULL_TREE, NULL_TREE);
5100 		}
5101 	      else
5102 		{
5103 		  unsigned int idx = *ctx->task_reduction_map->get (c);
5104 		  tree off;
5105 		  if (ctx->task_reductions[1 + idx])
5106 		    off = fold_convert (sizetype,
5107 					ctx->task_reductions[1 + idx]);
5108 		  else
5109 		    off = task_reduction_read (ilist, tskred_temp, sizetype,
5110 					       7 + 3 * idx + 1);
5111 		  x = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node,
5112 				   tskred_base, off);
5113 		}
5114 	      x = fold_convert (type, x);
5115 	      tree t;
5116 	      if (omp_is_reference (var))
5117 		{
5118 		  gimplify_assign (new_var, x, ilist);
5119 		  t = new_var;
5120 		  new_var = build_simple_mem_ref (new_var);
5121 		}
5122 	      else
5123 		{
5124 		  t = create_tmp_var (type);
5125 		  gimplify_assign (t, x, ilist);
5126 		  SET_DECL_VALUE_EXPR (new_var, build_simple_mem_ref (t));
5127 		  DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5128 		}
5129 	      t = fold_convert (build_pointer_type (boolean_type_node), t);
5130 	      t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t,
5131 			       TYPE_SIZE_UNIT (TREE_TYPE (type)));
5132 	      cond = create_tmp_var (TREE_TYPE (t));
5133 	      gimplify_assign (cond, t, ilist);
5134 	    }
5135 	  else if (is_variable_sized (var))
5136 	    {
5137 	      /* For variable sized types, we need to allocate the
5138 		 actual storage here.  Call alloca and store the
5139 		 result in the pointer decl that we created elsewhere.  */
5140 	      if (pass == 0)
5141 		continue;
5142 
5143 	      if (c_kind != OMP_CLAUSE_FIRSTPRIVATE || !is_task_ctx (ctx))
5144 		{
5145 		  gcall *stmt;
5146 		  tree tmp, atmp;
5147 
5148 		  ptr = DECL_VALUE_EXPR (new_var);
5149 		  gcc_assert (TREE_CODE (ptr) == INDIRECT_REF);
5150 		  ptr = TREE_OPERAND (ptr, 0);
5151 		  gcc_assert (DECL_P (ptr));
5152 		  x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
5153 
5154 		  /* void *tmp = __builtin_alloca */
5155 		  atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
5156 		  stmt = gimple_build_call (atmp, 2, x,
5157 					    size_int (DECL_ALIGN (var)));
5158 		  cfun->calls_alloca = 1;
5159 		  tmp = create_tmp_var_raw (ptr_type_node);
5160 		  gimple_add_tmp_var (tmp);
5161 		  gimple_call_set_lhs (stmt, tmp);
5162 
5163 		  gimple_seq_add_stmt (ilist, stmt);
5164 
5165 		  x = fold_convert_loc (clause_loc, TREE_TYPE (ptr), tmp);
5166 		  gimplify_assign (ptr, x, ilist);
5167 		}
5168 	    }
5169 	  else if (omp_is_reference (var)
5170 		   && (c_kind != OMP_CLAUSE_FIRSTPRIVATE
5171 		       || !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c)))
5172 	    {
5173 	      /* For references that are being privatized for Fortran,
5174 		 allocate new backing storage for the new pointer
5175 		 variable.  This allows us to avoid changing all the
5176 		 code that expects a pointer to something that expects
5177 		 a direct variable.  */
5178 	      if (pass == 0)
5179 		continue;
5180 
5181 	      x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
5182 	      if (c_kind == OMP_CLAUSE_FIRSTPRIVATE && is_task_ctx (ctx))
5183 		{
5184 		  x = build_receiver_ref (var, false, ctx);
5185 		  x = build_fold_addr_expr_loc (clause_loc, x);
5186 		}
5187 	      else if (TREE_CONSTANT (x))
5188 		{
5189 		  /* For reduction in SIMD loop, defer adding the
5190 		     initialization of the reference, because if we decide
5191 		     to use SIMD array for it, the initilization could cause
5192 		     expansion ICE.  Ditto for other privatization clauses.  */
5193 		  if (is_simd)
5194 		    x = NULL_TREE;
5195 		  else
5196 		    {
5197 		      x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
5198 					      get_name (var));
5199 		      gimple_add_tmp_var (x);
5200 		      TREE_ADDRESSABLE (x) = 1;
5201 		      x = build_fold_addr_expr_loc (clause_loc, x);
5202 		    }
5203 		}
5204 	      else
5205 		{
5206 		  tree atmp
5207 		    = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
5208 		  tree rtype = TREE_TYPE (TREE_TYPE (new_var));
5209 		  tree al = size_int (TYPE_ALIGN (rtype));
5210 		  x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
5211 		}
5212 
5213 	      if (x)
5214 		{
5215 		  x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
5216 		  gimplify_assign (new_var, x, ilist);
5217 		}
5218 
5219 	      new_var = build_simple_mem_ref_loc (clause_loc, new_var);
5220 	    }
5221 	  else if ((c_kind == OMP_CLAUSE_REDUCTION
5222 		    || c_kind == OMP_CLAUSE_IN_REDUCTION)
5223 		   && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5224 	    {
5225 	      if (pass == 0)
5226 		continue;
5227 	    }
5228 	  else if (pass != 0)
5229 	    continue;
5230 
5231 	  switch (OMP_CLAUSE_CODE (c))
5232 	    {
5233 	    case OMP_CLAUSE_SHARED:
5234 	      /* Ignore shared directives in teams construct inside
5235 		 target construct.  */
5236 	      if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
5237 		  && !is_host_teams_ctx (ctx))
5238 		continue;
5239 	      /* Shared global vars are just accessed directly.  */
5240 	      if (is_global_var (new_var))
5241 		break;
5242 	      /* For taskloop firstprivate/lastprivate, represented
5243 		 as firstprivate and shared clause on the task, new_var
5244 		 is the firstprivate var.  */
5245 	      if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
5246 		break;
5247 	      /* Set up the DECL_VALUE_EXPR for shared variables now.  This
5248 		 needs to be delayed until after fixup_child_record_type so
5249 		 that we get the correct type during the dereference.  */
5250 	      by_ref = use_pointer_for_field (var, ctx);
5251 	      x = build_receiver_ref (var, by_ref, ctx);
5252 	      SET_DECL_VALUE_EXPR (new_var, x);
5253 	      DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5254 
5255 	      /* ??? If VAR is not passed by reference, and the variable
5256 		 hasn't been initialized yet, then we'll get a warning for
5257 		 the store into the omp_data_s structure.  Ideally, we'd be
5258 		 able to notice this and not store anything at all, but
5259 		 we're generating code too early.  Suppress the warning.  */
5260 	      if (!by_ref)
5261 		TREE_NO_WARNING (var) = 1;
5262 	      break;
5263 
5264 	    case OMP_CLAUSE__CONDTEMP_:
5265 	      if (is_parallel_ctx (ctx))
5266 		{
5267 		  x = build_receiver_ref (var, false, ctx);
5268 		  SET_DECL_VALUE_EXPR (new_var, x);
5269 		  DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5270 		}
5271 	      else if (is_simd && !OMP_CLAUSE__CONDTEMP__ITER (c))
5272 		{
5273 		  x = build_zero_cst (TREE_TYPE (var));
5274 		  goto do_private;
5275 		}
5276 	      break;
5277 
5278 	    case OMP_CLAUSE_LASTPRIVATE:
5279 	      if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
5280 		break;
5281 	      /* FALLTHRU */
5282 
5283 	    case OMP_CLAUSE_PRIVATE:
5284 	      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE)
5285 		x = build_outer_var_ref (var, ctx);
5286 	      else if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
5287 		{
5288 		  if (is_task_ctx (ctx))
5289 		    x = build_receiver_ref (var, false, ctx);
5290 		  else
5291 		    x = build_outer_var_ref (var, ctx, OMP_CLAUSE_PRIVATE);
5292 		}
5293 	      else
5294 		x = NULL;
5295 	    do_private:
5296 	      tree nx;
5297 	      bool copy_ctor;
5298 	      copy_ctor = false;
5299 	      nx = unshare_expr (new_var);
5300 	      if (is_simd
5301 		  && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5302 		  && OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c))
5303 		copy_ctor = true;
5304 	      if (copy_ctor)
5305 		nx = lang_hooks.decls.omp_clause_copy_ctor (c, nx, x);
5306 	      else
5307 		nx = lang_hooks.decls.omp_clause_default_ctor (c, nx, x);
5308 	      if (is_simd)
5309 		{
5310 		  tree y = lang_hooks.decls.omp_clause_dtor (c, new_var);
5311 		  if ((TREE_ADDRESSABLE (new_var) || nx || y
5312 		       || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5313 			   && (gimple_omp_for_collapse (ctx->stmt) != 1
5314 			       || (gimple_omp_for_index (ctx->stmt, 0)
5315 				   != new_var)))
5316 		       || OMP_CLAUSE_CODE (c) == OMP_CLAUSE__CONDTEMP_
5317 		       || omp_is_reference (var))
5318 		      && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
5319 						       ivar, lvar))
5320 		    {
5321 		      if (omp_is_reference (var))
5322 			{
5323 			  gcc_assert (TREE_CODE (new_var) == MEM_REF);
5324 			  tree new_vard = TREE_OPERAND (new_var, 0);
5325 			  gcc_assert (DECL_P (new_vard));
5326 			  SET_DECL_VALUE_EXPR (new_vard,
5327 					       build_fold_addr_expr (lvar));
5328 			  DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
5329 			}
5330 
5331 		      if (nx)
5332 			{
5333 			  tree iv = unshare_expr (ivar);
5334 			  if (copy_ctor)
5335 			    x = lang_hooks.decls.omp_clause_copy_ctor (c, iv,
5336 								       x);
5337 			  else
5338 			    x = lang_hooks.decls.omp_clause_default_ctor (c,
5339 									  iv,
5340 									  x);
5341 			}
5342 		      else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE__CONDTEMP_)
5343 			{
5344 			  x = build2 (MODIFY_EXPR, TREE_TYPE (ivar),
5345 				      unshare_expr (ivar), x);
5346 			  nx = x;
5347 			}
5348 		      if (nx && x)
5349 			gimplify_and_add (x, &llist[0]);
5350 		      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5351 			  && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
5352 			{
5353 			  tree v = new_var;
5354 			  if (!DECL_P (v))
5355 			    {
5356 			      gcc_assert (TREE_CODE (v) == MEM_REF);
5357 			      v = TREE_OPERAND (v, 0);
5358 			      gcc_assert (DECL_P (v));
5359 			    }
5360 			  v = *ctx->lastprivate_conditional_map->get (v);
5361 			  tree t = create_tmp_var (TREE_TYPE (v));
5362 			  tree z = build_zero_cst (TREE_TYPE (v));
5363 			  tree orig_v
5364 			    = build_outer_var_ref (var, ctx,
5365 						   OMP_CLAUSE_LASTPRIVATE);
5366 			  gimple_seq_add_stmt (dlist,
5367 					       gimple_build_assign (t, z));
5368 			  gcc_assert (DECL_HAS_VALUE_EXPR_P (v));
5369 			  tree civar = DECL_VALUE_EXPR (v);
5370 			  gcc_assert (TREE_CODE (civar) == ARRAY_REF);
5371 			  civar = unshare_expr (civar);
5372 			  TREE_OPERAND (civar, 1) = sctx.idx;
5373 			  x = build2 (MODIFY_EXPR, TREE_TYPE (t), t,
5374 				      unshare_expr (civar));
5375 			  x = build2 (COMPOUND_EXPR, TREE_TYPE (orig_v), x,
5376 				      build2 (MODIFY_EXPR, TREE_TYPE (orig_v),
5377 					      orig_v, unshare_expr (ivar)));
5378 			  tree cond = build2 (LT_EXPR, boolean_type_node, t,
5379 					      civar);
5380 			  x = build3 (COND_EXPR, void_type_node, cond, x,
5381 				      void_node);
5382 			  gimple_seq tseq = NULL;
5383 			  gimplify_and_add (x, &tseq);
5384 			  if (ctx->outer)
5385 			    lower_omp (&tseq, ctx->outer);
5386 			  gimple_seq_add_seq (&llist[1], tseq);
5387 			}
5388 		      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5389 			  && ctx->for_simd_scan_phase)
5390 			{
5391 			  x = unshare_expr (ivar);
5392 			  tree orig_v
5393 			    = build_outer_var_ref (var, ctx,
5394 						   OMP_CLAUSE_LASTPRIVATE);
5395 			  x = lang_hooks.decls.omp_clause_assign_op (c, x,
5396 								     orig_v);
5397 			  gimplify_and_add (x, &llist[0]);
5398 			}
5399 		      if (y)
5400 			{
5401 			  y = lang_hooks.decls.omp_clause_dtor (c, ivar);
5402 			  if (y)
5403 			    gimplify_and_add (y, &llist[1]);
5404 			}
5405 		      break;
5406 		    }
5407 		  if (omp_is_reference (var))
5408 		    {
5409 		      gcc_assert (TREE_CODE (new_var) == MEM_REF);
5410 		      tree new_vard = TREE_OPERAND (new_var, 0);
5411 		      gcc_assert (DECL_P (new_vard));
5412 		      tree type = TREE_TYPE (TREE_TYPE (new_vard));
5413 		      x = TYPE_SIZE_UNIT (type);
5414 		      if (TREE_CONSTANT (x))
5415 			{
5416 			  x = create_tmp_var_raw (type, get_name (var));
5417 			  gimple_add_tmp_var (x);
5418 			  TREE_ADDRESSABLE (x) = 1;
5419 			  x = build_fold_addr_expr_loc (clause_loc, x);
5420 			  x = fold_convert_loc (clause_loc,
5421 						TREE_TYPE (new_vard), x);
5422 			  gimplify_assign (new_vard, x, ilist);
5423 			}
5424 		    }
5425 		}
5426 	      if (nx)
5427 		gimplify_and_add (nx, ilist);
5428 	      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5429 		  && is_simd
5430 		  && ctx->for_simd_scan_phase)
5431 		{
5432 		  tree orig_v = build_outer_var_ref (var, ctx,
5433 						     OMP_CLAUSE_LASTPRIVATE);
5434 		  x = lang_hooks.decls.omp_clause_assign_op (c, new_var,
5435 							     orig_v);
5436 		  gimplify_and_add (x, ilist);
5437 		}
5438 	      /* FALLTHRU */
5439 
5440 	    do_dtor:
5441 	      x = lang_hooks.decls.omp_clause_dtor (c, new_var);
5442 	      if (x)
5443 		gimplify_and_add (x, dlist);
5444 	      break;
5445 
5446 	    case OMP_CLAUSE_LINEAR:
5447 	      if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
5448 		goto do_firstprivate;
5449 	      if (OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
5450 		x = NULL;
5451 	      else
5452 		x = build_outer_var_ref (var, ctx);
5453 	      goto do_private;
5454 
5455 	    case OMP_CLAUSE_FIRSTPRIVATE:
5456 	      if (is_task_ctx (ctx))
5457 		{
5458 		  if ((omp_is_reference (var)
5459 		       && !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c))
5460 		      || is_variable_sized (var))
5461 		    goto do_dtor;
5462 		  else if (is_global_var (maybe_lookup_decl_in_outer_ctx (var,
5463 									  ctx))
5464 			   || use_pointer_for_field (var, NULL))
5465 		    {
5466 		      x = build_receiver_ref (var, false, ctx);
5467 		      SET_DECL_VALUE_EXPR (new_var, x);
5468 		      DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5469 		      goto do_dtor;
5470 		    }
5471 		}
5472 	      if (OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c)
5473 		  && omp_is_reference (var))
5474 		{
5475 		  x = build_outer_var_ref (var, ctx);
5476 		  gcc_assert (TREE_CODE (x) == MEM_REF
5477 			      && integer_zerop (TREE_OPERAND (x, 1)));
5478 		  x = TREE_OPERAND (x, 0);
5479 		  x = lang_hooks.decls.omp_clause_copy_ctor
5480 						(c, unshare_expr (new_var), x);
5481 		  gimplify_and_add (x, ilist);
5482 		  goto do_dtor;
5483 		}
5484 	    do_firstprivate:
5485 	      x = build_outer_var_ref (var, ctx);
5486 	      if (is_simd)
5487 		{
5488 		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
5489 		      && gimple_omp_for_combined_into_p (ctx->stmt))
5490 		    {
5491 		      tree t = OMP_CLAUSE_LINEAR_STEP (c);
5492 		      tree stept = TREE_TYPE (t);
5493 		      tree ct = omp_find_clause (clauses,
5494 						 OMP_CLAUSE__LOOPTEMP_);
5495 		      gcc_assert (ct);
5496 		      tree l = OMP_CLAUSE_DECL (ct);
5497 		      tree n1 = fd->loop.n1;
5498 		      tree step = fd->loop.step;
5499 		      tree itype = TREE_TYPE (l);
5500 		      if (POINTER_TYPE_P (itype))
5501 			itype = signed_type_for (itype);
5502 		      l = fold_build2 (MINUS_EXPR, itype, l, n1);
5503 		      if (TYPE_UNSIGNED (itype)
5504 			  && fd->loop.cond_code == GT_EXPR)
5505 			l = fold_build2 (TRUNC_DIV_EXPR, itype,
5506 					 fold_build1 (NEGATE_EXPR, itype, l),
5507 					 fold_build1 (NEGATE_EXPR,
5508 						      itype, step));
5509 		      else
5510 			l = fold_build2 (TRUNC_DIV_EXPR, itype, l, step);
5511 		      t = fold_build2 (MULT_EXPR, stept,
5512 				       fold_convert (stept, l), t);
5513 
5514 		      if (OMP_CLAUSE_LINEAR_ARRAY (c))
5515 			{
5516 			  if (omp_is_reference (var))
5517 			    {
5518 			      gcc_assert (TREE_CODE (new_var) == MEM_REF);
5519 			      tree new_vard = TREE_OPERAND (new_var, 0);
5520 			      gcc_assert (DECL_P (new_vard));
5521 			      tree type = TREE_TYPE (TREE_TYPE (new_vard));
5522 			      nx = TYPE_SIZE_UNIT (type);
5523 			      if (TREE_CONSTANT (nx))
5524 				{
5525 				  nx = create_tmp_var_raw (type,
5526 							   get_name (var));
5527 				  gimple_add_tmp_var (nx);
5528 				  TREE_ADDRESSABLE (nx) = 1;
5529 				  nx = build_fold_addr_expr_loc (clause_loc,
5530 								 nx);
5531 				  nx = fold_convert_loc (clause_loc,
5532 							 TREE_TYPE (new_vard),
5533 							 nx);
5534 				  gimplify_assign (new_vard, nx, ilist);
5535 				}
5536 			    }
5537 
5538 			  x = lang_hooks.decls.omp_clause_linear_ctor
5539 							(c, new_var, x, t);
5540 			  gimplify_and_add (x, ilist);
5541 			  goto do_dtor;
5542 			}
5543 
5544 		      if (POINTER_TYPE_P (TREE_TYPE (x)))
5545 			x = fold_build2 (POINTER_PLUS_EXPR,
5546 					 TREE_TYPE (x), x, t);
5547 		      else
5548 			x = fold_build2 (PLUS_EXPR, TREE_TYPE (x), x, t);
5549 		    }
5550 
5551 		  if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_LINEAR
5552 		       || TREE_ADDRESSABLE (new_var)
5553 		       || omp_is_reference (var))
5554 		      && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
5555 						       ivar, lvar))
5556 		    {
5557 		      if (omp_is_reference (var))
5558 			{
5559 			  gcc_assert (TREE_CODE (new_var) == MEM_REF);
5560 			  tree new_vard = TREE_OPERAND (new_var, 0);
5561 			  gcc_assert (DECL_P (new_vard));
5562 			  SET_DECL_VALUE_EXPR (new_vard,
5563 					       build_fold_addr_expr (lvar));
5564 			  DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
5565 			}
5566 		      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR)
5567 			{
5568 			  tree iv = create_tmp_var (TREE_TYPE (new_var));
5569 			  x = lang_hooks.decls.omp_clause_copy_ctor (c, iv, x);
5570 			  gimplify_and_add (x, ilist);
5571 			  gimple_stmt_iterator gsi
5572 			    = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
5573 			  gassign *g
5574 			    = gimple_build_assign (unshare_expr (lvar), iv);
5575 			  gsi_insert_before_without_update (&gsi, g,
5576 							    GSI_SAME_STMT);
5577 			  tree t = OMP_CLAUSE_LINEAR_STEP (c);
5578 			  enum tree_code code = PLUS_EXPR;
5579 			  if (POINTER_TYPE_P (TREE_TYPE (new_var)))
5580 			    code = POINTER_PLUS_EXPR;
5581 			  g = gimple_build_assign (iv, code, iv, t);
5582 			  gsi_insert_before_without_update (&gsi, g,
5583 							    GSI_SAME_STMT);
5584 			  break;
5585 			}
5586 		      x = lang_hooks.decls.omp_clause_copy_ctor
5587 						(c, unshare_expr (ivar), x);
5588 		      gimplify_and_add (x, &llist[0]);
5589 		      x = lang_hooks.decls.omp_clause_dtor (c, ivar);
5590 		      if (x)
5591 			gimplify_and_add (x, &llist[1]);
5592 		      break;
5593 		    }
5594 		  if (omp_is_reference (var))
5595 		    {
5596 		      gcc_assert (TREE_CODE (new_var) == MEM_REF);
5597 		      tree new_vard = TREE_OPERAND (new_var, 0);
5598 		      gcc_assert (DECL_P (new_vard));
5599 		      tree type = TREE_TYPE (TREE_TYPE (new_vard));
5600 		      nx = TYPE_SIZE_UNIT (type);
5601 		      if (TREE_CONSTANT (nx))
5602 			{
5603 			  nx = create_tmp_var_raw (type, get_name (var));
5604 			  gimple_add_tmp_var (nx);
5605 			  TREE_ADDRESSABLE (nx) = 1;
5606 			  nx = build_fold_addr_expr_loc (clause_loc, nx);
5607 			  nx = fold_convert_loc (clause_loc,
5608 						 TREE_TYPE (new_vard), nx);
5609 			  gimplify_assign (new_vard, nx, ilist);
5610 			}
5611 		    }
5612 		}
5613 	      x = lang_hooks.decls.omp_clause_copy_ctor
5614 						(c, unshare_expr (new_var), x);
5615 	      gimplify_and_add (x, ilist);
5616 	      goto do_dtor;
5617 
5618 	    case OMP_CLAUSE__LOOPTEMP_:
5619 	    case OMP_CLAUSE__REDUCTEMP_:
5620 	      gcc_assert (is_taskreg_ctx (ctx));
5621 	      x = build_outer_var_ref (var, ctx);
5622 	      x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
5623 	      gimplify_and_add (x, ilist);
5624 	      break;
5625 
5626 	    case OMP_CLAUSE_COPYIN:
5627 	      by_ref = use_pointer_for_field (var, NULL);
5628 	      x = build_receiver_ref (var, by_ref, ctx);
5629 	      x = lang_hooks.decls.omp_clause_assign_op (c, new_var, x);
5630 	      append_to_statement_list (x, &copyin_seq);
5631 	      copyin_by_ref |= by_ref;
5632 	      break;
5633 
5634 	    case OMP_CLAUSE_REDUCTION:
5635 	    case OMP_CLAUSE_IN_REDUCTION:
5636 	      /* OpenACC reductions are initialized using the
5637 		 GOACC_REDUCTION internal function.  */
5638 	      if (is_gimple_omp_oacc (ctx->stmt))
5639 		break;
5640 	      if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5641 		{
5642 		  tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
5643 		  gimple *tseq;
5644 		  tree ptype = TREE_TYPE (placeholder);
5645 		  if (cond)
5646 		    {
5647 		      x = error_mark_node;
5648 		      if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c)
5649 			  && !task_reduction_needs_orig_p)
5650 			x = var;
5651 		      else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
5652 			{
5653 			  tree pptype = build_pointer_type (ptype);
5654 			  if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5655 			    x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5656 					size_int (task_reduction_cnt_full
5657 						  + task_reduction_cntorig - 1),
5658 					NULL_TREE, NULL_TREE);
5659 			  else
5660 			    {
5661 			      unsigned int idx
5662 				= *ctx->task_reduction_map->get (c);
5663 			      x = task_reduction_read (ilist, tskred_temp,
5664 						       pptype, 7 + 3 * idx);
5665 			    }
5666 			  x = fold_convert (pptype, x);
5667 			  x = build_simple_mem_ref (x);
5668 			}
5669 		    }
5670 		  else
5671 		    {
5672 		      x = build_outer_var_ref (var, ctx);
5673 
5674 		      if (omp_is_reference (var)
5675 			  && !useless_type_conversion_p (ptype, TREE_TYPE (x)))
5676 			x = build_fold_addr_expr_loc (clause_loc, x);
5677 		    }
5678 		  SET_DECL_VALUE_EXPR (placeholder, x);
5679 		  DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
5680 		  tree new_vard = new_var;
5681 		  if (omp_is_reference (var))
5682 		    {
5683 		      gcc_assert (TREE_CODE (new_var) == MEM_REF);
5684 		      new_vard = TREE_OPERAND (new_var, 0);
5685 		      gcc_assert (DECL_P (new_vard));
5686 		    }
5687 		  tree rvar = NULL_TREE, *rvarp = NULL, rvar2 = NULL_TREE;
5688 		  if (is_simd
5689 		      && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
5690 		      && OMP_CLAUSE_REDUCTION_INSCAN (c))
5691 		    rvarp = &rvar;
5692 		  if (is_simd
5693 		      && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
5694 						       ivar, lvar, rvarp,
5695 						       &rvar2))
5696 		    {
5697 		      if (new_vard == new_var)
5698 			{
5699 			  gcc_assert (DECL_VALUE_EXPR (new_var) == lvar);
5700 			  SET_DECL_VALUE_EXPR (new_var, ivar);
5701 			}
5702 		      else
5703 			{
5704 			  SET_DECL_VALUE_EXPR (new_vard,
5705 					       build_fold_addr_expr (ivar));
5706 			  DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
5707 			}
5708 		      x = lang_hooks.decls.omp_clause_default_ctor
5709 				(c, unshare_expr (ivar),
5710 				 build_outer_var_ref (var, ctx));
5711 		      if (rvarp && ctx->for_simd_scan_phase)
5712 			{
5713 			  if (x)
5714 			    gimplify_and_add (x, &llist[0]);
5715 			  x = lang_hooks.decls.omp_clause_dtor (c, ivar);
5716 			  if (x)
5717 			    gimplify_and_add (x, &llist[1]);
5718 			  break;
5719 			}
5720 		      else if (rvarp)
5721 			{
5722 			  if (x)
5723 			    {
5724 			      gimplify_and_add (x, &llist[0]);
5725 
5726 			      tree ivar2 = unshare_expr (lvar);
5727 			      TREE_OPERAND (ivar2, 1) = sctx.idx;
5728 			      x = lang_hooks.decls.omp_clause_default_ctor
5729 				    (c, ivar2, build_outer_var_ref (var, ctx));
5730 			      gimplify_and_add (x, &llist[0]);
5731 
5732 			      if (rvar2)
5733 				{
5734 				  x = lang_hooks.decls.omp_clause_default_ctor
5735 					(c, unshare_expr (rvar2),
5736 					 build_outer_var_ref (var, ctx));
5737 				  gimplify_and_add (x, &llist[0]);
5738 				}
5739 
5740 			      /* For types that need construction, add another
5741 				 private var which will be default constructed
5742 				 and optionally initialized with
5743 				 OMP_CLAUSE_REDUCTION_GIMPLE_INIT, as in the
5744 				 loop we want to assign this value instead of
5745 				 constructing and destructing it in each
5746 				 iteration.  */
5747 			      tree nv = create_tmp_var_raw (TREE_TYPE (ivar));
5748 			      gimple_add_tmp_var (nv);
5749 			      ctx->cb.decl_map->put (TREE_OPERAND (rvar2
5750 								   ? rvar2
5751 								   : ivar, 0),
5752 						     nv);
5753 			      x = lang_hooks.decls.omp_clause_default_ctor
5754 				    (c, nv, build_outer_var_ref (var, ctx));
5755 			      gimplify_and_add (x, ilist);
5756 
5757 			      if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
5758 				{
5759 				  tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
5760 				  x = DECL_VALUE_EXPR (new_vard);
5761 				  tree vexpr = nv;
5762 				  if (new_vard != new_var)
5763 				    vexpr = build_fold_addr_expr (nv);
5764 				  SET_DECL_VALUE_EXPR (new_vard, vexpr);
5765 				  lower_omp (&tseq, ctx);
5766 				  SET_DECL_VALUE_EXPR (new_vard, x);
5767 				  gimple_seq_add_seq (ilist, tseq);
5768 				  OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
5769 				}
5770 
5771 			      x = lang_hooks.decls.omp_clause_dtor (c, nv);
5772 			      if (x)
5773 				gimplify_and_add (x, dlist);
5774 			    }
5775 
5776 			  tree ref = build_outer_var_ref (var, ctx);
5777 			  x = unshare_expr (ivar);
5778 			  x = lang_hooks.decls.omp_clause_assign_op (c, x,
5779 								     ref);
5780 			  gimplify_and_add (x, &llist[0]);
5781 
5782 			  ref = build_outer_var_ref (var, ctx);
5783 			  x = lang_hooks.decls.omp_clause_assign_op (c, ref,
5784 								     rvar);
5785 			  gimplify_and_add (x, &llist[3]);
5786 
5787 			  DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
5788 			  if (new_vard == new_var)
5789 			    SET_DECL_VALUE_EXPR (new_var, lvar);
5790 			  else
5791 			    SET_DECL_VALUE_EXPR (new_vard,
5792 						 build_fold_addr_expr (lvar));
5793 
5794 			  x = lang_hooks.decls.omp_clause_dtor (c, ivar);
5795 			  if (x)
5796 			    gimplify_and_add (x, &llist[1]);
5797 
5798 			  tree ivar2 = unshare_expr (lvar);
5799 			  TREE_OPERAND (ivar2, 1) = sctx.idx;
5800 			  x = lang_hooks.decls.omp_clause_dtor (c, ivar2);
5801 			  if (x)
5802 			    gimplify_and_add (x, &llist[1]);
5803 
5804 			  if (rvar2)
5805 			    {
5806 			      x = lang_hooks.decls.omp_clause_dtor (c, rvar2);
5807 			      if (x)
5808 				gimplify_and_add (x, &llist[1]);
5809 			    }
5810 			  break;
5811 			}
5812 		      if (x)
5813 			gimplify_and_add (x, &llist[0]);
5814 		      if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
5815 			{
5816 			  tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
5817 			  lower_omp (&tseq, ctx);
5818 			  gimple_seq_add_seq (&llist[0], tseq);
5819 			}
5820 		      OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
5821 		      tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
5822 		      lower_omp (&tseq, ctx);
5823 		      gimple_seq_add_seq (&llist[1], tseq);
5824 		      OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5825 		      DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
5826 		      if (new_vard == new_var)
5827 			SET_DECL_VALUE_EXPR (new_var, lvar);
5828 		      else
5829 			SET_DECL_VALUE_EXPR (new_vard,
5830 					     build_fold_addr_expr (lvar));
5831 		      x = lang_hooks.decls.omp_clause_dtor (c, ivar);
5832 		      if (x)
5833 			gimplify_and_add (x, &llist[1]);
5834 		      break;
5835 		    }
5836 		  /* If this is a reference to constant size reduction var
5837 		     with placeholder, we haven't emitted the initializer
5838 		     for it because it is undesirable if SIMD arrays are used.
5839 		     But if they aren't used, we need to emit the deferred
5840 		     initialization now.  */
5841 		  else if (omp_is_reference (var) && is_simd)
5842 		    handle_simd_reference (clause_loc, new_vard, ilist);
5843 
5844 		  tree lab2 = NULL_TREE;
5845 		  if (cond)
5846 		    {
5847 		      gimple *g;
5848 		      if (!is_parallel_ctx (ctx))
5849 			{
5850 			  tree condv = create_tmp_var (boolean_type_node);
5851 			  tree m = build_simple_mem_ref (cond);
5852 			  g = gimple_build_assign (condv, m);
5853 			  gimple_seq_add_stmt (ilist, g);
5854 			  tree lab1
5855 			    = create_artificial_label (UNKNOWN_LOCATION);
5856 			  lab2 = create_artificial_label (UNKNOWN_LOCATION);
5857 			  g = gimple_build_cond (NE_EXPR, condv,
5858 						 boolean_false_node,
5859 						 lab2, lab1);
5860 			  gimple_seq_add_stmt (ilist, g);
5861 			  gimple_seq_add_stmt (ilist,
5862 					       gimple_build_label (lab1));
5863 			}
5864 		      g = gimple_build_assign (build_simple_mem_ref (cond),
5865 					       boolean_true_node);
5866 		      gimple_seq_add_stmt (ilist, g);
5867 		    }
5868 		  x = lang_hooks.decls.omp_clause_default_ctor
5869 				(c, unshare_expr (new_var),
5870 				 cond ? NULL_TREE
5871 				 : build_outer_var_ref (var, ctx));
5872 		  if (x)
5873 		    gimplify_and_add (x, ilist);
5874 
5875 		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
5876 		      && OMP_CLAUSE_REDUCTION_INSCAN (c))
5877 		    {
5878 		      if (ctx->for_simd_scan_phase)
5879 			goto do_dtor;
5880 		      if (x || (!is_simd
5881 				&& OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c)))
5882 			{
5883 			  tree nv = create_tmp_var_raw (TREE_TYPE (new_var));
5884 			  gimple_add_tmp_var (nv);
5885 			  ctx->cb.decl_map->put (new_vard, nv);
5886 			  x = lang_hooks.decls.omp_clause_default_ctor
5887 				(c, nv, build_outer_var_ref (var, ctx));
5888 			  if (x)
5889 			    gimplify_and_add (x, ilist);
5890 			  if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
5891 			    {
5892 			      tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
5893 			      tree vexpr = nv;
5894 			      if (new_vard != new_var)
5895 				vexpr = build_fold_addr_expr (nv);
5896 			      SET_DECL_VALUE_EXPR (new_vard, vexpr);
5897 			      DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
5898 			      lower_omp (&tseq, ctx);
5899 			      SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
5900 			      DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
5901 			      gimple_seq_add_seq (ilist, tseq);
5902 			    }
5903 			  OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
5904 			  if (is_simd && ctx->scan_exclusive)
5905 			    {
5906 			      tree nv2
5907 				= create_tmp_var_raw (TREE_TYPE (new_var));
5908 			      gimple_add_tmp_var (nv2);
5909 			      ctx->cb.decl_map->put (nv, nv2);
5910 			      x = lang_hooks.decls.omp_clause_default_ctor
5911 				    (c, nv2, build_outer_var_ref (var, ctx));
5912 			      gimplify_and_add (x, ilist);
5913 			      x = lang_hooks.decls.omp_clause_dtor (c, nv2);
5914 			      if (x)
5915 				gimplify_and_add (x, dlist);
5916 			    }
5917 			  x = lang_hooks.decls.omp_clause_dtor (c, nv);
5918 			  if (x)
5919 			    gimplify_and_add (x, dlist);
5920 			}
5921 		      else if (is_simd
5922 			       && ctx->scan_exclusive
5923 			       && TREE_ADDRESSABLE (TREE_TYPE (new_var)))
5924 			{
5925 			  tree nv2 = create_tmp_var_raw (TREE_TYPE (new_var));
5926 			  gimple_add_tmp_var (nv2);
5927 			  ctx->cb.decl_map->put (new_vard, nv2);
5928 			  x = lang_hooks.decls.omp_clause_dtor (c, nv2);
5929 			  if (x)
5930 			    gimplify_and_add (x, dlist);
5931 			}
5932 		      DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
5933 		      goto do_dtor;
5934 		    }
5935 
5936 		  if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
5937 		    {
5938 		      tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
5939 		      lower_omp (&tseq, ctx);
5940 		      gimple_seq_add_seq (ilist, tseq);
5941 		    }
5942 		  OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
5943 		  if (is_simd)
5944 		    {
5945 		      tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
5946 		      lower_omp (&tseq, ctx);
5947 		      gimple_seq_add_seq (dlist, tseq);
5948 		      OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5949 		    }
5950 		  DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
5951 		  if (cond)
5952 		    {
5953 		      if (lab2)
5954 			gimple_seq_add_stmt (ilist, gimple_build_label (lab2));
5955 		      break;
5956 		    }
5957 		  goto do_dtor;
5958 		}
5959 	      else
5960 		{
5961 		  x = omp_reduction_init (c, TREE_TYPE (new_var));
5962 		  gcc_assert (TREE_CODE (TREE_TYPE (new_var)) != ARRAY_TYPE);
5963 		  enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
5964 
5965 		  if (cond)
5966 		    {
5967 		      gimple *g;
5968 		      tree lab2 = NULL_TREE;
5969 		      /* GOMP_taskgroup_reduction_register memsets the whole
5970 			 array to zero.  If the initializer is zero, we don't
5971 			 need to initialize it again, just mark it as ever
5972 			 used unconditionally, i.e. cond = true.  */
5973 		      if (initializer_zerop (x))
5974 			{
5975 			  g = gimple_build_assign (build_simple_mem_ref (cond),
5976 						   boolean_true_node);
5977 			  gimple_seq_add_stmt (ilist, g);
5978 			  break;
5979 			}
5980 
5981 		      /* Otherwise, emit
5982 			 if (!cond) { cond = true; new_var = x; }  */
5983 		      if (!is_parallel_ctx (ctx))
5984 			{
5985 			  tree condv = create_tmp_var (boolean_type_node);
5986 			  tree m = build_simple_mem_ref (cond);
5987 			  g = gimple_build_assign (condv, m);
5988 			  gimple_seq_add_stmt (ilist, g);
5989 			  tree lab1
5990 			    = create_artificial_label (UNKNOWN_LOCATION);
5991 			  lab2 = create_artificial_label (UNKNOWN_LOCATION);
5992 			  g = gimple_build_cond (NE_EXPR, condv,
5993 						 boolean_false_node,
5994 						 lab2, lab1);
5995 			  gimple_seq_add_stmt (ilist, g);
5996 			  gimple_seq_add_stmt (ilist,
5997 					       gimple_build_label (lab1));
5998 			}
5999 		      g = gimple_build_assign (build_simple_mem_ref (cond),
6000 					       boolean_true_node);
6001 		      gimple_seq_add_stmt (ilist, g);
6002 		      gimplify_assign (new_var, x, ilist);
6003 		      if (lab2)
6004 			gimple_seq_add_stmt (ilist, gimple_build_label (lab2));
6005 		      break;
6006 		    }
6007 
6008 		  /* reduction(-:var) sums up the partial results, so it
6009 		     acts identically to reduction(+:var).  */
6010 		  if (code == MINUS_EXPR)
6011 		    code = PLUS_EXPR;
6012 
6013 		  bool is_truth_op
6014 		    = (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR);
6015 		  tree new_vard = new_var;
6016 		  if (is_simd && omp_is_reference (var))
6017 		    {
6018 		      gcc_assert (TREE_CODE (new_var) == MEM_REF);
6019 		      new_vard = TREE_OPERAND (new_var, 0);
6020 		      gcc_assert (DECL_P (new_vard));
6021 		    }
6022 		  tree rvar = NULL_TREE, *rvarp = NULL, rvar2 = NULL_TREE;
6023 		  if (is_simd
6024 		      && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6025 		      && OMP_CLAUSE_REDUCTION_INSCAN (c))
6026 		    rvarp = &rvar;
6027 		  if (is_simd
6028 		      && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
6029 						       ivar, lvar, rvarp,
6030 						       &rvar2))
6031 		    {
6032 		      if (new_vard != new_var)
6033 			{
6034 			  SET_DECL_VALUE_EXPR (new_vard,
6035 					       build_fold_addr_expr (lvar));
6036 			  DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
6037 			}
6038 
6039 		      tree ref = build_outer_var_ref (var, ctx);
6040 
6041 		      if (rvarp)
6042 			{
6043 			  if (ctx->for_simd_scan_phase)
6044 			    break;
6045 			  gimplify_assign (ivar, ref, &llist[0]);
6046 			  ref = build_outer_var_ref (var, ctx);
6047 			  gimplify_assign (ref, rvar, &llist[3]);
6048 			  break;
6049 			}
6050 
6051 		      gimplify_assign (unshare_expr (ivar), x, &llist[0]);
6052 
6053 		      if (sctx.is_simt)
6054 			{
6055 			  if (!simt_lane)
6056 			    simt_lane = create_tmp_var (unsigned_type_node);
6057 			  x = build_call_expr_internal_loc
6058 			    (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_BFLY,
6059 			     TREE_TYPE (ivar), 2, ivar, simt_lane);
6060 			  x = build2 (code, TREE_TYPE (ivar), ivar, x);
6061 			  gimplify_assign (ivar, x, &llist[2]);
6062 			}
6063 		      tree ivar2 = ivar;
6064 		      tree ref2 = ref;
6065 		      if (is_truth_op)
6066 			{
6067 			  tree zero = build_zero_cst (TREE_TYPE (ivar));
6068 			  ivar2 = fold_build2_loc (clause_loc, NE_EXPR,
6069 						   boolean_type_node, ivar,
6070 						   zero);
6071 			  ref2 = fold_build2_loc (clause_loc, NE_EXPR,
6072 						  boolean_type_node, ref,
6073 						  zero);
6074 			}
6075 		      x = build2 (code, TREE_TYPE (ref), ref2, ivar2);
6076 		      if (is_truth_op)
6077 			x = fold_convert (TREE_TYPE (ref), x);
6078 		      ref = build_outer_var_ref (var, ctx);
6079 		      gimplify_assign (ref, x, &llist[1]);
6080 
6081 		    }
6082 		  else
6083 		    {
6084 		      if (omp_is_reference (var) && is_simd)
6085 			handle_simd_reference (clause_loc, new_vard, ilist);
6086 		      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6087 			  && OMP_CLAUSE_REDUCTION_INSCAN (c))
6088 			break;
6089 		      gimplify_assign (new_var, x, ilist);
6090 		      if (is_simd)
6091 			{
6092 			  tree ref = build_outer_var_ref (var, ctx);
6093 			  tree new_var2 = new_var;
6094 			  tree ref2 = ref;
6095 			  if (is_truth_op)
6096 			    {
6097 			      tree zero = build_zero_cst (TREE_TYPE (new_var));
6098 			      new_var2
6099 				= fold_build2_loc (clause_loc, NE_EXPR,
6100 						   boolean_type_node, new_var,
6101 						   zero);
6102 			      ref2 = fold_build2_loc (clause_loc, NE_EXPR,
6103 						      boolean_type_node, ref,
6104 						      zero);
6105 			    }
6106 			  x = build2 (code, TREE_TYPE (ref2), ref2, new_var2);
6107 			  if (is_truth_op)
6108 			    x = fold_convert (TREE_TYPE (new_var), x);
6109 			  ref = build_outer_var_ref (var, ctx);
6110 			  gimplify_assign (ref, x, dlist);
6111 			}
6112 		    }
6113 		}
6114 	      break;
6115 
6116 	    default:
6117 	      gcc_unreachable ();
6118 	    }
6119 	}
6120     }
6121   if (tskred_avar)
6122     {
6123       tree clobber = build_clobber (TREE_TYPE (tskred_avar));
6124       gimple_seq_add_stmt (ilist, gimple_build_assign (tskred_avar, clobber));
6125     }
6126 
6127   if (known_eq (sctx.max_vf, 1U))
6128     {
6129       sctx.is_simt = false;
6130       if (ctx->lastprivate_conditional_map)
6131 	{
6132 	  if (gimple_omp_for_combined_into_p (ctx->stmt))
6133 	    {
6134 	      /* Signal to lower_omp_1 that it should use parent context.  */
6135 	      ctx->combined_into_simd_safelen1 = true;
6136 	      for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
6137 		if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6138 		    && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
6139 		  {
6140 		    tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
6141 		    omp_context *outer = ctx->outer;
6142 		    if (gimple_code (outer->stmt) == GIMPLE_OMP_SCAN)
6143 		      outer = outer->outer;
6144 		    tree *v = ctx->lastprivate_conditional_map->get (o);
6145 		    tree po = lookup_decl (OMP_CLAUSE_DECL (c), outer);
6146 		    tree *pv = outer->lastprivate_conditional_map->get (po);
6147 		    *v = *pv;
6148 		  }
6149 	    }
6150 	  else
6151 	    {
6152 	      /* When not vectorized, treat lastprivate(conditional:) like
6153 		 normal lastprivate, as there will be just one simd lane
6154 		 writing the privatized variable.  */
6155 	      delete ctx->lastprivate_conditional_map;
6156 	      ctx->lastprivate_conditional_map = NULL;
6157 	    }
6158 	}
6159     }
6160 
6161   if (nonconst_simd_if)
6162     {
6163       if (sctx.lane == NULL_TREE)
6164 	{
6165 	  sctx.idx = create_tmp_var (unsigned_type_node);
6166 	  sctx.lane = create_tmp_var (unsigned_type_node);
6167 	}
6168       /* FIXME: For now.  */
6169       sctx.is_simt = false;
6170     }
6171 
6172   if (sctx.lane || sctx.is_simt)
6173     {
6174       uid = create_tmp_var (ptr_type_node, "simduid");
6175       /* Don't want uninit warnings on simduid, it is always uninitialized,
6176 	 but we use it not for the value, but for the DECL_UID only.  */
6177       TREE_NO_WARNING (uid) = 1;
6178       c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SIMDUID_);
6179       OMP_CLAUSE__SIMDUID__DECL (c) = uid;
6180       OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
6181       gimple_omp_for_set_clauses (ctx->stmt, c);
6182     }
6183   /* Emit calls denoting privatized variables and initializing a pointer to
6184      structure that holds private variables as fields after ompdevlow pass.  */
6185   if (sctx.is_simt)
6186     {
6187       sctx.simt_eargs[0] = uid;
6188       gimple *g
6189 	= gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, sctx.simt_eargs);
6190       gimple_call_set_lhs (g, uid);
6191       gimple_seq_add_stmt (ilist, g);
6192       sctx.simt_eargs.release ();
6193 
6194       simtrec = create_tmp_var (ptr_type_node, ".omp_simt");
6195       g = gimple_build_call_internal (IFN_GOMP_SIMT_ENTER_ALLOC, 1, uid);
6196       gimple_call_set_lhs (g, simtrec);
6197       gimple_seq_add_stmt (ilist, g);
6198     }
6199   if (sctx.lane)
6200     {
6201       gimple *g = gimple_build_call_internal (IFN_GOMP_SIMD_LANE,
6202 					      2 + (nonconst_simd_if != NULL),
6203 					      uid, integer_zero_node,
6204 					      nonconst_simd_if);
6205       gimple_call_set_lhs (g, sctx.lane);
6206       gimple_stmt_iterator gsi = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
6207       gsi_insert_before_without_update (&gsi, g, GSI_SAME_STMT);
6208       g = gimple_build_assign (sctx.lane, INTEGER_CST,
6209 			       build_int_cst (unsigned_type_node, 0));
6210       gimple_seq_add_stmt (ilist, g);
6211       if (sctx.lastlane)
6212 	{
6213 	  g = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
6214 					  2, uid, sctx.lane);
6215 	  gimple_call_set_lhs (g, sctx.lastlane);
6216 	  gimple_seq_add_stmt (dlist, g);
6217 	  gimple_seq_add_seq (dlist, llist[3]);
6218 	}
6219       /* Emit reductions across SIMT lanes in log_2(simt_vf) steps.  */
6220       if (llist[2])
6221 	{
6222 	  tree simt_vf = create_tmp_var (unsigned_type_node);
6223 	  g = gimple_build_call_internal (IFN_GOMP_SIMT_VF, 0);
6224 	  gimple_call_set_lhs (g, simt_vf);
6225 	  gimple_seq_add_stmt (dlist, g);
6226 
6227 	  tree t = build_int_cst (unsigned_type_node, 1);
6228 	  g = gimple_build_assign (simt_lane, INTEGER_CST, t);
6229 	  gimple_seq_add_stmt (dlist, g);
6230 
6231 	  t = build_int_cst (unsigned_type_node, 0);
6232 	  g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
6233 	  gimple_seq_add_stmt (dlist, g);
6234 
6235 	  tree body = create_artificial_label (UNKNOWN_LOCATION);
6236 	  tree header = create_artificial_label (UNKNOWN_LOCATION);
6237 	  tree end = create_artificial_label (UNKNOWN_LOCATION);
6238 	  gimple_seq_add_stmt (dlist, gimple_build_goto (header));
6239 	  gimple_seq_add_stmt (dlist, gimple_build_label (body));
6240 
6241 	  gimple_seq_add_seq (dlist, llist[2]);
6242 
6243 	  g = gimple_build_assign (simt_lane, LSHIFT_EXPR, simt_lane, integer_one_node);
6244 	  gimple_seq_add_stmt (dlist, g);
6245 
6246 	  gimple_seq_add_stmt (dlist, gimple_build_label (header));
6247 	  g = gimple_build_cond (LT_EXPR, simt_lane, simt_vf, body, end);
6248 	  gimple_seq_add_stmt (dlist, g);
6249 
6250 	  gimple_seq_add_stmt (dlist, gimple_build_label (end));
6251 	}
6252       for (int i = 0; i < 2; i++)
6253 	if (llist[i])
6254 	  {
6255 	    tree vf = create_tmp_var (unsigned_type_node);
6256 	    g = gimple_build_call_internal (IFN_GOMP_SIMD_VF, 1, uid);
6257 	    gimple_call_set_lhs (g, vf);
6258 	    gimple_seq *seq = i == 0 ? ilist : dlist;
6259 	    gimple_seq_add_stmt (seq, g);
6260 	    tree t = build_int_cst (unsigned_type_node, 0);
6261 	    g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
6262 	    gimple_seq_add_stmt (seq, g);
6263 	    tree body = create_artificial_label (UNKNOWN_LOCATION);
6264 	    tree header = create_artificial_label (UNKNOWN_LOCATION);
6265 	    tree end = create_artificial_label (UNKNOWN_LOCATION);
6266 	    gimple_seq_add_stmt (seq, gimple_build_goto (header));
6267 	    gimple_seq_add_stmt (seq, gimple_build_label (body));
6268 	    gimple_seq_add_seq (seq, llist[i]);
6269 	    t = build_int_cst (unsigned_type_node, 1);
6270 	    g = gimple_build_assign (sctx.idx, PLUS_EXPR, sctx.idx, t);
6271 	    gimple_seq_add_stmt (seq, g);
6272 	    gimple_seq_add_stmt (seq, gimple_build_label (header));
6273 	    g = gimple_build_cond (LT_EXPR, sctx.idx, vf, body, end);
6274 	    gimple_seq_add_stmt (seq, g);
6275 	    gimple_seq_add_stmt (seq, gimple_build_label (end));
6276 	  }
6277     }
6278   if (sctx.is_simt)
6279     {
6280       gimple_seq_add_seq (dlist, sctx.simt_dlist);
6281       gimple *g
6282 	= gimple_build_call_internal (IFN_GOMP_SIMT_EXIT, 1, simtrec);
6283       gimple_seq_add_stmt (dlist, g);
6284     }
6285 
6286   /* The copyin sequence is not to be executed by the main thread, since
6287      that would result in self-copies.  Perhaps not visible to scalars,
6288      but it certainly is to C++ operator=.  */
6289   if (copyin_seq)
6290     {
6291       x = build_call_expr (builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM),
6292 			   0);
6293       x = build2 (NE_EXPR, boolean_type_node, x,
6294 		  build_int_cst (TREE_TYPE (x), 0));
6295       x = build3 (COND_EXPR, void_type_node, x, copyin_seq, NULL);
6296       gimplify_and_add (x, ilist);
6297     }
6298 
6299   /* If any copyin variable is passed by reference, we must ensure the
6300      master thread doesn't modify it before it is copied over in all
6301      threads.  Similarly for variables in both firstprivate and
6302      lastprivate clauses we need to ensure the lastprivate copying
6303      happens after firstprivate copying in all threads.  And similarly
6304      for UDRs if initializer expression refers to omp_orig.  */
6305   if (copyin_by_ref || lastprivate_firstprivate
6306       || (reduction_omp_orig_ref
6307 	  && !ctx->scan_inclusive
6308 	  && !ctx->scan_exclusive))
6309     {
6310       /* Don't add any barrier for #pragma omp simd or
6311 	 #pragma omp distribute.  */
6312       if (!is_task_ctx (ctx)
6313 	  && (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
6314 	      || gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_FOR))
6315 	gimple_seq_add_stmt (ilist, omp_build_barrier (NULL_TREE));
6316     }
6317 
6318   /* If max_vf is non-zero, then we can use only a vectorization factor
6319      up to the max_vf we chose.  So stick it into the safelen clause.  */
6320   if (maybe_ne (sctx.max_vf, 0U))
6321     {
6322       tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
6323 				OMP_CLAUSE_SAFELEN);
6324       poly_uint64 safe_len;
6325       if (c == NULL_TREE
6326 	  || (poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
6327 	      && maybe_gt (safe_len, sctx.max_vf)))
6328 	{
6329 	  c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_SAFELEN);
6330 	  OMP_CLAUSE_SAFELEN_EXPR (c) = build_int_cst (integer_type_node,
6331 						       sctx.max_vf);
6332 	  OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
6333 	  gimple_omp_for_set_clauses (ctx->stmt, c);
6334 	}
6335     }
6336 }
6337 
6338 /* Create temporary variables for lastprivate(conditional:) implementation
6339    in context CTX with CLAUSES.  */
6340 
static void
lower_lastprivate_conditional_clauses (tree *clauses, omp_context *ctx)
{
  /* Type of the per-thread counter used to decide which conditional
     lastprivate store happened logically last, plus the temporaries
     holding the shared buffer pointer and the counter itself.  */
  tree iter_type = NULL_TREE;
  tree cond_ptr = NULL_TREE;
  tree iter_var = NULL_TREE;
  bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
		  && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD);
  tree next = *clauses;
  for (tree c = *clauses; c; c = OMP_CLAUSE_CHAIN (c))
    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
	&& OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
      {
	if (is_simd)
	  {
	    /* For simd a matching _condtemp_ clause already exists for
	       each conditional lastprivate; search from NEXT so the
	       pairing proceeds in clause order.  */
	    tree cc = omp_find_clause (next, OMP_CLAUSE__CONDTEMP_);
	    gcc_assert (cc);
	    if (iter_type == NULL_TREE)
	      {
		/* First conditional lastprivate seen: create the
		   iterator temporary and prepend a _condtemp_ clause
		   for it with the ITER flag set.  */
		iter_type = TREE_TYPE (OMP_CLAUSE_DECL (cc));
		iter_var = create_tmp_var_raw (iter_type);
		DECL_CONTEXT (iter_var) = current_function_decl;
		DECL_SEEN_IN_BIND_EXPR_P (iter_var) = 1;
		DECL_CHAIN (iter_var) = ctx->block_vars;
		ctx->block_vars = iter_var;
		tree c3
		  = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_);
		OMP_CLAUSE__CONDTEMP__ITER (c3) = 1;
		OMP_CLAUSE_DECL (c3) = iter_var;
		OMP_CLAUSE_CHAIN (c3) = *clauses;
		*clauses = c3;
		ctx->lastprivate_conditional_map = new hash_map<tree, tree>;
	      }
	    next = OMP_CLAUSE_CHAIN (cc);
	    /* Record the mapping from the privatized lastprivate decl
	       to its _condtemp_ companion.  */
	    tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
	    tree v = lookup_decl (OMP_CLAUSE_DECL (cc), ctx);
	    ctx->lastprivate_conditional_map->put (o, v);
	    continue;
	  }
	if (iter_type == NULL)
	  {
	    /* Non-simd (for/sections): one-time setup.  Derive the
	       (unsigned) iterator type from the loop's iteration type
	       for GIMPLE_OMP_FOR, or use unsigned int for sections.  */
	    if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR)
	      {
		struct omp_for_data fd;
		omp_extract_for_data (as_a <gomp_for *> (ctx->stmt), &fd,
				      NULL);
		iter_type = unsigned_type_for (fd.iter_type);
	      }
	    else if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
	      iter_type = unsigned_type_node;
	    tree c2 = omp_find_clause (*clauses, OMP_CLAUSE__CONDTEMP_);
	    if (c2)
	      {
		/* Reuse an already-present _condtemp_ clause; resolve
		   its decl in the enclosing context.  */
		cond_ptr
		  = lookup_decl_in_outer_ctx (OMP_CLAUSE_DECL (c2), ctx);
		OMP_CLAUSE_DECL (c2) = cond_ptr;
	      }
	    else
	      {
		/* Otherwise create the buffer pointer temporary and
		   prepend a _condtemp_ clause for it.  */
		cond_ptr = create_tmp_var_raw (build_pointer_type (iter_type));
		DECL_CONTEXT (cond_ptr) = current_function_decl;
		DECL_SEEN_IN_BIND_EXPR_P (cond_ptr) = 1;
		DECL_CHAIN (cond_ptr) = ctx->block_vars;
		ctx->block_vars = cond_ptr;
		c2 = build_omp_clause (UNKNOWN_LOCATION,
				       OMP_CLAUSE__CONDTEMP_);
		OMP_CLAUSE_DECL (c2) = cond_ptr;
		OMP_CLAUSE_CHAIN (c2) = *clauses;
		*clauses = c2;
	      }
	    /* Create the iterator temporary and splice a second
	       _condtemp_ clause (ITER flag set) right after C2.  */
	    iter_var = create_tmp_var_raw (iter_type);
	    DECL_CONTEXT (iter_var) = current_function_decl;
	    DECL_SEEN_IN_BIND_EXPR_P (iter_var) = 1;
	    DECL_CHAIN (iter_var) = ctx->block_vars;
	    ctx->block_vars = iter_var;
	    tree c3
	      = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_);
	    OMP_CLAUSE__CONDTEMP__ITER (c3) = 1;
	    OMP_CLAUSE_DECL (c3) = iter_var;
	    OMP_CLAUSE_CHAIN (c3) = OMP_CLAUSE_CHAIN (c2);
	    OMP_CLAUSE_CHAIN (c2) = c3;
	    ctx->lastprivate_conditional_map = new hash_map<tree, tree>;
	  }
	/* Per-clause temporary of the iterator type, mapped from the
	   privatized decl in lastprivate_conditional_map.  */
	tree v = create_tmp_var_raw (iter_type);
	DECL_CONTEXT (v) = current_function_decl;
	DECL_SEEN_IN_BIND_EXPR_P (v) = 1;
	DECL_CHAIN (v) = ctx->block_vars;
	ctx->block_vars = v;
	tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
	ctx->lastprivate_conditional_map->put (o, v);
      }
}
6433 
6434 
6435 /* Generate code to implement the LASTPRIVATE clauses.  This is used for
6436    both parallel and workshare constructs.  PREDICATE may be NULL if it's
6437    always true.  BODY_P is the sequence to insert early initialization
6438    if needed, STMT_LIST is where the non-conditional lastprivate handling
6439    goes into and CSTMT_LIST is a sequence that needs to be run in a critical
6440    section.  */
6441 
static void
lower_lastprivate_clauses (tree clauses, tree predicate, gimple_seq *body_p,
			   gimple_seq *stmt_list, gimple_seq *cstmt_list,
			   omp_context *ctx)
{
  tree x, c, label = NULL, orig_clauses = clauses;
  bool par_clauses = false;
  tree simduid = NULL, lastlane = NULL, simtcond = NULL, simtlast = NULL;
  /* Running offset into the conditional-lastprivate buffer: advanced in
     bytes when COND_PTR is a pointer, in elements when it is an array.  */
  unsigned HOST_WIDE_INT conditional_off = 0;
  /* Statements to emit after LABEL, i.e. outside of the predicated
     region (used for combined-into-simd-safelen1 conditionals).  */
  gimple_seq post_stmt_list = NULL;

  /* Early exit if there are no lastprivate or linear clauses.  */
  for (; clauses ; clauses = OMP_CLAUSE_CHAIN (clauses))
    if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LASTPRIVATE
	|| (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LINEAR
	    && !OMP_CLAUSE_LINEAR_NO_COPYOUT (clauses)))
      break;
  if (clauses == NULL)
    {
      /* If this was a workshare clause, see if it had been combined
	 with its parallel.  In that case, look for the clauses on the
	 parallel statement itself.  */
      if (is_parallel_ctx (ctx))
	return;

      ctx = ctx->outer;
      if (ctx == NULL || !is_parallel_ctx (ctx))
	return;

      clauses = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
				 OMP_CLAUSE_LASTPRIVATE);
      if (clauses == NULL)
	return;
      par_clauses = true;
    }

  bool maybe_simt = false;
  if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
      && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
    {
      /* For simd loops pick up the _simt_ marker and the simduid decl
	 from the original clause list.  */
      maybe_simt = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMT_);
      simduid = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMDUID_);
      if (simduid)
	simduid = OMP_CLAUSE__SIMDUID__DECL (simduid);
    }

  if (predicate)
    {
      /* Emit "if (PREDICATE) goto label_true; else goto label;" so the
	 copy-out code below runs only when the predicate holds.  */
      gcond *stmt;
      tree label_true, arm1, arm2;
      enum tree_code pred_code = TREE_CODE (predicate);

      label = create_artificial_label (UNKNOWN_LOCATION);
      label_true = create_artificial_label (UNKNOWN_LOCATION);
      if (TREE_CODE_CLASS (pred_code) == tcc_comparison)
	{
	  arm1 = TREE_OPERAND (predicate, 0);
	  arm2 = TREE_OPERAND (predicate, 1);
	  gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
	  gimplify_expr (&arm2, stmt_list, NULL, is_gimple_val, fb_rvalue);
	}
      else
	{
	  /* Non-comparison predicate: test it against false.  */
	  arm1 = predicate;
	  gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
	  arm2 = boolean_false_node;
	  pred_code = NE_EXPR;
	}
      if (maybe_simt)
	{
	  /* For SIMT, vote across lanes (IFN_GOMP_SIMT_VOTE_ANY) whether
	     any lane's predicate is true and branch on the result.  */
	  c = build2 (pred_code, boolean_type_node, arm1, arm2);
	  c = fold_convert (integer_type_node, c);
	  simtcond = create_tmp_var (integer_type_node);
	  gimplify_assign (simtcond, c, stmt_list);
	  gcall *g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY,
						 1, simtcond);
	  c = create_tmp_var (integer_type_node);
	  gimple_call_set_lhs (g, c);
	  gimple_seq_add_stmt (stmt_list, g);
	  stmt = gimple_build_cond (NE_EXPR, c, integer_zero_node,
				    label_true, label);
	}
      else
	stmt = gimple_build_cond (pred_code, arm1, arm2, label_true, label);
      gimple_seq_add_stmt (stmt_list, stmt);
      gimple_seq_add_stmt (stmt_list, gimple_build_label (label_true));
    }

  tree cond_ptr = NULL_TREE;
  for (c = clauses; c ;)
    {
      tree var, new_var;
      location_t clause_loc = OMP_CLAUSE_LOCATION (c);
      /* Sequence this clause's copy-out is emitted into; may be
	 redirected to CSTMT_LIST or POST_STMT_LIST below.  */
      gimple_seq *this_stmt_list = stmt_list;
      tree lab2 = NULL_TREE;

      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
	  && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
	  && ctx->lastprivate_conditional_map
	  && !ctx->combined_into_simd_safelen1)
	{
	  /* lastprivate (conditional:): zero the condition temporary V
	     early (into BODY_P), then inside the critical section
	     (CSTMT_LIST) compare V against the slot in the shared
	     buffer and store V there if it is greater, guarding the
	     following copy-out with the same condition.  */
	  gcc_assert (body_p);
	  if (simduid)
	    goto next;
	  if (cond_ptr == NULL_TREE)
	    {
	      cond_ptr = omp_find_clause (orig_clauses, OMP_CLAUSE__CONDTEMP_);
	      cond_ptr = OMP_CLAUSE_DECL (cond_ptr);
	    }
	  tree type = TREE_TYPE (TREE_TYPE (cond_ptr));
	  tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
	  tree v = *ctx->lastprivate_conditional_map->get (o);
	  gimplify_assign (v, build_zero_cst (type), body_p);
	  this_stmt_list = cstmt_list;
	  tree mem;
	  if (POINTER_TYPE_P (TREE_TYPE (cond_ptr)))
	    {
	      /* Pointer form: address the slot via MEM_REF at a byte
		 offset, then advance by the slot size.  */
	      mem = build2 (MEM_REF, type, cond_ptr,
			    build_int_cst (TREE_TYPE (cond_ptr),
					   conditional_off));
	      conditional_off += tree_to_uhwi (TYPE_SIZE_UNIT (type));
	    }
	  else
	    /* Array form: index by element.  */
	    mem = build4 (ARRAY_REF, type, cond_ptr,
			  size_int (conditional_off++), NULL_TREE, NULL_TREE);
	  tree mem2 = copy_node (mem);
	  gimple_seq seq = NULL;
	  mem = force_gimple_operand (mem, &seq, true, NULL_TREE);
	  gimple_seq_add_seq (this_stmt_list, seq);
	  tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
	  lab2 = create_artificial_label (UNKNOWN_LOCATION);
	  gimple *g = gimple_build_cond (GT_EXPR, v, mem, lab1, lab2);
	  gimple_seq_add_stmt (this_stmt_list, g);
	  gimple_seq_add_stmt (this_stmt_list, gimple_build_label (lab1));
	  gimplify_assign (mem2, v, this_stmt_list);
	}
      else if (predicate
	       && ctx->combined_into_simd_safelen1
	       && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
	       && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
	       && ctx->lastprivate_conditional_map)
	/* Defer this copy-out until after LABEL; POST_STMT_LIST is
	   appended to STMT_LIST at the very end.  */
	this_stmt_list = &post_stmt_list;

      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
	  || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
	      && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
	{
	  var = OMP_CLAUSE_DECL (c);
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
	      && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
	      && is_taskloop_ctx (ctx))
	    {
	      /* For taskloop firstprivate+lastprivate, the privatized
		 copy lives in the enclosing task context.  */
	      gcc_checking_assert (ctx->outer && is_task_ctx (ctx->outer));
	      new_var = lookup_decl (var, ctx->outer);
	    }
	  else
	    {
	      new_var = lookup_decl (var, ctx);
	      /* Avoid uninitialized warnings for lastprivate and
		 for linear iterators.  */
	      if (predicate
		  && (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
		      || OMP_CLAUSE_LINEAR_NO_COPYIN (c)))
		TREE_NO_WARNING (new_var) = 1;
	    }

	  if (!maybe_simt && simduid && DECL_HAS_VALUE_EXPR_P (new_var))
	    {
	      /* The privatized variable is an element of an "omp simd
		 array"; rewrite the reference to index it with the last
		 lane number, computed once via IFN_GOMP_SIMD_LAST_LANE.  */
	      tree val = DECL_VALUE_EXPR (new_var);
	      if (TREE_CODE (val) == ARRAY_REF
		  && VAR_P (TREE_OPERAND (val, 0))
		  && lookup_attribute ("omp simd array",
				       DECL_ATTRIBUTES (TREE_OPERAND (val,
								      0))))
		{
		  if (lastlane == NULL)
		    {
		      lastlane = create_tmp_var (unsigned_type_node);
		      gcall *g
			= gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
						      2, simduid,
						      TREE_OPERAND (val, 1));
		      gimple_call_set_lhs (g, lastlane);
		      gimple_seq_add_stmt (this_stmt_list, g);
		    }
		  new_var = build4 (ARRAY_REF, TREE_TYPE (val),
				    TREE_OPERAND (val, 0), lastlane,
				    NULL_TREE, NULL_TREE);
		  TREE_THIS_NOTRAP (new_var) = 1;
		}
	    }
	  else if (maybe_simt)
	    {
	      /* For SIMT, pull the value from the last active lane with
		 IFN_GOMP_SIMT_XCHG_IDX and store it back into the
		 privatized variable before the copy-out below.  */
	      tree val = (DECL_HAS_VALUE_EXPR_P (new_var)
			  ? DECL_VALUE_EXPR (new_var)
			  : new_var);
	      if (simtlast == NULL)
		{
		  simtlast = create_tmp_var (unsigned_type_node);
		  gcall *g = gimple_build_call_internal
		    (IFN_GOMP_SIMT_LAST_LANE, 1, simtcond);
		  gimple_call_set_lhs (g, simtlast);
		  gimple_seq_add_stmt (this_stmt_list, g);
		}
	      x = build_call_expr_internal_loc
		(UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_IDX,
		 TREE_TYPE (val), 2, val, simtlast);
	      new_var = unshare_expr (new_var);
	      gimplify_assign (new_var, x, this_stmt_list);
	      new_var = unshare_expr (new_var);
	    }

	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
	      && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
	    {
	      /* Lower and append the clause's attached statement
		 sequence, then clear it so it isn't emitted twice.  */
	      lower_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
	      gimple_seq_add_seq (this_stmt_list,
				  OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
	      OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) = NULL;
	    }
	  else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
		   && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
	    {
	      lower_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
	      gimple_seq_add_seq (this_stmt_list,
				  OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
	      OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) = NULL;
	    }

	  x = NULL_TREE;
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
	      && OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c)
	      && is_taskloop_ctx (ctx))
	    {
	      /* Taskloop loop IV: if the variable resolves to a global
		 two contexts up, copy out directly to it.  */
	      tree ovar = maybe_lookup_decl_in_outer_ctx (var,
							  ctx->outer->outer);
	      if (is_global_var (ovar))
		x = ovar;
	    }
	  if (!x)
	    x = build_outer_var_ref (var, ctx, OMP_CLAUSE_LASTPRIVATE);
	  if (omp_is_reference (var))
	    new_var = build_simple_mem_ref_loc (clause_loc, new_var);
	  /* The copy-out itself: outer variable = privatized copy,
	     using the language hook to build the assignment.  */
	  x = lang_hooks.decls.omp_clause_assign_op (c, x, new_var);
	  gimplify_and_add (x, this_stmt_list);

	  if (lab2)
	    gimple_seq_add_stmt (this_stmt_list, gimple_build_label (lab2));
	}

     next:
      c = OMP_CLAUSE_CHAIN (c);
      if (c == NULL && !par_clauses)
	{
	  /* If this was a workshare clause, see if it had been combined
	     with its parallel.  In that case, continue looking for the
	     clauses also on the parallel statement itself.  */
	  if (is_parallel_ctx (ctx))
	    break;

	  ctx = ctx->outer;
	  if (ctx == NULL || !is_parallel_ctx (ctx))
	    break;

	  c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
			       OMP_CLAUSE_LASTPRIVATE);
	  par_clauses = true;
	}
    }

  if (label)
    gimple_seq_add_stmt (stmt_list, gimple_build_label (label));
  gimple_seq_add_seq (stmt_list, post_stmt_list);
}
6716 
6717 /* Lower the OpenACC reductions of CLAUSES for compute axis LEVEL
6718    (which might be a placeholder).  INNER is true if this is an inner
6719    axis of a multi-axis loop.  FORK and JOIN are (optional) fork and
6720    join markers.  Generate the before-loop forking sequence in
6721    FORK_SEQ and the after-loop joining sequence to JOIN_SEQ.  The
6722    general form of these sequences is
6723 
6724      GOACC_REDUCTION_SETUP
6725      GOACC_FORK
6726      GOACC_REDUCTION_INIT
6727      ...
6728      GOACC_REDUCTION_FINI
6729      GOACC_JOIN
6730      GOACC_REDUCTION_TEARDOWN.  */
6731 
static void
lower_oacc_reductions (location_t loc, tree clauses, tree level, bool inner,
		       gcall *fork, gcall *join, gimple_seq *fork_seq,
		       gimple_seq *join_seq, omp_context *ctx)
{
  /* Four partial sequences, stitched around the FORK and JOIN markers
     at the end: SETUP before fork, INIT after fork, FINI before join,
     TEARDOWN after join.  */
  gimple_seq before_fork = NULL;
  gimple_seq after_fork = NULL;
  gimple_seq before_join = NULL;
  gimple_seq after_join = NULL;
  tree init_code = NULL_TREE, fini_code = NULL_TREE,
    setup_code = NULL_TREE, teardown_code = NULL_TREE;
  /* Byte offset of the current variable in the reduction buffer.  */
  unsigned offset = 0;

  for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
      {
	/* No 'reduction' clauses on OpenACC 'kernels'.  */
	gcc_checking_assert (!is_oacc_kernels (ctx));

	tree orig = OMP_CLAUSE_DECL (c);
	tree var = maybe_lookup_decl (orig, ctx);
	tree ref_to_res = NULL_TREE;
	tree incoming, outgoing, v1, v2, v3;
	bool is_private = false;

	/* Canonicalize the reduction code: minus accumulates like
	   plus, and the short-circuit truth ops become their bitwise
	   counterparts.  */
	enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
	if (rcode == MINUS_EXPR)
	  rcode = PLUS_EXPR;
	else if (rcode == TRUTH_ANDIF_EXPR)
	  rcode = BIT_AND_EXPR;
	else if (rcode == TRUTH_ORIF_EXPR)
	  rcode = BIT_IOR_EXPR;
	tree op = build_int_cst (unsigned_type_node, rcode);

	if (!var)
	  var = orig;

	incoming = outgoing = var;

	if (!inner)
	  {
	    /* See if an outer construct also reduces this variable.  */
	    omp_context *outer = ctx;

	    while (omp_context *probe = outer->outer)
	      {
		enum gimple_code type = gimple_code (probe->stmt);
		tree cls;

		switch (type)
		  {
		  case GIMPLE_OMP_FOR:
		    cls = gimple_omp_for_clauses (probe->stmt);
		    break;

		  case GIMPLE_OMP_TARGET:
		    /* No 'reduction' clauses inside OpenACC 'kernels'
		       regions.  */
		    gcc_checking_assert (!is_oacc_kernels (probe));

		    if (!is_gimple_omp_offloaded (probe->stmt))
		      goto do_lookup;

		    cls = gimple_omp_target_clauses (probe->stmt);
		    break;

		  default:
		    /* Any other construct stops the outward walk.  */
		    goto do_lookup;
		  }

		outer = probe;
		for (; cls;  cls = OMP_CLAUSE_CHAIN (cls))
		  if (OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_REDUCTION
		      && orig == OMP_CLAUSE_DECL (cls))
		    {
		      /* Outer construct reduces it too: use its copy as
			 both incoming and outgoing value.  */
		      incoming = outgoing = lookup_decl (orig, probe);
		      goto has_outer_reduction;
		    }
		  else if ((OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_FIRSTPRIVATE
			    || OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_PRIVATE)
			   && orig == OMP_CLAUSE_DECL (cls))
		    {
		      /* Privatized on an outer construct: prevents use
			 of the target mapping below.  */
		      is_private = true;
		      goto do_lookup;
		    }
	      }

	  do_lookup:
	    /* This is the outermost construct with this reduction,
	       see if there's a mapping for it.  */
	    if (gimple_code (outer->stmt) == GIMPLE_OMP_TARGET
		&& maybe_lookup_field (orig, outer) && !is_private)
	      {
		/* Mapped on the offloaded target: reduce into the
		   received reference and start from the operation's
		   neutral initializer.  */
		ref_to_res = build_receiver_ref (orig, false, outer);
		if (omp_is_reference (orig))
		  ref_to_res = build_simple_mem_ref (ref_to_res);

		tree type = TREE_TYPE (var);
		if (POINTER_TYPE_P (type))
		  type = TREE_TYPE (type);

		outgoing = var;
		incoming = omp_reduction_init_op (loc, rcode, type);
	      }
	    else
	      {
		/* Try to look at enclosing contexts for reduction var,
		   use original if no mapping found.  */
		tree t = NULL_TREE;
		omp_context *c = ctx->outer;
		while (c && !t)
		  {
		    t = maybe_lookup_decl (orig, c);
		    c = c->outer;
		  }
		incoming = outgoing = (t ? t : orig);
	      }

	  has_outer_reduction:;
	  }

	if (!ref_to_res)
	  ref_to_res = integer_zero_node;

	if (omp_is_reference (orig))
	  {
	    /* By-reference reduction: make pointer temporaries V1/V2/V3
	       aliasing VAR and operate on the pointed-to objects.  */
	    tree type = TREE_TYPE (var);
	    const char *id = IDENTIFIER_POINTER (DECL_NAME (var));

	    if (!inner)
	      {
		/* Point VAR at a fresh local object.  */
		tree x = create_tmp_var (TREE_TYPE (type), id);
		gimplify_assign (var, build_fold_addr_expr (x), fork_seq);
	      }

	    v1 = create_tmp_var (type, id);
	    v2 = create_tmp_var (type, id);
	    v3 = create_tmp_var (type, id);

	    gimplify_assign (v1, var, fork_seq);
	    gimplify_assign (v2, var, fork_seq);
	    gimplify_assign (v3, var, fork_seq);

	    var = build_simple_mem_ref (var);
	    v1 = build_simple_mem_ref (v1);
	    v2 = build_simple_mem_ref (v2);
	    v3 = build_simple_mem_ref (v3);
	    outgoing = build_simple_mem_ref (outgoing);

	    if (!TREE_CONSTANT (incoming))
	      incoming = build_simple_mem_ref (incoming);
	  }
	else
	  v1 = v2 = v3 = var;

	/* Determine position in reduction buffer, which may be used
	   by target.  The parser has ensured that this is not a
	   variable-sized type.  */
	fixed_size_mode mode
	  = as_a <fixed_size_mode> (TYPE_MODE (TREE_TYPE (var)));
	unsigned align = GET_MODE_ALIGNMENT (mode) /  BITS_PER_UNIT;
	/* Round OFFSET up to the mode's alignment.  */
	offset = (offset + align - 1) & ~(align - 1);
	tree off = build_int_cst (sizetype, offset);
	offset += GET_MODE_SIZE (mode);

	if (!init_code)
	  {
	    /* Lazily build the constant selectors distinguishing the
	       four GOACC_REDUCTION call variants.  */
	    init_code = build_int_cst (integer_type_node,
				       IFN_GOACC_REDUCTION_INIT);
	    fini_code = build_int_cst (integer_type_node,
				       IFN_GOACC_REDUCTION_FINI);
	    setup_code = build_int_cst (integer_type_node,
					IFN_GOACC_REDUCTION_SETUP);
	    teardown_code = build_int_cst (integer_type_node,
					   IFN_GOACC_REDUCTION_TEARDOWN);
	  }

	/* Build the four IFN_GOACC_REDUCTION calls; each takes the
	   variant selector, the mapped result ref, a value, the compute
	   LEVEL, the reduction OP and the buffer offset OFF.  */
	tree setup_call
	  = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
					  TREE_TYPE (var), 6, setup_code,
					  unshare_expr (ref_to_res),
					  incoming, level, op, off);
	tree init_call
	  = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
					  TREE_TYPE (var), 6, init_code,
					  unshare_expr (ref_to_res),
					  v1, level, op, off);
	tree fini_call
	  = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
					  TREE_TYPE (var), 6, fini_code,
					  unshare_expr (ref_to_res),
					  v2, level, op, off);
	tree teardown_call
	  = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
					  TREE_TYPE (var), 6, teardown_code,
					  ref_to_res, v3, level, op, off);

	gimplify_assign (v1, setup_call, &before_fork);
	gimplify_assign (v2, init_call, &after_fork);
	gimplify_assign (v3, fini_call, &before_join);
	gimplify_assign (outgoing, teardown_call, &after_join);
      }

  /* Now stitch things together.  */
  gimple_seq_add_seq (fork_seq, before_fork);
  if (fork)
    gimple_seq_add_stmt (fork_seq, fork);
  gimple_seq_add_seq (fork_seq, after_fork);

  gimple_seq_add_seq (join_seq, before_join);
  if (join)
    gimple_seq_add_stmt (join_seq, join);
  gimple_seq_add_seq (join_seq, after_join);
}
6946 
6947 /* Generate code to implement the REDUCTION clauses, append it
6948    to STMT_SEQP.  CLIST if non-NULL is a pointer to a sequence
6949    that should be emitted also inside of the critical section,
6950    in that case clear *CLIST afterwards, otherwise leave it as is
6951    and let the caller emit it itself.  */
6952 
static void
lower_reduction_clauses (tree clauses, gimple_seq *stmt_seqp,
			 gimple_seq *clist, omp_context *ctx)
{
  gimple_seq sub_seq = NULL;
  gimple *stmt;
  tree x, c;
  int count = 0;

  /* OpenACC loop reductions are handled elsewhere.  */
  if (is_gimple_omp_oacc (ctx->stmt))
    return;

  /* SIMD reductions are handled in lower_rec_input_clauses.  */
  if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
      && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
    return;

  /* inscan reductions are handled elsewhere.  */
  if (ctx->scan_inclusive || ctx->scan_exclusive)
    return;

  /* First see if there is exactly one reduction clause.  Use OMP_ATOMIC
     update in that case, otherwise use a lock.  COUNT ends up -1 when an
     array section or UDR reduction is present, 1 for exactly one simple
     scalar reduction, 2 as soon as a second one is seen.  */
  for (c = clauses; c && count < 2; c = OMP_CLAUSE_CHAIN (c))
    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	&& !OMP_CLAUSE_REDUCTION_TASK (c))
      {
	if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
	    || TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
	  {
	    /* Never use OMP_ATOMIC for array reductions or UDRs.  */
	    count = -1;
	    break;
	  }
	count++;
      }

  /* No applicable reduction clauses: nothing to emit.  */
  if (count == 0)
    return;

  for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
    {
      tree var, ref, new_var, orig_var;
      enum tree_code code;
      location_t clause_loc = OMP_CLAUSE_LOCATION (c);

      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
	  || OMP_CLAUSE_REDUCTION_TASK (c))
	continue;

      enum omp_clause_code ccode = OMP_CLAUSE_REDUCTION;
      orig_var = var = OMP_CLAUSE_DECL (c);
      if (TREE_CODE (var) == MEM_REF)
	{
	  /* Array section reduction: peel the address computation off
	     the MEM_REF to reach the underlying base variable.  */
	  var = TREE_OPERAND (var, 0);
	  if (TREE_CODE (var) == POINTER_PLUS_EXPR)
	    var = TREE_OPERAND (var, 0);
	  if (TREE_CODE (var) == ADDR_EXPR)
	    var = TREE_OPERAND (var, 0);
	  else
	    {
	      /* If this is a pointer or referenced based array
		 section, the var could be private in the outer
		 context e.g. on orphaned loop construct.  Pretend this
		 is private variable's outer reference.  */
	      ccode = OMP_CLAUSE_PRIVATE;
	      if (TREE_CODE (var) == INDIRECT_REF)
		var = TREE_OPERAND (var, 0);
	    }
	  orig_var = var;
	  if (is_variable_sized (var))
	    {
	      /* VLA: look through DECL_VALUE_EXPR to the underlying
		 pointer temporary.  */
	      gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
	      var = DECL_VALUE_EXPR (var);
	      gcc_assert (TREE_CODE (var) == INDIRECT_REF);
	      var = TREE_OPERAND (var, 0);
	      gcc_assert (DECL_P (var));
	    }
	}
      new_var = lookup_decl (var, ctx);
      if (var == OMP_CLAUSE_DECL (c) && omp_is_reference (var))
	new_var = build_simple_mem_ref_loc (clause_loc, new_var);
      ref = build_outer_var_ref (var, ctx, ccode);
      code = OMP_CLAUSE_REDUCTION_CODE (c);

      /* reduction(-:var) sums up the partial results, so it acts
	 identically to reduction(+:var).  */
      if (code == MINUS_EXPR)
        code = PLUS_EXPR;

      /* && and || reductions must be computed on boolean values and
	 converted back to the variable's type afterwards.  */
      bool is_truth_op = (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR);
      if (count == 1)
	{
	  /* Exactly one simple scalar reduction: emit a single relaxed
	     OMP_ATOMIC update instead of taking the global lock.  */
	  tree addr = build_fold_addr_expr_loc (clause_loc, ref);

	  addr = save_expr (addr);
	  ref = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (addr)), addr);
	  tree new_var2 = new_var;
	  tree ref2 = ref;
	  if (is_truth_op)
	    {
	      tree zero = build_zero_cst (TREE_TYPE (new_var));
	      new_var2 = fold_build2_loc (clause_loc, NE_EXPR,
					  boolean_type_node, new_var, zero);
	      ref2 = fold_build2_loc (clause_loc, NE_EXPR, boolean_type_node,
				      ref, zero);
	    }
	  x = fold_build2_loc (clause_loc, code, TREE_TYPE (new_var2), ref2,
			       new_var2);
	  if (is_truth_op)
	    x = fold_convert (TREE_TYPE (new_var), x);
	  x = build2 (OMP_ATOMIC, void_type_node, addr, x);
	  OMP_ATOMIC_MEMORY_ORDER (x) = OMP_MEMORY_ORDER_RELAXED;
	  gimplify_and_add (x, stmt_seqp);
	  /* COUNT == 1 implies this was the only reduction clause, so
	     there is nothing further to do; *CLIST is left for the
	     caller in this case.  */
	  return;
	}
      else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
	{
	  /* Array section reduction: build an element-by-element merge
	     loop from the private copy into the shared array.  */
	  tree d = OMP_CLAUSE_DECL (c);
	  tree type = TREE_TYPE (d);
	  tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
	  tree i = create_tmp_var (TREE_TYPE (v));
	  tree ptype = build_pointer_type (TREE_TYPE (type));
	  tree bias = TREE_OPERAND (d, 1);
	  d = TREE_OPERAND (d, 0);
	  if (TREE_CODE (d) == POINTER_PLUS_EXPR)
	    {
	      /* Fold the section's start offset into BIAS, remapping it
		 into this context (or an outer one) first.  */
	      tree b = TREE_OPERAND (d, 1);
	      b = maybe_lookup_decl (b, ctx);
	      if (b == NULL)
		{
		  b = TREE_OPERAND (d, 1);
		  b = maybe_lookup_decl_in_outer_ctx (b, ctx);
		}
	      if (integer_zerop (bias))
		bias = b;
	      else
		{
		  bias = fold_convert_loc (clause_loc, TREE_TYPE (b), bias);
		  bias = fold_build2_loc (clause_loc, PLUS_EXPR,
					  TREE_TYPE (b), b, bias);
		}
	      d = TREE_OPERAND (d, 0);
	    }
	  /* For ref build_outer_var_ref already performs this, so
	     only new_var needs a dereference.  */
	  if (TREE_CODE (d) == INDIRECT_REF)
	    {
	      new_var = build_simple_mem_ref_loc (clause_loc, new_var);
	      gcc_assert (omp_is_reference (var) && var == orig_var);
	    }
	  else if (TREE_CODE (d) == ADDR_EXPR)
	    {
	      if (orig_var == var)
		{
		  new_var = build_fold_addr_expr (new_var);
		  ref = build_fold_addr_expr (ref);
		}
	    }
	  else
	    {
	      gcc_assert (orig_var == var);
	      if (omp_is_reference (var))
		ref = build_fold_addr_expr (ref);
	    }
	  if (DECL_P (v))
	    {
	      /* The section's upper bound may itself be a remapped
		 variable; gimplify it into a usable rvalue.  */
	      tree t = maybe_lookup_decl (v, ctx);
	      if (t)
		v = t;
	      else
		v = maybe_lookup_decl_in_outer_ctx (v, ctx);
	      gimplify_expr (&v, stmt_seqp, NULL, is_gimple_val, fb_rvalue);
	    }
	  if (!integer_zerop (bias))
	    {
	      bias = fold_convert_loc (clause_loc, sizetype, bias);
	      new_var = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
					 TREE_TYPE (new_var), new_var,
					 unshare_expr (bias));
	      ref = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
					 TREE_TYPE (ref), ref, bias);
	    }
	  new_var = fold_convert_loc (clause_loc, ptype, new_var);
	  ref = fold_convert_loc (clause_loc, ptype, ref);
	  tree m = create_tmp_var (ptype);
	  gimplify_assign (m, new_var, stmt_seqp);
	  new_var = m;
	  m = create_tmp_var (ptype);
	  gimplify_assign (m, ref, stmt_seqp);
	  ref = m;
	  gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), stmt_seqp);
	  /* Loop shape:
	       body: *ref = *ref OP *new_var (or UDR merge);
		     new_var++; ref++; i++;
		     if (i <= v) goto body; else goto end;
	       end:  */
	  tree body = create_artificial_label (UNKNOWN_LOCATION);
	  tree end = create_artificial_label (UNKNOWN_LOCATION);
	  gimple_seq_add_stmt (&sub_seq, gimple_build_label (body));
	  tree priv = build_simple_mem_ref_loc (clause_loc, new_var);
	  tree out = build_simple_mem_ref_loc (clause_loc, ref);
	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	    {
	      /* User defined reduction: splice in the lowered combiner
		 with the placeholders bound to the current elements,
		 then clear the clause's merge bits so it is not lowered
		 a second time.  */
	      tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
	      tree decl_placeholder
		= OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
	      SET_DECL_VALUE_EXPR (placeholder, out);
	      DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
	      SET_DECL_VALUE_EXPR (decl_placeholder, priv);
	      DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
	      lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
	      gimple_seq_add_seq (&sub_seq,
				  OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
	      OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
	      OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
	      OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
	    }
	  else
	    {
	      tree out2 = out;
	      tree priv2 = priv;
	      if (is_truth_op)
		{
		  tree zero = build_zero_cst (TREE_TYPE (out));
		  out2 = fold_build2_loc (clause_loc, NE_EXPR,
					  boolean_type_node, out, zero);
		  priv2 = fold_build2_loc (clause_loc, NE_EXPR,
					   boolean_type_node, priv, zero);
		}
	      x = build2 (code, TREE_TYPE (out2), out2, priv2);
	      if (is_truth_op)
		x = fold_convert (TREE_TYPE (out), x);
	      out = unshare_expr (out);
	      gimplify_assign (out, x, &sub_seq);
	    }
	  /* Advance both element pointers and the index, loop while
	     i <= v.  */
	  gimple *g = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
					   TYPE_SIZE_UNIT (TREE_TYPE (type)));
	  gimple_seq_add_stmt (&sub_seq, g);
	  g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
				   TYPE_SIZE_UNIT (TREE_TYPE (type)));
	  gimple_seq_add_stmt (&sub_seq, g);
	  g = gimple_build_assign (i, PLUS_EXPR, i,
				   build_int_cst (TREE_TYPE (i), 1));
	  gimple_seq_add_stmt (&sub_seq, g);
	  g = gimple_build_cond (LE_EXPR, i, v, body, end);
	  gimple_seq_add_stmt (&sub_seq, g);
	  gimple_seq_add_stmt (&sub_seq, gimple_build_label (end));
	}
      else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	{
	  /* Scalar user defined reduction: bind the placeholder to the
	     outer variable and splice in the lowered combiner.  */
	  tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);

	  if (omp_is_reference (var)
	      && !useless_type_conversion_p (TREE_TYPE (placeholder),
					     TREE_TYPE (ref)))
	    ref = build_fold_addr_expr_loc (clause_loc, ref);
	  SET_DECL_VALUE_EXPR (placeholder, ref);
	  DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
	  lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
	  gimple_seq_add_seq (&sub_seq, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
	  OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
	  OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
	}
      else
	{
	  /* Simple scalar reduction: outer = outer OP private.  */
	  tree new_var2 = new_var;
	  tree ref2 = ref;
	  if (is_truth_op)
	    {
	      tree zero = build_zero_cst (TREE_TYPE (new_var));
	      new_var2 = fold_build2_loc (clause_loc, NE_EXPR,
					  boolean_type_node, new_var, zero);
	      ref2 = fold_build2_loc (clause_loc, NE_EXPR, boolean_type_node,
				      ref, zero);
	    }
	  x = build2 (code, TREE_TYPE (ref), ref2, new_var2);
	  if (is_truth_op)
	    x = fold_convert (TREE_TYPE (new_var), x);
	  ref = build_outer_var_ref (var, ctx);
	  gimplify_assign (ref, x, &sub_seq);
	}
    }

  /* Multiple reductions (or array/UDR ones): serialize all merge code
     between GOMP_atomic_start/end library calls.  */
  stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START),
			    0);
  gimple_seq_add_stmt (stmt_seqp, stmt);

  gimple_seq_add_seq (stmt_seqp, sub_seq);

  /* Emit any caller-supplied statements inside the same atomic region
     and clear *CLIST, as promised in the function comment.  */
  if (clist)
    {
      gimple_seq_add_seq (stmt_seqp, *clist);
      *clist = NULL;
    }

  stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END),
			    0);
  gimple_seq_add_stmt (stmt_seqp, stmt);
}
7249 
7250 
7251 /* Generate code to implement the COPYPRIVATE clauses.  */
7252 
7253 static void
lower_copyprivate_clauses(tree clauses,gimple_seq * slist,gimple_seq * rlist,omp_context * ctx)7254 lower_copyprivate_clauses (tree clauses, gimple_seq *slist, gimple_seq *rlist,
7255 			    omp_context *ctx)
7256 {
7257   tree c;
7258 
7259   for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7260     {
7261       tree var, new_var, ref, x;
7262       bool by_ref;
7263       location_t clause_loc = OMP_CLAUSE_LOCATION (c);
7264 
7265       if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYPRIVATE)
7266 	continue;
7267 
7268       var = OMP_CLAUSE_DECL (c);
7269       by_ref = use_pointer_for_field (var, NULL);
7270 
7271       ref = build_sender_ref (var, ctx);
7272       x = new_var = lookup_decl_in_outer_ctx (var, ctx);
7273       if (by_ref)
7274 	{
7275 	  x = build_fold_addr_expr_loc (clause_loc, new_var);
7276 	  x = fold_convert_loc (clause_loc, TREE_TYPE (ref), x);
7277 	}
7278       gimplify_assign (ref, x, slist);
7279 
7280       ref = build_receiver_ref (var, false, ctx);
7281       if (by_ref)
7282 	{
7283 	  ref = fold_convert_loc (clause_loc,
7284 				  build_pointer_type (TREE_TYPE (new_var)),
7285 				  ref);
7286 	  ref = build_fold_indirect_ref_loc (clause_loc, ref);
7287 	}
7288       if (omp_is_reference (var))
7289 	{
7290 	  ref = fold_convert_loc (clause_loc, TREE_TYPE (new_var), ref);
7291 	  ref = build_simple_mem_ref_loc (clause_loc, ref);
7292 	  new_var = build_simple_mem_ref_loc (clause_loc, new_var);
7293 	}
7294       x = lang_hooks.decls.omp_clause_assign_op (c, new_var, ref);
7295       gimplify_and_add (x, rlist);
7296     }
7297 }
7298 
7299 
/* Generate code to implement the clauses, FIRSTPRIVATE, COPYIN, LASTPRIVATE,
   and REDUCTION from the sender (aka parent) side.  Copy-in statements go
   to *ILIST, copy-out statements to *OLIST.  */

static void
lower_send_clauses (tree clauses, gimple_seq *ilist, gimple_seq *olist,
		    omp_context *ctx)
{
  tree c, t;
  int ignored_looptemp = 0;
  bool is_taskloop = false;

  /* For taskloop, ignore first two _looptemp_ clauses, those are initialized
     by GOMP_taskloop.  */
  if (is_task_ctx (ctx) && gimple_omp_task_taskloop_p (ctx->stmt))
    {
      ignored_looptemp = 2;
      is_taskloop = true;
    }

  for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
    {
      tree val, ref, x, var;
      bool by_ref, do_in = false, do_out = false;
      location_t clause_loc = OMP_CLAUSE_LOCATION (c);

      /* First filter: which clause kinds need any sender-side code.  */
      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_PRIVATE:
	  if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
	    break;
	  continue;
	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_LASTPRIVATE:
	case OMP_CLAUSE_IN_REDUCTION:
	case OMP_CLAUSE__REDUCTEMP_:
	  break;
	case OMP_CLAUSE_REDUCTION:
	  /* Reductions on task constructs and task-modified reductions
	     are not sent here.  */
	  if (is_task_ctx (ctx) || OMP_CLAUSE_REDUCTION_TASK (c))
	    continue;
	  break;
	case OMP_CLAUSE_SHARED:
	  if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	    break;
	  continue;
	case OMP_CLAUSE__LOOPTEMP_:
	  if (ignored_looptemp)
	    {
	      ignored_looptemp--;
	      continue;
	    }
	  break;
	default:
	  continue;
	}

      val = OMP_CLAUSE_DECL (c);
      /* For array section reductions, strip the MEM_REF down to the
	 base variable.  */
      if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	   || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION)
	  && TREE_CODE (val) == MEM_REF)
	{
	  val = TREE_OPERAND (val, 0);
	  if (TREE_CODE (val) == POINTER_PLUS_EXPR)
	    val = TREE_OPERAND (val, 0);
	  if (TREE_CODE (val) == INDIRECT_REF
	      || TREE_CODE (val) == ADDR_EXPR)
	    val = TREE_OPERAND (val, 0);
	  if (is_variable_sized (val))
	    continue;
	}

      /* For OMP_CLAUSE_SHARED_FIRSTPRIVATE, look beyond the
	 outer taskloop region.  */
      omp_context *ctx_for_o = ctx;
      if (is_taskloop
	  && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
	  && OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	ctx_for_o = ctx->outer;

      var = lookup_decl_in_outer_ctx (val, ctx_for_o);

      /* Global variables are directly visible in the child function
	 (except COPYIN, and some pointer/reference cases on tasks), so
	 no record field needs to be filled.  */
      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYIN
	  && is_global_var (var)
	  && (val == OMP_CLAUSE_DECL (c)
	      || !is_task_ctx (ctx)
	      || (TREE_CODE (TREE_TYPE (val)) != POINTER_TYPE
		  && (TREE_CODE (TREE_TYPE (val)) != REFERENCE_TYPE
		      || (TREE_CODE (TREE_TYPE (TREE_TYPE (val)))
			  != POINTER_TYPE)))))
	continue;

      /* Rebuild member accesses represented via a dummy variable in
	 terms of the outer context (see omp_member_access_dummy_var).  */
      t = omp_member_access_dummy_var (var);
      if (t)
	{
	  var = DECL_VALUE_EXPR (var);
	  tree o = maybe_lookup_decl_in_outer_ctx (t, ctx_for_o);
	  if (o != t)
	    var = unshare_and_remap (var, t, o);
	  else
	    var = unshare_expr (var);
	}

      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
	{
	  /* Handle taskloop firstprivate/lastprivate, where the
	     lastprivate on GIMPLE_OMP_TASK is represented as
	     OMP_CLAUSE_SHARED_FIRSTPRIVATE.  */
	  tree f = lookup_sfield ((splay_tree_key) &DECL_UID (val), ctx);
	  x = omp_build_component_ref (ctx->sender_decl, f);
	  if (use_pointer_for_field (val, ctx))
	    var = build_fold_addr_expr (var);
	  gimplify_assign (x, var, ilist);
	  DECL_ABSTRACT_ORIGIN (f) = NULL;
	  continue;
	}

      if (((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
	    && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IN_REDUCTION)
	   || val == OMP_CLAUSE_DECL (c))
	  && is_variable_sized (val))
	continue;
      by_ref = use_pointer_for_field (val, NULL);

      /* Second filter: per clause kind, does the value flow into the
	 region (DO_IN), back out of it (DO_OUT), or both?  */
      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_FIRSTPRIVATE:
	  /* Suppress warnings on VAR for compiler-added firstprivate on
	     tasks passed by value.  */
	  if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c)
	      && !by_ref
	      && is_task_ctx (ctx))
	    TREE_NO_WARNING (var) = 1;
	  do_in = true;
	  break;

	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE__LOOPTEMP_:
	case OMP_CLAUSE__REDUCTEMP_:
	  do_in = true;
	  break;

	case OMP_CLAUSE_LASTPRIVATE:
	  if (by_ref || omp_is_reference (val))
	    {
	      if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
		continue;
	      do_in = true;
	    }
	  else
	    {
	      do_out = true;
	      if (lang_hooks.decls.omp_private_outer_ref (val))
		do_in = true;
	    }
	  break;

	case OMP_CLAUSE_REDUCTION:
	case OMP_CLAUSE_IN_REDUCTION:
	  do_in = true;
	  if (val == OMP_CLAUSE_DECL (c))
	    {
	      if (is_task_ctx (ctx))
		by_ref = use_pointer_for_field (val, ctx);
	      else
		do_out = !(by_ref || omp_is_reference (val));
	    }
	  else
	    by_ref = TREE_CODE (TREE_TYPE (val)) == ARRAY_TYPE;
	  break;

	default:
	  gcc_unreachable ();
	}

      if (do_in)
	{
	  /* Store the value (or its address, when BY_REF) into the
	     sender record before the region.  */
	  ref = build_sender_ref (val, ctx);
	  x = by_ref ? build_fold_addr_expr_loc (clause_loc, var) : var;
	  gimplify_assign (ref, x, ilist);
	  if (is_task_ctx (ctx))
	    DECL_ABSTRACT_ORIGIN (TREE_OPERAND (ref, 1)) = NULL;
	}

      if (do_out)
	{
	  /* Copy the final value back out after the region.  */
	  ref = build_sender_ref (val, ctx);
	  gimplify_assign (var, ref, olist);
	}
    }
}
7489 
/* Generate code to implement SHARED from the sender (aka parent)
   side.  This is trickier, since GIMPLE_OMP_PARALLEL_CLAUSES doesn't
   list things that got automatically shared.  Copy-in statements go to
   *ILIST, copy-back statements to *OLIST.  */

static void
lower_send_shared_vars (gimple_seq *ilist, gimple_seq *olist, omp_context *ctx)
{
  tree var, ovar, nvar, t, f, x, record_type;

  /* No data-sharing record means nothing was shared.  */
  if (ctx->record_type == NULL)
    return;

  /* Walk the fields of the sender-side record instead of a clause
     list; srecord_type is preferred when present.  */
  record_type = ctx->srecord_type ? ctx->srecord_type : ctx->record_type;
  for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
    {
      /* DECL_ABSTRACT_ORIGIN links the field back to the shared
	 variable; skip fields without one, or whose origin is itself a
	 field.  */
      ovar = DECL_ABSTRACT_ORIGIN (f);
      if (!ovar || TREE_CODE (ovar) == FIELD_DECL)
	continue;

      /* Only variables remapped in this context with a value
	 expression need sender-side code.  */
      nvar = maybe_lookup_decl (ovar, ctx);
      if (!nvar || !DECL_HAS_VALUE_EXPR_P (nvar))
	continue;

      /* If CTX is a nested parallel directive.  Find the immediately
	 enclosing parallel or workshare construct that contains a
	 mapping for OVAR.  */
      var = lookup_decl_in_outer_ctx (ovar, ctx);

      /* Member accesses represented via a dummy variable must be
	 rebuilt in terms of the outer context (see
	 omp_member_access_dummy_var).  */
      t = omp_member_access_dummy_var (var);
      if (t)
	{
	  var = DECL_VALUE_EXPR (var);
	  tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
	  if (o != t)
	    var = unshare_and_remap (var, t, o);
	  else
	    var = unshare_expr (var);
	}

      if (use_pointer_for_field (ovar, ctx))
	{
	  /* Pass the variable by address.  */
	  x = build_sender_ref (ovar, ctx);
	  if (TREE_CODE (TREE_TYPE (f)) == ARRAY_TYPE
	      && TREE_TYPE (f) == TREE_TYPE (ovar))
	    {
	      gcc_assert (is_parallel_ctx (ctx)
			  && DECL_ARTIFICIAL (ovar));
	      /* _condtemp_ clause.  */
	      var = build_constructor (TREE_TYPE (x), NULL);
	    }
	  else
	    var = build_fold_addr_expr (var);
	  gimplify_assign (x, var, ilist);
	}
      else
	{
	  /* Pass by value: copy in before the region...  */
	  x = build_sender_ref (ovar, ctx);
	  gimplify_assign (x, var, ilist);

	  if (!TREE_READONLY (var)
	      /* We don't need to receive a new reference to a result
	         or parm decl.  In fact we may not store to it as we will
		 invalidate any pending RSO and generate wrong gimple
		 during inlining.  */
	      && !((TREE_CODE (var) == RESULT_DECL
		    || TREE_CODE (var) == PARM_DECL)
		   && DECL_BY_REFERENCE (var)))
	    {
	      /* ...and copy the possibly-modified value back out
		 afterwards.  */
	      x = build_sender_ref (ovar, ctx);
	      gimplify_assign (var, x, olist);
	    }
	}
    }
}
7564 
/* Emit an OpenACC head marker call, encapsulating the partitioning and
   other information that must be processed by the target compiler.
   Return the maximum number of dimensions the associated loop might
   be partitioned over.  */

static unsigned
lower_oacc_head_mark (location_t loc, tree ddvar, tree clauses,
		      gimple_seq *seq, omp_context *ctx)
{
  unsigned levels = 0;		/* Explicitly requested partition levels.  */
  unsigned tag = 0;		/* Accumulated OLF_* flag bits.  */
  tree gang_static = NULL_TREE;
  auto_vec<tree, 5> args;

  /* Argument layout: marker kind, data-dependency var, level count,
     tag bits, and optionally the gang static operand.  */
  args.quick_push (build_int_cst
		   (integer_type_node, IFN_UNIQUE_OACC_HEAD_MARK));
  args.quick_push (ddvar);

  /* Translate the partitioning clauses into OLF_* tag bits; GANG,
     WORKER and VECTOR each add one partitioning level.  */
  for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_GANG:
	  tag |= OLF_DIM_GANG;
	  gang_static = OMP_CLAUSE_GANG_STATIC_EXPR (c);
	  /* static:* is represented by -1, and we can ignore it, as
	     scheduling is always static.  */
	  if (gang_static && integer_minus_onep (gang_static))
	    gang_static = NULL_TREE;
	  levels++;
	  break;

	case OMP_CLAUSE_WORKER:
	  tag |= OLF_DIM_WORKER;
	  levels++;
	  break;

	case OMP_CLAUSE_VECTOR:
	  tag |= OLF_DIM_VECTOR;
	  levels++;
	  break;

	case OMP_CLAUSE_SEQ:
	  tag |= OLF_SEQ;
	  break;

	case OMP_CLAUSE_AUTO:
	  tag |= OLF_AUTO;
	  break;

	case OMP_CLAUSE_INDEPENDENT:
	  tag |= OLF_INDEPENDENT;
	  break;

	case OMP_CLAUSE_TILE:
	  tag |= OLF_TILE;
	  break;

	default:
	  continue;
	}
    }

  if (gang_static)
    {
      /* The gang static argument may be a variable remapped into an
	 outer context.  */
      if (DECL_P (gang_static))
	gang_static = build_outer_var_ref (gang_static, ctx);
      tag |= OLF_GANG_STATIC;
    }

  /* Only parallel/serial regions (or orphaned loops, TGT == NULL) can
     reach this point; kernels regions use a different path.  */
  omp_context *tgt = enclosing_target_ctx (ctx);
  if (!tgt || is_oacc_parallel_or_serial (tgt))
    ;
  else if (is_oacc_kernels (tgt))
    /* Not using this loops handling inside OpenACC 'kernels' regions.  */
    gcc_unreachable ();
  else
    gcc_unreachable ();

  /* In a parallel region, loops are implicitly INDEPENDENT.  */
  if (!tgt || is_oacc_parallel_or_serial (tgt))
    tag |= OLF_INDEPENDENT;

  if (tag & OLF_TILE)
    /* Tiling could use all 3 levels.  */
    levels = 3;
  else
    {
      /* A loop lacking SEQ, GANG, WORKER and/or VECTOR could be AUTO.
	 Ensure at least one level, or 2 for possible auto
	 partitioning */
      bool maybe_auto = !(tag & (((GOMP_DIM_MASK (GOMP_DIM_MAX) - 1)
				  << OLF_DIM_BASE) | OLF_SEQ));

      if (levels < 1u + maybe_auto)
	levels = 1u + maybe_auto;
    }

  args.quick_push (build_int_cst (integer_type_node, levels));
  args.quick_push (build_int_cst (integer_type_node, tag));
  if (gang_static)
    args.quick_push (gang_static);

  gcall *call = gimple_build_call_internal_vec (IFN_UNIQUE, args);
  gimple_set_location (call, loc);
  gimple_set_lhs (call, ddvar);
  gimple_seq_add_stmt (seq, call);

  return levels;
}
7674 
7675 /* Emit an OpenACC lopp head or tail marker to SEQ.  LEVEL is the
7676    partitioning level of the enclosed region.  */
7677 
7678 static void
lower_oacc_loop_marker(location_t loc,tree ddvar,bool head,tree tofollow,gimple_seq * seq)7679 lower_oacc_loop_marker (location_t loc, tree ddvar, bool head,
7680 			tree tofollow, gimple_seq *seq)
7681 {
7682   int marker_kind = (head ? IFN_UNIQUE_OACC_HEAD_MARK
7683 		     : IFN_UNIQUE_OACC_TAIL_MARK);
7684   tree marker = build_int_cst (integer_type_node, marker_kind);
7685   int nargs = 2 + (tofollow != NULL_TREE);
7686   gcall *call = gimple_build_call_internal (IFN_UNIQUE, nargs,
7687 					    marker, ddvar, tofollow);
7688   gimple_set_location (call, loc);
7689   gimple_set_lhs (call, ddvar);
7690   gimple_seq_add_stmt (seq, call);
7691 }
7692 
/* Generate the before and after OpenACC loop sequences.  CLAUSES are
   the loop clauses, from which we extract reductions.  Initialize
   HEAD and TAIL.  */

static void
lower_oacc_head_tail (location_t loc, tree clauses,
		      gimple_seq *head, gimple_seq *tail, omp_context *ctx)
{
  bool inner = false;
  tree ddvar = create_tmp_var (integer_type_node, ".data_dep");
  gimple_seq_add_stmt (head, gimple_build_assign (ddvar, integer_zero_node));

  /* COUNT is the number of partitioning levels the head marker
     advertises; emit one fork/join nesting per level.  */
  unsigned count = lower_oacc_head_mark (loc, ddvar, clauses, head, ctx);
  tree fork_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_FORK);
  tree join_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_JOIN);

  gcc_assert (count);
  for (unsigned done = 1; count; count--, done++)
    {
      gimple_seq fork_seq = NULL;
      gimple_seq join_seq = NULL;

      /* PLACE is -1 here; NOTE(review): presumably replaced with the
	 concrete partitioning dimension by later device lowering —
	 confirm.  */
      tree place = build_int_cst (integer_type_node, -1);
      gcall *fork = gimple_build_call_internal (IFN_UNIQUE, 3,
						fork_kind, ddvar, place);
      gimple_set_location (fork, loc);
      gimple_set_lhs (fork, ddvar);

      gcall *join = gimple_build_call_internal (IFN_UNIQUE, 3,
						join_kind, ddvar, place);
      gimple_set_location (join, loc);
      gimple_set_lhs (join, ddvar);

      /* Mark the beginning of this level sequence.  */
      if (inner)
	lower_oacc_loop_marker (loc, ddvar, true,
				build_int_cst (integer_type_node, count),
				&fork_seq);
      lower_oacc_loop_marker (loc, ddvar, false,
			      build_int_cst (integer_type_node, done),
			      &join_seq);

      /* Wrap this level's reduction setup/teardown around its fork and
	 join statements.  */
      lower_oacc_reductions (loc, clauses, place, inner,
			     fork, join, &fork_seq, &join_seq,  ctx);

      /* Append this level to head. */
      gimple_seq_add_seq (head, fork_seq);
      /* Prepend it to tail.  */
      gimple_seq_add_seq (&join_seq, *tail);
      *tail = join_seq;

      inner = true;
    }

  /* Mark the end of the sequence.  */
  lower_oacc_loop_marker (loc, ddvar, true, NULL_TREE, head);
  lower_oacc_loop_marker (loc, ddvar, false, NULL_TREE, tail);
}
7751 
7752 /* If exceptions are enabled, wrap the statements in BODY in a MUST_NOT_THROW
7753    catch handler and return it.  This prevents programs from violating the
7754    structured block semantics with throws.  */
7755 
7756 static gimple_seq
maybe_catch_exception(gimple_seq body)7757 maybe_catch_exception (gimple_seq body)
7758 {
7759   gimple *g;
7760   tree decl;
7761 
7762   if (!flag_exceptions)
7763     return body;
7764 
7765   if (lang_hooks.eh_protect_cleanup_actions != NULL)
7766     decl = lang_hooks.eh_protect_cleanup_actions ();
7767   else
7768     decl = builtin_decl_explicit (BUILT_IN_TRAP);
7769 
7770   g = gimple_build_eh_must_not_throw (decl);
7771   g = gimple_build_try (body, gimple_seq_alloc_with_stmt (g),
7772       			GIMPLE_TRY_CATCH);
7773 
7774  return gimple_seq_alloc_with_stmt (g);
7775 }
7776 
7777 
7778 /* Routines to lower OMP directives into OMP-GIMPLE.  */
7779 
/* If ctx is a worksharing context inside of a cancellable parallel
   region and it isn't nowait, add lhs to its GIMPLE_OMP_RETURN
   and conditional branch to parallel's cancel_label to handle
   cancellation in the implicit barrier.  */

static void
maybe_add_implicit_barrier_cancel (omp_context *ctx, gimple *omp_return,
				   gimple_seq *body)
{
  gcc_assert (gimple_code (omp_return) == GIMPLE_OMP_RETURN);
  /* nowait regions have no implicit barrier, so there is nothing to
     cancel at.  */
  if (gimple_omp_return_nowait_p (omp_return))
    return;
  /* Walk outwards: taskgroups are skipped, a cancellable parallel gets
     the branch emitted, any other construct ends the search.  */
  for (omp_context *outer = ctx->outer; outer; outer = outer->outer)
    if (gimple_code (outer->stmt) == GIMPLE_OMP_PARALLEL
	&& outer->cancellable)
      {
	/* Type the OMP_RETURN lhs with BUILT_IN_GOMP_CANCEL's return
	   type (a C bool); branch to the parallel's cancel label when
	   the barrier reports cancellation (lhs != false).  */
	tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_CANCEL);
	tree c_bool_type = TREE_TYPE (TREE_TYPE (fndecl));
	tree lhs = create_tmp_var (c_bool_type);
	gimple_omp_return_set_lhs (omp_return, lhs);
	tree fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
	gimple *g = gimple_build_cond (NE_EXPR, lhs,
				       fold_convert (c_bool_type,
						     boolean_false_node),
				       outer->cancel_label, fallthru_label);
	gimple_seq_add_stmt (body, g);
	gimple_seq_add_stmt (body, gimple_build_label (fallthru_label));
      }
    else if (gimple_code (outer->stmt) != GIMPLE_OMP_TASKGROUP)
      return;
}
7811 
7812 /* Find the first task_reduction or reduction clause or return NULL
7813    if there are none.  */
7814 
7815 static inline tree
omp_task_reductions_find_first(tree clauses,enum tree_code code,enum omp_clause_code ccode)7816 omp_task_reductions_find_first (tree clauses, enum tree_code code,
7817 				enum omp_clause_code ccode)
7818 {
7819   while (1)
7820     {
7821       clauses = omp_find_clause (clauses, ccode);
7822       if (clauses == NULL_TREE)
7823 	return NULL_TREE;
7824       if (ccode != OMP_CLAUSE_REDUCTION
7825 	  || code == OMP_TASKLOOP
7826 	  || OMP_CLAUSE_REDUCTION_TASK (clauses))
7827 	return clauses;
7828       clauses = OMP_CLAUSE_CHAIN (clauses);
7829     }
7830 }
7831 
7832 static void lower_omp_task_reductions (omp_context *, enum tree_code, tree,
7833 				       gimple_seq *, gimple_seq *);
7834 
7835 /* Lower the OpenMP sections directive in the current statement in GSI_P.
7836    CTX is the enclosing OMP context for the current statement.  */
7837 
7838 static void
lower_omp_sections(gimple_stmt_iterator * gsi_p,omp_context * ctx)7839 lower_omp_sections (gimple_stmt_iterator *gsi_p, omp_context *ctx)
7840 {
7841   tree block, control;
7842   gimple_stmt_iterator tgsi;
7843   gomp_sections *stmt;
7844   gimple *t;
7845   gbind *new_stmt, *bind;
7846   gimple_seq ilist, dlist, olist, tred_dlist = NULL, clist = NULL, new_body;
7847 
7848   stmt = as_a <gomp_sections *> (gsi_stmt (*gsi_p));
7849 
7850   push_gimplify_context ();
7851 
7852   dlist = NULL;
7853   ilist = NULL;
7854 
7855   tree rclauses
7856     = omp_task_reductions_find_first (gimple_omp_sections_clauses (stmt),
7857 				      OMP_SECTIONS, OMP_CLAUSE_REDUCTION);
7858   tree rtmp = NULL_TREE;
7859   if (rclauses)
7860     {
7861       tree type = build_pointer_type (pointer_sized_int_node);
7862       tree temp = create_tmp_var (type);
7863       tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
7864       OMP_CLAUSE_DECL (c) = temp;
7865       OMP_CLAUSE_CHAIN (c) = gimple_omp_sections_clauses (stmt);
7866       gimple_omp_sections_set_clauses (stmt, c);
7867       lower_omp_task_reductions (ctx, OMP_SECTIONS,
7868 				 gimple_omp_sections_clauses (stmt),
7869 				 &ilist, &tred_dlist);
7870       rclauses = c;
7871       rtmp = make_ssa_name (type);
7872       gimple_seq_add_stmt (&ilist, gimple_build_assign (rtmp, temp));
7873     }
7874 
7875   tree *clauses_ptr = gimple_omp_sections_clauses_ptr (stmt);
7876   lower_lastprivate_conditional_clauses (clauses_ptr, ctx);
7877 
7878   lower_rec_input_clauses (gimple_omp_sections_clauses (stmt),
7879       			   &ilist, &dlist, ctx, NULL);
7880 
7881   control = create_tmp_var (unsigned_type_node, ".section");
7882   gimple_omp_sections_set_control (stmt, control);
7883 
7884   new_body = gimple_omp_body (stmt);
7885   gimple_omp_set_body (stmt, NULL);
7886   tgsi = gsi_start (new_body);
7887   for (; !gsi_end_p (tgsi); gsi_next (&tgsi))
7888     {
7889       omp_context *sctx;
7890       gimple *sec_start;
7891 
7892       sec_start = gsi_stmt (tgsi);
7893       sctx = maybe_lookup_ctx (sec_start);
7894       gcc_assert (sctx);
7895 
7896       lower_omp (gimple_omp_body_ptr (sec_start), sctx);
7897       gsi_insert_seq_after (&tgsi, gimple_omp_body (sec_start),
7898 			    GSI_CONTINUE_LINKING);
7899       gimple_omp_set_body (sec_start, NULL);
7900 
7901       if (gsi_one_before_end_p (tgsi))
7902 	{
7903 	  gimple_seq l = NULL;
7904 	  lower_lastprivate_clauses (gimple_omp_sections_clauses (stmt), NULL,
7905 				     &ilist, &l, &clist, ctx);
7906 	  gsi_insert_seq_after (&tgsi, l, GSI_CONTINUE_LINKING);
7907 	  gimple_omp_section_set_last (sec_start);
7908 	}
7909 
7910       gsi_insert_after (&tgsi, gimple_build_omp_return (false),
7911 			GSI_CONTINUE_LINKING);
7912     }
7913 
7914   block = make_node (BLOCK);
7915   bind = gimple_build_bind (NULL, new_body, block);
7916 
7917   olist = NULL;
7918   lower_reduction_clauses (gimple_omp_sections_clauses (stmt), &olist,
7919 			   &clist, ctx);
7920   if (clist)
7921     {
7922       tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START);
7923       gcall *g = gimple_build_call (fndecl, 0);
7924       gimple_seq_add_stmt (&olist, g);
7925       gimple_seq_add_seq (&olist, clist);
7926       fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END);
7927       g = gimple_build_call (fndecl, 0);
7928       gimple_seq_add_stmt (&olist, g);
7929     }
7930 
7931   block = make_node (BLOCK);
7932   new_stmt = gimple_build_bind (NULL, NULL, block);
7933   gsi_replace (gsi_p, new_stmt, true);
7934 
7935   pop_gimplify_context (new_stmt);
7936   gimple_bind_append_vars (new_stmt, ctx->block_vars);
7937   BLOCK_VARS (block) = gimple_bind_vars (bind);
7938   if (BLOCK_VARS (block))
7939     TREE_USED (block) = 1;
7940 
7941   new_body = NULL;
7942   gimple_seq_add_seq (&new_body, ilist);
7943   gimple_seq_add_stmt (&new_body, stmt);
7944   gimple_seq_add_stmt (&new_body, gimple_build_omp_sections_switch ());
7945   gimple_seq_add_stmt (&new_body, bind);
7946 
7947   t = gimple_build_omp_continue (control, control);
7948   gimple_seq_add_stmt (&new_body, t);
7949 
7950   gimple_seq_add_seq (&new_body, olist);
7951   if (ctx->cancellable)
7952     gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
7953   gimple_seq_add_seq (&new_body, dlist);
7954 
7955   new_body = maybe_catch_exception (new_body);
7956 
7957   bool nowait = omp_find_clause (gimple_omp_sections_clauses (stmt),
7958 				 OMP_CLAUSE_NOWAIT) != NULL_TREE;
7959   t = gimple_build_omp_return (nowait);
7960   gimple_seq_add_stmt (&new_body, t);
7961   gimple_seq_add_seq (&new_body, tred_dlist);
7962   maybe_add_implicit_barrier_cancel (ctx, t, &new_body);
7963 
7964   if (rclauses)
7965     OMP_CLAUSE_DECL (rclauses) = rtmp;
7966 
7967   gimple_bind_set_body (new_stmt, new_body);
7968 }
7969 
7970 
7971 /* A subroutine of lower_omp_single.  Expand the simple form of
7972    a GIMPLE_OMP_SINGLE, without a copyprivate clause:
7973 
7974      	if (GOMP_single_start ())
7975 	  BODY;
7976 	[ GOMP_barrier (); ]	-> unless 'nowait' is present.
7977 
7978   FIXME.  It may be better to delay expanding the logic of this until
7979   pass_expand_omp.  The expanded logic may make the job more difficult
7980   to a synchronization analysis pass.  */
7981 
7982 static void
lower_omp_single_simple(gomp_single * single_stmt,gimple_seq * pre_p)7983 lower_omp_single_simple (gomp_single *single_stmt, gimple_seq *pre_p)
7984 {
7985   location_t loc = gimple_location (single_stmt);
7986   tree tlabel = create_artificial_label (loc);
7987   tree flabel = create_artificial_label (loc);
7988   gimple *call, *cond;
7989   tree lhs, decl;
7990 
7991   decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_START);
7992   lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (decl)));
7993   call = gimple_build_call (decl, 0);
7994   gimple_call_set_lhs (call, lhs);
7995   gimple_seq_add_stmt (pre_p, call);
7996 
7997   cond = gimple_build_cond (EQ_EXPR, lhs,
7998 			    fold_convert_loc (loc, TREE_TYPE (lhs),
7999 					      boolean_true_node),
8000 			    tlabel, flabel);
8001   gimple_seq_add_stmt (pre_p, cond);
8002   gimple_seq_add_stmt (pre_p, gimple_build_label (tlabel));
8003   gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
8004   gimple_seq_add_stmt (pre_p, gimple_build_label (flabel));
8005 }
8006 
8007 
/* A subroutine of lower_omp_single.  Expand the simple form of
   a GIMPLE_OMP_SINGLE, with a copyprivate clause:

	#pragma omp single copyprivate (a, b, c)

   Create a new structure to hold copies of 'a', 'b' and 'c' and emit:

      {
	if ((copyout_p = GOMP_single_copy_start ()) == NULL)
	  {
	    BODY;
	    copyout.a = a;
	    copyout.b = b;
	    copyout.c = c;
	    GOMP_single_copy_end (&copyout);
	  }
	else
	  {
	    a = copyout_p->a;
	    b = copyout_p->b;
	    c = copyout_p->c;
	  }
	GOMP_barrier ();
      }

  FIXME.  It may be better to delay expanding the logic of this until
  pass_expand_omp.  The expanded logic may make the job more difficult
  to a synchronization analysis pass.  */

static void
lower_omp_single_copy (gomp_single *single_stmt, gimple_seq *pre_p,
		       omp_context *ctx)
{
  tree ptr_type, t, l0, l1, l2, bfn_decl;
  gimple_seq copyin_seq;
  location_t loc = gimple_location (single_stmt);

  /* .omp_copy_o is the 'copyout' structure the executing thread fills in;
     .omp_copy_i is the 'copyout_p' pointer through which the other
     threads read it back.  */
  ctx->sender_decl = create_tmp_var (ctx->record_type, ".omp_copy_o");

  ptr_type = build_pointer_type (ctx->record_type);
  ctx->receiver_decl = create_tmp_var (ptr_type, ".omp_copy_i");

  /* L0 = body + copyout, L1 = copyin, L2 = join point.  */
  l0 = create_artificial_label (loc);
  l1 = create_artificial_label (loc);
  l2 = create_artificial_label (loc);

  /* copyout_p = GOMP_single_copy_start ();  */
  bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_START);
  t = build_call_expr_loc (loc, bfn_decl, 0);
  t = fold_convert_loc (loc, ptr_type, t);
  gimplify_assign (ctx->receiver_decl, t, pre_p);

  /* if (copyout_p == NULL) goto L0; else goto L1;  */
  t = build2 (EQ_EXPR, boolean_type_node, ctx->receiver_decl,
	      build_int_cst (ptr_type, 0));
  t = build3 (COND_EXPR, void_type_node, t,
	      build_and_jump (&l0), build_and_jump (&l1));
  gimplify_and_add (t, pre_p);

  /* L0: the selected thread executes the body ...  */
  gimple_seq_add_stmt (pre_p, gimple_build_label (l0));

  gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));

  /* ... stores the copyprivate variables into the copyout structure;
     the matching loads for the other threads are collected into
     COPYIN_SEQ and emitted after L1 below ...  */
  copyin_seq = NULL;
  lower_copyprivate_clauses (gimple_omp_single_clauses (single_stmt), pre_p,
			      &copyin_seq, ctx);

  /* ... and publishes it: GOMP_single_copy_end (&copyout);  */
  t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
  bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_END);
  t = build_call_expr_loc (loc, bfn_decl, 1, t);
  gimplify_and_add (t, pre_p);

  /* The executing thread skips over the copyin code.  */
  t = build_and_jump (&l2);
  gimplify_and_add (t, pre_p);

  /* L1: every other thread copies the published values back out.  */
  gimple_seq_add_stmt (pre_p, gimple_build_label (l1));

  gimple_seq_add_seq (pre_p, copyin_seq);

  /* L2: join point.  */
  gimple_seq_add_stmt (pre_p, gimple_build_label (l2));
}
8087 
8088 
/* Expand code for an OpenMP single directive.  Wraps the construct in a
   GIMPLE_BIND, lowers its clauses and body, and dispatches to either the
   simple or the copyprivate expansion depending on whether CTX has a
   record type (created when copyprivate data must be marshalled).  */

static void
lower_omp_single (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree block;
  gomp_single *single_stmt = as_a <gomp_single *> (gsi_stmt (*gsi_p));
  gbind *bind;
  gimple_seq bind_body, bind_body_tail = NULL, dlist;

  push_gimplify_context ();

  /* Replace the GIMPLE_OMP_SINGLE statement with a bind that will hold
     the whole expansion.  */
  block = make_node (BLOCK);
  bind = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, bind, true);
  bind_body = NULL;
  dlist = NULL;
  /* Private-variable setup code goes into BIND_BODY; the corresponding
     cleanup code is collected in DLIST and appended after the body.  */
  lower_rec_input_clauses (gimple_omp_single_clauses (single_stmt),
			   &bind_body, &dlist, ctx, NULL);
  lower_omp (gimple_omp_body_ptr (single_stmt), ctx);

  gimple_seq_add_stmt (&bind_body, single_stmt);

  /* CTX->record_type is set when there are copyprivate variables to
     marshall (see lower_omp_single_copy); otherwise use the plain
     GOMP_single_start form.  */
  if (ctx->record_type)
    lower_omp_single_copy (single_stmt, &bind_body, ctx);
  else
    lower_omp_single_simple (single_stmt, &bind_body);

  /* The body has been emitted inline above; drop it from the stmt.  */
  gimple_omp_set_body (single_stmt, NULL);

  gimple_seq_add_seq (&bind_body, dlist);

  bind_body = maybe_catch_exception (bind_body);

  bool nowait = omp_find_clause (gimple_omp_single_clauses (single_stmt),
				 OMP_CLAUSE_NOWAIT) != NULL_TREE;
  gimple *g = gimple_build_omp_return (nowait);
  gimple_seq_add_stmt (&bind_body_tail, g);
  maybe_add_implicit_barrier_cancel (ctx, g, &bind_body_tail);
  if (ctx->record_type)
    {
      gimple_stmt_iterator gsi = gsi_start (bind_body_tail);
      /* Clobber the copyout structure once the construct is done so its
	 storage can be reused afterwards.  */
      tree clobber = build_clobber (ctx->record_type);
      gsi_insert_after (&gsi, gimple_build_assign (ctx->sender_decl,
						   clobber), GSI_SAME_STMT);
    }
  gimple_seq_add_seq (&bind_body, bind_body_tail);
  gimple_bind_set_body (bind, bind_body);

  pop_gimplify_context (bind);

  gimple_bind_append_vars (bind, ctx->block_vars);
  BLOCK_VARS (block) = ctx->block_vars;
  if (BLOCK_VARS (block))
    TREE_USED (block) = 1;
}
8145 
8146 
/* Expand code for an OpenMP master directive.  Guards the body with a
   check that omp_get_thread_num () == 0, so only the master thread
   executes it:

	if (omp_get_thread_num () == 0)
	  BODY;  */

static void
lower_omp_master (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree block, lab = NULL, x, bfn_decl;
  gimple *stmt = gsi_stmt (*gsi_p);
  gbind *bind;
  location_t loc = gimple_location (stmt);
  gimple_seq tseq;

  push_gimplify_context ();

  /* Replace the GIMPLE_OMP_MASTER statement with a bind holding the
     expansion; the original stmt is kept as its first statement.  */
  block = make_node (BLOCK);
  bind = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, bind, true);
  gimple_bind_add_stmt (bind, stmt);

  /* Build the guard: skip the body unless omp_get_thread_num () == 0.
     LAB is filled in by build_and_jump and emitted past the body.  */
  bfn_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
  x = build_call_expr_loc (loc, bfn_decl, 0);
  x = build2 (EQ_EXPR, boolean_type_node, x, integer_zero_node);
  x = build3 (COND_EXPR, void_type_node, x, NULL, build_and_jump (&lab));
  tseq = NULL;
  gimplify_and_add (x, &tseq);
  gimple_bind_add_seq (bind, tseq);

  /* Lower the body and append it inline, wrapped for EH if needed.  */
  lower_omp (gimple_omp_body_ptr (stmt), ctx);
  gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
  gimple_bind_add_seq (bind, gimple_omp_body (stmt));
  gimple_omp_set_body (stmt, NULL);

  /* Skip-over label for the non-master threads.  */
  gimple_bind_add_stmt (bind, gimple_build_label (lab));

  /* master has no implicit barrier at the end, hence nowait (true).  */
  gimple_bind_add_stmt (bind, gimple_build_omp_return (true));

  pop_gimplify_context (bind);

  gimple_bind_append_vars (bind, ctx->block_vars);
  BLOCK_VARS (block) = ctx->block_vars;
}
8187 
8188 /* Helper function for lower_omp_task_reductions.  For a specific PASS
8189    find out the current clause it should be processed, or return false
8190    if all have been processed already.  */
8191 
8192 static inline bool
omp_task_reduction_iterate(int pass,enum tree_code code,enum omp_clause_code ccode,tree * c,tree * decl,tree * type,tree * next)8193 omp_task_reduction_iterate (int pass, enum tree_code code,
8194 			    enum omp_clause_code ccode, tree *c, tree *decl,
8195 			    tree *type, tree *next)
8196 {
8197   for (; *c; *c = omp_find_clause (OMP_CLAUSE_CHAIN (*c), ccode))
8198     {
8199       if (ccode == OMP_CLAUSE_REDUCTION
8200 	  && code != OMP_TASKLOOP
8201 	  && !OMP_CLAUSE_REDUCTION_TASK (*c))
8202 	continue;
8203       *decl = OMP_CLAUSE_DECL (*c);
8204       *type = TREE_TYPE (*decl);
8205       if (TREE_CODE (*decl) == MEM_REF)
8206 	{
8207 	  if (pass != 1)
8208 	    continue;
8209 	}
8210       else
8211 	{
8212 	  if (omp_is_reference (*decl))
8213 	    *type = TREE_TYPE (*type);
8214 	  if (pass != (!TREE_CONSTANT (TYPE_SIZE_UNIT (*type))))
8215 	    continue;
8216 	}
8217       *next = omp_find_clause (OMP_CLAUSE_CHAIN (*c), ccode);
8218       return true;
8219     }
8220   *decl = NULL_TREE;
8221   *type = NULL_TREE;
8222   *next = NULL_TREE;
8223   return false;
8224 }
8225 
8226 /* Lower task_reduction and reduction clauses (the latter unless CODE is
8227    OMP_TASKGROUP only with task modifier).  Register mapping of those in
8228    START sequence and reducing them and unregister them in the END sequence.  */
8229 
8230 static void
lower_omp_task_reductions(omp_context * ctx,enum tree_code code,tree clauses,gimple_seq * start,gimple_seq * end)8231 lower_omp_task_reductions (omp_context *ctx, enum tree_code code, tree clauses,
8232 			   gimple_seq *start, gimple_seq *end)
8233 {
8234   enum omp_clause_code ccode
8235     = (code == OMP_TASKGROUP
8236        ? OMP_CLAUSE_TASK_REDUCTION : OMP_CLAUSE_REDUCTION);
8237   tree cancellable = NULL_TREE;
8238   clauses = omp_task_reductions_find_first (clauses, code, ccode);
8239   if (clauses == NULL_TREE)
8240     return;
8241   if (code == OMP_FOR || code == OMP_SECTIONS)
8242     {
8243       for (omp_context *outer = ctx->outer; outer; outer = outer->outer)
8244 	if (gimple_code (outer->stmt) == GIMPLE_OMP_PARALLEL
8245 	    && outer->cancellable)
8246 	  {
8247 	    cancellable = error_mark_node;
8248 	    break;
8249 	  }
8250 	else if (gimple_code (outer->stmt) != GIMPLE_OMP_TASKGROUP)
8251 	  break;
8252     }
8253   tree record_type = lang_hooks.types.make_type (RECORD_TYPE);
8254   tree *last = &TYPE_FIELDS (record_type);
8255   unsigned cnt = 0;
8256   if (cancellable)
8257     {
8258       tree field = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
8259 			       ptr_type_node);
8260       tree ifield = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
8261 				integer_type_node);
8262       *last = field;
8263       DECL_CHAIN (field) = ifield;
8264       last = &DECL_CHAIN (ifield);
8265       DECL_CONTEXT (field) = record_type;
8266       if (TYPE_ALIGN (record_type) < DECL_ALIGN (field))
8267 	SET_TYPE_ALIGN (record_type, DECL_ALIGN (field));
8268       DECL_CONTEXT (ifield) = record_type;
8269       if (TYPE_ALIGN (record_type) < DECL_ALIGN (ifield))
8270 	SET_TYPE_ALIGN (record_type, DECL_ALIGN (ifield));
8271     }
8272   for (int pass = 0; pass < 2; pass++)
8273     {
8274       tree decl, type, next;
8275       for (tree c = clauses;
8276 	   omp_task_reduction_iterate (pass, code, ccode,
8277 				       &c, &decl, &type, &next); c = next)
8278 	{
8279 	  ++cnt;
8280 	  tree new_type = type;
8281 	  if (ctx->outer)
8282 	    new_type = remap_type (type, &ctx->outer->cb);
8283 	  tree field
8284 	    = build_decl (OMP_CLAUSE_LOCATION (c), FIELD_DECL,
8285 			  DECL_P (decl) ? DECL_NAME (decl) : NULL_TREE,
8286 			  new_type);
8287 	  if (DECL_P (decl) && type == TREE_TYPE (decl))
8288 	    {
8289 	      SET_DECL_ALIGN (field, DECL_ALIGN (decl));
8290 	      DECL_USER_ALIGN (field) = DECL_USER_ALIGN (decl);
8291 	      TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (decl);
8292 	    }
8293 	  else
8294 	    SET_DECL_ALIGN (field, TYPE_ALIGN (type));
8295 	  DECL_CONTEXT (field) = record_type;
8296 	  if (TYPE_ALIGN (record_type) < DECL_ALIGN (field))
8297 	    SET_TYPE_ALIGN (record_type, DECL_ALIGN (field));
8298 	  *last = field;
8299 	  last = &DECL_CHAIN (field);
8300 	  tree bfield
8301 	    = build_decl (OMP_CLAUSE_LOCATION (c), FIELD_DECL, NULL_TREE,
8302 			  boolean_type_node);
8303 	  DECL_CONTEXT (bfield) = record_type;
8304 	  if (TYPE_ALIGN (record_type) < DECL_ALIGN (bfield))
8305 	    SET_TYPE_ALIGN (record_type, DECL_ALIGN (bfield));
8306 	  *last = bfield;
8307 	  last = &DECL_CHAIN (bfield);
8308 	}
8309     }
8310   *last = NULL_TREE;
8311   layout_type (record_type);
8312 
8313   /* Build up an array which registers with the runtime all the reductions
8314      and deregisters them at the end.  Format documented in libgomp/task.c.  */
8315   tree atype = build_array_type_nelts (pointer_sized_int_node, 7 + cnt * 3);
8316   tree avar = create_tmp_var_raw (atype);
8317   gimple_add_tmp_var (avar);
8318   TREE_ADDRESSABLE (avar) = 1;
8319   tree r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_zero_node,
8320 		   NULL_TREE, NULL_TREE);
8321   tree t = build_int_cst (pointer_sized_int_node, cnt);
8322   gimple_seq_add_stmt (start, gimple_build_assign (r, t));
8323   gimple_seq seq = NULL;
8324   tree sz = fold_convert (pointer_sized_int_node,
8325 			  TYPE_SIZE_UNIT (record_type));
8326   int cachesz = 64;
8327   sz = fold_build2 (PLUS_EXPR, pointer_sized_int_node, sz,
8328 		    build_int_cst (pointer_sized_int_node, cachesz - 1));
8329   sz = fold_build2 (BIT_AND_EXPR, pointer_sized_int_node, sz,
8330 		    build_int_cst (pointer_sized_int_node, ~(cachesz - 1)));
8331   ctx->task_reductions.create (1 + cnt);
8332   ctx->task_reduction_map = new hash_map<tree, unsigned>;
8333   ctx->task_reductions.quick_push (TREE_CODE (sz) == INTEGER_CST
8334 				   ? sz : NULL_TREE);
8335   sz = force_gimple_operand (sz, &seq, true, NULL_TREE);
8336   gimple_seq_add_seq (start, seq);
8337   r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_one_node,
8338 	      NULL_TREE, NULL_TREE);
8339   gimple_seq_add_stmt (start, gimple_build_assign (r, sz));
8340   r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (2),
8341 	      NULL_TREE, NULL_TREE);
8342   t = build_int_cst (pointer_sized_int_node,
8343 		     MAX (TYPE_ALIGN_UNIT (record_type), (unsigned) cachesz));
8344   gimple_seq_add_stmt (start, gimple_build_assign (r, t));
8345   r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (3),
8346 	      NULL_TREE, NULL_TREE);
8347   t = build_int_cst (pointer_sized_int_node, -1);
8348   gimple_seq_add_stmt (start, gimple_build_assign (r, t));
8349   r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (4),
8350 	      NULL_TREE, NULL_TREE);
8351   t = build_int_cst (pointer_sized_int_node, 0);
8352   gimple_seq_add_stmt (start, gimple_build_assign (r, t));
8353 
8354   /* In end, build a loop that iterates from 0 to < omp_get_num_threads ()
8355      and for each task reduction checks a bool right after the private variable
8356      within that thread's chunk; if the bool is clear, it hasn't been
8357      initialized and thus isn't going to be reduced nor destructed, otherwise
8358      reduce and destruct it.  */
8359   tree idx = create_tmp_var (size_type_node);
8360   gimple_seq_add_stmt (end, gimple_build_assign (idx, size_zero_node));
8361   tree num_thr_sz = create_tmp_var (size_type_node);
8362   tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
8363   tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
8364   tree lab3 = NULL_TREE, lab7 = NULL_TREE;
8365   gimple *g;
8366   if (code == OMP_FOR || code == OMP_SECTIONS)
8367     {
8368       /* For worksharing constructs, only perform it in the master thread,
8369 	 with the exception of cancelled implicit barriers - then only handle
8370 	 the current thread.  */
8371       tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
8372       t = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
8373       tree thr_num = create_tmp_var (integer_type_node);
8374       g = gimple_build_call (t, 0);
8375       gimple_call_set_lhs (g, thr_num);
8376       gimple_seq_add_stmt (end, g);
8377       if (cancellable)
8378 	{
8379 	  tree c;
8380 	  tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
8381 	  tree lab6 = create_artificial_label (UNKNOWN_LOCATION);
8382 	  lab3 = create_artificial_label (UNKNOWN_LOCATION);
8383 	  if (code == OMP_FOR)
8384 	    c = gimple_omp_for_clauses (ctx->stmt);
8385 	  else /* if (code == OMP_SECTIONS) */
8386 	    c = gimple_omp_sections_clauses (ctx->stmt);
8387 	  c = OMP_CLAUSE_DECL (omp_find_clause (c, OMP_CLAUSE__REDUCTEMP_));
8388 	  cancellable = c;
8389 	  g = gimple_build_cond (NE_EXPR, c, build_zero_cst (TREE_TYPE (c)),
8390 				 lab5, lab6);
8391 	  gimple_seq_add_stmt (end, g);
8392 	  gimple_seq_add_stmt (end, gimple_build_label (lab5));
8393 	  g = gimple_build_assign (idx, NOP_EXPR, thr_num);
8394 	  gimple_seq_add_stmt (end, g);
8395 	  g = gimple_build_assign (num_thr_sz, PLUS_EXPR, idx,
8396 				   build_one_cst (TREE_TYPE (idx)));
8397 	  gimple_seq_add_stmt (end, g);
8398 	  gimple_seq_add_stmt (end, gimple_build_goto (lab3));
8399 	  gimple_seq_add_stmt (end, gimple_build_label (lab6));
8400 	}
8401       g = gimple_build_cond (NE_EXPR, thr_num, integer_zero_node, lab2, lab4);
8402       gimple_seq_add_stmt (end, g);
8403       gimple_seq_add_stmt (end, gimple_build_label (lab4));
8404     }
8405   if (code != OMP_PARALLEL)
8406     {
8407       t = builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS);
8408       tree num_thr = create_tmp_var (integer_type_node);
8409       g = gimple_build_call (t, 0);
8410       gimple_call_set_lhs (g, num_thr);
8411       gimple_seq_add_stmt (end, g);
8412       g = gimple_build_assign (num_thr_sz, NOP_EXPR, num_thr);
8413       gimple_seq_add_stmt (end, g);
8414       if (cancellable)
8415 	gimple_seq_add_stmt (end, gimple_build_label (lab3));
8416     }
8417   else
8418     {
8419       tree c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
8420 				OMP_CLAUSE__REDUCTEMP_);
8421       t = fold_convert (pointer_sized_int_node, OMP_CLAUSE_DECL (c));
8422       t = fold_convert (size_type_node, t);
8423       gimplify_assign (num_thr_sz, t, end);
8424     }
8425   t = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (2),
8426 	      NULL_TREE, NULL_TREE);
8427   tree data = create_tmp_var (pointer_sized_int_node);
8428   gimple_seq_add_stmt (end, gimple_build_assign (data, t));
8429   if (code == OMP_TASKLOOP)
8430     {
8431       lab7 = create_artificial_label (UNKNOWN_LOCATION);
8432       g = gimple_build_cond (NE_EXPR, data,
8433 			     build_zero_cst (pointer_sized_int_node),
8434 			     lab1, lab7);
8435       gimple_seq_add_stmt (end, g);
8436     }
8437   gimple_seq_add_stmt (end, gimple_build_label (lab1));
8438   tree ptr;
8439   if (TREE_CODE (TYPE_SIZE_UNIT (record_type)) == INTEGER_CST)
8440     ptr = create_tmp_var (build_pointer_type (record_type));
8441   else
8442     ptr = create_tmp_var (ptr_type_node);
8443   gimple_seq_add_stmt (end, gimple_build_assign (ptr, NOP_EXPR, data));
8444 
8445   tree field = TYPE_FIELDS (record_type);
8446   cnt = 0;
8447   if (cancellable)
8448     field = DECL_CHAIN (DECL_CHAIN (field));
8449   for (int pass = 0; pass < 2; pass++)
8450     {
8451       tree decl, type, next;
8452       for (tree c = clauses;
8453 	   omp_task_reduction_iterate (pass, code, ccode,
8454 				       &c, &decl, &type, &next); c = next)
8455 	{
8456 	  tree var = decl, ref;
8457 	  if (TREE_CODE (decl) == MEM_REF)
8458 	    {
8459 	      var = TREE_OPERAND (var, 0);
8460 	      if (TREE_CODE (var) == POINTER_PLUS_EXPR)
8461 		var = TREE_OPERAND (var, 0);
8462 	      tree v = var;
8463 	      if (TREE_CODE (var) == ADDR_EXPR)
8464 		var = TREE_OPERAND (var, 0);
8465 	      else if (TREE_CODE (var) == INDIRECT_REF)
8466 		var = TREE_OPERAND (var, 0);
8467 	      tree orig_var = var;
8468 	      if (is_variable_sized (var))
8469 		{
8470 		  gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
8471 		  var = DECL_VALUE_EXPR (var);
8472 		  gcc_assert (TREE_CODE (var) == INDIRECT_REF);
8473 		  var = TREE_OPERAND (var, 0);
8474 		  gcc_assert (DECL_P (var));
8475 		}
8476 	      t = ref = maybe_lookup_decl_in_outer_ctx (var, ctx);
8477 	      if (orig_var != var)
8478 		gcc_assert (TREE_CODE (v) == ADDR_EXPR);
8479 	      else if (TREE_CODE (v) == ADDR_EXPR)
8480 		t = build_fold_addr_expr (t);
8481 	      else if (TREE_CODE (v) == INDIRECT_REF)
8482 		t = build_fold_indirect_ref (t);
8483 	      if (TREE_CODE (TREE_OPERAND (decl, 0)) == POINTER_PLUS_EXPR)
8484 		{
8485 		  tree b = TREE_OPERAND (TREE_OPERAND (decl, 0), 1);
8486 		  b = maybe_lookup_decl_in_outer_ctx (b, ctx);
8487 		  t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, b);
8488 		}
8489 	      if (!integer_zerop (TREE_OPERAND (decl, 1)))
8490 		t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t,
8491 				 fold_convert (size_type_node,
8492 					       TREE_OPERAND (decl, 1)));
8493 	    }
8494 	  else
8495 	    {
8496 	      t = ref = maybe_lookup_decl_in_outer_ctx (var, ctx);
8497 	      if (!omp_is_reference (decl))
8498 		t = build_fold_addr_expr (t);
8499 	    }
8500 	  t = fold_convert (pointer_sized_int_node, t);
8501 	  seq = NULL;
8502 	  t = force_gimple_operand (t, &seq, true, NULL_TREE);
8503 	  gimple_seq_add_seq (start, seq);
8504 	  r = build4 (ARRAY_REF, pointer_sized_int_node, avar,
8505 		      size_int (7 + cnt * 3), NULL_TREE, NULL_TREE);
8506 	  gimple_seq_add_stmt (start, gimple_build_assign (r, t));
8507 	  t = unshare_expr (byte_position (field));
8508 	  t = fold_convert (pointer_sized_int_node, t);
8509 	  ctx->task_reduction_map->put (c, cnt);
8510 	  ctx->task_reductions.quick_push (TREE_CODE (t) == INTEGER_CST
8511 					   ? t : NULL_TREE);
8512 	  seq = NULL;
8513 	  t = force_gimple_operand (t, &seq, true, NULL_TREE);
8514 	  gimple_seq_add_seq (start, seq);
8515 	  r = build4 (ARRAY_REF, pointer_sized_int_node, avar,
8516 		      size_int (7 + cnt * 3 + 1), NULL_TREE, NULL_TREE);
8517 	  gimple_seq_add_stmt (start, gimple_build_assign (r, t));
8518 
8519 	  tree bfield = DECL_CHAIN (field);
8520 	  tree cond;
8521 	  if (code == OMP_PARALLEL || code == OMP_FOR || code == OMP_SECTIONS)
8522 	    /* In parallel or worksharing all threads unconditionally
8523 	       initialize all their task reduction private variables.  */
8524 	    cond = boolean_true_node;
8525 	  else if (TREE_TYPE (ptr) == ptr_type_node)
8526 	    {
8527 	      cond = build2 (POINTER_PLUS_EXPR, ptr_type_node, ptr,
8528 			     unshare_expr (byte_position (bfield)));
8529 	      seq = NULL;
8530 	      cond = force_gimple_operand (cond, &seq, true, NULL_TREE);
8531 	      gimple_seq_add_seq (end, seq);
8532 	      tree pbool = build_pointer_type (TREE_TYPE (bfield));
8533 	      cond = build2 (MEM_REF, TREE_TYPE (bfield), cond,
8534 			     build_int_cst (pbool, 0));
8535 	    }
8536 	  else
8537 	    cond = build3 (COMPONENT_REF, TREE_TYPE (bfield),
8538 			   build_simple_mem_ref (ptr), bfield, NULL_TREE);
8539 	  tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
8540 	  tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
8541 	  tree condv = create_tmp_var (boolean_type_node);
8542 	  gimple_seq_add_stmt (end, gimple_build_assign (condv, cond));
8543 	  g = gimple_build_cond (NE_EXPR, condv, boolean_false_node,
8544 				 lab3, lab4);
8545 	  gimple_seq_add_stmt (end, g);
8546 	  gimple_seq_add_stmt (end, gimple_build_label (lab3));
8547 	  if (cancellable && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) == NULL_TREE)
8548 	    {
8549 	      /* If this reduction doesn't need destruction and parallel
8550 		 has been cancelled, there is nothing to do for this
8551 		 reduction, so jump around the merge operation.  */
8552 	      tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
8553 	      g = gimple_build_cond (NE_EXPR, cancellable,
8554 				     build_zero_cst (TREE_TYPE (cancellable)),
8555 				     lab4, lab5);
8556 	      gimple_seq_add_stmt (end, g);
8557 	      gimple_seq_add_stmt (end, gimple_build_label (lab5));
8558 	    }
8559 
8560 	  tree new_var;
8561 	  if (TREE_TYPE (ptr) == ptr_type_node)
8562 	    {
8563 	      new_var = build2 (POINTER_PLUS_EXPR, ptr_type_node, ptr,
8564 				unshare_expr (byte_position (field)));
8565 	      seq = NULL;
8566 	      new_var = force_gimple_operand (new_var, &seq, true, NULL_TREE);
8567 	      gimple_seq_add_seq (end, seq);
8568 	      tree pbool = build_pointer_type (TREE_TYPE (field));
8569 	      new_var = build2 (MEM_REF, TREE_TYPE (field), new_var,
8570 				build_int_cst (pbool, 0));
8571 	    }
8572 	  else
8573 	    new_var = build3 (COMPONENT_REF, TREE_TYPE (field),
8574 			      build_simple_mem_ref (ptr), field, NULL_TREE);
8575 
8576 	  enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
8577 	  if (TREE_CODE (decl) != MEM_REF && omp_is_reference (decl))
8578 	    ref = build_simple_mem_ref (ref);
8579 	  /* reduction(-:var) sums up the partial results, so it acts
8580 	     identically to reduction(+:var).  */
8581 	  if (rcode == MINUS_EXPR)
8582 	    rcode = PLUS_EXPR;
8583 	  if (TREE_CODE (decl) == MEM_REF)
8584 	    {
8585 	      tree type = TREE_TYPE (new_var);
8586 	      tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
8587 	      tree i = create_tmp_var (TREE_TYPE (v));
8588 	      tree ptype = build_pointer_type (TREE_TYPE (type));
8589 	      if (DECL_P (v))
8590 		{
8591 		  v = maybe_lookup_decl_in_outer_ctx (v, ctx);
8592 		  tree vv = create_tmp_var (TREE_TYPE (v));
8593 		  gimplify_assign (vv, v, start);
8594 		  v = vv;
8595 		}
8596 	      ref = build4 (ARRAY_REF, pointer_sized_int_node, avar,
8597 			    size_int (7 + cnt * 3), NULL_TREE, NULL_TREE);
8598 	      new_var = build_fold_addr_expr (new_var);
8599 	      new_var = fold_convert (ptype, new_var);
8600 	      ref = fold_convert (ptype, ref);
8601 	      tree m = create_tmp_var (ptype);
8602 	      gimplify_assign (m, new_var, end);
8603 	      new_var = m;
8604 	      m = create_tmp_var (ptype);
8605 	      gimplify_assign (m, ref, end);
8606 	      ref = m;
8607 	      gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), end);
8608 	      tree body = create_artificial_label (UNKNOWN_LOCATION);
8609 	      tree endl = create_artificial_label (UNKNOWN_LOCATION);
8610 	      gimple_seq_add_stmt (end, gimple_build_label (body));
8611 	      tree priv = build_simple_mem_ref (new_var);
8612 	      tree out = build_simple_mem_ref (ref);
8613 	      if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
8614 		{
8615 		  tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
8616 		  tree decl_placeholder
8617 		    = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
8618 		  tree lab6 = NULL_TREE;
8619 		  if (cancellable)
8620 		    {
8621 		      /* If this reduction needs destruction and parallel
8622 			 has been cancelled, jump around the merge operation
8623 			 to the destruction.  */
8624 		      tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
8625 		      lab6 = create_artificial_label (UNKNOWN_LOCATION);
8626 		      tree zero = build_zero_cst (TREE_TYPE (cancellable));
8627 		      g = gimple_build_cond (NE_EXPR, cancellable, zero,
8628 					     lab6, lab5);
8629 		      gimple_seq_add_stmt (end, g);
8630 		      gimple_seq_add_stmt (end, gimple_build_label (lab5));
8631 		    }
8632 		  SET_DECL_VALUE_EXPR (placeholder, out);
8633 		  DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
8634 		  SET_DECL_VALUE_EXPR (decl_placeholder, priv);
8635 		  DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
8636 		  lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
8637 		  gimple_seq_add_seq (end,
8638 				      OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
8639 		  OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
8640 		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
8641 		    {
8642 		      OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
8643 		      OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
8644 		    }
8645 		  if (cancellable)
8646 		    gimple_seq_add_stmt (end, gimple_build_label (lab6));
8647 		  tree x = lang_hooks.decls.omp_clause_dtor (c, priv);
8648 		  if (x)
8649 		    {
8650 		      gimple_seq tseq = NULL;
8651 		      gimplify_stmt (&x, &tseq);
8652 		      gimple_seq_add_seq (end, tseq);
8653 		    }
8654 		}
8655 	      else
8656 		{
8657 		  tree x = build2 (rcode, TREE_TYPE (out), out, priv);
8658 		  out = unshare_expr (out);
8659 		  gimplify_assign (out, x, end);
8660 		}
8661 	      gimple *g
8662 		= gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
8663 				       TYPE_SIZE_UNIT (TREE_TYPE (type)));
8664 	      gimple_seq_add_stmt (end, g);
8665 	      g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
8666 				       TYPE_SIZE_UNIT (TREE_TYPE (type)));
8667 	      gimple_seq_add_stmt (end, g);
8668 	      g = gimple_build_assign (i, PLUS_EXPR, i,
8669 				       build_int_cst (TREE_TYPE (i), 1));
8670 	      gimple_seq_add_stmt (end, g);
8671 	      g = gimple_build_cond (LE_EXPR, i, v, body, endl);
8672 	      gimple_seq_add_stmt (end, g);
8673 	      gimple_seq_add_stmt (end, gimple_build_label (endl));
8674 	    }
8675 	  else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
8676 	    {
8677 	      tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
8678 	      tree oldv = NULL_TREE;
8679 	      tree lab6 = NULL_TREE;
8680 	      if (cancellable)
8681 		{
8682 		  /* If this reduction needs destruction and parallel
8683 		     has been cancelled, jump around the merge operation
8684 		     to the destruction.  */
8685 		  tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
8686 		  lab6 = create_artificial_label (UNKNOWN_LOCATION);
8687 		  tree zero = build_zero_cst (TREE_TYPE (cancellable));
8688 		  g = gimple_build_cond (NE_EXPR, cancellable, zero,
8689 					 lab6, lab5);
8690 		  gimple_seq_add_stmt (end, g);
8691 		  gimple_seq_add_stmt (end, gimple_build_label (lab5));
8692 		}
8693 	      if (omp_is_reference (decl)
8694 		  && !useless_type_conversion_p (TREE_TYPE (placeholder),
8695 						 TREE_TYPE (ref)))
8696 		ref = build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c), ref);
8697 	      ref = build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c), ref);
8698 	      tree refv = create_tmp_var (TREE_TYPE (ref));
8699 	      gimplify_assign (refv, ref, end);
8700 	      ref = build_simple_mem_ref_loc (OMP_CLAUSE_LOCATION (c), refv);
8701 	      SET_DECL_VALUE_EXPR (placeholder, ref);
8702 	      DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
8703 	      tree d = maybe_lookup_decl (decl, ctx);
8704 	      gcc_assert (d);
8705 	      if (DECL_HAS_VALUE_EXPR_P (d))
8706 		oldv = DECL_VALUE_EXPR (d);
8707 	      if (omp_is_reference (var))
8708 		{
8709 		  tree v = fold_convert (TREE_TYPE (d),
8710 					 build_fold_addr_expr (new_var));
8711 		  SET_DECL_VALUE_EXPR (d, v);
8712 		}
8713 	      else
8714 		SET_DECL_VALUE_EXPR (d, new_var);
8715 	      DECL_HAS_VALUE_EXPR_P (d) = 1;
8716 	      lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
8717 	      if (oldv)
8718 		SET_DECL_VALUE_EXPR (d, oldv);
8719 	      else
8720 		{
8721 		  SET_DECL_VALUE_EXPR (d, NULL_TREE);
8722 		  DECL_HAS_VALUE_EXPR_P (d) = 0;
8723 		}
8724 	      gimple_seq_add_seq (end, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
8725 	      OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
8726 	      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
8727 		OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
8728 	      if (cancellable)
8729 		gimple_seq_add_stmt (end, gimple_build_label (lab6));
8730 	      tree x = lang_hooks.decls.omp_clause_dtor (c, new_var);
8731 	      if (x)
8732 		{
8733 		  gimple_seq tseq = NULL;
8734 		  gimplify_stmt (&x, &tseq);
8735 		  gimple_seq_add_seq (end, tseq);
8736 		}
8737 	    }
8738 	  else
8739 	    {
8740 	      tree x = build2 (rcode, TREE_TYPE (ref), ref, new_var);
8741 	      ref = unshare_expr (ref);
8742 	      gimplify_assign (ref, x, end);
8743 	    }
8744 	  gimple_seq_add_stmt (end, gimple_build_label (lab4));
8745 	  ++cnt;
8746 	  field = DECL_CHAIN (bfield);
8747 	}
8748     }
8749 
8750   if (code == OMP_TASKGROUP)
8751     {
8752       t = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_REGISTER);
8753       g = gimple_build_call (t, 1, build_fold_addr_expr (avar));
8754       gimple_seq_add_stmt (start, g);
8755     }
8756   else
8757     {
8758       tree c;
8759       if (code == OMP_FOR)
8760 	c = gimple_omp_for_clauses (ctx->stmt);
8761       else if (code == OMP_SECTIONS)
8762 	c = gimple_omp_sections_clauses (ctx->stmt);
8763       else
8764 	c = gimple_omp_taskreg_clauses (ctx->stmt);
8765       c = omp_find_clause (c, OMP_CLAUSE__REDUCTEMP_);
8766       t = fold_convert (TREE_TYPE (OMP_CLAUSE_DECL (c)),
8767 			build_fold_addr_expr (avar));
8768       gimplify_assign (OMP_CLAUSE_DECL (c), t, start);
8769     }
8770 
8771   gimple_seq_add_stmt (end, gimple_build_assign (data, PLUS_EXPR, data, sz));
8772   gimple_seq_add_stmt (end, gimple_build_assign (idx, PLUS_EXPR, idx,
8773 						 size_one_node));
8774   g = gimple_build_cond (NE_EXPR, idx, num_thr_sz, lab1, lab2);
8775   gimple_seq_add_stmt (end, g);
8776   gimple_seq_add_stmt (end, gimple_build_label (lab2));
8777   if (code == OMP_FOR || code == OMP_SECTIONS)
8778     {
8779       enum built_in_function bfn
8780 	= BUILT_IN_GOMP_WORKSHARE_TASK_REDUCTION_UNREGISTER;
8781       t = builtin_decl_explicit (bfn);
8782       tree c_bool_type = TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (t)));
8783       tree arg;
8784       if (cancellable)
8785 	{
8786 	  arg = create_tmp_var (c_bool_type);
8787 	  gimple_seq_add_stmt (end, gimple_build_assign (arg, NOP_EXPR,
8788 							 cancellable));
8789 	}
8790       else
8791 	arg = build_int_cst (c_bool_type, 0);
8792       g = gimple_build_call (t, 1, arg);
8793     }
8794   else
8795     {
8796       t = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_UNREGISTER);
8797       g = gimple_build_call (t, 1, build_fold_addr_expr (avar));
8798     }
8799   gimple_seq_add_stmt (end, g);
8800   if (lab7)
8801     gimple_seq_add_stmt (end, gimple_build_label (lab7));
8802   t = build_constructor (atype, NULL);
8803   TREE_THIS_VOLATILE (t) = 1;
8804   gimple_seq_add_stmt (end, gimple_build_assign (avar, t));
8805 }
8806 
8807 /* Expand code for an OpenMP taskgroup directive.  */
8808 
8809 static void
lower_omp_taskgroup(gimple_stmt_iterator * gsi_p,omp_context * ctx)8810 lower_omp_taskgroup (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8811 {
8812   gimple *stmt = gsi_stmt (*gsi_p);
8813   gcall *x;
8814   gbind *bind;
8815   gimple_seq dseq = NULL;
8816   tree block = make_node (BLOCK);
8817 
  /* Wrap the taskgroup statement in a fresh GIMPLE_BIND that will hold the
     runtime entry call, the lowered body and any reduction teardown code.  */
8818   bind = gimple_build_bind (NULL, NULL, block);
8819   gsi_replace (gsi_p, bind, true);
8820   gimple_bind_add_stmt (bind, stmt);
8821 
8822   push_gimplify_context ();
8823 
  /* Enter the taskgroup at runtime before the body executes.  */
8824   x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_START),
8825 			 0);
8826   gimple_bind_add_stmt (bind, x);
8827 
  /* Lower task_reduction clauses: setup code is appended to the bind's body
     now, while teardown is collected into DSEQ and only added after the
     lowered taskgroup body below.  */
8828   lower_omp_task_reductions (ctx, OMP_TASKGROUP,
8829 			     gimple_omp_taskgroup_clauses (stmt),
8830 			     gimple_bind_body_ptr (bind), &dseq);
8831 
  /* Lower the body and splice it into the bind; the GIMPLE_OMP_TASKGROUP
     statement itself no longer carries the body afterwards.  */
8832   lower_omp (gimple_omp_body_ptr (stmt), ctx);
8833   gimple_bind_add_seq (bind, gimple_omp_body (stmt));
8834   gimple_omp_set_body (stmt, NULL);
8835 
8836   gimple_bind_add_seq (bind, dseq);
8837 
8838   pop_gimplify_context (bind);
8839 
8840   gimple_bind_append_vars (bind, ctx->block_vars);
8841   BLOCK_VARS (block) = ctx->block_vars;
8842 }
8843 
8844 
8845 /* Fold the OMP_ORDERED_CLAUSES for the OMP_ORDERED in STMT if possible.  */
8846 
8847 static void
lower_omp_ordered_clauses(gimple_stmt_iterator * gsi_p,gomp_ordered * ord_stmt,omp_context * ctx)8848 lower_omp_ordered_clauses (gimple_stmt_iterator *gsi_p, gomp_ordered *ord_stmt,
8849 			   omp_context *ctx)
8850 {
  /* Only meaningful when directly nested in a GIMPLE_OMP_FOR with an
     ordered(n) clause; otherwise leave the statement untouched.  */
8851   struct omp_for_data fd;
8852   if (!ctx->outer || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR)
8853     return;
8854 
8855   unsigned int len = gimple_omp_for_collapse (ctx->outer->stmt);
8856   struct omp_for_data_loop *loops = XALLOCAVEC (struct omp_for_data_loop, len);
8857   omp_extract_for_data (as_a <gomp_for *> (ctx->outer->stmt), &fd, loops);
8858   if (!fd.ordered)
8859     return;
8860 
8861   tree *list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
8862   tree c = gimple_omp_ordered_clauses (ord_stmt);
8863   if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
8864       && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
8865     {
8866       /* Merge depend clauses from multiple adjacent
8867 	 #pragma omp ordered depend(sink:...) constructs
8868 	 into one #pragma omp ordered depend(sink:...), so that
8869 	 we can optimize them together.  */
8870       gimple_stmt_iterator gsi = *gsi_p;
8871       gsi_next (&gsi);
8872       while (!gsi_end_p (gsi))
8873 	{
8874 	  gimple *stmt = gsi_stmt (gsi);
	  /* Debug stmts and nops between the ordered constructs are
	     harmless; skip over them.  */
8875 	  if (is_gimple_debug (stmt)
8876 	      || gimple_code (stmt) == GIMPLE_OMP_ORDERED)
8877 	    {
8878 	      gsi_next (&gsi);
8879 	      continue;
8880 	    }
8881 	  if (gimple_code (stmt) != GIMPLE_OMP_ORDERED)
8882 	    break;
8883 	  gomp_ordered *ord_stmt2 = as_a <gomp_ordered *> (stmt);
8884 	  c = gimple_omp_ordered_clauses (ord_stmt2);
8885 	  if (c == NULL_TREE
8886 	      || OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND
8887 	      || OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
8888 	    break;
	  /* Steal the clause chain of the following construct, append it
	     to ours, and delete the now-empty ordered statement.  */
8889 	  while (*list_p)
8890 	    list_p = &OMP_CLAUSE_CHAIN (*list_p);
8891 	  *list_p = c;
8892 	  gsi_remove (&gsi, true);
8893 	}
8894     }
8895 
8896   /* Canonicalize sink dependence clauses into one folded clause if
8897      possible.
8898 
8899      The basic algorithm is to create a sink vector whose first
8900      element is the GCD of all the first elements, and whose remaining
8901      elements are the minimum of the subsequent columns.
8902 
8903      We ignore dependence vectors whose first element is zero because
8904      such dependencies are known to be executed by the same thread.
8905 
8906      We take into account the direction of the loop, so a minimum
8907      becomes a maximum if the loop is iterating forwards.  We also
8908      ignore sink clauses where the loop direction is unknown, or where
8909      the offsets are clearly invalid because they are not a multiple
8910      of the loop increment.
8911 
8912      For example:
8913 
8914 	#pragma omp for ordered(2)
8915 	for (i=0; i < N; ++i)
8916 	  for (j=0; j < M; ++j)
8917 	    {
8918 	      #pragma omp ordered \
8919 		depend(sink:i-8,j-2) \
8920 		depend(sink:i,j-1) \	// Completely ignored because i+0.
8921 		depend(sink:i-4,j-3) \
8922 		depend(sink:i-6,j-4)
8923 	      #pragma omp ordered depend(source)
8924 	    }
8925 
8926      Folded clause is:
8927 
8928 	depend(sink:-gcd(8,4,6),-min(2,3,4))
8929 	  -or-
8930 	depend(sink:-2,-2)
8931   */
8932 
8933   /* FIXME: Computing GCD's where the first element is zero is
8934      non-trivial in the presence of collapsed loops.  Do this later.  */
8935   if (fd.collapse > 1)
8936     return;
8937 
  /* folded_deps[0 .. len-1] holds the folded candidate vector;
     folded_deps[len .. 2*len-2] is scratch holding the offsets of the
     clause currently being examined for dimensions 1 .. len-1.  */
8938   wide_int *folded_deps = XALLOCAVEC (wide_int, 2 * len - 1);
8939 
8940   /* wide_int is not a POD so it must be default-constructed.  */
8941   for (unsigned i = 0; i != 2 * len - 1; ++i)
8942     new (static_cast<void*>(folded_deps + i)) wide_int ();
8943 
  /* The clause whose vector is currently the folded (lexically latest)
     one; it survives and is rewritten in place at the end.  */
8944   tree folded_dep = NULL_TREE;
8945   /* TRUE if the first dimension's offset is negative.  */
8946   bool neg_offset_p = false;
8947 
8948   list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
8949   unsigned int i;
8950   while ((c = *list_p) != NULL)
8951     {
8952       bool remove = false;
8953 
8954       gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND);
8955       if (OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
8956 	goto next_ordered_clause;
8957 
8958       tree vec;
      /* Walk the TREE_LIST of per-dimension (offset, iterator) pairs.  */
8959       for (vec = OMP_CLAUSE_DECL (c), i = 0;
8960 	   vec && TREE_CODE (vec) == TREE_LIST;
8961 	   vec = TREE_CHAIN (vec), ++i)
8962 	{
8963 	  gcc_assert (i < len);
8964 
8965 	  /* omp_extract_for_data has canonicalized the condition.  */
8966 	  gcc_assert (fd.loops[i].cond_code == LT_EXPR
8967 		      || fd.loops[i].cond_code == GT_EXPR);
8968 	  bool forward = fd.loops[i].cond_code == LT_EXPR;
8969 	  bool maybe_lexically_later = true;
8970 
8971 	  /* While the committee makes up its mind, bail if we have any
8972 	     non-constant steps.  */
8973 	  if (TREE_CODE (fd.loops[i].step) != INTEGER_CST)
8974 	    goto lower_omp_ordered_ret;
8975 
8976 	  tree itype = TREE_TYPE (TREE_VALUE (vec));
8977 	  if (POINTER_TYPE_P (itype))
8978 	    itype = sizetype;
8979 	  wide_int offset = wide_int::from (wi::to_wide (TREE_PURPOSE (vec)),
8980 					    TYPE_PRECISION (itype),
8981 					    TYPE_SIGN (itype));
8982 
8983 	  /* Ignore invalid offsets that are not multiples of the step.  */
8984 	  if (!wi::multiple_of_p (wi::abs (offset),
8985 				  wi::abs (wi::to_wide (fd.loops[i].step)),
8986 				  UNSIGNED))
8987 	    {
8988 	      warning_at (OMP_CLAUSE_LOCATION (c), 0,
8989 			  "ignoring sink clause with offset that is not "
8990 			  "a multiple of the loop step");
8991 	      remove = true;
8992 	      goto next_ordered_clause;
8993 	    }
8994 
8995 	  /* Calculate the first dimension.  The first dimension of
8996 	     the folded dependency vector is the GCD of the first
8997 	     elements, while ignoring any first elements whose offset
8998 	     is 0.  */
8999 	  if (i == 0)
9000 	    {
9001 	      /* Ignore dependence vectors whose first dimension is 0.  */
9002 	      if (offset == 0)
9003 		{
9004 		  remove = true;
9005 		  goto next_ordered_clause;
9006 		}
9007 	      else
9008 		{
9009 		  if (!TYPE_UNSIGNED (itype) && (forward ^ wi::neg_p (offset)))
9010 		    {
9011 		      error_at (OMP_CLAUSE_LOCATION (c),
9012 				"first offset must be in opposite direction "
9013 				"of loop iterations");
9014 		      goto lower_omp_ordered_ret;
9015 		    }
		  /* Store the first dimension as a non-negative magnitude;
		     NEG_OFFSET_P records the sign to restore at the end.  */
9016 		  if (forward)
9017 		    offset = -offset;
9018 		  neg_offset_p = forward;
9019 		  /* Initialize the first time around.  */
9020 		  if (folded_dep == NULL_TREE)
9021 		    {
9022 		      folded_dep = c;
9023 		      folded_deps[0] = offset;
9024 		    }
9025 		  else
9026 		    folded_deps[0] = wi::gcd (folded_deps[0],
9027 					      offset, UNSIGNED);
9028 		}
9029 	    }
9030 	  /* Calculate minimum for the remaining dimensions.  */
9031 	  else
9032 	    {
9033 	      folded_deps[len + i - 1] = offset;
9034 	      if (folded_dep == c)
9035 		folded_deps[i] = offset;
9036 	      else if (maybe_lexically_later
9037 		       && !wi::eq_p (folded_deps[i], offset))
9038 		{
		  /* The current clause is lexically later than the folded
		     one; adopt its already-seen dimensions from the
		     scratch half of FOLDED_DEPS.  */
9039 		  if (forward ^ wi::gts_p (folded_deps[i], offset))
9040 		    {
9041 		      unsigned int j;
9042 		      folded_dep = c;
9043 		      for (j = 1; j <= i; j++)
9044 			folded_deps[j] = folded_deps[len + j - 1];
9045 		    }
9046 		  else
9047 		    maybe_lexically_later = false;
9048 		}
9049 	    }
9050 	}
9051       gcc_assert (i == len);
9052 
      /* The clause has been folded into FOLDED_DEPS; drop it.  */
9053       remove = true;
9054 
9055     next_ordered_clause:
9056       if (remove)
9057 	*list_p = OMP_CLAUSE_CHAIN (c);
9058       else
9059 	list_p = &OMP_CLAUSE_CHAIN (c);
9060     }
9061 
9062   if (folded_dep)
9063     {
9064       if (neg_offset_p)
9065 	folded_deps[0] = -folded_deps[0];
9066 
9067       tree itype = TREE_TYPE (TREE_VALUE (OMP_CLAUSE_DECL (folded_dep)));
9068       if (POINTER_TYPE_P (itype))
9069 	itype = sizetype;
9070 
      /* Rewrite the surviving clause's first offset and put it back at
	 the head of the ordered statement's clause chain.  */
9071       TREE_PURPOSE (OMP_CLAUSE_DECL (folded_dep))
9072 	= wide_int_to_tree (itype, folded_deps[0]);
9073       OMP_CLAUSE_CHAIN (folded_dep) = gimple_omp_ordered_clauses (ord_stmt);
9074       *gimple_omp_ordered_clauses_ptr (ord_stmt) = folded_dep;
9075     }
9076 
9077  lower_omp_ordered_ret:
9078 
9079   /* Ordered without clauses is #pragma omp ordered threads, while we want
9080      a nop instead if we remove all clauses.  */
9081   if (gimple_omp_ordered_clauses (ord_stmt) == NULL_TREE)
9082     gsi_replace (gsi_p, gimple_build_nop (), true);
9083 }
9084 
9085 
9086 /* Expand code for an OpenMP ordered directive.  */
9087 
9088 static void
lower_omp_ordered(gimple_stmt_iterator * gsi_p,omp_context * ctx)9089 lower_omp_ordered (gimple_stmt_iterator *gsi_p, omp_context *ctx)
9090 {
9091   tree block;
9092   gimple *stmt = gsi_stmt (*gsi_p), *g;
9093   gomp_ordered *ord_stmt = as_a <gomp_ordered *> (stmt);
9094   gcall *x;
9095   gbind *bind;
  /* ordered simd uses internal functions instead of libgomp calls.  */
9096   bool simd = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
9097 			       OMP_CLAUSE_SIMD);
9098   /* FIXME: this should check presence of OMP_CLAUSE__SIMT_ on the enclosing
9099      loop.  */
9100   bool maybe_simt
9101     = simd && omp_maybe_offloaded_ctx (ctx) && omp_max_simt_vf () > 1;
9102   bool threads = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
9103 				  OMP_CLAUSE_THREADS);
9104 
9105   if (omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
9106 		       OMP_CLAUSE_DEPEND))
9107     {
9108       /* FIXME: This needs to be moved to the expansion to verify various
9109 	 conditions only testable on cfg with dominators computed, and also
9110 	 all the depend clauses to be merged still might need to be available
9111 	 for the runtime checks.  */
9112       if (0)
9113 	lower_omp_ordered_clauses (gsi_p, ord_stmt, ctx);
9114       return;
9115     }
9116 
9117   push_gimplify_context ();
9118 
  /* Wrap the statement in a bind holding the start call, body, end call.  */
9119   block = make_node (BLOCK);
9120   bind = gimple_build_bind (NULL, NULL, block);
9121   gsi_replace (gsi_p, bind, true);
9122   gimple_bind_add_stmt (bind, stmt);
9123 
9124   if (simd)
9125     {
9126       x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_START, 1,
9127 				      build_int_cst (NULL_TREE, threads));
9128       cfun->has_simduid_loops = true;
9129     }
9130   else
9131     x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_START),
9132 			   0);
9133   gimple_bind_add_stmt (bind, x);
9134 
  /* For SIMT, build a loop that runs the body once per lane in lane order:
     COUNTER starts at this lane's id, the predicate selects the lane whose
     turn it is, and the loop repeats while any lane still has a
     non-negative counter (see the VOTE_ANY call below).  */
9135   tree counter = NULL_TREE, test = NULL_TREE, body = NULL_TREE;
9136   if (maybe_simt)
9137     {
9138       counter = create_tmp_var (integer_type_node);
9139       g = gimple_build_call_internal (IFN_GOMP_SIMT_LANE, 0);
9140       gimple_call_set_lhs (g, counter);
9141       gimple_bind_add_stmt (bind, g);
9142 
9143       body = create_artificial_label (UNKNOWN_LOCATION);
9144       test = create_artificial_label (UNKNOWN_LOCATION);
9145       gimple_bind_add_stmt (bind, gimple_build_label (body));
9146 
9147       tree simt_pred = create_tmp_var (integer_type_node);
9148       g = gimple_build_call_internal (IFN_GOMP_SIMT_ORDERED_PRED, 1, counter);
9149       gimple_call_set_lhs (g, simt_pred);
9150       gimple_bind_add_stmt (bind, g);
9151 
      /* Only the lane whose predicate is zero executes the body this
	 iteration; the others jump straight to the test.  */
9152       tree t = create_artificial_label (UNKNOWN_LOCATION);
9153       g = gimple_build_cond (EQ_EXPR, simt_pred, integer_zero_node, t, test);
9154       gimple_bind_add_stmt (bind, g);
9155 
9156       gimple_bind_add_stmt (bind, gimple_build_label (t));
9157     }
9158   lower_omp (gimple_omp_body_ptr (stmt), ctx);
9159   gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
9160   gimple_bind_add_seq (bind, gimple_omp_body (stmt));
9161   gimple_omp_set_body (stmt, NULL);
9162 
9163   if (maybe_simt)
9164     {
9165       gimple_bind_add_stmt (bind, gimple_build_label (test));
9166       g = gimple_build_assign (counter, MINUS_EXPR, counter, integer_one_node);
9167       gimple_bind_add_stmt (bind, g);
9168 
9169       tree c = build2 (GE_EXPR, boolean_type_node, counter, integer_zero_node);
9170       tree nonneg = create_tmp_var (integer_type_node);
9171       gimple_seq tseq = NULL;
9172       gimplify_assign (nonneg, fold_convert (integer_type_node, c), &tseq);
9173       gimple_bind_add_seq (bind, tseq);
9174 
      /* Loop back while any lane in the warp still has counter >= 0.  */
9175       g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY, 1, nonneg);
9176       gimple_call_set_lhs (g, nonneg);
9177       gimple_bind_add_stmt (bind, g);
9178 
9179       tree end = create_artificial_label (UNKNOWN_LOCATION);
9180       g = gimple_build_cond (NE_EXPR, nonneg, integer_zero_node, body, end);
9181       gimple_bind_add_stmt (bind, g);
9182 
9183       gimple_bind_add_stmt (bind, gimple_build_label (end));
9184     }
9185   if (simd)
9186     x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_END, 1,
9187 				    build_int_cst (NULL_TREE, threads));
9188   else
9189     x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_END),
9190 			   0);
9191   gimple_bind_add_stmt (bind, x);
9192 
9193   gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
9194 
9195   pop_gimplify_context (bind);
9196 
9197   gimple_bind_append_vars (bind, ctx->block_vars);
9198   BLOCK_VARS (block) = gimple_bind_vars (bind);
9199 }
9200 
9201 
9202 /* Expand code for an OpenMP scan directive and the structured block
9203    before the scan directive.  */
9204 
9205 static void
lower_omp_scan(gimple_stmt_iterator * gsi_p,omp_context * ctx)9206 lower_omp_scan (gimple_stmt_iterator *gsi_p, omp_context *ctx)
9207 {
9208   gimple *stmt = gsi_stmt (*gsi_p);
9209   bool has_clauses
9210     = gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt)) != NULL;
9211   tree lane = NULL_TREE;
9212   gimple_seq before = NULL;
  /* OCTX is the context of the enclosing worksharing/simd loop; the scan
     directive itself is never outermost.  */
9213   omp_context *octx = ctx->outer;
9214   gcc_assert (octx);
9215   if (octx->scan_exclusive && !has_clauses)
9216     {
9217       gimple_stmt_iterator gsi2 = *gsi_p;
9218       gsi_next (&gsi2);
9219       gimple *stmt2 = gsi_stmt (gsi2);
9220       /* For exclusive scan, swap GIMPLE_OMP_SCAN without clauses
9221 	 with following GIMPLE_OMP_SCAN with clauses, so that input_phase,
9222 	 the one with exclusive clause(s), comes first.  */
9223       if (stmt2
9224 	  && gimple_code (stmt2) == GIMPLE_OMP_SCAN
9225 	  && gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt2)) != NULL)
9226 	{
9227 	  gsi_remove (gsi_p, false);
9228 	  gsi_insert_after (gsi_p, stmt, GSI_SAME_STMT);
9229 	  ctx = maybe_lookup_ctx (stmt2);
9230 	  gcc_assert (ctx);
9231 	  lower_omp_scan (gsi_p, ctx);
9232 	  return;
9233 	}
9234     }
9235 
  /* True when lowering the structured block before the scan directive
     (the "input phase" of the reduction), false for the scan phase.  */
9236   bool input_phase = has_clauses ^ octx->scan_inclusive;
9237   bool is_simd = (gimple_code (octx->stmt) == GIMPLE_OMP_FOR
9238 		  && gimple_omp_for_kind (octx->stmt) == GF_OMP_FOR_KIND_SIMD);
9239   bool is_for = (gimple_code (octx->stmt) == GIMPLE_OMP_FOR
9240 		 && gimple_omp_for_kind (octx->stmt) == GF_OMP_FOR_KIND_FOR
9241 		 && !gimple_omp_for_combined_p (octx->stmt));
9242   bool is_for_simd = is_simd && gimple_omp_for_combined_into_p (octx->stmt);
9243   if (is_for_simd && octx->for_simd_scan_phase)
9244     is_simd = false;
  /* For simd, obtain the current lane via GOMP_simd_lane; the third
     argument encodes the phase (1 input, 2 inclusive scan, 3 exclusive
     scan).  */
9245   if (is_simd)
9246     if (tree c = omp_find_clause (gimple_omp_for_clauses (octx->stmt),
9247 				  OMP_CLAUSE__SIMDUID_))
9248       {
9249 	tree uid = OMP_CLAUSE__SIMDUID__DECL (c);
9250 	lane = create_tmp_var (unsigned_type_node);
9251 	tree t = build_int_cst (integer_type_node,
9252 				input_phase ? 1
9253 				: octx->scan_inclusive ? 2 : 3);
9254 	gimple *g
9255 	  = gimple_build_call_internal (IFN_GOMP_SIMD_LANE, 2, uid, t);
9256 	gimple_call_set_lhs (g, lane);
9257 	gimple_seq_add_stmt (&before, g);
9258       }
9259 
9260   if (is_simd || is_for)
9261     {
      /* Process each inscan reduction clause on the enclosing loop.  */
9262       for (tree c = gimple_omp_for_clauses (octx->stmt);
9263 	   c; c = OMP_CLAUSE_CHAIN (c))
9264 	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
9265 	    && OMP_CLAUSE_REDUCTION_INSCAN (c))
9266 	  {
9267 	    location_t clause_loc = OMP_CLAUSE_LOCATION (c);
9268 	    tree var = OMP_CLAUSE_DECL (c);
9269 	    tree new_var = lookup_decl (var, octx);
9270 	    tree val = new_var;
	    /* VAR2 is the accumulator the scan phase reads/writes, VAR3
	       (if any) a separate identity-element variable, VAR4 a
	       temporary holding the previous value for exclusive scans,
	       LANE0 the original lane index of a simd array ref —
	       NOTE(review): inferred from the uses below, confirm.  */
9271 	    tree var2 = NULL_TREE;
9272 	    tree var3 = NULL_TREE;
9273 	    tree var4 = NULL_TREE;
9274 	    tree lane0 = NULL_TREE;
9275 	    tree new_vard = new_var;
9276 	    if (omp_is_reference (var))
9277 	      {
9278 		new_var = build_simple_mem_ref_loc (clause_loc, new_var);
9279 		val = new_var;
9280 	      }
9281 	    if (DECL_HAS_VALUE_EXPR_P (new_vard))
9282 	      {
9283 		val = DECL_VALUE_EXPR (new_vard);
9284 		if (new_vard != new_var)
9285 		  {
9286 		    gcc_assert (TREE_CODE (val) == ADDR_EXPR);
9287 		    val = TREE_OPERAND (val, 0);
9288 		  }
		/* The privatized var may live in an "omp simd array";
		   redirect its lane index to LANE and look up the per-phase
		   companion arrays.  */
9289 		if (TREE_CODE (val) == ARRAY_REF
9290 		    && VAR_P (TREE_OPERAND (val, 0)))
9291 		  {
9292 		    tree v = TREE_OPERAND (val, 0);
9293 		    if (lookup_attribute ("omp simd array",
9294 					  DECL_ATTRIBUTES (v)))
9295 		      {
9296 			val = unshare_expr (val);
9297 			lane0 = TREE_OPERAND (val, 1);
9298 			TREE_OPERAND (val, 1) = lane;
9299 			var2 = lookup_decl (v, octx);
9300 			if (octx->scan_exclusive)
9301 			  var4 = lookup_decl (var2, octx);
9302 			if (input_phase
9303 			    && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
9304 			  var3 = maybe_lookup_decl (var4 ? var4 : var2, octx);
9305 			if (!input_phase)
9306 			  {
9307 			    var2 = build4 (ARRAY_REF, TREE_TYPE (val),
9308 					   var2, lane, NULL_TREE, NULL_TREE);
9309 			    TREE_THIS_NOTRAP (var2) = 1;
9310 			    if (octx->scan_exclusive)
9311 			      {
9312 				var4 = build4 (ARRAY_REF, TREE_TYPE (val),
9313 					       var4, lane, NULL_TREE,
9314 					       NULL_TREE);
9315 				TREE_THIS_NOTRAP (var4) = 1;
9316 			      }
9317 			  }
9318 			else
9319 			  var2 = val;
9320 		      }
9321 		  }
9322 		gcc_assert (var2);
9323 	      }
9324 	    else
9325 	      {
9326 		var2 = build_outer_var_ref (var, octx);
9327 		if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
9328 		  {
9329 		    var3 = maybe_lookup_decl (new_vard, octx);
9330 		    if (var3 == new_vard || var3 == NULL_TREE)
9331 		      var3 = NULL_TREE;
9332 		    else if (is_simd && octx->scan_exclusive && !input_phase)
9333 		      {
9334 			var4 = maybe_lookup_decl (var3, octx);
9335 			if (var4 == var3 || var4 == NULL_TREE)
9336 			  {
9337 			    if (TREE_ADDRESSABLE (TREE_TYPE (new_var)))
9338 			      {
9339 				var4 = var3;
9340 				var3 = NULL_TREE;
9341 			      }
9342 			    else
9343 			      var4 = NULL_TREE;
9344 			  }
9345 		      }
9346 		  }
9347 		if (is_simd
9348 		    && octx->scan_exclusive
9349 		    && !input_phase
9350 		    && var4 == NULL_TREE)
9351 		  var4 = create_tmp_var (TREE_TYPE (val));
9352 	      }
	    /* User-defined reduction: splice the declare-reduction
	       initializer/combiner sequences with placeholder and value
	       exprs temporarily redirected.  */
9353 	    if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
9354 	      {
9355 		tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
9356 		if (input_phase)
9357 		  {
9358 		    if (var3)
9359 		      {
9360 			/* If we've added a separate identity element
9361 			   variable, copy it over into val.  */
9362 			tree x = lang_hooks.decls.omp_clause_assign_op (c, val,
9363 									var3);
9364 			gimplify_and_add (x, &before);
9365 		      }
9366 		    else if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
9367 		      {
9368 			/* Otherwise, assign to it the identity element.  */
9369 			gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
9370 			if (is_for)
9371 			  tseq = copy_gimple_seq_and_replace_locals (tseq);
9372 			tree ref = build_outer_var_ref (var, octx);
9373 			tree x = (DECL_HAS_VALUE_EXPR_P (new_vard)
9374 				  ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
9375 			if (x)
9376 			  {
9377 			    if (new_vard != new_var)
9378 			      val = build_fold_addr_expr_loc (clause_loc, val);
9379 			    SET_DECL_VALUE_EXPR (new_vard, val);
9380 			  }
9381 			SET_DECL_VALUE_EXPR (placeholder, ref);
9382 			DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
9383 			lower_omp (&tseq, octx);
9384 			if (x)
9385 			  SET_DECL_VALUE_EXPR (new_vard, x);
9386 			SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
9387 			DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
9388 			gimple_seq_add_seq (&before, tseq);
9389 			if (is_simd)
9390 			  OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
9391 		      }
9392 		  }
9393 		else if (is_simd)
9394 		  {
9395 		    tree x;
9396 		    if (octx->scan_exclusive)
9397 		      {
			/* Save the pre-combine accumulator value for the
			   exclusive scan result.  */
9398 			tree v4 = unshare_expr (var4);
9399 			tree v2 = unshare_expr (var2);
9400 			x = lang_hooks.decls.omp_clause_assign_op (c, v4, v2);
9401 			gimplify_and_add (x, &before);
9402 		      }
9403 		    gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
9404 		    x = (DECL_HAS_VALUE_EXPR_P (new_vard)
9405 			 ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
9406 		    tree vexpr = val;
9407 		    if (x && new_vard != new_var)
9408 		      vexpr = build_fold_addr_expr_loc (clause_loc, val);
9409 		    if (x)
9410 		      SET_DECL_VALUE_EXPR (new_vard, vexpr);
9411 		    SET_DECL_VALUE_EXPR (placeholder, var2);
9412 		    DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
9413 		    lower_omp (&tseq, octx);
9414 		    gimple_seq_add_seq (&before, tseq);
9415 		    OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
9416 		    if (x)
9417 		      SET_DECL_VALUE_EXPR (new_vard, x);
9418 		    SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
9419 		    DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
9420 		    if (octx->scan_inclusive)
9421 		      {
9422 			x = lang_hooks.decls.omp_clause_assign_op (c, val,
9423 								   var2);
9424 			gimplify_and_add (x, &before);
9425 		      }
9426 		    else if (lane0 == NULL_TREE)
9427 		      {
9428 			x = lang_hooks.decls.omp_clause_assign_op (c, val,
9429 								   var4);
9430 			gimplify_and_add (x, &before);
9431 		      }
9432 		  }
9433 	      }
9434 	    else
9435 	      {
		/* Built-in reduction operator.  */
9436 		if (input_phase)
9437 		  {
9438 		    /* input phase.  Set val to initializer before
9439 		       the body.  */
9440 		    tree x = omp_reduction_init (c, TREE_TYPE (new_var));
9441 		    gimplify_assign (val, x, &before);
9442 		  }
9443 		else if (is_simd)
9444 		  {
9445 		    /* scan phase.  */
9446 		    enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
9447 		    if (code == MINUS_EXPR)
9448 		      code = PLUS_EXPR;
9449 
9450 		    tree x = build2 (code, TREE_TYPE (var2),
9451 				     unshare_expr (var2), unshare_expr (val));
9452 		    if (octx->scan_inclusive)
9453 		      {
9454 			gimplify_assign (unshare_expr (var2), x, &before);
9455 			gimplify_assign (val, var2, &before);
9456 		      }
9457 		    else
9458 		      {
9459 			gimplify_assign (unshare_expr (var4),
9460 					 unshare_expr (var2), &before);
9461 			gimplify_assign (var2, x, &before);
9462 			if (lane0 == NULL_TREE)
9463 			  gimplify_assign (val, var4, &before);
9464 		      }
9465 		  }
9466 	      }
	    /* For exclusive scan over a simd array, make the privatized
	       var read the saved value at the original lane index.  */
9467 	    if (octx->scan_exclusive && !input_phase && lane0)
9468 	      {
9469 		tree vexpr = unshare_expr (var4);
9470 		TREE_OPERAND (vexpr, 1) = lane0;
9471 		if (new_vard != new_var)
9472 		  vexpr = build_fold_addr_expr_loc (clause_loc, vexpr);
9473 		SET_DECL_VALUE_EXPR (new_vard, vexpr);
9474 	      }
9475 	  }
9476     }
  /* For plain simd, drop the GIMPLE_OMP_SCAN statement entirely and leave
     the generated BEFORE sequence followed by its body in the stream.  */
9477   if (is_simd && !is_for_simd)
9478     {
9479       gsi_insert_seq_after (gsi_p, gimple_omp_body (stmt), GSI_SAME_STMT);
9480       gsi_insert_seq_after (gsi_p, before, GSI_SAME_STMT);
9481       gsi_replace (gsi_p, gimple_build_nop (), true);
9482       return;
9483     }
9484   lower_omp (gimple_omp_body_ptr (stmt), octx);
9485   if (before)
9486     {
9487       gimple_stmt_iterator gsi = gsi_start_1 (gimple_omp_body_ptr (stmt));
9488       gsi_insert_seq_before (&gsi, before, GSI_SAME_STMT);
9489     }
9490 }
9491 
9492 
9493 /* Gimplify a GIMPLE_OMP_CRITICAL statement.  This is a relatively simple
9494    substitution of a couple of function calls.  But in the NAMED case,
9495    requires that languages coordinate a symbol name.  It is therefore
9496    best put here in common code.  */
9497 
9498 static GTY(()) hash_map<tree, tree> *critical_name_mutexes;
9499 
9500 static void
lower_omp_critical(gimple_stmt_iterator * gsi_p,omp_context * ctx)9501 lower_omp_critical (gimple_stmt_iterator *gsi_p, omp_context *ctx)
9502 {
9503   tree block;
9504   tree name, lock, unlock;
9505   gomp_critical *stmt = as_a <gomp_critical *> (gsi_stmt (*gsi_p));
9506   gbind *bind;
9507   location_t loc = gimple_location (stmt);
9508   gimple_seq tbody;
9509 
9510   name = gimple_omp_critical_name (stmt);
9511   if (name)
9512     {
      /* Named critical: all translation units must agree on one mutex per
	 name, so emit a common public symbol ".gomp_critical_user_<name>"
	 and cache it in CRITICAL_NAME_MUTEXES for reuse.  */
9513       tree decl;
9514 
9515       if (!critical_name_mutexes)
9516 	critical_name_mutexes = hash_map<tree, tree>::create_ggc (10);
9517 
9518       tree *n = critical_name_mutexes->get (name);
9519       if (n == NULL)
9520 	{
9521 	  char *new_str;
9522 
9523 	  decl = create_tmp_var_raw (ptr_type_node);
9524 
9525 	  new_str = ACONCAT ((".gomp_critical_user_",
9526 			      IDENTIFIER_POINTER (name), NULL));
9527 	  DECL_NAME (decl) = get_identifier (new_str);
9528 	  TREE_PUBLIC (decl) = 1;
9529 	  TREE_STATIC (decl) = 1;
9530 	  DECL_COMMON (decl) = 1;
9531 	  DECL_ARTIFICIAL (decl) = 1;
9532 	  DECL_IGNORED_P (decl) = 1;
9533 
9534 	  varpool_node::finalize_decl (decl);
9535 
9536 	  critical_name_mutexes->put (name, decl);
9537 	}
9538       else
9539 	decl = *n;
9540 
9541       /* If '#pragma omp critical' is inside offloaded region or
9542 	 inside function marked as offloadable, the symbol must be
9543 	 marked as offloadable too.  */
9544       omp_context *octx;
9545       if (cgraph_node::get (current_function_decl)->offloadable)
9546 	varpool_node::get_create (decl)->offloadable = 1;
9547       else
9548 	for (octx = ctx->outer; octx; octx = octx->outer)
9549 	  if (is_gimple_omp_offloaded (octx->stmt))
9550 	    {
9551 	      varpool_node::get_create (decl)->offloadable = 1;
9552 	      break;
9553 	    }
9554 
9555       lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_START);
9556       lock = build_call_expr_loc (loc, lock, 1,
9557 				  build_fold_addr_expr_loc (loc, decl));
9558 
9559       unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_END);
9560       unlock = build_call_expr_loc (loc, unlock, 1,
9561 				build_fold_addr_expr_loc (loc, decl));
9562     }
9563   else
9564     {
      /* Unnamed critical: use the single global lock in libgomp.  */
9565       lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_START);
9566       lock = build_call_expr_loc (loc, lock, 0);
9567 
9568       unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_END);
9569       unlock = build_call_expr_loc (loc, unlock, 0);
9570     }
9571 
9572   push_gimplify_context ();
9573 
  /* Wrap the critical statement in a bind: lock call, lowered body
     (with EH protection), unlock call, OMP return.  */
9574   block = make_node (BLOCK);
9575   bind = gimple_build_bind (NULL, NULL, block);
9576   gsi_replace (gsi_p, bind, true);
9577   gimple_bind_add_stmt (bind, stmt);
9578 
9579   tbody = gimple_bind_body (bind);
9580   gimplify_and_add (lock, &tbody);
9581   gimple_bind_set_body (bind, tbody);
9582 
9583   lower_omp (gimple_omp_body_ptr (stmt), ctx);
9584   gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
9585   gimple_bind_add_seq (bind, gimple_omp_body (stmt));
9586   gimple_omp_set_body (stmt, NULL);
9587 
9588   tbody = gimple_bind_body (bind);
9589   gimplify_and_add (unlock, &tbody);
9590   gimple_bind_set_body (bind, tbody);
9591 
9592   gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
9593 
9594   pop_gimplify_context (bind);
9595   gimple_bind_append_vars (bind, ctx->block_vars);
9596   BLOCK_VARS (block) = gimple_bind_vars (bind);
9597 }
9598 
9599 /* A subroutine of lower_omp_for.  Generate code to emit the predicate
9600    for a lastprivate clause.  Given a loop control predicate of (V
9601    cond N2), we gate the clause on (!(V cond N2)).  The lowered form
9602    is appended to *DLIST, iterator initialization is appended to
9603    *BODY_P.  *CLIST is for lastprivate(conditional:) code that needs
9604    to be emitted in a critical section.  */
9605 
static void
lower_omp_for_lastprivate (struct omp_for_data *fd, gimple_seq *body_p,
			   gimple_seq *dlist, gimple_seq *clist,
			   struct omp_context *ctx)
{
  tree clauses, cond, vinit;
  enum tree_code cond_code;
  gimple_seq stmts;

  /* Invert the loop condition: the lastprivate copy-out fires when the
     loop has run to completion, i.e. when (V cond N2) is false.  */
  cond_code = fd->loop.cond_code;
  cond_code = cond_code == LT_EXPR ? GE_EXPR : LE_EXPR;

  /* When possible, use a strict equality expression.  This can let VRP
     type optimizations deduce the value and remove a copy.  */
  if (tree_fits_shwi_p (fd->loop.step))
    {
      HOST_WIDE_INT step = tree_to_shwi (fd->loop.step);
      if (step == 1 || step == -1)
	cond_code = EQ_EXPR;
    }

  if (gimple_omp_for_kind (fd->for_stmt) == GF_OMP_FOR_KIND_GRID_LOOP
      || gimple_omp_for_grid_phony (fd->for_stmt))
    /* HSA grid loops compute the predicate their own way.  */
    cond = omp_grid_lastprivate_predicate (fd);
  else
    {
      tree n2 = fd->loop.n2;
      if (fd->collapse > 1
	  && TREE_CODE (n2) != INTEGER_CST
	  && gimple_omp_for_combined_into_p (fd->for_stmt))
	{
	  /* For a collapsed loop combined into an outer construct the
	     real upper bound lives in the outer context: either in the
	     enclosing parallel/task's _looptemp_ clause, or in the
	     enclosing for/distribute's own bound.  */
	  struct omp_context *taskreg_ctx = NULL;
	  if (gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
	    {
	      gomp_for *gfor = as_a <gomp_for *> (ctx->outer->stmt);
	      if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_FOR
		  || gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_DISTRIBUTE)
		{
		  if (gimple_omp_for_combined_into_p (gfor))
		    {
		      /* Outer loop is itself combined; the parallel
			 region two levels up holds the temporaries.  */
		      gcc_assert (ctx->outer->outer
				  && is_parallel_ctx (ctx->outer->outer));
		      taskreg_ctx = ctx->outer->outer;
		    }
		  else
		    {
		      /* Otherwise read the bound straight out of the
			 outer loop's own omp_for_data.  */
		      struct omp_for_data outer_fd;
		      omp_extract_for_data (gfor, &outer_fd, NULL);
		      n2 = fold_convert (TREE_TYPE (n2), outer_fd.loop.n2);
		    }
		}
	      else if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_TASKLOOP)
		taskreg_ctx = ctx->outer->outer;
	    }
	  else if (is_taskreg_ctx (ctx->outer))
	    taskreg_ctx = ctx->outer;
	  if (taskreg_ctx)
	    {
	      int i;
	      tree taskreg_clauses
		= gimple_omp_taskreg_clauses (taskreg_ctx->stmt);
	      tree innerc = omp_find_clause (taskreg_clauses,
					     OMP_CLAUSE__LOOPTEMP_);
	      gcc_assert (innerc);
	      /* Skip over the first fd->collapse + 1 _looptemp_ clauses
		 to reach the one carrying the overall upper bound.  */
	      for (i = 0; i < fd->collapse; i++)
		{
		  innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
					    OMP_CLAUSE__LOOPTEMP_);
		  gcc_assert (innerc);
		}
	      innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
					OMP_CLAUSE__LOOPTEMP_);
	      if (innerc)
		n2 = fold_convert (TREE_TYPE (n2),
				   lookup_decl (OMP_CLAUSE_DECL (innerc),
						taskreg_ctx));
	    }
	}
      cond = build2 (cond_code, boolean_type_node, fd->loop.v, n2);
    }

  clauses = gimple_omp_for_clauses (fd->for_stmt);
  stmts = NULL;
  lower_lastprivate_clauses (clauses, cond, body_p, &stmts, clist, ctx);
  if (!gimple_seq_empty_p (stmts))
    {
      /* Prepend the generated copy-out code to *DLIST.  */
      gimple_seq_add_seq (&stmts, *dlist);
      *dlist = stmts;

      /* Optimize: v = 0; is usually cheaper than v = some_other_constant.  */
      vinit = fd->loop.n1;
      if (cond_code == EQ_EXPR
	  && tree_fits_shwi_p (fd->loop.n2)
	  && ! integer_zerop (fd->loop.n2))
	vinit = build_int_cst (TREE_TYPE (fd->loop.v), 0);
      else
	vinit = unshare_expr (vinit);

      /* Initialize the iterator variable, so that threads that don't execute
	 any iterations don't execute the lastprivate clauses by accident.  */
      gimplify_assign (fd->loop.v, vinit, body_p);
    }
}
9709 
9710 /* Callback for walk_gimple_seq.  Find #pragma omp scan statement.  */
9711 
9712 static tree
omp_find_scan(gimple_stmt_iterator * gsi_p,bool * handled_ops_p,struct walk_stmt_info * wi)9713 omp_find_scan (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
9714 	       struct walk_stmt_info *wi)
9715 {
9716   gimple *stmt = gsi_stmt (*gsi_p);
9717 
9718   *handled_ops_p = true;
9719   switch (gimple_code (stmt))
9720     {
9721     WALK_SUBSTMTS;
9722 
9723     case GIMPLE_OMP_FOR:
9724       if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_SIMD
9725 	  && gimple_omp_for_combined_into_p (stmt))
9726 	*handled_ops_p = false;
9727       break;
9728 
9729     case GIMPLE_OMP_SCAN:
9730       *(gimple_stmt_iterator *) (wi->info) = *gsi_p;
9731       return integer_zero_node;
9732     default:
9733       break;
9734     }
9735   return NULL;
9736 }
9737 
9738 /* Helper function for lower_omp_for, add transformations for a worksharing
9739    loop with scan directives inside of it.
9740    For worksharing loop not combined with simd, transform:
9741    #pragma omp for reduction(inscan,+:r) private(i)
9742    for (i = 0; i < n; i = i + 1)
9743      {
9744        {
9745 	 update (r);
9746        }
9747        #pragma omp scan inclusive(r)
9748        {
9749 	 use (r);
9750        }
9751      }
9752 
9753    into two worksharing loops + code to merge results:
9754 
9755    num_threads = omp_get_num_threads ();
9756    thread_num = omp_get_thread_num ();
9757    if (thread_num == 0) goto <D.2099>; else goto <D.2100>;
9758    <D.2099>:
9759    var2 = r;
9760    goto <D.2101>;
9761    <D.2100>:
9762    // For UDRs this is UDR init, or if ctors are needed, copy from
9763    // var3 that has been constructed to contain the neutral element.
9764    var2 = 0;
9765    <D.2101>:
9766    ivar = 0;
9767    // The _scantemp_ clauses will arrange for rpriva to be initialized to
9768    // a shared array with num_threads elements and rprivb to a local array
9769    // number of elements equal to the number of (contiguous) iterations the
9770    // current thread will perform.  controlb and controlp variables are
9771    // temporaries to handle deallocation of rprivb at the end of second
9772    // GOMP_FOR.
9773    #pragma omp for _scantemp_(rpriva) _scantemp_(rprivb) _scantemp_(controlb) \
9774      _scantemp_(controlp) reduction(inscan,+:r) private(i) nowait
9775    for (i = 0; i < n; i = i + 1)
9776      {
9777        {
9778 	 // For UDRs this is UDR init or copy from var3.
9779 	 r = 0;
9780 	 // This is the input phase from user code.
9781 	 update (r);
9782        }
9783        {
9784 	 // For UDRs this is UDR merge.
9785 	 var2 = var2 + r;
9786 	 // Rather than handing it over to the user, save to local thread's
9787 	 // array.
9788 	 rprivb[ivar] = var2;
9789 	 // For exclusive scan, the above two statements are swapped.
9790 	 ivar = ivar + 1;
9791        }
9792      }
9793    // And remember the final value from this thread's into the shared
9794    // rpriva array.
9795    rpriva[(sizetype) thread_num] = var2;
9796    // If more than one thread, compute using Work-Efficient prefix sum
9797    // the inclusive parallel scan of the rpriva array.
9798    if (num_threads > 1) goto <D.2102>; else goto <D.2103>;
9799    <D.2102>:
9800    GOMP_barrier ();
9801    down = 0;
9802    k = 1;
9803    num_threadsu = (unsigned int) num_threads;
9804    thread_numup1 = (unsigned int) thread_num + 1;
9805    <D.2108>:
9806    twok = k << 1;
9807    if (twok > num_threadsu) goto <D.2110>; else goto <D.2111>;
9808    <D.2110>:
9809    down = 4294967295;
9810    k = k >> 1;
9811    if (k == num_threadsu) goto <D.2112>; else goto <D.2111>;
9812    <D.2112>:
9813    k = k >> 1;
9814    <D.2111>:
9815    twok = k << 1;
9816    cplx = .MUL_OVERFLOW (thread_nump1, twok);
9817    mul = REALPART_EXPR <cplx>;
9818    ovf = IMAGPART_EXPR <cplx>;
9819    if (ovf == 0) goto <D.2116>; else goto <D.2117>;
9820    <D.2116>:
9821    andv = k & down;
9822    andvm1 = andv + 4294967295;
9823    l = mul + andvm1;
9824    if (l < num_threadsu) goto <D.2120>; else goto <D.2117>;
9825    <D.2120>:
9826    // For UDRs this is UDR merge, performed using var2 variable as temporary,
9827    // i.e. var2 = rpriva[l - k]; UDR merge (var2, rpriva[l]); rpriva[l] = var2;
9828    rpriva[l] = rpriva[l - k] + rpriva[l];
9829    <D.2117>:
9830    if (down == 0) goto <D.2121>; else goto <D.2122>;
9831    <D.2121>:
9832    k = k << 1;
9833    goto <D.2123>;
9834    <D.2122>:
9835    k = k >> 1;
9836    <D.2123>:
9837    GOMP_barrier ();
9838    if (k != 0) goto <D.2108>; else goto <D.2103>;
9839    <D.2103>:
9840    if (thread_num == 0) goto <D.2124>; else goto <D.2125>;
9841    <D.2124>:
9842    // For UDRs this is UDR init or copy from var3.
9843    var2 = 0;
9844    goto <D.2126>;
9845    <D.2125>:
9846    var2 = rpriva[thread_num - 1];
9847    <D.2126>:
9848    ivar = 0;
9849    #pragma omp for _scantemp_(controlb) _scantemp_(controlp) \
9850      reduction(inscan,+:r) private(i)
9851    for (i = 0; i < n; i = i + 1)
9852      {
9853        {
9854 	 // For UDRs, this is r = var2; UDR merge (r, rprivb[ivar]);
9855 	 r = var2 + rprivb[ivar];
9856        }
9857        {
9858 	 // This is the scan phase from user code.
9859 	 use (r);
9860 	 // Plus a bump of the iterator.
9861 	 ivar = ivar + 1;
9862        }
9863      }  */
9864 
9865 static void
lower_omp_for_scan(gimple_seq * body_p,gimple_seq * dlist,gomp_for * stmt,struct omp_for_data * fd,omp_context * ctx)9866 lower_omp_for_scan (gimple_seq *body_p, gimple_seq *dlist, gomp_for *stmt,
9867 		    struct omp_for_data *fd, omp_context *ctx)
9868 {
9869   bool is_for_simd = gimple_omp_for_combined_p (stmt);
9870   gcc_assert (ctx->scan_inclusive || ctx->scan_exclusive);
9871 
9872   gimple_seq body = gimple_omp_body (stmt);
9873   gimple_stmt_iterator input1_gsi = gsi_none ();
9874   struct walk_stmt_info wi;
9875   memset (&wi, 0, sizeof (wi));
9876   wi.val_only = true;
9877   wi.info = (void *) &input1_gsi;
9878   walk_gimple_seq_mod (&body, omp_find_scan, NULL, &wi);
9879   gcc_assert (!gsi_end_p (input1_gsi));
9880 
9881   gimple *input_stmt1 = gsi_stmt (input1_gsi);
9882   gimple_stmt_iterator gsi = input1_gsi;
9883   gsi_next (&gsi);
9884   gimple_stmt_iterator scan1_gsi = gsi;
9885   gimple *scan_stmt1 = gsi_stmt (gsi);
9886   gcc_assert (scan_stmt1 && gimple_code (scan_stmt1) == GIMPLE_OMP_SCAN);
9887 
9888   gimple_seq input_body = gimple_omp_body (input_stmt1);
9889   gimple_seq scan_body = gimple_omp_body (scan_stmt1);
9890   gimple_omp_set_body (input_stmt1, NULL);
9891   gimple_omp_set_body (scan_stmt1, NULL);
9892   gimple_omp_set_body (stmt, NULL);
9893 
9894   gomp_for *new_stmt = as_a <gomp_for *> (gimple_copy (stmt));
9895   gimple_seq new_body = copy_gimple_seq_and_replace_locals (body);
9896   gimple_omp_set_body (stmt, body);
9897   gimple_omp_set_body (input_stmt1, input_body);
9898 
9899   gimple_stmt_iterator input2_gsi = gsi_none ();
9900   memset (&wi, 0, sizeof (wi));
9901   wi.val_only = true;
9902   wi.info = (void *) &input2_gsi;
9903   walk_gimple_seq_mod (&new_body, omp_find_scan, NULL, &wi);
9904   gcc_assert (!gsi_end_p (input2_gsi));
9905 
9906   gimple *input_stmt2 = gsi_stmt (input2_gsi);
9907   gsi = input2_gsi;
9908   gsi_next (&gsi);
9909   gimple_stmt_iterator scan2_gsi = gsi;
9910   gimple *scan_stmt2 = gsi_stmt (gsi);
9911   gcc_assert (scan_stmt2 && gimple_code (scan_stmt2) == GIMPLE_OMP_SCAN);
9912   gimple_omp_set_body (scan_stmt2, scan_body);
9913 
9914   gimple_stmt_iterator input3_gsi = gsi_none ();
9915   gimple_stmt_iterator scan3_gsi = gsi_none ();
9916   gimple_stmt_iterator input4_gsi = gsi_none ();
9917   gimple_stmt_iterator scan4_gsi = gsi_none ();
9918   gimple *input_stmt3 = NULL, *scan_stmt3 = NULL;
9919   gimple *input_stmt4 = NULL, *scan_stmt4 = NULL;
9920   omp_context *input_simd_ctx = NULL, *scan_simd_ctx = NULL;
9921   if (is_for_simd)
9922     {
9923       memset (&wi, 0, sizeof (wi));
9924       wi.val_only = true;
9925       wi.info = (void *) &input3_gsi;
9926       walk_gimple_seq_mod (&input_body, omp_find_scan, NULL, &wi);
9927       gcc_assert (!gsi_end_p (input3_gsi));
9928 
9929       input_stmt3 = gsi_stmt (input3_gsi);
9930       gsi = input3_gsi;
9931       gsi_next (&gsi);
9932       scan3_gsi = gsi;
9933       scan_stmt3 = gsi_stmt (gsi);
9934       gcc_assert (scan_stmt3 && gimple_code (scan_stmt3) == GIMPLE_OMP_SCAN);
9935 
9936       memset (&wi, 0, sizeof (wi));
9937       wi.val_only = true;
9938       wi.info = (void *) &input4_gsi;
9939       walk_gimple_seq_mod (&scan_body, omp_find_scan, NULL, &wi);
9940       gcc_assert (!gsi_end_p (input4_gsi));
9941 
9942       input_stmt4 = gsi_stmt (input4_gsi);
9943       gsi = input4_gsi;
9944       gsi_next (&gsi);
9945       scan4_gsi = gsi;
9946       scan_stmt4 = gsi_stmt (gsi);
9947       gcc_assert (scan_stmt4 && gimple_code (scan_stmt4) == GIMPLE_OMP_SCAN);
9948 
9949       input_simd_ctx = maybe_lookup_ctx (input_stmt3)->outer;
9950       scan_simd_ctx = maybe_lookup_ctx (input_stmt4)->outer;
9951     }
9952 
9953   tree num_threads = create_tmp_var (integer_type_node);
9954   tree thread_num = create_tmp_var (integer_type_node);
9955   tree nthreads_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS);
9956   tree threadnum_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
9957   gimple *g = gimple_build_call (nthreads_decl, 0);
9958   gimple_call_set_lhs (g, num_threads);
9959   gimple_seq_add_stmt (body_p, g);
9960   g = gimple_build_call (threadnum_decl, 0);
9961   gimple_call_set_lhs (g, thread_num);
9962   gimple_seq_add_stmt (body_p, g);
9963 
9964   tree ivar = create_tmp_var (sizetype);
9965   tree new_clauses1 = NULL_TREE, new_clauses2 = NULL_TREE;
9966   tree *cp1 = &new_clauses1, *cp2 = &new_clauses2;
9967   tree k = create_tmp_var (unsigned_type_node);
9968   tree l = create_tmp_var (unsigned_type_node);
9969 
9970   gimple_seq clist = NULL, mdlist = NULL;
9971   gimple_seq thr01_list = NULL, thrn1_list = NULL;
9972   gimple_seq thr02_list = NULL, thrn2_list = NULL;
9973   gimple_seq scan1_list = NULL, input2_list = NULL;
9974   gimple_seq last_list = NULL, reduc_list = NULL;
9975   for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
9976     if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
9977 	&& OMP_CLAUSE_REDUCTION_INSCAN (c))
9978       {
9979 	location_t clause_loc = OMP_CLAUSE_LOCATION (c);
9980 	tree var = OMP_CLAUSE_DECL (c);
9981 	tree new_var = lookup_decl (var, ctx);
9982 	tree var3 = NULL_TREE;
9983 	tree new_vard = new_var;
9984 	if (omp_is_reference (var))
9985 	  new_var = build_simple_mem_ref_loc (clause_loc, new_var);
9986 	if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
9987 	  {
9988 	    var3 = maybe_lookup_decl (new_vard, ctx);
9989 	    if (var3 == new_vard)
9990 	      var3 = NULL_TREE;
9991 	  }
9992 
9993 	tree ptype = build_pointer_type (TREE_TYPE (new_var));
9994 	tree rpriva = create_tmp_var (ptype);
9995 	tree nc = build_omp_clause (clause_loc, OMP_CLAUSE__SCANTEMP_);
9996 	OMP_CLAUSE_DECL (nc) = rpriva;
9997 	*cp1 = nc;
9998 	cp1 = &OMP_CLAUSE_CHAIN (nc);
9999 
10000 	tree rprivb = create_tmp_var (ptype);
10001 	nc = build_omp_clause (clause_loc, OMP_CLAUSE__SCANTEMP_);
10002 	OMP_CLAUSE_DECL (nc) = rprivb;
10003 	OMP_CLAUSE__SCANTEMP__ALLOC (nc) = 1;
10004 	*cp1 = nc;
10005 	cp1 = &OMP_CLAUSE_CHAIN (nc);
10006 
10007 	tree var2 = create_tmp_var_raw (TREE_TYPE (new_var));
10008 	if (new_vard != new_var)
10009 	  TREE_ADDRESSABLE (var2) = 1;
10010 	gimple_add_tmp_var (var2);
10011 
10012 	tree x = fold_convert_loc (clause_loc, sizetype, thread_num);
10013 	x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
10014 			     TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
10015 	x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
10016 	tree rpriva_ref = build_simple_mem_ref_loc (clause_loc, x);
10017 
10018 	x = fold_build2_loc (clause_loc, PLUS_EXPR, integer_type_node,
10019 			     thread_num, integer_minus_one_node);
10020 	x = fold_convert_loc (clause_loc, sizetype, x);
10021 	x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
10022 			     TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
10023 	x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
10024 	tree rprivam1_ref = build_simple_mem_ref_loc (clause_loc, x);
10025 
10026 	x = fold_convert_loc (clause_loc, sizetype, l);
10027 	x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
10028 			     TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
10029 	x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
10030 	tree rprival_ref = build_simple_mem_ref_loc (clause_loc, x);
10031 
10032 	x = fold_build2_loc (clause_loc, MINUS_EXPR, unsigned_type_node, l, k);
10033 	x = fold_convert_loc (clause_loc, sizetype, x);
10034 	x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
10035 			     TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
10036 	x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
10037 	tree rprivalmk_ref = build_simple_mem_ref_loc (clause_loc, x);
10038 
10039 	x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, ivar,
10040 			     TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
10041 	x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rprivb), rprivb, x);
10042 	tree rprivb_ref = build_simple_mem_ref_loc (clause_loc, x);
10043 
10044 	tree var4 = is_for_simd ? new_var : var2;
10045 	tree var5 = NULL_TREE, var6 = NULL_TREE;
10046 	if (is_for_simd)
10047 	  {
10048 	    var5 = lookup_decl (var, input_simd_ctx);
10049 	    var6 = lookup_decl (var, scan_simd_ctx);
10050 	    if (new_vard != new_var)
10051 	      {
10052 		var5 = build_simple_mem_ref_loc (clause_loc, var5);
10053 		var6 = build_simple_mem_ref_loc (clause_loc, var6);
10054 	      }
10055 	  }
10056 	if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
10057 	  {
10058 	    tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
10059 	    tree val = var2;
10060 
10061 	    x = lang_hooks.decls.omp_clause_default_ctor
10062 		    (c, var2, build_outer_var_ref (var, ctx));
10063 	    if (x)
10064 	      gimplify_and_add (x, &clist);
10065 
10066 	    x = build_outer_var_ref (var, ctx);
10067 	    x = lang_hooks.decls.omp_clause_assign_op (c, unshare_expr (var4),
10068 						       x);
10069 	    gimplify_and_add (x, &thr01_list);
10070 
10071 	    tree y = (DECL_HAS_VALUE_EXPR_P (new_vard)
10072 		      ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
10073 	    if (var3)
10074 	      {
10075 		x = unshare_expr (var4);
10076 		x = lang_hooks.decls.omp_clause_assign_op (c, x, var3);
10077 		gimplify_and_add (x, &thrn1_list);
10078 		x = unshare_expr (var4);
10079 		x = lang_hooks.decls.omp_clause_assign_op (c, x, var3);
10080 		gimplify_and_add (x, &thr02_list);
10081 	      }
10082 	    else if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
10083 	      {
10084 		/* Otherwise, assign to it the identity element.  */
10085 		gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
10086 		tseq = copy_gimple_seq_and_replace_locals (tseq);
10087 		if (!is_for_simd)
10088 		  {
10089 		    if (new_vard != new_var)
10090 		      val = build_fold_addr_expr_loc (clause_loc, val);
10091 		    SET_DECL_VALUE_EXPR (new_vard, val);
10092 		    DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
10093 		  }
10094 		SET_DECL_VALUE_EXPR (placeholder, error_mark_node);
10095 		DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
10096 		lower_omp (&tseq, ctx);
10097 		gimple_seq_add_seq (&thrn1_list, tseq);
10098 		tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
10099 		lower_omp (&tseq, ctx);
10100 		gimple_seq_add_seq (&thr02_list, tseq);
10101 		SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
10102 		DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
10103 		OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
10104 		if (y)
10105 		  SET_DECL_VALUE_EXPR (new_vard, y);
10106 		else
10107 		  {
10108 		    DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
10109 		    SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
10110 		  }
10111 	      }
10112 
10113 	    x = unshare_expr (var4);
10114 	    x = lang_hooks.decls.omp_clause_assign_op (c, x, rprivam1_ref);
10115 	    gimplify_and_add (x, &thrn2_list);
10116 
10117 	    if (is_for_simd)
10118 	      {
10119 		x = unshare_expr (rprivb_ref);
10120 		x = lang_hooks.decls.omp_clause_assign_op (c, x, var5);
10121 		gimplify_and_add (x, &scan1_list);
10122 	      }
10123 	    else
10124 	      {
10125 		if (ctx->scan_exclusive)
10126 		  {
10127 		    x = unshare_expr (rprivb_ref);
10128 		    x = lang_hooks.decls.omp_clause_assign_op (c, x, var2);
10129 		    gimplify_and_add (x, &scan1_list);
10130 		  }
10131 
10132 		gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
10133 		tseq = copy_gimple_seq_and_replace_locals (tseq);
10134 		SET_DECL_VALUE_EXPR (placeholder, var2);
10135 		DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
10136 		lower_omp (&tseq, ctx);
10137 		gimple_seq_add_seq (&scan1_list, tseq);
10138 
10139 		if (ctx->scan_inclusive)
10140 		  {
10141 		    x = unshare_expr (rprivb_ref);
10142 		    x = lang_hooks.decls.omp_clause_assign_op (c, x, var2);
10143 		    gimplify_and_add (x, &scan1_list);
10144 		  }
10145 	      }
10146 
10147 	    x = unshare_expr (rpriva_ref);
10148 	    x = lang_hooks.decls.omp_clause_assign_op (c, x,
10149 						       unshare_expr (var4));
10150 	    gimplify_and_add (x, &mdlist);
10151 
10152 	    x = unshare_expr (is_for_simd ? var6 : new_var);
10153 	    x = lang_hooks.decls.omp_clause_assign_op (c, x, var4);
10154 	    gimplify_and_add (x, &input2_list);
10155 
10156 	    val = rprivb_ref;
10157 	    if (new_vard != new_var)
10158 	      val = build_fold_addr_expr_loc (clause_loc, val);
10159 
10160 	    gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
10161 	    tseq = copy_gimple_seq_and_replace_locals (tseq);
10162 	    SET_DECL_VALUE_EXPR (new_vard, val);
10163 	    DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
10164 	    if (is_for_simd)
10165 	      {
10166 		SET_DECL_VALUE_EXPR (placeholder, var6);
10167 		DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
10168 	      }
10169 	    else
10170 	      DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
10171 	    lower_omp (&tseq, ctx);
10172 	    if (y)
10173 	      SET_DECL_VALUE_EXPR (new_vard, y);
10174 	    else
10175 	      {
10176 		DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
10177 		SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
10178 	      }
10179 	    if (!is_for_simd)
10180 	      {
10181 		SET_DECL_VALUE_EXPR (placeholder, new_var);
10182 		DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
10183 		lower_omp (&tseq, ctx);
10184 	      }
10185 	    gimple_seq_add_seq (&input2_list, tseq);
10186 
10187 	    x = build_outer_var_ref (var, ctx);
10188 	    x = lang_hooks.decls.omp_clause_assign_op (c, x, rpriva_ref);
10189 	    gimplify_and_add (x, &last_list);
10190 
10191 	    x = lang_hooks.decls.omp_clause_assign_op (c, var2, rprivalmk_ref);
10192 	    gimplify_and_add (x, &reduc_list);
10193 	    tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
10194 	    tseq = copy_gimple_seq_and_replace_locals (tseq);
10195 	    val = rprival_ref;
10196 	    if (new_vard != new_var)
10197 	      val = build_fold_addr_expr_loc (clause_loc, val);
10198 	    SET_DECL_VALUE_EXPR (new_vard, val);
10199 	    DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
10200 	    SET_DECL_VALUE_EXPR (placeholder, var2);
10201 	    lower_omp (&tseq, ctx);
10202 	    OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
10203 	    SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
10204 	    DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
10205 	    if (y)
10206 	      SET_DECL_VALUE_EXPR (new_vard, y);
10207 	    else
10208 	      {
10209 		DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
10210 		SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
10211 	      }
10212 	    gimple_seq_add_seq (&reduc_list, tseq);
10213 	    x = lang_hooks.decls.omp_clause_assign_op (c, rprival_ref, var2);
10214 	    gimplify_and_add (x, &reduc_list);
10215 
10216 	    x = lang_hooks.decls.omp_clause_dtor (c, var2);
10217 	    if (x)
10218 	      gimplify_and_add (x, dlist);
10219 	  }
10220 	else
10221 	  {
10222 	    x = build_outer_var_ref (var, ctx);
10223 	    gimplify_assign (unshare_expr (var4), x, &thr01_list);
10224 
10225 	    x = omp_reduction_init (c, TREE_TYPE (new_var));
10226 	    gimplify_assign (unshare_expr (var4), unshare_expr (x),
10227 			     &thrn1_list);
10228 	    gimplify_assign (unshare_expr (var4), x, &thr02_list);
10229 
10230 	    gimplify_assign (unshare_expr (var4), rprivam1_ref, &thrn2_list);
10231 
10232 	    enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
10233 	    if (code == MINUS_EXPR)
10234 	      code = PLUS_EXPR;
10235 
10236 	    if (is_for_simd)
10237 	      gimplify_assign (unshare_expr (rprivb_ref), var5, &scan1_list);
10238 	    else
10239 	      {
10240 		if (ctx->scan_exclusive)
10241 		  gimplify_assign (unshare_expr (rprivb_ref), var2,
10242 				   &scan1_list);
10243 		x = build2 (code, TREE_TYPE (new_var), var2, new_var);
10244 		gimplify_assign (var2, x, &scan1_list);
10245 		if (ctx->scan_inclusive)
10246 		  gimplify_assign (unshare_expr (rprivb_ref), var2,
10247 				   &scan1_list);
10248 	      }
10249 
10250 	    gimplify_assign (unshare_expr (rpriva_ref), unshare_expr (var4),
10251 			     &mdlist);
10252 
10253 	    x = build2 (code, TREE_TYPE (new_var), var4, rprivb_ref);
10254 	    gimplify_assign (is_for_simd ? var6 : new_var, x, &input2_list);
10255 
10256 	    gimplify_assign (build_outer_var_ref (var, ctx), rpriva_ref,
10257 			     &last_list);
10258 
10259 	    x = build2 (code, TREE_TYPE (new_var), rprivalmk_ref,
10260 			unshare_expr (rprival_ref));
10261 	    gimplify_assign (rprival_ref, x, &reduc_list);
10262 	  }
10263       }
10264 
10265   g = gimple_build_assign (ivar, PLUS_EXPR, ivar, size_one_node);
10266   gimple_seq_add_stmt (&scan1_list, g);
10267   g = gimple_build_assign (ivar, PLUS_EXPR, ivar, size_one_node);
10268   gimple_seq_add_stmt (gimple_omp_body_ptr (is_for_simd
10269 					    ? scan_stmt4 : scan_stmt2), g);
10270 
10271   tree controlb = create_tmp_var (boolean_type_node);
10272   tree controlp = create_tmp_var (ptr_type_node);
10273   tree nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
10274   OMP_CLAUSE_DECL (nc) = controlb;
10275   OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
10276   *cp1 = nc;
10277   cp1 = &OMP_CLAUSE_CHAIN (nc);
10278   nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
10279   OMP_CLAUSE_DECL (nc) = controlp;
10280   OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
10281   *cp1 = nc;
10282   cp1 = &OMP_CLAUSE_CHAIN (nc);
10283   nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
10284   OMP_CLAUSE_DECL (nc) = controlb;
10285   OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
10286   *cp2 = nc;
10287   cp2 = &OMP_CLAUSE_CHAIN (nc);
10288   nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
10289   OMP_CLAUSE_DECL (nc) = controlp;
10290   OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
10291   *cp2 = nc;
10292   cp2 = &OMP_CLAUSE_CHAIN (nc);
10293 
10294   *cp1 = gimple_omp_for_clauses (stmt);
10295   gimple_omp_for_set_clauses (stmt, new_clauses1);
10296   *cp2 = gimple_omp_for_clauses (new_stmt);
10297   gimple_omp_for_set_clauses (new_stmt, new_clauses2);
10298 
10299   if (is_for_simd)
10300     {
10301       gimple_seq_add_seq (gimple_omp_body_ptr (scan_stmt3), scan1_list);
10302       gimple_seq_add_seq (gimple_omp_body_ptr (input_stmt4), input2_list);
10303 
10304       gsi_insert_seq_after (&input3_gsi, gimple_omp_body (input_stmt3),
10305 			    GSI_SAME_STMT);
10306       gsi_remove (&input3_gsi, true);
10307       gsi_insert_seq_after (&scan3_gsi, gimple_omp_body (scan_stmt3),
10308 			    GSI_SAME_STMT);
10309       gsi_remove (&scan3_gsi, true);
10310       gsi_insert_seq_after (&input4_gsi, gimple_omp_body (input_stmt4),
10311 			    GSI_SAME_STMT);
10312       gsi_remove (&input4_gsi, true);
10313       gsi_insert_seq_after (&scan4_gsi, gimple_omp_body (scan_stmt4),
10314 			    GSI_SAME_STMT);
10315       gsi_remove (&scan4_gsi, true);
10316     }
10317   else
10318     {
10319       gimple_omp_set_body (scan_stmt1, scan1_list);
10320       gimple_omp_set_body (input_stmt2, input2_list);
10321     }
10322 
10323   gsi_insert_seq_after (&input1_gsi, gimple_omp_body (input_stmt1),
10324 			GSI_SAME_STMT);
10325   gsi_remove (&input1_gsi, true);
10326   gsi_insert_seq_after (&scan1_gsi, gimple_omp_body (scan_stmt1),
10327 			GSI_SAME_STMT);
10328   gsi_remove (&scan1_gsi, true);
10329   gsi_insert_seq_after (&input2_gsi, gimple_omp_body (input_stmt2),
10330 			GSI_SAME_STMT);
10331   gsi_remove (&input2_gsi, true);
10332   gsi_insert_seq_after (&scan2_gsi, gimple_omp_body (scan_stmt2),
10333 			GSI_SAME_STMT);
10334   gsi_remove (&scan2_gsi, true);
10335 
10336   gimple_seq_add_seq (body_p, clist);
10337 
10338   tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
10339   tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
10340   tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
10341   g = gimple_build_cond (EQ_EXPR, thread_num, integer_zero_node, lab1, lab2);
10342   gimple_seq_add_stmt (body_p, g);
10343   g = gimple_build_label (lab1);
10344   gimple_seq_add_stmt (body_p, g);
10345   gimple_seq_add_seq (body_p, thr01_list);
10346   g = gimple_build_goto (lab3);
10347   gimple_seq_add_stmt (body_p, g);
10348   g = gimple_build_label (lab2);
10349   gimple_seq_add_stmt (body_p, g);
10350   gimple_seq_add_seq (body_p, thrn1_list);
10351   g = gimple_build_label (lab3);
10352   gimple_seq_add_stmt (body_p, g);
10353 
10354   g = gimple_build_assign (ivar, size_zero_node);
10355   gimple_seq_add_stmt (body_p, g);
10356 
10357   gimple_seq_add_stmt (body_p, stmt);
10358   gimple_seq_add_seq (body_p, body);
10359   gimple_seq_add_stmt (body_p, gimple_build_omp_continue (fd->loop.v,
10360 							  fd->loop.v));
10361 
10362   g = gimple_build_omp_return (true);
10363   gimple_seq_add_stmt (body_p, g);
10364   gimple_seq_add_seq (body_p, mdlist);
10365 
10366   lab1 = create_artificial_label (UNKNOWN_LOCATION);
10367   lab2 = create_artificial_label (UNKNOWN_LOCATION);
10368   g = gimple_build_cond (GT_EXPR, num_threads, integer_one_node, lab1, lab2);
10369   gimple_seq_add_stmt (body_p, g);
10370   g = gimple_build_label (lab1);
10371   gimple_seq_add_stmt (body_p, g);
10372 
10373   g = omp_build_barrier (NULL);
10374   gimple_seq_add_stmt (body_p, g);
10375 
10376   tree down = create_tmp_var (unsigned_type_node);
10377   g = gimple_build_assign (down, build_zero_cst (unsigned_type_node));
10378   gimple_seq_add_stmt (body_p, g);
10379 
10380   g = gimple_build_assign (k, build_one_cst (unsigned_type_node));
10381   gimple_seq_add_stmt (body_p, g);
10382 
10383   tree num_threadsu = create_tmp_var (unsigned_type_node);
10384   g = gimple_build_assign (num_threadsu, NOP_EXPR, num_threads);
10385   gimple_seq_add_stmt (body_p, g);
10386 
10387   tree thread_numu = create_tmp_var (unsigned_type_node);
10388   g = gimple_build_assign (thread_numu, NOP_EXPR, thread_num);
10389   gimple_seq_add_stmt (body_p, g);
10390 
10391   tree thread_nump1 = create_tmp_var (unsigned_type_node);
10392   g = gimple_build_assign (thread_nump1, PLUS_EXPR, thread_numu,
10393 			   build_int_cst (unsigned_type_node, 1));
10394   gimple_seq_add_stmt (body_p, g);
10395 
10396   lab3 = create_artificial_label (UNKNOWN_LOCATION);
10397   g = gimple_build_label (lab3);
10398   gimple_seq_add_stmt (body_p, g);
10399 
10400   tree twok = create_tmp_var (unsigned_type_node);
10401   g = gimple_build_assign (twok, LSHIFT_EXPR, k, integer_one_node);
10402   gimple_seq_add_stmt (body_p, g);
10403 
10404   tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
10405   tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
10406   tree lab6 = create_artificial_label (UNKNOWN_LOCATION);
10407   g = gimple_build_cond (GT_EXPR, twok, num_threadsu, lab4, lab5);
10408   gimple_seq_add_stmt (body_p, g);
10409   g = gimple_build_label (lab4);
10410   gimple_seq_add_stmt (body_p, g);
10411   g = gimple_build_assign (down, build_all_ones_cst (unsigned_type_node));
10412   gimple_seq_add_stmt (body_p, g);
10413   g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
10414   gimple_seq_add_stmt (body_p, g);
10415 
10416   g = gimple_build_cond (EQ_EXPR, k, num_threadsu, lab6, lab5);
10417   gimple_seq_add_stmt (body_p, g);
10418   g = gimple_build_label (lab6);
10419   gimple_seq_add_stmt (body_p, g);
10420 
10421   g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
10422   gimple_seq_add_stmt (body_p, g);
10423 
10424   g = gimple_build_label (lab5);
10425   gimple_seq_add_stmt (body_p, g);
10426 
10427   g = gimple_build_assign (twok, LSHIFT_EXPR, k, integer_one_node);
10428   gimple_seq_add_stmt (body_p, g);
10429 
10430   tree cplx = create_tmp_var (build_complex_type (unsigned_type_node, false));
10431   DECL_GIMPLE_REG_P (cplx) = 1;
10432   g = gimple_build_call_internal (IFN_MUL_OVERFLOW, 2, thread_nump1, twok);
10433   gimple_call_set_lhs (g, cplx);
10434   gimple_seq_add_stmt (body_p, g);
10435   tree mul = create_tmp_var (unsigned_type_node);
10436   g = gimple_build_assign (mul, REALPART_EXPR,
10437 			   build1 (REALPART_EXPR, unsigned_type_node, cplx));
10438   gimple_seq_add_stmt (body_p, g);
10439   tree ovf = create_tmp_var (unsigned_type_node);
10440   g = gimple_build_assign (ovf, IMAGPART_EXPR,
10441 			   build1 (IMAGPART_EXPR, unsigned_type_node, cplx));
10442   gimple_seq_add_stmt (body_p, g);
10443 
10444   tree lab7 = create_artificial_label (UNKNOWN_LOCATION);
10445   tree lab8 = create_artificial_label (UNKNOWN_LOCATION);
10446   g = gimple_build_cond (EQ_EXPR, ovf, build_zero_cst (unsigned_type_node),
10447 			 lab7, lab8);
10448   gimple_seq_add_stmt (body_p, g);
10449   g = gimple_build_label (lab7);
10450   gimple_seq_add_stmt (body_p, g);
10451 
10452   tree andv = create_tmp_var (unsigned_type_node);
10453   g = gimple_build_assign (andv, BIT_AND_EXPR, k, down);
10454   gimple_seq_add_stmt (body_p, g);
10455   tree andvm1 = create_tmp_var (unsigned_type_node);
10456   g = gimple_build_assign (andvm1, PLUS_EXPR, andv,
10457 			   build_minus_one_cst (unsigned_type_node));
10458   gimple_seq_add_stmt (body_p, g);
10459 
10460   g = gimple_build_assign (l, PLUS_EXPR, mul, andvm1);
10461   gimple_seq_add_stmt (body_p, g);
10462 
10463   tree lab9 = create_artificial_label (UNKNOWN_LOCATION);
10464   g = gimple_build_cond (LT_EXPR, l, num_threadsu, lab9, lab8);
10465   gimple_seq_add_stmt (body_p, g);
10466   g = gimple_build_label (lab9);
10467   gimple_seq_add_stmt (body_p, g);
10468   gimple_seq_add_seq (body_p, reduc_list);
10469   g = gimple_build_label (lab8);
10470   gimple_seq_add_stmt (body_p, g);
10471 
10472   tree lab10 = create_artificial_label (UNKNOWN_LOCATION);
10473   tree lab11 = create_artificial_label (UNKNOWN_LOCATION);
10474   tree lab12 = create_artificial_label (UNKNOWN_LOCATION);
10475   g = gimple_build_cond (EQ_EXPR, down, build_zero_cst (unsigned_type_node),
10476 			 lab10, lab11);
10477   gimple_seq_add_stmt (body_p, g);
10478   g = gimple_build_label (lab10);
10479   gimple_seq_add_stmt (body_p, g);
10480   g = gimple_build_assign (k, LSHIFT_EXPR, k, integer_one_node);
10481   gimple_seq_add_stmt (body_p, g);
10482   g = gimple_build_goto (lab12);
10483   gimple_seq_add_stmt (body_p, g);
10484   g = gimple_build_label (lab11);
10485   gimple_seq_add_stmt (body_p, g);
10486   g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
10487   gimple_seq_add_stmt (body_p, g);
10488   g = gimple_build_label (lab12);
10489   gimple_seq_add_stmt (body_p, g);
10490 
10491   g = omp_build_barrier (NULL);
10492   gimple_seq_add_stmt (body_p, g);
10493 
10494   g = gimple_build_cond (NE_EXPR, k, build_zero_cst (unsigned_type_node),
10495 			 lab3, lab2);
10496   gimple_seq_add_stmt (body_p, g);
10497 
10498   g = gimple_build_label (lab2);
10499   gimple_seq_add_stmt (body_p, g);
10500 
10501   lab1 = create_artificial_label (UNKNOWN_LOCATION);
10502   lab2 = create_artificial_label (UNKNOWN_LOCATION);
10503   lab3 = create_artificial_label (UNKNOWN_LOCATION);
10504   g = gimple_build_cond (EQ_EXPR, thread_num, integer_zero_node, lab1, lab2);
10505   gimple_seq_add_stmt (body_p, g);
10506   g = gimple_build_label (lab1);
10507   gimple_seq_add_stmt (body_p, g);
10508   gimple_seq_add_seq (body_p, thr02_list);
10509   g = gimple_build_goto (lab3);
10510   gimple_seq_add_stmt (body_p, g);
10511   g = gimple_build_label (lab2);
10512   gimple_seq_add_stmt (body_p, g);
10513   gimple_seq_add_seq (body_p, thrn2_list);
10514   g = gimple_build_label (lab3);
10515   gimple_seq_add_stmt (body_p, g);
10516 
10517   g = gimple_build_assign (ivar, size_zero_node);
10518   gimple_seq_add_stmt (body_p, g);
10519   gimple_seq_add_stmt (body_p, new_stmt);
10520   gimple_seq_add_seq (body_p, new_body);
10521 
10522   gimple_seq new_dlist = NULL;
10523   lab1 = create_artificial_label (UNKNOWN_LOCATION);
10524   lab2 = create_artificial_label (UNKNOWN_LOCATION);
10525   tree num_threadsm1 = create_tmp_var (integer_type_node);
10526   g = gimple_build_assign (num_threadsm1, PLUS_EXPR, num_threads,
10527 			   integer_minus_one_node);
10528   gimple_seq_add_stmt (&new_dlist, g);
10529   g = gimple_build_cond (EQ_EXPR, thread_num, num_threadsm1, lab1, lab2);
10530   gimple_seq_add_stmt (&new_dlist, g);
10531   g = gimple_build_label (lab1);
10532   gimple_seq_add_stmt (&new_dlist, g);
10533   gimple_seq_add_seq (&new_dlist, last_list);
10534   g = gimple_build_label (lab2);
10535   gimple_seq_add_stmt (&new_dlist, g);
10536   gimple_seq_add_seq (&new_dlist, *dlist);
10537   *dlist = new_dlist;
10538 }
10539 
/* Lower code for an OMP loop directive.  Replaces the GIMPLE_OMP_FOR at
   *GSI_P with a GIMPLE_BIND containing the lowered loop: input-clause
   setup, the loop itself, the OMP_CONTINUE/OMP_RETURN markers and the
   exit-clause cleanup.  CTX is the OMP region context for STMT.  */

static void
lower_omp_for (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree *rhs_p, block;
  struct omp_for_data fd, *fdp = NULL;
  gomp_for *stmt = as_a <gomp_for *> (gsi_stmt (*gsi_p));
  gbind *new_stmt;
  gimple_seq omp_for_body, body, dlist, tred_ilist = NULL, tred_dlist = NULL;
  gimple_seq cnt_list = NULL, clist = NULL;
  gimple_seq oacc_head = NULL, oacc_tail = NULL;
  size_t i;

  push_gimplify_context ();

  /* Lower the loop pre-body (e.g. computations feeding the bounds) first.  */
  lower_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);

  block = make_node (BLOCK);
  new_stmt = gimple_build_bind (NULL, NULL, block);
  /* Replace at gsi right away, so that 'stmt' is no member
     of a sequence anymore as we're going to add to a different
     one below.  */
  gsi_replace (gsi_p, new_stmt, true);

  /* Move declaration of temporaries in the loop body before we make
     it go away.  */
  omp_for_body = gimple_omp_body (stmt);
  if (!gimple_seq_empty_p (omp_for_body)
      && gimple_code (gimple_seq_first_stmt (omp_for_body)) == GIMPLE_BIND)
    {
      gbind *inner_bind
	= as_a <gbind *> (gimple_seq_first_stmt (omp_for_body));
      tree vars = gimple_bind_vars (inner_bind);
      gimple_bind_append_vars (new_stmt, vars);
      /* bind_vars/BLOCK_VARS are being moved to new_stmt/block, don't
	 keep them on the inner_bind and it's block.  */
      gimple_bind_set_vars (inner_bind, NULL_TREE);
      if (gimple_bind_block (inner_bind))
	BLOCK_VARS (gimple_bind_block (inner_bind)) = NULL_TREE;
    }

  /* For a loop combined into an enclosing construct, add _looptemp_
     clauses that communicate istart/iend (and count temporaries) between
     the constructs.  */
  if (gimple_omp_for_combined_into_p (stmt))
    {
      omp_extract_for_data (stmt, &fd, NULL);
      fdp = &fd;

      /* We need two temporaries with fd.loop.v type (istart/iend)
	 and then (fd.collapse - 1) temporaries with the same
	 type for count2 ... countN-1 vars if not constant.  */
      size_t count = 2;
      tree type = fd.iter_type;
      if (fd.collapse > 1
	  && TREE_CODE (fd.loop.n2) != INTEGER_CST)
	count += fd.collapse - 1;
      bool taskreg_for
	= (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR
	   || gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP);
      tree outerc = NULL, *pc = gimple_omp_for_clauses_ptr (stmt);
      tree simtc = NULL;
      tree clauses = *pc;
      if (taskreg_for)
	outerc
	  = omp_find_clause (gimple_omp_taskreg_clauses (ctx->outer->stmt),
			     OMP_CLAUSE__LOOPTEMP_);
      if (ctx->simt_stmt)
	simtc = omp_find_clause (gimple_omp_for_clauses (ctx->simt_stmt),
				 OMP_CLAUSE__LOOPTEMP_);
      for (i = 0; i < count; i++)
	{
	  tree temp;
	  if (taskreg_for)
	    {
	      /* Reuse the decls already created on the enclosing
		 parallel/task construct.  */
	      gcc_assert (outerc);
	      temp = lookup_decl (OMP_CLAUSE_DECL (outerc), ctx->outer);
	      outerc = omp_find_clause (OMP_CLAUSE_CHAIN (outerc),
					OMP_CLAUSE__LOOPTEMP_);
	    }
	  else
	    {
	      /* If there are 2 adjacent SIMD stmts, one with _simt_
		 clause, another without, make sure they have the same
		 decls in _looptemp_ clauses, because the outer stmt
		 they are combined into will look up just one inner_stmt.  */
	      if (ctx->simt_stmt)
		temp = OMP_CLAUSE_DECL (simtc);
	      else
		temp = create_tmp_var (type);
	      insert_decl_map (&ctx->outer->cb, temp, temp);
	    }
	  /* Prepend a fresh _looptemp_ clause holding TEMP.  */
	  *pc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
	  OMP_CLAUSE_DECL (*pc) = temp;
	  pc = &OMP_CLAUSE_CHAIN (*pc);
	  if (ctx->simt_stmt)
	    simtc = omp_find_clause (OMP_CLAUSE_CHAIN (simtc),
				     OMP_CLAUSE__LOOPTEMP_);
	}
      /* Re-attach the original clause list after the new _looptemp_s.  */
      *pc = clauses;
    }

  /* The pre-body and input clauses go before the lowered GIMPLE_OMP_FOR.  */
  dlist = NULL;
  body = NULL;
  tree rclauses
    = omp_task_reductions_find_first (gimple_omp_for_clauses (stmt), OMP_FOR,
				      OMP_CLAUSE_REDUCTION);
  tree rtmp = NULL_TREE;
  if (rclauses)
    {
      /* Task reductions need a _reductemp_ clause and setup/teardown
	 sequences (tred_ilist/tred_dlist) around the loop.  */
      tree type = build_pointer_type (pointer_sized_int_node);
      tree temp = create_tmp_var (type);
      tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
      OMP_CLAUSE_DECL (c) = temp;
      OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (stmt);
      gimple_omp_for_set_clauses (stmt, c);
      lower_omp_task_reductions (ctx, OMP_FOR,
				 gimple_omp_for_clauses (stmt),
				 &tred_ilist, &tred_dlist);
      rclauses = c;
      rtmp = make_ssa_name (type);
      gimple_seq_add_stmt (&body, gimple_build_assign (rtmp, temp));
    }

  lower_lastprivate_conditional_clauses (gimple_omp_for_clauses_ptr (stmt),
					 ctx);

  lower_rec_input_clauses (gimple_omp_for_clauses (stmt), &body, &dlist, ctx,
			   fdp);
  gimple_seq_add_seq (rclauses ? &tred_ilist : &body,
		      gimple_omp_for_pre_body (stmt));

  lower_omp (gimple_omp_body_ptr (stmt), ctx);

  /* Lower the header expressions.  At this point, we can assume that
     the header is of the form:

     	#pragma omp for (V = VAL1; V {<|>|<=|>=} VAL2; V = V [+-] VAL3)

     We just need to make sure that VAL1, VAL2 and VAL3 are lowered
     using the .omp_data_s mapping, if needed.  */
  for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
    {
      rhs_p = gimple_omp_for_initial_ptr (stmt, i);
      if (!is_gimple_min_invariant (*rhs_p))
	*rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
      else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
	recompute_tree_invariant_for_addr_expr (*rhs_p);

      rhs_p = gimple_omp_for_final_ptr (stmt, i);
      if (!is_gimple_min_invariant (*rhs_p))
	*rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
      else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
	recompute_tree_invariant_for_addr_expr (*rhs_p);

      rhs_p = &TREE_OPERAND (gimple_omp_for_incr (stmt, i), 1);
      if (!is_gimple_min_invariant (*rhs_p))
	*rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
    }
  if (rclauses)
    gimple_seq_add_seq (&tred_ilist, cnt_list);
  else
    gimple_seq_add_seq (&body, cnt_list);

  /* Once lowered, extract the bounds and clauses.  */
  omp_extract_for_data (stmt, &fd, NULL);

  if (is_gimple_omp_oacc (ctx->stmt)
      && !ctx_in_oacc_kernels_region (ctx))
    lower_oacc_head_tail (gimple_location (stmt),
			  gimple_omp_for_clauses (stmt),
			  &oacc_head, &oacc_tail, ctx);

  /* Add OpenACC partitioning and reduction markers just before the loop.  */
  if (oacc_head)
    gimple_seq_add_seq (&body, oacc_head);

  lower_omp_for_lastprivate (&fd, &body, &dlist, &clist, ctx);

  /* Remap linear-clause decls/steps into this context for worksharing
     loops with copy-in linear clauses.  */
  if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
    for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
	  && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
	{
	  OMP_CLAUSE_DECL (c) = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
	  if (DECL_P (OMP_CLAUSE_LINEAR_STEP (c)))
	    OMP_CLAUSE_LINEAR_STEP (c)
	      = maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_LINEAR_STEP (c),
						ctx);
	}

  /* A "phony" loop (gridified) keeps the body but omits the loop
     statement and its continue/return markers.  */
  bool phony_loop = (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_GRID_LOOP
		     && gimple_omp_for_grid_phony (stmt));
  if ((ctx->scan_inclusive || ctx->scan_exclusive)
      && gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
    {
      gcc_assert (!phony_loop);
      lower_omp_for_scan (&body, &dlist, stmt, &fd, ctx);
    }
  else
    {
      if (!phony_loop)
	gimple_seq_add_stmt (&body, stmt);
      gimple_seq_add_seq (&body, gimple_omp_body (stmt));
    }

  if (!phony_loop)
    gimple_seq_add_stmt (&body, gimple_build_omp_continue (fd.loop.v,
							   fd.loop.v));

  /* After the loop, add exit clauses.  */
  lower_reduction_clauses (gimple_omp_for_clauses (stmt), &body, &clist, ctx);

  /* If there are reduction cleanups needing atomicity, wrap them in
     GOMP_atomic_start/GOMP_atomic_end calls.  */
  if (clist)
    {
      tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START);
      gcall *g = gimple_build_call (fndecl, 0);
      gimple_seq_add_stmt (&body, g);
      gimple_seq_add_seq (&body, clist);
      fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END);
      g = gimple_build_call (fndecl, 0);
      gimple_seq_add_stmt (&body, g);
    }

  if (ctx->cancellable)
    gimple_seq_add_stmt (&body, gimple_build_label (ctx->cancel_label));

  gimple_seq_add_seq (&body, dlist);

  if (rclauses)
    {
      /* Task-reduction init sequence goes before everything built so far.  */
      gimple_seq_add_seq (&tred_ilist, body);
      body = tred_ilist;
    }

  body = maybe_catch_exception (body);

  if (!phony_loop)
    {
      /* Region exit marker goes at the end of the loop body.  */
      gimple *g = gimple_build_omp_return (fd.have_nowait);
      gimple_seq_add_stmt (&body, g);

      gimple_seq_add_seq (&body, tred_dlist);

      maybe_add_implicit_barrier_cancel (ctx, g, &body);

      if (rclauses)
	OMP_CLAUSE_DECL (rclauses) = rtmp;
    }

  /* Add OpenACC joining and reduction markers just after the loop.  */
  if (oacc_tail)
    gimple_seq_add_seq (&body, oacc_tail);

  pop_gimplify_context (new_stmt);

  gimple_bind_append_vars (new_stmt, ctx->block_vars);
  maybe_remove_omp_member_access_dummy_vars (new_stmt);
  BLOCK_VARS (block) = gimple_bind_vars (new_stmt);
  if (BLOCK_VARS (block))
    TREE_USED (block) = 1;

  gimple_bind_set_body (new_stmt, body);
  /* STMT's body and pre-body have been spliced into BODY; detach them.  */
  gimple_omp_set_body (stmt, NULL);
  gimple_omp_for_set_pre_body (stmt, NULL);
}
10806 
10807 /* Callback for walk_stmts.  Check if the current statement only contains
10808    GIMPLE_OMP_FOR or GIMPLE_OMP_SECTIONS.  */
10809 
10810 static tree
check_combined_parallel(gimple_stmt_iterator * gsi_p,bool * handled_ops_p,struct walk_stmt_info * wi)10811 check_combined_parallel (gimple_stmt_iterator *gsi_p,
10812     			 bool *handled_ops_p,
10813     			 struct walk_stmt_info *wi)
10814 {
10815   int *info = (int *) wi->info;
10816   gimple *stmt = gsi_stmt (*gsi_p);
10817 
10818   *handled_ops_p = true;
10819   switch (gimple_code (stmt))
10820     {
10821     WALK_SUBSTMTS;
10822 
10823     case GIMPLE_DEBUG:
10824       break;
10825     case GIMPLE_OMP_FOR:
10826     case GIMPLE_OMP_SECTIONS:
10827       *info = *info == 0 ? 1 : -1;
10828       break;
10829     default:
10830       *info = -1;
10831       break;
10832     }
10833   return NULL;
10834 }
10835 
/* State passed through the tree-inline remapping callbacks while
   building a task copy function.  */

struct omp_taskcopy_context
{
  /* This field must be at the beginning, as we do "inheritance": Some
     callback functions for tree-inline.c (e.g., omp_copy_decl)
     receive a copy_body_data pointer that is up-casted to an
     omp_context pointer.  */
  copy_body_data cb;
  /* The OMP context of the task whose copyfn is being created.  */
  omp_context *ctx;
};
10845 
10846 static tree
task_copyfn_copy_decl(tree var,copy_body_data * cb)10847 task_copyfn_copy_decl (tree var, copy_body_data *cb)
10848 {
10849   struct omp_taskcopy_context *tcctx = (struct omp_taskcopy_context *) cb;
10850 
10851   if (splay_tree_lookup (tcctx->ctx->sfield_map, (splay_tree_key) var))
10852     return create_tmp_var (TREE_TYPE (var));
10853 
10854   return var;
10855 }
10856 
10857 static tree
task_copyfn_remap_type(struct omp_taskcopy_context * tcctx,tree orig_type)10858 task_copyfn_remap_type (struct omp_taskcopy_context *tcctx, tree orig_type)
10859 {
10860   tree name, new_fields = NULL, type, f;
10861 
10862   type = lang_hooks.types.make_type (RECORD_TYPE);
10863   name = DECL_NAME (TYPE_NAME (orig_type));
10864   name = build_decl (gimple_location (tcctx->ctx->stmt),
10865 		     TYPE_DECL, name, type);
10866   TYPE_NAME (type) = name;
10867 
10868   for (f = TYPE_FIELDS (orig_type); f ; f = TREE_CHAIN (f))
10869     {
10870       tree new_f = copy_node (f);
10871       DECL_CONTEXT (new_f) = type;
10872       TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &tcctx->cb);
10873       TREE_CHAIN (new_f) = new_fields;
10874       walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &tcctx->cb, NULL);
10875       walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r, &tcctx->cb, NULL);
10876       walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
10877 		 &tcctx->cb, NULL);
10878       new_fields = new_f;
10879       tcctx->cb.decl_map->put (f, new_f);
10880     }
10881   TYPE_FIELDS (type) = nreverse (new_fields);
10882   layout_type (type);
10883   return type;
10884 }
10885 
/* Create task copyfn: fill in the body of the function that copies
   firstprivate/shared task data from the sender record (*SARG) into
   the task's own record (*ARG).  TASK_STMT is the GIMPLE_OMP_TASK,
   CTX its lowering context.  */

static void
create_task_copyfn (gomp_task *task_stmt, omp_context *ctx)
{
  struct function *child_cfun;
  tree child_fn, t, c, src, dst, f, sf, arg, sarg, decl;
  tree record_type, srecord_type, bind, list;
  bool record_needs_remap = false, srecord_needs_remap = false;
  splay_tree_node n;
  struct omp_taskcopy_context tcctx;
  location_t loc = gimple_location (task_stmt);
  size_t looptempno = 0;

  child_fn = gimple_omp_task_copy_fn (task_stmt);
  task_cpyfns.safe_push (task_stmt);
  child_cfun = DECL_STRUCT_FUNCTION (child_fn);
  gcc_assert (child_cfun->cfg == NULL);
  DECL_SAVED_TREE (child_fn) = alloc_stmt_list ();

  /* Reset DECL_CONTEXT on function arguments.  */
  for (t = DECL_ARGUMENTS (child_fn); t; t = DECL_CHAIN (t))
    DECL_CONTEXT (t) = child_fn;

  /* Populate the function.  */
  push_gimplify_context ();
  push_cfun (child_cfun);

  bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
  TREE_SIDE_EFFECTS (bind) = 1;
  list = NULL;
  DECL_SAVED_TREE (child_fn) = bind;
  DECL_SOURCE_LOCATION (child_fn) = gimple_location (task_stmt);

  /* Remap src and dst argument types if needed.  A remap is needed when
     any field has a variably modified type (VLA-dependent sizes).  */
  record_type = ctx->record_type;
  srecord_type = ctx->srecord_type;
  for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
    if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
      {
	record_needs_remap = true;
	break;
      }
  for (f = TYPE_FIELDS (srecord_type); f ; f = DECL_CHAIN (f))
    if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
      {
	srecord_needs_remap = true;
	break;
      }

  if (record_needs_remap || srecord_needs_remap)
    {
      /* Set up a copy_body_data for remapping the record types into
	 the child function.  */
      memset (&tcctx, '\0', sizeof (tcctx));
      tcctx.cb.src_fn = ctx->cb.src_fn;
      tcctx.cb.dst_fn = child_fn;
      tcctx.cb.src_node = cgraph_node::get (tcctx.cb.src_fn);
      gcc_checking_assert (tcctx.cb.src_node);
      tcctx.cb.dst_node = tcctx.cb.src_node;
      tcctx.cb.src_cfun = ctx->cb.src_cfun;
      tcctx.cb.copy_decl = task_copyfn_copy_decl;
      tcctx.cb.eh_lp_nr = 0;
      tcctx.cb.transform_call_graph_edges = CB_CGE_MOVE;
      tcctx.cb.decl_map = new hash_map<tree, tree>;
      tcctx.ctx = ctx;

      if (record_needs_remap)
	record_type = task_copyfn_remap_type (&tcctx, record_type);
      if (srecord_needs_remap)
	srecord_type = task_copyfn_remap_type (&tcctx, srecord_type);
    }
  else
    /* decl_map == NULL is used below as "no remapping happened".  */
    tcctx.cb.decl_map = NULL;

  /* The two arguments: destination record pointer and sender record
     pointer.  */
  arg = DECL_ARGUMENTS (child_fn);
  TREE_TYPE (arg) = build_pointer_type (record_type);
  sarg = DECL_CHAIN (arg);
  TREE_TYPE (sarg) = build_pointer_type (srecord_type);

  /* First pass: initialize temporaries used in record_type and srecord_type
     sizes and field offsets.  */
  if (tcctx.cb.decl_map)
    for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
	{
	  tree *p;

	  decl = OMP_CLAUSE_DECL (c);
	  p = tcctx.cb.decl_map->get (decl);
	  if (p == NULL)
	    continue;
	  n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
	  sf = (tree) n->value;
	  sf = *tcctx.cb.decl_map->get (sf);
	  src = build_simple_mem_ref_loc (loc, sarg);
	  src = omp_build_component_ref (src, sf);
	  t = build2 (MODIFY_EXPR, TREE_TYPE (*p), *p, src);
	  append_to_statement_list (t, &list);
	}

  /* Second pass: copy shared var pointers and copy construct non-VLA
     firstprivate vars.  */
  for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
    switch (OMP_CLAUSE_CODE (c))
      {
	splay_tree_key key;
      case OMP_CLAUSE_SHARED:
	decl = OMP_CLAUSE_DECL (c);
	key = (splay_tree_key) decl;
	/* SHARED_FIRSTPRIVATE decls are keyed by &DECL_UID, matching
	   how they were registered in the field maps.  */
	if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	  key = (splay_tree_key) &DECL_UID (decl);
	n = splay_tree_lookup (ctx->field_map, key);
	if (n == NULL)
	  break;
	f = (tree) n->value;
	if (tcctx.cb.decl_map)
	  f = *tcctx.cb.decl_map->get (f);
	n = splay_tree_lookup (ctx->sfield_map, key);
	sf = (tree) n->value;
	if (tcctx.cb.decl_map)
	  sf = *tcctx.cb.decl_map->get (sf);
	src = build_simple_mem_ref_loc (loc, sarg);
	src = omp_build_component_ref (src, sf);
	dst = build_simple_mem_ref_loc (loc, arg);
	dst = omp_build_component_ref (dst, f);
	t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
	append_to_statement_list (t, &list);
	break;
      case OMP_CLAUSE_REDUCTION:
      case OMP_CLAUSE_IN_REDUCTION:
	decl = OMP_CLAUSE_DECL (c);
	/* Strip the MEM_REF wrapping down to the underlying decl used
	   as the field-map key.  */
	if (TREE_CODE (decl) == MEM_REF)
	  {
	    decl = TREE_OPERAND (decl, 0);
	    if (TREE_CODE (decl) == POINTER_PLUS_EXPR)
	      decl = TREE_OPERAND (decl, 0);
	    if (TREE_CODE (decl) == INDIRECT_REF
		|| TREE_CODE (decl) == ADDR_EXPR)
	      decl = TREE_OPERAND (decl, 0);
	  }
	key = (splay_tree_key) decl;
	n = splay_tree_lookup (ctx->field_map, key);
	if (n == NULL)
	  break;
	f = (tree) n->value;
	if (tcctx.cb.decl_map)
	  f = *tcctx.cb.decl_map->get (f);
	n = splay_tree_lookup (ctx->sfield_map, key);
	sf = (tree) n->value;
	if (tcctx.cb.decl_map)
	  sf = *tcctx.cb.decl_map->get (sf);
	src = build_simple_mem_ref_loc (loc, sarg);
	src = omp_build_component_ref (src, sf);
	if (decl != OMP_CLAUSE_DECL (c)
	    && TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
	    && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE)
	  src = build_simple_mem_ref_loc (loc, src);
	dst = build_simple_mem_ref_loc (loc, arg);
	dst = omp_build_component_ref (dst, f);
	t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
	append_to_statement_list (t, &list);
	break;
      case OMP_CLAUSE__LOOPTEMP_:
	/* Fields for first two _looptemp_ clauses are initialized by
	   GOMP_taskloop*, the rest are handled like firstprivate.  */
        if (looptempno < 2)
	  {
	    looptempno++;
	    break;
	  }
	/* FALLTHRU */
      case OMP_CLAUSE__REDUCTEMP_:
      case OMP_CLAUSE_FIRSTPRIVATE:
	decl = OMP_CLAUSE_DECL (c);
	/* VLA firstprivates are handled in the last pass below.  */
	if (is_variable_sized (decl))
	  break;
	n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
	if (n == NULL)
	  break;
	f = (tree) n->value;
	if (tcctx.cb.decl_map)
	  f = *tcctx.cb.decl_map->get (f);
	n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
	if (n != NULL)
	  {
	    sf = (tree) n->value;
	    if (tcctx.cb.decl_map)
	      sf = *tcctx.cb.decl_map->get (sf);
	    src = build_simple_mem_ref_loc (loc, sarg);
	    src = omp_build_component_ref (src, sf);
	    if (use_pointer_for_field (decl, NULL) || omp_is_reference (decl))
	      src = build_simple_mem_ref_loc (loc, src);
	  }
	else
	  src = decl;
	dst = build_simple_mem_ref_loc (loc, arg);
	dst = omp_build_component_ref (dst, f);
	/* Only FIRSTPRIVATE needs a language-specific copy constructor;
	   the internal temp clauses are plain assignments.  */
	if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
	  t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
	else
	  t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
	append_to_statement_list (t, &list);
	break;
      case OMP_CLAUSE_PRIVATE:
	if (! OMP_CLAUSE_PRIVATE_OUTER_REF (c))
	  break;
	decl = OMP_CLAUSE_DECL (c);
	n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
	f = (tree) n->value;
	if (tcctx.cb.decl_map)
	  f = *tcctx.cb.decl_map->get (f);
	n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
	if (n != NULL)
	  {
	    sf = (tree) n->value;
	    if (tcctx.cb.decl_map)
	      sf = *tcctx.cb.decl_map->get (sf);
	    src = build_simple_mem_ref_loc (loc, sarg);
	    src = omp_build_component_ref (src, sf);
	    if (use_pointer_for_field (decl, NULL))
	      src = build_simple_mem_ref_loc (loc, src);
	  }
	else
	  src = decl;
	dst = build_simple_mem_ref_loc (loc, arg);
	dst = omp_build_component_ref (dst, f);
	t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
	append_to_statement_list (t, &list);
	break;
      default:
	break;
      }

  /* Last pass: handle VLA firstprivates.  */
  if (tcctx.cb.decl_map)
    for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
	{
	  tree ind, ptr, df;

	  decl = OMP_CLAUSE_DECL (c);
	  if (!is_variable_sized (decl))
	    continue;
	  n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
	  if (n == NULL)
	    continue;
	  f = (tree) n->value;
	  f = *tcctx.cb.decl_map->get (f);
	  /* A VLA firstprivate has a DECL_VALUE_EXPR of the form *ptr;
	     copy-construct the data and then point the pointer field at
	     the fresh copy.  */
	  gcc_assert (DECL_HAS_VALUE_EXPR_P (decl));
	  ind = DECL_VALUE_EXPR (decl);
	  gcc_assert (TREE_CODE (ind) == INDIRECT_REF);
	  gcc_assert (DECL_P (TREE_OPERAND (ind, 0)));
	  n = splay_tree_lookup (ctx->sfield_map,
				 (splay_tree_key) TREE_OPERAND (ind, 0));
	  sf = (tree) n->value;
	  sf = *tcctx.cb.decl_map->get (sf);
	  src = build_simple_mem_ref_loc (loc, sarg);
	  src = omp_build_component_ref (src, sf);
	  src = build_simple_mem_ref_loc (loc, src);
	  dst = build_simple_mem_ref_loc (loc, arg);
	  dst = omp_build_component_ref (dst, f);
	  t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
	  append_to_statement_list (t, &list);
	  n = splay_tree_lookup (ctx->field_map,
				 (splay_tree_key) TREE_OPERAND (ind, 0));
	  df = (tree) n->value;
	  df = *tcctx.cb.decl_map->get (df);
	  ptr = build_simple_mem_ref_loc (loc, arg);
	  ptr = omp_build_component_ref (ptr, df);
	  t = build2 (MODIFY_EXPR, TREE_TYPE (ptr), ptr,
		      build_fold_addr_expr_loc (loc, dst));
	  append_to_statement_list (t, &list);
	}

  t = build1 (RETURN_EXPR, void_type_node, NULL);
  append_to_statement_list (t, &list);

  if (tcctx.cb.decl_map)
    delete tcctx.cb.decl_map;
  pop_gimplify_context (NULL);
  BIND_EXPR_BODY (bind) = list;
  pop_cfun ();
}
11168 
/* Lower the OMP_CLAUSE_DEPEND clauses found in *PCLAUSES into a flattened
   array of dependence addresses handed to the runtime.  Setup statements
   (array slot initialization) are appended to *ISEQ; teardown statements
   (a clobber of the array) are appended to *OSEQ.  The original depend
   clauses are left in place and a new OMP_CLAUSE_DEPEND_LAST clause whose
   DECL is the address of the array is prepended to *PCLAUSES so that later
   expansion passes find the lowered form first.  */

static void
lower_depend_clauses (tree *pclauses, gimple_seq *iseq, gimple_seq *oseq)
{
  tree c, clauses;
  gimple *g;
  /* cnt[0] = out/inout, cnt[1] = mutexinoutset, cnt[2] = in,
     cnt[3] = depobj.  IDX starts at the first address slot in the array:
     2 for the short header format, 5 for the long one (see below).  */
  size_t cnt[4] = { 0, 0, 0, 0 }, idx = 2, i;

  clauses = omp_find_clause (*pclauses, OMP_CLAUSE_DEPEND);
  gcc_assert (clauses);
  /* First pass: count the depend clauses of each kind.  */
  for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
      switch (OMP_CLAUSE_DEPEND_KIND (c))
	{
	case OMP_CLAUSE_DEPEND_LAST:
	  /* Lowering already done at gimplification.  */
	  return;
	case OMP_CLAUSE_DEPEND_IN:
	  cnt[2]++;
	  break;
	case OMP_CLAUSE_DEPEND_OUT:
	case OMP_CLAUSE_DEPEND_INOUT:
	  cnt[0]++;
	  break;
	case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
	  cnt[1]++;
	  break;
	case OMP_CLAUSE_DEPEND_DEPOBJ:
	  cnt[3]++;
	  break;
	case OMP_CLAUSE_DEPEND_SOURCE:
	case OMP_CLAUSE_DEPEND_SINK:
	  /* SOURCE and SINK belong to ordered(depend) and must not reach
	     this point.  */
	  /* FALLTHRU */
	default:
	  gcc_unreachable ();
	}
  /* Kinds newer than in/out/inout require the long header format:
     { 0, total, cnt[0], cnt[1], cnt[2], addresses... }.  The leading 0
     distinguishes it from the short format { total, cnt[0], addresses... }.  */
  if (cnt[1] || cnt[3])
    idx = 5;
  size_t total = cnt[0] + cnt[1] + cnt[2] + cnt[3];
  /* The array holds IDX header slots followed by one address per clause.  */
  tree type = build_array_type_nelts (ptr_type_node, total + idx);
  tree array = create_tmp_var (type);
  TREE_ADDRESSABLE (array) = 1;
  tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
		   NULL_TREE);
  if (idx == 5)
    {
      /* Long format: slot 0 is 0, slot 1 is the total count.  */
      g = gimple_build_assign (r, build_int_cst (ptr_type_node, 0));
      gimple_seq_add_stmt (iseq, g);
      r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
		  NULL_TREE);
    }
  g = gimple_build_assign (r, build_int_cst (ptr_type_node, total));
  gimple_seq_add_stmt (iseq, g);
  /* Short format stores only cnt[0] (out/inout); long format stores the
     first three per-kind counts (depobj count is implied by the total).  */
  for (i = 0; i < (idx == 5 ? 3 : 1); i++)
    {
      r = build4 (ARRAY_REF, ptr_type_node, array,
		  size_int (i + 1 + (idx == 5)), NULL_TREE, NULL_TREE);
      g = gimple_build_assign (r, build_int_cst (ptr_type_node, cnt[i]));
      gimple_seq_add_stmt (iseq, g);
    }
  /* Second pass: emit the dependence addresses grouped by kind, in the
     same kind order as the counts above (out/inout, mutexinoutset, in,
     depobj), by walking the clause chain once per non-empty group.  */
  for (i = 0; i < 4; i++)
    {
      if (cnt[i] == 0)
	continue;
      for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
	  continue;
	else
	  {
	    /* Skip clauses that do not belong to group I.  */
	    switch (OMP_CLAUSE_DEPEND_KIND (c))
	      {
	      case OMP_CLAUSE_DEPEND_IN:
		if (i != 2)
		  continue;
		break;
	      case OMP_CLAUSE_DEPEND_OUT:
	      case OMP_CLAUSE_DEPEND_INOUT:
		if (i != 0)
		  continue;
		break;
	      case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
		if (i != 1)
		  continue;
		break;
	      case OMP_CLAUSE_DEPEND_DEPOBJ:
		if (i != 3)
		  continue;
		break;
	      default:
		gcc_unreachable ();
	      }
	    tree t = OMP_CLAUSE_DECL (c);
	    t = fold_convert (ptr_type_node, t);
	    gimplify_expr (&t, iseq, NULL, is_gimple_val, fb_rvalue);
	    /* IDX advances through the address slots across all groups.  */
	    r = build4 (ARRAY_REF, ptr_type_node, array, size_int (idx++),
			NULL_TREE, NULL_TREE);
	    g = gimple_build_assign (r, t);
	    gimple_seq_add_stmt (iseq, g);
	  }
    }
  /* Prepend a DEPEND_LAST clause carrying the array's address; its
     presence makes any later invocation of this function a no-op (see the
     early return above).  */
  c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
  OMP_CLAUSE_DEPEND_KIND (c) = OMP_CLAUSE_DEPEND_LAST;
  OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
  OMP_CLAUSE_CHAIN (c) = *pclauses;
  *pclauses = c;
  /* Mark the array dead after the construct.  */
  tree clobber = build_clobber (type);
  g = gimple_build_assign (array, clobber);
  gimple_seq_add_stmt (oseq, g);
}
11277 
11278 /* Lower the OpenMP parallel or task directive in the current statement
11279    in GSI_P.  CTX holds context information for the directive.  */
11280 
static void
lower_omp_taskreg (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree clauses;
  tree child_fn, t;
  gimple *stmt = gsi_stmt (*gsi_p);
  gbind *par_bind, *bind, *dep_bind = NULL;
  gimple_seq par_body;
  location_t loc = gimple_location (stmt);

  clauses = gimple_omp_taskreg_clauses (stmt);
  /* A GIMPLE_OMP_TASK with taskwait_p set represents a bodyless
     "taskwait" with depend clauses; there is no bind/body to extract.  */
  if (gimple_code (stmt) == GIMPLE_OMP_TASK
      && gimple_omp_task_taskwait_p (stmt))
    {
      par_bind = NULL;
      par_body = NULL;
    }
  else
    {
      par_bind
	= as_a <gbind *> (gimple_seq_first_stmt (gimple_omp_body (stmt)));
      par_body = gimple_bind_body (par_bind);
    }
  child_fn = ctx->cb.dst_fn;
  /* Detect a parallel whose body is exactly one worksharing construct,
     so expansion can use the combined parallel+workshare runtime entry
     points.  check_combined_parallel counts candidates into ws_num.  */
  if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL
      && !gimple_omp_parallel_combined_p (stmt))
    {
      struct walk_stmt_info wi;
      int ws_num = 0;

      memset (&wi, 0, sizeof (wi));
      wi.info = &ws_num;
      wi.val_only = true;
      walk_gimple_seq (par_body, check_combined_parallel, NULL, &wi);
      if (ws_num == 1)
	gimple_omp_parallel_set_combined_p (stmt, true);
    }
  /* For tasks with depend clauses, lower them into the runtime depend
     array; the result goes into a wrapping bind (dep_bind) around the
     whole construct.  Note this opens a gimplify context that is closed
     in one of the two dep_bind blocks below.  */
  gimple_seq dep_ilist = NULL;
  gimple_seq dep_olist = NULL;
  if (gimple_code (stmt) == GIMPLE_OMP_TASK
      && omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
    {
      push_gimplify_context ();
      dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
      lower_depend_clauses (gimple_omp_task_clauses_ptr (stmt),
			    &dep_ilist, &dep_olist);
    }

  /* Bodyless taskwait-with-depend: just wrap the statement between the
     depend setup/teardown sequences and we are done.  */
  if (gimple_code (stmt) == GIMPLE_OMP_TASK
      && gimple_omp_task_taskwait_p (stmt))
    {
      if (dep_bind)
	{
	  gsi_replace (gsi_p, dep_bind, true);
	  gimple_bind_add_seq (dep_bind, dep_ilist);
	  gimple_bind_add_stmt (dep_bind, stmt);
	  gimple_bind_add_seq (dep_bind, dep_olist);
	  pop_gimplify_context (dep_bind);
	}
      return;
    }

  /* An srecord_type means the task needs a copy function to duplicate
     firstprivate data into the task's own record.  */
  if (ctx->srecord_type)
    create_task_copyfn (as_a <gomp_task *> (stmt), ctx);

  /* Task reductions (taskloop reduction / parallel _reductemp_) also need
     setup/teardown sequences hosted in dep_bind; create it on demand.  */
  gimple_seq tskred_ilist = NULL;
  gimple_seq tskred_olist = NULL;
  if ((is_task_ctx (ctx)
       && gimple_omp_task_taskloop_p (ctx->stmt)
       && omp_find_clause (gimple_omp_task_clauses (ctx->stmt),
			   OMP_CLAUSE_REDUCTION))
      || (is_parallel_ctx (ctx)
	  && omp_find_clause (gimple_omp_parallel_clauses (stmt),
			      OMP_CLAUSE__REDUCTEMP_)))
    {
      if (dep_bind == NULL)
	{
	  push_gimplify_context ();
	  dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
	}
      lower_omp_task_reductions (ctx, is_task_ctx (ctx) ? OMP_TASKLOOP
							: OMP_PARALLEL,
				 gimple_omp_taskreg_clauses (ctx->stmt),
				 &tskred_ilist, &tskred_olist);
    }

  push_gimplify_context ();

  gimple_seq par_olist = NULL;
  gimple_seq par_ilist = NULL;
  gimple_seq par_rlist = NULL;
  /* A "phony" construct is a gridified (GPGPU) parallel that will not be
     outlined into a child function; its body is inlined below instead of
     keeping the GIMPLE_OMP_PARALLEL statement.  */
  bool phony_construct = gimple_code (stmt) == GIMPLE_OMP_PARALLEL
    && gimple_omp_parallel_grid_phony (as_a <gomp_parallel *> (stmt));
  if (phony_construct && ctx->record_type)
    {
      gcc_checking_assert (!ctx->receiver_decl);
      ctx->receiver_decl = create_tmp_var
	(build_reference_type (ctx->record_type), ".omp_rec");
    }
  lower_rec_input_clauses (clauses, &par_ilist, &par_olist, ctx, NULL);
  lower_omp (&par_body, ctx);
  /* Task reductions were handled above; only parallel handles plain
     reduction clauses here.  */
  if (gimple_code (stmt) != GIMPLE_OMP_TASK)
    lower_reduction_clauses (clauses, &par_rlist, NULL, ctx);

  /* Declare all the variables created by mapping and the variables
     declared in the scope of the parallel body.  */
  record_vars_into (ctx->block_vars, child_fn);
  maybe_remove_omp_member_access_dummy_vars (par_bind);
  record_vars_into (gimple_bind_vars (par_bind), child_fn);

  if (ctx->record_type)
    {
      /* The sender decl (.omp_data_o) is the structure in the parent
	 frame through which shared/firstprivate data is passed; prefer
	 the srecord_type layout when a task copyfn exists.  */
      ctx->sender_decl
	= create_tmp_var (ctx->srecord_type ? ctx->srecord_type
			  : ctx->record_type, ".omp_data_o");
      DECL_NAMELESS (ctx->sender_decl) = 1;
      TREE_ADDRESSABLE (ctx->sender_decl) = 1;
      gimple_omp_taskreg_set_data_arg (stmt, ctx->sender_decl);
    }

  gimple_seq olist = NULL;
  gimple_seq ilist = NULL;
  lower_send_clauses (clauses, &ilist, &olist, ctx);
  lower_send_shared_vars (&ilist, &olist, ctx);

  if (ctx->record_type)
    {
      /* Mark the sender structure dead after the construct.  */
      tree clobber = build_clobber (TREE_TYPE (ctx->sender_decl));
      gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
							clobber));
    }

  /* Once all the expansions are done, sequence all the different
     fragments inside gimple_omp_body.  */

  gimple_seq new_body = NULL;

  if (ctx->record_type)
    {
      t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
      /* fixup_child_record_type might have changed receiver_decl's type.  */
      t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
      gimple_seq_add_stmt (&new_body,
	  		   gimple_build_assign (ctx->receiver_decl, t));
    }

  gimple_seq_add_seq (&new_body, par_ilist);
  gimple_seq_add_seq (&new_body, par_body);
  gimple_seq_add_seq (&new_body, par_rlist);
  /* Cancellation points branch to cancel_label, placed after the body
     but before the copy-out sequence.  */
  if (ctx->cancellable)
    gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
  gimple_seq_add_seq (&new_body, par_olist);
  new_body = maybe_catch_exception (new_body);
  if (gimple_code (stmt) == GIMPLE_OMP_TASK)
    gimple_seq_add_stmt (&new_body,
			 gimple_build_omp_continue (integer_zero_node,
						    integer_zero_node));
  if (!phony_construct)
    {
      gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
      gimple_omp_set_body (stmt, new_body);
    }

  /* Build the outer bind: reuse par_bind's BLOCK unless we are nesting
     inside dep_bind and par_bind has none.  */
  if (dep_bind && gimple_bind_block (par_bind) == NULL_TREE)
    bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
  else
    bind = gimple_build_bind (NULL, NULL, gimple_bind_block (par_bind));
  gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
  gimple_bind_add_seq (bind, ilist);
  /* For a phony construct the body is inlined directly instead of
     keeping the OMP statement.  */
  if (!phony_construct)
    gimple_bind_add_stmt (bind, stmt);
  else
    gimple_bind_add_seq (bind, new_body);
  gimple_bind_add_seq (bind, olist);

  pop_gimplify_context (NULL);

  /* Nest everything inside dep_bind in the order: depend setup, task
     reduction setup, construct, task reduction teardown, depend
     teardown; this also closes the gimplify context opened when
     dep_bind was created.  */
  if (dep_bind)
    {
      gimple_bind_add_seq (dep_bind, dep_ilist);
      gimple_bind_add_seq (dep_bind, tskred_ilist);
      gimple_bind_add_stmt (dep_bind, bind);
      gimple_bind_add_seq (dep_bind, tskred_olist);
      gimple_bind_add_seq (dep_bind, dep_olist);
      pop_gimplify_context (dep_bind);
    }
}
11468 
11469 /* Lower the GIMPLE_OMP_TARGET in the current statement
11470    in GSI_P.  CTX holds context information for the directive.  */
11471 
11472 static void
lower_omp_target(gimple_stmt_iterator * gsi_p,omp_context * ctx)11473 lower_omp_target (gimple_stmt_iterator *gsi_p, omp_context *ctx)
11474 {
11475   tree clauses;
11476   tree child_fn, t, c;
11477   gomp_target *stmt = as_a <gomp_target *> (gsi_stmt (*gsi_p));
11478   gbind *tgt_bind, *bind, *dep_bind = NULL;
11479   gimple_seq tgt_body, olist, ilist, fplist, new_body;
11480   location_t loc = gimple_location (stmt);
11481   bool offloaded, data_region;
11482   unsigned int map_cnt = 0;
11483 
11484   offloaded = is_gimple_omp_offloaded (stmt);
11485   switch (gimple_omp_target_kind (stmt))
11486     {
11487     case GF_OMP_TARGET_KIND_REGION:
11488     case GF_OMP_TARGET_KIND_UPDATE:
11489     case GF_OMP_TARGET_KIND_ENTER_DATA:
11490     case GF_OMP_TARGET_KIND_EXIT_DATA:
11491     case GF_OMP_TARGET_KIND_OACC_PARALLEL:
11492     case GF_OMP_TARGET_KIND_OACC_KERNELS:
11493     case GF_OMP_TARGET_KIND_OACC_SERIAL:
11494     case GF_OMP_TARGET_KIND_OACC_UPDATE:
11495     case GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA:
11496     case GF_OMP_TARGET_KIND_OACC_DECLARE:
11497       data_region = false;
11498       break;
11499     case GF_OMP_TARGET_KIND_DATA:
11500     case GF_OMP_TARGET_KIND_OACC_DATA:
11501     case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
11502       data_region = true;
11503       break;
11504     default:
11505       gcc_unreachable ();
11506     }
11507 
11508   clauses = gimple_omp_target_clauses (stmt);
11509 
11510   gimple_seq dep_ilist = NULL;
11511   gimple_seq dep_olist = NULL;
11512   if (omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
11513     {
11514       push_gimplify_context ();
11515       dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
11516       lower_depend_clauses (gimple_omp_target_clauses_ptr (stmt),
11517 			    &dep_ilist, &dep_olist);
11518     }
11519 
11520   tgt_bind = NULL;
11521   tgt_body = NULL;
11522   if (offloaded)
11523     {
11524       tgt_bind = gimple_seq_first_stmt_as_a_bind (gimple_omp_body (stmt));
11525       tgt_body = gimple_bind_body (tgt_bind);
11526     }
11527   else if (data_region)
11528     tgt_body = gimple_omp_body (stmt);
11529   child_fn = ctx->cb.dst_fn;
11530 
11531   push_gimplify_context ();
11532   fplist = NULL;
11533 
11534   for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
11535     switch (OMP_CLAUSE_CODE (c))
11536       {
11537 	tree var, x;
11538 
11539       default:
11540 	break;
11541       case OMP_CLAUSE_MAP:
11542 #if CHECKING_P
11543 	/* First check what we're prepared to handle in the following.  */
11544 	switch (OMP_CLAUSE_MAP_KIND (c))
11545 	  {
11546 	  case GOMP_MAP_ALLOC:
11547 	  case GOMP_MAP_TO:
11548 	  case GOMP_MAP_FROM:
11549 	  case GOMP_MAP_TOFROM:
11550 	  case GOMP_MAP_POINTER:
11551 	  case GOMP_MAP_TO_PSET:
11552 	  case GOMP_MAP_DELETE:
11553 	  case GOMP_MAP_RELEASE:
11554 	  case GOMP_MAP_ALWAYS_TO:
11555 	  case GOMP_MAP_ALWAYS_FROM:
11556 	  case GOMP_MAP_ALWAYS_TOFROM:
11557 	  case GOMP_MAP_FIRSTPRIVATE_POINTER:
11558 	  case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
11559 	  case GOMP_MAP_STRUCT:
11560 	  case GOMP_MAP_ALWAYS_POINTER:
11561 	    break;
11562 	  case GOMP_MAP_IF_PRESENT:
11563 	  case GOMP_MAP_FORCE_ALLOC:
11564 	  case GOMP_MAP_FORCE_TO:
11565 	  case GOMP_MAP_FORCE_FROM:
11566 	  case GOMP_MAP_FORCE_TOFROM:
11567 	  case GOMP_MAP_FORCE_PRESENT:
11568 	  case GOMP_MAP_FORCE_DEVICEPTR:
11569 	  case GOMP_MAP_DEVICE_RESIDENT:
11570 	  case GOMP_MAP_LINK:
11571 	  case GOMP_MAP_ATTACH:
11572 	  case GOMP_MAP_DETACH:
11573 	  case GOMP_MAP_FORCE_DETACH:
11574 	    gcc_assert (is_gimple_omp_oacc (stmt));
11575 	    break;
11576 	  default:
11577 	    gcc_unreachable ();
11578 	  }
11579 #endif
11580 	  /* FALLTHRU */
11581       case OMP_CLAUSE_TO:
11582       case OMP_CLAUSE_FROM:
11583       oacc_firstprivate:
11584 	var = OMP_CLAUSE_DECL (c);
11585 	if (!DECL_P (var))
11586 	  {
11587 	    if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP
11588 		|| (!OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
11589 		    && (OMP_CLAUSE_MAP_KIND (c)
11590 			!= GOMP_MAP_FIRSTPRIVATE_POINTER)))
11591 	      map_cnt++;
11592 	    continue;
11593 	  }
11594 
11595 	if (DECL_SIZE (var)
11596 	    && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
11597 	  {
11598 	    tree var2 = DECL_VALUE_EXPR (var);
11599 	    gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
11600 	    var2 = TREE_OPERAND (var2, 0);
11601 	    gcc_assert (DECL_P (var2));
11602 	    var = var2;
11603 	  }
11604 
11605 	if (offloaded
11606 	    && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
11607 	    && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
11608 		|| OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
11609 	  {
11610 	    if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
11611 	      {
11612 		if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx))
11613 		    && varpool_node::get_create (var)->offloadable)
11614 		  continue;
11615 
11616 		tree type = build_pointer_type (TREE_TYPE (var));
11617 		tree new_var = lookup_decl (var, ctx);
11618 		x = create_tmp_var_raw (type, get_name (new_var));
11619 		gimple_add_tmp_var (x);
11620 		x = build_simple_mem_ref (x);
11621 		SET_DECL_VALUE_EXPR (new_var, x);
11622 		DECL_HAS_VALUE_EXPR_P (new_var) = 1;
11623 	      }
11624 	    continue;
11625 	  }
11626 
11627 	if (!maybe_lookup_field (var, ctx))
11628 	  continue;
11629 
11630 	/* Don't remap compute constructs' reduction variables, because the
11631 	   intermediate result must be local to each gang.  */
11632 	if (offloaded && !(OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
11633 			   && OMP_CLAUSE_MAP_IN_REDUCTION (c)))
11634 	  {
11635 	    x = build_receiver_ref (var, true, ctx);
11636 	    tree new_var = lookup_decl (var, ctx);
11637 
11638 	    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
11639 		&& OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
11640 		&& !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
11641 		&& TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
11642 	      x = build_simple_mem_ref (x);
11643 	    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
11644 	      {
11645 		gcc_assert (is_gimple_omp_oacc (ctx->stmt));
11646 		if (omp_is_reference (new_var)
11647 		    && (TREE_CODE (TREE_TYPE (new_var)) != POINTER_TYPE
11648 		        || DECL_BY_REFERENCE (var)))
11649 		  {
11650 		    /* Create a local object to hold the instance
11651 		       value.  */
11652 		    tree type = TREE_TYPE (TREE_TYPE (new_var));
11653 		    const char *id = IDENTIFIER_POINTER (DECL_NAME (new_var));
11654 		    tree inst = create_tmp_var (type, id);
11655 		    gimplify_assign (inst, fold_indirect_ref (x), &fplist);
11656 		    x = build_fold_addr_expr (inst);
11657 		  }
11658 		gimplify_assign (new_var, x, &fplist);
11659 	      }
11660 	    else if (DECL_P (new_var))
11661 	      {
11662 		SET_DECL_VALUE_EXPR (new_var, x);
11663 		DECL_HAS_VALUE_EXPR_P (new_var) = 1;
11664 	      }
11665 	    else
11666 	      gcc_unreachable ();
11667 	  }
11668 	map_cnt++;
11669 	break;
11670 
11671       case OMP_CLAUSE_FIRSTPRIVATE:
11672 	gcc_checking_assert (offloaded);
11673 	if (is_gimple_omp_oacc (ctx->stmt))
11674 	  {
11675 	    /* No 'firstprivate' clauses on OpenACC 'kernels'.  */
11676 	    gcc_checking_assert (!is_oacc_kernels (ctx));
11677 
11678 	    goto oacc_firstprivate;
11679 	  }
11680 	map_cnt++;
11681 	var = OMP_CLAUSE_DECL (c);
11682 	if (!omp_is_reference (var)
11683 	    && !is_gimple_reg_type (TREE_TYPE (var)))
11684 	  {
11685 	    tree new_var = lookup_decl (var, ctx);
11686 	    if (is_variable_sized (var))
11687 	      {
11688 		tree pvar = DECL_VALUE_EXPR (var);
11689 		gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
11690 		pvar = TREE_OPERAND (pvar, 0);
11691 		gcc_assert (DECL_P (pvar));
11692 		tree new_pvar = lookup_decl (pvar, ctx);
11693 		x = build_fold_indirect_ref (new_pvar);
11694 		TREE_THIS_NOTRAP (x) = 1;
11695 	      }
11696 	    else
11697 	      x = build_receiver_ref (var, true, ctx);
11698 	    SET_DECL_VALUE_EXPR (new_var, x);
11699 	    DECL_HAS_VALUE_EXPR_P (new_var) = 1;
11700 	  }
11701 	break;
11702 
11703       case OMP_CLAUSE_PRIVATE:
11704 	gcc_checking_assert (offloaded);
11705 	if (is_gimple_omp_oacc (ctx->stmt))
11706 	  {
11707 	    /* No 'private' clauses on OpenACC 'kernels'.  */
11708 	    gcc_checking_assert (!is_oacc_kernels (ctx));
11709 
11710 	    break;
11711 	  }
11712 	var = OMP_CLAUSE_DECL (c);
11713 	if (is_variable_sized (var))
11714 	  {
11715 	    tree new_var = lookup_decl (var, ctx);
11716 	    tree pvar = DECL_VALUE_EXPR (var);
11717 	    gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
11718 	    pvar = TREE_OPERAND (pvar, 0);
11719 	    gcc_assert (DECL_P (pvar));
11720 	    tree new_pvar = lookup_decl (pvar, ctx);
11721 	    x = build_fold_indirect_ref (new_pvar);
11722 	    TREE_THIS_NOTRAP (x) = 1;
11723 	    SET_DECL_VALUE_EXPR (new_var, x);
11724 	    DECL_HAS_VALUE_EXPR_P (new_var) = 1;
11725 	  }
11726 	break;
11727 
11728       case OMP_CLAUSE_USE_DEVICE_PTR:
11729       case OMP_CLAUSE_USE_DEVICE_ADDR:
11730       case OMP_CLAUSE_IS_DEVICE_PTR:
11731 	var = OMP_CLAUSE_DECL (c);
11732 	map_cnt++;
11733 	if (is_variable_sized (var))
11734 	  {
11735 	    tree new_var = lookup_decl (var, ctx);
11736 	    tree pvar = DECL_VALUE_EXPR (var);
11737 	    gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
11738 	    pvar = TREE_OPERAND (pvar, 0);
11739 	    gcc_assert (DECL_P (pvar));
11740 	    tree new_pvar = lookup_decl (pvar, ctx);
11741 	    x = build_fold_indirect_ref (new_pvar);
11742 	    TREE_THIS_NOTRAP (x) = 1;
11743 	    SET_DECL_VALUE_EXPR (new_var, x);
11744 	    DECL_HAS_VALUE_EXPR_P (new_var) = 1;
11745 	  }
11746 	else if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
11747 		  && !omp_is_reference (var)
11748 		  && !omp_is_allocatable_or_ptr (var)
11749 		  && !lang_hooks.decls.omp_array_data (var, true))
11750 		 || TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
11751 	  {
11752 	    tree new_var = lookup_decl (var, ctx);
11753 	    tree type = build_pointer_type (TREE_TYPE (var));
11754 	    x = create_tmp_var_raw (type, get_name (new_var));
11755 	    gimple_add_tmp_var (x);
11756 	    x = build_simple_mem_ref (x);
11757 	    SET_DECL_VALUE_EXPR (new_var, x);
11758 	    DECL_HAS_VALUE_EXPR_P (new_var) = 1;
11759 	  }
11760 	else
11761 	  {
11762 	    tree new_var = lookup_decl (var, ctx);
11763 	    x = create_tmp_var_raw (TREE_TYPE (new_var), get_name (new_var));
11764 	    gimple_add_tmp_var (x);
11765 	    SET_DECL_VALUE_EXPR (new_var, x);
11766 	    DECL_HAS_VALUE_EXPR_P (new_var) = 1;
11767 	  }
11768 	break;
11769       }
11770 
11771   if (offloaded)
11772     {
11773       target_nesting_level++;
11774       lower_omp (&tgt_body, ctx);
11775       target_nesting_level--;
11776     }
11777   else if (data_region)
11778     lower_omp (&tgt_body, ctx);
11779 
11780   if (offloaded)
11781     {
11782       /* Declare all the variables created by mapping and the variables
11783 	 declared in the scope of the target body.  */
11784       record_vars_into (ctx->block_vars, child_fn);
11785       maybe_remove_omp_member_access_dummy_vars (tgt_bind);
11786       record_vars_into (gimple_bind_vars (tgt_bind), child_fn);
11787     }
11788 
11789   olist = NULL;
11790   ilist = NULL;
11791   if (ctx->record_type)
11792     {
11793       ctx->sender_decl
11794 	= create_tmp_var (ctx->record_type, ".omp_data_arr");
11795       DECL_NAMELESS (ctx->sender_decl) = 1;
11796       TREE_ADDRESSABLE (ctx->sender_decl) = 1;
11797       t = make_tree_vec (3);
11798       TREE_VEC_ELT (t, 0) = ctx->sender_decl;
11799       TREE_VEC_ELT (t, 1)
11800 	= create_tmp_var (build_array_type_nelts (size_type_node, map_cnt),
11801 			  ".omp_data_sizes");
11802       DECL_NAMELESS (TREE_VEC_ELT (t, 1)) = 1;
11803       TREE_ADDRESSABLE (TREE_VEC_ELT (t, 1)) = 1;
11804       TREE_STATIC (TREE_VEC_ELT (t, 1)) = 1;
11805       tree tkind_type = short_unsigned_type_node;
11806       int talign_shift = 8;
11807       TREE_VEC_ELT (t, 2)
11808 	= create_tmp_var (build_array_type_nelts (tkind_type, map_cnt),
11809 			  ".omp_data_kinds");
11810       DECL_NAMELESS (TREE_VEC_ELT (t, 2)) = 1;
11811       TREE_ADDRESSABLE (TREE_VEC_ELT (t, 2)) = 1;
11812       TREE_STATIC (TREE_VEC_ELT (t, 2)) = 1;
11813       gimple_omp_target_set_data_arg (stmt, t);
11814 
11815       vec<constructor_elt, va_gc> *vsize;
11816       vec<constructor_elt, va_gc> *vkind;
11817       vec_alloc (vsize, map_cnt);
11818       vec_alloc (vkind, map_cnt);
11819       unsigned int map_idx = 0;
11820 
11821       for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
11822 	switch (OMP_CLAUSE_CODE (c))
11823 	  {
11824 	    tree ovar, nc, s, purpose, var, x, type;
11825 	    unsigned int talign;
11826 
11827 	  default:
11828 	    break;
11829 
11830 	  case OMP_CLAUSE_MAP:
11831 	  case OMP_CLAUSE_TO:
11832 	  case OMP_CLAUSE_FROM:
11833 	  oacc_firstprivate_map:
11834 	    nc = c;
11835 	    ovar = OMP_CLAUSE_DECL (c);
11836 	    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
11837 		&& (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
11838 		    || (OMP_CLAUSE_MAP_KIND (c)
11839 			== GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
11840 	      break;
11841 	    if (!DECL_P (ovar))
11842 	      {
11843 		if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
11844 		    && OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
11845 		  {
11846 		    gcc_checking_assert (OMP_CLAUSE_DECL (OMP_CLAUSE_CHAIN (c))
11847 					 == get_base_address (ovar));
11848 		    nc = OMP_CLAUSE_CHAIN (c);
11849 		    ovar = OMP_CLAUSE_DECL (nc);
11850 		  }
11851 		else
11852 		  {
11853 		    tree x = build_sender_ref (ovar, ctx);
11854 		    tree v
11855 		      = build_fold_addr_expr_with_type (ovar, ptr_type_node);
11856 		    gimplify_assign (x, v, &ilist);
11857 		    nc = NULL_TREE;
11858 		  }
11859 	      }
11860 	    else
11861 	      {
11862 		if (DECL_SIZE (ovar)
11863 		    && TREE_CODE (DECL_SIZE (ovar)) != INTEGER_CST)
11864 		  {
11865 		    tree ovar2 = DECL_VALUE_EXPR (ovar);
11866 		    gcc_assert (TREE_CODE (ovar2) == INDIRECT_REF);
11867 		    ovar2 = TREE_OPERAND (ovar2, 0);
11868 		    gcc_assert (DECL_P (ovar2));
11869 		    ovar = ovar2;
11870 		  }
11871 		if (!maybe_lookup_field (ovar, ctx))
11872 		  continue;
11873 	      }
11874 
11875 	    talign = TYPE_ALIGN_UNIT (TREE_TYPE (ovar));
11876 	    if (DECL_P (ovar) && DECL_ALIGN_UNIT (ovar) > talign)
11877 	      talign = DECL_ALIGN_UNIT (ovar);
11878 	    if (nc)
11879 	      {
11880 		var = lookup_decl_in_outer_ctx (ovar, ctx);
11881 		x = build_sender_ref (ovar, ctx);
11882 
11883 		if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
11884 		    && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
11885 		    && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
11886 		    && TREE_CODE (TREE_TYPE (ovar)) == ARRAY_TYPE)
11887 		  {
11888 		    gcc_assert (offloaded);
11889 		    tree avar
11890 		      = create_tmp_var (TREE_TYPE (TREE_TYPE (x)));
11891 		    mark_addressable (avar);
11892 		    gimplify_assign (avar, build_fold_addr_expr (var), &ilist);
11893 		    talign = DECL_ALIGN_UNIT (avar);
11894 		    avar = build_fold_addr_expr (avar);
11895 		    gimplify_assign (x, avar, &ilist);
11896 		  }
11897 		else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
11898 		  {
11899 		    gcc_assert (is_gimple_omp_oacc (ctx->stmt));
11900 		    if (!omp_is_reference (var))
11901 		      {
11902 			if (is_gimple_reg (var)
11903 			    && OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
11904 			  TREE_NO_WARNING (var) = 1;
11905 			var = build_fold_addr_expr (var);
11906 		      }
11907 		    else
11908 		      talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
11909 		    gimplify_assign (x, var, &ilist);
11910 		  }
11911 		else if (is_gimple_reg (var))
11912 		  {
11913 		    gcc_assert (offloaded);
11914 		    tree avar = create_tmp_var (TREE_TYPE (var));
11915 		    mark_addressable (avar);
11916 		    enum gomp_map_kind map_kind = OMP_CLAUSE_MAP_KIND (c);
11917 		    if (GOMP_MAP_COPY_TO_P (map_kind)
11918 			|| map_kind == GOMP_MAP_POINTER
11919 			|| map_kind == GOMP_MAP_TO_PSET
11920 			|| map_kind == GOMP_MAP_FORCE_DEVICEPTR)
11921 		      {
11922 			/* If we need to initialize a temporary
11923 			   with VAR because it is not addressable, and
11924 			   the variable hasn't been initialized yet, then
11925 			   we'll get a warning for the store to avar.
11926 			   Don't warn in that case, the mapping might
11927 			   be implicit.  */
11928 			TREE_NO_WARNING (var) = 1;
11929 			gimplify_assign (avar, var, &ilist);
11930 		      }
11931 		    avar = build_fold_addr_expr (avar);
11932 		    gimplify_assign (x, avar, &ilist);
11933 		    if ((GOMP_MAP_COPY_FROM_P (map_kind)
11934 			 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
11935 			&& !TYPE_READONLY (TREE_TYPE (var)))
11936 		      {
11937 			x = unshare_expr (x);
11938 			x = build_simple_mem_ref (x);
11939 			gimplify_assign (var, x, &olist);
11940 		      }
11941 		  }
11942 		else
11943 		  {
11944 		    /* While MAP is handled explicitly by the FE,
11945 		       for 'target update', only the identified is passed.  */
11946 		    if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FROM
11947 			 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TO)
11948 			&& (omp_is_allocatable_or_ptr (var)
11949 			    && omp_check_optional_argument (var, false)))
11950 		      var = build_fold_indirect_ref (var);
11951 		    else if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FROM
11952 			      && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_TO)
11953 			     || (!omp_is_allocatable_or_ptr (var)
11954 				 && !omp_check_optional_argument (var, false)))
11955 		      var = build_fold_addr_expr (var);
11956 		    gimplify_assign (x, var, &ilist);
11957 		  }
11958 	      }
11959 	    s = NULL_TREE;
11960 	    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
11961 	      {
11962 		gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
11963 		s = TREE_TYPE (ovar);
11964 		if (TREE_CODE (s) == REFERENCE_TYPE
11965 		    || omp_check_optional_argument (ovar, false))
11966 		  s = TREE_TYPE (s);
11967 		s = TYPE_SIZE_UNIT (s);
11968 	      }
11969 	    else
11970 	      s = OMP_CLAUSE_SIZE (c);
11971 	    if (s == NULL_TREE)
11972 	      s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
11973 	    s = fold_convert (size_type_node, s);
11974 	    purpose = size_int (map_idx++);
11975 	    CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
11976 	    if (TREE_CODE (s) != INTEGER_CST)
11977 	      TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
11978 
11979 	    unsigned HOST_WIDE_INT tkind, tkind_zero;
11980 	    switch (OMP_CLAUSE_CODE (c))
11981 	      {
11982 	      case OMP_CLAUSE_MAP:
11983 		tkind = OMP_CLAUSE_MAP_KIND (c);
11984 		tkind_zero = tkind;
11985 		if (OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c))
11986 		  switch (tkind)
11987 		    {
11988 		    case GOMP_MAP_ALLOC:
11989 		    case GOMP_MAP_IF_PRESENT:
11990 		    case GOMP_MAP_TO:
11991 		    case GOMP_MAP_FROM:
11992 		    case GOMP_MAP_TOFROM:
11993 		    case GOMP_MAP_ALWAYS_TO:
11994 		    case GOMP_MAP_ALWAYS_FROM:
11995 		    case GOMP_MAP_ALWAYS_TOFROM:
11996 		    case GOMP_MAP_RELEASE:
11997 		    case GOMP_MAP_FORCE_TO:
11998 		    case GOMP_MAP_FORCE_FROM:
11999 		    case GOMP_MAP_FORCE_TOFROM:
12000 		    case GOMP_MAP_FORCE_PRESENT:
12001 		      tkind_zero = GOMP_MAP_ZERO_LEN_ARRAY_SECTION;
12002 		      break;
12003 		    case GOMP_MAP_DELETE:
12004 		      tkind_zero = GOMP_MAP_DELETE_ZERO_LEN_ARRAY_SECTION;
12005 		    default:
12006 		      break;
12007 		    }
12008 		if (tkind_zero != tkind)
12009 		  {
12010 		    if (integer_zerop (s))
12011 		      tkind = tkind_zero;
12012 		    else if (integer_nonzerop (s))
12013 		      tkind_zero = tkind;
12014 		  }
12015 		break;
12016 	      case OMP_CLAUSE_FIRSTPRIVATE:
12017 		gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
12018 		tkind = GOMP_MAP_TO;
12019 		tkind_zero = tkind;
12020 		break;
12021 	      case OMP_CLAUSE_TO:
12022 		tkind = GOMP_MAP_TO;
12023 		tkind_zero = tkind;
12024 		break;
12025 	      case OMP_CLAUSE_FROM:
12026 		tkind = GOMP_MAP_FROM;
12027 		tkind_zero = tkind;
12028 		break;
12029 	      default:
12030 		gcc_unreachable ();
12031 	      }
12032 	    gcc_checking_assert (tkind
12033 				 < (HOST_WIDE_INT_C (1U) << talign_shift));
12034 	    gcc_checking_assert (tkind_zero
12035 				 < (HOST_WIDE_INT_C (1U) << talign_shift));
12036 	    talign = ceil_log2 (talign);
12037 	    tkind |= talign << talign_shift;
12038 	    tkind_zero |= talign << talign_shift;
12039 	    gcc_checking_assert (tkind
12040 				 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
12041 	    gcc_checking_assert (tkind_zero
12042 				 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
12043 	    if (tkind == tkind_zero)
12044 	      x = build_int_cstu (tkind_type, tkind);
12045 	    else
12046 	      {
12047 		TREE_STATIC (TREE_VEC_ELT (t, 2)) = 0;
12048 		x = build3 (COND_EXPR, tkind_type,
12049 			    fold_build2 (EQ_EXPR, boolean_type_node,
12050 					 unshare_expr (s), size_zero_node),
12051 			    build_int_cstu (tkind_type, tkind_zero),
12052 			    build_int_cstu (tkind_type, tkind));
12053 	      }
12054 	    CONSTRUCTOR_APPEND_ELT (vkind, purpose, x);
12055 	    if (nc && nc != c)
12056 	      c = nc;
12057 	    break;
12058 
12059 	  case OMP_CLAUSE_FIRSTPRIVATE:
12060 	    if (is_gimple_omp_oacc (ctx->stmt))
12061 	      goto oacc_firstprivate_map;
12062 	    ovar = OMP_CLAUSE_DECL (c);
12063 	    if (omp_is_reference (ovar))
12064 	      talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
12065 	    else
12066 	      talign = DECL_ALIGN_UNIT (ovar);
12067 	    var = lookup_decl_in_outer_ctx (ovar, ctx);
12068 	    x = build_sender_ref (ovar, ctx);
12069 	    tkind = GOMP_MAP_FIRSTPRIVATE;
12070 	    type = TREE_TYPE (ovar);
12071 	    if (omp_is_reference (ovar))
12072 	      type = TREE_TYPE (type);
12073 	    if ((INTEGRAL_TYPE_P (type)
12074 		 && TYPE_PRECISION (type) <= POINTER_SIZE)
12075 		|| TREE_CODE (type) == POINTER_TYPE)
12076 	      {
12077 		tkind = GOMP_MAP_FIRSTPRIVATE_INT;
12078 		tree t = var;
12079 		if (omp_is_reference (var))
12080 		  t = build_simple_mem_ref (var);
12081 		else if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
12082 		  TREE_NO_WARNING (var) = 1;
12083 		if (TREE_CODE (type) != POINTER_TYPE)
12084 		  t = fold_convert (pointer_sized_int_node, t);
12085 		t = fold_convert (TREE_TYPE (x), t);
12086 		gimplify_assign (x, t, &ilist);
12087 	      }
12088 	    else if (omp_is_reference (var))
12089 	      gimplify_assign (x, var, &ilist);
12090 	    else if (is_gimple_reg (var))
12091 	      {
12092 		tree avar = create_tmp_var (TREE_TYPE (var));
12093 		mark_addressable (avar);
12094 		if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
12095 		  TREE_NO_WARNING (var) = 1;
12096 		gimplify_assign (avar, var, &ilist);
12097 		avar = build_fold_addr_expr (avar);
12098 		gimplify_assign (x, avar, &ilist);
12099 	      }
12100 	    else
12101 	      {
12102 		var = build_fold_addr_expr (var);
12103 		gimplify_assign (x, var, &ilist);
12104 	      }
12105 	    if (tkind == GOMP_MAP_FIRSTPRIVATE_INT)
12106 	      s = size_int (0);
12107 	    else if (omp_is_reference (ovar))
12108 	      s = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
12109 	    else
12110 	      s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
12111 	    s = fold_convert (size_type_node, s);
12112 	    purpose = size_int (map_idx++);
12113 	    CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
12114 	    if (TREE_CODE (s) != INTEGER_CST)
12115 	      TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
12116 
12117 	    gcc_checking_assert (tkind
12118 				 < (HOST_WIDE_INT_C (1U) << talign_shift));
12119 	    talign = ceil_log2 (talign);
12120 	    tkind |= talign << talign_shift;
12121 	    gcc_checking_assert (tkind
12122 				 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
12123 	    CONSTRUCTOR_APPEND_ELT (vkind, purpose,
12124 				    build_int_cstu (tkind_type, tkind));
12125 	    break;
12126 
12127 	  case OMP_CLAUSE_USE_DEVICE_PTR:
12128 	  case OMP_CLAUSE_USE_DEVICE_ADDR:
12129 	  case OMP_CLAUSE_IS_DEVICE_PTR:
12130 	    ovar = OMP_CLAUSE_DECL (c);
12131 	    var = lookup_decl_in_outer_ctx (ovar, ctx);
12132 
12133 	    if (lang_hooks.decls.omp_array_data (ovar, true))
12134 	      {
12135 		tkind = (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IS_DEVICE_PTR
12136 			 ? GOMP_MAP_USE_DEVICE_PTR : GOMP_MAP_FIRSTPRIVATE_INT);
12137 		x = build_sender_ref ((splay_tree_key) &DECL_NAME (ovar), ctx);
12138 	      }
12139 	    else if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IS_DEVICE_PTR)
12140 	      {
12141 		tkind = GOMP_MAP_USE_DEVICE_PTR;
12142 		x = build_sender_ref ((splay_tree_key) &DECL_UID (ovar), ctx);
12143 	      }
12144 	    else
12145 	      {
12146 		tkind = GOMP_MAP_FIRSTPRIVATE_INT;
12147 		x = build_sender_ref (ovar, ctx);
12148 	      }
12149 
12150 	    if (is_gimple_omp_oacc (ctx->stmt))
12151 	      {
12152 		gcc_assert (tkind == GOMP_MAP_USE_DEVICE_PTR);
12153 
12154 		if (OMP_CLAUSE_USE_DEVICE_PTR_IF_PRESENT (c))
12155 		  tkind = GOMP_MAP_USE_DEVICE_PTR_IF_PRESENT;
12156 	      }
12157 
12158 	    type = TREE_TYPE (ovar);
12159 	    if (lang_hooks.decls.omp_array_data (ovar, true))
12160 	      var = lang_hooks.decls.omp_array_data (ovar, false);
12161 	    else if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
12162 		      && !omp_is_reference (ovar)
12163 		      && !omp_is_allocatable_or_ptr (ovar))
12164 		     || TREE_CODE (type) == ARRAY_TYPE)
12165 	      var = build_fold_addr_expr (var);
12166 	    else
12167 	      {
12168 		if (omp_is_reference (ovar)
12169 		    || omp_check_optional_argument (ovar, false)
12170 		    || omp_is_allocatable_or_ptr (ovar))
12171 		  {
12172 		    type = TREE_TYPE (type);
12173 		    if (TREE_CODE (type) != ARRAY_TYPE
12174 			&& ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_USE_DEVICE_ADDR
12175 			    && !omp_is_allocatable_or_ptr (ovar))
12176 			   || (omp_is_reference (ovar)
12177 			       && omp_is_allocatable_or_ptr (ovar))))
12178 		      var = build_simple_mem_ref (var);
12179 		    var = fold_convert (TREE_TYPE (x), var);
12180 		  }
12181 	      }
12182 	    tree present;
12183 	    present = omp_check_optional_argument (ovar, true);
12184 	    if (present)
12185 	      {
12186 		tree null_label = create_artificial_label (UNKNOWN_LOCATION);
12187 		tree notnull_label = create_artificial_label (UNKNOWN_LOCATION);
12188 		tree opt_arg_label = create_artificial_label (UNKNOWN_LOCATION);
12189 		tree new_x = unshare_expr (x);
12190 		gimplify_expr (&present, &ilist, NULL, is_gimple_val,
12191 			       fb_rvalue);
12192 		gcond *cond = gimple_build_cond_from_tree (present,
12193 							   notnull_label,
12194 							   null_label);
12195 		gimple_seq_add_stmt (&ilist, cond);
12196 		gimple_seq_add_stmt (&ilist, gimple_build_label (null_label));
12197 		gimplify_assign (new_x, null_pointer_node, &ilist);
12198 		gimple_seq_add_stmt (&ilist, gimple_build_goto (opt_arg_label));
12199 		gimple_seq_add_stmt (&ilist,
12200 				     gimple_build_label (notnull_label));
12201 		gimplify_assign (x, var, &ilist);
12202 		gimple_seq_add_stmt (&ilist,
12203 				     gimple_build_label (opt_arg_label));
12204 	      }
12205 	    else
12206 	      gimplify_assign (x, var, &ilist);
12207 	    s = size_int (0);
12208 	    purpose = size_int (map_idx++);
12209 	    CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
12210 	    gcc_checking_assert (tkind
12211 				 < (HOST_WIDE_INT_C (1U) << talign_shift));
12212 	    gcc_checking_assert (tkind
12213 				 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
12214 	    CONSTRUCTOR_APPEND_ELT (vkind, purpose,
12215 				    build_int_cstu (tkind_type, tkind));
12216 	    break;
12217 	  }
12218 
12219       gcc_assert (map_idx == map_cnt);
12220 
12221       DECL_INITIAL (TREE_VEC_ELT (t, 1))
12222 	= build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 1)), vsize);
12223       DECL_INITIAL (TREE_VEC_ELT (t, 2))
12224 	= build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 2)), vkind);
12225       for (int i = 1; i <= 2; i++)
12226 	if (!TREE_STATIC (TREE_VEC_ELT (t, i)))
12227 	  {
12228 	    gimple_seq initlist = NULL;
12229 	    force_gimple_operand (build1 (DECL_EXPR, void_type_node,
12230 					  TREE_VEC_ELT (t, i)),
12231 				  &initlist, true, NULL_TREE);
12232 	    gimple_seq_add_seq (&ilist, initlist);
12233 
12234 	    tree clobber = build_clobber (TREE_TYPE (TREE_VEC_ELT (t, i)));
12235 	    gimple_seq_add_stmt (&olist,
12236 				 gimple_build_assign (TREE_VEC_ELT (t, i),
12237 						      clobber));
12238 	  }
12239 
12240       tree clobber = build_clobber (ctx->record_type);
12241       gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
12242 							clobber));
12243     }
12244 
12245   /* Once all the expansions are done, sequence all the different
12246      fragments inside gimple_omp_body.  */
12247 
12248   new_body = NULL;
12249 
12250   if (offloaded
12251       && ctx->record_type)
12252     {
12253       t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
12254       /* fixup_child_record_type might have changed receiver_decl's type.  */
12255       t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
12256       gimple_seq_add_stmt (&new_body,
12257 	  		   gimple_build_assign (ctx->receiver_decl, t));
12258     }
12259   gimple_seq_add_seq (&new_body, fplist);
12260 
12261   if (offloaded || data_region)
12262     {
12263       tree prev = NULL_TREE;
12264       for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
12265 	switch (OMP_CLAUSE_CODE (c))
12266 	  {
12267 	    tree var, x;
12268 	  default:
12269 	    break;
12270 	  case OMP_CLAUSE_FIRSTPRIVATE:
12271 	    if (is_gimple_omp_oacc (ctx->stmt))
12272 	      break;
12273 	    var = OMP_CLAUSE_DECL (c);
12274 	    if (omp_is_reference (var)
12275 		|| is_gimple_reg_type (TREE_TYPE (var)))
12276 	      {
12277 		tree new_var = lookup_decl (var, ctx);
12278 		tree type;
12279 		type = TREE_TYPE (var);
12280 		if (omp_is_reference (var))
12281 		  type = TREE_TYPE (type);
12282 		if ((INTEGRAL_TYPE_P (type)
12283 		     && TYPE_PRECISION (type) <= POINTER_SIZE)
12284 		    || TREE_CODE (type) == POINTER_TYPE)
12285 		  {
12286 		    x = build_receiver_ref (var, false, ctx);
12287 		    if (TREE_CODE (type) != POINTER_TYPE)
12288 		      x = fold_convert (pointer_sized_int_node, x);
12289 		    x = fold_convert (type, x);
12290 		    gimplify_expr (&x, &new_body, NULL, is_gimple_val,
12291 				   fb_rvalue);
12292 		    if (omp_is_reference (var))
12293 		      {
12294 			tree v = create_tmp_var_raw (type, get_name (var));
12295 			gimple_add_tmp_var (v);
12296 			TREE_ADDRESSABLE (v) = 1;
12297 			gimple_seq_add_stmt (&new_body,
12298 					     gimple_build_assign (v, x));
12299 			x = build_fold_addr_expr (v);
12300 		      }
12301 		    gimple_seq_add_stmt (&new_body,
12302 					 gimple_build_assign (new_var, x));
12303 		  }
12304 		else
12305 		  {
12306 		    x = build_receiver_ref (var, !omp_is_reference (var), ctx);
12307 		    gimplify_expr (&x, &new_body, NULL, is_gimple_val,
12308 				   fb_rvalue);
12309 		    gimple_seq_add_stmt (&new_body,
12310 					 gimple_build_assign (new_var, x));
12311 		  }
12312 	      }
12313 	    else if (is_variable_sized (var))
12314 	      {
12315 		tree pvar = DECL_VALUE_EXPR (var);
12316 		gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
12317 		pvar = TREE_OPERAND (pvar, 0);
12318 		gcc_assert (DECL_P (pvar));
12319 		tree new_var = lookup_decl (pvar, ctx);
12320 		x = build_receiver_ref (var, false, ctx);
12321 		gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
12322 		gimple_seq_add_stmt (&new_body,
12323 				     gimple_build_assign (new_var, x));
12324 	      }
12325 	    break;
12326 	  case OMP_CLAUSE_PRIVATE:
12327 	    if (is_gimple_omp_oacc (ctx->stmt))
12328 	      break;
12329 	    var = OMP_CLAUSE_DECL (c);
12330 	    if (omp_is_reference (var))
12331 	      {
12332 		location_t clause_loc = OMP_CLAUSE_LOCATION (c);
12333 		tree new_var = lookup_decl (var, ctx);
12334 		x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
12335 		if (TREE_CONSTANT (x))
12336 		  {
12337 		    x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
12338 					    get_name (var));
12339 		    gimple_add_tmp_var (x);
12340 		    TREE_ADDRESSABLE (x) = 1;
12341 		    x = build_fold_addr_expr_loc (clause_loc, x);
12342 		  }
12343 		else
12344 		  break;
12345 
12346 		x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
12347 		gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
12348 		gimple_seq_add_stmt (&new_body,
12349 				     gimple_build_assign (new_var, x));
12350 	      }
12351 	    break;
12352 	  case OMP_CLAUSE_USE_DEVICE_PTR:
12353 	  case OMP_CLAUSE_USE_DEVICE_ADDR:
12354 	  case OMP_CLAUSE_IS_DEVICE_PTR:
12355 	    tree new_var;
12356 	    gimple_seq assign_body;
12357 	    bool is_array_data;
12358 	    bool do_optional_check;
12359 	    assign_body = NULL;
12360 	    do_optional_check = false;
12361 	    var = OMP_CLAUSE_DECL (c);
12362 	    is_array_data = lang_hooks.decls.omp_array_data (var, true) != NULL;
12363 
12364 	    if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IS_DEVICE_PTR)
12365 	      x = build_sender_ref (is_array_data
12366 				    ? (splay_tree_key) &DECL_NAME (var)
12367 				    : (splay_tree_key) &DECL_UID (var), ctx);
12368 	    else
12369 	      x = build_receiver_ref (var, false, ctx);
12370 
12371 	    if (is_array_data)
12372 	      {
12373 		bool is_ref = omp_is_reference (var);
12374 		do_optional_check = true;
12375 		/* First, we copy the descriptor data from the host; then
12376 		   we update its data to point to the target address.  */
12377 		new_var = lookup_decl (var, ctx);
12378 		new_var = DECL_VALUE_EXPR (new_var);
12379 		tree v = new_var;
12380 
12381 		if (is_ref)
12382 		  {
12383 		    var = build_fold_indirect_ref (var);
12384 		    gimplify_expr (&var, &assign_body, NULL, is_gimple_val,
12385 				   fb_rvalue);
12386 		    v = create_tmp_var_raw (TREE_TYPE (var), get_name (var));
12387 		    gimple_add_tmp_var (v);
12388 		    TREE_ADDRESSABLE (v) = 1;
12389 		    gimple_seq_add_stmt (&assign_body,
12390 					 gimple_build_assign (v, var));
12391 		    tree rhs = build_fold_addr_expr (v);
12392 		    gimple_seq_add_stmt (&assign_body,
12393 					 gimple_build_assign (new_var, rhs));
12394 		  }
12395 		else
12396 		  gimple_seq_add_stmt (&assign_body,
12397 				       gimple_build_assign (new_var, var));
12398 
12399 		tree v2 = lang_hooks.decls.omp_array_data (unshare_expr (v), false);
12400 		gcc_assert (v2);
12401 		gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
12402 		gimple_seq_add_stmt (&assign_body,
12403 				     gimple_build_assign (v2, x));
12404 	      }
12405 	    else if (is_variable_sized (var))
12406 	      {
12407 		tree pvar = DECL_VALUE_EXPR (var);
12408 		gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
12409 		pvar = TREE_OPERAND (pvar, 0);
12410 		gcc_assert (DECL_P (pvar));
12411 		new_var = lookup_decl (pvar, ctx);
12412 		gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
12413 		gimple_seq_add_stmt (&assign_body,
12414 				     gimple_build_assign (new_var, x));
12415 	      }
12416 	    else if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
12417 		      && !omp_is_reference (var)
12418 		      && !omp_is_allocatable_or_ptr (var))
12419 		     || TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
12420 	      {
12421 		new_var = lookup_decl (var, ctx);
12422 		new_var = DECL_VALUE_EXPR (new_var);
12423 		gcc_assert (TREE_CODE (new_var) == MEM_REF);
12424 		new_var = TREE_OPERAND (new_var, 0);
12425 		gcc_assert (DECL_P (new_var));
12426 		gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
12427 		gimple_seq_add_stmt (&assign_body,
12428 				     gimple_build_assign (new_var, x));
12429 	      }
12430 	    else
12431 	      {
12432 		tree type = TREE_TYPE (var);
12433 		new_var = lookup_decl (var, ctx);
12434 		if (omp_is_reference (var))
12435 		  {
12436 		    type = TREE_TYPE (type);
12437 		    if (TREE_CODE (type) != ARRAY_TYPE
12438 			&& (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_USE_DEVICE_ADDR
12439 			    || (omp_is_reference (var)
12440 				&& omp_is_allocatable_or_ptr (var))))
12441 		      {
12442 			tree v = create_tmp_var_raw (type, get_name (var));
12443 			gimple_add_tmp_var (v);
12444 			TREE_ADDRESSABLE (v) = 1;
12445 			x = fold_convert (type, x);
12446 			gimplify_expr (&x, &assign_body, NULL, is_gimple_val,
12447 				       fb_rvalue);
12448 			gimple_seq_add_stmt (&assign_body,
12449 					     gimple_build_assign (v, x));
12450 			x = build_fold_addr_expr (v);
12451 			do_optional_check = true;
12452 		      }
12453 		  }
12454 		new_var = DECL_VALUE_EXPR (new_var);
12455 		x = fold_convert (TREE_TYPE (new_var), x);
12456 		gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
12457 		gimple_seq_add_stmt (&assign_body,
12458 				     gimple_build_assign (new_var, x));
12459 	      }
12460 	    tree present;
12461 	    present = (do_optional_check
12462 		       ? omp_check_optional_argument (OMP_CLAUSE_DECL (c), true)
12463 		       : NULL_TREE);
12464 	    if (present)
12465 	      {
12466 		tree null_label = create_artificial_label (UNKNOWN_LOCATION);
12467 		tree notnull_label = create_artificial_label (UNKNOWN_LOCATION);
12468 		tree opt_arg_label = create_artificial_label (UNKNOWN_LOCATION);
12469 		glabel *null_glabel = gimple_build_label (null_label);
12470 		glabel *notnull_glabel = gimple_build_label (notnull_label);
12471 		ggoto *opt_arg_ggoto = gimple_build_goto (opt_arg_label);
12472 		gimplify_expr (&x, &new_body, NULL, is_gimple_val,
12473 					   fb_rvalue);
12474 		gimplify_expr (&present, &new_body, NULL, is_gimple_val,
12475 			       fb_rvalue);
12476 		gcond *cond = gimple_build_cond_from_tree (present,
12477 							   notnull_label,
12478 							   null_label);
12479 		gimple_seq_add_stmt (&new_body, cond);
12480 		gimple_seq_add_stmt (&new_body, null_glabel);
12481 		gimplify_assign (new_var, null_pointer_node, &new_body);
12482 		gimple_seq_add_stmt (&new_body, opt_arg_ggoto);
12483 		gimple_seq_add_stmt (&new_body, notnull_glabel);
12484 		gimple_seq_add_seq (&new_body, assign_body);
12485 		gimple_seq_add_stmt (&new_body,
12486 				     gimple_build_label (opt_arg_label));
12487 	      }
12488 	    else
12489 	      gimple_seq_add_seq (&new_body, assign_body);
12490 	    break;
12491 	  }
12492       /* Handle GOMP_MAP_FIRSTPRIVATE_{POINTER,REFERENCE} in second pass,
12493 	 so that firstprivate vars holding OMP_CLAUSE_SIZE if needed
12494 	 are already handled.  Similarly OMP_CLAUSE_PRIVATE for VLAs
12495 	 or references to VLAs.  */
12496       for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
12497 	switch (OMP_CLAUSE_CODE (c))
12498 	  {
12499 	    tree var;
12500 	  default:
12501 	    break;
12502 	  case OMP_CLAUSE_MAP:
12503 	    if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
12504 		|| OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
12505 	      {
12506 		location_t clause_loc = OMP_CLAUSE_LOCATION (c);
12507 		poly_int64 offset = 0;
12508 		gcc_assert (prev);
12509 		var = OMP_CLAUSE_DECL (c);
12510 		if (DECL_P (var)
12511 		    && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
12512 		    && is_global_var (maybe_lookup_decl_in_outer_ctx (var,
12513 								      ctx))
12514 		    && varpool_node::get_create (var)->offloadable)
12515 		  break;
12516 		if (TREE_CODE (var) == INDIRECT_REF
12517 		    && TREE_CODE (TREE_OPERAND (var, 0)) == COMPONENT_REF)
12518 		  var = TREE_OPERAND (var, 0);
12519 		if (TREE_CODE (var) == COMPONENT_REF)
12520 		  {
12521 		    var = get_addr_base_and_unit_offset (var, &offset);
12522 		    gcc_assert (var != NULL_TREE && DECL_P (var));
12523 		  }
12524 		else if (DECL_SIZE (var)
12525 			 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
12526 		  {
12527 		    tree var2 = DECL_VALUE_EXPR (var);
12528 		    gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
12529 		    var2 = TREE_OPERAND (var2, 0);
12530 		    gcc_assert (DECL_P (var2));
12531 		    var = var2;
12532 		  }
12533 		tree new_var = lookup_decl (var, ctx), x;
12534 		tree type = TREE_TYPE (new_var);
12535 		bool is_ref;
12536 		if (TREE_CODE (OMP_CLAUSE_DECL (c)) == INDIRECT_REF
12537 		    && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
12538 			== COMPONENT_REF))
12539 		  {
12540 		    type = TREE_TYPE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0));
12541 		    is_ref = true;
12542 		    new_var = build2 (MEM_REF, type,
12543 				      build_fold_addr_expr (new_var),
12544 				      build_int_cst (build_pointer_type (type),
12545 						     offset));
12546 		  }
12547 		else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPONENT_REF)
12548 		  {
12549 		    type = TREE_TYPE (OMP_CLAUSE_DECL (c));
12550 		    is_ref = TREE_CODE (type) == REFERENCE_TYPE;
12551 		    new_var = build2 (MEM_REF, type,
12552 				      build_fold_addr_expr (new_var),
12553 				      build_int_cst (build_pointer_type (type),
12554 						     offset));
12555 		  }
12556 		else
12557 		  is_ref = omp_is_reference (var);
12558 		if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
12559 		  is_ref = false;
12560 		bool ref_to_array = false;
12561 		if (is_ref)
12562 		  {
12563 		    type = TREE_TYPE (type);
12564 		    if (TREE_CODE (type) == ARRAY_TYPE)
12565 		      {
12566 			type = build_pointer_type (type);
12567 			ref_to_array = true;
12568 		      }
12569 		  }
12570 		else if (TREE_CODE (type) == ARRAY_TYPE)
12571 		  {
12572 		    tree decl2 = DECL_VALUE_EXPR (new_var);
12573 		    gcc_assert (TREE_CODE (decl2) == MEM_REF);
12574 		    decl2 = TREE_OPERAND (decl2, 0);
12575 		    gcc_assert (DECL_P (decl2));
12576 		    new_var = decl2;
12577 		    type = TREE_TYPE (new_var);
12578 		  }
12579 		x = build_receiver_ref (OMP_CLAUSE_DECL (prev), false, ctx);
12580 		x = fold_convert_loc (clause_loc, type, x);
12581 		if (!integer_zerop (OMP_CLAUSE_SIZE (c)))
12582 		  {
12583 		    tree bias = OMP_CLAUSE_SIZE (c);
12584 		    if (DECL_P (bias))
12585 		      bias = lookup_decl (bias, ctx);
12586 		    bias = fold_convert_loc (clause_loc, sizetype, bias);
12587 		    bias = fold_build1_loc (clause_loc, NEGATE_EXPR, sizetype,
12588 					    bias);
12589 		    x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
12590 					 TREE_TYPE (x), x, bias);
12591 		  }
12592 		if (ref_to_array)
12593 		  x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
12594 		gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
12595 		if (is_ref && !ref_to_array)
12596 		  {
12597 		    tree t = create_tmp_var_raw (type, get_name (var));
12598 		    gimple_add_tmp_var (t);
12599 		    TREE_ADDRESSABLE (t) = 1;
12600 		    gimple_seq_add_stmt (&new_body,
12601 					 gimple_build_assign (t, x));
12602 		    x = build_fold_addr_expr_loc (clause_loc, t);
12603 		  }
12604 		gimple_seq_add_stmt (&new_body,
12605 				     gimple_build_assign (new_var, x));
12606 		prev = NULL_TREE;
12607 	      }
12608 	    else if (OMP_CLAUSE_CHAIN (c)
12609 		     && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c))
12610 			== OMP_CLAUSE_MAP
12611 		     && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
12612 			 == GOMP_MAP_FIRSTPRIVATE_POINTER
12613 			 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
12614 			     == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
12615 	      prev = c;
12616 	    break;
12617 	  case OMP_CLAUSE_PRIVATE:
12618 	    var = OMP_CLAUSE_DECL (c);
12619 	    if (is_variable_sized (var))
12620 	      {
12621 		location_t clause_loc = OMP_CLAUSE_LOCATION (c);
12622 		tree new_var = lookup_decl (var, ctx);
12623 		tree pvar = DECL_VALUE_EXPR (var);
12624 		gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
12625 		pvar = TREE_OPERAND (pvar, 0);
12626 		gcc_assert (DECL_P (pvar));
12627 		tree new_pvar = lookup_decl (pvar, ctx);
12628 		tree atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
12629 		tree al = size_int (DECL_ALIGN (var));
12630 		tree x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
12631 		x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
12632 		x = fold_convert_loc (clause_loc, TREE_TYPE (new_pvar), x);
12633 		gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
12634 		gimple_seq_add_stmt (&new_body,
12635 				     gimple_build_assign (new_pvar, x));
12636 	      }
12637 	    else if (omp_is_reference (var) && !is_gimple_omp_oacc (ctx->stmt))
12638 	      {
12639 		location_t clause_loc = OMP_CLAUSE_LOCATION (c);
12640 		tree new_var = lookup_decl (var, ctx);
12641 		tree x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
12642 		if (TREE_CONSTANT (x))
12643 		  break;
12644 		else
12645 		  {
12646 		    tree atmp
12647 		      = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
12648 		    tree rtype = TREE_TYPE (TREE_TYPE (new_var));
12649 		    tree al = size_int (TYPE_ALIGN (rtype));
12650 		    x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
12651 		  }
12652 
12653 		x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
12654 		gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
12655 		gimple_seq_add_stmt (&new_body,
12656 				     gimple_build_assign (new_var, x));
12657 	      }
12658 	    break;
12659 	  }
12660 
12661       gimple_seq fork_seq = NULL;
12662       gimple_seq join_seq = NULL;
12663 
12664       if (offloaded && is_gimple_omp_oacc (ctx->stmt))
12665 	{
12666 	  /* If there are reductions on the offloaded region itself, treat
12667 	     them as a dummy GANG loop.  */
12668 	  tree level = build_int_cst (integer_type_node, GOMP_DIM_GANG);
12669 
12670 	  lower_oacc_reductions (gimple_location (ctx->stmt), clauses, level,
12671 				 false, NULL, NULL, &fork_seq, &join_seq, ctx);
12672 	}
12673 
12674       gimple_seq_add_seq (&new_body, fork_seq);
12675       gimple_seq_add_seq (&new_body, tgt_body);
12676       gimple_seq_add_seq (&new_body, join_seq);
12677 
12678       if (offloaded)
12679 	{
12680 	  new_body = maybe_catch_exception (new_body);
12681 	  gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
12682 	}
12683       gimple_omp_set_body (stmt, new_body);
12684     }
12685 
12686   bind = gimple_build_bind (NULL, NULL,
12687 			    tgt_bind ? gimple_bind_block (tgt_bind)
12688 				     : NULL_TREE);
12689   gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
12690   gimple_bind_add_seq (bind, ilist);
12691   gimple_bind_add_stmt (bind, stmt);
12692   gimple_bind_add_seq (bind, olist);
12693 
12694   pop_gimplify_context (NULL);
12695 
12696   if (dep_bind)
12697     {
12698       gimple_bind_add_seq (dep_bind, dep_ilist);
12699       gimple_bind_add_stmt (dep_bind, bind);
12700       gimple_bind_add_seq (dep_bind, dep_olist);
12701       pop_gimplify_context (dep_bind);
12702     }
12703 }
12704 
12705 /* Expand code for an OpenMP teams directive.  */
12706 
12707 static void
lower_omp_teams(gimple_stmt_iterator * gsi_p,omp_context * ctx)12708 lower_omp_teams (gimple_stmt_iterator *gsi_p, omp_context *ctx)
12709 {
12710   gomp_teams *teams_stmt = as_a <gomp_teams *> (gsi_stmt (*gsi_p));
12711   push_gimplify_context ();
12712 
12713   tree block = make_node (BLOCK);
12714   gbind *bind = gimple_build_bind (NULL, NULL, block);
12715   gsi_replace (gsi_p, bind, true);
12716   gimple_seq bind_body = NULL;
12717   gimple_seq dlist = NULL;
12718   gimple_seq olist = NULL;
12719 
12720   tree num_teams = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
12721 				    OMP_CLAUSE_NUM_TEAMS);
12722   if (num_teams == NULL_TREE)
12723     num_teams = build_int_cst (unsigned_type_node, 0);
12724   else
12725     {
12726       num_teams = OMP_CLAUSE_NUM_TEAMS_EXPR (num_teams);
12727       num_teams = fold_convert (unsigned_type_node, num_teams);
12728       gimplify_expr (&num_teams, &bind_body, NULL, is_gimple_val, fb_rvalue);
12729     }
12730   tree thread_limit = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
12731 				       OMP_CLAUSE_THREAD_LIMIT);
12732   if (thread_limit == NULL_TREE)
12733     thread_limit = build_int_cst (unsigned_type_node, 0);
12734   else
12735     {
12736       thread_limit = OMP_CLAUSE_THREAD_LIMIT_EXPR (thread_limit);
12737       thread_limit = fold_convert (unsigned_type_node, thread_limit);
12738       gimplify_expr (&thread_limit, &bind_body, NULL, is_gimple_val,
12739 		     fb_rvalue);
12740     }
12741 
12742   lower_rec_input_clauses (gimple_omp_teams_clauses (teams_stmt),
12743 			   &bind_body, &dlist, ctx, NULL);
12744   lower_omp (gimple_omp_body_ptr (teams_stmt), ctx);
12745   lower_reduction_clauses (gimple_omp_teams_clauses (teams_stmt), &olist,
12746 			   NULL, ctx);
12747   if (!gimple_omp_teams_grid_phony (teams_stmt))
12748     {
12749       gimple_seq_add_stmt (&bind_body, teams_stmt);
12750       location_t loc = gimple_location (teams_stmt);
12751       tree decl = builtin_decl_explicit (BUILT_IN_GOMP_TEAMS);
12752       gimple *call = gimple_build_call (decl, 2, num_teams, thread_limit);
12753       gimple_set_location (call, loc);
12754       gimple_seq_add_stmt (&bind_body, call);
12755     }
12756 
12757   gimple_seq_add_seq (&bind_body, gimple_omp_body (teams_stmt));
12758   gimple_omp_set_body (teams_stmt, NULL);
12759   gimple_seq_add_seq (&bind_body, olist);
12760   gimple_seq_add_seq (&bind_body, dlist);
12761   if (!gimple_omp_teams_grid_phony (teams_stmt))
12762     gimple_seq_add_stmt (&bind_body, gimple_build_omp_return (true));
12763   gimple_bind_set_body (bind, bind_body);
12764 
12765   pop_gimplify_context (bind);
12766 
12767   gimple_bind_append_vars (bind, ctx->block_vars);
12768   BLOCK_VARS (block) = ctx->block_vars;
12769   if (BLOCK_VARS (block))
12770     TREE_USED (block) = 1;
12771 }
12772 
12773 /* Expand code within an artificial GIMPLE_OMP_GRID_BODY OMP construct.  */
12774 
12775 static void
lower_omp_grid_body(gimple_stmt_iterator * gsi_p,omp_context * ctx)12776 lower_omp_grid_body (gimple_stmt_iterator *gsi_p, omp_context *ctx)
12777 {
12778   gimple *stmt = gsi_stmt (*gsi_p);
12779   lower_omp (gimple_omp_body_ptr (stmt), ctx);
12780   gimple_seq_add_stmt (gimple_omp_body_ptr (stmt),
12781 		       gimple_build_omp_return (false));
12782 }
12783 
12784 
12785 /* Callback for lower_omp_1.  Return non-NULL if *tp needs to be
12786    regimplified.  If DATA is non-NULL, lower_omp_1 is outside
12787    of OMP context, but with task_shared_vars set.  */
12788 
12789 static tree
lower_omp_regimplify_p(tree * tp,int * walk_subtrees,void * data)12790 lower_omp_regimplify_p (tree *tp, int *walk_subtrees,
12791     			void *data)
12792 {
12793   tree t = *tp;
12794 
12795   /* Any variable with DECL_VALUE_EXPR needs to be regimplified.  */
12796   if ((VAR_P (t) || TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
12797       && data == NULL
12798       && DECL_HAS_VALUE_EXPR_P (t))
12799     return t;
12800 
12801   if (task_shared_vars
12802       && DECL_P (t)
12803       && bitmap_bit_p (task_shared_vars, DECL_UID (t)))
12804     return t;
12805 
12806   /* If a global variable has been privatized, TREE_CONSTANT on
12807      ADDR_EXPR might be wrong.  */
12808   if (data == NULL && TREE_CODE (t) == ADDR_EXPR)
12809     recompute_tree_invariant_for_addr_expr (t);
12810 
12811   *walk_subtrees = !IS_TYPE_OR_DECL_P (t);
12812   return NULL_TREE;
12813 }
12814 
12815 /* Data to be communicated between lower_omp_regimplify_operands and
12816    lower_omp_regimplify_operands_p.  */
12817 
struct lower_omp_regimplify_operands_data
{
  /* Context in which replacement decls are looked up (via
     maybe_lookup_decl in lower_omp_regimplify_operands_p).  */
  omp_context *ctx;
  /* Pairs of <saved DECL_VALUE_EXPR, decl> pushed while walking the
     statement's operands, so the caller can restore the original
     DECL_VALUE_EXPRs once regimplification is done.  */
  vec<tree> *decls;
};
12823 
12824 /* Helper function for lower_omp_regimplify_operands.  Find
12825    omp_member_access_dummy_var vars and adjust temporarily their
12826    DECL_VALUE_EXPRs if needed.  */
12827 
static tree
lower_omp_regimplify_operands_p (tree *tp, int *walk_subtrees,
				 void *data)
{
  /* Non-NULL iff *TP is (based on) an omp_member_access_dummy_var; T is
     then the underlying dummy VAR_DECL.  */
  tree t = omp_member_access_dummy_var (*tp);
  if (t)
    {
      struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
      lower_omp_regimplify_operands_data *ldata
	= (lower_omp_regimplify_operands_data *) wi->info;
      tree o = maybe_lookup_decl (t, ldata->ctx);
      /* If the context remaps T to a different decl O, rewrite *TP's
	 DECL_VALUE_EXPR in terms of O for the duration of the
	 regimplification.  */
      if (o != t)
	{
	  /* Save the old value expr and the decl, in this order; the
	     caller (lower_omp_regimplify_operands) pops them in reverse
	     to restore.  */
	  ldata->decls->safe_push (DECL_VALUE_EXPR (*tp));
	  ldata->decls->safe_push (*tp);
	  tree v = unshare_and_remap (DECL_VALUE_EXPR (*tp), t, o);
	  SET_DECL_VALUE_EXPR (*tp, v);
	}
    }
  /* Don't descend into types or other decls.  */
  *walk_subtrees = !IS_TYPE_OR_DECL_P (*tp);
  return NULL_TREE;
}
12850 
12851 /* Wrapper around gimple_regimplify_operands that adjusts DECL_VALUE_EXPRs
12852    of omp_member_access_dummy_var vars during regimplification.  */
12853 
12854 static void
lower_omp_regimplify_operands(omp_context * ctx,gimple * stmt,gimple_stmt_iterator * gsi_p)12855 lower_omp_regimplify_operands (omp_context *ctx, gimple *stmt,
12856 			       gimple_stmt_iterator *gsi_p)
12857 {
12858   auto_vec<tree, 10> decls;
12859   if (ctx)
12860     {
12861       struct walk_stmt_info wi;
12862       memset (&wi, '\0', sizeof (wi));
12863       struct lower_omp_regimplify_operands_data data;
12864       data.ctx = ctx;
12865       data.decls = &decls;
12866       wi.info = &data;
12867       walk_gimple_op (stmt, lower_omp_regimplify_operands_p, &wi);
12868     }
12869   gimple_regimplify_operands (stmt, gsi_p);
12870   while (!decls.is_empty ())
12871     {
12872       tree t = decls.pop ();
12873       tree v = decls.pop ();
12874       SET_DECL_VALUE_EXPR (t, v);
12875     }
12876 }
12877 
/* Lower a single statement at *GSI_P inside OMP context CTX (NULL when not
   within any OMP construct): dispatch OMP directives to their specific
   lowering routines, recurse into statements with bodies, and regimplify
   operands of plain statements that mention privatized variables.  */

static void
lower_omp_1 (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  gimple *stmt = gsi_stmt (*gsi_p);
  struct walk_stmt_info wi;
  gcall *call_stmt;

  if (gimple_has_location (stmt))
    input_location = gimple_location (stmt);

  /* WI is only consulted via the CTX ? NULL : &WI arguments below, and
     those walks only run when CTX or TASK_SHARED_VARS is set, so this
     conditional memset suffices.  */
  if (task_shared_vars)
    memset (&wi, '\0', sizeof (wi));

  /* If we have issued syntax errors, avoid doing any heavy lifting.
     Just replace the OMP directives with a NOP to avoid
     confusing RTL expansion.  */
  if (seen_error () && is_gimple_omp (stmt))
    {
      gsi_replace (gsi_p, gimple_build_nop (), true);
      return;
    }

  switch (gimple_code (stmt))
    {
    case GIMPLE_COND:
      {
	gcond *cond_stmt = as_a <gcond *> (stmt);
	/* Regimplify the condition operands if either references a
	   variable with a DECL_VALUE_EXPR or a task-shared variable.  */
	if ((ctx || task_shared_vars)
	    && (walk_tree (gimple_cond_lhs_ptr (cond_stmt),
			   lower_omp_regimplify_p,
			   ctx ? NULL : &wi, NULL)
		|| walk_tree (gimple_cond_rhs_ptr (cond_stmt),
			      lower_omp_regimplify_p,
			      ctx ? NULL : &wi, NULL)))
	  lower_omp_regimplify_operands (ctx, cond_stmt, gsi_p);
      }
      break;
    case GIMPLE_CATCH:
      lower_omp (gimple_catch_handler_ptr (as_a <gcatch *> (stmt)), ctx);
      break;
    case GIMPLE_EH_FILTER:
      lower_omp (gimple_eh_filter_failure_ptr (stmt), ctx);
      break;
    case GIMPLE_TRY:
      lower_omp (gimple_try_eval_ptr (stmt), ctx);
      lower_omp (gimple_try_cleanup_ptr (stmt), ctx);
      break;
    case GIMPLE_TRANSACTION:
      lower_omp (gimple_transaction_body_ptr (as_a <gtransaction *> (stmt)),
		 ctx);
      break;
    case GIMPLE_BIND:
      lower_omp (gimple_bind_body_ptr (as_a <gbind *> (stmt)), ctx);
      maybe_remove_omp_member_access_dummy_vars (as_a <gbind *> (stmt));
      break;
    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      if (ctx->cancellable)
	ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
      lower_omp_taskreg (gsi_p, ctx);
      break;
    case GIMPLE_OMP_FOR:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      if (ctx->cancellable)
	ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
      lower_omp_for (gsi_p, ctx);
      break;
    case GIMPLE_OMP_SECTIONS:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      if (ctx->cancellable)
	ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
      lower_omp_sections (gsi_p, ctx);
      break;
    case GIMPLE_OMP_SINGLE:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_single (gsi_p, ctx);
      break;
    case GIMPLE_OMP_MASTER:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_master (gsi_p, ctx);
      break;
    case GIMPLE_OMP_TASKGROUP:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_taskgroup (gsi_p, ctx);
      break;
    case GIMPLE_OMP_ORDERED:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_ordered (gsi_p, ctx);
      break;
    case GIMPLE_OMP_SCAN:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_scan (gsi_p, ctx);
      break;
    case GIMPLE_OMP_CRITICAL:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_critical (gsi_p, ctx);
      break;
    case GIMPLE_OMP_ATOMIC_LOAD:
      if ((ctx || task_shared_vars)
	  && walk_tree (gimple_omp_atomic_load_rhs_ptr (
			  as_a <gomp_atomic_load *> (stmt)),
			lower_omp_regimplify_p, ctx ? NULL : &wi, NULL))
	lower_omp_regimplify_operands (ctx, stmt, gsi_p);
      break;
    case GIMPLE_OMP_TARGET:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_target (gsi_p, ctx);
      break;
    case GIMPLE_OMP_TEAMS:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      /* Host teams constructs are lowered like parallel/task regions.  */
      if (gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
	lower_omp_taskreg (gsi_p, ctx);
      else
	lower_omp_teams (gsi_p, ctx);
      break;
    case GIMPLE_OMP_GRID_BODY:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_grid_body (gsi_p, ctx);
      break;
    case GIMPLE_CALL:
      /* Rewrite GOMP_barrier/GOMP_cancel/GOMP_cancellation_point calls
	 inside cancellable regions into the cancellable variants followed
	 by a conditional branch to the region's cancel label.  */
      tree fndecl;
      call_stmt = as_a <gcall *> (stmt);
      fndecl = gimple_call_fndecl (call_stmt);
      if (fndecl
	  && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
	switch (DECL_FUNCTION_CODE (fndecl))
	  {
	  case BUILT_IN_GOMP_BARRIER:
	    if (ctx == NULL)
	      break;
	    /* FALLTHRU */
	  case BUILT_IN_GOMP_CANCEL:
	  case BUILT_IN_GOMP_CANCELLATION_POINT:
	    omp_context *cctx;
	    cctx = ctx;
	    /* Sections are cancelled via their enclosing sections
	       construct.  */
	    if (gimple_code (cctx->stmt) == GIMPLE_OMP_SECTION)
	      cctx = cctx->outer;
	    gcc_assert (gimple_call_lhs (call_stmt) == NULL_TREE);
	    if (!cctx->cancellable)
	      {
		/* In a non-cancellable region a cancellation point is a
		   no-op; drop it.  Barriers and cancels are kept as-is.  */
		if (DECL_FUNCTION_CODE (fndecl)
		    == BUILT_IN_GOMP_CANCELLATION_POINT)
		  {
		    stmt = gimple_build_nop ();
		    gsi_replace (gsi_p, stmt, false);
		  }
		break;
	      }
	    if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_GOMP_BARRIER)
	      {
		fndecl = builtin_decl_explicit (BUILT_IN_GOMP_BARRIER_CANCEL);
		gimple_call_set_fndecl (call_stmt, fndecl);
		gimple_call_set_fntype (call_stmt, TREE_TYPE (fndecl));
	      }
	    /* Capture the call's boolean result and branch to the cancel
	       label when cancellation was observed.  */
	    tree lhs;
	    lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (fndecl)));
	    gimple_call_set_lhs (call_stmt, lhs);
	    tree fallthru_label;
	    fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
	    gimple *g;
	    g = gimple_build_label (fallthru_label);
	    gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
	    g = gimple_build_cond (NE_EXPR, lhs,
				   fold_convert (TREE_TYPE (lhs),
						 boolean_false_node),
				   cctx->cancel_label, fallthru_label);
	    gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
	    break;
	  default:
	    break;
	  }
      goto regimplify;

    case GIMPLE_ASSIGN:
      /* For stores to lastprivate(conditional:) variables, also record the
	 current iteration into the associated _condtemp_ tracking var.  */
      for (omp_context *up = ctx; up; up = up->outer)
	{
	  if (gimple_code (up->stmt) == GIMPLE_OMP_ORDERED
	      || gimple_code (up->stmt) == GIMPLE_OMP_CRITICAL
	      || gimple_code (up->stmt) == GIMPLE_OMP_TASKGROUP
	      || gimple_code (up->stmt) == GIMPLE_OMP_SECTION
	      || gimple_code (up->stmt) == GIMPLE_OMP_SCAN
	      || (gimple_code (up->stmt) == GIMPLE_OMP_TARGET
		  && (gimple_omp_target_kind (up->stmt)
		      == GF_OMP_TARGET_KIND_DATA)))
	    continue;
	  else if (!up->lastprivate_conditional_map)
	    break;
	  tree lhs = get_base_address (gimple_assign_lhs (stmt));
	  if (TREE_CODE (lhs) == MEM_REF
	      && DECL_P (TREE_OPERAND (lhs, 0))
	      && TREE_CODE (TREE_TYPE (TREE_OPERAND (lhs,
						     0))) == REFERENCE_TYPE)
	    lhs = TREE_OPERAND (lhs, 0);
	  if (DECL_P (lhs))
	    if (tree *v = up->lastprivate_conditional_map->get (lhs))
	      {
		tree clauses;
		if (up->combined_into_simd_safelen1)
		  {
		    up = up->outer;
		    if (gimple_code (up->stmt) == GIMPLE_OMP_SCAN)
		      up = up->outer;
		  }
		if (gimple_code (up->stmt) == GIMPLE_OMP_FOR)
		  clauses = gimple_omp_for_clauses (up->stmt);
		else
		  clauses = gimple_omp_sections_clauses (up->stmt);
		tree c = omp_find_clause (clauses, OMP_CLAUSE__CONDTEMP_);
		if (!OMP_CLAUSE__CONDTEMP__ITER (c))
		  c = omp_find_clause (OMP_CLAUSE_CHAIN (c),
				       OMP_CLAUSE__CONDTEMP_);
		gcc_assert (OMP_CLAUSE__CONDTEMP__ITER (c));
		gimple *g = gimple_build_assign (*v, OMP_CLAUSE_DECL (c));
		gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
	      }
	}
      /* FALLTHRU */

    default:
    regimplify:
      if ((ctx || task_shared_vars)
	  && walk_gimple_op (stmt, lower_omp_regimplify_p,
			     ctx ? NULL : &wi))
	{
	  /* Just remove clobbers, this should happen only if we have
	     "privatized" local addressable variables in SIMD regions,
	     the clobber isn't needed in that case and gimplifying address
	     of the ARRAY_REF into a pointer and creating MEM_REF based
	     clobber would create worse code than we get with the clobber
	     dropped.  */
	  if (gimple_clobber_p (stmt))
	    {
	      gsi_replace (gsi_p, gimple_build_nop (), true);
	      break;
	    }
	  lower_omp_regimplify_operands (ctx, stmt, gsi_p);
	}
      break;
    }
}
13131 
13132 static void
lower_omp(gimple_seq * body,omp_context * ctx)13133 lower_omp (gimple_seq *body, omp_context *ctx)
13134 {
13135   location_t saved_location = input_location;
13136   gimple_stmt_iterator gsi;
13137   for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
13138     lower_omp_1 (&gsi, ctx);
13139   /* During gimplification, we haven't folded statments inside offloading
13140      or taskreg regions (gimplify.c:maybe_fold_stmt); do that now.  */
13141   if (target_nesting_level || taskreg_nesting_level)
13142     for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
13143       fold_stmt (&gsi);
13144   input_location = saved_location;
13145 }
13146 
13147 /* Main entry point.  */
13148 
static unsigned int
execute_lower_omp (void)
{
  gimple_seq body;
  int i;
  omp_context *ctx;

  /* This pass always runs, to provide PROP_gimple_lomp.
     But often, there is nothing to do.  */
  if (flag_openacc == 0 && flag_openmp == 0
      && flag_openmp_simd == 0)
    return 0;

  all_contexts = splay_tree_new (splay_tree_compare_pointers, 0,
				 delete_omp_context);

  body = gimple_body (current_function_decl);

  /* When HSA offloading is requested, transform target regions into the
     gridified form first.  */
  if (hsa_gen_requested_p ())
    omp_grid_gridify_all_targets (&body);

  /* Phase 1: build omp_contexts for every OMP construct.  */
  scan_omp (&body, NULL);
  gcc_assert (taskreg_nesting_level == 0);
  FOR_EACH_VEC_ELT (taskreg_contexts, i, ctx)
    finish_taskreg_scan (ctx);
  taskreg_contexts.release ();

  /* Phase 2: lower the constructs, but only if any were found.  */
  if (all_contexts->root)
    {
      if (task_shared_vars)
	push_gimplify_context ();
      lower_omp (&body, NULL);
      if (task_shared_vars)
	pop_gimplify_context (NULL);
    }

  /* Release pass-global state.  */
  if (all_contexts)
    {
      splay_tree_delete (all_contexts);
      all_contexts = NULL;
    }
  BITMAP_FREE (task_shared_vars);
  BITMAP_FREE (global_nonaddressable_vars);

  /* If current function is a method, remove artificial dummy VAR_DECL created
     for non-static data member privatization, they aren't needed for
     debuginfo nor anything else, have been already replaced everywhere in the
     IL and cause problems with LTO.  */
  if (DECL_ARGUMENTS (current_function_decl)
      && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
      && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
	  == POINTER_TYPE))
    remove_member_access_dummy_vars (DECL_INITIAL (current_function_decl));

  /* Finalize the task copy functions generated for task constructs.  */
  gomp_task *task_stmt;
  unsigned j;
  FOR_EACH_VEC_ELT (task_cpyfns, j, task_stmt)
    finalize_task_copyfn (task_stmt);
  task_cpyfns.release ();
  return 0;
}
13210 
13211 namespace {
13212 
/* Pass descriptor for the "omplower" pass.  */

const pass_data pass_data_lower_omp =
{
  GIMPLE_PASS, /* type */
  "omplower", /* name */
  OPTGROUP_OMP, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  PROP_gimple_lomp | PROP_gimple_lomp_dev, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
13225 
/* Gimple pass wrapper that runs OMP lowering; no gate, the pass itself
   bails out early when no OpenMP/OpenACC flags are enabled.  */

class pass_lower_omp : public gimple_opt_pass
{
public:
  pass_lower_omp (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_lower_omp, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return execute_lower_omp (); }

}; // class pass_lower_omp
13237 
13238 } // anon namespace
13239 
/* Factory for the "omplower" pass; caller owns the returned pass.  */

gimple_opt_pass *
make_pass_lower_omp (gcc::context *ctxt)
{
  return new pass_lower_omp (ctxt);
}
13245 
13246 /* The following is a utility to diagnose structured block violations.
13247    It is not part of the "omplower" pass, as that's invoked too late.  It
13248    should be invoked by the respective front ends after gimplification.  */
13249 
/* Map from LABEL_DECL to the innermost enclosing OMP construct (a gimple
   statement, or NULL for none); built by diagnose_sb_1, consulted by
   diagnose_sb_2.  */
static splay_tree all_labels;
13251 
13252 /* Check for mismatched contexts and generate an error if needed.  Return
13253    true if an error is detected.  */
13254 
static bool
diagnose_sb_0 (gimple_stmt_iterator *gsi_p,
	       gimple *branch_ctx, gimple *label_ctx)
{
  gcc_checking_assert (!branch_ctx || is_gimple_omp (branch_ctx));
  gcc_checking_assert (!label_ctx || is_gimple_omp (label_ctx));

  /* Branch and destination label are in the same (possibly absent)
     structured block: no violation.  */
  if (label_ctx == branch_ctx)
    return false;

  const char* kind = NULL;

  /* Attribute the diagnostic to OpenACC when either end of the branch is
     in an OpenACC construct; otherwise fall back to OpenMP.  */
  if (flag_openacc)
    {
      if ((branch_ctx && is_gimple_omp_oacc (branch_ctx))
	  || (label_ctx && is_gimple_omp_oacc (label_ctx)))
	{
	  gcc_checking_assert (kind == NULL);
	  kind = "OpenACC";
	}
    }
  if (kind == NULL)
    {
      gcc_checking_assert (flag_openmp || flag_openmp_simd);
      kind = "OpenMP";
    }

  /* Previously we kept track of the label's entire context in diagnose_sb_[12]
     so we could traverse it and issue a correct "exit" or "enter" error
     message upon a structured block violation.

     We built the context by building a list with tree_cons'ing, but there is
     no easy counterpart in gimple tuples.  It seems like far too much work
     for issuing exit/enter error messages.  If someone really misses the
     distinct error message... patches welcome.  */

#if 0
  /* Try to avoid confusing the user by producing and error message
     with correct "exit" or "enter" verbiage.  We prefer "exit"
     unless we can show that LABEL_CTX is nested within BRANCH_CTX.  */
  if (branch_ctx == NULL)
    exit_p = false;
  else
    {
      while (label_ctx)
	{
	  if (TREE_VALUE (label_ctx) == branch_ctx)
	    {
	      exit_p = false;
	      break;
	    }
	  label_ctx = TREE_CHAIN (label_ctx);
	}
    }

  if (exit_p)
    error ("invalid exit from %s structured block", kind);
  else
    error ("invalid entry to %s structured block", kind);
#endif

  /* If it's obvious we have an invalid entry, be specific about the error.  */
  if (branch_ctx == NULL)
    error ("invalid entry to %s structured block", kind);
  else
    {
      /* Otherwise, be vague and lazy, but efficient.  */
      error ("invalid branch to/from %s structured block", kind);
    }

  /* Remove the offending branch so later passes don't trip over it.  */
  gsi_replace (gsi_p, gimple_build_nop (), false);
  return true;
}
13328 
13329 /* Pass 1: Create a minimal tree of structured blocks, and record
13330    where each label is found.  */
13331 
static tree
diagnose_sb_1 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
    	       struct walk_stmt_info *wi)
{
  /* WI->INFO carries the innermost enclosing OMP construct (NULL at the
     outermost level); it is saved and restored around recursions.  */
  gimple *context = (gimple *) wi->info;
  gimple *inner_context;
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;

  switch (gimple_code (stmt))
    {
    WALK_SUBSTMTS;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_SCAN:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_TARGET:
    case GIMPLE_OMP_TEAMS:
    case GIMPLE_OMP_TASKGROUP:
      /* The minimal context here is just the current OMP construct.  */
      inner_context = stmt;
      wi->info = inner_context;
      walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_OMP_FOR:
      inner_context = stmt;
      wi->info = inner_context;
      /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
	 walk them.  */
      walk_gimple_seq (gimple_omp_for_pre_body (stmt),
	  	       diagnose_sb_1, NULL, wi);
      walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_LABEL:
      /* Record which OMP construct (if any) this label lives in.  */
      splay_tree_insert (all_labels,
			 (splay_tree_key) gimple_label_label (
					    as_a <glabel *> (stmt)),
			 (splay_tree_value) context);
      break;

    default:
      break;
    }

  return NULL_TREE;
}
13389 
13390 /* Pass 2: Check each branch and see if its context differs from that of
13391    the destination label's context.  */
13392 
static tree
diagnose_sb_2 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
    	       struct walk_stmt_info *wi)
{
  /* WI->INFO is the innermost enclosing OMP construct of the statement
     being examined (NULL at the outermost level).  */
  gimple *context = (gimple *) wi->info;
  splay_tree_node n;
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;

  switch (gimple_code (stmt))
    {
    WALK_SUBSTMTS;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_SCAN:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_TARGET:
    case GIMPLE_OMP_TEAMS:
    case GIMPLE_OMP_TASKGROUP:
      /* Recurse into the construct's body with STMT as the new context.  */
      wi->info = stmt;
      walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_OMP_FOR:
      wi->info = stmt;
      /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
	 walk them.  */
      walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt),
			   diagnose_sb_2, NULL, wi);
      walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_COND:
	{
	  /* Check both branch targets of the conditional against the
	     context recorded for their labels.  */
	  gcond *cond_stmt = as_a <gcond *> (stmt);
	  tree lab = gimple_cond_true_label (cond_stmt);
	  if (lab)
	    {
	      n = splay_tree_lookup (all_labels,
				     (splay_tree_key) lab);
	      diagnose_sb_0 (gsi_p, context,
			     n ? (gimple *) n->value : NULL);
	    }
	  lab = gimple_cond_false_label (cond_stmt);
	  if (lab)
	    {
	      n = splay_tree_lookup (all_labels,
				     (splay_tree_key) lab);
	      diagnose_sb_0 (gsi_p, context,
			     n ? (gimple *) n->value : NULL);
	    }
	}
      break;

    case GIMPLE_GOTO:
      {
	tree lab = gimple_goto_dest (stmt);
	/* Computed gotos are not checked here.  */
	if (TREE_CODE (lab) != LABEL_DECL)
	  break;

	n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
	diagnose_sb_0 (gsi_p, context, n ? (gimple *) n->value : NULL);
      }
      break;

    case GIMPLE_SWITCH:
      {
	gswitch *switch_stmt = as_a <gswitch *> (stmt);
	unsigned int i;
	/* Stop at the first case label that diagnoses a violation; the
	   whole switch has already been replaced by a nop then.  */
	for (i = 0; i < gimple_switch_num_labels (switch_stmt); ++i)
	  {
	    tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
	    n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
	    if (n && diagnose_sb_0 (gsi_p, context, (gimple *) n->value))
	      break;
	  }
      }
      break;

    case GIMPLE_RETURN:
      /* A return inside any OMP construct exits the structured block.  */
      diagnose_sb_0 (gsi_p, context, NULL);
      break;

    default:
      break;
    }

  return NULL_TREE;
}
13491 
13492 static unsigned int
diagnose_omp_structured_block_errors(void)13493 diagnose_omp_structured_block_errors (void)
13494 {
13495   struct walk_stmt_info wi;
13496   gimple_seq body = gimple_body (current_function_decl);
13497 
13498   all_labels = splay_tree_new (splay_tree_compare_pointers, 0, 0);
13499 
13500   memset (&wi, 0, sizeof (wi));
13501   walk_gimple_seq (body, diagnose_sb_1, NULL, &wi);
13502 
13503   memset (&wi, 0, sizeof (wi));
13504   wi.want_locations = true;
13505   walk_gimple_seq_mod (&body, diagnose_sb_2, NULL, &wi);
13506 
13507   gimple_set_body (current_function_decl, body);
13508 
13509   splay_tree_delete (all_labels);
13510   all_labels = NULL;
13511 
13512   return 0;
13513 }
13514 
13515 namespace {
13516 
/* Pass descriptor for the structured-block diagnostic pass.  */

const pass_data pass_data_diagnose_omp_blocks =
{
  GIMPLE_PASS, /* type */
  "*diagnose_omp_blocks", /* name */
  OPTGROUP_OMP, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
13529 
/* Gimple pass wrapper for diagnose_omp_structured_block_errors; gated on
   any of the OpenACC/OpenMP flags being enabled.  */

class pass_diagnose_omp_blocks : public gimple_opt_pass
{
public:
  pass_diagnose_omp_blocks (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_diagnose_omp_blocks, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
  {
    return flag_openacc || flag_openmp || flag_openmp_simd;
  }
  virtual unsigned int execute (function *)
    {
      return diagnose_omp_structured_block_errors ();
    }

}; // class pass_diagnose_omp_blocks
13548 
13549 } // anon namespace
13550 
/* Factory for the "*diagnose_omp_blocks" pass; caller owns the result.  */

gimple_opt_pass *
make_pass_diagnose_omp_blocks (gcc::context *ctxt)
{
  return new pass_diagnose_omp_blocks (ctxt);
}
13556 
13557 
13558 #include "gt-omp-low.h"
13559