1 /* Lowering pass for OMP directives.  Converts OMP directives into explicit
2    calls to the runtime library (libgomp), data marshalling to implement data
3    sharing and copying clauses, offloading to accelerators, and more.
4 
5    Contributed by Diego Novillo <dnovillo@redhat.com>
6 
7    Copyright (C) 2005-2021 Free Software Foundation, Inc.
8 
9 This file is part of GCC.
10 
11 GCC is free software; you can redistribute it and/or modify it under
12 the terms of the GNU General Public License as published by the Free
13 Software Foundation; either version 3, or (at your option) any later
14 version.
15 
16 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
17 WARRANTY; without even the implied warranty of MERCHANTABILITY or
18 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
19 for more details.
20 
21 You should have received a copy of the GNU General Public License
22 along with GCC; see the file COPYING3.  If not see
23 <http://www.gnu.org/licenses/>.  */
24 
25 #include "config.h"
26 #include "system.h"
27 #include "coretypes.h"
28 #include "backend.h"
29 #include "target.h"
30 #include "tree.h"
31 #include "gimple.h"
32 #include "tree-pass.h"
33 #include "ssa.h"
34 #include "cgraph.h"
35 #include "pretty-print.h"
36 #include "diagnostic-core.h"
37 #include "fold-const.h"
38 #include "stor-layout.h"
39 #include "internal-fn.h"
40 #include "gimple-fold.h"
41 #include "gimplify.h"
42 #include "gimple-iterator.h"
43 #include "gimplify-me.h"
44 #include "gimple-walk.h"
45 #include "tree-iterator.h"
46 #include "tree-inline.h"
47 #include "langhooks.h"
48 #include "tree-dfa.h"
49 #include "tree-ssa.h"
50 #include "splay-tree.h"
51 #include "omp-general.h"
52 #include "omp-low.h"
53 #include "gimple-low.h"
54 #include "alloc-pool.h"
55 #include "symbol-summary.h"
56 #include "tree-nested.h"
57 #include "context.h"
58 #include "gomp-constants.h"
59 #include "gimple-pretty-print.h"
60 #include "stringpool.h"
61 #include "attribs.h"
62 #include "omp-offload.h"
63 
64 /* Lowering of OMP parallel and workshare constructs proceeds in two
65    phases.  The first phase scans the function looking for OMP statements
66    and then for variables that must be replaced to satisfy data sharing
67    clauses.  The second phase expands code for the constructs, as well as
68    re-gimplifying things when variables have been replaced with complex
69    expressions.
70 
71    Final code generation is done by pass_expand_omp.  The flowgraph is
72    scanned for regions which are then moved to a new
73    function, to be invoked by the thread library, or offloaded.  */
74 
/* Context structure.  Used to store information about each parallel
   directive in the code.  */

struct omp_context
{
  /* This field must be at the beginning, as we do "inheritance": Some
     callback functions for tree-inline.c (e.g., omp_copy_decl)
     receive a copy_body_data pointer that is up-casted to an
     omp_context pointer.  */
  copy_body_data cb;

  /* The tree of contexts corresponding to the encountered constructs.  */
  struct omp_context *outer;
  /* The construct statement (e.g. a GIMPLE_OMP_PARALLEL) that this
     context describes.  */
  gimple *stmt;

  /* Map variables to fields in a structure that allows communication
     between sending and receiving threads.  */
  splay_tree field_map;
  tree record_type;
  tree sender_decl;
  tree receiver_decl;

  /* These are used just by task contexts, if task firstprivate fn is
     needed.  srecord_type is used to communicate from the thread
     that encountered the task construct to task firstprivate fn,
     record_type is allocated by GOMP_task, initialized by task firstprivate
     fn and passed to the task body fn.  */
  splay_tree sfield_map;
  tree srecord_type;

  /* A chain of variables to add to the top-level block surrounding the
     construct.  In the case of a parallel, this is in the child function.  */
  tree block_vars;

  /* Label to which GOMP_cancel{,llation_point} and explicit and implicit
     barriers should jump to during omplower pass.  */
  tree cancel_label;

  /* The sibling GIMPLE_OMP_FOR simd with _simt_ clause or NULL
     otherwise.  */
  gimple *simt_stmt;

  /* For task reductions registered in this context, a vector containing
     the length of the private copies block (if constant, otherwise NULL)
     and then offsets (if constant, otherwise NULL) for each entry.  */
  vec<tree> task_reductions;

  /* A hash map from the reduction clauses to the registered array
     elts.  */
  hash_map<tree, unsigned> *task_reduction_map;

  /* And a hash map from the lastprivate(conditional:) variables to their
     corresponding tracking loop iteration variables.  */
  hash_map<tree, tree> *lastprivate_conditional_map;

  /* And a hash map from the allocate variables to their corresponding
     allocators.  */
  hash_map<tree, tree> *allocate_map;

  /* A tree_list of the reduction clauses in this context. This is
    only used for checking the consistency of OpenACC reduction
    clauses in scan_omp_for and is not guaranteed to contain a valid
    value outside of this function. */
  tree local_reduction_clauses;

  /* A tree_list of the reduction clauses in outer contexts. This is
    only used for checking the consistency of OpenACC reduction
    clauses in scan_omp_for and is not guaranteed to contain a valid
    value outside of this function. */
  tree outer_reduction_clauses;

  /* Nesting depth of this context.  Used to beautify error messages re
     invalid gotos.  The outermost ctx is depth 1, with depth 0 being
     reserved for the main body of the function.  */
  int depth;

  /* True if this parallel directive is nested within another.  */
  bool is_nested;

  /* True if this construct can be cancelled.  */
  bool cancellable;

  /* True if lower_omp_1 should look up lastprivate conditional in parent
     context.  */
  bool combined_into_simd_safelen1;

  /* True if there is nested scan context with inclusive clause.  */
  bool scan_inclusive;

  /* True if there is nested scan context with exclusive clause.  */
  bool scan_exclusive;

  /* True in the second simd loop of for simd with inscan reductions.  */
  bool for_simd_scan_phase;

  /* True if there is order(concurrent) clause on the construct.  */
  bool order_concurrent;

  /* True if there is bind clause on the construct (i.e. a loop construct).  */
  bool loop_p;

  /* Only used for omp target contexts.  True if a teams construct is
     strictly nested in it.  */
  bool teams_nested_p;

  /* Only used for omp target contexts.  True if an OpenMP construct other
     than teams is strictly nested in it.  */
  bool nonteams_nested_p;

  /* Candidates for adjusting OpenACC privatization level.  */
  vec<tree> oacc_privatization_candidates;
};
187 
/* All omp_contexts created by this pass; presumably keyed by the construct
   statement — TODO confirm where it is populated (outside this chunk).  */
static splay_tree all_contexts;
/* Current nesting depth of parallel/task regions during scanning.  */
static int taskreg_nesting_level;
/* Current nesting depth of target regions during scanning.  */
static int target_nesting_level;
/* DECL_UIDs of variables marked addressable only because a task needs to
   take their address (see use_pointer_for_field).  */
static bitmap task_shared_vars;
/* DECL_UIDs of global variables first seen as non-addressable; used to keep
   use_pointer_for_field's answer stable for the whole pass (PR91216).  */
static bitmap global_nonaddressable_vars;
/* Contexts of task regions collected during scanning.  */
static vec<omp_context *> taskreg_contexts;
194 
195 static void scan_omp (gimple_seq *, omp_context *);
196 static tree scan_omp_1_op (tree *, int *, void *);
197 
/* Convenience case labels for walk_gimple_stmt callbacks: for these
   container statements only the nested sub-statements are interesting,
   so tell the walker the statement itself was not handled and let it
   recurse.  */
#define WALK_SUBSTMTS  \
    case GIMPLE_BIND: \
    case GIMPLE_TRY: \
    case GIMPLE_CATCH: \
    case GIMPLE_EH_FILTER: \
    case GIMPLE_TRANSACTION: \
      /* The sub-statements for these should be walked.  */ \
      *handled_ops_p = false; \
      break;
207 
208 /* Return whether CTX represents an OpenACC 'parallel' or 'serial' construct.
209    (This doesn't include OpenACC 'kernels' decomposed parts.)  */
210 
211 static bool
is_oacc_parallel_or_serial(omp_context * ctx)212 is_oacc_parallel_or_serial (omp_context *ctx)
213 {
214   enum gimple_code outer_type = gimple_code (ctx->stmt);
215   return ((outer_type == GIMPLE_OMP_TARGET)
216 	  && ((gimple_omp_target_kind (ctx->stmt)
217 	       == GF_OMP_TARGET_KIND_OACC_PARALLEL)
218 	      || (gimple_omp_target_kind (ctx->stmt)
219 		  == GF_OMP_TARGET_KIND_OACC_SERIAL)));
220 }
221 
222 /* Return whether CTX represents an OpenACC 'kernels' construct.
223    (This doesn't include OpenACC 'kernels' decomposed parts.)  */
224 
225 static bool
is_oacc_kernels(omp_context * ctx)226 is_oacc_kernels (omp_context *ctx)
227 {
228   enum gimple_code outer_type = gimple_code (ctx->stmt);
229   return ((outer_type == GIMPLE_OMP_TARGET)
230 	  && (gimple_omp_target_kind (ctx->stmt)
231 	      == GF_OMP_TARGET_KIND_OACC_KERNELS));
232 }
233 
234 /* Return whether CTX represents an OpenACC 'kernels' decomposed part.  */
235 
236 static bool
is_oacc_kernels_decomposed_part(omp_context * ctx)237 is_oacc_kernels_decomposed_part (omp_context *ctx)
238 {
239   enum gimple_code outer_type = gimple_code (ctx->stmt);
240   return ((outer_type == GIMPLE_OMP_TARGET)
241 	  && ((gimple_omp_target_kind (ctx->stmt)
242 	       == GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED)
243 	      || (gimple_omp_target_kind (ctx->stmt)
244 		  == GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE)
245 	      || (gimple_omp_target_kind (ctx->stmt)
246 		  == GF_OMP_TARGET_KIND_OACC_DATA_KERNELS)));
247 }
248 
249 /* Return true if STMT corresponds to an OpenMP target region.  */
250 static bool
is_omp_target(gimple * stmt)251 is_omp_target (gimple *stmt)
252 {
253   if (gimple_code (stmt) == GIMPLE_OMP_TARGET)
254     {
255       int kind = gimple_omp_target_kind (stmt);
256       return (kind == GF_OMP_TARGET_KIND_REGION
257 	      || kind == GF_OMP_TARGET_KIND_DATA
258 	      || kind == GF_OMP_TARGET_KIND_ENTER_DATA
259 	      || kind == GF_OMP_TARGET_KIND_EXIT_DATA);
260     }
261   return false;
262 }
263 
264 /* If DECL is the artificial dummy VAR_DECL created for non-static
265    data member privatization, return the underlying "this" parameter,
266    otherwise return NULL.  */
267 
268 tree
omp_member_access_dummy_var(tree decl)269 omp_member_access_dummy_var (tree decl)
270 {
271   if (!VAR_P (decl)
272       || !DECL_ARTIFICIAL (decl)
273       || !DECL_IGNORED_P (decl)
274       || !DECL_HAS_VALUE_EXPR_P (decl)
275       || !lang_hooks.decls.omp_disregard_value_expr (decl, false))
276     return NULL_TREE;
277 
278   tree v = DECL_VALUE_EXPR (decl);
279   if (TREE_CODE (v) != COMPONENT_REF)
280     return NULL_TREE;
281 
282   while (1)
283     switch (TREE_CODE (v))
284       {
285       case COMPONENT_REF:
286       case MEM_REF:
287       case INDIRECT_REF:
288       CASE_CONVERT:
289       case POINTER_PLUS_EXPR:
290 	v = TREE_OPERAND (v, 0);
291 	continue;
292       case PARM_DECL:
293 	if (DECL_CONTEXT (v) == current_function_decl
294 	    && DECL_ARTIFICIAL (v)
295 	    && TREE_CODE (TREE_TYPE (v)) == POINTER_TYPE)
296 	  return v;
297 	return NULL_TREE;
298       default:
299 	return NULL_TREE;
300       }
301 }
302 
303 /* Helper for unshare_and_remap, called through walk_tree.  */
304 
305 static tree
unshare_and_remap_1(tree * tp,int * walk_subtrees,void * data)306 unshare_and_remap_1 (tree *tp, int *walk_subtrees, void *data)
307 {
308   tree *pair = (tree *) data;
309   if (*tp == pair[0])
310     {
311       *tp = unshare_expr (pair[1]);
312       *walk_subtrees = 0;
313     }
314   else if (IS_TYPE_OR_DECL_P (*tp))
315     *walk_subtrees = 0;
316   return NULL_TREE;
317 }
318 
319 /* Return unshare_expr (X) with all occurrences of FROM
320    replaced with TO.  */
321 
322 static tree
unshare_and_remap(tree x,tree from,tree to)323 unshare_and_remap (tree x, tree from, tree to)
324 {
325   tree pair[2] = { from, to };
326   x = unshare_expr (x);
327   walk_tree (&x, unshare_and_remap_1, pair, NULL);
328   return x;
329 }
330 
331 /* Convenience function for calling scan_omp_1_op on tree operands.  */
332 
333 static inline tree
scan_omp_op(tree * tp,omp_context * ctx)334 scan_omp_op (tree *tp, omp_context *ctx)
335 {
336   struct walk_stmt_info wi;
337 
338   memset (&wi, 0, sizeof (wi));
339   wi.info = ctx;
340   wi.want_locations = true;
341 
342   return walk_tree (tp, scan_omp_1_op, &wi, NULL);
343 }
344 
345 static void lower_omp (gimple_seq *, omp_context *);
346 static tree lookup_decl_in_outer_ctx (tree, omp_context *);
347 static tree maybe_lookup_decl_in_outer_ctx (tree, omp_context *);
348 
349 /* Return true if CTX is for an omp parallel.  */
350 
351 static inline bool
is_parallel_ctx(omp_context * ctx)352 is_parallel_ctx (omp_context *ctx)
353 {
354   return gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL;
355 }
356 
357 
358 /* Return true if CTX is for an omp task.  */
359 
360 static inline bool
is_task_ctx(omp_context * ctx)361 is_task_ctx (omp_context *ctx)
362 {
363   return gimple_code (ctx->stmt) == GIMPLE_OMP_TASK;
364 }
365 
366 
367 /* Return true if CTX is for an omp taskloop.  */
368 
369 static inline bool
is_taskloop_ctx(omp_context * ctx)370 is_taskloop_ctx (omp_context *ctx)
371 {
372   return gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
373 	 && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP;
374 }
375 
376 
377 /* Return true if CTX is for a host omp teams.  */
378 
379 static inline bool
is_host_teams_ctx(omp_context * ctx)380 is_host_teams_ctx (omp_context *ctx)
381 {
382   return gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
383 	 && gimple_omp_teams_host (as_a <gomp_teams *> (ctx->stmt));
384 }
385 
386 /* Return true if CTX is for an omp parallel or omp task or host omp teams
387    (the last one is strictly not a task region in OpenMP speak, but we
388    need to treat it similarly).  */
389 
390 static inline bool
is_taskreg_ctx(omp_context * ctx)391 is_taskreg_ctx (omp_context *ctx)
392 {
393   return is_parallel_ctx (ctx) || is_task_ctx (ctx) || is_host_teams_ctx (ctx);
394 }
395 
396 /* Return true if EXPR is variable sized.  */
397 
398 static inline bool
is_variable_sized(const_tree expr)399 is_variable_sized (const_tree expr)
400 {
401   return !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (expr)));
402 }
403 
404 /* Lookup variables.  The "maybe" form
405    allows for the variable form to not have been entered, otherwise we
406    assert that the variable must have been entered.  */
407 
408 static inline tree
lookup_decl(tree var,omp_context * ctx)409 lookup_decl (tree var, omp_context *ctx)
410 {
411   tree *n = ctx->cb.decl_map->get (var);
412   return *n;
413 }
414 
415 static inline tree
maybe_lookup_decl(const_tree var,omp_context * ctx)416 maybe_lookup_decl (const_tree var, omp_context *ctx)
417 {
418   tree *n = ctx->cb.decl_map->get (const_cast<tree> (var));
419   return n ? *n : NULL_TREE;
420 }
421 
422 static inline tree
lookup_field(tree var,omp_context * ctx)423 lookup_field (tree var, omp_context *ctx)
424 {
425   splay_tree_node n;
426   n = splay_tree_lookup (ctx->field_map, (splay_tree_key) var);
427   return (tree) n->value;
428 }
429 
430 static inline tree
lookup_sfield(splay_tree_key key,omp_context * ctx)431 lookup_sfield (splay_tree_key key, omp_context *ctx)
432 {
433   splay_tree_node n;
434   n = splay_tree_lookup (ctx->sfield_map
435 			 ? ctx->sfield_map : ctx->field_map, key);
436   return (tree) n->value;
437 }
438 
439 static inline tree
lookup_sfield(tree var,omp_context * ctx)440 lookup_sfield (tree var, omp_context *ctx)
441 {
442   return lookup_sfield ((splay_tree_key) var, ctx);
443 }
444 
445 static inline tree
maybe_lookup_field(splay_tree_key key,omp_context * ctx)446 maybe_lookup_field (splay_tree_key key, omp_context *ctx)
447 {
448   splay_tree_node n;
449   n = splay_tree_lookup (ctx->field_map, key);
450   return n ? (tree) n->value : NULL_TREE;
451 }
452 
453 static inline tree
maybe_lookup_field(tree var,omp_context * ctx)454 maybe_lookup_field (tree var, omp_context *ctx)
455 {
456   return maybe_lookup_field ((splay_tree_key) var, ctx);
457 }
458 
/* Return true if DECL should be copied by pointer (shared by reference)
   rather than by copy-in/copy-out.  SHARED_CTX is the parallel context if
   DECL is to be shared; when it is NULL only the type-based check is
   performed.  As a side effect, for tasks this may mark DECL's outer copy
   addressable and record it in TASK_SHARED_VARS.  */

static bool
use_pointer_for_field (tree decl, omp_context *shared_ctx)
{
  /* Aggregates and atomics always go by reference.  */
  if (AGGREGATE_TYPE_P (TREE_TYPE (decl))
      || TYPE_ATOMIC (TREE_TYPE (decl)))
    return true;

  /* We can only use copy-in/copy-out semantics for shared variables
     when we know the value is not accessible from an outer scope.  */
  if (shared_ctx)
    {
      gcc_assert (!is_gimple_omp_oacc (shared_ctx->stmt));

      /* ??? Trivially accessible from anywhere.  But why would we even
	 be passing an address in this case?  Should we simply assert
	 this to be false, or should we have a cleanup pass that removes
	 these from the list of mappings?  */
      if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, shared_ctx)))
	return true;

      /* For variables with DECL_HAS_VALUE_EXPR_P set, we cannot tell
	 without analyzing the expression whether or not its location
	 is accessible to anyone else.  In the case of nested parallel
	 regions it certainly may be.  */
      if (TREE_CODE (decl) != RESULT_DECL && DECL_HAS_VALUE_EXPR_P (decl))
	return true;

      /* Do not use copy-in/copy-out for variables that have their
	 address taken.  */
      if (is_global_var (decl))
	{
	  /* For file scope vars, track whether we've seen them as
	     non-addressable initially and in that case, keep the same
	     answer for the duration of the pass, even when they are made
	     addressable later on e.g. through reduction expansion.  Global
	     variables which weren't addressable before the pass will not
	     have their privatized copies address taken.  See PR91216.  */
	  if (!TREE_ADDRESSABLE (decl))
	    {
	      if (!global_nonaddressable_vars)
		global_nonaddressable_vars = BITMAP_ALLOC (NULL);
	      bitmap_set_bit (global_nonaddressable_vars, DECL_UID (decl));
	    }
	  else if (!global_nonaddressable_vars
		   || !bitmap_bit_p (global_nonaddressable_vars,
				     DECL_UID (decl)))
	    return true;
	}
      else if (TREE_ADDRESSABLE (decl))
	return true;

      /* lower_send_shared_vars only uses copy-in, but not copy-out
	 for these.  */
      if (TREE_READONLY (decl)
	  || ((TREE_CODE (decl) == RESULT_DECL
	       || TREE_CODE (decl) == PARM_DECL)
	      && DECL_BY_REFERENCE (decl)))
	return false;

      /* Disallow copy-in/out in nested parallel if
	 decl is shared in outer parallel, otherwise
	 each thread could store the shared variable
	 in its own copy-in location, making the
	 variable no longer really shared.  */
      if (shared_ctx->is_nested)
	{
	  omp_context *up;

	  /* Find the innermost enclosing task region (or offloaded
	     target) that has a mapping for DECL.  */
	  for (up = shared_ctx->outer; up; up = up->outer)
	    if ((is_taskreg_ctx (up)
		 || (gimple_code (up->stmt) == GIMPLE_OMP_TARGET
		     && is_gimple_omp_offloaded (up->stmt)))
		&& maybe_lookup_decl (decl, up))
	      break;

	  if (up)
	    {
	      tree c;

	      /* Check whether DECL is actually mapped/shared there.  */
	      if (gimple_code (up->stmt) == GIMPLE_OMP_TARGET)
		{
		  for (c = gimple_omp_target_clauses (up->stmt);
		       c; c = OMP_CLAUSE_CHAIN (c))
		    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
			&& OMP_CLAUSE_DECL (c) == decl)
		      break;
		}
	      else
		for (c = gimple_omp_taskreg_clauses (up->stmt);
		     c; c = OMP_CLAUSE_CHAIN (c))
		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
		      && OMP_CLAUSE_DECL (c) == decl)
		    break;

	      if (c)
		goto maybe_mark_addressable_and_ret;
	    }
	}

      /* For tasks avoid using copy-in/out.  As tasks can be
	 deferred or executed in different thread, when GOMP_task
	 returns, the task hasn't necessarily terminated.  */
      if (is_task_ctx (shared_ctx))
	{
	  tree outer;
	maybe_mark_addressable_and_ret:
	  outer = maybe_lookup_decl_in_outer_ctx (decl, shared_ctx);
	  if (is_gimple_reg (outer) && !omp_member_access_dummy_var (outer))
	    {
	      /* Taking address of OUTER in lower_send_shared_vars
		 might need regimplification of everything that uses the
		 variable.  */
	      if (!task_shared_vars)
		task_shared_vars = BITMAP_ALLOC (NULL);
	      bitmap_set_bit (task_shared_vars, DECL_UID (outer));
	      TREE_ADDRESSABLE (outer) = 1;
	    }
	  return true;
	}
    }

  return false;
}
585 
586 /* Construct a new automatic decl similar to VAR.  */
587 
588 static tree
omp_copy_decl_2(tree var,tree name,tree type,omp_context * ctx)589 omp_copy_decl_2 (tree var, tree name, tree type, omp_context *ctx)
590 {
591   tree copy = copy_var_decl (var, name, type);
592 
593   DECL_CONTEXT (copy) = current_function_decl;
594 
595   if (ctx)
596     {
597       DECL_CHAIN (copy) = ctx->block_vars;
598       ctx->block_vars = copy;
599     }
600   else
601     record_vars (copy);
602 
603   /* If VAR is listed in task_shared_vars, it means it wasn't
604      originally addressable and is just because task needs to take
605      it's address.  But we don't need to take address of privatizations
606      from that var.  */
607   if (TREE_ADDRESSABLE (var)
608       && ((task_shared_vars
609 	   && bitmap_bit_p (task_shared_vars, DECL_UID (var)))
610 	  || (global_nonaddressable_vars
611 	      && bitmap_bit_p (global_nonaddressable_vars, DECL_UID (var)))))
612     TREE_ADDRESSABLE (copy) = 0;
613 
614   return copy;
615 }
616 
617 static tree
omp_copy_decl_1(tree var,omp_context * ctx)618 omp_copy_decl_1 (tree var, omp_context *ctx)
619 {
620   return omp_copy_decl_2 (var, DECL_NAME (var), TREE_TYPE (var), ctx);
621 }
622 
623 /* Build COMPONENT_REF and set TREE_THIS_VOLATILE and TREE_READONLY on it
624    as appropriate.  */
625 /* See also 'gcc/omp-oacc-neuter-broadcast.cc:oacc_build_component_ref'.  */
626 
627 static tree
omp_build_component_ref(tree obj,tree field)628 omp_build_component_ref (tree obj, tree field)
629 {
630   tree ret = build3 (COMPONENT_REF, TREE_TYPE (field), obj, field, NULL);
631   if (TREE_THIS_VOLATILE (field))
632     TREE_THIS_VOLATILE (ret) |= 1;
633   if (TREE_READONLY (field))
634     TREE_READONLY (ret) |= 1;
635   return ret;
636 }
637 
638 /* Build tree nodes to access the field for VAR on the receiver side.  */
639 
640 static tree
build_receiver_ref(tree var,bool by_ref,omp_context * ctx)641 build_receiver_ref (tree var, bool by_ref, omp_context *ctx)
642 {
643   tree x, field = lookup_field (var, ctx);
644 
645   /* If the receiver record type was remapped in the child function,
646      remap the field into the new record type.  */
647   x = maybe_lookup_field (field, ctx);
648   if (x != NULL)
649     field = x;
650 
651   x = build_simple_mem_ref (ctx->receiver_decl);
652   TREE_THIS_NOTRAP (x) = 1;
653   x = omp_build_component_ref (x, field);
654   if (by_ref)
655     {
656       x = build_simple_mem_ref (x);
657       TREE_THIS_NOTRAP (x) = 1;
658     }
659 
660   return x;
661 }
662 
/* Build tree nodes to access VAR in the scope outer to CTX.  In the case
   of a parallel, this is a component reference; for workshare constructs
   this is some variable.  CODE is the clause code this reference is built
   for (OMP_CLAUSE_ERROR when not driven by a particular clause).  */

static tree
build_outer_var_ref (tree var, omp_context *ctx,
		     enum omp_clause_code code = OMP_CLAUSE_ERROR)
{
  tree x;
  omp_context *outer = ctx->outer;
  /* Skip contexts that don't carry their own data-sharing mappings:
     taskgroup always, scope unless VAR is actually mapped there.  */
  for (; outer; outer = outer->outer)
    {
      if (gimple_code (outer->stmt) == GIMPLE_OMP_TASKGROUP)
	continue;
      if (gimple_code (outer->stmt) == GIMPLE_OMP_SCOPE
	  && !maybe_lookup_decl (var, outer))
	continue;
      break;
    }

  if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx)))
    x = var;
  else if (is_variable_sized (var))
    {
      /* Variable-sized decls are accessed through a pointer held in
	 their DECL_VALUE_EXPR; build the outer ref for the pointer and
	 dereference it.  */
      x = TREE_OPERAND (DECL_VALUE_EXPR (var), 0);
      x = build_outer_var_ref (x, ctx, code);
      x = build_simple_mem_ref (x);
    }
  else if (is_taskreg_ctx (ctx))
    {
      bool by_ref = use_pointer_for_field (var, NULL);
      x = build_receiver_ref (var, by_ref, ctx);
    }
  else if ((gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
	    && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
	   || ctx->loop_p
	   || (code == OMP_CLAUSE_PRIVATE
	       && (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
		   || gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS
		   || gimple_code (ctx->stmt) == GIMPLE_OMP_SINGLE)))
    {
      /* #pragma omp simd isn't a worksharing construct, and can reference
	 even private vars in its linear etc. clauses.
	 Similarly for OMP_CLAUSE_PRIVATE with outer ref, that can refer
	 to private vars in all worksharing constructs.  */
      x = NULL_TREE;
      if (outer && is_taskreg_ctx (outer))
	x = lookup_decl (var, outer);
      else if (outer)
	x = maybe_lookup_decl_in_outer_ctx (var, ctx);
      if (x == NULL_TREE)
	x = var;
    }
  else if (code == OMP_CLAUSE_LASTPRIVATE && is_taskloop_ctx (ctx))
    {
      gcc_assert (outer);
      /* Taskloop fields are keyed by &DECL_UID (see install_var_field
	 with mask bit 8).  */
      splay_tree_node n
	= splay_tree_lookup (outer->field_map,
			     (splay_tree_key) &DECL_UID (var));
      if (n == NULL)
	{
	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, outer)))
	    x = var;
	  else
	    x = lookup_decl (var, outer);
	}
      else
	{
	  tree field = (tree) n->value;
	  /* If the receiver record type was remapped in the child function,
	     remap the field into the new record type.  */
	  x = maybe_lookup_field (field, outer);
	  if (x != NULL)
	    field = x;

	  x = build_simple_mem_ref (outer->receiver_decl);
	  x = omp_build_component_ref (x, field);
	  if (use_pointer_for_field (var, outer))
	    x = build_simple_mem_ref (x);
	}
    }
  else if (outer)
    x = lookup_decl (var, outer);
  else if (omp_privatize_by_reference (var))
    /* This can happen with orphaned constructs.  If var is reference, it is
       possible it is shared and as such valid.  */
    x = var;
  else if (omp_member_access_dummy_var (var))
    x = var;
  else
    gcc_unreachable ();

  if (x == var)
    {
      /* For a non-static data member dummy, rebuild the member access
	 through the (possibly remapped) "this" parameter.  */
      tree t = omp_member_access_dummy_var (var);
      if (t)
	{
	  x = DECL_VALUE_EXPR (var);
	  tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
	  if (o != t)
	    x = unshare_and_remap (x, t, o);
	  else
	    x = unshare_expr (x);
	}
    }

  if (omp_privatize_by_reference (var))
    x = build_simple_mem_ref (x);

  return x;
}
774 
775 /* Build tree nodes to access the field for VAR on the sender side.  */
776 
777 static tree
build_sender_ref(splay_tree_key key,omp_context * ctx)778 build_sender_ref (splay_tree_key key, omp_context *ctx)
779 {
780   tree field = lookup_sfield (key, ctx);
781   return omp_build_component_ref (ctx->sender_decl, field);
782 }
783 
784 static tree
build_sender_ref(tree var,omp_context * ctx)785 build_sender_ref (tree var, omp_context *ctx)
786 {
787   return build_sender_ref ((splay_tree_key) var, ctx);
788 }
789 
/* Add a new field for VAR inside the structure CTX->SENDER_DECL.  BY_REF
   means the field holds a pointer to VAR rather than VAR itself.  MASK is
   a bit set controlling the field's creation and registration:
     bit 0 (1)  - register the field in CTX->FIELD_MAP (receiver side);
     bit 1 (2)  - register the field in CTX->SFIELD_MAP (sender side);
     bit 2 (4)  - VAR is a variable-length array, passed as a pointer to
		  a pointer;
     bit 3 (8)  - key the maps by &DECL_UID (VAR) instead of VAR itself;
     bit 4 (16) - key the maps by &DECL_NAME (VAR) and use the language's
		  array-descriptor data type for the field;
     bit 5 (32) - when set, keep the reference type even for a VAR that is
		  privatized by reference (NOTE(review): inferred from the
		  (mask & (32 | 3)) == 1 test below — confirm against
		  callers).  */

static void
install_var_field (tree var, bool by_ref, int mask, omp_context *ctx)
{
  tree field, type, sfield = NULL_TREE;
  splay_tree_key key = (splay_tree_key) var;

  if ((mask & 16) != 0)
    {
      key = (splay_tree_key) &DECL_NAME (var);
      gcc_checking_assert (key != (splay_tree_key) var);
    }
  if ((mask & 8) != 0)
    {
      key = (splay_tree_key) &DECL_UID (var);
      gcc_checking_assert (key != (splay_tree_key) var);
    }
  /* Each key may be installed at most once per map, and OpenACC always
     installs into both maps at once.  */
  gcc_assert ((mask & 1) == 0
	      || !splay_tree_lookup (ctx->field_map, key));
  gcc_assert ((mask & 2) == 0 || !ctx->sfield_map
	      || !splay_tree_lookup (ctx->sfield_map, key));
  gcc_assert ((mask & 3) == 3
	      || !is_gimple_omp_oacc (ctx->stmt));

  type = TREE_TYPE (var);
  if ((mask & 16) != 0)
    type = lang_hooks.decls.omp_array_data (var, true);

  /* Prevent redeclaring the var in the split-off function with a restrict
     pointer type.  Note that we only clear type itself, restrict qualifiers in
     the pointed-to type will be ignored by points-to analysis.  */
  if (POINTER_TYPE_P (type)
      && TYPE_RESTRICT (type))
    type = build_qualified_type (type, TYPE_QUALS (type) & ~TYPE_QUAL_RESTRICT);

  if (mask & 4)
    {
      gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
      type = build_pointer_type (build_pointer_type (type));
    }
  else if (by_ref)
    type = build_pointer_type (type);
  else if ((mask & (32 | 3)) == 1
	   && omp_privatize_by_reference (var))
    type = TREE_TYPE (type);

  field = build_decl (DECL_SOURCE_LOCATION (var),
		      FIELD_DECL, DECL_NAME (var), type);

  /* Remember what variable this field was created for.  This does have a
     side effect of making dwarf2out ignore this member, so for helpful
     debugging we clear it later in delete_omp_context.  */
  DECL_ABSTRACT_ORIGIN (field) = var;
  if ((mask & 16) == 0 && type == TREE_TYPE (var))
    {
      SET_DECL_ALIGN (field, DECL_ALIGN (var));
      DECL_USER_ALIGN (field) = DECL_USER_ALIGN (var);
      TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (var);
    }
  else
    SET_DECL_ALIGN (field, TYPE_ALIGN (type));

  if ((mask & 3) == 3)
    {
      /* Install the field into both record types (sender copy only if a
	 separate srecord_type already exists).  */
      insert_field_into_struct (ctx->record_type, field);
      if (ctx->srecord_type)
	{
	  sfield = build_decl (DECL_SOURCE_LOCATION (var),
			       FIELD_DECL, DECL_NAME (var), type);
	  DECL_ABSTRACT_ORIGIN (sfield) = var;
	  SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
	  DECL_USER_ALIGN (sfield) = DECL_USER_ALIGN (field);
	  TREE_THIS_VOLATILE (sfield) = TREE_THIS_VOLATILE (field);
	  insert_field_into_struct (ctx->srecord_type, sfield);
	}
    }
  else
    {
      /* The record types diverge: lazily create srecord_type as a copy of
	 the fields accumulated so far in record_type.  */
      if (ctx->srecord_type == NULL_TREE)
	{
	  tree t;

	  ctx->srecord_type = lang_hooks.types.make_type (RECORD_TYPE);
	  ctx->sfield_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
	  for (t = TYPE_FIELDS (ctx->record_type); t ; t = TREE_CHAIN (t))
	    {
	      sfield = build_decl (DECL_SOURCE_LOCATION (t),
				   FIELD_DECL, DECL_NAME (t), TREE_TYPE (t));
	      DECL_ABSTRACT_ORIGIN (sfield) = DECL_ABSTRACT_ORIGIN (t);
	      insert_field_into_struct (ctx->srecord_type, sfield);
	      splay_tree_insert (ctx->sfield_map,
				 (splay_tree_key) DECL_ABSTRACT_ORIGIN (t),
				 (splay_tree_value) sfield);
	    }
	}
      sfield = field;
      insert_field_into_struct ((mask & 1) ? ctx->record_type
				: ctx->srecord_type, field);
    }

  if (mask & 1)
    splay_tree_insert (ctx->field_map, key, (splay_tree_value) field);
  if ((mask & 2) && ctx->sfield_map)
    splay_tree_insert (ctx->sfield_map, key, (splay_tree_value) sfield);
}
897 
898 static tree
install_var_local(tree var,omp_context * ctx)899 install_var_local (tree var, omp_context *ctx)
900 {
901   tree new_var = omp_copy_decl_1 (var, ctx);
902   insert_decl_map (&ctx->cb, var, new_var);
903   return new_var;
904 }
905 
906 /* Adjust the replacement for DECL in CTX for the new context.  This means
907    copying the DECL_VALUE_EXPR, and fixing up the type.  */
908 
909 static void
fixup_remapped_decl(tree decl,omp_context * ctx,bool private_debug)910 fixup_remapped_decl (tree decl, omp_context *ctx, bool private_debug)
911 {
912   tree new_decl, size;
913 
914   new_decl = lookup_decl (decl, ctx);
915 
916   TREE_TYPE (new_decl) = remap_type (TREE_TYPE (decl), &ctx->cb);
917 
918   if ((!TREE_CONSTANT (DECL_SIZE (new_decl)) || private_debug)
919       && DECL_HAS_VALUE_EXPR_P (decl))
920     {
921       tree ve = DECL_VALUE_EXPR (decl);
922       walk_tree (&ve, copy_tree_body_r, &ctx->cb, NULL);
923       SET_DECL_VALUE_EXPR (new_decl, ve);
924       DECL_HAS_VALUE_EXPR_P (new_decl) = 1;
925     }
926 
927   if (!TREE_CONSTANT (DECL_SIZE (new_decl)))
928     {
929       size = remap_decl (DECL_SIZE (decl), &ctx->cb);
930       if (size == error_mark_node)
931 	size = TYPE_SIZE (TREE_TYPE (new_decl));
932       DECL_SIZE (new_decl) = size;
933 
934       size = remap_decl (DECL_SIZE_UNIT (decl), &ctx->cb);
935       if (size == error_mark_node)
936 	size = TYPE_SIZE_UNIT (TREE_TYPE (new_decl));
937       DECL_SIZE_UNIT (new_decl) = size;
938     }
939 }
940 
941 /* The callback for remap_decl.  Search all containing contexts for a
942    mapping of the variable; this avoids having to duplicate the splay
943    tree ahead of time.  We know a mapping doesn't already exist in the
944    given context.  Create new mappings to implement default semantics.  */
945 
946 static tree
omp_copy_decl(tree var,copy_body_data * cb)947 omp_copy_decl (tree var, copy_body_data *cb)
948 {
949   omp_context *ctx = (omp_context *) cb;
950   tree new_var;
951 
952   if (TREE_CODE (var) == LABEL_DECL)
953     {
954       if (FORCED_LABEL (var) || DECL_NONLOCAL (var))
955 	return var;
956       new_var = create_artificial_label (DECL_SOURCE_LOCATION (var));
957       DECL_CONTEXT (new_var) = current_function_decl;
958       insert_decl_map (&ctx->cb, var, new_var);
959       return new_var;
960     }
961 
962   while (!is_taskreg_ctx (ctx))
963     {
964       ctx = ctx->outer;
965       if (ctx == NULL)
966 	return var;
967       new_var = maybe_lookup_decl (var, ctx);
968       if (new_var)
969 	return new_var;
970     }
971 
972   if (is_global_var (var) || decl_function_context (var) != ctx->cb.src_fn)
973     return var;
974 
975   return error_mark_node;
976 }
977 
/* Create a new context, with OUTER_CTX being the surrounding context.
   The context is registered in the global ALL_CONTEXTS splay tree keyed
   by STMT, so it can be found again when re-scanning the statement.  */

static omp_context *
new_omp_context (gimple *stmt, omp_context *outer_ctx)
{
  omp_context *ctx = XCNEW (omp_context);

  splay_tree_insert (all_contexts, (splay_tree_key) stmt,
		     (splay_tree_value) ctx);
  ctx->stmt = stmt;

  if (outer_ctx)
    {
      /* Nested context: inherit the copy-body machinery from the
	 enclosing context.  */
      ctx->outer = outer_ctx;
      ctx->cb = outer_ctx->cb;
      ctx->cb.block = NULL;
      ctx->depth = outer_ctx->depth + 1;
    }
  else
    {
      /* Outermost context: initialize the copy_body_data from scratch,
	 remapping within the current function.  */
      ctx->cb.src_fn = current_function_decl;
      ctx->cb.dst_fn = current_function_decl;
      ctx->cb.src_node = cgraph_node::get (current_function_decl);
      gcc_checking_assert (ctx->cb.src_node);
      ctx->cb.dst_node = ctx->cb.src_node;
      ctx->cb.src_cfun = cfun;
      ctx->cb.copy_decl = omp_copy_decl;
      ctx->cb.eh_lp_nr = 0;
      ctx->cb.transform_call_graph_edges = CB_CGE_MOVE;
      ctx->cb.adjust_array_error_bounds = true;
      ctx->cb.dont_remap_vla_if_no_change = true;
      ctx->depth = 1;
    }

  /* Every context gets its own decl map; this deliberately overrides
     the map copied from OUTER_CTX above so mappings are not shared.  */
  ctx->cb.decl_map = new hash_map<tree, tree>;

  return ctx;
}
1016 
1017 static gimple_seq maybe_catch_exception (gimple_seq);
1018 
/* Finalize task copyfn.  Gimplify the deferred body of the copy
   function attached to TASK_STMT, wrap it for exception handling if
   needed, and register the function with the callgraph.  Does nothing
   if the task has no copy function.  */

static void
finalize_task_copyfn (gomp_task *task_stmt)
{
  struct function *child_cfun;
  tree child_fn;
  gimple_seq seq = NULL, new_seq;
  gbind *bind;

  child_fn = gimple_omp_task_copy_fn (task_stmt);
  if (child_fn == NULL_TREE)
    return;

  child_cfun = DECL_STRUCT_FUNCTION (child_fn);
  /* Propagate the current pass properties to the child function.  */
  DECL_STRUCT_FUNCTION (child_fn)->curr_properties = cfun->curr_properties;

  /* Gimplify the copy function's body inside its own function context;
     gimplify_body requires the matching cfun to be active.  */
  push_cfun (child_cfun);
  bind = gimplify_body (child_fn, false);
  gimple_seq_add_stmt (&seq, bind);
  new_seq = maybe_catch_exception (seq);
  if (new_seq != seq)
    {
      /* EH wrapping produced a different sequence; rewrap it in a bind
	 so the function body remains a single statement.  */
      bind = gimple_build_bind (NULL, new_seq, NULL);
      seq = NULL;
      gimple_seq_add_stmt (&seq, bind);
    }
  gimple_set_body (child_fn, seq);
  pop_cfun ();

  /* Inform the callgraph about the new function.  */
  cgraph_node *node = cgraph_node::get_create (child_fn);
  node->parallelized_function = 1;
  cgraph_node::add_new_function (child_fn, false);
}
1054 
1055 /* Destroy a omp_context data structures.  Called through the splay tree
1056    value delete callback.  */
1057 
1058 static void
delete_omp_context(splay_tree_value value)1059 delete_omp_context (splay_tree_value value)
1060 {
1061   omp_context *ctx = (omp_context *) value;
1062 
1063   delete ctx->cb.decl_map;
1064 
1065   if (ctx->field_map)
1066     splay_tree_delete (ctx->field_map);
1067   if (ctx->sfield_map)
1068     splay_tree_delete (ctx->sfield_map);
1069 
1070   /* We hijacked DECL_ABSTRACT_ORIGIN earlier.  We need to clear it before
1071      it produces corrupt debug information.  */
1072   if (ctx->record_type)
1073     {
1074       tree t;
1075       for (t = TYPE_FIELDS (ctx->record_type); t ; t = DECL_CHAIN (t))
1076 	DECL_ABSTRACT_ORIGIN (t) = NULL;
1077     }
1078   if (ctx->srecord_type)
1079     {
1080       tree t;
1081       for (t = TYPE_FIELDS (ctx->srecord_type); t ; t = DECL_CHAIN (t))
1082 	DECL_ABSTRACT_ORIGIN (t) = NULL;
1083     }
1084 
1085   if (is_task_ctx (ctx))
1086     finalize_task_copyfn (as_a <gomp_task *> (ctx->stmt));
1087 
1088   if (ctx->task_reduction_map)
1089     {
1090       ctx->task_reductions.release ();
1091       delete ctx->task_reduction_map;
1092     }
1093 
1094   delete ctx->lastprivate_conditional_map;
1095   delete ctx->allocate_map;
1096 
1097   XDELETE (ctx);
1098 }
1099 
/* Fix up RECEIVER_DECL with a type that has been remapped to the child
   context.  */

static void
fixup_child_record_type (omp_context *ctx)
{
  tree f, type = ctx->record_type;

  if (!ctx->receiver_decl)
    return;
  /* ??? It isn't sufficient to just call remap_type here, because
     variably_modified_type_p doesn't work the way we expect for
     record types.  Testing each field for whether it needs remapping
     and creating a new record by hand works, however.  */
  for (f = TYPE_FIELDS (type); f ; f = DECL_CHAIN (f))
    if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
      break;
  if (f)
    {
      /* At least one field has a variably modified type: rebuild the
	 whole record with each field's type, size and offset remapped
	 into the child context.  */
      tree name, new_fields = NULL;

      type = lang_hooks.types.make_type (RECORD_TYPE);
      name = DECL_NAME (TYPE_NAME (ctx->record_type));
      name = build_decl (DECL_SOURCE_LOCATION (ctx->receiver_decl),
			 TYPE_DECL, name, type);
      TYPE_NAME (type) = name;

      for (f = TYPE_FIELDS (ctx->record_type); f ; f = DECL_CHAIN (f))
	{
	  tree new_f = copy_node (f);
	  DECL_CONTEXT (new_f) = type;
	  TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &ctx->cb);
	  /* Fields are collected in reverse and flipped back below.  */
	  DECL_CHAIN (new_f) = new_fields;
	  walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &ctx->cb, NULL);
	  walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r,
		     &ctx->cb, NULL);
	  walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
		     &ctx->cb, NULL);
	  new_fields = new_f;

	  /* Arrange to be able to look up the receiver field
	     given the sender field.  */
	  splay_tree_insert (ctx->field_map, (splay_tree_key) f,
			     (splay_tree_value) new_f);
	}
      TYPE_FIELDS (type) = nreverse (new_fields);
      layout_type (type);
    }

  /* In a target region we never modify any of the pointers in *.omp_data_i,
     so attempt to help the optimizers.  */
  if (is_gimple_omp_offloaded (ctx->stmt))
    type = build_qualified_type (type, TYPE_QUAL_CONST);

  /* The receiver sees the record through a restrict-qualified reference.  */
  TREE_TYPE (ctx->receiver_decl)
    = build_qualified_type (build_reference_type (type), TYPE_QUAL_RESTRICT);
}
1157 
1158 /* Instantiate decls as necessary in CTX to satisfy the data sharing
1159    specified by CLAUSES.  */
1160 
1161 static void
scan_sharing_clauses(tree clauses,omp_context * ctx)1162 scan_sharing_clauses (tree clauses, omp_context *ctx)
1163 {
1164   tree c, decl;
1165   bool scan_array_reductions = false;
1166 
1167   for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1168     if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_ALLOCATE
1169 	&& (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c) == NULL_TREE
1170 	    /* omp_default_mem_alloc is 1 */
1171 	    || !integer_onep (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c))
1172 	    || OMP_CLAUSE_ALLOCATE_ALIGN (c) != NULL_TREE))
1173       {
1174 	if (ctx->allocate_map == NULL)
1175 	  ctx->allocate_map = new hash_map<tree, tree>;
1176 	tree val = integer_zero_node;
1177 	if (OMP_CLAUSE_ALLOCATE_ALLOCATOR (c))
1178 	  val = OMP_CLAUSE_ALLOCATE_ALLOCATOR (c);
1179 	if (OMP_CLAUSE_ALLOCATE_ALIGN (c))
1180 	  val = build_tree_list (val, OMP_CLAUSE_ALLOCATE_ALIGN (c));
1181 	ctx->allocate_map->put (OMP_CLAUSE_DECL (c), val);
1182       }
1183 
1184   for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1185     {
1186       bool by_ref;
1187 
1188       switch (OMP_CLAUSE_CODE (c))
1189 	{
1190 	case OMP_CLAUSE_PRIVATE:
1191 	  decl = OMP_CLAUSE_DECL (c);
1192 	  if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
1193 	    goto do_private;
1194 	  else if (!is_variable_sized (decl))
1195 	    install_var_local (decl, ctx);
1196 	  break;
1197 
1198 	case OMP_CLAUSE_SHARED:
1199 	  decl = OMP_CLAUSE_DECL (c);
1200 	  if (ctx->allocate_map && ctx->allocate_map->get (decl))
1201 	    ctx->allocate_map->remove (decl);
1202 	  /* Ignore shared directives in teams construct inside of
1203 	     target construct.  */
1204 	  if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
1205 	      && !is_host_teams_ctx (ctx))
1206 	    {
1207 	      /* Global variables don't need to be copied,
1208 		 the receiver side will use them directly.  */
1209 	      tree odecl = maybe_lookup_decl_in_outer_ctx (decl, ctx);
1210 	      if (is_global_var (odecl))
1211 		break;
1212 	      insert_decl_map (&ctx->cb, decl, odecl);
1213 	      break;
1214 	    }
1215 	  gcc_assert (is_taskreg_ctx (ctx));
1216 	  gcc_assert (!COMPLETE_TYPE_P (TREE_TYPE (decl))
1217 		      || !is_variable_sized (decl));
1218 	  /* Global variables don't need to be copied,
1219 	     the receiver side will use them directly.  */
1220 	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1221 	    break;
1222 	  if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
1223 	    {
1224 	      use_pointer_for_field (decl, ctx);
1225 	      break;
1226 	    }
1227 	  by_ref = use_pointer_for_field (decl, NULL);
1228 	  if ((! TREE_READONLY (decl) && !OMP_CLAUSE_SHARED_READONLY (c))
1229 	      || TREE_ADDRESSABLE (decl)
1230 	      || by_ref
1231 	      || omp_privatize_by_reference (decl))
1232 	    {
1233 	      by_ref = use_pointer_for_field (decl, ctx);
1234 	      install_var_field (decl, by_ref, 3, ctx);
1235 	      install_var_local (decl, ctx);
1236 	      break;
1237 	    }
1238 	  /* We don't need to copy const scalar vars back.  */
1239 	  OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_FIRSTPRIVATE);
1240 	  goto do_private;
1241 
1242 	case OMP_CLAUSE_REDUCTION:
1243 	  /* Collect 'reduction' clauses on OpenACC compute construct.  */
1244 	  if (is_gimple_omp_oacc (ctx->stmt)
1245 	      && is_gimple_omp_offloaded (ctx->stmt))
1246 	    {
1247 	      /* No 'reduction' clauses on OpenACC 'kernels'.  */
1248 	      gcc_checking_assert (!is_oacc_kernels (ctx));
1249 	      /* Likewise, on OpenACC 'kernels' decomposed parts.  */
1250 	      gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx));
1251 
1252 	      ctx->local_reduction_clauses
1253 		= tree_cons (NULL, c, ctx->local_reduction_clauses);
1254 	    }
1255 	  /* FALLTHRU */
1256 
1257 	case OMP_CLAUSE_IN_REDUCTION:
1258 	  decl = OMP_CLAUSE_DECL (c);
1259 	  if (ctx->allocate_map
1260 	      && ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
1261 		   && (OMP_CLAUSE_REDUCTION_INSCAN (c)
1262 		       || OMP_CLAUSE_REDUCTION_TASK (c)))
1263 		  || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
1264 		  || is_task_ctx (ctx)))
1265 	    {
1266 	      /* For now.  */
1267 	      if (ctx->allocate_map->get (decl))
1268 		ctx->allocate_map->remove (decl);
1269 	    }
1270 	  if (TREE_CODE (decl) == MEM_REF)
1271 	    {
1272 	      tree t = TREE_OPERAND (decl, 0);
1273 	      if (TREE_CODE (t) == POINTER_PLUS_EXPR)
1274 		t = TREE_OPERAND (t, 0);
1275 	      if (TREE_CODE (t) == INDIRECT_REF
1276 		  || TREE_CODE (t) == ADDR_EXPR)
1277 		t = TREE_OPERAND (t, 0);
1278 	      if (is_omp_target (ctx->stmt))
1279 		{
1280 		  if (is_variable_sized (t))
1281 		    {
1282 		      gcc_assert (DECL_HAS_VALUE_EXPR_P (t));
1283 		      t = DECL_VALUE_EXPR (t);
1284 		      gcc_assert (TREE_CODE (t) == INDIRECT_REF);
1285 		      t = TREE_OPERAND (t, 0);
1286 		      gcc_assert (DECL_P (t));
1287 		    }
1288 		  tree at = t;
1289 		  if (ctx->outer)
1290 		    scan_omp_op (&at, ctx->outer);
1291 		  tree nt = omp_copy_decl_1 (at, ctx->outer);
1292 		  splay_tree_insert (ctx->field_map,
1293 				     (splay_tree_key) &DECL_CONTEXT (t),
1294 				     (splay_tree_value) nt);
1295 		  if (at != t)
1296 		    splay_tree_insert (ctx->field_map,
1297 				       (splay_tree_key) &DECL_CONTEXT (at),
1298 				       (splay_tree_value) nt);
1299 		  break;
1300 		}
1301 	      install_var_local (t, ctx);
1302 	      if (is_taskreg_ctx (ctx)
1303 		  && (!is_global_var (maybe_lookup_decl_in_outer_ctx (t, ctx))
1304 		      || (is_task_ctx (ctx)
1305 			  && (TREE_CODE (TREE_TYPE (t)) == POINTER_TYPE
1306 			      || (TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE
1307 				  && (TREE_CODE (TREE_TYPE (TREE_TYPE (t)))
1308 				      == POINTER_TYPE)))))
1309 		  && !is_variable_sized (t)
1310 		  && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
1311 		      || (!OMP_CLAUSE_REDUCTION_TASK (c)
1312 			  && !is_task_ctx (ctx))))
1313 		{
1314 		  by_ref = use_pointer_for_field (t, NULL);
1315 		  if (is_task_ctx (ctx)
1316 		      && TREE_CODE (TREE_TYPE (t)) == REFERENCE_TYPE
1317 		      && TREE_CODE (TREE_TYPE (TREE_TYPE (t))) == POINTER_TYPE)
1318 		    {
1319 		      install_var_field (t, false, 1, ctx);
1320 		      install_var_field (t, by_ref, 2, ctx);
1321 		    }
1322 		  else
1323 		    install_var_field (t, by_ref, 3, ctx);
1324 		}
1325 	      break;
1326 	    }
1327 	  if (is_omp_target (ctx->stmt))
1328 	    {
1329 	      tree at = decl;
1330 	      if (ctx->outer)
1331 		scan_omp_op (&at, ctx->outer);
1332 	      tree nt = omp_copy_decl_1 (at, ctx->outer);
1333 	      splay_tree_insert (ctx->field_map,
1334 				 (splay_tree_key) &DECL_CONTEXT (decl),
1335 				 (splay_tree_value) nt);
1336 	      if (at != decl)
1337 		splay_tree_insert (ctx->field_map,
1338 				   (splay_tree_key) &DECL_CONTEXT (at),
1339 				   (splay_tree_value) nt);
1340 	      break;
1341 	    }
1342 	  if (is_task_ctx (ctx)
1343 	      || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
1344 		  && OMP_CLAUSE_REDUCTION_TASK (c)
1345 		  && is_parallel_ctx (ctx)))
1346 	    {
1347 	      /* Global variables don't need to be copied,
1348 		 the receiver side will use them directly.  */
1349 	      if (!is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1350 		{
1351 		  by_ref = use_pointer_for_field (decl, ctx);
1352 		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION)
1353 		    install_var_field (decl, by_ref, 3, ctx);
1354 		}
1355 	      install_var_local (decl, ctx);
1356 	      break;
1357 	    }
1358 	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
1359 	      && OMP_CLAUSE_REDUCTION_TASK (c))
1360 	    {
1361 	      install_var_local (decl, ctx);
1362 	      break;
1363 	    }
1364 	  goto do_private;
1365 
1366 	case OMP_CLAUSE_LASTPRIVATE:
1367 	  /* Let the corresponding firstprivate clause create
1368 	     the variable.  */
1369 	  if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
1370 	    break;
1371 	  /* FALLTHRU */
1372 
1373 	case OMP_CLAUSE_FIRSTPRIVATE:
1374 	case OMP_CLAUSE_LINEAR:
1375 	  decl = OMP_CLAUSE_DECL (c);
1376 	do_private:
1377 	  if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
1378 	       || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
1379 	      && is_gimple_omp_offloaded (ctx->stmt))
1380 	    {
1381 	      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
1382 		{
1383 		  by_ref = !omp_privatize_by_reference (decl);
1384 		  install_var_field (decl, by_ref, 3, ctx);
1385 		}
1386 	      else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1387 		install_var_field (decl, true, 3, ctx);
1388 	      else
1389 		install_var_field (decl, false, 3, ctx);
1390 	    }
1391 	  if (is_variable_sized (decl))
1392 	    {
1393 	      if (is_task_ctx (ctx))
1394 		{
1395 		  if (ctx->allocate_map
1396 		      && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
1397 		    {
1398 		      /* For now.  */
1399 		      if (ctx->allocate_map->get (decl))
1400 			ctx->allocate_map->remove (decl);
1401 		    }
1402 		  install_var_field (decl, false, 1, ctx);
1403 		}
1404 	      break;
1405 	    }
1406 	  else if (is_taskreg_ctx (ctx))
1407 	    {
1408 	      bool global
1409 		= is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx));
1410 	      by_ref = use_pointer_for_field (decl, NULL);
1411 
1412 	      if (is_task_ctx (ctx)
1413 		  && (global || by_ref || omp_privatize_by_reference (decl)))
1414 		{
1415 		  if (ctx->allocate_map
1416 		      && ctx->allocate_map->get (decl))
1417 		    install_var_field (decl, by_ref, 32 | 1, ctx);
1418 		  else
1419 		    install_var_field (decl, false, 1, ctx);
1420 		  if (!global)
1421 		    install_var_field (decl, by_ref, 2, ctx);
1422 		}
1423 	      else if (!global)
1424 		install_var_field (decl, by_ref, 3, ctx);
1425 	    }
1426 	  install_var_local (decl, ctx);
1427 	  break;
1428 
1429 	case OMP_CLAUSE_USE_DEVICE_PTR:
1430 	case OMP_CLAUSE_USE_DEVICE_ADDR:
1431 	  decl = OMP_CLAUSE_DECL (c);
1432 
1433 	  /* Fortran array descriptors.  */
1434 	  if (lang_hooks.decls.omp_array_data (decl, true))
1435 	    install_var_field (decl, false, 19, ctx);
1436 	  else if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
1437 		    && !omp_privatize_by_reference (decl)
1438 		    && !omp_is_allocatable_or_ptr (decl))
1439 		   || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1440 	    install_var_field (decl, true, 11, ctx);
1441 	  else
1442 	    install_var_field (decl, false, 11, ctx);
1443 	  if (DECL_SIZE (decl)
1444 	      && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1445 	    {
1446 	      tree decl2 = DECL_VALUE_EXPR (decl);
1447 	      gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1448 	      decl2 = TREE_OPERAND (decl2, 0);
1449 	      gcc_assert (DECL_P (decl2));
1450 	      install_var_local (decl2, ctx);
1451 	    }
1452 	  install_var_local (decl, ctx);
1453 	  break;
1454 
1455 	case OMP_CLAUSE_IS_DEVICE_PTR:
1456 	  decl = OMP_CLAUSE_DECL (c);
1457 	  goto do_private;
1458 
1459 	case OMP_CLAUSE__LOOPTEMP_:
1460 	case OMP_CLAUSE__REDUCTEMP_:
1461 	  gcc_assert (is_taskreg_ctx (ctx));
1462 	  decl = OMP_CLAUSE_DECL (c);
1463 	  install_var_field (decl, false, 3, ctx);
1464 	  install_var_local (decl, ctx);
1465 	  break;
1466 
1467 	case OMP_CLAUSE_COPYPRIVATE:
1468 	case OMP_CLAUSE_COPYIN:
1469 	  decl = OMP_CLAUSE_DECL (c);
1470 	  by_ref = use_pointer_for_field (decl, NULL);
1471 	  install_var_field (decl, by_ref, 3, ctx);
1472 	  break;
1473 
1474 	case OMP_CLAUSE_FINAL:
1475 	case OMP_CLAUSE_IF:
1476 	case OMP_CLAUSE_NUM_THREADS:
1477 	case OMP_CLAUSE_NUM_TEAMS:
1478 	case OMP_CLAUSE_THREAD_LIMIT:
1479 	case OMP_CLAUSE_DEVICE:
1480 	case OMP_CLAUSE_SCHEDULE:
1481 	case OMP_CLAUSE_DIST_SCHEDULE:
1482 	case OMP_CLAUSE_DEPEND:
1483 	case OMP_CLAUSE_PRIORITY:
1484 	case OMP_CLAUSE_GRAINSIZE:
1485 	case OMP_CLAUSE_NUM_TASKS:
1486 	case OMP_CLAUSE_NUM_GANGS:
1487 	case OMP_CLAUSE_NUM_WORKERS:
1488 	case OMP_CLAUSE_VECTOR_LENGTH:
1489 	case OMP_CLAUSE_DETACH:
1490 	case OMP_CLAUSE_FILTER:
1491 	  if (ctx->outer)
1492 	    scan_omp_op (&OMP_CLAUSE_OPERAND (c, 0), ctx->outer);
1493 	  break;
1494 
1495 	case OMP_CLAUSE_TO:
1496 	case OMP_CLAUSE_FROM:
1497 	case OMP_CLAUSE_MAP:
1498 	  if (ctx->outer)
1499 	    scan_omp_op (&OMP_CLAUSE_SIZE (c), ctx->outer);
1500 	  decl = OMP_CLAUSE_DECL (c);
1501 	  /* Global variables with "omp declare target" attribute
1502 	     don't need to be copied, the receiver side will use them
1503 	     directly.  However, global variables with "omp declare target link"
1504 	     attribute need to be copied.  Or when ALWAYS modifier is used.  */
1505 	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1506 	      && DECL_P (decl)
1507 	      && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
1508 		   && (OMP_CLAUSE_MAP_KIND (c)
1509 		       != GOMP_MAP_FIRSTPRIVATE_REFERENCE)
1510 		   && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ATTACH
1511 		   && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_DETACH)
1512 		  || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1513 	      && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_TO
1514 	      && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_FROM
1515 	      && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_TOFROM
1516 	      && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_TO_PSET
1517 	      && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
1518 	      && varpool_node::get_create (decl)->offloadable
1519 	      && !lookup_attribute ("omp declare target link",
1520 				    DECL_ATTRIBUTES (decl)))
1521 	    break;
1522 	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1523 	      && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER)
1524 	    {
1525 	      /* Ignore GOMP_MAP_POINTER kind for arrays in regions that are
1526 		 not offloaded; there is nothing to map for those.  */
1527 	      if (!is_gimple_omp_offloaded (ctx->stmt)
1528 		  && !POINTER_TYPE_P (TREE_TYPE (decl))
1529 		  && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
1530 		break;
1531 	    }
1532 	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1533 	      && DECL_P (decl)
1534 	      && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
1535 		  || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)
1536 	      && is_omp_target (ctx->stmt))
1537 	    {
1538 	      /* If this is an offloaded region, an attach operation should
1539 		 only exist when the pointer variable is mapped in a prior
1540 		 clause.  */
1541 	      if (is_gimple_omp_offloaded (ctx->stmt))
1542 		gcc_assert
1543 		  (maybe_lookup_decl (decl, ctx)
1544 		   || (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
1545 		       && lookup_attribute ("omp declare target",
1546 					    DECL_ATTRIBUTES (decl))));
1547 
1548 	      /* By itself, attach/detach is generated as part of pointer
1549 		 variable mapping and should not create new variables in the
1550 		 offloaded region, however sender refs for it must be created
1551 		 for its address to be passed to the runtime.  */
1552 	      tree field
1553 		= build_decl (OMP_CLAUSE_LOCATION (c),
1554 			      FIELD_DECL, NULL_TREE, ptr_type_node);
1555 	      SET_DECL_ALIGN (field, TYPE_ALIGN (ptr_type_node));
1556 	      insert_field_into_struct (ctx->record_type, field);
1557 	      /* To not clash with a map of the pointer variable itself,
1558 		 attach/detach maps have their field looked up by the *clause*
1559 		 tree expression, not the decl.  */
1560 	      gcc_assert (!splay_tree_lookup (ctx->field_map,
1561 					      (splay_tree_key) c));
1562 	      splay_tree_insert (ctx->field_map, (splay_tree_key) c,
1563 				 (splay_tree_value) field);
1564 	      break;
1565 	    }
1566 	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1567 	      && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
1568 		  || (OMP_CLAUSE_MAP_KIND (c)
1569 		      == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
1570 	    {
1571 	      if (TREE_CODE (decl) == COMPONENT_REF
1572 		  || (TREE_CODE (decl) == INDIRECT_REF
1573 		      && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
1574 		      && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
1575 			  == REFERENCE_TYPE)))
1576 		break;
1577 	      if (DECL_SIZE (decl)
1578 		  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1579 		{
1580 		  tree decl2 = DECL_VALUE_EXPR (decl);
1581 		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1582 		  decl2 = TREE_OPERAND (decl2, 0);
1583 		  gcc_assert (DECL_P (decl2));
1584 		  install_var_local (decl2, ctx);
1585 		}
1586 	      install_var_local (decl, ctx);
1587 	      break;
1588 	    }
1589 	  if (DECL_P (decl))
1590 	    {
1591 	      if (DECL_SIZE (decl)
1592 		  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1593 		{
1594 		  tree decl2 = DECL_VALUE_EXPR (decl);
1595 		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1596 		  decl2 = TREE_OPERAND (decl2, 0);
1597 		  gcc_assert (DECL_P (decl2));
1598 		  install_var_field (decl2, true, 3, ctx);
1599 		  install_var_local (decl2, ctx);
1600 		  install_var_local (decl, ctx);
1601 		}
1602 	      else
1603 		{
1604 		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
1605 		      && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
1606 		      && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
1607 		      && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1608 		    install_var_field (decl, true, 7, ctx);
1609 		  else
1610 		    install_var_field (decl, true, 3, ctx);
1611 		  if (is_gimple_omp_offloaded (ctx->stmt)
1612 		      && !(is_gimple_omp_oacc (ctx->stmt)
1613 			   && OMP_CLAUSE_MAP_IN_REDUCTION (c)))
1614 		    install_var_local (decl, ctx);
1615 		}
1616 	    }
1617 	  else
1618 	    {
1619 	      tree base = get_base_address (decl);
1620 	      tree nc = OMP_CLAUSE_CHAIN (c);
1621 	      if (DECL_P (base)
1622 		  && nc != NULL_TREE
1623 		  && OMP_CLAUSE_CODE (nc) == OMP_CLAUSE_MAP
1624 		  && OMP_CLAUSE_DECL (nc) == base
1625 		  && OMP_CLAUSE_MAP_KIND (nc) == GOMP_MAP_POINTER
1626 		  && integer_zerop (OMP_CLAUSE_SIZE (nc)))
1627 		{
1628 		  OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c) = 1;
1629 		  OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (nc) = 1;
1630 		}
1631 	      else
1632 		{
1633 		  if (ctx->outer)
1634 		    {
1635 		      scan_omp_op (&OMP_CLAUSE_DECL (c), ctx->outer);
1636 		      decl = OMP_CLAUSE_DECL (c);
1637 		    }
1638 		  gcc_assert (!splay_tree_lookup (ctx->field_map,
1639 						  (splay_tree_key) decl));
1640 		  tree field
1641 		    = build_decl (OMP_CLAUSE_LOCATION (c),
1642 				  FIELD_DECL, NULL_TREE, ptr_type_node);
1643 		  SET_DECL_ALIGN (field, TYPE_ALIGN (ptr_type_node));
1644 		  insert_field_into_struct (ctx->record_type, field);
1645 		  splay_tree_insert (ctx->field_map, (splay_tree_key) decl,
1646 				     (splay_tree_value) field);
1647 		}
1648 	    }
1649 	  break;
1650 
1651 	case OMP_CLAUSE_ORDER:
1652 	  ctx->order_concurrent = true;
1653 	  break;
1654 
1655 	case OMP_CLAUSE_BIND:
1656 	  ctx->loop_p = true;
1657 	  break;
1658 
1659 	case OMP_CLAUSE_NOWAIT:
1660 	case OMP_CLAUSE_ORDERED:
1661 	case OMP_CLAUSE_COLLAPSE:
1662 	case OMP_CLAUSE_UNTIED:
1663 	case OMP_CLAUSE_MERGEABLE:
1664 	case OMP_CLAUSE_PROC_BIND:
1665 	case OMP_CLAUSE_SAFELEN:
1666 	case OMP_CLAUSE_SIMDLEN:
1667 	case OMP_CLAUSE_THREADS:
1668 	case OMP_CLAUSE_SIMD:
1669 	case OMP_CLAUSE_NOGROUP:
1670 	case OMP_CLAUSE_DEFAULTMAP:
1671 	case OMP_CLAUSE_ASYNC:
1672 	case OMP_CLAUSE_WAIT:
1673 	case OMP_CLAUSE_GANG:
1674 	case OMP_CLAUSE_WORKER:
1675 	case OMP_CLAUSE_VECTOR:
1676 	case OMP_CLAUSE_INDEPENDENT:
1677 	case OMP_CLAUSE_AUTO:
1678 	case OMP_CLAUSE_SEQ:
1679 	case OMP_CLAUSE_TILE:
1680 	case OMP_CLAUSE__SIMT_:
1681 	case OMP_CLAUSE_DEFAULT:
1682 	case OMP_CLAUSE_NONTEMPORAL:
1683 	case OMP_CLAUSE_IF_PRESENT:
1684 	case OMP_CLAUSE_FINALIZE:
1685 	case OMP_CLAUSE_TASK_REDUCTION:
1686 	case OMP_CLAUSE_ALLOCATE:
1687 	  break;
1688 
1689 	case OMP_CLAUSE_ALIGNED:
1690 	  decl = OMP_CLAUSE_DECL (c);
1691 	  if (is_global_var (decl)
1692 	      && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1693 	    install_var_local (decl, ctx);
1694 	  break;
1695 
1696 	case OMP_CLAUSE__CONDTEMP_:
1697 	  decl = OMP_CLAUSE_DECL (c);
1698 	  if (is_parallel_ctx (ctx))
1699 	    {
1700 	      install_var_field (decl, false, 3, ctx);
1701 	      install_var_local (decl, ctx);
1702 	    }
1703 	  else if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
1704 		   && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD
1705 		   && !OMP_CLAUSE__CONDTEMP__ITER (c))
1706 	    install_var_local (decl, ctx);
1707 	  break;
1708 
1709 	case OMP_CLAUSE__CACHE_:
1710 	case OMP_CLAUSE_NOHOST:
1711 	default:
1712 	  gcc_unreachable ();
1713 	}
1714     }
1715 
1716   for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1717     {
1718       switch (OMP_CLAUSE_CODE (c))
1719 	{
1720 	case OMP_CLAUSE_LASTPRIVATE:
1721 	  /* Let the corresponding firstprivate clause create
1722 	     the variable.  */
1723 	  if (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
1724 	    scan_array_reductions = true;
1725 	  if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
1726 	    break;
1727 	  /* FALLTHRU */
1728 
1729 	case OMP_CLAUSE_FIRSTPRIVATE:
1730 	case OMP_CLAUSE_PRIVATE:
1731 	case OMP_CLAUSE_LINEAR:
1732 	case OMP_CLAUSE_IS_DEVICE_PTR:
1733 	  decl = OMP_CLAUSE_DECL (c);
1734 	  if (is_variable_sized (decl))
1735 	    {
1736 	      if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
1737 		   || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IS_DEVICE_PTR)
1738 		  && is_gimple_omp_offloaded (ctx->stmt))
1739 		{
1740 		  tree decl2 = DECL_VALUE_EXPR (decl);
1741 		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1742 		  decl2 = TREE_OPERAND (decl2, 0);
1743 		  gcc_assert (DECL_P (decl2));
1744 		  install_var_local (decl2, ctx);
1745 		  fixup_remapped_decl (decl2, ctx, false);
1746 		}
1747 	      install_var_local (decl, ctx);
1748 	    }
1749 	  fixup_remapped_decl (decl, ctx,
1750 			       OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE
1751 			       && OMP_CLAUSE_PRIVATE_DEBUG (c));
1752 	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
1753 	      && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
1754 	    scan_array_reductions = true;
1755 	  break;
1756 
1757 	case OMP_CLAUSE_REDUCTION:
1758 	case OMP_CLAUSE_IN_REDUCTION:
1759 	  decl = OMP_CLAUSE_DECL (c);
1760 	  if (TREE_CODE (decl) != MEM_REF && !is_omp_target (ctx->stmt))
1761 	    {
1762 	      if (is_variable_sized (decl))
1763 		install_var_local (decl, ctx);
1764 	      fixup_remapped_decl (decl, ctx, false);
1765 	    }
1766 	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
1767 	    scan_array_reductions = true;
1768 	  break;
1769 
1770 	case OMP_CLAUSE_TASK_REDUCTION:
1771 	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
1772 	    scan_array_reductions = true;
1773 	  break;
1774 
1775 	case OMP_CLAUSE_SHARED:
1776 	  /* Ignore shared directives in teams construct inside of
1777 	     target construct.  */
1778 	  if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
1779 	      && !is_host_teams_ctx (ctx))
1780 	    break;
1781 	  decl = OMP_CLAUSE_DECL (c);
1782 	  if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
1783 	    break;
1784 	  if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
1785 	    {
1786 	      if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl,
1787 								 ctx->outer)))
1788 		break;
1789 	      bool by_ref = use_pointer_for_field (decl, ctx);
1790 	      install_var_field (decl, by_ref, 11, ctx);
1791 	      break;
1792 	    }
1793 	  fixup_remapped_decl (decl, ctx, false);
1794 	  break;
1795 
1796 	case OMP_CLAUSE_MAP:
1797 	  if (!is_gimple_omp_offloaded (ctx->stmt))
1798 	    break;
1799 	  decl = OMP_CLAUSE_DECL (c);
1800 	  if (DECL_P (decl)
1801 	      && ((OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
1802 		   && (OMP_CLAUSE_MAP_KIND (c)
1803 		       != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
1804 		  || TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
1805 	      && is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx))
1806 	      && varpool_node::get_create (decl)->offloadable)
1807 	    break;
1808 	  if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
1809 	       || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)
1810 	      && is_omp_target (ctx->stmt)
1811 	      && !is_gimple_omp_offloaded (ctx->stmt))
1812 	    break;
1813 	  if (DECL_P (decl))
1814 	    {
1815 	      if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
1816 		   || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
1817 		  && TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE
1818 		  && !COMPLETE_TYPE_P (TREE_TYPE (decl)))
1819 		{
1820 		  tree new_decl = lookup_decl (decl, ctx);
1821 		  TREE_TYPE (new_decl)
1822 		    = remap_type (TREE_TYPE (decl), &ctx->cb);
1823 		}
1824 	      else if (DECL_SIZE (decl)
1825 		       && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1826 		{
1827 		  tree decl2 = DECL_VALUE_EXPR (decl);
1828 		  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
1829 		  decl2 = TREE_OPERAND (decl2, 0);
1830 		  gcc_assert (DECL_P (decl2));
1831 		  fixup_remapped_decl (decl2, ctx, false);
1832 		  fixup_remapped_decl (decl, ctx, true);
1833 		}
1834 	      else
1835 		fixup_remapped_decl (decl, ctx, false);
1836 	    }
1837 	  break;
1838 
1839 	case OMP_CLAUSE_COPYPRIVATE:
1840 	case OMP_CLAUSE_COPYIN:
1841 	case OMP_CLAUSE_DEFAULT:
1842 	case OMP_CLAUSE_IF:
1843 	case OMP_CLAUSE_NUM_THREADS:
1844 	case OMP_CLAUSE_NUM_TEAMS:
1845 	case OMP_CLAUSE_THREAD_LIMIT:
1846 	case OMP_CLAUSE_DEVICE:
1847 	case OMP_CLAUSE_SCHEDULE:
1848 	case OMP_CLAUSE_DIST_SCHEDULE:
1849 	case OMP_CLAUSE_NOWAIT:
1850 	case OMP_CLAUSE_ORDERED:
1851 	case OMP_CLAUSE_COLLAPSE:
1852 	case OMP_CLAUSE_UNTIED:
1853 	case OMP_CLAUSE_FINAL:
1854 	case OMP_CLAUSE_MERGEABLE:
1855 	case OMP_CLAUSE_PROC_BIND:
1856 	case OMP_CLAUSE_SAFELEN:
1857 	case OMP_CLAUSE_SIMDLEN:
1858 	case OMP_CLAUSE_ALIGNED:
1859 	case OMP_CLAUSE_DEPEND:
1860 	case OMP_CLAUSE_DETACH:
1861 	case OMP_CLAUSE_ALLOCATE:
1862 	case OMP_CLAUSE__LOOPTEMP_:
1863 	case OMP_CLAUSE__REDUCTEMP_:
1864 	case OMP_CLAUSE_TO:
1865 	case OMP_CLAUSE_FROM:
1866 	case OMP_CLAUSE_PRIORITY:
1867 	case OMP_CLAUSE_GRAINSIZE:
1868 	case OMP_CLAUSE_NUM_TASKS:
1869 	case OMP_CLAUSE_THREADS:
1870 	case OMP_CLAUSE_SIMD:
1871 	case OMP_CLAUSE_NOGROUP:
1872 	case OMP_CLAUSE_DEFAULTMAP:
1873 	case OMP_CLAUSE_ORDER:
1874 	case OMP_CLAUSE_BIND:
1875 	case OMP_CLAUSE_USE_DEVICE_PTR:
1876 	case OMP_CLAUSE_USE_DEVICE_ADDR:
1877 	case OMP_CLAUSE_NONTEMPORAL:
1878 	case OMP_CLAUSE_ASYNC:
1879 	case OMP_CLAUSE_WAIT:
1880 	case OMP_CLAUSE_NUM_GANGS:
1881 	case OMP_CLAUSE_NUM_WORKERS:
1882 	case OMP_CLAUSE_VECTOR_LENGTH:
1883 	case OMP_CLAUSE_GANG:
1884 	case OMP_CLAUSE_WORKER:
1885 	case OMP_CLAUSE_VECTOR:
1886 	case OMP_CLAUSE_INDEPENDENT:
1887 	case OMP_CLAUSE_AUTO:
1888 	case OMP_CLAUSE_SEQ:
1889 	case OMP_CLAUSE_TILE:
1890 	case OMP_CLAUSE__SIMT_:
1891 	case OMP_CLAUSE_IF_PRESENT:
1892 	case OMP_CLAUSE_FINALIZE:
1893 	case OMP_CLAUSE_FILTER:
1894 	case OMP_CLAUSE__CONDTEMP_:
1895 	  break;
1896 
1897 	case OMP_CLAUSE__CACHE_:
1898 	case OMP_CLAUSE_NOHOST:
1899 	default:
1900 	  gcc_unreachable ();
1901 	}
1902     }
1903 
1904   gcc_checking_assert (!scan_array_reductions
1905 		       || !is_gimple_omp_oacc (ctx->stmt));
1906   if (scan_array_reductions)
1907     {
1908       for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
1909 	if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
1910 	     || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
1911 	     || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
1912 	    && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
1913 	  {
1914 	    omp_context *rctx = ctx;
1915 	    if (is_omp_target (ctx->stmt))
1916 	      rctx = ctx->outer;
1917 	    scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c), rctx);
1918 	    scan_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), rctx);
1919 	  }
1920 	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
1921 		 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
1922 	  scan_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
1923 	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
1924 		 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
1925 	  scan_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
1926     }
1927 }
1928 
1929 /* Create a new name for omp child function.  Returns an identifier. */
1930 
1931 static tree
create_omp_child_function_name(bool task_copy)1932 create_omp_child_function_name (bool task_copy)
1933 {
1934   return clone_function_name_numbered (current_function_decl,
1935 				       task_copy ? "_omp_cpyfn" : "_omp_fn");
1936 }
1937 
1938 /* Return true if CTX may belong to offloaded code: either if current function
1939    is offloaded, or any enclosing context corresponds to a target region.  */
1940 
1941 static bool
omp_maybe_offloaded_ctx(omp_context * ctx)1942 omp_maybe_offloaded_ctx (omp_context *ctx)
1943 {
1944   if (cgraph_node::get (current_function_decl)->offloadable)
1945     return true;
1946   for (; ctx; ctx = ctx->outer)
1947     if (is_gimple_omp_offloaded (ctx->stmt))
1948       return true;
1949   return false;
1950 }
1951 
1952 /* Build a decl for the omp child function.  It'll not contain a body
1953    yet, just the bare decl.  */
1954 
static void
create_omp_child_function (omp_context *ctx, bool task_copy)
{
  tree decl, type, name, t;

  name = create_omp_child_function_name (task_copy);
  /* A task copy function takes two pointers (destination and source data
     blocks); the regular outlined body takes a single data pointer.  */
  if (task_copy)
    type = build_function_type_list (void_type_node, ptr_type_node,
				     ptr_type_node, NULL_TREE);
  else
    type = build_function_type_list (void_type_node, ptr_type_node, NULL_TREE);

  decl = build_decl (gimple_location (ctx->stmt), FUNCTION_DECL, name, type);

  /* OpenACC constructs never use task copy functions.  */
  gcc_checking_assert (!is_gimple_omp_oacc (ctx->stmt)
		       || !task_copy);
  if (!task_copy)
    ctx->cb.dst_fn = decl;
  else
    gimple_omp_task_set_copy_fn (ctx->stmt, decl);

  TREE_STATIC (decl) = 1;
  TREE_USED (decl) = 1;
  DECL_ARTIFICIAL (decl) = 1;
  DECL_IGNORED_P (decl) = 0;
  TREE_PUBLIC (decl) = 0;
  DECL_UNINLINABLE (decl) = 1;
  DECL_EXTERNAL (decl) = 0;
  DECL_CONTEXT (decl) = NULL_TREE;
  DECL_INITIAL (decl) = make_node (BLOCK);
  BLOCK_SUPERCONTEXT (DECL_INITIAL (decl)) = decl;
  /* Start from the parent's attributes, then prune what must not be
     inherited by the outlined child.  */
  DECL_ATTRIBUTES (decl) = DECL_ATTRIBUTES (current_function_decl);
  /* Remove omp declare simd attribute from the new attributes.  */
  if (tree a = lookup_attribute ("omp declare simd", DECL_ATTRIBUTES (decl)))
    {
      /* Find the last "omp declare simd" attribute; everything after it
	 (A) is shared with the parent and can be reused as-is.  */
      while (tree a2 = lookup_attribute ("omp declare simd", TREE_CHAIN (a)))
	a = a2;
      a = TREE_CHAIN (a);
      /* Copy the prefix of the list up to A, dropping the "omp declare
	 simd" entries, so the parent's attribute list is not modified.  */
      for (tree *p = &DECL_ATTRIBUTES (decl); *p != a;)
	if (is_attribute_p ("omp declare simd", get_attribute_name (*p)))
	  *p = TREE_CHAIN (*p);
	else
	  {
	    tree chain = TREE_CHAIN (*p);
	    *p = copy_node (*p);
	    p = &TREE_CHAIN (*p);
	    *p = chain;
	  }
    }
  /* The child inherits the parent's optimization/target settings and
     versioning state.  */
  DECL_FUNCTION_SPECIFIC_OPTIMIZATION (decl)
    = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (current_function_decl);
  DECL_FUNCTION_SPECIFIC_TARGET (decl)
    = DECL_FUNCTION_SPECIFIC_TARGET (current_function_decl);
  DECL_FUNCTION_VERSIONED (decl)
    = DECL_FUNCTION_VERSIONED (current_function_decl);

  if (omp_maybe_offloaded_ctx (ctx))
    {
      cgraph_node::get_create (decl)->offloadable = 1;
      if (ENABLE_OFFLOADING)
	g->have_offload = true;
    }

  if (cgraph_node::get_create (decl)->offloadable)
    {
      /* An offloaded region body becomes a target entrypoint; other
	 offloadable children are only declare-target functions.  */
      const char *target_attr = (is_gimple_omp_offloaded (ctx->stmt)
				 ? "omp target entrypoint"
				 : "omp declare target");
      if (lookup_attribute ("omp declare target",
			    DECL_ATTRIBUTES (current_function_decl)))
	{
	  if (is_gimple_omp_offloaded (ctx->stmt))
	    DECL_ATTRIBUTES (decl)
	      = remove_attribute ("omp declare target",
				  copy_list (DECL_ATTRIBUTES (decl)));
	  else
	    /* Already inherited "omp declare target"; nothing to add.  */
	    target_attr = NULL;
	}
      if (target_attr)
	DECL_ATTRIBUTES (decl)
	  = tree_cons (get_identifier (target_attr),
		       NULL_TREE, DECL_ATTRIBUTES (decl));
    }

  /* The child function returns void.  */
  t = build_decl (DECL_SOURCE_LOCATION (decl),
		  RESULT_DECL, NULL_TREE, void_type_node);
  DECL_ARTIFICIAL (t) = 1;
  DECL_IGNORED_P (t) = 1;
  DECL_CONTEXT (t) = decl;
  DECL_RESULT (decl) = t;

  /* Build the incoming data-block parameter (.omp_data_i).  */
  tree data_name = get_identifier (".omp_data_i");
  t = build_decl (DECL_SOURCE_LOCATION (decl), PARM_DECL, data_name,
		  ptr_type_node);
  DECL_ARTIFICIAL (t) = 1;
  DECL_NAMELESS (t) = 1;
  DECL_ARG_TYPE (t) = ptr_type_node;
  DECL_CONTEXT (t) = current_function_decl;
  TREE_USED (t) = 1;
  TREE_READONLY (t) = 1;
  DECL_ARGUMENTS (decl) = t;
  if (!task_copy)
    ctx->receiver_decl = t;
  else
    {
      /* Task copy functions additionally take the outgoing data block
	 (.omp_data_o), chained in front of .omp_data_i.  */
      t = build_decl (DECL_SOURCE_LOCATION (decl),
		      PARM_DECL, get_identifier (".omp_data_o"),
		      ptr_type_node);
      DECL_ARTIFICIAL (t) = 1;
      DECL_NAMELESS (t) = 1;
      DECL_ARG_TYPE (t) = ptr_type_node;
      DECL_CONTEXT (t) = current_function_decl;
      TREE_USED (t) = 1;
      TREE_ADDRESSABLE (t) = 1;
      DECL_CHAIN (t) = DECL_ARGUMENTS (decl);
      DECL_ARGUMENTS (decl) = t;
    }

  /* Allocate memory for the function structure.  The call to
     allocate_struct_function clobbers CFUN, so we need to restore
     it afterward.  */
  push_struct_function (decl);
  cfun->function_end_locus = gimple_location (ctx->stmt);
  init_tree_ssa (cfun);
  pop_cfun ();
}
2081 
2082 /* Callback for walk_gimple_seq.  Check if combined parallel
2083    contains gimple_omp_for_combined_into_p OMP_FOR.  */
2084 
2085 tree
omp_find_combined_for(gimple_stmt_iterator * gsi_p,bool * handled_ops_p,struct walk_stmt_info * wi)2086 omp_find_combined_for (gimple_stmt_iterator *gsi_p,
2087 		       bool *handled_ops_p,
2088 		       struct walk_stmt_info *wi)
2089 {
2090   gimple *stmt = gsi_stmt (*gsi_p);
2091 
2092   *handled_ops_p = true;
2093   switch (gimple_code (stmt))
2094     {
2095     WALK_SUBSTMTS;
2096 
2097     case GIMPLE_OMP_FOR:
2098       if (gimple_omp_for_combined_into_p (stmt)
2099 	  && gimple_omp_for_kind (stmt)
2100 	     == *(const enum gf_mask *) (wi->info))
2101 	{
2102 	  wi->info = stmt;
2103 	  return integer_zero_node;
2104 	}
2105       break;
2106     default:
2107       break;
2108     }
2109   return NULL;
2110 }
2111 
2112 /* Add _LOOPTEMP_/_REDUCTEMP_ clauses on OpenMP parallel or task.  */
2113 
static void
add_taskreg_looptemp_clauses (enum gf_mask msk, gimple *stmt,
			      omp_context *outer_ctx)
{
  struct walk_stmt_info wi;

  /* Locate the inner GIMPLE_OMP_FOR of kind MSK that this combined
     parallel/taskloop was split from.  omp_find_combined_for replaces
     WI.INFO with the statement on success, so an unchanged WI.INFO
     means no combined loop was found.  */
  memset (&wi, 0, sizeof (wi));
  wi.val_only = true;
  wi.info = (void *) &msk;
  walk_gimple_seq (gimple_omp_body (stmt), omp_find_combined_for, NULL, &wi);
  if (wi.info != (void *) &msk)
    {
      gomp_for *for_stmt = as_a <gomp_for *> ((gimple *) wi.info);
      struct omp_for_data fd;
      omp_extract_for_data (for_stmt, &fd, NULL);
      /* We need two temporaries with fd.loop.v type (istart/iend)
	 and then (fd.collapse - 1) temporaries with the same
	 type for count2 ... countN-1 vars if not constant.  */
      size_t count = 2, i;
      tree type = fd.iter_type;
      if (fd.collapse > 1
	  && TREE_CODE (fd.loop.n2) != INTEGER_CST)
	{
	  count += fd.collapse - 1;
	  /* If there are lastprivate clauses on the inner
	     GIMPLE_OMP_FOR, add one more temporary for the total number
	     of iterations (product of count1 ... countN-1).  */
	  if (omp_find_clause (gimple_omp_for_clauses (for_stmt),
			       OMP_CLAUSE_LASTPRIVATE)
	      || (msk == GF_OMP_FOR_KIND_FOR
		  && omp_find_clause (gimple_omp_parallel_clauses (stmt),
				      OMP_CLAUSE_LASTPRIVATE)))
	    {
	      tree temp = create_tmp_var (type);
	      tree c = build_omp_clause (UNKNOWN_LOCATION,
					 OMP_CLAUSE__LOOPTEMP_);
	      insert_decl_map (&outer_ctx->cb, temp, temp);
	      OMP_CLAUSE_DECL (c) = temp;
	      OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
	      gimple_omp_taskreg_set_clauses (stmt, c);
	    }
	  /* For a non-rectangular loop nest where exactly one loop's
	     bounds depend on the immediately preceding index with a
	     signed type, add one iter-type temporary plus three
	     temporaries of the index type.  */
	  if (fd.non_rect
	      && fd.last_nonrect == fd.first_nonrect + 1)
	    if (tree v = gimple_omp_for_index (for_stmt, fd.last_nonrect))
	      if (!TYPE_UNSIGNED (TREE_TYPE (v)))
		{
		  v = gimple_omp_for_index (for_stmt, fd.first_nonrect);
		  tree type2 = TREE_TYPE (v);
		  count++;
		  for (i = 0; i < 3; i++)
		    {
		      tree temp = create_tmp_var (type2);
		      tree c = build_omp_clause (UNKNOWN_LOCATION,
						 OMP_CLAUSE__LOOPTEMP_);
		      insert_decl_map (&outer_ctx->cb, temp, temp);
		      OMP_CLAUSE_DECL (c) = temp;
		      OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
		      gimple_omp_taskreg_set_clauses (stmt, c);
		    }
		}
	}
      /* Add the COUNT _looptemp_ clauses of iterator type computed above.  */
      for (i = 0; i < count; i++)
	{
	  tree temp = create_tmp_var (type);
	  tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
	  insert_decl_map (&outer_ctx->cb, temp, temp);
	  OMP_CLAUSE_DECL (c) = temp;
	  OMP_CLAUSE_CHAIN (c) = gimple_omp_taskreg_clauses (stmt);
	  gimple_omp_taskreg_set_clauses (stmt, c);
	}
    }
  /* A taskloop with reductions additionally needs a _reductemp_ pointer
     temporary for the reduction bookkeeping data.  */
  if (msk == GF_OMP_FOR_KIND_TASKLOOP
      && omp_find_clause (gimple_omp_task_clauses (stmt),
			  OMP_CLAUSE_REDUCTION))
    {
      tree type = build_pointer_type (pointer_sized_int_node);
      tree temp = create_tmp_var (type);
      tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
      insert_decl_map (&outer_ctx->cb, temp, temp);
      OMP_CLAUSE_DECL (c) = temp;
      OMP_CLAUSE_CHAIN (c) = gimple_omp_task_clauses (stmt);
      gimple_omp_task_set_clauses (stmt, c);
    }
}
2198 
2199 /* Scan an OpenMP parallel directive.  */
2200 
static void
scan_omp_parallel (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
{
  omp_context *ctx;
  tree name;
  gomp_parallel *stmt = as_a <gomp_parallel *> (gsi_stmt (*gsi));

  /* Ignore parallel directives with empty bodies, unless there
     are copyin clauses.  */
  if (optimize > 0
      && empty_body_p (gimple_omp_body (stmt))
      && omp_find_clause (gimple_omp_parallel_clauses (stmt),
			  OMP_CLAUSE_COPYIN) == NULL)
    {
      gsi_replace (gsi, gimple_build_nop (), false);
      return;
    }

  /* Combined parallel-for needs _looptemp_ clauses for the inner loop.  */
  if (gimple_omp_parallel_combined_p (stmt))
    add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_FOR, stmt, outer_ctx);
  /* If any reduction clause has the task modifier, prepend a single
     _reductemp_ clause; one suffices for all task reductions.  */
  for (tree c = omp_find_clause (gimple_omp_parallel_clauses (stmt),
				 OMP_CLAUSE_REDUCTION);
       c; c = omp_find_clause (OMP_CLAUSE_CHAIN (c), OMP_CLAUSE_REDUCTION))
    if (OMP_CLAUSE_REDUCTION_TASK (c))
      {
	tree type = build_pointer_type (pointer_sized_int_node);
	tree temp = create_tmp_var (type);
	/* Note: this inner C deliberately shadows the loop variable;
	   the loop is exited right after via break.  */
	tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
	if (outer_ctx)
	  insert_decl_map (&outer_ctx->cb, temp, temp);
	OMP_CLAUSE_DECL (c) = temp;
	OMP_CLAUSE_CHAIN (c) = gimple_omp_parallel_clauses (stmt);
	gimple_omp_parallel_set_clauses (stmt, c);
	break;
      }
    else if (OMP_CLAUSE_CHAIN (c) == NULL_TREE)
      break;

  /* Create the context, the data-sharing record type (.omp_data_s) and
     the child function, then scan clauses and body in that context.  */
  ctx = new_omp_context (stmt, outer_ctx);
  taskreg_contexts.safe_push (ctx);
  if (taskreg_nesting_level > 1)
    ctx->is_nested = true;
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  name = create_tmp_var_name (".omp_data_s");
  name = build_decl (gimple_location (stmt),
		     TYPE_DECL, name, ctx->record_type);
  DECL_ARTIFICIAL (name) = 1;
  DECL_NAMELESS (name) = 1;
  TYPE_NAME (ctx->record_type) = name;
  TYPE_ARTIFICIAL (ctx->record_type) = 1;
  create_omp_child_function (ctx, false);
  gimple_omp_parallel_set_child_fn (stmt, ctx->cb.dst_fn);

  scan_sharing_clauses (gimple_omp_parallel_clauses (stmt), ctx);
  scan_omp (gimple_omp_body_ptr (stmt), ctx);

  /* If nothing was shared, no data block needs to be passed at all.  */
  if (TYPE_FIELDS (ctx->record_type) == NULL)
    ctx->record_type = ctx->receiver_decl = NULL;
}
2261 
2262 /* Scan an OpenMP task directive.  */
2263 
static void
scan_omp_task (gimple_stmt_iterator *gsi, omp_context *outer_ctx)
{
  omp_context *ctx;
  tree name, t;
  gomp_task *stmt = as_a <gomp_task *> (gsi_stmt (*gsi));

  /* Ignore task directives with empty bodies, unless they have depend
     clause.  */
  if (optimize > 0
      && gimple_omp_body (stmt)
      && empty_body_p (gimple_omp_body (stmt))
      && !omp_find_clause (gimple_omp_task_clauses (stmt), OMP_CLAUSE_DEPEND))
    {
      gsi_replace (gsi, gimple_build_nop (), false);
      return;
    }

  /* A taskloop needs _looptemp_/_reductemp_ clauses for the inner loop.  */
  if (gimple_omp_task_taskloop_p (stmt))
    add_taskreg_looptemp_clauses (GF_OMP_FOR_KIND_TASKLOOP, stmt, outer_ctx);

  ctx = new_omp_context (stmt, outer_ctx);

  /* A taskwait with depend clauses has no body/child function; only its
     clauses need scanning.  */
  if (gimple_omp_task_taskwait_p (stmt))
    {
      scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);
      return;
    }

  /* Build the data-sharing record type (.omp_data_s) and the child
     function, as for parallel regions.  */
  taskreg_contexts.safe_push (ctx);
  if (taskreg_nesting_level > 1)
    ctx->is_nested = true;
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  name = create_tmp_var_name (".omp_data_s");
  name = build_decl (gimple_location (stmt),
		     TYPE_DECL, name, ctx->record_type);
  DECL_ARTIFICIAL (name) = 1;
  DECL_NAMELESS (name) = 1;
  TYPE_NAME (ctx->record_type) = name;
  TYPE_ARTIFICIAL (ctx->record_type) = 1;
  create_omp_child_function (ctx, false);
  gimple_omp_task_set_child_fn (stmt, ctx->cb.dst_fn);

  scan_sharing_clauses (gimple_omp_task_clauses (stmt), ctx);

  /* If scanning created a sender record (.omp_data_a), a task copy
     function is needed to marshal the data.  */
  if (ctx->srecord_type)
    {
      name = create_tmp_var_name (".omp_data_a");
      name = build_decl (gimple_location (stmt),
			 TYPE_DECL, name, ctx->srecord_type);
      DECL_ARTIFICIAL (name) = 1;
      DECL_NAMELESS (name) = 1;
      TYPE_NAME (ctx->srecord_type) = name;
      TYPE_ARTIFICIAL (ctx->srecord_type) = 1;
      create_omp_child_function (ctx, true);
    }

  scan_omp (gimple_omp_body_ptr (stmt), ctx);

  /* If nothing was shared, pass no data block: size 0, alignment 1.  */
  if (TYPE_FIELDS (ctx->record_type) == NULL)
    {
      ctx->record_type = ctx->receiver_decl = NULL;
      t = build_int_cst (long_integer_type_node, 0);
      gimple_omp_task_set_arg_size (stmt, t);
      t = build_int_cst (long_integer_type_node, 1);
      gimple_omp_task_set_arg_align (stmt, t);
    }
}
2333 
2334 /* Helper function for finish_taskreg_scan, called through walk_tree.
2335    If maybe_lookup_decl_in_outer_context returns non-NULL for some
2336    tree, replace it in the expression.  */
2337 
2338 static tree
finish_taskreg_remap(tree * tp,int * walk_subtrees,void * data)2339 finish_taskreg_remap (tree *tp, int *walk_subtrees, void *data)
2340 {
2341   if (VAR_P (*tp))
2342     {
2343       omp_context *ctx = (omp_context *) data;
2344       tree t = maybe_lookup_decl_in_outer_ctx (*tp, ctx);
2345       if (t != *tp)
2346 	{
2347 	  if (DECL_HAS_VALUE_EXPR_P (t))
2348 	    t = unshare_expr (DECL_VALUE_EXPR (t));
2349 	  *tp = t;
2350 	}
2351       *walk_subtrees = 0;
2352     }
2353   else if (IS_TYPE_OR_DECL_P (*tp))
2354     *walk_subtrees = 0;
2355   return NULL_TREE;
2356 }
2357 
2358 /* If any decls have been made addressable during scan_omp,
2359    adjust their fields if needed, and layout record types
2360    of parallel/task constructs.  */
2361 
static void
finish_taskreg_scan (omp_context *ctx)
{
  if (ctx->record_type == NULL_TREE)
    return;

  /* If any task_shared_vars were needed, verify all
     OMP_CLAUSE_SHARED clauses on GIMPLE_OMP_{PARALLEL,TASK,TEAMS}
     statements if use_pointer_for_field hasn't changed
     because of that.  If it did, update field types now.  */
  if (task_shared_vars)
    {
      tree c;

      for (c = gimple_omp_taskreg_clauses (ctx->stmt);
	   c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
	    && !OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	  {
	    tree decl = OMP_CLAUSE_DECL (c);

	    /* Global variables don't need to be copied,
	       the receiver side will use them directly.  */
	    if (is_global_var (maybe_lookup_decl_in_outer_ctx (decl, ctx)))
	      continue;
	    if (!bitmap_bit_p (task_shared_vars, DECL_UID (decl))
		|| !use_pointer_for_field (decl, ctx))
	      continue;
	    tree field = lookup_field (decl, ctx);
	    /* Already a pointer to the decl's type: nothing to fix.  */
	    if (TREE_CODE (TREE_TYPE (field)) == POINTER_TYPE
		&& TREE_TYPE (TREE_TYPE (field)) == TREE_TYPE (decl))
	      continue;
	    /* Turn the by-value field into a pointer field and reset
	       volatility/alignment accordingly.  */
	    TREE_TYPE (field) = build_pointer_type (TREE_TYPE (decl));
	    TREE_THIS_VOLATILE (field) = 0;
	    DECL_USER_ALIGN (field) = 0;
	    SET_DECL_ALIGN (field, TYPE_ALIGN (TREE_TYPE (field)));
	    if (TYPE_ALIGN (ctx->record_type) < DECL_ALIGN (field))
	      SET_TYPE_ALIGN (ctx->record_type, DECL_ALIGN (field));
	    /* Keep the sender-side record field in sync.  */
	    if (ctx->srecord_type)
	      {
		tree sfield = lookup_sfield (decl, ctx);
		TREE_TYPE (sfield) = TREE_TYPE (field);
		TREE_THIS_VOLATILE (sfield) = 0;
		DECL_USER_ALIGN (sfield) = 0;
		SET_DECL_ALIGN (sfield, DECL_ALIGN (field));
		if (TYPE_ALIGN (ctx->srecord_type) < DECL_ALIGN (sfield))
		  SET_TYPE_ALIGN (ctx->srecord_type, DECL_ALIGN (sfield));
	      }
	  }
    }

  if (gimple_code (ctx->stmt) == GIMPLE_OMP_PARALLEL)
    {
      tree clauses = gimple_omp_parallel_clauses (ctx->stmt);
      tree c = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
      if (c)
	{
	  /* Move the _reductemp_ clause first.  GOMP_parallel_reductions
	     expects to find it at the start of data.  */
	  tree f = lookup_field (OMP_CLAUSE_DECL (c), ctx);
	  tree *p = &TYPE_FIELDS (ctx->record_type);
	  /* Unlink F from the field chain ...  */
	  while (*p)
	    if (*p == f)
	      {
		*p = DECL_CHAIN (*p);
		break;
	      }
	    else
	      p = &DECL_CHAIN (*p);
	  /* ... and relink it at the front.  */
	  DECL_CHAIN (f) = TYPE_FIELDS (ctx->record_type);
	  TYPE_FIELDS (ctx->record_type) = f;
	}
      layout_type (ctx->record_type);
      fixup_child_record_type (ctx);
    }
  else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
    {
      layout_type (ctx->record_type);
      fixup_child_record_type (ctx);
    }
  else
    {
      /* GIMPLE_OMP_TASK: rearrange fields into the layout GOMP_task and
	 GOMP_taskloop expect, then compute argument size/alignment.  */
      location_t loc = gimple_location (ctx->stmt);
      tree *p, vla_fields = NULL_TREE, *q = &vla_fields;
      tree detach_clause
	= omp_find_clause (gimple_omp_task_clauses (ctx->stmt),
			   OMP_CLAUSE_DETACH);
      /* Move VLA fields to the end.  */
      p = &TYPE_FIELDS (ctx->record_type);
      while (*p)
	if (!TYPE_SIZE_UNIT (TREE_TYPE (*p))
	    || ! TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (*p))))
	  {
	    *q = *p;
	    *p = TREE_CHAIN (*p);
	    TREE_CHAIN (*q) = NULL_TREE;
	    q = &TREE_CHAIN (*q);
	  }
	else
	  p = &DECL_CHAIN (*p);
      *p = vla_fields;
      if (gimple_omp_task_taskloop_p (ctx->stmt))
	{
	  /* Move fields corresponding to first and second _looptemp_
	     clause first.  These are filled by GOMP_taskloop
	     and thus need to be in specific positions.  */
	  tree clauses = gimple_omp_task_clauses (ctx->stmt);
	  tree c1 = omp_find_clause (clauses, OMP_CLAUSE__LOOPTEMP_);
	  tree c2 = omp_find_clause (OMP_CLAUSE_CHAIN (c1),
				     OMP_CLAUSE__LOOPTEMP_);
	  tree c3 = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
	  tree f1 = lookup_field (OMP_CLAUSE_DECL (c1), ctx);
	  tree f2 = lookup_field (OMP_CLAUSE_DECL (c2), ctx);
	  tree f3 = c3 ? lookup_field (OMP_CLAUSE_DECL (c3), ctx) : NULL_TREE;
	  /* Unlink F1/F2/F3 from the chain, then relink them at the
	     front in order F1, F2[, F3].  */
	  p = &TYPE_FIELDS (ctx->record_type);
	  while (*p)
	    if (*p == f1 || *p == f2 || *p == f3)
	      *p = DECL_CHAIN (*p);
	    else
	      p = &DECL_CHAIN (*p);
	  DECL_CHAIN (f1) = f2;
	  if (c3)
	    {
	      DECL_CHAIN (f2) = f3;
	      DECL_CHAIN (f3) = TYPE_FIELDS (ctx->record_type);
	    }
	  else
	    DECL_CHAIN (f2) = TYPE_FIELDS (ctx->record_type);
	  TYPE_FIELDS (ctx->record_type) = f1;
	  /* Apply the same reordering to the sender-side record.  */
	  if (ctx->srecord_type)
	    {
	      f1 = lookup_sfield (OMP_CLAUSE_DECL (c1), ctx);
	      f2 = lookup_sfield (OMP_CLAUSE_DECL (c2), ctx);
	      if (c3)
		f3 = lookup_sfield (OMP_CLAUSE_DECL (c3), ctx);
	      p = &TYPE_FIELDS (ctx->srecord_type);
	      while (*p)
		if (*p == f1 || *p == f2 || *p == f3)
		  *p = DECL_CHAIN (*p);
		else
		  p = &DECL_CHAIN (*p);
	      DECL_CHAIN (f1) = f2;
	      DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
	      if (c3)
		{
		  DECL_CHAIN (f2) = f3;
		  DECL_CHAIN (f3) = TYPE_FIELDS (ctx->srecord_type);
		}
	      else
		DECL_CHAIN (f2) = TYPE_FIELDS (ctx->srecord_type);
	      TYPE_FIELDS (ctx->srecord_type) = f1;
	    }
	}
      if (detach_clause)
	{
	  tree c, field;

	  /* Look for a firstprivate clause with the detach event handle.  */
	  for (c = gimple_omp_taskreg_clauses (ctx->stmt);
	       c; c = OMP_CLAUSE_CHAIN (c))
	    {
	      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
		continue;
	      if (maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_DECL (c), ctx)
		  == OMP_CLAUSE_DECL (detach_clause))
		break;
	    }

	  gcc_assert (c);
	  field = lookup_field (OMP_CLAUSE_DECL (c), ctx);

	  /* Move field corresponding to the detach clause first.
	     This is filled by GOMP_task and needs to be in a
	     specific position.  */
	  p = &TYPE_FIELDS (ctx->record_type);
	  while (*p)
	    if (*p == field)
	      *p = DECL_CHAIN (*p);
	    else
	      p = &DECL_CHAIN (*p);
	  DECL_CHAIN (field) = TYPE_FIELDS (ctx->record_type);
	  TYPE_FIELDS (ctx->record_type) = field;
	  /* Same for the sender-side record.  */
	  if (ctx->srecord_type)
	    {
	      field = lookup_sfield (OMP_CLAUSE_DECL (c), ctx);
	      p = &TYPE_FIELDS (ctx->srecord_type);
	      while (*p)
		if (*p == field)
		  *p = DECL_CHAIN (*p);
		else
		  p = &DECL_CHAIN (*p);
	      DECL_CHAIN (field) = TYPE_FIELDS (ctx->srecord_type);
	      TYPE_FIELDS (ctx->srecord_type) = field;
	    }
	}
      layout_type (ctx->record_type);
      fixup_child_record_type (ctx);
      if (ctx->srecord_type)
	layout_type (ctx->srecord_type);
      /* Record the data block size; remap any VLA sizes so the
	 expression is valid in the outer context.  */
      tree t = fold_convert_loc (loc, long_integer_type_node,
				 TYPE_SIZE_UNIT (ctx->record_type));
      if (TREE_CODE (t) != INTEGER_CST)
	{
	  t = unshare_expr (t);
	  walk_tree (&t, finish_taskreg_remap, ctx, NULL);
	}
      gimple_omp_task_set_arg_size (ctx->stmt, t);
      t = build_int_cst (long_integer_type_node,
			 TYPE_ALIGN_UNIT (ctx->record_type));
      gimple_omp_task_set_arg_align (ctx->stmt, t);
    }
}
2574 
2575 /* Find the enclosing offload context.  */
2576 
2577 static omp_context *
enclosing_target_ctx(omp_context * ctx)2578 enclosing_target_ctx (omp_context *ctx)
2579 {
2580   for (; ctx; ctx = ctx->outer)
2581     if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET)
2582       break;
2583 
2584   return ctx;
2585 }
2586 
2587 /* Return whether CTX's parent compute construct is an OpenACC 'kernels'
2588    construct.
2589    (This doesn't include OpenACC 'kernels' decomposed parts.)  */
2590 
2591 static bool
ctx_in_oacc_kernels_region(omp_context * ctx)2592 ctx_in_oacc_kernels_region (omp_context *ctx)
2593 {
2594   for (;ctx != NULL; ctx = ctx->outer)
2595     {
2596       gimple *stmt = ctx->stmt;
2597       if (gimple_code (stmt) == GIMPLE_OMP_TARGET
2598 	  && gimple_omp_target_kind (stmt) == GF_OMP_TARGET_KIND_OACC_KERNELS)
2599 	return true;
2600     }
2601 
2602   return false;
2603 }
2604 
2605 /* Check the parallelism clauses inside a OpenACC 'kernels' region.
2606    (This doesn't include OpenACC 'kernels' decomposed parts.)
2607    Until kernels handling moves to use the same loop indirection
2608    scheme as parallel, we need to do this checking early.  */
2609 
2610 static unsigned
check_oacc_kernel_gwv(gomp_for * stmt,omp_context * ctx)2611 check_oacc_kernel_gwv (gomp_for *stmt, omp_context *ctx)
2612 {
2613   bool checking = true;
2614   unsigned outer_mask = 0;
2615   unsigned this_mask = 0;
2616   bool has_seq = false, has_auto = false;
2617 
2618   if (ctx->outer)
2619     outer_mask = check_oacc_kernel_gwv (NULL,  ctx->outer);
2620   if (!stmt)
2621     {
2622       checking = false;
2623       if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR)
2624 	return outer_mask;
2625       stmt = as_a <gomp_for *> (ctx->stmt);
2626     }
2627 
2628   for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
2629     {
2630       switch (OMP_CLAUSE_CODE (c))
2631 	{
2632 	case OMP_CLAUSE_GANG:
2633 	  this_mask |= GOMP_DIM_MASK (GOMP_DIM_GANG);
2634 	  break;
2635 	case OMP_CLAUSE_WORKER:
2636 	  this_mask |= GOMP_DIM_MASK (GOMP_DIM_WORKER);
2637 	  break;
2638 	case OMP_CLAUSE_VECTOR:
2639 	  this_mask |= GOMP_DIM_MASK (GOMP_DIM_VECTOR);
2640 	  break;
2641 	case OMP_CLAUSE_SEQ:
2642 	  has_seq = true;
2643 	  break;
2644 	case OMP_CLAUSE_AUTO:
2645 	  has_auto = true;
2646 	  break;
2647 	default:
2648 	  break;
2649 	}
2650     }
2651 
2652   if (checking)
2653     {
2654       if (has_seq && (this_mask || has_auto))
2655 	error_at (gimple_location (stmt), "%<seq%> overrides other"
2656 		  " OpenACC loop specifiers");
2657       else if (has_auto && this_mask)
2658 	error_at (gimple_location (stmt), "%<auto%> conflicts with other"
2659 		  " OpenACC loop specifiers");
2660 
2661       if (this_mask & outer_mask)
2662 	error_at (gimple_location (stmt), "inner loop uses same"
2663 		  " OpenACC parallelism as containing loop");
2664     }
2665 
2666   return outer_mask | this_mask;
2667 }
2668 
/* Scan a GIMPLE_OMP_FOR.  Creates and returns the new omp_context for
   STMT nested in OUTER_CTX.  For OpenACC loops this additionally
   validates gang/worker/vector clause arguments and reduction-clause
   consistency across nested loops, and for loops inside an OpenACC
   'kernels' region strips reduction clauses (not handled yet there).  */

static omp_context *
scan_omp_for (gomp_for *stmt, omp_context *outer_ctx)
{
  omp_context *ctx;
  size_t i;
  tree clauses = gimple_omp_for_clauses (stmt);

  ctx = new_omp_context (stmt, outer_ctx);

  if (is_gimple_omp_oacc (stmt))
    {
      omp_context *tgt = enclosing_target_ctx (outer_ctx);

      /* Outside of an OpenACC 'kernels' region, gang/worker/vector
	 clauses must not carry an argument expression; diagnose any
	 that do.  */
      if (!(tgt && is_oacc_kernels (tgt)))
	for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
	  {
	    tree c_op0;
	    switch (OMP_CLAUSE_CODE (c))
	      {
	      case OMP_CLAUSE_GANG:
		c_op0 = OMP_CLAUSE_GANG_EXPR (c);
		break;

	      case OMP_CLAUSE_WORKER:
		c_op0 = OMP_CLAUSE_WORKER_EXPR (c);
		break;

	      case OMP_CLAUSE_VECTOR:
		c_op0 = OMP_CLAUSE_VECTOR_EXPR (c);
		break;

	      default:
		continue;
	      }

	    if (c_op0)
	      {
		/* By construction, this is impossible for OpenACC 'kernels'
		   decomposed parts.  */
		gcc_assert (!(tgt && is_oacc_kernels_decomposed_part (tgt)));

		error_at (OMP_CLAUSE_LOCATION (c),
			  "argument not permitted on %qs clause",
			  omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
		/* Point at the enclosing compute construct, or at the
		   OpenACC routine if the loop is orphaned inside one.  */
		if (tgt)
		  inform (gimple_location (tgt->stmt),
			  "enclosing parent compute construct");
		else if (oacc_get_fn_attrib (current_function_decl))
		  inform (DECL_SOURCE_LOCATION (current_function_decl),
			  "enclosing routine");
		else
		  gcc_unreachable ();
	      }
	  }

      if (tgt && is_oacc_kernels (tgt))
	check_oacc_kernel_gwv (stmt, ctx);

      /* Collect all variables named in reductions on this loop.  Ensure
	 that, if this loop has a reduction on some variable v, and there is
	 a reduction on v somewhere in an outer context, then there is a
	 reduction on v on all intervening loops as well.  */
      tree local_reduction_clauses = NULL;
      for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
	{
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
	    local_reduction_clauses
	      = tree_cons (NULL, c, local_reduction_clauses);
	}
      /* Lazily inherit the accumulated outer reduction list from the
	 enclosing context on first use.  */
      if (ctx->outer_reduction_clauses == NULL && ctx->outer != NULL)
	ctx->outer_reduction_clauses
	  = chainon (unshare_expr (ctx->outer->local_reduction_clauses),
		     ctx->outer->outer_reduction_clauses);
      tree outer_reduction_clauses = ctx->outer_reduction_clauses;
      tree local_iter = local_reduction_clauses;
      for (; local_iter; local_iter = TREE_CHAIN (local_iter))
	{
	  tree local_clause = TREE_VALUE (local_iter);
	  tree local_var = OMP_CLAUSE_DECL (local_clause);
	  tree_code local_op = OMP_CLAUSE_REDUCTION_CODE (local_clause);
	  bool have_outer_reduction = false;
	  tree ctx_iter = outer_reduction_clauses;
	  for (; ctx_iter; ctx_iter = TREE_CHAIN (ctx_iter))
	    {
	      tree outer_clause = TREE_VALUE (ctx_iter);
	      tree outer_var = OMP_CLAUSE_DECL (outer_clause);
	      tree_code outer_op = OMP_CLAUSE_REDUCTION_CODE (outer_clause);
	      /* Same variable reduced with a different operation in an
		 enclosing loop: warn, but keep going.  */
	      if (outer_var == local_var && outer_op != local_op)
		{
		  warning_at (OMP_CLAUSE_LOCATION (local_clause), 0,
			      "conflicting reduction operations for %qE",
			      local_var);
		  inform (OMP_CLAUSE_LOCATION (outer_clause),
			  "location of the previous reduction for %qE",
			  outer_var);
		}
	      if (outer_var == local_var)
		{
		  have_outer_reduction = true;
		  break;
		}
	    }
	  if (have_outer_reduction)
	    {
	      /* There is a reduction on outer_var both on this loop and on
		 some enclosing loop.  Walk up the context tree until such a
		 loop with a reduction on outer_var is found, and complain
		 about all intervening loops that do not have such a
		 reduction.  */
	      struct omp_context *curr_loop = ctx->outer;
	      bool found = false;
	      while (curr_loop != NULL)
		{
		  tree curr_iter = curr_loop->local_reduction_clauses;
		  for (; curr_iter; curr_iter = TREE_CHAIN (curr_iter))
		    {
		      tree curr_clause = TREE_VALUE (curr_iter);
		      tree curr_var = OMP_CLAUSE_DECL (curr_clause);
		      if (curr_var == local_var)
			{
			  found = true;
			  break;
			}
		    }
		  if (!found)
		    warning_at (gimple_location (curr_loop->stmt), 0,
				"nested loop in reduction needs "
				"reduction clause for %qE",
				local_var);
		  else
		    break;
		  curr_loop = curr_loop->outer;
		}
	    }
	}
      /* Publish this loop's reduction lists for inner contexts.  */
      ctx->local_reduction_clauses = local_reduction_clauses;
      ctx->outer_reduction_clauses
	= chainon (unshare_expr (ctx->local_reduction_clauses),
		   ctx->outer_reduction_clauses);

      if (tgt && is_oacc_kernels (tgt))
	{
	  /* Strip out reductions, as they are not handled yet.  */
	  tree *prev_ptr = &clauses;

	  while (tree probe = *prev_ptr)
	    {
	      tree *next_ptr = &OMP_CLAUSE_CHAIN (probe);

	      /* Unlink REDUCTION clauses; keep everything else.  */
	      if (OMP_CLAUSE_CODE (probe) == OMP_CLAUSE_REDUCTION)
		*prev_ptr = *next_ptr;
	      else
		prev_ptr = next_ptr;
	    }

	  gimple_omp_for_set_clauses (stmt, clauses);
	}
    }

  scan_sharing_clauses (clauses, ctx);

  /* Scan the pre-body, then the index/bounds/increment operands of
     every collapsed dimension, then the loop body itself.  */
  scan_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);
  for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
    {
      scan_omp_op (gimple_omp_for_index_ptr (stmt, i), ctx);
      scan_omp_op (gimple_omp_for_initial_ptr (stmt, i), ctx);
      scan_omp_op (gimple_omp_for_final_ptr (stmt, i), ctx);
      scan_omp_op (gimple_omp_for_incr_ptr (stmt, i), ctx);
    }
  scan_omp (gimple_omp_body_ptr (stmt), ctx);
  return ctx;
}
2843 
/* Duplicate #pragma omp simd, one for SIMT, another one for SIMD.
   Replaces the statement at GSI with a bind containing:

     cond = IFN_GOMP_USE_SIMT ();
     if (cond != 0) goto lab1; else goto lab2;
     lab1: <copy of STMT with an added _simt_ clause>; goto lab3;
     lab2: <original STMT>;
     lab3:

   so the target can later select either variant.  */

static void
scan_omp_simd (gimple_stmt_iterator *gsi, gomp_for *stmt,
	       omp_context *outer_ctx)
{
  gbind *bind = gimple_build_bind (NULL, NULL, NULL);
  gsi_replace (gsi, bind, false);
  gimple_seq seq = NULL;
  /* COND receives the runtime/compile-time SIMT selector.  */
  gimple *g = gimple_build_call_internal (IFN_GOMP_USE_SIMT, 0);
  tree cond = create_tmp_var_raw (integer_type_node);
  DECL_CONTEXT (cond) = current_function_decl;
  DECL_SEEN_IN_BIND_EXPR_P (cond) = 1;
  gimple_bind_set_vars (bind, cond);
  gimple_call_set_lhs (g, cond);
  gimple_seq_add_stmt (&seq, g);
  tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
  tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
  tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
  g = gimple_build_cond (NE_EXPR, cond, integer_zero_node, lab1, lab2);
  gimple_seq_add_stmt (&seq, g);
  g = gimple_build_label (lab1);
  gimple_seq_add_stmt (&seq, g);
  /* Deep-copy the loop (with fresh local decls) for the SIMT arm and
     prepend an OMP_CLAUSE__SIMT_ marker to its clause list.  */
  gimple_seq new_seq = copy_gimple_seq_and_replace_locals (stmt);
  gomp_for *new_stmt = as_a <gomp_for *> (new_seq);
  tree clause = build_omp_clause (gimple_location (stmt), OMP_CLAUSE__SIMT_);
  OMP_CLAUSE_CHAIN (clause) = gimple_omp_for_clauses (new_stmt);
  gimple_omp_for_set_clauses (new_stmt, clause);
  gimple_seq_add_stmt (&seq, new_stmt);
  g = gimple_build_goto (lab3);
  gimple_seq_add_stmt (&seq, g);
  g = gimple_build_label (lab2);
  gimple_seq_add_stmt (&seq, g);
  /* The original loop becomes the non-SIMT arm.  */
  gimple_seq_add_stmt (&seq, stmt);
  g = gimple_build_label (lab3);
  gimple_seq_add_stmt (&seq, g);
  gimple_bind_set_body (bind, seq);
  update_stmt (bind);
  /* Scan both copies; record the SIMT twin on the original's context.  */
  scan_omp_for (new_stmt, outer_ctx);
  scan_omp_for (stmt, outer_ctx)->simt_stmt = new_stmt;
}
2885 
2886 static tree omp_find_scan (gimple_stmt_iterator *, bool *,
2887 			   struct walk_stmt_info *);
2888 static omp_context *maybe_lookup_ctx (gimple *);
2889 
/* Duplicate #pragma omp simd, one for the scan input phase loop and one
   for scan phase loop.  The original STMT (at GSI) becomes the body of a
   new GIMPLE_OMP_SCAN input statement, and a deep copy of it becomes the
   body of a following GIMPLE_OMP_SCAN scan statement; the separator
   GIMPLE_OMP_SCAN found inside each copy has one of its two halves
   emptied so each copy executes only its own phase.  */

static void
scan_omp_simd_scan (gimple_stmt_iterator *gsi, gomp_for *stmt,
		    omp_context *outer_ctx)
{
  /* The only change between inclusive and exclusive scan will be
     within the first simd loop, so just use inclusive in the
     worksharing loop.  */
  outer_ctx->scan_inclusive = true;
  tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_INCLUSIVE);
  OMP_CLAUSE_DECL (c) = integer_zero_node;

  /* INPUT_STMT wraps the input-phase loop, SCAN_STMT the scan-phase
     loop (the latter carries the inclusive clause built above).  */
  gomp_scan *input_stmt = gimple_build_omp_scan (NULL, NULL_TREE);
  gomp_scan *scan_stmt = gimple_build_omp_scan (NULL, c);
  gsi_replace (gsi, input_stmt, false);
  gimple_seq input_body = NULL;
  gimple_seq_add_stmt (&input_body, stmt);
  gsi_insert_after (gsi, scan_stmt, GSI_NEW_STMT);

  /* Locate the separating GIMPLE_OMP_SCAN inside the original loop
     body; the statement before it is the input phase, the scan
     statement itself carries the scan phase.  */
  gimple_stmt_iterator input1_gsi = gsi_none ();
  struct walk_stmt_info wi;
  memset (&wi, 0, sizeof (wi));
  wi.val_only = true;
  wi.info = (void *) &input1_gsi;
  walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), omp_find_scan, NULL, &wi);
  gcc_assert (!gsi_end_p (input1_gsi));

  gimple *input_stmt1 = gsi_stmt (input1_gsi);
  gsi_next (&input1_gsi);
  gimple *scan_stmt1 = gsi_stmt (input1_gsi);
  gcc_assert (scan_stmt1 && gimple_code (scan_stmt1) == GIMPLE_OMP_SCAN);
  /* For an exclusive scan the phase order is reversed.  */
  c = gimple_omp_scan_clauses (as_a <gomp_scan *> (scan_stmt1));
  if (c && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_EXCLUSIVE)
    std::swap (input_stmt1, scan_stmt1);

  /* Temporarily detach the input-phase body so the deep copy below
     only reproduces the structure we want, then restore it.  */
  gimple_seq input_body1 = gimple_omp_body (input_stmt1);
  gimple_omp_set_body (input_stmt1, NULL);

  gimple_seq scan_body = copy_gimple_seq_and_replace_locals (stmt);
  gomp_for *new_stmt = as_a <gomp_for *> (scan_body);

  gimple_omp_set_body (input_stmt1, input_body1);
  /* In the original loop, drop the scan phase: it will live in the
     copy only.  */
  gimple_omp_set_body (scan_stmt1, NULL);

  /* Find the same separator inside the copied loop.  */
  gimple_stmt_iterator input2_gsi = gsi_none ();
  memset (&wi, 0, sizeof (wi));
  wi.val_only = true;
  wi.info = (void *) &input2_gsi;
  walk_gimple_seq_mod (gimple_omp_body_ptr (new_stmt), omp_find_scan,
		       NULL, &wi);
  gcc_assert (!gsi_end_p (input2_gsi));

  gimple *input_stmt2 = gsi_stmt (input2_gsi);
  gsi_next (&input2_gsi);
  gimple *scan_stmt2 = gsi_stmt (input2_gsi);
  gcc_assert (scan_stmt2 && gimple_code (scan_stmt2) == GIMPLE_OMP_SCAN);
  if (c && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_EXCLUSIVE)
    std::swap (input_stmt2, scan_stmt2);

  /* In the copy, drop the input phase: only the scan phase remains.  */
  gimple_omp_set_body (input_stmt2, NULL);

  gimple_omp_set_body (input_stmt, input_body);
  gimple_omp_set_body (scan_stmt, scan_body);

  /* Scan both wrapper statements in fresh contexts.  */
  omp_context *ctx = new_omp_context (input_stmt, outer_ctx);
  scan_omp (gimple_omp_body_ptr (input_stmt), ctx);

  ctx = new_omp_context (scan_stmt, outer_ctx);
  scan_omp (gimple_omp_body_ptr (scan_stmt), ctx);

  /* Mark the copied loop's context as the simd scan phase.  */
  maybe_lookup_ctx (new_stmt)->for_simd_scan_phase = true;
}
2964 
2965 /* Scan an OpenMP sections directive.  */
2966 
2967 static void
scan_omp_sections(gomp_sections * stmt,omp_context * outer_ctx)2968 scan_omp_sections (gomp_sections *stmt, omp_context *outer_ctx)
2969 {
2970   omp_context *ctx;
2971 
2972   ctx = new_omp_context (stmt, outer_ctx);
2973   scan_sharing_clauses (gimple_omp_sections_clauses (stmt), ctx);
2974   scan_omp (gimple_omp_body_ptr (stmt), ctx);
2975 }
2976 
2977 /* Scan an OpenMP single directive.  */
2978 
2979 static void
scan_omp_single(gomp_single * stmt,omp_context * outer_ctx)2980 scan_omp_single (gomp_single *stmt, omp_context *outer_ctx)
2981 {
2982   omp_context *ctx;
2983   tree name;
2984 
2985   ctx = new_omp_context (stmt, outer_ctx);
2986   ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
2987   ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
2988   name = create_tmp_var_name (".omp_copy_s");
2989   name = build_decl (gimple_location (stmt),
2990 		     TYPE_DECL, name, ctx->record_type);
2991   TYPE_NAME (ctx->record_type) = name;
2992 
2993   scan_sharing_clauses (gimple_omp_single_clauses (stmt), ctx);
2994   scan_omp (gimple_omp_body_ptr (stmt), ctx);
2995 
2996   if (TYPE_FIELDS (ctx->record_type) == NULL)
2997     ctx->record_type = NULL;
2998   else
2999     layout_type (ctx->record_type);
3000 }
3001 
/* Scan a GIMPLE_OMP_TARGET.  Builds the context and the ".omp_data_t"
   record type used to marshal mapped data; for offloaded regions also
   creates the child (offload) function.  */

static void
scan_omp_target (gomp_target *stmt, omp_context *outer_ctx)
{
  omp_context *ctx;
  tree name;
  bool offloaded = is_gimple_omp_offloaded (stmt);
  tree clauses = gimple_omp_target_clauses (stmt);

  ctx = new_omp_context (stmt, outer_ctx);
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  name = create_tmp_var_name (".omp_data_t");
  name = build_decl (gimple_location (stmt),
		     TYPE_DECL, name, ctx->record_type);
  DECL_ARTIFICIAL (name) = 1;
  DECL_NAMELESS (name) = 1;
  TYPE_NAME (ctx->record_type) = name;
  TYPE_ARTIFICIAL (ctx->record_type) = 1;

  /* Offloaded regions get an outlined child function to run on the
     device.  */
  if (offloaded)
    {
      create_omp_child_function (ctx, false);
      gimple_omp_target_set_child_fn (stmt, ctx->cb.dst_fn);
    }

  scan_sharing_clauses (clauses, ctx);
  scan_omp (gimple_omp_body_ptr (stmt), ctx);

  if (TYPE_FIELDS (ctx->record_type) == NULL)
    /* Nothing was mapped, so no data record or receiver is needed.  */
    ctx->record_type = ctx->receiver_decl = NULL;
  else
    {
      /* Fields were pushed in reverse; restore declaration order.  */
      TYPE_FIELDS (ctx->record_type)
	= nreverse (TYPE_FIELDS (ctx->record_type));
      if (flag_checking)
	{
	  /* Verify every field has the same alignment — presumably so
	     the record layout is host/device independent; TODO confirm
	     against scan_sharing_clauses' field creation.  */
	  unsigned int align = DECL_ALIGN (TYPE_FIELDS (ctx->record_type));
	  for (tree field = TYPE_FIELDS (ctx->record_type);
	       field;
	       field = DECL_CHAIN (field))
	    gcc_assert (DECL_ALIGN (field) == align);
	}
      layout_type (ctx->record_type);
      if (offloaded)
	fixup_child_record_type (ctx);
    }

  /* A 'target' with a nested 'teams' must not also contain directives
     outside that 'teams'; on error, discard the body.  */
  if (ctx->teams_nested_p && ctx->nonteams_nested_p)
    {
      error_at (gimple_location (stmt),
		"%<target%> construct with nested %<teams%> construct "
		"contains directives outside of the %<teams%> construct");
      gimple_omp_set_body (stmt, gimple_build_bind (NULL, NULL, NULL));
    }
}
3059 
/* Scan an OpenMP teams directive.  Non-host teams only need clause and
   body scanning; host teams are treated like a task/parallel region,
   getting a ".omp_data_s" record and an outlined child function.  */

static void
scan_omp_teams (gomp_teams *stmt, omp_context *outer_ctx)
{
  omp_context *ctx = new_omp_context (stmt, outer_ctx);

  if (!gimple_omp_teams_host (stmt))
    {
      scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
      scan_omp (gimple_omp_body_ptr (stmt), ctx);
      return;
    }
  /* Host teams: register with the task/parallel-region contexts so it
     is finished up like them later.  */
  taskreg_contexts.safe_push (ctx);
  gcc_assert (taskreg_nesting_level == 1);
  ctx->field_map = splay_tree_new (splay_tree_compare_pointers, 0, 0);
  ctx->record_type = lang_hooks.types.make_type (RECORD_TYPE);
  tree name = create_tmp_var_name (".omp_data_s");
  name = build_decl (gimple_location (stmt),
		     TYPE_DECL, name, ctx->record_type);
  DECL_ARTIFICIAL (name) = 1;
  DECL_NAMELESS (name) = 1;
  TYPE_NAME (ctx->record_type) = name;
  TYPE_ARTIFICIAL (ctx->record_type) = 1;
  create_omp_child_function (ctx, false);
  gimple_omp_teams_set_child_fn (stmt, ctx->cb.dst_fn);

  scan_sharing_clauses (gimple_omp_teams_clauses (stmt), ctx);
  scan_omp (gimple_omp_body_ptr (stmt), ctx);

  /* No shared data, so no record or receiver is needed.  */
  if (TYPE_FIELDS (ctx->record_type) == NULL)
    ctx->record_type = ctx->receiver_decl = NULL;
}
3093 
3094 /* Check nesting restrictions.  */
3095 static bool
check_omp_nesting_restrictions(gimple * stmt,omp_context * ctx)3096 check_omp_nesting_restrictions (gimple *stmt, omp_context *ctx)
3097 {
3098   tree c;
3099 
3100   /* No nesting of non-OpenACC STMT (that is, an OpenMP one, or a GOMP builtin)
3101      inside an OpenACC CTX.  */
3102   if (gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
3103       || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE)
3104     /* ..., except for the atomic codes that OpenACC shares with OpenMP.  */
3105     ;
3106   else if (!(is_gimple_omp (stmt)
3107 	     && is_gimple_omp_oacc (stmt)))
3108     {
3109       if (oacc_get_fn_attrib (cfun->decl) != NULL)
3110 	{
3111 	  error_at (gimple_location (stmt),
3112 		    "non-OpenACC construct inside of OpenACC routine");
3113 	  return false;
3114 	}
3115       else
3116 	for (omp_context *octx = ctx; octx != NULL; octx = octx->outer)
3117 	  if (is_gimple_omp (octx->stmt)
3118 	      && is_gimple_omp_oacc (octx->stmt))
3119 	    {
3120 	      error_at (gimple_location (stmt),
3121 			"non-OpenACC construct inside of OpenACC region");
3122 	      return false;
3123 	    }
3124     }
3125 
3126   if (ctx != NULL)
3127     {
3128       if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET
3129 	  && gimple_omp_target_kind (ctx->stmt) == GF_OMP_TARGET_KIND_REGION)
3130 	{
3131 	  c = omp_find_clause (gimple_omp_target_clauses (ctx->stmt),
3132 			       OMP_CLAUSE_DEVICE);
3133 	  if (c && OMP_CLAUSE_DEVICE_ANCESTOR (c))
3134 	    {
3135 	      error_at (gimple_location (stmt),
3136 			"OpenMP constructs are not allowed in target region "
3137 			"with %<ancestor%>");
3138 	      return false;
3139 	    }
3140 
3141 	  if (gimple_code (stmt) == GIMPLE_OMP_TEAMS && !ctx->teams_nested_p)
3142 	    ctx->teams_nested_p = true;
3143 	  else
3144 	    ctx->nonteams_nested_p = true;
3145 	}
3146       if (gimple_code (ctx->stmt) == GIMPLE_OMP_SCAN
3147 	  && ctx->outer
3148 	  && gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
3149 	ctx = ctx->outer;
3150       if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
3151 	  && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD
3152 	  && !ctx->loop_p)
3153 	{
3154 	  c = NULL_TREE;
3155 	  if (ctx->order_concurrent
3156 	      && (gimple_code (stmt) == GIMPLE_OMP_ORDERED
3157 		  || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
3158 		  || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE))
3159 	    {
3160 	      error_at (gimple_location (stmt),
3161 			"OpenMP constructs other than %<parallel%>, %<loop%>"
3162 			" or %<simd%> may not be nested inside a region with"
3163 			" the %<order(concurrent)%> clause");
3164 	      return false;
3165 	    }
3166 	  if (gimple_code (stmt) == GIMPLE_OMP_ORDERED)
3167 	    {
3168 	      c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
3169 	      if (omp_find_clause (c, OMP_CLAUSE_SIMD))
3170 		{
3171 		  if (omp_find_clause (c, OMP_CLAUSE_THREADS)
3172 		      && (ctx->outer == NULL
3173 			  || !gimple_omp_for_combined_into_p (ctx->stmt)
3174 			  || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR
3175 			  || (gimple_omp_for_kind (ctx->outer->stmt)
3176 			      != GF_OMP_FOR_KIND_FOR)
3177 			  || !gimple_omp_for_combined_p (ctx->outer->stmt)))
3178 		    {
3179 		      error_at (gimple_location (stmt),
3180 				"%<ordered simd threads%> must be closely "
3181 				"nested inside of %<%s simd%> region",
3182 				lang_GNU_Fortran () ? "do" : "for");
3183 		      return false;
3184 		    }
3185 		  return true;
3186 		}
3187 	    }
3188 	  else if (gimple_code (stmt) == GIMPLE_OMP_ATOMIC_LOAD
3189 		   || gimple_code (stmt) == GIMPLE_OMP_ATOMIC_STORE
3190 		   || gimple_code (stmt) == GIMPLE_OMP_SCAN)
3191 	    return true;
3192 	  else if (gimple_code (stmt) == GIMPLE_OMP_FOR
3193 		   && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
3194 	    return true;
3195 	  error_at (gimple_location (stmt),
3196 		    "OpenMP constructs other than "
3197 		    "%<ordered simd%>, %<simd%>, %<loop%> or %<atomic%> may "
3198 		    "not be nested inside %<simd%> region");
3199 	  return false;
3200 	}
3201       else if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS)
3202 	{
3203 	  if ((gimple_code (stmt) != GIMPLE_OMP_FOR
3204 	       || (gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_DISTRIBUTE
3205 		   && omp_find_clause (gimple_omp_for_clauses (stmt),
3206 				       OMP_CLAUSE_BIND) == NULL_TREE))
3207 	      && gimple_code (stmt) != GIMPLE_OMP_PARALLEL)
3208 	    {
3209 	      error_at (gimple_location (stmt),
3210 			"only %<distribute%>, %<parallel%> or %<loop%> "
3211 			"regions are allowed to be strictly nested inside "
3212 			"%<teams%> region");
3213 	      return false;
3214 	    }
3215 	}
3216       else if (ctx->order_concurrent
3217 	       && gimple_code (stmt) != GIMPLE_OMP_PARALLEL
3218 	       && (gimple_code (stmt) != GIMPLE_OMP_FOR
3219 		   || gimple_omp_for_kind (stmt) != GF_OMP_FOR_KIND_SIMD)
3220 	       && gimple_code (stmt) != GIMPLE_OMP_SCAN)
3221 	{
3222 	  if (ctx->loop_p)
3223 	    error_at (gimple_location (stmt),
3224 		      "OpenMP constructs other than %<parallel%>, %<loop%> or "
3225 		      "%<simd%> may not be nested inside a %<loop%> region");
3226 	  else
3227 	    error_at (gimple_location (stmt),
3228 		      "OpenMP constructs other than %<parallel%>, %<loop%> or "
3229 		      "%<simd%> may not be nested inside a region with "
3230 		      "the %<order(concurrent)%> clause");
3231 	  return false;
3232 	}
3233     }
3234   switch (gimple_code (stmt))
3235     {
3236     case GIMPLE_OMP_FOR:
3237       if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_SIMD)
3238 	return true;
3239       if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_DISTRIBUTE)
3240 	{
3241 	  if (ctx != NULL && gimple_code (ctx->stmt) != GIMPLE_OMP_TEAMS)
3242 	    {
3243 	      error_at (gimple_location (stmt),
3244 			"%<distribute%> region must be strictly nested "
3245 			"inside %<teams%> construct");
3246 	      return false;
3247 	    }
3248 	  return true;
3249 	}
3250       /* We split taskloop into task and nested taskloop in it.  */
3251       if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP)
3252 	return true;
3253       /* For now, hope this will change and loop bind(parallel) will not
3254 	 be allowed in lots of contexts.  */
3255       if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR
3256 	  && omp_find_clause (gimple_omp_for_clauses (stmt), OMP_CLAUSE_BIND))
3257 	return true;
3258       if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_OACC_LOOP)
3259 	{
3260 	  bool ok = false;
3261 
3262 	  if (ctx)
3263 	    switch (gimple_code (ctx->stmt))
3264 	      {
3265 	      case GIMPLE_OMP_FOR:
3266 		ok = (gimple_omp_for_kind (ctx->stmt)
3267 		      == GF_OMP_FOR_KIND_OACC_LOOP);
3268 		break;
3269 
3270 	      case GIMPLE_OMP_TARGET:
3271 		switch (gimple_omp_target_kind (ctx->stmt))
3272 		  {
3273 		  case GF_OMP_TARGET_KIND_OACC_PARALLEL:
3274 		  case GF_OMP_TARGET_KIND_OACC_KERNELS:
3275 		  case GF_OMP_TARGET_KIND_OACC_SERIAL:
3276 		  case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED:
3277 		  case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE:
3278 		    ok = true;
3279 		    break;
3280 
3281 		  default:
3282 		    break;
3283 		  }
3284 
3285 	      default:
3286 		break;
3287 	      }
3288 	  else if (oacc_get_fn_attrib (current_function_decl))
3289 	    ok = true;
3290 	  if (!ok)
3291 	    {
3292 	      error_at (gimple_location (stmt),
3293 			"OpenACC loop directive must be associated with"
3294 			" an OpenACC compute region");
3295 	      return false;
3296 	    }
3297 	}
3298       /* FALLTHRU */
3299     case GIMPLE_CALL:
3300       if (is_gimple_call (stmt)
3301 	  && (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3302 	      == BUILT_IN_GOMP_CANCEL
3303 	      || DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3304 		 == BUILT_IN_GOMP_CANCELLATION_POINT))
3305 	{
3306 	  const char *bad = NULL;
3307 	  const char *kind = NULL;
3308 	  const char *construct
3309 	    = (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3310 	       == BUILT_IN_GOMP_CANCEL)
3311 	      ? "cancel"
3312 	      : "cancellation point";
3313 	  if (ctx == NULL)
3314 	    {
3315 	      error_at (gimple_location (stmt), "orphaned %qs construct",
3316 			construct);
3317 	      return false;
3318 	    }
3319 	  switch (tree_fits_shwi_p (gimple_call_arg (stmt, 0))
3320 		  ? tree_to_shwi (gimple_call_arg (stmt, 0))
3321 		  : 0)
3322 	    {
3323 	    case 1:
3324 	      if (gimple_code (ctx->stmt) != GIMPLE_OMP_PARALLEL)
3325 		bad = "parallel";
3326 	      else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3327 		       == BUILT_IN_GOMP_CANCEL
3328 		       && !integer_zerop (gimple_call_arg (stmt, 1)))
3329 		ctx->cancellable = true;
3330 	      kind = "parallel";
3331 	      break;
3332 	    case 2:
3333 	      if (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
3334 		  || gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR)
3335 		bad = "for";
3336 	      else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3337 		       == BUILT_IN_GOMP_CANCEL
3338 		       && !integer_zerop (gimple_call_arg (stmt, 1)))
3339 		{
3340 		  ctx->cancellable = true;
3341 		  if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3342 				       OMP_CLAUSE_NOWAIT))
3343 		    warning_at (gimple_location (stmt), 0,
3344 				"%<cancel for%> inside "
3345 				"%<nowait%> for construct");
3346 		  if (omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3347 				       OMP_CLAUSE_ORDERED))
3348 		    warning_at (gimple_location (stmt), 0,
3349 				"%<cancel for%> inside "
3350 				"%<ordered%> for construct");
3351 		}
3352 	      kind = "for";
3353 	      break;
3354 	    case 4:
3355 	      if (gimple_code (ctx->stmt) != GIMPLE_OMP_SECTIONS
3356 		  && gimple_code (ctx->stmt) != GIMPLE_OMP_SECTION)
3357 		bad = "sections";
3358 	      else if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3359 		       == BUILT_IN_GOMP_CANCEL
3360 		       && !integer_zerop (gimple_call_arg (stmt, 1)))
3361 		{
3362 		  if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
3363 		    {
3364 		      ctx->cancellable = true;
3365 		      if (omp_find_clause (gimple_omp_sections_clauses
3366 								(ctx->stmt),
3367 					   OMP_CLAUSE_NOWAIT))
3368 			warning_at (gimple_location (stmt), 0,
3369 				    "%<cancel sections%> inside "
3370 				    "%<nowait%> sections construct");
3371 		    }
3372 		  else
3373 		    {
3374 		      gcc_assert (ctx->outer
3375 				  && gimple_code (ctx->outer->stmt)
3376 				     == GIMPLE_OMP_SECTIONS);
3377 		      ctx->outer->cancellable = true;
3378 		      if (omp_find_clause (gimple_omp_sections_clauses
3379 							(ctx->outer->stmt),
3380 					   OMP_CLAUSE_NOWAIT))
3381 			warning_at (gimple_location (stmt), 0,
3382 				    "%<cancel sections%> inside "
3383 				    "%<nowait%> sections construct");
3384 		    }
3385 		}
3386 	      kind = "sections";
3387 	      break;
3388 	    case 8:
3389 	      if (!is_task_ctx (ctx)
3390 		  && (!is_taskloop_ctx (ctx)
3391 		      || ctx->outer == NULL
3392 		      || !is_task_ctx (ctx->outer)))
3393 		bad = "task";
3394 	      else
3395 		{
3396 		  for (omp_context *octx = ctx->outer;
3397 		       octx; octx = octx->outer)
3398 		    {
3399 		      switch (gimple_code (octx->stmt))
3400 			{
3401 			case GIMPLE_OMP_TASKGROUP:
3402 			  break;
3403 			case GIMPLE_OMP_TARGET:
3404 			  if (gimple_omp_target_kind (octx->stmt)
3405 			      != GF_OMP_TARGET_KIND_REGION)
3406 			    continue;
3407 			  /* FALLTHRU */
3408 			case GIMPLE_OMP_PARALLEL:
3409 			case GIMPLE_OMP_TEAMS:
3410 			  error_at (gimple_location (stmt),
3411 				    "%<%s taskgroup%> construct not closely "
3412 				    "nested inside of %<taskgroup%> region",
3413 				    construct);
3414 			  return false;
3415 			case GIMPLE_OMP_TASK:
3416 			  if (gimple_omp_task_taskloop_p (octx->stmt)
3417 			      && octx->outer
3418 			      && is_taskloop_ctx (octx->outer))
3419 			    {
3420 			      tree clauses
3421 				= gimple_omp_for_clauses (octx->outer->stmt);
3422 			      if (!omp_find_clause (clauses, OMP_CLAUSE_NOGROUP))
3423 				break;
3424 			    }
3425 			  continue;
3426 			default:
3427 			  continue;
3428 			}
3429 		      break;
3430 		    }
3431 		  ctx->cancellable = true;
3432 		}
3433 	      kind = "taskgroup";
3434 	      break;
3435 	    default:
3436 	      error_at (gimple_location (stmt), "invalid arguments");
3437 	      return false;
3438 	    }
3439 	  if (bad)
3440 	    {
3441 	      error_at (gimple_location (stmt),
3442 			"%<%s %s%> construct not closely nested inside of %qs",
3443 			construct, kind, bad);
3444 	      return false;
3445 	    }
3446 	}
3447       /* FALLTHRU */
3448     case GIMPLE_OMP_SECTIONS:
3449     case GIMPLE_OMP_SINGLE:
3450       for (; ctx != NULL; ctx = ctx->outer)
3451 	switch (gimple_code (ctx->stmt))
3452 	  {
3453 	  case GIMPLE_OMP_FOR:
3454 	    if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
3455 		&& gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
3456 	      break;
3457 	    /* FALLTHRU */
3458 	  case GIMPLE_OMP_SECTIONS:
3459 	  case GIMPLE_OMP_SINGLE:
3460 	  case GIMPLE_OMP_ORDERED:
3461 	  case GIMPLE_OMP_MASTER:
3462 	  case GIMPLE_OMP_MASKED:
3463 	  case GIMPLE_OMP_TASK:
3464 	  case GIMPLE_OMP_CRITICAL:
3465 	    if (is_gimple_call (stmt))
3466 	      {
3467 		if (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt))
3468 		    != BUILT_IN_GOMP_BARRIER)
3469 		  return true;
3470 		error_at (gimple_location (stmt),
3471 			  "barrier region may not be closely nested inside "
3472 			  "of work-sharing, %<loop%>, %<critical%>, "
3473 			  "%<ordered%>, %<master%>, %<masked%>, explicit "
3474 			  "%<task%> or %<taskloop%> region");
3475 		return false;
3476 	      }
3477 	    error_at (gimple_location (stmt),
3478 		      "work-sharing region may not be closely nested inside "
3479 		      "of work-sharing, %<loop%>, %<critical%>, %<ordered%>, "
3480 		      "%<master%>, %<masked%>, explicit %<task%> or "
3481 		      "%<taskloop%> region");
3482 	    return false;
3483 	  case GIMPLE_OMP_PARALLEL:
3484 	  case GIMPLE_OMP_TEAMS:
3485 	    return true;
3486 	  case GIMPLE_OMP_TARGET:
3487 	    if (gimple_omp_target_kind (ctx->stmt)
3488 		== GF_OMP_TARGET_KIND_REGION)
3489 	      return true;
3490 	    break;
3491 	  default:
3492 	    break;
3493 	  }
3494       break;
3495     case GIMPLE_OMP_MASTER:
3496     case GIMPLE_OMP_MASKED:
3497       for (; ctx != NULL; ctx = ctx->outer)
3498 	switch (gimple_code (ctx->stmt))
3499 	  {
3500 	  case GIMPLE_OMP_FOR:
3501 	    if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
3502 		&& gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
3503 	      break;
3504 	    /* FALLTHRU */
3505 	  case GIMPLE_OMP_SECTIONS:
3506 	  case GIMPLE_OMP_SINGLE:
3507 	  case GIMPLE_OMP_TASK:
3508 	    error_at (gimple_location (stmt),
3509 		      "%qs region may not be closely nested inside "
3510 		      "of work-sharing, %<loop%>, explicit %<task%> or "
3511 		      "%<taskloop%> region",
3512 		      gimple_code (stmt) == GIMPLE_OMP_MASTER
3513 		      ? "master" : "masked");
3514 	    return false;
3515 	  case GIMPLE_OMP_PARALLEL:
3516 	  case GIMPLE_OMP_TEAMS:
3517 	    return true;
3518 	  case GIMPLE_OMP_TARGET:
3519 	    if (gimple_omp_target_kind (ctx->stmt)
3520 		== GF_OMP_TARGET_KIND_REGION)
3521 	      return true;
3522 	    break;
3523 	  default:
3524 	    break;
3525 	  }
3526       break;
3527     case GIMPLE_OMP_SCOPE:
3528       for (; ctx != NULL; ctx = ctx->outer)
3529 	switch (gimple_code (ctx->stmt))
3530 	  {
3531 	  case GIMPLE_OMP_FOR:
3532 	    if (gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_FOR
3533 		&& gimple_omp_for_kind (ctx->stmt) != GF_OMP_FOR_KIND_TASKLOOP)
3534 	      break;
3535 	    /* FALLTHRU */
3536 	  case GIMPLE_OMP_SECTIONS:
3537 	  case GIMPLE_OMP_SINGLE:
3538 	  case GIMPLE_OMP_TASK:
3539 	  case GIMPLE_OMP_CRITICAL:
3540 	  case GIMPLE_OMP_ORDERED:
3541 	  case GIMPLE_OMP_MASTER:
3542 	  case GIMPLE_OMP_MASKED:
3543 	    error_at (gimple_location (stmt),
3544 		      "%<scope%> region may not be closely nested inside "
3545 		      "of work-sharing, %<loop%>, explicit %<task%>, "
3546 		      "%<taskloop%>, %<critical%>, %<ordered%>, %<master%>, "
3547 		      "or %<masked%> region");
3548 	    return false;
3549 	  case GIMPLE_OMP_PARALLEL:
3550 	  case GIMPLE_OMP_TEAMS:
3551 	    return true;
3552 	  case GIMPLE_OMP_TARGET:
3553 	    if (gimple_omp_target_kind (ctx->stmt)
3554 		== GF_OMP_TARGET_KIND_REGION)
3555 	      return true;
3556 	    break;
3557 	  default:
3558 	    break;
3559 	  }
3560       break;
3561     case GIMPLE_OMP_TASK:
3562       for (c = gimple_omp_task_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
3563 	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
3564 	    && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
3565 		|| OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
3566 	  {
3567 	    enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
3568 	    error_at (OMP_CLAUSE_LOCATION (c),
3569 		      "%<depend(%s)%> is only allowed in %<omp ordered%>",
3570 		      kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
3571 	    return false;
3572 	  }
3573       break;
3574     case GIMPLE_OMP_ORDERED:
3575       for (c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
3576 	   c; c = OMP_CLAUSE_CHAIN (c))
3577 	{
3578 	  if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
3579 	    {
3580 	      gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREADS
3581 			  || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SIMD);
3582 	      continue;
3583 	    }
3584 	  enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
3585 	  if (kind == OMP_CLAUSE_DEPEND_SOURCE
3586 	      || kind == OMP_CLAUSE_DEPEND_SINK)
3587 	    {
3588 	      tree oclause;
3589 	      /* Look for containing ordered(N) loop.  */
3590 	      if (ctx == NULL
3591 		  || gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
3592 		  || (oclause
3593 			= omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3594 					   OMP_CLAUSE_ORDERED)) == NULL_TREE)
3595 		{
3596 		  error_at (OMP_CLAUSE_LOCATION (c),
3597 			    "%<ordered%> construct with %<depend%> clause "
3598 			    "must be closely nested inside an %<ordered%> "
3599 			    "loop");
3600 		  return false;
3601 		}
3602 	      else if (OMP_CLAUSE_ORDERED_EXPR (oclause) == NULL_TREE)
3603 		{
3604 		  error_at (OMP_CLAUSE_LOCATION (c),
3605 			    "%<ordered%> construct with %<depend%> clause "
3606 			    "must be closely nested inside a loop with "
3607 			    "%<ordered%> clause with a parameter");
3608 		  return false;
3609 		}
3610 	    }
3611 	  else
3612 	    {
3613 	      error_at (OMP_CLAUSE_LOCATION (c),
3614 			"invalid depend kind in omp %<ordered%> %<depend%>");
3615 	      return false;
3616 	    }
3617 	}
3618       c = gimple_omp_ordered_clauses (as_a <gomp_ordered *> (stmt));
3619       if (omp_find_clause (c, OMP_CLAUSE_SIMD))
3620 	{
3621 	  /* ordered simd must be closely nested inside of simd region,
3622 	     and simd region must not encounter constructs other than
3623 	     ordered simd, therefore ordered simd may be either orphaned,
3624 	     or ctx->stmt must be simd.  The latter case is handled already
3625 	     earlier.  */
3626 	  if (ctx != NULL)
3627 	    {
3628 	      error_at (gimple_location (stmt),
3629 			"%<ordered%> %<simd%> must be closely nested inside "
3630 			"%<simd%> region");
3631 	      return false;
3632 	    }
3633 	}
3634       for (; ctx != NULL; ctx = ctx->outer)
3635 	switch (gimple_code (ctx->stmt))
3636 	  {
3637 	  case GIMPLE_OMP_CRITICAL:
3638 	  case GIMPLE_OMP_TASK:
3639 	  case GIMPLE_OMP_ORDERED:
3640 	  ordered_in_taskloop:
3641 	    error_at (gimple_location (stmt),
3642 		      "%<ordered%> region may not be closely nested inside "
3643 		      "of %<critical%>, %<ordered%>, explicit %<task%> or "
3644 		      "%<taskloop%> region");
3645 	    return false;
3646 	  case GIMPLE_OMP_FOR:
3647 	    if (gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_TASKLOOP)
3648 	      goto ordered_in_taskloop;
3649 	    tree o;
3650 	    o = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
3651 				 OMP_CLAUSE_ORDERED);
3652 	    if (o == NULL)
3653 	      {
3654 		error_at (gimple_location (stmt),
3655 			  "%<ordered%> region must be closely nested inside "
3656 			  "a loop region with an %<ordered%> clause");
3657 		return false;
3658 	      }
3659 	    if (OMP_CLAUSE_ORDERED_EXPR (o) != NULL_TREE
3660 		&& omp_find_clause (c, OMP_CLAUSE_DEPEND) == NULL_TREE)
3661 	      {
3662 		error_at (gimple_location (stmt),
3663 			  "%<ordered%> region without %<depend%> clause may "
3664 			  "not be closely nested inside a loop region with "
3665 			  "an %<ordered%> clause with a parameter");
3666 		return false;
3667 	      }
3668 	    return true;
3669 	  case GIMPLE_OMP_TARGET:
3670 	    if (gimple_omp_target_kind (ctx->stmt)
3671 		!= GF_OMP_TARGET_KIND_REGION)
3672 	      break;
3673 	    /* FALLTHRU */
3674 	  case GIMPLE_OMP_PARALLEL:
3675 	  case GIMPLE_OMP_TEAMS:
3676 	    error_at (gimple_location (stmt),
3677 		      "%<ordered%> region must be closely nested inside "
3678 		      "a loop region with an %<ordered%> clause");
3679 	    return false;
3680 	  default:
3681 	    break;
3682 	  }
3683       break;
3684     case GIMPLE_OMP_CRITICAL:
3685       {
3686 	tree this_stmt_name
3687 	  = gimple_omp_critical_name (as_a <gomp_critical *> (stmt));
3688 	for (; ctx != NULL; ctx = ctx->outer)
3689 	  if (gomp_critical *other_crit
3690 	        = dyn_cast <gomp_critical *> (ctx->stmt))
3691 	    if (this_stmt_name == gimple_omp_critical_name (other_crit))
3692 	      {
3693 		error_at (gimple_location (stmt),
3694 			  "%<critical%> region may not be nested inside "
3695 			   "a %<critical%> region with the same name");
3696 		return false;
3697 	      }
3698       }
3699       break;
3700     case GIMPLE_OMP_TEAMS:
3701       if (ctx == NULL)
3702 	break;
3703       else if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET
3704 	       || (gimple_omp_target_kind (ctx->stmt)
3705 		   != GF_OMP_TARGET_KIND_REGION))
3706 	{
3707 	  /* Teams construct can appear either strictly nested inside of
3708 	     target construct with no intervening stmts, or can be encountered
3709 	     only by initial task (so must not appear inside any OpenMP
3710 	     construct.  */
3711 	  error_at (gimple_location (stmt),
3712 		    "%<teams%> construct must be closely nested inside of "
3713 		    "%<target%> construct or not nested in any OpenMP "
3714 		    "construct");
3715 	  return false;
3716 	}
3717       break;
3718     case GIMPLE_OMP_TARGET:
3719       for (c = gimple_omp_target_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
3720 	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
3721 	    && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE
3722 		|| OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK))
3723 	  {
3724 	    enum omp_clause_depend_kind kind = OMP_CLAUSE_DEPEND_KIND (c);
3725 	    error_at (OMP_CLAUSE_LOCATION (c),
3726 		      "%<depend(%s)%> is only allowed in %<omp ordered%>",
3727 		      kind == OMP_CLAUSE_DEPEND_SOURCE ? "source" : "sink");
3728 	    return false;
3729 	  }
3730       if (is_gimple_omp_offloaded (stmt)
3731 	  && oacc_get_fn_attrib (cfun->decl) != NULL)
3732 	{
3733 	  error_at (gimple_location (stmt),
3734 		    "OpenACC region inside of OpenACC routine, nested "
3735 		    "parallelism not supported yet");
3736 	  return false;
3737 	}
3738       for (; ctx != NULL; ctx = ctx->outer)
3739 	{
3740 	  if (gimple_code (ctx->stmt) != GIMPLE_OMP_TARGET)
3741 	    {
3742 	      if (is_gimple_omp (stmt)
3743 		  && is_gimple_omp_oacc (stmt)
3744 		  && is_gimple_omp (ctx->stmt))
3745 		{
3746 		  error_at (gimple_location (stmt),
3747 			    "OpenACC construct inside of non-OpenACC region");
3748 		  return false;
3749 		}
3750 	      continue;
3751 	    }
3752 
3753 	  const char *stmt_name, *ctx_stmt_name;
3754 	  switch (gimple_omp_target_kind (stmt))
3755 	    {
3756 	    case GF_OMP_TARGET_KIND_REGION: stmt_name = "target"; break;
3757 	    case GF_OMP_TARGET_KIND_DATA: stmt_name = "target data"; break;
3758 	    case GF_OMP_TARGET_KIND_UPDATE: stmt_name = "target update"; break;
3759 	    case GF_OMP_TARGET_KIND_ENTER_DATA:
3760 	      stmt_name = "target enter data"; break;
3761 	    case GF_OMP_TARGET_KIND_EXIT_DATA:
3762 	      stmt_name = "target exit data"; break;
3763 	    case GF_OMP_TARGET_KIND_OACC_PARALLEL: stmt_name = "parallel"; break;
3764 	    case GF_OMP_TARGET_KIND_OACC_KERNELS: stmt_name = "kernels"; break;
3765 	    case GF_OMP_TARGET_KIND_OACC_SERIAL: stmt_name = "serial"; break;
3766 	    case GF_OMP_TARGET_KIND_OACC_DATA: stmt_name = "data"; break;
3767 	    case GF_OMP_TARGET_KIND_OACC_UPDATE: stmt_name = "update"; break;
3768 	    case GF_OMP_TARGET_KIND_OACC_ENTER_DATA:
3769 	      stmt_name = "enter data"; break;
3770 	    case GF_OMP_TARGET_KIND_OACC_EXIT_DATA:
3771 	      stmt_name = "exit data"; break;
3772 	    case GF_OMP_TARGET_KIND_OACC_DECLARE: stmt_name = "declare"; break;
3773 	    case GF_OMP_TARGET_KIND_OACC_HOST_DATA: stmt_name = "host_data";
3774 	      break;
3775 	    case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED:
3776 	    case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE:
3777 	    case GF_OMP_TARGET_KIND_OACC_DATA_KERNELS:
3778 	      /* OpenACC 'kernels' decomposed parts.  */
3779 	      stmt_name = "kernels"; break;
3780 	    default: gcc_unreachable ();
3781 	    }
3782 	  switch (gimple_omp_target_kind (ctx->stmt))
3783 	    {
3784 	    case GF_OMP_TARGET_KIND_REGION: ctx_stmt_name = "target"; break;
3785 	    case GF_OMP_TARGET_KIND_DATA: ctx_stmt_name = "target data"; break;
3786 	    case GF_OMP_TARGET_KIND_OACC_PARALLEL:
3787 	      ctx_stmt_name = "parallel"; break;
3788 	    case GF_OMP_TARGET_KIND_OACC_KERNELS:
3789 	      ctx_stmt_name = "kernels"; break;
3790 	    case GF_OMP_TARGET_KIND_OACC_SERIAL:
3791 	      ctx_stmt_name = "serial"; break;
3792 	    case GF_OMP_TARGET_KIND_OACC_DATA: ctx_stmt_name = "data"; break;
3793 	    case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
3794 	      ctx_stmt_name = "host_data"; break;
3795 	    case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED:
3796 	    case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE:
3797 	    case GF_OMP_TARGET_KIND_OACC_DATA_KERNELS:
3798 	      /* OpenACC 'kernels' decomposed parts.  */
3799 	      ctx_stmt_name = "kernels"; break;
3800 	    default: gcc_unreachable ();
3801 	    }
3802 
3803 	  /* OpenACC/OpenMP mismatch?  */
3804 	  if (is_gimple_omp_oacc (stmt)
3805 	      != is_gimple_omp_oacc (ctx->stmt))
3806 	    {
3807 	      error_at (gimple_location (stmt),
3808 			"%s %qs construct inside of %s %qs region",
3809 			(is_gimple_omp_oacc (stmt)
3810 			 ? "OpenACC" : "OpenMP"), stmt_name,
3811 			(is_gimple_omp_oacc (ctx->stmt)
3812 			 ? "OpenACC" : "OpenMP"), ctx_stmt_name);
3813 	      return false;
3814 	    }
3815 	  if (is_gimple_omp_offloaded (ctx->stmt))
3816 	    {
3817 	      /* No GIMPLE_OMP_TARGET inside offloaded OpenACC CTX.  */
3818 	      if (is_gimple_omp_oacc (ctx->stmt))
3819 		{
3820 		  error_at (gimple_location (stmt),
3821 			    "%qs construct inside of %qs region",
3822 			    stmt_name, ctx_stmt_name);
3823 		  return false;
3824 		}
3825 	      else
3826 		{
3827 		  warning_at (gimple_location (stmt), 0,
3828 			      "%qs construct inside of %qs region",
3829 			      stmt_name, ctx_stmt_name);
3830 		}
3831 	    }
3832 	}
3833       break;
3834     default:
3835       break;
3836     }
3837   return true;
3838 }
3839 
3840 
3841 /* Helper function scan_omp.
3842 
3843    Callback for walk_tree or operators in walk_gimple_stmt used to
3844    scan for OMP directives in TP.  */
3845 
3846 static tree
scan_omp_1_op(tree * tp,int * walk_subtrees,void * data)3847 scan_omp_1_op (tree *tp, int *walk_subtrees, void *data)
3848 {
3849   struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
3850   omp_context *ctx = (omp_context *) wi->info;
3851   tree t = *tp;
3852 
3853   switch (TREE_CODE (t))
3854     {
3855     case VAR_DECL:
3856     case PARM_DECL:
3857     case LABEL_DECL:
3858     case RESULT_DECL:
3859       if (ctx)
3860 	{
3861 	  tree repl = remap_decl (t, &ctx->cb);
3862 	  gcc_checking_assert (TREE_CODE (repl) != ERROR_MARK);
3863 	  *tp = repl;
3864 	}
3865       break;
3866 
3867     default:
3868       if (ctx && TYPE_P (t))
3869 	*tp = remap_type (t, &ctx->cb);
3870       else if (!DECL_P (t))
3871 	{
3872 	  *walk_subtrees = 1;
3873 	  if (ctx)
3874 	    {
3875 	      tree tem = remap_type (TREE_TYPE (t), &ctx->cb);
3876 	      if (tem != TREE_TYPE (t))
3877 		{
3878 		  if (TREE_CODE (t) == INTEGER_CST)
3879 		    *tp = wide_int_to_tree (tem, wi::to_wide (t));
3880 		  else
3881 		    TREE_TYPE (t) = tem;
3882 		}
3883 	    }
3884 	}
3885       break;
3886     }
3887 
3888   return NULL_TREE;
3889 }
3890 
3891 /* Return true if FNDECL is a setjmp or a longjmp.  */
3892 
3893 static bool
setjmp_or_longjmp_p(const_tree fndecl)3894 setjmp_or_longjmp_p (const_tree fndecl)
3895 {
3896   if (fndecl_built_in_p (fndecl, BUILT_IN_SETJMP)
3897       || fndecl_built_in_p (fndecl, BUILT_IN_LONGJMP))
3898     return true;
3899 
3900   tree declname = DECL_NAME (fndecl);
3901   if (!declname
3902       || (DECL_CONTEXT (fndecl) != NULL_TREE
3903           && TREE_CODE (DECL_CONTEXT (fndecl)) != TRANSLATION_UNIT_DECL)
3904       || !TREE_PUBLIC (fndecl))
3905     return false;
3906 
3907   const char *name = IDENTIFIER_POINTER (declname);
3908   return !strcmp (name, "setjmp") || !strcmp (name, "longjmp");
3909 }
3910 
3911 /* Return true if FNDECL is an omp_* runtime API call.  */
3912 
3913 static bool
omp_runtime_api_call(const_tree fndecl)3914 omp_runtime_api_call (const_tree fndecl)
3915 {
3916   tree declname = DECL_NAME (fndecl);
3917   if (!declname
3918       || (DECL_CONTEXT (fndecl) != NULL_TREE
3919           && TREE_CODE (DECL_CONTEXT (fndecl)) != TRANSLATION_UNIT_DECL)
3920       || !TREE_PUBLIC (fndecl))
3921     return false;
3922 
3923   const char *name = IDENTIFIER_POINTER (declname);
3924   if (!startswith (name, "omp_"))
3925     return false;
3926 
3927   static const char *omp_runtime_apis[] =
3928     {
3929       /* This array has 3 sections.  First omp_* calls that don't
3930 	 have any suffixes.  */
3931       "aligned_alloc",
3932       "aligned_calloc",
3933       "alloc",
3934       "calloc",
3935       "free",
3936       "realloc",
3937       "target_alloc",
3938       "target_associate_ptr",
3939       "target_disassociate_ptr",
3940       "target_free",
3941       "target_is_present",
3942       "target_memcpy",
3943       "target_memcpy_rect",
3944       NULL,
3945       /* Now omp_* calls that are available as omp_* and omp_*_; however, the
3946 	 DECL_NAME is always omp_* without tailing underscore.  */
3947       "capture_affinity",
3948       "destroy_allocator",
3949       "destroy_lock",
3950       "destroy_nest_lock",
3951       "display_affinity",
3952       "fulfill_event",
3953       "get_active_level",
3954       "get_affinity_format",
3955       "get_cancellation",
3956       "get_default_allocator",
3957       "get_default_device",
3958       "get_device_num",
3959       "get_dynamic",
3960       "get_initial_device",
3961       "get_level",
3962       "get_max_active_levels",
3963       "get_max_task_priority",
3964       "get_max_teams",
3965       "get_max_threads",
3966       "get_nested",
3967       "get_num_devices",
3968       "get_num_places",
3969       "get_num_procs",
3970       "get_num_teams",
3971       "get_num_threads",
3972       "get_partition_num_places",
3973       "get_place_num",
3974       "get_proc_bind",
3975       "get_supported_active_levels",
3976       "get_team_num",
3977       "get_teams_thread_limit",
3978       "get_thread_limit",
3979       "get_thread_num",
3980       "get_wtick",
3981       "get_wtime",
3982       "in_final",
3983       "in_parallel",
3984       "init_lock",
3985       "init_nest_lock",
3986       "is_initial_device",
3987       "pause_resource",
3988       "pause_resource_all",
3989       "set_affinity_format",
3990       "set_default_allocator",
3991       "set_lock",
3992       "set_nest_lock",
3993       "test_lock",
3994       "test_nest_lock",
3995       "unset_lock",
3996       "unset_nest_lock",
3997       NULL,
3998       /* And finally calls available as omp_*, omp_*_ and omp_*_8_; however,
3999 	 as DECL_NAME only omp_* and omp_*_8 appear.  */
4000       "display_env",
4001       "get_ancestor_thread_num",
4002       "init_allocator",
4003       "get_partition_place_nums",
4004       "get_place_num_procs",
4005       "get_place_proc_ids",
4006       "get_schedule",
4007       "get_team_size",
4008       "set_default_device",
4009       "set_dynamic",
4010       "set_max_active_levels",
4011       "set_nested",
4012       "set_num_teams",
4013       "set_num_threads",
4014       "set_schedule",
4015       "set_teams_thread_limit"
4016     };
4017 
4018   int mode = 0;
4019   for (unsigned i = 0; i < ARRAY_SIZE (omp_runtime_apis); i++)
4020     {
4021       if (omp_runtime_apis[i] == NULL)
4022 	{
4023 	  mode++;
4024 	  continue;
4025 	}
4026       size_t len = strlen (omp_runtime_apis[i]);
4027       if (strncmp (name + 4, omp_runtime_apis[i], len) == 0
4028 	  && (name[4 + len] == '\0'
4029 	      || (mode > 1 && strcmp (name + 4 + len, "_8") == 0)))
4030 	return true;
4031     }
4032   return false;
4033 }
4034 
/* Helper function for scan_omp.

   Callback for walk_gimple_stmt used to scan for OMP directives in
   the current statement in GSI.  Diagnoses invalid nesting and
   invalid runtime API calls, replaces offending statements with nops,
   and dispatches each OMP construct to its dedicated scanner.  */

static tree
scan_omp_1_stmt (gimple_stmt_iterator *gsi, bool *handled_ops_p,
		 struct walk_stmt_info *wi)
{
  gimple *stmt = gsi_stmt (*gsi);
  omp_context *ctx = (omp_context *) wi->info;

  if (gimple_has_location (stmt))
    input_location = gimple_location (stmt);

  /* Check the nesting restrictions.  */
  bool remove = false;
  if (is_gimple_omp (stmt))
    remove = !check_omp_nesting_restrictions (stmt, ctx);
  else if (is_gimple_call (stmt))
    {
      tree fndecl = gimple_call_fndecl (stmt);
      if (fndecl)
	{
	  /* setjmp/longjmp are rejected inside a simd construct,
	     unless this context was created for a loop construct
	     (ctx->loop_p).  */
	  if (ctx
	      && gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
	      && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD
	      && setjmp_or_longjmp_p (fndecl)
	      && !ctx->loop_p)
	    {
	      remove = true;
	      error_at (gimple_location (stmt),
			"setjmp/longjmp inside %<simd%> construct");
	    }
	  else if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
	    /* Explicit calls into the GOMP runtime stand for OMP
	       directives and get the same nesting checks.  */
	    switch (DECL_FUNCTION_CODE (fndecl))
	      {
	      case BUILT_IN_GOMP_BARRIER:
	      case BUILT_IN_GOMP_CANCEL:
	      case BUILT_IN_GOMP_CANCELLATION_POINT:
	      case BUILT_IN_GOMP_TASKYIELD:
	      case BUILT_IN_GOMP_TASKWAIT:
	      case BUILT_IN_GOMP_TASKGROUP_START:
	      case BUILT_IN_GOMP_TASKGROUP_END:
		remove = !check_omp_nesting_restrictions (stmt, ctx);
		break;
	      default:
		break;
	      }
	  else if (ctx)
	    {
	      omp_context *octx = ctx;
	      /* For a scan region the restrictions apply to the
		 enclosing context instead.  */
	      if (gimple_code (ctx->stmt) == GIMPLE_OMP_SCAN && ctx->outer)
		octx = ctx->outer;
	      if (octx->order_concurrent && omp_runtime_api_call (fndecl))
		{
		  remove = true;
		  error_at (gimple_location (stmt),
			    "OpenMP runtime API call %qD in a region with "
			    "%<order(concurrent)%> clause", fndecl);
		}
	      /* Strictly nested in teams, only omp_get_num_teams and
		 omp_get_team_num are permitted.  */
	      if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
		  && omp_runtime_api_call (fndecl)
		  && ((IDENTIFIER_LENGTH (DECL_NAME (fndecl))
		       != strlen ("omp_get_num_teams"))
		      || strcmp (IDENTIFIER_POINTER (DECL_NAME (fndecl)),
				 "omp_get_num_teams") != 0)
		  && ((IDENTIFIER_LENGTH (DECL_NAME (fndecl))
		       != strlen ("omp_get_team_num"))
		      || strcmp (IDENTIFIER_POINTER (DECL_NAME (fndecl)),
				 "omp_get_team_num") != 0))
		{
		  remove = true;
		  error_at (gimple_location (stmt),
			    "OpenMP runtime API call %qD strictly nested in a "
			    "%<teams%> region", fndecl);
		}
	      if (gimple_code (ctx->stmt) == GIMPLE_OMP_TARGET
		  && (gimple_omp_target_kind (ctx->stmt)
		      == GF_OMP_TARGET_KIND_REGION)
		  && omp_runtime_api_call (fndecl))
		{
		  tree tgt_clauses = gimple_omp_target_clauses (ctx->stmt);
		  tree c = omp_find_clause (tgt_clauses, OMP_CLAUSE_DEVICE);
		  /* Diagnosed only; the statement is not removed.  */
		  if (c && OMP_CLAUSE_DEVICE_ANCESTOR (c))
		    error_at (gimple_location (stmt),
			      "OpenMP runtime API call %qD in a region with "
			      "%<device(ancestor)%> clause", fndecl);
		}
	    }
	}
    }
  if (remove)
    {
      /* Replace the invalid statement with a nop so scanning and
	 lowering can continue after the diagnostic.  */
      stmt = gimple_build_nop ();
      gsi_replace (gsi, stmt, false);
    }

  *handled_ops_p = true;

  switch (gimple_code (stmt))
    {
    /* taskreg_nesting_level tracks how deeply we are nested inside
       parallel/task (and host teams / offloaded target) bodies.  */
    case GIMPLE_OMP_PARALLEL:
      taskreg_nesting_level++;
      scan_omp_parallel (gsi, ctx);
      taskreg_nesting_level--;
      break;

    case GIMPLE_OMP_TASK:
      taskreg_nesting_level++;
      scan_omp_task (gsi, ctx);
      taskreg_nesting_level--;
      break;

    case GIMPLE_OMP_FOR:
      /* A combined simd with an inscan reduction uses the special
	 simd-scan scanner, unless errors have been reported.  */
      if ((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
	   == GF_OMP_FOR_KIND_SIMD)
	  && gimple_omp_for_combined_into_p (stmt)
	  && gimple_code (ctx->stmt) != GIMPLE_OMP_SCAN)
	{
	  tree clauses = gimple_omp_for_clauses (as_a <gomp_for *> (stmt));
	  tree c = omp_find_clause (clauses, OMP_CLAUSE_REDUCTION);
	  if (c && OMP_CLAUSE_REDUCTION_INSCAN (c) && !seen_error ())
	    {
	      scan_omp_simd_scan (gsi, as_a <gomp_for *> (stmt), ctx);
	      break;
	    }
	}
      /* A non-collapsed simd in a maybe-offloaded context with a
	 nonzero SIMT vectorization factor gets the dual scanning.  */
      if ((gimple_omp_for_kind (as_a <gomp_for *> (stmt))
	   == GF_OMP_FOR_KIND_SIMD)
	  && omp_maybe_offloaded_ctx (ctx)
	  && omp_max_simt_vf ()
	  && gimple_omp_for_collapse (stmt) == 1)
	scan_omp_simd (gsi, as_a <gomp_for *> (stmt), ctx);
      else
	scan_omp_for (as_a <gomp_for *> (stmt), ctx);
      break;

    case GIMPLE_OMP_SCOPE:
      ctx = new_omp_context (stmt, ctx);
      scan_sharing_clauses (gimple_omp_scope_clauses (stmt), ctx);
      scan_omp (gimple_omp_body_ptr (stmt), ctx);
      break;

    case GIMPLE_OMP_SECTIONS:
      scan_omp_sections (as_a <gomp_sections *> (stmt), ctx);
      break;

    case GIMPLE_OMP_SINGLE:
      scan_omp_single (as_a <gomp_single *> (stmt), ctx);
      break;

    case GIMPLE_OMP_SCAN:
      /* Record on the enclosing context whether this scan is
	 inclusive or exclusive.  */
      if (tree clauses = gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt)))
	{
	  if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_INCLUSIVE)
	    ctx->scan_inclusive = true;
	  else if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_EXCLUSIVE)
	    ctx->scan_exclusive = true;
	}
      /* FALLTHRU */
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_CRITICAL:
      ctx = new_omp_context (stmt, ctx);
      scan_omp (gimple_omp_body_ptr (stmt), ctx);
      break;

    case GIMPLE_OMP_MASKED:
      ctx = new_omp_context (stmt, ctx);
      scan_sharing_clauses (gimple_omp_masked_clauses (stmt), ctx);
      scan_omp (gimple_omp_body_ptr (stmt), ctx);
      break;

    case GIMPLE_OMP_TASKGROUP:
      ctx = new_omp_context (stmt, ctx);
      scan_sharing_clauses (gimple_omp_taskgroup_clauses (stmt), ctx);
      scan_omp (gimple_omp_body_ptr (stmt), ctx);
      break;

    case GIMPLE_OMP_TARGET:
      /* Bump the nesting level only around actually offloaded target
	 bodies.  */
      if (is_gimple_omp_offloaded (stmt))
	{
	  taskreg_nesting_level++;
	  scan_omp_target (as_a <gomp_target *> (stmt), ctx);
	  taskreg_nesting_level--;
	}
      else
	scan_omp_target (as_a <gomp_target *> (stmt), ctx);
      break;

    case GIMPLE_OMP_TEAMS:
      /* Likewise, only host teams count towards the nesting level.  */
      if (gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
	{
	  taskreg_nesting_level++;
	  scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
	  taskreg_nesting_level--;
	}
      else
	scan_omp_teams (as_a <gomp_teams *> (stmt), ctx);
      break;

    case GIMPLE_BIND:
      {
	tree var;

	*handled_ops_p = false;
	if (ctx)
	  /* Insert identity mappings for the bind's local variables so
	     that remapping in scan_omp_1_op leaves them alone.  */
	  for (var = gimple_bind_vars (as_a <gbind *> (stmt));
	       var ;
	       var = DECL_CHAIN (var))
	    insert_decl_map (&ctx->cb, var, var);
      }
      break;
    default:
      *handled_ops_p = false;
      break;
    }

  return NULL_TREE;
}
4257 
4258 
4259 /* Scan all the statements starting at the current statement.  CTX
4260    contains context information about the OMP directives and
4261    clauses found during the scan.  */
4262 
4263 static void
scan_omp(gimple_seq * body_p,omp_context * ctx)4264 scan_omp (gimple_seq *body_p, omp_context *ctx)
4265 {
4266   location_t saved_location;
4267   struct walk_stmt_info wi;
4268 
4269   memset (&wi, 0, sizeof (wi));
4270   wi.info = ctx;
4271   wi.want_locations = true;
4272 
4273   saved_location = input_location;
4274   walk_gimple_seq_mod (body_p, scan_omp_1_stmt, scan_omp_1_op, &wi);
4275   input_location = saved_location;
4276 }
4277 
4278 /* Re-gimplification and code generation routines.  */
4279 
4280 /* Remove omp_member_access_dummy_var variables from gimple_bind_vars
4281    of BIND if in a method.  */
4282 
4283 static void
maybe_remove_omp_member_access_dummy_vars(gbind * bind)4284 maybe_remove_omp_member_access_dummy_vars (gbind *bind)
4285 {
4286   if (DECL_ARGUMENTS (current_function_decl)
4287       && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
4288       && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
4289 	  == POINTER_TYPE))
4290     {
4291       tree vars = gimple_bind_vars (bind);
4292       for (tree *pvar = &vars; *pvar; )
4293 	if (omp_member_access_dummy_var (*pvar))
4294 	  *pvar = DECL_CHAIN (*pvar);
4295 	else
4296 	  pvar = &DECL_CHAIN (*pvar);
4297       gimple_bind_set_vars (bind, vars);
4298     }
4299 }
4300 
4301 /* Remove omp_member_access_dummy_var variables from BLOCK_VARS of
4302    block and its subblocks.  */
4303 
4304 static void
remove_member_access_dummy_vars(tree block)4305 remove_member_access_dummy_vars (tree block)
4306 {
4307   for (tree *pvar = &BLOCK_VARS (block); *pvar; )
4308     if (omp_member_access_dummy_var (*pvar))
4309       *pvar = DECL_CHAIN (*pvar);
4310     else
4311       pvar = &DECL_CHAIN (*pvar);
4312 
4313   for (block = BLOCK_SUBBLOCKS (block); block; block = BLOCK_CHAIN (block))
4314     remove_member_access_dummy_vars (block);
4315 }
4316 
4317 /* If a context was created for STMT when it was scanned, return it.  */
4318 
4319 static omp_context *
maybe_lookup_ctx(gimple * stmt)4320 maybe_lookup_ctx (gimple *stmt)
4321 {
4322   splay_tree_node n;
4323   n = splay_tree_lookup (all_contexts, (splay_tree_key) stmt);
4324   return n ? (omp_context *) n->value : NULL;
4325 }
4326 
4327 
4328 /* Find the mapping for DECL in CTX or the immediately enclosing
4329    context that has a mapping for DECL.
4330 
4331    If CTX is a nested parallel directive, we may have to use the decl
4332    mappings created in CTX's parent context.  Suppose that we have the
4333    following parallel nesting (variable UIDs showed for clarity):
4334 
4335 	iD.1562 = 0;
4336      	#omp parallel shared(iD.1562)		-> outer parallel
4337 	  iD.1562 = iD.1562 + 1;
4338 
4339 	  #omp parallel shared (iD.1562)	-> inner parallel
4340 	     iD.1562 = iD.1562 - 1;
4341 
4342    Each parallel structure will create a distinct .omp_data_s structure
4343    for copying iD.1562 in/out of the directive:
4344 
4345   	outer parallel		.omp_data_s.1.i -> iD.1562
4346 	inner parallel		.omp_data_s.2.i -> iD.1562
4347 
4348    A shared variable mapping will produce a copy-out operation before
4349    the parallel directive and a copy-in operation after it.  So, in
4350    this case we would have:
4351 
4352   	iD.1562 = 0;
4353 	.omp_data_o.1.i = iD.1562;
4354 	#omp parallel shared(iD.1562)		-> outer parallel
4355 	  .omp_data_i.1 = &.omp_data_o.1
4356 	  .omp_data_i.1->i = .omp_data_i.1->i + 1;
4357 
4358 	  .omp_data_o.2.i = iD.1562;		-> **
4359 	  #omp parallel shared(iD.1562)		-> inner parallel
4360 	    .omp_data_i.2 = &.omp_data_o.2
4361 	    .omp_data_i.2->i = .omp_data_i.2->i - 1;
4362 
4363 
4364     ** This is a problem.  The symbol iD.1562 cannot be referenced
4365        inside the body of the outer parallel region.  But since we are
4366        emitting this copy operation while expanding the inner parallel
4367        directive, we need to access the CTX structure of the outer
4368        parallel directive to get the correct mapping:
4369 
4370 	  .omp_data_o.2.i = .omp_data_i.1->i
4371 
4372     Since there may be other workshare or parallel directives enclosing
4373     the parallel directive, it may be necessary to walk up the context
4374     parent chain.  This is not a problem in general because nested
4375     parallelism happens only rarely.  */
4376 
4377 static tree
lookup_decl_in_outer_ctx(tree decl,omp_context * ctx)4378 lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
4379 {
4380   tree t;
4381   omp_context *up;
4382 
4383   for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
4384     t = maybe_lookup_decl (decl, up);
4385 
4386   gcc_assert (!ctx->is_nested || t || is_global_var (decl));
4387 
4388   return t ? t : decl;
4389 }
4390 
4391 
4392 /* Similar to lookup_decl_in_outer_ctx, but return DECL if not found
4393    in outer contexts.  */
4394 
4395 static tree
maybe_lookup_decl_in_outer_ctx(tree decl,omp_context * ctx)4396 maybe_lookup_decl_in_outer_ctx (tree decl, omp_context *ctx)
4397 {
4398   tree t = NULL;
4399   omp_context *up;
4400 
4401   for (up = ctx->outer, t = NULL; up && t == NULL; up = up->outer)
4402     t = maybe_lookup_decl (decl, up);
4403 
4404   return t ? t : decl;
4405 }
4406 
4407 
4408 /* Construct the initialization value for reduction operation OP.  */
4409 
4410 tree
omp_reduction_init_op(location_t loc,enum tree_code op,tree type)4411 omp_reduction_init_op (location_t loc, enum tree_code op, tree type)
4412 {
4413   switch (op)
4414     {
4415     case PLUS_EXPR:
4416     case MINUS_EXPR:
4417     case BIT_IOR_EXPR:
4418     case BIT_XOR_EXPR:
4419     case TRUTH_OR_EXPR:
4420     case TRUTH_ORIF_EXPR:
4421     case TRUTH_XOR_EXPR:
4422     case NE_EXPR:
4423       return build_zero_cst (type);
4424 
4425     case MULT_EXPR:
4426     case TRUTH_AND_EXPR:
4427     case TRUTH_ANDIF_EXPR:
4428     case EQ_EXPR:
4429       return fold_convert_loc (loc, type, integer_one_node);
4430 
4431     case BIT_AND_EXPR:
4432       return fold_convert_loc (loc, type, integer_minus_one_node);
4433 
4434     case MAX_EXPR:
4435       if (SCALAR_FLOAT_TYPE_P (type))
4436 	{
4437 	  REAL_VALUE_TYPE max, min;
4438 	  if (HONOR_INFINITIES (type))
4439 	    {
4440 	      real_inf (&max);
4441 	      real_arithmetic (&min, NEGATE_EXPR, &max, NULL);
4442 	    }
4443 	  else
4444 	    real_maxval (&min, 1, TYPE_MODE (type));
4445 	  return build_real (type, min);
4446 	}
4447       else if (POINTER_TYPE_P (type))
4448 	{
4449 	  wide_int min
4450 	    = wi::min_value (TYPE_PRECISION (type), TYPE_SIGN (type));
4451 	  return wide_int_to_tree (type, min);
4452 	}
4453       else
4454 	{
4455 	  gcc_assert (INTEGRAL_TYPE_P (type));
4456 	  return TYPE_MIN_VALUE (type);
4457 	}
4458 
4459     case MIN_EXPR:
4460       if (SCALAR_FLOAT_TYPE_P (type))
4461 	{
4462 	  REAL_VALUE_TYPE max;
4463 	  if (HONOR_INFINITIES (type))
4464 	    real_inf (&max);
4465 	  else
4466 	    real_maxval (&max, 0, TYPE_MODE (type));
4467 	  return build_real (type, max);
4468 	}
4469       else if (POINTER_TYPE_P (type))
4470 	{
4471 	  wide_int max
4472 	    = wi::max_value (TYPE_PRECISION (type), TYPE_SIGN (type));
4473 	  return wide_int_to_tree (type, max);
4474 	}
4475       else
4476 	{
4477 	  gcc_assert (INTEGRAL_TYPE_P (type));
4478 	  return TYPE_MAX_VALUE (type);
4479 	}
4480 
4481     default:
4482       gcc_unreachable ();
4483     }
4484 }
4485 
4486 /* Construct the initialization value for reduction CLAUSE.  */
4487 
4488 tree
omp_reduction_init(tree clause,tree type)4489 omp_reduction_init (tree clause, tree type)
4490 {
4491   return omp_reduction_init_op (OMP_CLAUSE_LOCATION (clause),
4492 				OMP_CLAUSE_REDUCTION_CODE (clause), type);
4493 }
4494 
/* Return alignment to be assumed for var in CLAUSE, which should be
   OMP_CLAUSE_ALIGNED.  */

static tree
omp_clause_aligned_alignment (tree clause)
{
  /* Honor an alignment the user specified explicitly.  */
  if (OMP_CLAUSE_ALIGNED_ALIGNMENT (clause))
    return OMP_CLAUSE_ALIGNED_ALIGNMENT (clause);

  /* Otherwise return implementation defined alignment.  Take the maximum
     alignment over the vector types the target would use for
     autovectorization.  */
  unsigned int al = 1;
  opt_scalar_mode mode_iter;
  auto_vector_modes modes;
  targetm.vectorize.autovectorize_vector_modes (&modes, true);
  /* Pairs of a scalar mode class and its corresponding vector mode class;
     the loop below steps by 2 so classes[i] is always the scalar class
     and classes[i + 1] the matching vector class.  */
  static enum mode_class classes[]
    = { MODE_INT, MODE_VECTOR_INT, MODE_FLOAT, MODE_VECTOR_FLOAT };
  for (int i = 0; i < 4; i += 2)
    /* The i += 2 step of the for loop dictates that we only walk through
       scalar classes here.  */
    FOR_EACH_MODE_IN_CLASS (mode_iter, classes[i])
      {
	scalar_mode mode = mode_iter.require ();
	machine_mode vmode = targetm.vectorize.preferred_simd_mode (mode);
	if (GET_MODE_CLASS (vmode) != classes[i + 1])
	  continue;
	/* Prefer any related autovectorization mode that is at least as
	   wide as the preferred SIMD mode.  */
	machine_mode alt_vmode;
	for (unsigned int j = 0; j < modes.length (); ++j)
	  if (related_vector_mode (modes[j], mode).exists (&alt_vmode)
	      && known_ge (GET_MODE_SIZE (alt_vmode), GET_MODE_SIZE (vmode)))
	    vmode = alt_vmode;

	/* Skip modes the frontend has no usable scalar type for, or for
	   which no vector type with the wanted mode can be built.  */
	tree type = lang_hooks.types.type_for_mode (mode, 1);
	if (type == NULL_TREE || TYPE_MODE (type) != mode)
	  continue;
	type = build_vector_type_for_mode (type, vmode);
	if (TYPE_MODE (type) != vmode)
	  continue;
	/* Keep the maximum alignment in bytes seen so far.  */
	if (TYPE_ALIGN_UNIT (type) > al)
	  al = TYPE_ALIGN_UNIT (type);
      }
  return build_int_cst (integer_type_node, al);
}
4536 
4537 
/* This structure is part of the interface between lower_rec_simd_input_clauses
   and lower_rec_input_clauses.  */

class omplow_simd_context {
public:
  /* Zero all members in one go; the members are plain data, so memset
     yields a well-defined cleared state (max_vf == 0 means "not yet
     computed").  */
  omplow_simd_context () { memset (this, 0, sizeof (*this)); }
  /* Temporary used as the per-iteration index into "omp simd array"s.  */
  tree idx;
  /* Temporary used as the index for the lane-local view of such arrays.  */
  tree lane;
  /* Temporary holding the last lane; created lazily for inscan
     reductions.  */
  tree lastlane;
  /* For SIMT: addresses of privatized variables, collected to be passed
     as extra arguments (first slot is a placeholder for the simduid).  */
  vec<tree, va_heap> simt_eargs;
  /* For SIMT: statements (e.g. clobbers) accumulated for the epilogue.  */
  gimple_seq simt_dlist;
  /* Maximum vectorization factor; 0 until computed, 1 disables SIMD
     privatization.  */
  poly_uint64_pod max_vf;
  /* True when lowering for SIMT rather than plain SIMD.  */
  bool is_simt;
};
4552 
/* Helper function of lower_rec_input_clauses, used for #pragma omp simd
   privatization.  Privatize NEW_VAR for SIMD/SIMT execution in context CTX,
   using the state in SCTX shared across all clauses of the construct.
   On success set IVAR to the per-iteration view and LVAR to the lane-local
   view of the variable; for inscan reductions additionally set *RVAR (and,
   for exclusive scan, *RVAR2).  Return false when privatization is not
   performed (max_vf == 1), true otherwise.  */

static bool
lower_rec_simd_input_clauses (tree new_var, omp_context *ctx,
			      omplow_simd_context *sctx, tree &ivar,
			      tree &lvar, tree *rvar = NULL,
			      tree *rvar2 = NULL)
{
  /* On the first call compute the maximum vectorization factor for the
     construct; subsequent calls reuse the cached value.  */
  if (known_eq (sctx->max_vf, 0U))
    {
      sctx->max_vf = sctx->is_simt ? omp_max_simt_vf () : omp_max_vf ();
      if (maybe_gt (sctx->max_vf, 1U))
	{
	  /* A safelen clause caps the usable vectorization factor; an
	     invalid or sub-1 safelen disables privatization entirely.  */
	  tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
				    OMP_CLAUSE_SAFELEN);
	  if (c)
	    {
	      poly_uint64 safe_len;
	      if (!poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
		  || maybe_lt (safe_len, 1U))
		sctx->max_vf = 1;
	      else
		sctx->max_vf = lower_bound (sctx->max_vf, safe_len);
	    }
	}
      /* For SIMT, scan the reduction clauses for cases we cannot (or do
	 not want to) support and fall back to max_vf == 1 for them.  */
      if (sctx->is_simt && !known_eq (sctx->max_vf, 1U))
	{
	  for (tree c = gimple_omp_for_clauses (ctx->stmt); c;
	       c = OMP_CLAUSE_CHAIN (c))
	    {
	      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
		continue;

	      if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
		{
		  /* UDR reductions are not supported yet for SIMT, disable
		     SIMT.  */
		  sctx->max_vf = 1;
		  break;
		}

	      if (truth_value_p (OMP_CLAUSE_REDUCTION_CODE (c))
		  && !INTEGRAL_TYPE_P (TREE_TYPE (new_var)))
		{
		  /* Doing boolean operations on non-integral types is
		     for conformance only, it's not worth supporting this
		     for SIMT.  */
		  sctx->max_vf = 1;
		  break;
	      }
	    }
	}
      /* Create the index and lane temporaries shared by all privatized
	 variables of the construct.  */
      if (maybe_gt (sctx->max_vf, 1U))
	{
	  sctx->idx = create_tmp_var (unsigned_type_node);
	  sctx->lane = create_tmp_var (unsigned_type_node);
	}
    }
  if (known_eq (sctx->max_vf, 1U))
    return false;

  if (sctx->is_simt)
    {
      /* For SIMT, registers are already per-lane; only addressable
	 variables need a privatized copy passed by address.  */
      if (is_gimple_reg (new_var))
	{
	  ivar = lvar = new_var;
	  return true;
	}
      tree type = TREE_TYPE (new_var), ptype = build_pointer_type (type);
      ivar = lvar = create_tmp_var (type);
      TREE_ADDRESSABLE (ivar) = 1;
      DECL_ATTRIBUTES (ivar) = tree_cons (get_identifier ("omp simt private"),
					  NULL, DECL_ATTRIBUTES (ivar));
      sctx->simt_eargs.safe_push (build1 (ADDR_EXPR, ptype, ivar));
      /* Clobber the privatized copy at the end so its lifetime is known.  */
      tree clobber = build_clobber (type);
      gimple *g = gimple_build_assign (ivar, clobber);
      gimple_seq_add_stmt (&sctx->simt_dlist, g);
    }
  else
    {
      /* For SIMD, privatize NEW_VAR as an array of max_vf elements,
	 marked with the "omp simd array" attribute for the vectorizer.  */
      tree atype = build_array_type_nelts (TREE_TYPE (new_var), sctx->max_vf);
      tree avar = create_tmp_var_raw (atype);
      if (TREE_ADDRESSABLE (new_var))
	TREE_ADDRESSABLE (avar) = 1;
      DECL_ATTRIBUTES (avar)
	= tree_cons (get_identifier ("omp simd array"), NULL,
		     DECL_ATTRIBUTES (avar));
      gimple_add_tmp_var (avar);
      tree iavar = avar;
      if (rvar && !ctx->for_simd_scan_phase)
	{
	  /* For inscan reductions, create another array temporary,
	     which will hold the reduced value.  */
	  iavar = create_tmp_var_raw (atype);
	  if (TREE_ADDRESSABLE (new_var))
	    TREE_ADDRESSABLE (iavar) = 1;
	  DECL_ATTRIBUTES (iavar)
	    = tree_cons (get_identifier ("omp simd array"), NULL,
			 tree_cons (get_identifier ("omp simd inscan"), NULL,
				    DECL_ATTRIBUTES (iavar)));
	  gimple_add_tmp_var (iavar);
	  /* Remember the pairing so later passes can map one array to
	     the other.  */
	  ctx->cb.decl_map->put (avar, iavar);
	  if (sctx->lastlane == NULL_TREE)
	    sctx->lastlane = create_tmp_var (unsigned_type_node);
	  *rvar = build4 (ARRAY_REF, TREE_TYPE (new_var), iavar,
			  sctx->lastlane, NULL_TREE, NULL_TREE);
	  TREE_THIS_NOTRAP (*rvar) = 1;

	  if (ctx->scan_exclusive)
	    {
	      /* And for exclusive scan yet another one, which will
		 hold the value during the scan phase.  */
	      tree savar = create_tmp_var_raw (atype);
	      if (TREE_ADDRESSABLE (new_var))
		TREE_ADDRESSABLE (savar) = 1;
	      DECL_ATTRIBUTES (savar)
		= tree_cons (get_identifier ("omp simd array"), NULL,
			     tree_cons (get_identifier ("omp simd inscan "
							"exclusive"), NULL,
					DECL_ATTRIBUTES (savar)));
	      gimple_add_tmp_var (savar);
	      ctx->cb.decl_map->put (iavar, savar);
	      *rvar2 = build4 (ARRAY_REF, TREE_TYPE (new_var), savar,
			       sctx->idx, NULL_TREE, NULL_TREE);
	      TREE_THIS_NOTRAP (*rvar2) = 1;
	    }
	}
      /* The per-iteration view indexes with idx, the lane-local view
	 with lane.  */
      ivar = build4 (ARRAY_REF, TREE_TYPE (new_var), iavar, sctx->idx,
		     NULL_TREE, NULL_TREE);
      lvar = build4 (ARRAY_REF, TREE_TYPE (new_var), avar, sctx->lane,
		     NULL_TREE, NULL_TREE);
      TREE_THIS_NOTRAP (ivar) = 1;
      TREE_THIS_NOTRAP (lvar) = 1;
    }
  /* Redirect uses of NEW_VAR to the lane-local view.  */
  if (DECL_P (new_var))
    {
      SET_DECL_VALUE_EXPR (new_var, lvar);
      DECL_HAS_VALUE_EXPR_P (new_var) = 1;
    }
  return true;
}
4695 
4696 /* Helper function of lower_rec_input_clauses.  For a reference
4697    in simd reduction, add an underlying variable it will reference.  */
4698 
4699 static void
handle_simd_reference(location_t loc,tree new_vard,gimple_seq * ilist)4700 handle_simd_reference (location_t loc, tree new_vard, gimple_seq *ilist)
4701 {
4702   tree z = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_vard)));
4703   if (TREE_CONSTANT (z))
4704     {
4705       z = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_vard)),
4706 			      get_name (new_vard));
4707       gimple_add_tmp_var (z);
4708       TREE_ADDRESSABLE (z) = 1;
4709       z = build_fold_addr_expr_loc (loc, z);
4710       gimplify_assign (new_vard, z, ilist);
4711     }
4712 }
4713 
4714 /* Helper function for lower_rec_input_clauses.  Emit into ilist sequence
4715    code to emit (type) (tskred_temp[idx]).  */
4716 
4717 static tree
task_reduction_read(gimple_seq * ilist,tree tskred_temp,tree type,unsigned idx)4718 task_reduction_read (gimple_seq *ilist, tree tskred_temp, tree type,
4719 		     unsigned idx)
4720 {
4721   unsigned HOST_WIDE_INT sz
4722     = tree_to_uhwi (TYPE_SIZE_UNIT (pointer_sized_int_node));
4723   tree r = build2 (MEM_REF, pointer_sized_int_node,
4724 		   tskred_temp, build_int_cst (TREE_TYPE (tskred_temp),
4725 					       idx * sz));
4726   tree v = create_tmp_var (pointer_sized_int_node);
4727   gimple *g = gimple_build_assign (v, r);
4728   gimple_seq_add_stmt (ilist, g);
4729   if (!useless_type_conversion_p (type, pointer_sized_int_node))
4730     {
4731       v = create_tmp_var (type);
4732       g = gimple_build_assign (v, NOP_EXPR, gimple_assign_lhs (g));
4733       gimple_seq_add_stmt (ilist, g);
4734     }
4735   return v;
4736 }
4737 
/* Lower early initialization of privatized variable NEW_VAR
   if it needs an allocator (has allocate clause).  VAR is the clause's
   original decl used as key into the allocate map.  On success, set
   ALLOCATOR and ALLOCATE_PTR, append the GOMP_alloc call to ILIST and
   return true; otherwise return false.  IS_REF selects the by-reference
   lowering path; SIZE, if non-NULL, overrides the allocation size.  */

static bool
lower_private_allocate (tree var, tree new_var, tree &allocator,
			tree &allocate_ptr, gimple_seq *ilist,
			omp_context *ctx, bool is_ref, tree size)
{
  /* Nothing to do if the caller already determined an allocator.  */
  if (allocator)
    return false;
  gcc_assert (allocate_ptr == NULL_TREE);
  /* Look VAR up in the context's allocate clause map.  */
  if (ctx->allocate_map
      && (DECL_P (new_var) || (TYPE_P (new_var) && size)))
    if (tree *allocatorp = ctx->allocate_map->get (var))
      allocator = *allocatorp;
  if (allocator == NULL_TREE)
    return false;
  if (!is_ref && omp_privatize_by_reference (var))
    {
      /* By-reference privatization is only handled when the caller asks
	 for it (is_ref); reset allocator for this invocation.  */
      allocator = NULL_TREE;
      return false;
    }

  /* A TREE_LIST allocator carries an extra alignment in its value.  */
  unsigned HOST_WIDE_INT ialign = 0;
  if (TREE_CODE (allocator) == TREE_LIST)
    {
      ialign = tree_to_uhwi (TREE_VALUE (allocator));
      allocator = TREE_PURPOSE (allocator);
    }
  if (TREE_CODE (allocator) != INTEGER_CST)
    allocator = build_outer_var_ref (allocator, ctx);
  allocator = fold_convert (pointer_sized_int_node, allocator);
  if (TREE_CODE (allocator) != INTEGER_CST)
    {
      /* Evaluate a non-constant allocator once into a temporary.
	 NOTE: this local VAR intentionally shadows the parameter.  */
      tree var = create_tmp_var (TREE_TYPE (allocator));
      gimplify_assign (var, allocator, ilist);
      allocator = var;
    }

  /* Determine pointer type, alignment and size of the allocation
     depending on whether NEW_VAR is a bare type, a reference decl or a
     plain decl.  */
  tree ptr_type, align, sz = size;
  if (TYPE_P (new_var))
    {
      ptr_type = build_pointer_type (new_var);
      ialign = MAX (ialign, TYPE_ALIGN_UNIT (new_var));
    }
  else if (is_ref)
    {
      ptr_type = build_pointer_type (TREE_TYPE (TREE_TYPE (new_var)));
      ialign = MAX (ialign, TYPE_ALIGN_UNIT (TREE_TYPE (ptr_type)));
    }
  else
    {
      ptr_type = build_pointer_type (TREE_TYPE (new_var));
      ialign = MAX (ialign, DECL_ALIGN_UNIT (new_var));
      if (sz == NULL_TREE)
	sz = fold_convert (size_type_node, DECL_SIZE_UNIT (new_var));
    }
  align = build_int_cst (size_type_node, ialign);
  if (TREE_CODE (sz) != INTEGER_CST)
    {
      /* Evaluate a non-constant size once into a temporary.  */
      tree szvar = create_tmp_var (size_type_node);
      gimplify_assign (szvar, sz, ilist);
      sz = szvar;
    }
  /* Emit allocate_ptr = GOMP_alloc (align, sz, allocator);  */
  allocate_ptr = create_tmp_var (ptr_type);
  tree a = builtin_decl_explicit (BUILT_IN_GOMP_ALLOC);
  gimple *g = gimple_build_call (a, 3, align, sz, allocator);
  gimple_call_set_lhs (g, allocate_ptr);
  gimple_seq_add_stmt (ilist, g);
  if (!is_ref)
    {
      /* Make NEW_VAR refer to the allocated storage via a value expr.  */
      tree x = build_simple_mem_ref (allocate_ptr);
      TREE_THIS_NOTRAP (x) = 1;
      SET_DECL_VALUE_EXPR (new_var, x);
      DECL_HAS_VALUE_EXPR_P (new_var) = 1;
    }
  return true;
}
4816 
4817 /* Generate code to implement the input clauses, FIRSTPRIVATE and COPYIN,
4818    from the receiver (aka child) side and initializers for REFERENCE_TYPE
4819    private variables.  Initialization statements go in ILIST, while calls
4820    to destructors go in DLIST.  */
4821 
4822 static void
lower_rec_input_clauses(tree clauses,gimple_seq * ilist,gimple_seq * dlist,omp_context * ctx,struct omp_for_data * fd)4823 lower_rec_input_clauses (tree clauses, gimple_seq *ilist, gimple_seq *dlist,
4824 			 omp_context *ctx, struct omp_for_data *fd)
4825 {
4826   tree c, copyin_seq, x, ptr;
4827   bool copyin_by_ref = false;
4828   bool lastprivate_firstprivate = false;
4829   bool reduction_omp_orig_ref = false;
4830   int pass;
4831   bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
4832 		  && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD);
4833   omplow_simd_context sctx = omplow_simd_context ();
4834   tree simt_lane = NULL_TREE, simtrec = NULL_TREE;
4835   tree ivar = NULL_TREE, lvar = NULL_TREE, uid = NULL_TREE;
4836   gimple_seq llist[4] = { };
4837   tree nonconst_simd_if = NULL_TREE;
4838 
4839   copyin_seq = NULL;
4840   sctx.is_simt = is_simd && omp_find_clause (clauses, OMP_CLAUSE__SIMT_);
4841 
4842   /* Set max_vf=1 (which will later enforce safelen=1) in simd loops
4843      with data sharing clauses referencing variable sized vars.  That
4844      is unnecessarily hard to support and very unlikely to result in
4845      vectorized code anyway.  */
4846   if (is_simd)
4847     for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
4848       switch (OMP_CLAUSE_CODE (c))
4849 	{
4850 	case OMP_CLAUSE_LINEAR:
4851 	  if (OMP_CLAUSE_LINEAR_ARRAY (c))
4852 	    sctx.max_vf = 1;
4853 	  /* FALLTHRU */
4854 	case OMP_CLAUSE_PRIVATE:
4855 	case OMP_CLAUSE_FIRSTPRIVATE:
4856 	case OMP_CLAUSE_LASTPRIVATE:
4857 	  if (is_variable_sized (OMP_CLAUSE_DECL (c)))
4858 	    sctx.max_vf = 1;
4859 	  else if (omp_privatize_by_reference (OMP_CLAUSE_DECL (c)))
4860 	    {
4861 	      tree rtype = TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c)));
4862 	      if (!TREE_CONSTANT (TYPE_SIZE_UNIT (rtype)))
4863 		sctx.max_vf = 1;
4864 	    }
4865 	  break;
4866 	case OMP_CLAUSE_REDUCTION:
4867 	case OMP_CLAUSE_IN_REDUCTION:
4868 	  if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
4869 	      || is_variable_sized (OMP_CLAUSE_DECL (c)))
4870 	    sctx.max_vf = 1;
4871 	  else if (omp_privatize_by_reference (OMP_CLAUSE_DECL (c)))
4872 	    {
4873 	      tree rtype = TREE_TYPE (TREE_TYPE (OMP_CLAUSE_DECL (c)));
4874 	      if (!TREE_CONSTANT (TYPE_SIZE_UNIT (rtype)))
4875 		sctx.max_vf = 1;
4876 	    }
4877 	  break;
4878 	case OMP_CLAUSE_IF:
4879 	  if (integer_zerop (OMP_CLAUSE_IF_EXPR (c)))
4880 	    sctx.max_vf = 1;
4881 	  else if (TREE_CODE (OMP_CLAUSE_IF_EXPR (c)) != INTEGER_CST)
4882 	    nonconst_simd_if = OMP_CLAUSE_IF_EXPR (c);
4883 	  break;
4884         case OMP_CLAUSE_SIMDLEN:
4885 	  if (integer_onep (OMP_CLAUSE_SIMDLEN_EXPR (c)))
4886 	    sctx.max_vf = 1;
4887 	  break;
4888 	case OMP_CLAUSE__CONDTEMP_:
4889 	  /* FIXME: lastprivate(conditional:) not handled for SIMT yet.  */
4890 	  if (sctx.is_simt)
4891 	    sctx.max_vf = 1;
4892 	  break;
4893 	default:
4894 	  continue;
4895 	}
4896 
4897   /* Add a placeholder for simduid.  */
4898   if (sctx.is_simt && maybe_ne (sctx.max_vf, 1U))
4899     sctx.simt_eargs.safe_push (NULL_TREE);
4900 
4901   unsigned task_reduction_cnt = 0;
4902   unsigned task_reduction_cntorig = 0;
4903   unsigned task_reduction_cnt_full = 0;
4904   unsigned task_reduction_cntorig_full = 0;
4905   unsigned task_reduction_other_cnt = 0;
4906   tree tskred_atype = NULL_TREE, tskred_avar = NULL_TREE;
4907   tree tskred_base = NULL_TREE, tskred_temp = NULL_TREE;
4908   /* Do all the fixed sized types in the first pass, and the variable sized
4909      types in the second pass.  This makes sure that the scalar arguments to
4910      the variable sized types are processed before we use them in the
4911      variable sized operations.  For task reductions we use 4 passes, in the
4912      first two we ignore them, in the third one gather arguments for
4913      GOMP_task_reduction_remap call and in the last pass actually handle
4914      the task reductions.  */
4915   for (pass = 0; pass < ((task_reduction_cnt || task_reduction_other_cnt)
4916 			 ? 4 : 2); ++pass)
4917     {
4918       if (pass == 2 && task_reduction_cnt)
4919 	{
4920 	  tskred_atype
4921 	    = build_array_type_nelts (ptr_type_node, task_reduction_cnt
4922 						     + task_reduction_cntorig);
4923 	  tskred_avar = create_tmp_var_raw (tskred_atype);
4924 	  gimple_add_tmp_var (tskred_avar);
4925 	  TREE_ADDRESSABLE (tskred_avar) = 1;
4926 	  task_reduction_cnt_full = task_reduction_cnt;
4927 	  task_reduction_cntorig_full = task_reduction_cntorig;
4928 	}
4929       else if (pass == 3 && task_reduction_cnt)
4930 	{
4931 	  x = builtin_decl_explicit (BUILT_IN_GOMP_TASK_REDUCTION_REMAP);
4932 	  gimple *g
4933 	    = gimple_build_call (x, 3, size_int (task_reduction_cnt),
4934 				 size_int (task_reduction_cntorig),
4935 				 build_fold_addr_expr (tskred_avar));
4936 	  gimple_seq_add_stmt (ilist, g);
4937 	}
4938       if (pass == 3 && task_reduction_other_cnt)
4939 	{
4940 	  /* For reduction clauses, build
4941 	     tskred_base = (void *) tskred_temp[2]
4942 			   + omp_get_thread_num () * tskred_temp[1]
4943 	     or if tskred_temp[1] is known to be constant, that constant
4944 	     directly.  This is the start of the private reduction copy block
4945 	     for the current thread.  */
4946 	  tree v = create_tmp_var (integer_type_node);
4947 	  x = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
4948 	  gimple *g = gimple_build_call (x, 0);
4949 	  gimple_call_set_lhs (g, v);
4950 	  gimple_seq_add_stmt (ilist, g);
4951 	  c = omp_find_clause (clauses, OMP_CLAUSE__REDUCTEMP_);
4952 	  tskred_temp = OMP_CLAUSE_DECL (c);
4953 	  if (is_taskreg_ctx (ctx))
4954 	    tskred_temp = lookup_decl (tskred_temp, ctx);
4955 	  tree v2 = create_tmp_var (sizetype);
4956 	  g = gimple_build_assign (v2, NOP_EXPR, v);
4957 	  gimple_seq_add_stmt (ilist, g);
4958 	  if (ctx->task_reductions[0])
4959 	    v = fold_convert (sizetype, ctx->task_reductions[0]);
4960 	  else
4961 	    v = task_reduction_read (ilist, tskred_temp, sizetype, 1);
4962 	  tree v3 = create_tmp_var (sizetype);
4963 	  g = gimple_build_assign (v3, MULT_EXPR, v2, v);
4964 	  gimple_seq_add_stmt (ilist, g);
4965 	  v = task_reduction_read (ilist, tskred_temp, ptr_type_node, 2);
4966 	  tskred_base = create_tmp_var (ptr_type_node);
4967 	  g = gimple_build_assign (tskred_base, POINTER_PLUS_EXPR, v, v3);
4968 	  gimple_seq_add_stmt (ilist, g);
4969 	}
4970       task_reduction_cnt = 0;
4971       task_reduction_cntorig = 0;
4972       task_reduction_other_cnt = 0;
4973       for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
4974 	{
4975 	  enum omp_clause_code c_kind = OMP_CLAUSE_CODE (c);
4976 	  tree var, new_var;
4977 	  bool by_ref;
4978 	  location_t clause_loc = OMP_CLAUSE_LOCATION (c);
4979 	  bool task_reduction_p = false;
4980 	  bool task_reduction_needs_orig_p = false;
4981 	  tree cond = NULL_TREE;
4982 	  tree allocator, allocate_ptr;
4983 
4984 	  switch (c_kind)
4985 	    {
4986 	    case OMP_CLAUSE_PRIVATE:
4987 	      if (OMP_CLAUSE_PRIVATE_DEBUG (c))
4988 		continue;
4989 	      break;
4990 	    case OMP_CLAUSE_SHARED:
4991 	      /* Ignore shared directives in teams construct inside
4992 		 of target construct.  */
4993 	      if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
4994 		  && !is_host_teams_ctx (ctx))
4995 		continue;
4996 	      if (maybe_lookup_decl (OMP_CLAUSE_DECL (c), ctx) == NULL)
4997 		{
4998 		  gcc_assert (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c)
4999 			      || is_global_var (OMP_CLAUSE_DECL (c)));
5000 		  continue;
5001 		}
5002 	    case OMP_CLAUSE_FIRSTPRIVATE:
5003 	    case OMP_CLAUSE_COPYIN:
5004 	      break;
5005 	    case OMP_CLAUSE_LINEAR:
5006 	      if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
5007 		  && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
5008 		lastprivate_firstprivate = true;
5009 	      break;
5010 	    case OMP_CLAUSE_REDUCTION:
5011 	    case OMP_CLAUSE_IN_REDUCTION:
5012 	      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
5013 		  || is_task_ctx (ctx)
5014 		  || OMP_CLAUSE_REDUCTION_TASK (c))
5015 		{
5016 		  task_reduction_p = true;
5017 		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
5018 		    {
5019 		      task_reduction_other_cnt++;
5020 		      if (pass == 2)
5021 			continue;
5022 		    }
5023 		  else
5024 		    task_reduction_cnt++;
5025 		  if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
5026 		    {
5027 		      var = OMP_CLAUSE_DECL (c);
5028 		      /* If var is a global variable that isn't privatized
5029 			 in outer contexts, we don't need to look up the
5030 			 original address, it is always the address of the
5031 			 global variable itself.  */
5032 		      if (!DECL_P (var)
5033 			  || omp_privatize_by_reference (var)
5034 			  || !is_global_var
5035 				(maybe_lookup_decl_in_outer_ctx (var, ctx)))
5036 			{
5037 			  task_reduction_needs_orig_p = true;
5038 			  if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5039 			    task_reduction_cntorig++;
5040 			}
5041 		    }
5042 		}
5043 	      else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
5044 		reduction_omp_orig_ref = true;
5045 	      break;
5046 	    case OMP_CLAUSE__REDUCTEMP_:
5047 	      if (!is_taskreg_ctx (ctx))
5048 		continue;
5049 	      /* FALLTHRU */
5050 	    case OMP_CLAUSE__LOOPTEMP_:
5051 	      /* Handle _looptemp_/_reductemp_ clauses only on
5052 		 parallel/task.  */
5053 	      if (fd)
5054 		continue;
5055 	      break;
5056 	    case OMP_CLAUSE_LASTPRIVATE:
5057 	      if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
5058 		{
5059 		  lastprivate_firstprivate = true;
5060 		  if (pass != 0 || is_taskloop_ctx (ctx))
5061 		    continue;
5062 		}
5063 	      /* Even without corresponding firstprivate, if
5064 		 decl is Fortran allocatable, it needs outer var
5065 		 reference.  */
5066 	      else if (pass == 0
5067 		       && lang_hooks.decls.omp_private_outer_ref
5068 							(OMP_CLAUSE_DECL (c)))
5069 		lastprivate_firstprivate = true;
5070 	      break;
5071 	    case OMP_CLAUSE_ALIGNED:
5072 	      if (pass != 1)
5073 		continue;
5074 	      var = OMP_CLAUSE_DECL (c);
5075 	      if (TREE_CODE (TREE_TYPE (var)) == POINTER_TYPE
5076 		  && !is_global_var (var))
5077 		{
5078 		  new_var = maybe_lookup_decl (var, ctx);
5079 		  if (new_var == NULL_TREE)
5080 		    new_var = maybe_lookup_decl_in_outer_ctx (var, ctx);
5081 		  x = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
5082 		  tree alarg = omp_clause_aligned_alignment (c);
5083 		  alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
5084 		  x = build_call_expr_loc (clause_loc, x, 2, new_var, alarg);
5085 		  x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
5086 		  x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
5087 		  gimplify_and_add (x, ilist);
5088 		}
5089 	      else if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
5090 		       && is_global_var (var))
5091 		{
5092 		  tree ptype = build_pointer_type (TREE_TYPE (var)), t, t2;
5093 		  new_var = lookup_decl (var, ctx);
5094 		  t = maybe_lookup_decl_in_outer_ctx (var, ctx);
5095 		  t = build_fold_addr_expr_loc (clause_loc, t);
5096 		  t2 = builtin_decl_explicit (BUILT_IN_ASSUME_ALIGNED);
5097 		  tree alarg = omp_clause_aligned_alignment (c);
5098 		  alarg = fold_convert_loc (clause_loc, size_type_node, alarg);
5099 		  t = build_call_expr_loc (clause_loc, t2, 2, t, alarg);
5100 		  t = fold_convert_loc (clause_loc, ptype, t);
5101 		  x = create_tmp_var (ptype);
5102 		  t = build2 (MODIFY_EXPR, ptype, x, t);
5103 		  gimplify_and_add (t, ilist);
5104 		  t = build_simple_mem_ref_loc (clause_loc, x);
5105 		  SET_DECL_VALUE_EXPR (new_var, t);
5106 		  DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5107 		}
5108 	      continue;
5109 	    case OMP_CLAUSE__CONDTEMP_:
5110 	      if (is_parallel_ctx (ctx)
5111 		  || (is_simd && !OMP_CLAUSE__CONDTEMP__ITER (c)))
5112 		break;
5113 	      continue;
5114 	    default:
5115 	      continue;
5116 	    }
5117 
5118 	  if (task_reduction_p != (pass >= 2))
5119 	    continue;
5120 
5121 	  allocator = NULL_TREE;
5122 	  allocate_ptr = NULL_TREE;
5123 	  new_var = var = OMP_CLAUSE_DECL (c);
5124 	  if ((c_kind == OMP_CLAUSE_REDUCTION
5125 	       || c_kind == OMP_CLAUSE_IN_REDUCTION)
5126 	      && TREE_CODE (var) == MEM_REF)
5127 	    {
5128 	      var = TREE_OPERAND (var, 0);
5129 	      if (TREE_CODE (var) == POINTER_PLUS_EXPR)
5130 		var = TREE_OPERAND (var, 0);
5131 	      if (TREE_CODE (var) == INDIRECT_REF
5132 		  || TREE_CODE (var) == ADDR_EXPR)
5133 		var = TREE_OPERAND (var, 0);
5134 	      if (is_variable_sized (var))
5135 		{
5136 		  gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
5137 		  var = DECL_VALUE_EXPR (var);
5138 		  gcc_assert (TREE_CODE (var) == INDIRECT_REF);
5139 		  var = TREE_OPERAND (var, 0);
5140 		  gcc_assert (DECL_P (var));
5141 		}
5142 	      new_var = var;
5143 	    }
5144 	  if (c_kind == OMP_CLAUSE_IN_REDUCTION && is_omp_target (ctx->stmt))
5145 	    {
5146 	      splay_tree_key key = (splay_tree_key) &DECL_CONTEXT (var);
5147 	      new_var = (tree) splay_tree_lookup (ctx->field_map, key)->value;
5148 	    }
5149 	  else if (c_kind != OMP_CLAUSE_COPYIN)
5150 	    new_var = lookup_decl (var, ctx);
5151 
5152 	  if (c_kind == OMP_CLAUSE_SHARED || c_kind == OMP_CLAUSE_COPYIN)
5153 	    {
5154 	      if (pass != 0)
5155 		continue;
5156 	    }
5157 	  /* C/C++ array section reductions.  */
5158 	  else if ((c_kind == OMP_CLAUSE_REDUCTION
5159 		    || c_kind == OMP_CLAUSE_IN_REDUCTION)
5160 		   && var != OMP_CLAUSE_DECL (c))
5161 	    {
5162 	      if (pass == 0)
5163 		continue;
5164 
5165 	      tree bias = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
5166 	      tree orig_var = TREE_OPERAND (OMP_CLAUSE_DECL (c), 0);
5167 
5168 	      if (TREE_CODE (orig_var) == POINTER_PLUS_EXPR)
5169 		{
5170 		  tree b = TREE_OPERAND (orig_var, 1);
5171 		  if (is_omp_target (ctx->stmt))
5172 		    b = NULL_TREE;
5173 		  else
5174 		    b = maybe_lookup_decl (b, ctx);
5175 		  if (b == NULL)
5176 		    {
5177 		      b = TREE_OPERAND (orig_var, 1);
5178 		      b = maybe_lookup_decl_in_outer_ctx (b, ctx);
5179 		    }
5180 		  if (integer_zerop (bias))
5181 		    bias = b;
5182 		  else
5183 		    {
5184 		      bias = fold_convert_loc (clause_loc,
5185 					       TREE_TYPE (b), bias);
5186 		      bias = fold_build2_loc (clause_loc, PLUS_EXPR,
5187 					      TREE_TYPE (b), b, bias);
5188 		    }
5189 		  orig_var = TREE_OPERAND (orig_var, 0);
5190 		}
5191 	      if (pass == 2)
5192 		{
5193 		  tree out = maybe_lookup_decl_in_outer_ctx (var, ctx);
5194 		  if (is_global_var (out)
5195 		      && TREE_CODE (TREE_TYPE (out)) != POINTER_TYPE
5196 		      && (TREE_CODE (TREE_TYPE (out)) != REFERENCE_TYPE
5197 			  || (TREE_CODE (TREE_TYPE (TREE_TYPE (out)))
5198 			      != POINTER_TYPE)))
5199 		    x = var;
5200 		  else if (is_omp_target (ctx->stmt))
5201 		    x = out;
5202 		  else
5203 		    {
5204 		      bool by_ref = use_pointer_for_field (var, NULL);
5205 		      x = build_receiver_ref (var, by_ref, ctx);
5206 		      if (TREE_CODE (TREE_TYPE (var)) == REFERENCE_TYPE
5207 			  && (TREE_CODE (TREE_TYPE (TREE_TYPE (var)))
5208 			      == POINTER_TYPE))
5209 			x = build_fold_addr_expr (x);
5210 		    }
5211 		  if (TREE_CODE (orig_var) == INDIRECT_REF)
5212 		    x = build_simple_mem_ref (x);
5213 		  else if (TREE_CODE (orig_var) == ADDR_EXPR)
5214 		    {
5215 		      if (var == TREE_OPERAND (orig_var, 0))
5216 			x = build_fold_addr_expr (x);
5217 		    }
5218 		  bias = fold_convert (sizetype, bias);
5219 		  x = fold_convert (ptr_type_node, x);
5220 		  x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
5221 				       TREE_TYPE (x), x, bias);
5222 		  unsigned cnt = task_reduction_cnt - 1;
5223 		  if (!task_reduction_needs_orig_p)
5224 		    cnt += (task_reduction_cntorig_full
5225 			    - task_reduction_cntorig);
5226 		  else
5227 		    cnt = task_reduction_cntorig - 1;
5228 		  tree r = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5229 				   size_int (cnt), NULL_TREE, NULL_TREE);
5230 		  gimplify_assign (r, x, ilist);
5231 		  continue;
5232 		}
5233 
5234 	      if (TREE_CODE (orig_var) == INDIRECT_REF
5235 		  || TREE_CODE (orig_var) == ADDR_EXPR)
5236 		orig_var = TREE_OPERAND (orig_var, 0);
5237 	      tree d = OMP_CLAUSE_DECL (c);
5238 	      tree type = TREE_TYPE (d);
5239 	      gcc_assert (TREE_CODE (type) == ARRAY_TYPE);
5240 	      tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
5241 	      tree sz = v;
5242 	      const char *name = get_name (orig_var);
5243 	      if (pass != 3 && !TREE_CONSTANT (v))
5244 		{
5245 		  tree t;
5246 		  if (is_omp_target (ctx->stmt))
5247 		    t = NULL_TREE;
5248 		  else
5249 		    t = maybe_lookup_decl (v, ctx);
5250 		  if (t)
5251 		    v = t;
5252 		  else
5253 		    v = maybe_lookup_decl_in_outer_ctx (v, ctx);
5254 		  gimplify_expr (&v, ilist, NULL, is_gimple_val, fb_rvalue);
5255 		  t = fold_build2_loc (clause_loc, PLUS_EXPR,
5256 				       TREE_TYPE (v), v,
5257 				       build_int_cst (TREE_TYPE (v), 1));
5258 		  sz = fold_build2_loc (clause_loc, MULT_EXPR,
5259 					TREE_TYPE (v), t,
5260 					TYPE_SIZE_UNIT (TREE_TYPE (type)));
5261 		}
5262 	      if (pass == 3)
5263 		{
5264 		  tree xv = create_tmp_var (ptr_type_node);
5265 		  if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5266 		    {
5267 		      unsigned cnt = task_reduction_cnt - 1;
5268 		      if (!task_reduction_needs_orig_p)
5269 			cnt += (task_reduction_cntorig_full
5270 				- task_reduction_cntorig);
5271 		      else
5272 			cnt = task_reduction_cntorig - 1;
5273 		      x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5274 				  size_int (cnt), NULL_TREE, NULL_TREE);
5275 
5276 		      gimple *g = gimple_build_assign (xv, x);
5277 		      gimple_seq_add_stmt (ilist, g);
5278 		    }
5279 		  else
5280 		    {
5281 		      unsigned int idx = *ctx->task_reduction_map->get (c);
5282 		      tree off;
5283 		      if (ctx->task_reductions[1 + idx])
5284 			off = fold_convert (sizetype,
5285 					    ctx->task_reductions[1 + idx]);
5286 		      else
5287 			off = task_reduction_read (ilist, tskred_temp, sizetype,
5288 						   7 + 3 * idx + 1);
5289 		      gimple *g = gimple_build_assign (xv, POINTER_PLUS_EXPR,
5290 						       tskred_base, off);
5291 		      gimple_seq_add_stmt (ilist, g);
5292 		    }
5293 		  x = fold_convert (build_pointer_type (boolean_type_node),
5294 				    xv);
5295 		  if (TREE_CONSTANT (v))
5296 		    x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (x), x,
5297 				     TYPE_SIZE_UNIT (type));
5298 		  else
5299 		    {
5300 		      tree t;
5301 		      if (is_omp_target (ctx->stmt))
5302 			t = NULL_TREE;
5303 		      else
5304 			t = maybe_lookup_decl (v, ctx);
5305 		      if (t)
5306 			v = t;
5307 		      else
5308 			v = maybe_lookup_decl_in_outer_ctx (v, ctx);
5309 		      gimplify_expr (&v, ilist, NULL, is_gimple_val,
5310 				     fb_rvalue);
5311 		      t = fold_build2_loc (clause_loc, PLUS_EXPR,
5312 					   TREE_TYPE (v), v,
5313 					   build_int_cst (TREE_TYPE (v), 1));
5314 		      t = fold_build2_loc (clause_loc, MULT_EXPR,
5315 					   TREE_TYPE (v), t,
5316 					   TYPE_SIZE_UNIT (TREE_TYPE (type)));
5317 		      x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (x), x, t);
5318 		    }
5319 		  cond = create_tmp_var (TREE_TYPE (x));
5320 		  gimplify_assign (cond, x, ilist);
5321 		  x = xv;
5322 		}
5323 	      else if (lower_private_allocate (var, type, allocator,
5324 					       allocate_ptr, ilist, ctx,
5325 					       true,
5326 					       TREE_CONSTANT (v)
5327 					       ? TYPE_SIZE_UNIT (type)
5328 					       : sz))
5329 		x = allocate_ptr;
5330 	      else if (TREE_CONSTANT (v))
5331 		{
5332 		  x = create_tmp_var_raw (type, name);
5333 		  gimple_add_tmp_var (x);
5334 		  TREE_ADDRESSABLE (x) = 1;
5335 		  x = build_fold_addr_expr_loc (clause_loc, x);
5336 		}
5337 	      else
5338 		{
5339 		  tree atmp
5340 		    = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
5341 		  tree al = size_int (TYPE_ALIGN (TREE_TYPE (type)));
5342 		  x = build_call_expr_loc (clause_loc, atmp, 2, sz, al);
5343 		}
5344 
5345 	      tree ptype = build_pointer_type (TREE_TYPE (type));
5346 	      x = fold_convert_loc (clause_loc, ptype, x);
5347 	      tree y = create_tmp_var (ptype, name);
5348 	      gimplify_assign (y, x, ilist);
5349 	      x = y;
5350 	      tree yb = y;
5351 
5352 	      if (!integer_zerop (bias))
5353 		{
5354 		  bias = fold_convert_loc (clause_loc, pointer_sized_int_node,
5355 					   bias);
5356 		  yb = fold_convert_loc (clause_loc, pointer_sized_int_node,
5357 					 x);
5358 		  yb = fold_build2_loc (clause_loc, MINUS_EXPR,
5359 					pointer_sized_int_node, yb, bias);
5360 		  x = fold_convert_loc (clause_loc, TREE_TYPE (x), yb);
5361 		  yb = create_tmp_var (ptype, name);
5362 		  gimplify_assign (yb, x, ilist);
5363 		  x = yb;
5364 		}
5365 
5366 	      d = TREE_OPERAND (d, 0);
5367 	      if (TREE_CODE (d) == POINTER_PLUS_EXPR)
5368 		d = TREE_OPERAND (d, 0);
5369 	      if (TREE_CODE (d) == ADDR_EXPR)
5370 		{
5371 		  if (orig_var != var)
5372 		    {
5373 		      gcc_assert (is_variable_sized (orig_var));
5374 		      x = fold_convert_loc (clause_loc, TREE_TYPE (new_var),
5375 					    x);
5376 		      gimplify_assign (new_var, x, ilist);
5377 		      tree new_orig_var = lookup_decl (orig_var, ctx);
5378 		      tree t = build_fold_indirect_ref (new_var);
5379 		      DECL_IGNORED_P (new_var) = 0;
5380 		      TREE_THIS_NOTRAP (t) = 1;
5381 		      SET_DECL_VALUE_EXPR (new_orig_var, t);
5382 		      DECL_HAS_VALUE_EXPR_P (new_orig_var) = 1;
5383 		    }
5384 		  else
5385 		    {
5386 		      x = build2 (MEM_REF, TREE_TYPE (new_var), x,
5387 				  build_int_cst (ptype, 0));
5388 		      SET_DECL_VALUE_EXPR (new_var, x);
5389 		      DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5390 		    }
5391 		}
5392 	      else
5393 		{
5394 		  gcc_assert (orig_var == var);
5395 		  if (TREE_CODE (d) == INDIRECT_REF)
5396 		    {
5397 		      x = create_tmp_var (ptype, name);
5398 		      TREE_ADDRESSABLE (x) = 1;
5399 		      gimplify_assign (x, yb, ilist);
5400 		      x = build_fold_addr_expr_loc (clause_loc, x);
5401 		    }
5402 		  x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
5403 		  gimplify_assign (new_var, x, ilist);
5404 		}
5405 	      /* GOMP_taskgroup_reduction_register memsets the whole
5406 		 array to zero.  If the initializer is zero, we don't
5407 		 need to initialize it again, just mark it as ever
5408 		 used unconditionally, i.e. cond = true.  */
5409 	      if (cond
5410 		  && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) == NULL_TREE
5411 		  && initializer_zerop (omp_reduction_init (c,
5412 							    TREE_TYPE (type))))
5413 		{
5414 		  gimple *g = gimple_build_assign (build_simple_mem_ref (cond),
5415 						   boolean_true_node);
5416 		  gimple_seq_add_stmt (ilist, g);
5417 		  continue;
5418 		}
5419 	      tree end = create_artificial_label (UNKNOWN_LOCATION);
5420 	      if (cond)
5421 		{
5422 		  gimple *g;
5423 		  if (!is_parallel_ctx (ctx))
5424 		    {
5425 		      tree condv = create_tmp_var (boolean_type_node);
5426 		      g = gimple_build_assign (condv,
5427 					       build_simple_mem_ref (cond));
5428 		      gimple_seq_add_stmt (ilist, g);
5429 		      tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
5430 		      g = gimple_build_cond (NE_EXPR, condv,
5431 					     boolean_false_node, end, lab1);
5432 		      gimple_seq_add_stmt (ilist, g);
5433 		      gimple_seq_add_stmt (ilist, gimple_build_label (lab1));
5434 		    }
5435 		  g = gimple_build_assign (build_simple_mem_ref (cond),
5436 					   boolean_true_node);
5437 		  gimple_seq_add_stmt (ilist, g);
5438 		}
5439 
5440 	      tree y1 = create_tmp_var (ptype);
5441 	      gimplify_assign (y1, y, ilist);
5442 	      tree i2 = NULL_TREE, y2 = NULL_TREE;
5443 	      tree body2 = NULL_TREE, end2 = NULL_TREE;
5444 	      tree y3 = NULL_TREE, y4 = NULL_TREE;
5445 	      if (task_reduction_needs_orig_p)
5446 		{
5447 		  y3 = create_tmp_var (ptype);
5448 		  tree ref;
5449 		  if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5450 		    ref = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5451 				  size_int (task_reduction_cnt_full
5452 					    + task_reduction_cntorig - 1),
5453 				  NULL_TREE, NULL_TREE);
5454 		  else
5455 		    {
5456 		      unsigned int idx = *ctx->task_reduction_map->get (c);
5457 		      ref = task_reduction_read (ilist, tskred_temp, ptype,
5458 						 7 + 3 * idx);
5459 		    }
5460 		  gimplify_assign (y3, ref, ilist);
5461 		}
5462 	      else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) || is_simd)
5463 		{
5464 		  if (pass != 3)
5465 		    {
5466 		      y2 = create_tmp_var (ptype);
5467 		      gimplify_assign (y2, y, ilist);
5468 		    }
5469 		  if (is_simd || OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
5470 		    {
5471 		      tree ref = build_outer_var_ref (var, ctx);
5472 		      /* For ref build_outer_var_ref already performs this.  */
5473 		      if (TREE_CODE (d) == INDIRECT_REF)
5474 			gcc_assert (omp_privatize_by_reference (var));
5475 		      else if (TREE_CODE (d) == ADDR_EXPR)
5476 			ref = build_fold_addr_expr (ref);
5477 		      else if (omp_privatize_by_reference (var))
5478 			ref = build_fold_addr_expr (ref);
5479 		      ref = fold_convert_loc (clause_loc, ptype, ref);
5480 		      if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
5481 			  && OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
5482 			{
5483 			  y3 = create_tmp_var (ptype);
5484 			  gimplify_assign (y3, unshare_expr (ref), ilist);
5485 			}
5486 		      if (is_simd)
5487 			{
5488 			  y4 = create_tmp_var (ptype);
5489 			  gimplify_assign (y4, ref, dlist);
5490 			}
5491 		    }
5492 		}
5493 	      tree i = create_tmp_var (TREE_TYPE (v));
5494 	      gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), ilist);
5495 	      tree body = create_artificial_label (UNKNOWN_LOCATION);
5496 	      gimple_seq_add_stmt (ilist, gimple_build_label (body));
5497 	      if (y2)
5498 		{
5499 		  i2 = create_tmp_var (TREE_TYPE (v));
5500 		  gimplify_assign (i2, build_int_cst (TREE_TYPE (v), 0), dlist);
5501 		  body2 = create_artificial_label (UNKNOWN_LOCATION);
5502 		  end2 = create_artificial_label (UNKNOWN_LOCATION);
5503 		  gimple_seq_add_stmt (dlist, gimple_build_label (body2));
5504 		}
5505 	      if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5506 		{
5507 		  tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
5508 		  tree decl_placeholder
5509 		    = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
5510 		  SET_DECL_VALUE_EXPR (decl_placeholder,
5511 				       build_simple_mem_ref (y1));
5512 		  DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
5513 		  SET_DECL_VALUE_EXPR (placeholder,
5514 				       y3 ? build_simple_mem_ref (y3)
5515 				       : error_mark_node);
5516 		  DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
5517 		  x = lang_hooks.decls.omp_clause_default_ctor
5518 				(c, build_simple_mem_ref (y1),
5519 				 y3 ? build_simple_mem_ref (y3) : NULL_TREE);
5520 		  if (x)
5521 		    gimplify_and_add (x, ilist);
5522 		  if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
5523 		    {
5524 		      gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
5525 		      lower_omp (&tseq, ctx);
5526 		      gimple_seq_add_seq (ilist, tseq);
5527 		    }
5528 		  OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
5529 		  if (is_simd)
5530 		    {
5531 		      SET_DECL_VALUE_EXPR (decl_placeholder,
5532 					   build_simple_mem_ref (y2));
5533 		      SET_DECL_VALUE_EXPR (placeholder,
5534 					   build_simple_mem_ref (y4));
5535 		      gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
5536 		      lower_omp (&tseq, ctx);
5537 		      gimple_seq_add_seq (dlist, tseq);
5538 		      OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
5539 		    }
5540 		  DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
5541 		  DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 0;
5542 		  if (y2)
5543 		    {
5544 		      x = lang_hooks.decls.omp_clause_dtor
5545 						(c, build_simple_mem_ref (y2));
5546 		      if (x)
5547 			gimplify_and_add (x, dlist);
5548 		    }
5549 		}
5550 	      else
5551 		{
5552 		  x = omp_reduction_init (c, TREE_TYPE (type));
5553 		  enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
5554 
5555 		  /* reduction(-:var) sums up the partial results, so it
5556 		     acts identically to reduction(+:var).  */
5557 		  if (code == MINUS_EXPR)
5558 		    code = PLUS_EXPR;
5559 
5560 		  gimplify_assign (build_simple_mem_ref (y1), x, ilist);
5561 		  if (is_simd)
5562 		    {
5563 		      x = build2 (code, TREE_TYPE (type),
5564 				  build_simple_mem_ref (y4),
5565 				  build_simple_mem_ref (y2));
5566 		      gimplify_assign (build_simple_mem_ref (y4), x, dlist);
5567 		    }
5568 		}
5569 	      gimple *g
5570 		= gimple_build_assign (y1, POINTER_PLUS_EXPR, y1,
5571 				       TYPE_SIZE_UNIT (TREE_TYPE (type)));
5572 	      gimple_seq_add_stmt (ilist, g);
5573 	      if (y3)
5574 		{
5575 		  g = gimple_build_assign (y3, POINTER_PLUS_EXPR, y3,
5576 					   TYPE_SIZE_UNIT (TREE_TYPE (type)));
5577 		  gimple_seq_add_stmt (ilist, g);
5578 		}
5579 	      g = gimple_build_assign (i, PLUS_EXPR, i,
5580 				       build_int_cst (TREE_TYPE (i), 1));
5581 	      gimple_seq_add_stmt (ilist, g);
5582 	      g = gimple_build_cond (LE_EXPR, i, v, body, end);
5583 	      gimple_seq_add_stmt (ilist, g);
5584 	      gimple_seq_add_stmt (ilist, gimple_build_label (end));
5585 	      if (y2)
5586 		{
5587 		  g = gimple_build_assign (y2, POINTER_PLUS_EXPR, y2,
5588 					   TYPE_SIZE_UNIT (TREE_TYPE (type)));
5589 		  gimple_seq_add_stmt (dlist, g);
5590 		  if (y4)
5591 		    {
5592 		      g = gimple_build_assign
5593 					(y4, POINTER_PLUS_EXPR, y4,
5594 					 TYPE_SIZE_UNIT (TREE_TYPE (type)));
5595 		      gimple_seq_add_stmt (dlist, g);
5596 		    }
5597 		  g = gimple_build_assign (i2, PLUS_EXPR, i2,
5598 					   build_int_cst (TREE_TYPE (i2), 1));
5599 		  gimple_seq_add_stmt (dlist, g);
5600 		  g = gimple_build_cond (LE_EXPR, i2, v, body2, end2);
5601 		  gimple_seq_add_stmt (dlist, g);
5602 		  gimple_seq_add_stmt (dlist, gimple_build_label (end2));
5603 		}
5604 	      if (allocator)
5605 		{
5606 		  tree f = builtin_decl_explicit (BUILT_IN_GOMP_FREE);
5607 		  g = gimple_build_call (f, 2, allocate_ptr, allocator);
5608 		  gimple_seq_add_stmt (dlist, g);
5609 		}
5610 	      continue;
5611 	    }
5612 	  else if (pass == 2)
5613 	    {
5614 	      tree out = maybe_lookup_decl_in_outer_ctx (var, ctx);
5615 	      if (is_global_var (out))
5616 		x = var;
5617 	      else if (is_omp_target (ctx->stmt))
5618 		x = out;
5619 	      else
5620 		{
5621 		  bool by_ref = use_pointer_for_field (var, ctx);
5622 		  x = build_receiver_ref (var, by_ref, ctx);
5623 		}
5624 	      if (!omp_privatize_by_reference (var))
5625 		x = build_fold_addr_expr (x);
5626 	      x = fold_convert (ptr_type_node, x);
5627 	      unsigned cnt = task_reduction_cnt - 1;
5628 	      if (!task_reduction_needs_orig_p)
5629 		cnt += task_reduction_cntorig_full - task_reduction_cntorig;
5630 	      else
5631 		cnt = task_reduction_cntorig - 1;
5632 	      tree r = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5633 			       size_int (cnt), NULL_TREE, NULL_TREE);
5634 	      gimplify_assign (r, x, ilist);
5635 	      continue;
5636 	    }
5637 	  else if (pass == 3)
5638 	    {
5639 	      tree type = TREE_TYPE (new_var);
5640 	      if (!omp_privatize_by_reference (var))
5641 		type = build_pointer_type (type);
5642 	      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
5643 		{
5644 		  unsigned cnt = task_reduction_cnt - 1;
5645 		  if (!task_reduction_needs_orig_p)
5646 		    cnt += (task_reduction_cntorig_full
5647 			    - task_reduction_cntorig);
5648 		  else
5649 		    cnt = task_reduction_cntorig - 1;
5650 		  x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
5651 			      size_int (cnt), NULL_TREE, NULL_TREE);
5652 		}
5653 	      else
5654 		{
5655 		  unsigned int idx = *ctx->task_reduction_map->get (c);
5656 		  tree off;
5657 		  if (ctx->task_reductions[1 + idx])
5658 		    off = fold_convert (sizetype,
5659 					ctx->task_reductions[1 + idx]);
5660 		  else
5661 		    off = task_reduction_read (ilist, tskred_temp, sizetype,
5662 					       7 + 3 * idx + 1);
5663 		  x = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node,
5664 				   tskred_base, off);
5665 		}
5666 	      x = fold_convert (type, x);
5667 	      tree t;
5668 	      if (omp_privatize_by_reference (var))
5669 		{
5670 		  gimplify_assign (new_var, x, ilist);
5671 		  t = new_var;
5672 		  new_var = build_simple_mem_ref (new_var);
5673 		}
5674 	      else
5675 		{
5676 		  t = create_tmp_var (type);
5677 		  gimplify_assign (t, x, ilist);
5678 		  SET_DECL_VALUE_EXPR (new_var, build_simple_mem_ref (t));
5679 		  DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5680 		}
5681 	      t = fold_convert (build_pointer_type (boolean_type_node), t);
5682 	      t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t,
5683 			       TYPE_SIZE_UNIT (TREE_TYPE (type)));
5684 	      cond = create_tmp_var (TREE_TYPE (t));
5685 	      gimplify_assign (cond, t, ilist);
5686 	    }
5687 	  else if (is_variable_sized (var))
5688 	    {
5689 	      /* For variable sized types, we need to allocate the
5690 		 actual storage here.  Call alloca and store the
5691 		 result in the pointer decl that we created elsewhere.  */
5692 	      if (pass == 0)
5693 		continue;
5694 
5695 	      if (c_kind != OMP_CLAUSE_FIRSTPRIVATE || !is_task_ctx (ctx))
5696 		{
5697 		  tree tmp;
5698 
5699 		  ptr = DECL_VALUE_EXPR (new_var);
5700 		  gcc_assert (TREE_CODE (ptr) == INDIRECT_REF);
5701 		  ptr = TREE_OPERAND (ptr, 0);
5702 		  gcc_assert (DECL_P (ptr));
5703 		  x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
5704 
5705 		  if (lower_private_allocate (var, new_var, allocator,
5706 					      allocate_ptr, ilist, ctx,
5707 					      false, x))
5708 		    tmp = allocate_ptr;
5709 		  else
5710 		    {
5711 		      /* void *tmp = __builtin_alloca */
5712 		      tree atmp
5713 			= builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
5714 		      gcall *stmt
5715 			= gimple_build_call (atmp, 2, x,
5716 					     size_int (DECL_ALIGN (var)));
5717 		      cfun->calls_alloca = 1;
5718 		      tmp = create_tmp_var_raw (ptr_type_node);
5719 		      gimple_add_tmp_var (tmp);
5720 		      gimple_call_set_lhs (stmt, tmp);
5721 
5722 		      gimple_seq_add_stmt (ilist, stmt);
5723 		    }
5724 
5725 		  x = fold_convert_loc (clause_loc, TREE_TYPE (ptr), tmp);
5726 		  gimplify_assign (ptr, x, ilist);
5727 		}
5728 	    }
5729 	  else if (omp_privatize_by_reference (var)
5730 		   && (c_kind != OMP_CLAUSE_FIRSTPRIVATE
5731 		       || !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c)))
5732 	    {
5733 	      /* For references that are being privatized for Fortran,
5734 		 allocate new backing storage for the new pointer
5735 		 variable.  This allows us to avoid changing all the
5736 		 code that expects a pointer to something that expects
5737 		 a direct variable.  */
5738 	      if (pass == 0)
5739 		continue;
5740 
5741 	      x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
5742 	      if (c_kind == OMP_CLAUSE_FIRSTPRIVATE && is_task_ctx (ctx))
5743 		{
5744 		  x = build_receiver_ref (var, false, ctx);
5745 		  if (ctx->allocate_map)
5746 		    if (tree *allocatep = ctx->allocate_map->get (var))
5747 		      {
5748 			allocator = *allocatep;
5749 			if (TREE_CODE (allocator) == TREE_LIST)
5750 			  allocator = TREE_PURPOSE (allocator);
5751 			if (TREE_CODE (allocator) != INTEGER_CST)
5752 			  allocator = build_outer_var_ref (allocator, ctx);
5753 			allocator = fold_convert (pointer_sized_int_node,
5754 						  allocator);
5755 			allocate_ptr = unshare_expr (x);
5756 		      }
5757 		  if (allocator == NULL_TREE)
5758 		    x = build_fold_addr_expr_loc (clause_loc, x);
5759 		}
5760 	      else if (lower_private_allocate (var, new_var, allocator,
5761 					       allocate_ptr,
5762 					       ilist, ctx, true, x))
5763 		x = allocate_ptr;
5764 	      else if (TREE_CONSTANT (x))
5765 		{
5766 		  /* For reduction in SIMD loop, defer adding the
5767 		     initialization of the reference, because if we decide
5768 		     to use SIMD array for it, the initialization could cause
5769 		     expansion ICE.  Ditto for other privatization clauses.  */
5770 		  if (is_simd)
5771 		    x = NULL_TREE;
5772 		  else
5773 		    {
5774 		      x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
5775 					      get_name (var));
5776 		      gimple_add_tmp_var (x);
5777 		      TREE_ADDRESSABLE (x) = 1;
5778 		      x = build_fold_addr_expr_loc (clause_loc, x);
5779 		    }
5780 		}
5781 	      else
5782 		{
5783 		  tree atmp
5784 		    = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
5785 		  tree rtype = TREE_TYPE (TREE_TYPE (new_var));
5786 		  tree al = size_int (TYPE_ALIGN (rtype));
5787 		  x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
5788 		}
5789 
5790 	      if (x)
5791 		{
5792 		  x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
5793 		  gimplify_assign (new_var, x, ilist);
5794 		}
5795 
5796 	      new_var = build_simple_mem_ref_loc (clause_loc, new_var);
5797 	    }
5798 	  else if ((c_kind == OMP_CLAUSE_REDUCTION
5799 		    || c_kind == OMP_CLAUSE_IN_REDUCTION)
5800 		   && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5801 	    {
5802 	      if (pass == 0)
5803 		continue;
5804 	    }
5805 	  else if (pass != 0)
5806 	    continue;
5807 
5808 	  switch (OMP_CLAUSE_CODE (c))
5809 	    {
5810 	    case OMP_CLAUSE_SHARED:
5811 	      /* Ignore shared directives in teams construct inside
5812 		 target construct.  */
5813 	      if (gimple_code (ctx->stmt) == GIMPLE_OMP_TEAMS
5814 		  && !is_host_teams_ctx (ctx))
5815 		continue;
5816 	      /* Shared global vars are just accessed directly.  */
5817 	      if (is_global_var (new_var))
5818 		break;
5819 	      /* For taskloop firstprivate/lastprivate, represented
5820 		 as firstprivate and shared clause on the task, new_var
5821 		 is the firstprivate var.  */
5822 	      if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
5823 		break;
5824 	      /* Set up the DECL_VALUE_EXPR for shared variables now.  This
5825 		 needs to be delayed until after fixup_child_record_type so
5826 		 that we get the correct type during the dereference.  */
5827 	      by_ref = use_pointer_for_field (var, ctx);
5828 	      x = build_receiver_ref (var, by_ref, ctx);
5829 	      SET_DECL_VALUE_EXPR (new_var, x);
5830 	      DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5831 
5832 	      /* ??? If VAR is not passed by reference, and the variable
5833 		 hasn't been initialized yet, then we'll get a warning for
5834 		 the store into the omp_data_s structure.  Ideally, we'd be
5835 		 able to notice this and not store anything at all, but
5836 		 we're generating code too early.  Suppress the warning.  */
5837 	      if (!by_ref)
5838 		suppress_warning (var, OPT_Wuninitialized);
5839 	      break;
5840 
5841 	    case OMP_CLAUSE__CONDTEMP_:
5842 	      if (is_parallel_ctx (ctx))
5843 		{
5844 		  x = build_receiver_ref (var, false, ctx);
5845 		  SET_DECL_VALUE_EXPR (new_var, x);
5846 		  DECL_HAS_VALUE_EXPR_P (new_var) = 1;
5847 		}
5848 	      else if (is_simd && !OMP_CLAUSE__CONDTEMP__ITER (c))
5849 		{
5850 		  x = build_zero_cst (TREE_TYPE (var));
5851 		  goto do_private;
5852 		}
5853 	      break;
5854 
5855 	    case OMP_CLAUSE_LASTPRIVATE:
5856 	      if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
5857 		break;
5858 	      /* FALLTHRU */
5859 
5860 	    case OMP_CLAUSE_PRIVATE:
5861 	      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_PRIVATE)
5862 		x = build_outer_var_ref (var, ctx);
5863 	      else if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
5864 		{
5865 		  if (is_task_ctx (ctx))
5866 		    x = build_receiver_ref (var, false, ctx);
5867 		  else
5868 		    x = build_outer_var_ref (var, ctx, OMP_CLAUSE_PRIVATE);
5869 		}
5870 	      else
5871 		x = NULL;
5872 	    do_private:
5873 	      tree nx;
5874 	      bool copy_ctor;
5875 	      copy_ctor = false;
5876 	      lower_private_allocate (var, new_var, allocator, allocate_ptr,
5877 				      ilist, ctx, false, NULL_TREE);
5878 	      nx = unshare_expr (new_var);
5879 	      if (is_simd
5880 		  && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5881 		  && OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c))
5882 		copy_ctor = true;
5883 	      if (copy_ctor)
5884 		nx = lang_hooks.decls.omp_clause_copy_ctor (c, nx, x);
5885 	      else
5886 		nx = lang_hooks.decls.omp_clause_default_ctor (c, nx, x);
5887 	      if (is_simd)
5888 		{
5889 		  tree y = lang_hooks.decls.omp_clause_dtor (c, new_var);
5890 		  if ((TREE_ADDRESSABLE (new_var) || nx || y
5891 		       || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5892 			   && (gimple_omp_for_collapse (ctx->stmt) != 1
5893 			       || (gimple_omp_for_index (ctx->stmt, 0)
5894 				   != new_var)))
5895 		       || OMP_CLAUSE_CODE (c) == OMP_CLAUSE__CONDTEMP_
5896 		       || omp_privatize_by_reference (var))
5897 		      && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
5898 						       ivar, lvar))
5899 		    {
5900 		      if (omp_privatize_by_reference (var))
5901 			{
5902 			  gcc_assert (TREE_CODE (new_var) == MEM_REF);
5903 			  tree new_vard = TREE_OPERAND (new_var, 0);
5904 			  gcc_assert (DECL_P (new_vard));
5905 			  SET_DECL_VALUE_EXPR (new_vard,
5906 					       build_fold_addr_expr (lvar));
5907 			  DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
5908 			}
5909 
5910 		      if (nx)
5911 			{
5912 			  tree iv = unshare_expr (ivar);
5913 			  if (copy_ctor)
5914 			    x = lang_hooks.decls.omp_clause_copy_ctor (c, iv,
5915 								       x);
5916 			  else
5917 			    x = lang_hooks.decls.omp_clause_default_ctor (c,
5918 									  iv,
5919 									  x);
5920 			}
5921 		      else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE__CONDTEMP_)
5922 			{
5923 			  x = build2 (MODIFY_EXPR, TREE_TYPE (ivar),
5924 				      unshare_expr (ivar), x);
5925 			  nx = x;
5926 			}
5927 		      if (nx && x)
5928 			gimplify_and_add (x, &llist[0]);
5929 		      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5930 			  && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
5931 			{
5932 			  tree v = new_var;
5933 			  if (!DECL_P (v))
5934 			    {
5935 			      gcc_assert (TREE_CODE (v) == MEM_REF);
5936 			      v = TREE_OPERAND (v, 0);
5937 			      gcc_assert (DECL_P (v));
5938 			    }
5939 			  v = *ctx->lastprivate_conditional_map->get (v);
5940 			  tree t = create_tmp_var (TREE_TYPE (v));
5941 			  tree z = build_zero_cst (TREE_TYPE (v));
5942 			  tree orig_v
5943 			    = build_outer_var_ref (var, ctx,
5944 						   OMP_CLAUSE_LASTPRIVATE);
5945 			  gimple_seq_add_stmt (dlist,
5946 					       gimple_build_assign (t, z));
5947 			  gcc_assert (DECL_HAS_VALUE_EXPR_P (v));
5948 			  tree civar = DECL_VALUE_EXPR (v);
5949 			  gcc_assert (TREE_CODE (civar) == ARRAY_REF);
5950 			  civar = unshare_expr (civar);
5951 			  TREE_OPERAND (civar, 1) = sctx.idx;
5952 			  x = build2 (MODIFY_EXPR, TREE_TYPE (t), t,
5953 				      unshare_expr (civar));
5954 			  x = build2 (COMPOUND_EXPR, TREE_TYPE (orig_v), x,
5955 				      build2 (MODIFY_EXPR, TREE_TYPE (orig_v),
5956 					      orig_v, unshare_expr (ivar)));
5957 			  tree cond = build2 (LT_EXPR, boolean_type_node, t,
5958 					      civar);
5959 			  x = build3 (COND_EXPR, void_type_node, cond, x,
5960 				      void_node);
5961 			  gimple_seq tseq = NULL;
5962 			  gimplify_and_add (x, &tseq);
5963 			  if (ctx->outer)
5964 			    lower_omp (&tseq, ctx->outer);
5965 			  gimple_seq_add_seq (&llist[1], tseq);
5966 			}
5967 		      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5968 			  && ctx->for_simd_scan_phase)
5969 			{
5970 			  x = unshare_expr (ivar);
5971 			  tree orig_v
5972 			    = build_outer_var_ref (var, ctx,
5973 						   OMP_CLAUSE_LASTPRIVATE);
5974 			  x = lang_hooks.decls.omp_clause_assign_op (c, x,
5975 								     orig_v);
5976 			  gimplify_and_add (x, &llist[0]);
5977 			}
5978 		      if (y)
5979 			{
5980 			  y = lang_hooks.decls.omp_clause_dtor (c, ivar);
5981 			  if (y)
5982 			    gimplify_and_add (y, &llist[1]);
5983 			}
5984 		      break;
5985 		    }
5986 		  if (omp_privatize_by_reference (var))
5987 		    {
5988 		      gcc_assert (TREE_CODE (new_var) == MEM_REF);
5989 		      tree new_vard = TREE_OPERAND (new_var, 0);
5990 		      gcc_assert (DECL_P (new_vard));
5991 		      tree type = TREE_TYPE (TREE_TYPE (new_vard));
5992 		      x = TYPE_SIZE_UNIT (type);
5993 		      if (TREE_CONSTANT (x))
5994 			{
5995 			  x = create_tmp_var_raw (type, get_name (var));
5996 			  gimple_add_tmp_var (x);
5997 			  TREE_ADDRESSABLE (x) = 1;
5998 			  x = build_fold_addr_expr_loc (clause_loc, x);
5999 			  x = fold_convert_loc (clause_loc,
6000 						TREE_TYPE (new_vard), x);
6001 			  gimplify_assign (new_vard, x, ilist);
6002 			}
6003 		    }
6004 		}
6005 	      if (nx)
6006 		gimplify_and_add (nx, ilist);
6007 	      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6008 		  && is_simd
6009 		  && ctx->for_simd_scan_phase)
6010 		{
6011 		  tree orig_v = build_outer_var_ref (var, ctx,
6012 						     OMP_CLAUSE_LASTPRIVATE);
6013 		  x = lang_hooks.decls.omp_clause_assign_op (c, new_var,
6014 							     orig_v);
6015 		  gimplify_and_add (x, ilist);
6016 		}
6017 	      /* FALLTHRU */
6018 
6019 	    do_dtor:
6020 	      x = lang_hooks.decls.omp_clause_dtor (c, new_var);
6021 	      if (x)
6022 		gimplify_and_add (x, dlist);
6023 	      if (allocator)
6024 		{
6025 		  if (!is_gimple_val (allocator))
6026 		    {
6027 		      tree avar = create_tmp_var (TREE_TYPE (allocator));
6028 		      gimplify_assign (avar, allocator, dlist);
6029 		      allocator = avar;
6030 		    }
6031 		  if (!is_gimple_val (allocate_ptr))
6032 		    {
6033 		      tree apvar = create_tmp_var (TREE_TYPE (allocate_ptr));
6034 		      gimplify_assign (apvar, allocate_ptr, dlist);
6035 		      allocate_ptr = apvar;
6036 		    }
6037 		  tree f = builtin_decl_explicit (BUILT_IN_GOMP_FREE);
6038 		  gimple *g
6039 		    = gimple_build_call (f, 2, allocate_ptr, allocator);
6040 		  gimple_seq_add_stmt (dlist, g);
6041 		}
6042 	      break;
6043 
6044 	    case OMP_CLAUSE_LINEAR:
6045 	      if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
6046 		goto do_firstprivate;
6047 	      if (OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
6048 		x = NULL;
6049 	      else
6050 		x = build_outer_var_ref (var, ctx);
6051 	      goto do_private;
6052 
6053 	    case OMP_CLAUSE_FIRSTPRIVATE:
6054 	      if (is_task_ctx (ctx))
6055 		{
6056 		  if ((omp_privatize_by_reference (var)
6057 		       && !OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c))
6058 		      || is_variable_sized (var))
6059 		    goto do_dtor;
6060 		  else if (is_global_var (maybe_lookup_decl_in_outer_ctx (var,
6061 									  ctx))
6062 			   || use_pointer_for_field (var, NULL))
6063 		    {
6064 		      x = build_receiver_ref (var, false, ctx);
6065 		      if (ctx->allocate_map)
6066 			if (tree *allocatep = ctx->allocate_map->get (var))
6067 			  {
6068 			    allocator = *allocatep;
6069 			    if (TREE_CODE (allocator) == TREE_LIST)
6070 			      allocator = TREE_PURPOSE (allocator);
6071 			    if (TREE_CODE (allocator) != INTEGER_CST)
6072 			      allocator = build_outer_var_ref (allocator, ctx);
6073 			    allocator = fold_convert (pointer_sized_int_node,
6074 						      allocator);
6075 			    allocate_ptr = unshare_expr (x);
6076 			    x = build_simple_mem_ref (x);
6077 			    TREE_THIS_NOTRAP (x) = 1;
6078 			  }
6079 		      SET_DECL_VALUE_EXPR (new_var, x);
6080 		      DECL_HAS_VALUE_EXPR_P (new_var) = 1;
6081 		      goto do_dtor;
6082 		    }
6083 		}
6084 	      if (OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c)
6085 		  && omp_privatize_by_reference (var))
6086 		{
6087 		  x = build_outer_var_ref (var, ctx);
6088 		  gcc_assert (TREE_CODE (x) == MEM_REF
6089 			      && integer_zerop (TREE_OPERAND (x, 1)));
6090 		  x = TREE_OPERAND (x, 0);
6091 		  x = lang_hooks.decls.omp_clause_copy_ctor
6092 						(c, unshare_expr (new_var), x);
6093 		  gimplify_and_add (x, ilist);
6094 		  goto do_dtor;
6095 		}
6096 	    do_firstprivate:
6097 	      lower_private_allocate (var, new_var, allocator, allocate_ptr,
6098 				      ilist, ctx, false, NULL_TREE);
6099 	      x = build_outer_var_ref (var, ctx);
6100 	      if (is_simd)
6101 		{
6102 		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
6103 		      && gimple_omp_for_combined_into_p (ctx->stmt))
6104 		    {
6105 		      tree t = OMP_CLAUSE_LINEAR_STEP (c);
6106 		      tree stept = TREE_TYPE (t);
6107 		      tree ct = omp_find_clause (clauses,
6108 						 OMP_CLAUSE__LOOPTEMP_);
6109 		      gcc_assert (ct);
6110 		      tree l = OMP_CLAUSE_DECL (ct);
6111 		      tree n1 = fd->loop.n1;
6112 		      tree step = fd->loop.step;
6113 		      tree itype = TREE_TYPE (l);
6114 		      if (POINTER_TYPE_P (itype))
6115 			itype = signed_type_for (itype);
6116 		      l = fold_build2 (MINUS_EXPR, itype, l, n1);
6117 		      if (TYPE_UNSIGNED (itype)
6118 			  && fd->loop.cond_code == GT_EXPR)
6119 			l = fold_build2 (TRUNC_DIV_EXPR, itype,
6120 					 fold_build1 (NEGATE_EXPR, itype, l),
6121 					 fold_build1 (NEGATE_EXPR,
6122 						      itype, step));
6123 		      else
6124 			l = fold_build2 (TRUNC_DIV_EXPR, itype, l, step);
6125 		      t = fold_build2 (MULT_EXPR, stept,
6126 				       fold_convert (stept, l), t);
6127 
6128 		      if (OMP_CLAUSE_LINEAR_ARRAY (c))
6129 			{
6130 			  if (omp_privatize_by_reference (var))
6131 			    {
6132 			      gcc_assert (TREE_CODE (new_var) == MEM_REF);
6133 			      tree new_vard = TREE_OPERAND (new_var, 0);
6134 			      gcc_assert (DECL_P (new_vard));
6135 			      tree type = TREE_TYPE (TREE_TYPE (new_vard));
6136 			      nx = TYPE_SIZE_UNIT (type);
6137 			      if (TREE_CONSTANT (nx))
6138 				{
6139 				  nx = create_tmp_var_raw (type,
6140 							   get_name (var));
6141 				  gimple_add_tmp_var (nx);
6142 				  TREE_ADDRESSABLE (nx) = 1;
6143 				  nx = build_fold_addr_expr_loc (clause_loc,
6144 								 nx);
6145 				  nx = fold_convert_loc (clause_loc,
6146 							 TREE_TYPE (new_vard),
6147 							 nx);
6148 				  gimplify_assign (new_vard, nx, ilist);
6149 				}
6150 			    }
6151 
6152 			  x = lang_hooks.decls.omp_clause_linear_ctor
6153 							(c, new_var, x, t);
6154 			  gimplify_and_add (x, ilist);
6155 			  goto do_dtor;
6156 			}
6157 
6158 		      if (POINTER_TYPE_P (TREE_TYPE (x)))
6159 			x = fold_build2 (POINTER_PLUS_EXPR,
6160 					 TREE_TYPE (x), x, t);
6161 		      else
6162 			x = fold_build2 (PLUS_EXPR, TREE_TYPE (x), x, t);
6163 		    }
6164 
6165 		  if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_LINEAR
6166 		       || TREE_ADDRESSABLE (new_var)
6167 		       || omp_privatize_by_reference (var))
6168 		      && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
6169 						       ivar, lvar))
6170 		    {
6171 		      if (omp_privatize_by_reference (var))
6172 			{
6173 			  gcc_assert (TREE_CODE (new_var) == MEM_REF);
6174 			  tree new_vard = TREE_OPERAND (new_var, 0);
6175 			  gcc_assert (DECL_P (new_vard));
6176 			  SET_DECL_VALUE_EXPR (new_vard,
6177 					       build_fold_addr_expr (lvar));
6178 			  DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
6179 			}
6180 		      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR)
6181 			{
6182 			  tree iv = create_tmp_var (TREE_TYPE (new_var));
6183 			  x = lang_hooks.decls.omp_clause_copy_ctor (c, iv, x);
6184 			  gimplify_and_add (x, ilist);
6185 			  gimple_stmt_iterator gsi
6186 			    = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
6187 			  gassign *g
6188 			    = gimple_build_assign (unshare_expr (lvar), iv);
6189 			  gsi_insert_before_without_update (&gsi, g,
6190 							    GSI_SAME_STMT);
6191 			  tree t = OMP_CLAUSE_LINEAR_STEP (c);
6192 			  enum tree_code code = PLUS_EXPR;
6193 			  if (POINTER_TYPE_P (TREE_TYPE (new_var)))
6194 			    code = POINTER_PLUS_EXPR;
6195 			  g = gimple_build_assign (iv, code, iv, t);
6196 			  gsi_insert_before_without_update (&gsi, g,
6197 							    GSI_SAME_STMT);
6198 			  break;
6199 			}
6200 		      x = lang_hooks.decls.omp_clause_copy_ctor
6201 						(c, unshare_expr (ivar), x);
6202 		      gimplify_and_add (x, &llist[0]);
6203 		      x = lang_hooks.decls.omp_clause_dtor (c, ivar);
6204 		      if (x)
6205 			gimplify_and_add (x, &llist[1]);
6206 		      break;
6207 		    }
6208 		  if (omp_privatize_by_reference (var))
6209 		    {
6210 		      gcc_assert (TREE_CODE (new_var) == MEM_REF);
6211 		      tree new_vard = TREE_OPERAND (new_var, 0);
6212 		      gcc_assert (DECL_P (new_vard));
6213 		      tree type = TREE_TYPE (TREE_TYPE (new_vard));
6214 		      nx = TYPE_SIZE_UNIT (type);
6215 		      if (TREE_CONSTANT (nx))
6216 			{
6217 			  nx = create_tmp_var_raw (type, get_name (var));
6218 			  gimple_add_tmp_var (nx);
6219 			  TREE_ADDRESSABLE (nx) = 1;
6220 			  nx = build_fold_addr_expr_loc (clause_loc, nx);
6221 			  nx = fold_convert_loc (clause_loc,
6222 						 TREE_TYPE (new_vard), nx);
6223 			  gimplify_assign (new_vard, nx, ilist);
6224 			}
6225 		    }
6226 		}
6227 	      x = lang_hooks.decls.omp_clause_copy_ctor
6228 						(c, unshare_expr (new_var), x);
6229 	      gimplify_and_add (x, ilist);
6230 	      goto do_dtor;
6231 
6232 	    case OMP_CLAUSE__LOOPTEMP_:
6233 	    case OMP_CLAUSE__REDUCTEMP_:
6234 	      gcc_assert (is_taskreg_ctx (ctx));
6235 	      x = build_outer_var_ref (var, ctx);
6236 	      x = build2 (MODIFY_EXPR, TREE_TYPE (new_var), new_var, x);
6237 	      gimplify_and_add (x, ilist);
6238 	      break;
6239 
6240 	    case OMP_CLAUSE_COPYIN:
6241 	      by_ref = use_pointer_for_field (var, NULL);
6242 	      x = build_receiver_ref (var, by_ref, ctx);
6243 	      x = lang_hooks.decls.omp_clause_assign_op (c, new_var, x);
6244 	      append_to_statement_list (x, &copyin_seq);
6245 	      copyin_by_ref |= by_ref;
6246 	      break;
6247 
6248 	    case OMP_CLAUSE_REDUCTION:
6249 	    case OMP_CLAUSE_IN_REDUCTION:
6250 	      /* OpenACC reductions are initialized using the
6251 		 GOACC_REDUCTION internal function.  */
6252 	      if (is_gimple_omp_oacc (ctx->stmt))
6253 		break;
6254 	      if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
6255 		{
6256 		  tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
6257 		  gimple *tseq;
6258 		  tree ptype = TREE_TYPE (placeholder);
6259 		  if (cond)
6260 		    {
6261 		      x = error_mark_node;
6262 		      if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c)
6263 			  && !task_reduction_needs_orig_p)
6264 			x = var;
6265 		      else if (OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c))
6266 			{
6267 			  tree pptype = build_pointer_type (ptype);
6268 			  if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION)
6269 			    x = build4 (ARRAY_REF, ptr_type_node, tskred_avar,
6270 					size_int (task_reduction_cnt_full
6271 						  + task_reduction_cntorig - 1),
6272 					NULL_TREE, NULL_TREE);
6273 			  else
6274 			    {
6275 			      unsigned int idx
6276 				= *ctx->task_reduction_map->get (c);
6277 			      x = task_reduction_read (ilist, tskred_temp,
6278 						       pptype, 7 + 3 * idx);
6279 			    }
6280 			  x = fold_convert (pptype, x);
6281 			  x = build_simple_mem_ref (x);
6282 			}
6283 		    }
6284 		  else
6285 		    {
6286 		      lower_private_allocate (var, new_var, allocator,
6287 					      allocate_ptr, ilist, ctx, false,
6288 					      NULL_TREE);
6289 		      x = build_outer_var_ref (var, ctx);
6290 
6291 		      if (omp_privatize_by_reference (var)
6292 			  && !useless_type_conversion_p (ptype, TREE_TYPE (x)))
6293 			x = build_fold_addr_expr_loc (clause_loc, x);
6294 		    }
6295 		  SET_DECL_VALUE_EXPR (placeholder, x);
6296 		  DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
6297 		  tree new_vard = new_var;
6298 		  if (omp_privatize_by_reference (var))
6299 		    {
6300 		      gcc_assert (TREE_CODE (new_var) == MEM_REF);
6301 		      new_vard = TREE_OPERAND (new_var, 0);
6302 		      gcc_assert (DECL_P (new_vard));
6303 		    }
6304 		  tree rvar = NULL_TREE, *rvarp = NULL, rvar2 = NULL_TREE;
6305 		  if (is_simd
6306 		      && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6307 		      && OMP_CLAUSE_REDUCTION_INSCAN (c))
6308 		    rvarp = &rvar;
6309 		  if (is_simd
6310 		      && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
6311 						       ivar, lvar, rvarp,
6312 						       &rvar2))
6313 		    {
6314 		      if (new_vard == new_var)
6315 			{
6316 			  gcc_assert (DECL_VALUE_EXPR (new_var) == lvar);
6317 			  SET_DECL_VALUE_EXPR (new_var, ivar);
6318 			}
6319 		      else
6320 			{
6321 			  SET_DECL_VALUE_EXPR (new_vard,
6322 					       build_fold_addr_expr (ivar));
6323 			  DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
6324 			}
6325 		      x = lang_hooks.decls.omp_clause_default_ctor
6326 				(c, unshare_expr (ivar),
6327 				 build_outer_var_ref (var, ctx));
6328 		      if (rvarp && ctx->for_simd_scan_phase)
6329 			{
6330 			  if (x)
6331 			    gimplify_and_add (x, &llist[0]);
6332 			  x = lang_hooks.decls.omp_clause_dtor (c, ivar);
6333 			  if (x)
6334 			    gimplify_and_add (x, &llist[1]);
6335 			  break;
6336 			}
6337 		      else if (rvarp)
6338 			{
6339 			  if (x)
6340 			    {
6341 			      gimplify_and_add (x, &llist[0]);
6342 
6343 			      tree ivar2 = unshare_expr (lvar);
6344 			      TREE_OPERAND (ivar2, 1) = sctx.idx;
6345 			      x = lang_hooks.decls.omp_clause_default_ctor
6346 				    (c, ivar2, build_outer_var_ref (var, ctx));
6347 			      gimplify_and_add (x, &llist[0]);
6348 
6349 			      if (rvar2)
6350 				{
6351 				  x = lang_hooks.decls.omp_clause_default_ctor
6352 					(c, unshare_expr (rvar2),
6353 					 build_outer_var_ref (var, ctx));
6354 				  gimplify_and_add (x, &llist[0]);
6355 				}
6356 
6357 			      /* For types that need construction, add another
6358 				 private var which will be default constructed
6359 				 and optionally initialized with
6360 				 OMP_CLAUSE_REDUCTION_GIMPLE_INIT, as in the
6361 				 loop we want to assign this value instead of
6362 				 constructing and destructing it in each
6363 				 iteration.  */
6364 			      tree nv = create_tmp_var_raw (TREE_TYPE (ivar));
6365 			      gimple_add_tmp_var (nv);
6366 			      ctx->cb.decl_map->put (TREE_OPERAND (rvar2
6367 								   ? rvar2
6368 								   : ivar, 0),
6369 						     nv);
6370 			      x = lang_hooks.decls.omp_clause_default_ctor
6371 				    (c, nv, build_outer_var_ref (var, ctx));
6372 			      gimplify_and_add (x, ilist);
6373 
6374 			      if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
6375 				{
6376 				  tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
6377 				  x = DECL_VALUE_EXPR (new_vard);
6378 				  tree vexpr = nv;
6379 				  if (new_vard != new_var)
6380 				    vexpr = build_fold_addr_expr (nv);
6381 				  SET_DECL_VALUE_EXPR (new_vard, vexpr);
6382 				  lower_omp (&tseq, ctx);
6383 				  SET_DECL_VALUE_EXPR (new_vard, x);
6384 				  gimple_seq_add_seq (ilist, tseq);
6385 				  OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
6386 				}
6387 
6388 			      x = lang_hooks.decls.omp_clause_dtor (c, nv);
6389 			      if (x)
6390 				gimplify_and_add (x, dlist);
6391 			    }
6392 
6393 			  tree ref = build_outer_var_ref (var, ctx);
6394 			  x = unshare_expr (ivar);
6395 			  x = lang_hooks.decls.omp_clause_assign_op (c, x,
6396 								     ref);
6397 			  gimplify_and_add (x, &llist[0]);
6398 
6399 			  ref = build_outer_var_ref (var, ctx);
6400 			  x = lang_hooks.decls.omp_clause_assign_op (c, ref,
6401 								     rvar);
6402 			  gimplify_and_add (x, &llist[3]);
6403 
6404 			  DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
6405 			  if (new_vard == new_var)
6406 			    SET_DECL_VALUE_EXPR (new_var, lvar);
6407 			  else
6408 			    SET_DECL_VALUE_EXPR (new_vard,
6409 						 build_fold_addr_expr (lvar));
6410 
6411 			  x = lang_hooks.decls.omp_clause_dtor (c, ivar);
6412 			  if (x)
6413 			    gimplify_and_add (x, &llist[1]);
6414 
6415 			  tree ivar2 = unshare_expr (lvar);
6416 			  TREE_OPERAND (ivar2, 1) = sctx.idx;
6417 			  x = lang_hooks.decls.omp_clause_dtor (c, ivar2);
6418 			  if (x)
6419 			    gimplify_and_add (x, &llist[1]);
6420 
6421 			  if (rvar2)
6422 			    {
6423 			      x = lang_hooks.decls.omp_clause_dtor (c, rvar2);
6424 			      if (x)
6425 				gimplify_and_add (x, &llist[1]);
6426 			    }
6427 			  break;
6428 			}
6429 		      if (x)
6430 			gimplify_and_add (x, &llist[0]);
6431 		      if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
6432 			{
6433 			  tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
6434 			  lower_omp (&tseq, ctx);
6435 			  gimple_seq_add_seq (&llist[0], tseq);
6436 			}
6437 		      OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
6438 		      tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
6439 		      lower_omp (&tseq, ctx);
6440 		      gimple_seq_add_seq (&llist[1], tseq);
6441 		      OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
6442 		      DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
6443 		      if (new_vard == new_var)
6444 			SET_DECL_VALUE_EXPR (new_var, lvar);
6445 		      else
6446 			SET_DECL_VALUE_EXPR (new_vard,
6447 					     build_fold_addr_expr (lvar));
6448 		      x = lang_hooks.decls.omp_clause_dtor (c, ivar);
6449 		      if (x)
6450 			gimplify_and_add (x, &llist[1]);
6451 		      break;
6452 		    }
6453 		  /* If this is a reference to constant size reduction var
6454 		     with placeholder, we haven't emitted the initializer
6455 		     for it because it is undesirable if SIMD arrays are used.
6456 		     But if they aren't used, we need to emit the deferred
6457 		     initialization now.  */
6458 		  else if (omp_privatize_by_reference (var) && is_simd)
6459 		    handle_simd_reference (clause_loc, new_vard, ilist);
6460 
6461 		  tree lab2 = NULL_TREE;
6462 		  if (cond)
6463 		    {
6464 		      gimple *g;
6465 		      if (!is_parallel_ctx (ctx))
6466 			{
6467 			  tree condv = create_tmp_var (boolean_type_node);
6468 			  tree m = build_simple_mem_ref (cond);
6469 			  g = gimple_build_assign (condv, m);
6470 			  gimple_seq_add_stmt (ilist, g);
6471 			  tree lab1
6472 			    = create_artificial_label (UNKNOWN_LOCATION);
6473 			  lab2 = create_artificial_label (UNKNOWN_LOCATION);
6474 			  g = gimple_build_cond (NE_EXPR, condv,
6475 						 boolean_false_node,
6476 						 lab2, lab1);
6477 			  gimple_seq_add_stmt (ilist, g);
6478 			  gimple_seq_add_stmt (ilist,
6479 					       gimple_build_label (lab1));
6480 			}
6481 		      g = gimple_build_assign (build_simple_mem_ref (cond),
6482 					       boolean_true_node);
6483 		      gimple_seq_add_stmt (ilist, g);
6484 		    }
6485 		  x = lang_hooks.decls.omp_clause_default_ctor
6486 				(c, unshare_expr (new_var),
6487 				 cond ? NULL_TREE
6488 				 : build_outer_var_ref (var, ctx));
6489 		  if (x)
6490 		    gimplify_and_add (x, ilist);
6491 
6492 		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6493 		      && OMP_CLAUSE_REDUCTION_INSCAN (c))
6494 		    {
6495 		      if (ctx->for_simd_scan_phase)
6496 			goto do_dtor;
6497 		      if (x || (!is_simd
6498 				&& OMP_CLAUSE_REDUCTION_OMP_ORIG_REF (c)))
6499 			{
6500 			  tree nv = create_tmp_var_raw (TREE_TYPE (new_var));
6501 			  gimple_add_tmp_var (nv);
6502 			  ctx->cb.decl_map->put (new_vard, nv);
6503 			  x = lang_hooks.decls.omp_clause_default_ctor
6504 				(c, nv, build_outer_var_ref (var, ctx));
6505 			  if (x)
6506 			    gimplify_and_add (x, ilist);
6507 			  if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
6508 			    {
6509 			      tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
6510 			      tree vexpr = nv;
6511 			      if (new_vard != new_var)
6512 				vexpr = build_fold_addr_expr (nv);
6513 			      SET_DECL_VALUE_EXPR (new_vard, vexpr);
6514 			      DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
6515 			      lower_omp (&tseq, ctx);
6516 			      SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
6517 			      DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
6518 			      gimple_seq_add_seq (ilist, tseq);
6519 			    }
6520 			  OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
6521 			  if (is_simd && ctx->scan_exclusive)
6522 			    {
6523 			      tree nv2
6524 				= create_tmp_var_raw (TREE_TYPE (new_var));
6525 			      gimple_add_tmp_var (nv2);
6526 			      ctx->cb.decl_map->put (nv, nv2);
6527 			      x = lang_hooks.decls.omp_clause_default_ctor
6528 				    (c, nv2, build_outer_var_ref (var, ctx));
6529 			      gimplify_and_add (x, ilist);
6530 			      x = lang_hooks.decls.omp_clause_dtor (c, nv2);
6531 			      if (x)
6532 				gimplify_and_add (x, dlist);
6533 			    }
6534 			  x = lang_hooks.decls.omp_clause_dtor (c, nv);
6535 			  if (x)
6536 			    gimplify_and_add (x, dlist);
6537 			}
6538 		      else if (is_simd
6539 			       && ctx->scan_exclusive
6540 			       && TREE_ADDRESSABLE (TREE_TYPE (new_var)))
6541 			{
6542 			  tree nv2 = create_tmp_var_raw (TREE_TYPE (new_var));
6543 			  gimple_add_tmp_var (nv2);
6544 			  ctx->cb.decl_map->put (new_vard, nv2);
6545 			  x = lang_hooks.decls.omp_clause_dtor (c, nv2);
6546 			  if (x)
6547 			    gimplify_and_add (x, dlist);
6548 			}
6549 		      DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
6550 		      goto do_dtor;
6551 		    }
6552 
6553 		  if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
6554 		    {
6555 		      tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
6556 		      if (c_kind == OMP_CLAUSE_IN_REDUCTION
6557 			  && is_omp_target (ctx->stmt))
6558 			{
6559 			  tree d = maybe_lookup_decl_in_outer_ctx (var, ctx);
6560 			  tree oldv = NULL_TREE;
6561 			  gcc_assert (d);
6562 			  if (DECL_HAS_VALUE_EXPR_P (d))
6563 			    oldv = DECL_VALUE_EXPR (d);
6564 			  SET_DECL_VALUE_EXPR (d, new_vard);
6565 			  DECL_HAS_VALUE_EXPR_P (d) = 1;
6566 			  lower_omp (&tseq, ctx);
6567 			  if (oldv)
6568 			    SET_DECL_VALUE_EXPR (d, oldv);
6569 			  else
6570 			    {
6571 			      SET_DECL_VALUE_EXPR (d, NULL_TREE);
6572 			      DECL_HAS_VALUE_EXPR_P (d) = 0;
6573 			    }
6574 			}
6575 		      else
6576 			lower_omp (&tseq, ctx);
6577 		      gimple_seq_add_seq (ilist, tseq);
6578 		    }
6579 		  OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
6580 		  if (is_simd)
6581 		    {
6582 		      tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
6583 		      lower_omp (&tseq, ctx);
6584 		      gimple_seq_add_seq (dlist, tseq);
6585 		      OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
6586 		    }
6587 		  DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
6588 		  if (cond)
6589 		    {
6590 		      if (lab2)
6591 			gimple_seq_add_stmt (ilist, gimple_build_label (lab2));
6592 		      break;
6593 		    }
6594 		  goto do_dtor;
6595 		}
6596 	      else
6597 		{
6598 		  x = omp_reduction_init (c, TREE_TYPE (new_var));
6599 		  gcc_assert (TREE_CODE (TREE_TYPE (new_var)) != ARRAY_TYPE);
6600 		  enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
6601 
6602 		  if (cond)
6603 		    {
6604 		      gimple *g;
6605 		      tree lab2 = NULL_TREE;
6606 		      /* GOMP_taskgroup_reduction_register memsets the whole
6607 			 array to zero.  If the initializer is zero, we don't
6608 			 need to initialize it again, just mark it as ever
6609 			 used unconditionally, i.e. cond = true.  */
6610 		      if (initializer_zerop (x))
6611 			{
6612 			  g = gimple_build_assign (build_simple_mem_ref (cond),
6613 						   boolean_true_node);
6614 			  gimple_seq_add_stmt (ilist, g);
6615 			  break;
6616 			}
6617 
6618 		      /* Otherwise, emit
6619 			 if (!cond) { cond = true; new_var = x; }  */
6620 		      if (!is_parallel_ctx (ctx))
6621 			{
6622 			  tree condv = create_tmp_var (boolean_type_node);
6623 			  tree m = build_simple_mem_ref (cond);
6624 			  g = gimple_build_assign (condv, m);
6625 			  gimple_seq_add_stmt (ilist, g);
6626 			  tree lab1
6627 			    = create_artificial_label (UNKNOWN_LOCATION);
6628 			  lab2 = create_artificial_label (UNKNOWN_LOCATION);
6629 			  g = gimple_build_cond (NE_EXPR, condv,
6630 						 boolean_false_node,
6631 						 lab2, lab1);
6632 			  gimple_seq_add_stmt (ilist, g);
6633 			  gimple_seq_add_stmt (ilist,
6634 					       gimple_build_label (lab1));
6635 			}
6636 		      g = gimple_build_assign (build_simple_mem_ref (cond),
6637 					       boolean_true_node);
6638 		      gimple_seq_add_stmt (ilist, g);
6639 		      gimplify_assign (new_var, x, ilist);
6640 		      if (lab2)
6641 			gimple_seq_add_stmt (ilist, gimple_build_label (lab2));
6642 		      break;
6643 		    }
6644 
6645 		  /* reduction(-:var) sums up the partial results, so it
6646 		     acts identically to reduction(+:var).  */
6647 		  if (code == MINUS_EXPR)
6648 		    code = PLUS_EXPR;
6649 
6650 		  bool is_truth_op
6651 		    = (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR);
6652 		  tree new_vard = new_var;
6653 		  if (is_simd && omp_privatize_by_reference (var))
6654 		    {
6655 		      gcc_assert (TREE_CODE (new_var) == MEM_REF);
6656 		      new_vard = TREE_OPERAND (new_var, 0);
6657 		      gcc_assert (DECL_P (new_vard));
6658 		    }
6659 		  tree rvar = NULL_TREE, *rvarp = NULL, rvar2 = NULL_TREE;
6660 		  if (is_simd
6661 		      && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6662 		      && OMP_CLAUSE_REDUCTION_INSCAN (c))
6663 		    rvarp = &rvar;
6664 		  if (is_simd
6665 		      && lower_rec_simd_input_clauses (new_var, ctx, &sctx,
6666 						       ivar, lvar, rvarp,
6667 						       &rvar2))
6668 		    {
6669 		      if (new_vard != new_var)
6670 			{
6671 			  SET_DECL_VALUE_EXPR (new_vard,
6672 					       build_fold_addr_expr (lvar));
6673 			  DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
6674 			}
6675 
6676 		      tree ref = build_outer_var_ref (var, ctx);
6677 
6678 		      if (rvarp)
6679 			{
6680 			  if (ctx->for_simd_scan_phase)
6681 			    break;
6682 			  gimplify_assign (ivar, ref, &llist[0]);
6683 			  ref = build_outer_var_ref (var, ctx);
6684 			  gimplify_assign (ref, rvar, &llist[3]);
6685 			  break;
6686 			}
6687 
6688 		      gimplify_assign (unshare_expr (ivar), x, &llist[0]);
6689 
6690 		      if (sctx.is_simt)
6691 			{
6692 			  if (!simt_lane)
6693 			    simt_lane = create_tmp_var (unsigned_type_node);
6694 			  x = build_call_expr_internal_loc
6695 			    (UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_BFLY,
6696 			     TREE_TYPE (ivar), 2, ivar, simt_lane);
6697 			  x = build2 (code, TREE_TYPE (ivar), ivar, x);
6698 			  gimplify_assign (ivar, x, &llist[2]);
6699 			}
6700 		      tree ivar2 = ivar;
6701 		      tree ref2 = ref;
6702 		      if (is_truth_op)
6703 			{
6704 			  tree zero = build_zero_cst (TREE_TYPE (ivar));
6705 			  ivar2 = fold_build2_loc (clause_loc, NE_EXPR,
6706 						   boolean_type_node, ivar,
6707 						   zero);
6708 			  ref2 = fold_build2_loc (clause_loc, NE_EXPR,
6709 						  boolean_type_node, ref,
6710 						  zero);
6711 			}
6712 		      x = build2 (code, TREE_TYPE (ref), ref2, ivar2);
6713 		      if (is_truth_op)
6714 			x = fold_convert (TREE_TYPE (ref), x);
6715 		      ref = build_outer_var_ref (var, ctx);
6716 		      gimplify_assign (ref, x, &llist[1]);
6717 
6718 		    }
6719 		  else
6720 		    {
6721 		      lower_private_allocate (var, new_var, allocator,
6722 					      allocate_ptr, ilist, ctx,
6723 					      false, NULL_TREE);
6724 		      if (omp_privatize_by_reference (var) && is_simd)
6725 			handle_simd_reference (clause_loc, new_vard, ilist);
6726 		      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6727 			  && OMP_CLAUSE_REDUCTION_INSCAN (c))
6728 			break;
6729 		      gimplify_assign (new_var, x, ilist);
6730 		      if (is_simd)
6731 			{
6732 			  tree ref = build_outer_var_ref (var, ctx);
6733 			  tree new_var2 = new_var;
6734 			  tree ref2 = ref;
6735 			  if (is_truth_op)
6736 			    {
6737 			      tree zero = build_zero_cst (TREE_TYPE (new_var));
6738 			      new_var2
6739 				= fold_build2_loc (clause_loc, NE_EXPR,
6740 						   boolean_type_node, new_var,
6741 						   zero);
6742 			      ref2 = fold_build2_loc (clause_loc, NE_EXPR,
6743 						      boolean_type_node, ref,
6744 						      zero);
6745 			    }
6746 			  x = build2 (code, TREE_TYPE (ref2), ref2, new_var2);
6747 			  if (is_truth_op)
6748 			    x = fold_convert (TREE_TYPE (new_var), x);
6749 			  ref = build_outer_var_ref (var, ctx);
6750 			  gimplify_assign (ref, x, dlist);
6751 			}
6752 		      if (allocator)
6753 			goto do_dtor;
6754 		    }
6755 		}
6756 	      break;
6757 
6758 	    default:
6759 	      gcc_unreachable ();
6760 	    }
6761 	}
6762     }
6763   if (tskred_avar)
6764     {
6765       tree clobber = build_clobber (TREE_TYPE (tskred_avar));
6766       gimple_seq_add_stmt (ilist, gimple_build_assign (tskred_avar, clobber));
6767     }
6768 
6769   if (known_eq (sctx.max_vf, 1U))
6770     {
6771       sctx.is_simt = false;
6772       if (ctx->lastprivate_conditional_map)
6773 	{
6774 	  if (gimple_omp_for_combined_into_p (ctx->stmt))
6775 	    {
6776 	      /* Signal to lower_omp_1 that it should use parent context.  */
6777 	      ctx->combined_into_simd_safelen1 = true;
6778 	      for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
6779 		if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6780 		    && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
6781 		  {
6782 		    tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
6783 		    omp_context *outer = ctx->outer;
6784 		    if (gimple_code (outer->stmt) == GIMPLE_OMP_SCAN)
6785 		      outer = outer->outer;
6786 		    tree *v = ctx->lastprivate_conditional_map->get (o);
6787 		    tree po = lookup_decl (OMP_CLAUSE_DECL (c), outer);
6788 		    tree *pv = outer->lastprivate_conditional_map->get (po);
6789 		    *v = *pv;
6790 		  }
6791 	    }
6792 	  else
6793 	    {
6794 	      /* When not vectorized, treat lastprivate(conditional:) like
6795 		 normal lastprivate, as there will be just one simd lane
6796 		 writing the privatized variable.  */
6797 	      delete ctx->lastprivate_conditional_map;
6798 	      ctx->lastprivate_conditional_map = NULL;
6799 	    }
6800 	}
6801     }
6802 
6803   if (nonconst_simd_if)
6804     {
6805       if (sctx.lane == NULL_TREE)
6806 	{
6807 	  sctx.idx = create_tmp_var (unsigned_type_node);
6808 	  sctx.lane = create_tmp_var (unsigned_type_node);
6809 	}
6810       /* FIXME: For now.  */
6811       sctx.is_simt = false;
6812     }
6813 
6814   if (sctx.lane || sctx.is_simt)
6815     {
6816       uid = create_tmp_var (ptr_type_node, "simduid");
6817       /* Don't want uninit warnings on simduid, it is always uninitialized,
6818 	 but we use it not for the value, but for the DECL_UID only.  */
6819       suppress_warning (uid, OPT_Wuninitialized);
6820       c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SIMDUID_);
6821       OMP_CLAUSE__SIMDUID__DECL (c) = uid;
6822       OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
6823       gimple_omp_for_set_clauses (ctx->stmt, c);
6824     }
6825   /* Emit calls denoting privatized variables and initializing a pointer to
6826      structure that holds private variables as fields after ompdevlow pass.  */
6827   if (sctx.is_simt)
6828     {
6829       sctx.simt_eargs[0] = uid;
6830       gimple *g
6831 	= gimple_build_call_internal_vec (IFN_GOMP_SIMT_ENTER, sctx.simt_eargs);
6832       gimple_call_set_lhs (g, uid);
6833       gimple_seq_add_stmt (ilist, g);
6834       sctx.simt_eargs.release ();
6835 
6836       simtrec = create_tmp_var (ptr_type_node, ".omp_simt");
6837       g = gimple_build_call_internal (IFN_GOMP_SIMT_ENTER_ALLOC, 1, uid);
6838       gimple_call_set_lhs (g, simtrec);
6839       gimple_seq_add_stmt (ilist, g);
6840     }
6841   if (sctx.lane)
6842     {
6843       gimple *g = gimple_build_call_internal (IFN_GOMP_SIMD_LANE,
6844 					      2 + (nonconst_simd_if != NULL),
6845 					      uid, integer_zero_node,
6846 					      nonconst_simd_if);
6847       gimple_call_set_lhs (g, sctx.lane);
6848       gimple_stmt_iterator gsi = gsi_start_1 (gimple_omp_body_ptr (ctx->stmt));
6849       gsi_insert_before_without_update (&gsi, g, GSI_SAME_STMT);
6850       g = gimple_build_assign (sctx.lane, INTEGER_CST,
6851 			       build_int_cst (unsigned_type_node, 0));
6852       gimple_seq_add_stmt (ilist, g);
6853       if (sctx.lastlane)
6854 	{
6855 	  g = gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
6856 					  2, uid, sctx.lane);
6857 	  gimple_call_set_lhs (g, sctx.lastlane);
6858 	  gimple_seq_add_stmt (dlist, g);
6859 	  gimple_seq_add_seq (dlist, llist[3]);
6860 	}
6861       /* Emit reductions across SIMT lanes in log_2(simt_vf) steps.  */
6862       if (llist[2])
6863 	{
6864 	  tree simt_vf = create_tmp_var (unsigned_type_node);
6865 	  g = gimple_build_call_internal (IFN_GOMP_SIMT_VF, 0);
6866 	  gimple_call_set_lhs (g, simt_vf);
6867 	  gimple_seq_add_stmt (dlist, g);
6868 
6869 	  tree t = build_int_cst (unsigned_type_node, 1);
6870 	  g = gimple_build_assign (simt_lane, INTEGER_CST, t);
6871 	  gimple_seq_add_stmt (dlist, g);
6872 
6873 	  t = build_int_cst (unsigned_type_node, 0);
6874 	  g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
6875 	  gimple_seq_add_stmt (dlist, g);
6876 
6877 	  tree body = create_artificial_label (UNKNOWN_LOCATION);
6878 	  tree header = create_artificial_label (UNKNOWN_LOCATION);
6879 	  tree end = create_artificial_label (UNKNOWN_LOCATION);
6880 	  gimple_seq_add_stmt (dlist, gimple_build_goto (header));
6881 	  gimple_seq_add_stmt (dlist, gimple_build_label (body));
6882 
6883 	  gimple_seq_add_seq (dlist, llist[2]);
6884 
6885 	  g = gimple_build_assign (simt_lane, LSHIFT_EXPR, simt_lane, integer_one_node);
6886 	  gimple_seq_add_stmt (dlist, g);
6887 
6888 	  gimple_seq_add_stmt (dlist, gimple_build_label (header));
6889 	  g = gimple_build_cond (LT_EXPR, simt_lane, simt_vf, body, end);
6890 	  gimple_seq_add_stmt (dlist, g);
6891 
6892 	  gimple_seq_add_stmt (dlist, gimple_build_label (end));
6893 	}
6894       for (int i = 0; i < 2; i++)
6895 	if (llist[i])
6896 	  {
6897 	    tree vf = create_tmp_var (unsigned_type_node);
6898 	    g = gimple_build_call_internal (IFN_GOMP_SIMD_VF, 1, uid);
6899 	    gimple_call_set_lhs (g, vf);
6900 	    gimple_seq *seq = i == 0 ? ilist : dlist;
6901 	    gimple_seq_add_stmt (seq, g);
6902 	    tree t = build_int_cst (unsigned_type_node, 0);
6903 	    g = gimple_build_assign (sctx.idx, INTEGER_CST, t);
6904 	    gimple_seq_add_stmt (seq, g);
6905 	    tree body = create_artificial_label (UNKNOWN_LOCATION);
6906 	    tree header = create_artificial_label (UNKNOWN_LOCATION);
6907 	    tree end = create_artificial_label (UNKNOWN_LOCATION);
6908 	    gimple_seq_add_stmt (seq, gimple_build_goto (header));
6909 	    gimple_seq_add_stmt (seq, gimple_build_label (body));
6910 	    gimple_seq_add_seq (seq, llist[i]);
6911 	    t = build_int_cst (unsigned_type_node, 1);
6912 	    g = gimple_build_assign (sctx.idx, PLUS_EXPR, sctx.idx, t);
6913 	    gimple_seq_add_stmt (seq, g);
6914 	    gimple_seq_add_stmt (seq, gimple_build_label (header));
6915 	    g = gimple_build_cond (LT_EXPR, sctx.idx, vf, body, end);
6916 	    gimple_seq_add_stmt (seq, g);
6917 	    gimple_seq_add_stmt (seq, gimple_build_label (end));
6918 	  }
6919     }
6920   if (sctx.is_simt)
6921     {
6922       gimple_seq_add_seq (dlist, sctx.simt_dlist);
6923       gimple *g
6924 	= gimple_build_call_internal (IFN_GOMP_SIMT_EXIT, 1, simtrec);
6925       gimple_seq_add_stmt (dlist, g);
6926     }
6927 
6928   /* The copyin sequence is not to be executed by the main thread, since
6929      that would result in self-copies.  Perhaps not visible to scalars,
6930      but it certainly is to C++ operator=.  */
6931   if (copyin_seq)
6932     {
6933       x = build_call_expr (builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM),
6934 			   0);
6935       x = build2 (NE_EXPR, boolean_type_node, x,
6936 		  build_int_cst (TREE_TYPE (x), 0));
6937       x = build3 (COND_EXPR, void_type_node, x, copyin_seq, NULL);
6938       gimplify_and_add (x, ilist);
6939     }
6940 
6941   /* If any copyin variable is passed by reference, we must ensure the
6942      master thread doesn't modify it before it is copied over in all
6943      threads.  Similarly for variables in both firstprivate and
6944      lastprivate clauses we need to ensure the lastprivate copying
6945      happens after firstprivate copying in all threads.  And similarly
6946      for UDRs if initializer expression refers to omp_orig.  */
6947   if (copyin_by_ref || lastprivate_firstprivate
6948       || (reduction_omp_orig_ref
6949 	  && !ctx->scan_inclusive
6950 	  && !ctx->scan_exclusive))
6951     {
6952       /* Don't add any barrier for #pragma omp simd or
6953 	 #pragma omp distribute.  */
6954       if (!is_task_ctx (ctx)
6955 	  && (gimple_code (ctx->stmt) != GIMPLE_OMP_FOR
6956 	      || gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_FOR))
6957 	gimple_seq_add_stmt (ilist, omp_build_barrier (NULL_TREE));
6958     }
6959 
6960   /* If max_vf is non-zero, then we can use only a vectorization factor
6961      up to the max_vf we chose.  So stick it into the safelen clause.  */
6962   if (maybe_ne (sctx.max_vf, 0U))
6963     {
6964       tree c = omp_find_clause (gimple_omp_for_clauses (ctx->stmt),
6965 				OMP_CLAUSE_SAFELEN);
6966       poly_uint64 safe_len;
6967       if (c == NULL_TREE
6968 	  || (poly_int_tree_p (OMP_CLAUSE_SAFELEN_EXPR (c), &safe_len)
6969 	      && maybe_gt (safe_len, sctx.max_vf)))
6970 	{
6971 	  c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_SAFELEN);
6972 	  OMP_CLAUSE_SAFELEN_EXPR (c) = build_int_cst (integer_type_node,
6973 						       sctx.max_vf);
6974 	  OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (ctx->stmt);
6975 	  gimple_omp_for_set_clauses (ctx->stmt, c);
6976 	}
6977     }
6978 }
6979 
6980 /* Create temporary variables for lastprivate(conditional:) implementation
6981    in context CTX with CLAUSES.  */
6982 
6983 static void
lower_lastprivate_conditional_clauses(tree * clauses,omp_context * ctx)6984 lower_lastprivate_conditional_clauses (tree *clauses, omp_context *ctx)
6985 {
6986   tree iter_type = NULL_TREE;
6987   tree cond_ptr = NULL_TREE;
6988   tree iter_var = NULL_TREE;
6989   bool is_simd = (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
6990 		  && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD);
6991   tree next = *clauses;
6992   for (tree c = *clauses; c; c = OMP_CLAUSE_CHAIN (c))
6993     if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6994 	&& OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
6995       {
6996 	if (is_simd)
6997 	  {
6998 	    tree cc = omp_find_clause (next, OMP_CLAUSE__CONDTEMP_);
6999 	    gcc_assert (cc);
7000 	    if (iter_type == NULL_TREE)
7001 	      {
7002 		iter_type = TREE_TYPE (OMP_CLAUSE_DECL (cc));
7003 		iter_var = create_tmp_var_raw (iter_type);
7004 		DECL_CONTEXT (iter_var) = current_function_decl;
7005 		DECL_SEEN_IN_BIND_EXPR_P (iter_var) = 1;
7006 		DECL_CHAIN (iter_var) = ctx->block_vars;
7007 		ctx->block_vars = iter_var;
7008 		tree c3
7009 		  = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_);
7010 		OMP_CLAUSE__CONDTEMP__ITER (c3) = 1;
7011 		OMP_CLAUSE_DECL (c3) = iter_var;
7012 		OMP_CLAUSE_CHAIN (c3) = *clauses;
7013 		*clauses = c3;
7014 		ctx->lastprivate_conditional_map = new hash_map<tree, tree>;
7015 	      }
7016 	    next = OMP_CLAUSE_CHAIN (cc);
7017 	    tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
7018 	    tree v = lookup_decl (OMP_CLAUSE_DECL (cc), ctx);
7019 	    ctx->lastprivate_conditional_map->put (o, v);
7020 	    continue;
7021 	  }
7022 	if (iter_type == NULL)
7023 	  {
7024 	    if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR)
7025 	      {
7026 		struct omp_for_data fd;
7027 		omp_extract_for_data (as_a <gomp_for *> (ctx->stmt), &fd,
7028 				      NULL);
7029 		iter_type = unsigned_type_for (fd.iter_type);
7030 	      }
7031 	    else if (gimple_code (ctx->stmt) == GIMPLE_OMP_SECTIONS)
7032 	      iter_type = unsigned_type_node;
7033 	    tree c2 = omp_find_clause (*clauses, OMP_CLAUSE__CONDTEMP_);
7034 	    if (c2)
7035 	      {
7036 		cond_ptr
7037 		  = lookup_decl_in_outer_ctx (OMP_CLAUSE_DECL (c2), ctx);
7038 		OMP_CLAUSE_DECL (c2) = cond_ptr;
7039 	      }
7040 	    else
7041 	      {
7042 		cond_ptr = create_tmp_var_raw (build_pointer_type (iter_type));
7043 		DECL_CONTEXT (cond_ptr) = current_function_decl;
7044 		DECL_SEEN_IN_BIND_EXPR_P (cond_ptr) = 1;
7045 		DECL_CHAIN (cond_ptr) = ctx->block_vars;
7046 		ctx->block_vars = cond_ptr;
7047 		c2 = build_omp_clause (UNKNOWN_LOCATION,
7048 				       OMP_CLAUSE__CONDTEMP_);
7049 		OMP_CLAUSE_DECL (c2) = cond_ptr;
7050 		OMP_CLAUSE_CHAIN (c2) = *clauses;
7051 		*clauses = c2;
7052 	      }
7053 	    iter_var = create_tmp_var_raw (iter_type);
7054 	    DECL_CONTEXT (iter_var) = current_function_decl;
7055 	    DECL_SEEN_IN_BIND_EXPR_P (iter_var) = 1;
7056 	    DECL_CHAIN (iter_var) = ctx->block_vars;
7057 	    ctx->block_vars = iter_var;
7058 	    tree c3
7059 	      = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_);
7060 	    OMP_CLAUSE__CONDTEMP__ITER (c3) = 1;
7061 	    OMP_CLAUSE_DECL (c3) = iter_var;
7062 	    OMP_CLAUSE_CHAIN (c3) = OMP_CLAUSE_CHAIN (c2);
7063 	    OMP_CLAUSE_CHAIN (c2) = c3;
7064 	    ctx->lastprivate_conditional_map = new hash_map<tree, tree>;
7065 	  }
7066 	tree v = create_tmp_var_raw (iter_type);
7067 	DECL_CONTEXT (v) = current_function_decl;
7068 	DECL_SEEN_IN_BIND_EXPR_P (v) = 1;
7069 	DECL_CHAIN (v) = ctx->block_vars;
7070 	ctx->block_vars = v;
7071 	tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
7072 	ctx->lastprivate_conditional_map->put (o, v);
7073       }
7074 }
7075 
7076 
/* Generate code to implement the LASTPRIVATE clauses.  This is used for
   both parallel and workshare constructs.  PREDICATE may be NULL if it's
   always true.  BODY_P is the sequence to insert early initialization
   if needed, STMT_LIST is where the non-conditional lastprivate handling
   goes into and CSTMT_LIST is a sequence that needs to be run in a critical
   section.  */

static void
lower_lastprivate_clauses (tree clauses, tree predicate, gimple_seq *body_p,
			   gimple_seq *stmt_list, gimple_seq *cstmt_list,
			   omp_context *ctx)
{
  tree x, c, label = NULL, orig_clauses = clauses;
  bool par_clauses = false;
  tree simduid = NULL, lastlane = NULL, simtcond = NULL, simtlast = NULL;
  /* Running offset into the conditional-lastprivate buffer pointed to by
     the _condtemp_ clause's decl; bumped per handled clause below.  */
  unsigned HOST_WIDE_INT conditional_off = 0;
  /* Statements to emit after LABEL, i.e. outside the predicated region.  */
  gimple_seq post_stmt_list = NULL;

  /* Early exit if there are no lastprivate or linear clauses.  */
  for (; clauses ; clauses = OMP_CLAUSE_CHAIN (clauses))
    if (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LASTPRIVATE
	|| (OMP_CLAUSE_CODE (clauses) == OMP_CLAUSE_LINEAR
	    && !OMP_CLAUSE_LINEAR_NO_COPYOUT (clauses)))
      break;
  if (clauses == NULL)
    {
      /* If this was a workshare clause, see if it had been combined
	 with its parallel.  In that case, look for the clauses on the
	 parallel statement itself.  */
      if (is_parallel_ctx (ctx))
	return;

      ctx = ctx->outer;
      if (ctx == NULL || !is_parallel_ctx (ctx))
	return;

      clauses = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
				 OMP_CLAUSE_LASTPRIVATE);
      if (clauses == NULL)
	return;
      par_clauses = true;
    }

  /* On a simd loop, check for SIMT resp. simd lowering artifacts that
     change how the "last" value is located below.  */
  bool maybe_simt = false;
  if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
      && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
    {
      maybe_simt = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMT_);
      simduid = omp_find_clause (orig_clauses, OMP_CLAUSE__SIMDUID_);
      if (simduid)
	simduid = OMP_CLAUSE__SIMDUID__DECL (simduid);
    }

  if (predicate)
    {
      /* Guard all the copy-out code with "if (PREDICATE) ..." so only the
	 thread that executed the last iteration performs it.  */
      gcond *stmt;
      tree label_true, arm1, arm2;
      enum tree_code pred_code = TREE_CODE (predicate);

      label = create_artificial_label (UNKNOWN_LOCATION);
      label_true = create_artificial_label (UNKNOWN_LOCATION);
      if (TREE_CODE_CLASS (pred_code) == tcc_comparison)
	{
	  arm1 = TREE_OPERAND (predicate, 0);
	  arm2 = TREE_OPERAND (predicate, 1);
	  gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
	  gimplify_expr (&arm2, stmt_list, NULL, is_gimple_val, fb_rvalue);
	}
      else
	{
	  arm1 = predicate;
	  gimplify_expr (&arm1, stmt_list, NULL, is_gimple_val, fb_rvalue);
	  arm2 = boolean_false_node;
	  pred_code = NE_EXPR;
	}
      if (maybe_simt)
	{
	  /* Under SIMT, vote across the warp whether any lane satisfied
	     the predicate; branch on the vote result instead.  */
	  c = build2 (pred_code, boolean_type_node, arm1, arm2);
	  c = fold_convert (integer_type_node, c);
	  simtcond = create_tmp_var (integer_type_node);
	  gimplify_assign (simtcond, c, stmt_list);
	  gcall *g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY,
						 1, simtcond);
	  c = create_tmp_var (integer_type_node);
	  gimple_call_set_lhs (g, c);
	  gimple_seq_add_stmt (stmt_list, g);
	  stmt = gimple_build_cond (NE_EXPR, c, integer_zero_node,
				    label_true, label);
	}
      else
	stmt = gimple_build_cond (pred_code, arm1, arm2, label_true, label);
      gimple_seq_add_stmt (stmt_list, stmt);
      gimple_seq_add_stmt (stmt_list, gimple_build_label (label_true));
    }

  tree cond_ptr = NULL_TREE;
  for (c = clauses; c ;)
    {
      tree var, new_var;
      location_t clause_loc = OMP_CLAUSE_LOCATION (c);
      /* Sequence the copy-out of this particular clause goes into; defaults
	 to STMT_LIST, redirected for conditional lastprivate below.  */
      gimple_seq *this_stmt_list = stmt_list;
      tree lab2 = NULL_TREE;

      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
	  && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
	  && ctx->lastprivate_conditional_map
	  && !ctx->combined_into_simd_safelen1)
	{
	  gcc_assert (body_p);
	  /* With a simduid the conditional handling is done elsewhere;
	     skip the copy-out for this clause entirely.  */
	  if (simduid)
	    goto next;
	  if (cond_ptr == NULL_TREE)
	    {
	      cond_ptr = omp_find_clause (orig_clauses, OMP_CLAUSE__CONDTEMP_);
	      cond_ptr = OMP_CLAUSE_DECL (cond_ptr);
	    }
	  tree type = TREE_TYPE (TREE_TYPE (cond_ptr));
	  tree o = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
	  tree v = *ctx->lastprivate_conditional_map->get (o);
	  /* Zero the per-variable iteration temporary up front.  */
	  gimplify_assign (v, build_zero_cst (type), body_p);
	  /* The compare-and-copy below must run in the critical section.  */
	  this_stmt_list = cstmt_list;
	  tree mem;
	  /* MEM is this clause's slot in the shared buffer, addressed
	     either through a pointer (with byte offset) or as an array
	     element, depending on cond_ptr's type.  */
	  if (POINTER_TYPE_P (TREE_TYPE (cond_ptr)))
	    {
	      mem = build2 (MEM_REF, type, cond_ptr,
			    build_int_cst (TREE_TYPE (cond_ptr),
					   conditional_off));
	      conditional_off += tree_to_uhwi (TYPE_SIZE_UNIT (type));
	    }
	  else
	    mem = build4 (ARRAY_REF, type, cond_ptr,
			  size_int (conditional_off++), NULL_TREE, NULL_TREE);
	  tree mem2 = copy_node (mem);
	  gimple_seq seq = NULL;
	  mem = force_gimple_operand (mem, &seq, true, NULL_TREE);
	  gimple_seq_add_seq (this_stmt_list, seq);
	  tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
	  lab2 = create_artificial_label (UNKNOWN_LOCATION);
	  /* Only copy out if this thread saw a later iteration than the
	     one recorded in the shared slot so far.  */
	  gimple *g = gimple_build_cond (GT_EXPR, v, mem, lab1, lab2);
	  gimple_seq_add_stmt (this_stmt_list, g);
	  gimple_seq_add_stmt (this_stmt_list, gimple_build_label (lab1));
	  gimplify_assign (mem2, v, this_stmt_list);
	}
      else if (predicate
	       && ctx->combined_into_simd_safelen1
	       && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
	       && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
	       && ctx->lastprivate_conditional_map)
	/* safelen(1) simd: emit the copy-out after the predicate label.  */
	this_stmt_list = &post_stmt_list;

      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
	  || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
	      && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
	{
	  var = OMP_CLAUSE_DECL (c);
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
	      && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
	      && is_taskloop_ctx (ctx))
	    {
	      /* For taskloop, the firstprivate copy lives in the enclosing
		 task context.  */
	      gcc_checking_assert (ctx->outer && is_task_ctx (ctx->outer));
	      new_var = lookup_decl (var, ctx->outer);
	    }
	  else
	    {
	      new_var = lookup_decl (var, ctx);
	      /* Avoid uninitialized warnings for lastprivate and
		 for linear iterators.  */
	      if (predicate
		  && (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
		      || OMP_CLAUSE_LINEAR_NO_COPYIN (c)))
		suppress_warning (new_var, OPT_Wuninitialized);
	    }

	  if (!maybe_simt && simduid && DECL_HAS_VALUE_EXPR_P (new_var))
	    {
	      /* The privatized copy is an element of an "omp simd array";
		 the last value sits in the lane computed by
		 GOMP_SIMD_LAST_LANE (computed once, reused for all
		 clauses).  */
	      tree val = DECL_VALUE_EXPR (new_var);
	      if (TREE_CODE (val) == ARRAY_REF
		  && VAR_P (TREE_OPERAND (val, 0))
		  && lookup_attribute ("omp simd array",
				       DECL_ATTRIBUTES (TREE_OPERAND (val,
								      0))))
		{
		  if (lastlane == NULL)
		    {
		      lastlane = create_tmp_var (unsigned_type_node);
		      gcall *g
			= gimple_build_call_internal (IFN_GOMP_SIMD_LAST_LANE,
						      2, simduid,
						      TREE_OPERAND (val, 1));
		      gimple_call_set_lhs (g, lastlane);
		      gimple_seq_add_stmt (this_stmt_list, g);
		    }
		  new_var = build4 (ARRAY_REF, TREE_TYPE (val),
				    TREE_OPERAND (val, 0), lastlane,
				    NULL_TREE, NULL_TREE);
		  TREE_THIS_NOTRAP (new_var) = 1;
		}
	    }
	  else if (maybe_simt)
	    {
	      /* SIMT: fetch the value from the lane that ran the last
		 iteration via GOMP_SIMT_XCHG_IDX; the lane index comes
		 from GOMP_SIMT_LAST_LANE (computed once).  */
	      tree val = (DECL_HAS_VALUE_EXPR_P (new_var)
			  ? DECL_VALUE_EXPR (new_var)
			  : new_var);
	      if (simtlast == NULL)
		{
		  simtlast = create_tmp_var (unsigned_type_node);
		  gcall *g = gimple_build_call_internal
		    (IFN_GOMP_SIMT_LAST_LANE, 1, simtcond);
		  gimple_call_set_lhs (g, simtlast);
		  gimple_seq_add_stmt (this_stmt_list, g);
		}
	      x = build_call_expr_internal_loc
		(UNKNOWN_LOCATION, IFN_GOMP_SIMT_XCHG_IDX,
		 TREE_TYPE (val), 2, val, simtlast);
	      new_var = unshare_expr (new_var);
	      gimplify_assign (new_var, x, this_stmt_list);
	      new_var = unshare_expr (new_var);
	    }

	  /* Emit any deferred gimplified helper sequence attached to the
	     clause (e.g. class type assignment ops) before the copy-out.  */
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
	      && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c))
	    {
	      lower_omp (&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c), ctx);
	      gimple_seq_add_seq (this_stmt_list,
				  OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
	      OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) = NULL;
	    }
	  else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
		   && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c))
	    {
	      lower_omp (&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c), ctx);
	      gimple_seq_add_seq (this_stmt_list,
				  OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
	      OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) = NULL;
	    }

	  /* X is the outer reference to copy the last value into.  */
	  x = NULL_TREE;
	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
	      && OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c)
	      && is_taskloop_ctx (ctx))
	    {
	      tree ovar = maybe_lookup_decl_in_outer_ctx (var,
							  ctx->outer->outer);
	      if (is_global_var (ovar))
		x = ovar;
	    }
	  if (!x)
	    x = build_outer_var_ref (var, ctx, OMP_CLAUSE_LASTPRIVATE);
	  if (omp_privatize_by_reference (var))
	    new_var = build_simple_mem_ref_loc (clause_loc, new_var);
	  x = lang_hooks.decls.omp_clause_assign_op (c, x, new_var);
	  gimplify_and_add (x, this_stmt_list);

	  /* Close the conditional-lastprivate compare emitted above.  */
	  if (lab2)
	    gimple_seq_add_stmt (this_stmt_list, gimple_build_label (lab2));
	}

     next:
      c = OMP_CLAUSE_CHAIN (c);
      if (c == NULL && !par_clauses)
	{
	  /* If this was a workshare clause, see if it had been combined
	     with its parallel.  In that case, continue looking for the
	     clauses also on the parallel statement itself.  */
	  if (is_parallel_ctx (ctx))
	    break;

	  ctx = ctx->outer;
	  if (ctx == NULL || !is_parallel_ctx (ctx))
	    break;

	  c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
			       OMP_CLAUSE_LASTPRIVATE);
	  par_clauses = true;
	}
    }

  if (label)
    gimple_seq_add_stmt (stmt_list, gimple_build_label (label));
  gimple_seq_add_seq (stmt_list, post_stmt_list);
}
7358 
/* Lower the OpenACC reductions of CLAUSES for compute axis LEVEL
   (which might be a placeholder).  INNER is true if this is an inner
   axis of a multi-axis loop.  FORK and JOIN are (optional) fork and
   join markers.  Generate the before-loop forking sequence in
   FORK_SEQ and the after-loop joining sequence to JOIN_SEQ.  The
   general form of these sequences is

     GOACC_REDUCTION_SETUP
     GOACC_FORK
     GOACC_REDUCTION_INIT
     ...
     GOACC_REDUCTION_FINI
     GOACC_JOIN
     GOACC_REDUCTION_TEARDOWN.  */

static void
lower_oacc_reductions (location_t loc, tree clauses, tree level, bool inner,
		       gcall *fork, gcall *private_marker, gcall *join,
		       gimple_seq *fork_seq, gimple_seq *join_seq,
		       omp_context *ctx)
{
  /* The four sub-sequences stitched around FORK/JOIN at the end.  */
  gimple_seq before_fork = NULL;
  gimple_seq after_fork = NULL;
  gimple_seq before_join = NULL;
  gimple_seq after_join = NULL;
  /* Integer codes for the four GOACC_REDUCTION variants, built lazily on
     the first reduction clause.  */
  tree init_code = NULL_TREE, fini_code = NULL_TREE,
    setup_code = NULL_TREE, teardown_code = NULL_TREE;
  /* Byte position in the reduction buffer, advanced per clause.  */
  unsigned offset = 0;

  for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION)
      {
	/* No 'reduction' clauses on OpenACC 'kernels'.  */
	gcc_checking_assert (!is_oacc_kernels (ctx));
	/* Likewise, on OpenACC 'kernels' decomposed parts.  */
	gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx));

	tree orig = OMP_CLAUSE_DECL (c);
	tree var = maybe_lookup_decl (orig, ctx);
	tree ref_to_res = NULL_TREE;
	tree incoming, outgoing, v1, v2, v3;
	bool is_private = false;

	/* Canonicalize the reduction operation: '-' accumulates like '+',
	   and the short-circuit logicals reduce via their bitwise
	   counterparts.  */
	enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
	if (rcode == MINUS_EXPR)
	  rcode = PLUS_EXPR;
	else if (rcode == TRUTH_ANDIF_EXPR)
	  rcode = BIT_AND_EXPR;
	else if (rcode == TRUTH_ORIF_EXPR)
	  rcode = BIT_IOR_EXPR;
	tree op = build_int_cst (unsigned_type_node, rcode);

	if (!var)
	  var = orig;

	/* INCOMING is the value flowing into the reduction region,
	   OUTGOING where the final value is stored; refined below.  */
	incoming = outgoing = var;

	if (!inner)
	  {
	    /* See if an outer construct also reduces this variable.  */
	    omp_context *outer = ctx;

	    while (omp_context *probe = outer->outer)
	      {
		enum gimple_code type = gimple_code (probe->stmt);
		tree cls;

		switch (type)
		  {
		  case GIMPLE_OMP_FOR:
		    cls = gimple_omp_for_clauses (probe->stmt);
		    break;

		  case GIMPLE_OMP_TARGET:
		    /* No 'reduction' clauses inside OpenACC 'kernels'
		       regions.  */
		    gcc_checking_assert (!is_oacc_kernels (probe));

		    if (!is_gimple_omp_offloaded (probe->stmt))
		      goto do_lookup;

		    cls = gimple_omp_target_clauses (probe->stmt);
		    break;

		  default:
		    goto do_lookup;
		  }

		outer = probe;
		for (; cls;  cls = OMP_CLAUSE_CHAIN (cls))
		  if (OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_REDUCTION
		      && orig == OMP_CLAUSE_DECL (cls))
		    {
		      /* An outer construct reduces it too; chain through
			 that construct's copy of the variable.  */
		      incoming = outgoing = lookup_decl (orig, probe);
		      goto has_outer_reduction;
		    }
		  else if ((OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_FIRSTPRIVATE
			    || OMP_CLAUSE_CODE (cls) == OMP_CLAUSE_PRIVATE)
			   && orig == OMP_CLAUSE_DECL (cls))
		    {
		      is_private = true;
		      goto do_lookup;
		    }
	      }

	  do_lookup:
	    /* This is the outermost construct with this reduction,
	       see if there's a mapping for it.  */
	    if (gimple_code (outer->stmt) == GIMPLE_OMP_TARGET
		&& maybe_lookup_field (orig, outer) && !is_private)
	      {
		/* Mapped on an offloaded target region: results go back
		   through the receiver object, and the incoming value is
		   the operation's identity element.  */
		ref_to_res = build_receiver_ref (orig, false, outer);
		if (omp_privatize_by_reference (orig))
		  ref_to_res = build_simple_mem_ref (ref_to_res);

		tree type = TREE_TYPE (var);
		if (POINTER_TYPE_P (type))
		  type = TREE_TYPE (type);

		outgoing = var;
		incoming = omp_reduction_init_op (loc, rcode, type);
	      }
	    else
	      {
		/* Try to look at enclosing contexts for reduction var,
		   use original if no mapping found.  */
		tree t = NULL_TREE;
		omp_context *c = ctx->outer;
		while (c && !t)
		  {
		    t = maybe_lookup_decl (orig, c);
		    c = c->outer;
		  }
		incoming = outgoing = (t ? t : orig);
	      }

	  has_outer_reduction:;
	  }

	/* The GOACC_REDUCTION calls take the mapped reference as second
	   argument; zero means "no mapping".  */
	if (!ref_to_res)
	  ref_to_res = integer_zero_node;

	if (omp_privatize_by_reference (orig))
	  {
	    /* Reference-typed reduction: materialize pointer copies and
	       dereference everything so the GOACC_REDUCTION calls operate
	       on the pointed-to values.  */
	    tree type = TREE_TYPE (var);
	    const char *id = IDENTIFIER_POINTER (DECL_NAME (var));

	    if (!inner)
	      {
		/* Allocate local storage and point VAR at it.  */
		tree x = create_tmp_var (TREE_TYPE (type), id);
		gimplify_assign (var, build_fold_addr_expr (x), fork_seq);
	      }

	    /* Separate pointer temporaries for the init, fini and teardown
	       operands, all snapshotting VAR before the fork.  */
	    v1 = create_tmp_var (type, id);
	    v2 = create_tmp_var (type, id);
	    v3 = create_tmp_var (type, id);

	    gimplify_assign (v1, var, fork_seq);
	    gimplify_assign (v2, var, fork_seq);
	    gimplify_assign (v3, var, fork_seq);

	    var = build_simple_mem_ref (var);
	    v1 = build_simple_mem_ref (v1);
	    v2 = build_simple_mem_ref (v2);
	    v3 = build_simple_mem_ref (v3);
	    outgoing = build_simple_mem_ref (outgoing);

	    if (!TREE_CONSTANT (incoming))
	      incoming = build_simple_mem_ref (incoming);
	  }
	else
	  v1 = v2 = v3 = var;

	/* Determine position in reduction buffer, which may be used
	   by target.  The parser has ensured that this is not a
	   variable-sized type.  */
	fixed_size_mode mode
	  = as_a <fixed_size_mode> (TYPE_MODE (TREE_TYPE (var)));
	unsigned align = GET_MODE_ALIGNMENT (mode) /  BITS_PER_UNIT;
	/* Round OFFSET up to the mode's alignment.  */
	offset = (offset + align - 1) & ~(align - 1);
	tree off = build_int_cst (sizetype, offset);
	offset += GET_MODE_SIZE (mode);

	if (!init_code)
	  {
	    init_code = build_int_cst (integer_type_node,
				       IFN_GOACC_REDUCTION_INIT);
	    fini_code = build_int_cst (integer_type_node,
				       IFN_GOACC_REDUCTION_FINI);
	    setup_code = build_int_cst (integer_type_node,
					IFN_GOACC_REDUCTION_SETUP);
	    teardown_code = build_int_cst (integer_type_node,
					   IFN_GOACC_REDUCTION_TEARDOWN);
	  }

	/* Build the four GOACC_REDUCTION calls; each takes
	   (code, ref_to_res, local operand, level, op, offset).  */
	tree setup_call
	  = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
					  TREE_TYPE (var), 6, setup_code,
					  unshare_expr (ref_to_res),
					  incoming, level, op, off);
	tree init_call
	  = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
					  TREE_TYPE (var), 6, init_code,
					  unshare_expr (ref_to_res),
					  v1, level, op, off);
	tree fini_call
	  = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
					  TREE_TYPE (var), 6, fini_code,
					  unshare_expr (ref_to_res),
					  v2, level, op, off);
	tree teardown_call
	  = build_call_expr_internal_loc (loc, IFN_GOACC_REDUCTION,
					  TREE_TYPE (var), 6, teardown_code,
					  ref_to_res, v3, level, op, off);

	gimplify_assign (v1, setup_call, &before_fork);
	gimplify_assign (v2, init_call, &after_fork);
	gimplify_assign (v3, fini_call, &before_join);
	gimplify_assign (outgoing, teardown_call, &after_join);
      }

  /* Now stitch things together.  */
  gimple_seq_add_seq (fork_seq, before_fork);
  if (private_marker)
    gimple_seq_add_stmt (fork_seq, private_marker);
  if (fork)
    gimple_seq_add_stmt (fork_seq, fork);
  gimple_seq_add_seq (fork_seq, after_fork);

  gimple_seq_add_seq (join_seq, before_join);
  if (join)
    gimple_seq_add_stmt (join_seq, join);
  gimple_seq_add_seq (join_seq, after_join);
}
7593 
7594 /* Generate code to implement the REDUCTION clauses, append it
7595    to STMT_SEQP.  CLIST if non-NULL is a pointer to a sequence
7596    that should be emitted also inside of the critical section,
7597    in that case clear *CLIST afterwards, otherwise leave it as is
7598    and let the caller emit it itself.  */
7599 
7600 static void
lower_reduction_clauses(tree clauses,gimple_seq * stmt_seqp,gimple_seq * clist,omp_context * ctx)7601 lower_reduction_clauses (tree clauses, gimple_seq *stmt_seqp,
7602 			 gimple_seq *clist, omp_context *ctx)
7603 {
7604   gimple_seq sub_seq = NULL;
7605   gimple *stmt;
7606   tree x, c;
7607   int count = 0;
7608 
7609   /* OpenACC loop reductions are handled elsewhere.  */
7610   if (is_gimple_omp_oacc (ctx->stmt))
7611     return;
7612 
7613   /* SIMD reductions are handled in lower_rec_input_clauses.  */
7614   if (gimple_code (ctx->stmt) == GIMPLE_OMP_FOR
7615       && gimple_omp_for_kind (ctx->stmt) == GF_OMP_FOR_KIND_SIMD)
7616     return;
7617 
7618   /* inscan reductions are handled elsewhere.  */
7619   if (ctx->scan_inclusive || ctx->scan_exclusive)
7620     return;
7621 
7622   /* First see if there is exactly one reduction clause.  Use OMP_ATOMIC
7623      update in that case, otherwise use a lock.  */
7624   for (c = clauses; c && count < 2; c = OMP_CLAUSE_CHAIN (c))
7625     if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
7626 	&& !OMP_CLAUSE_REDUCTION_TASK (c))
7627       {
7628 	if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c)
7629 	    || TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
7630 	  {
7631 	    /* Never use OMP_ATOMIC for array reductions or UDRs.  */
7632 	    count = -1;
7633 	    break;
7634 	  }
7635 	count++;
7636       }
7637 
7638   if (count == 0)
7639     return;
7640 
7641   for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7642     {
7643       tree var, ref, new_var, orig_var;
7644       enum tree_code code;
7645       location_t clause_loc = OMP_CLAUSE_LOCATION (c);
7646 
7647       if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
7648 	  || OMP_CLAUSE_REDUCTION_TASK (c))
7649 	continue;
7650 
7651       enum omp_clause_code ccode = OMP_CLAUSE_REDUCTION;
7652       orig_var = var = OMP_CLAUSE_DECL (c);
7653       if (TREE_CODE (var) == MEM_REF)
7654 	{
7655 	  var = TREE_OPERAND (var, 0);
7656 	  if (TREE_CODE (var) == POINTER_PLUS_EXPR)
7657 	    var = TREE_OPERAND (var, 0);
7658 	  if (TREE_CODE (var) == ADDR_EXPR)
7659 	    var = TREE_OPERAND (var, 0);
7660 	  else
7661 	    {
7662 	      /* If this is a pointer or referenced based array
7663 		 section, the var could be private in the outer
7664 		 context e.g. on orphaned loop construct.  Pretend this
7665 		 is private variable's outer reference.  */
7666 	      ccode = OMP_CLAUSE_PRIVATE;
7667 	      if (TREE_CODE (var) == INDIRECT_REF)
7668 		var = TREE_OPERAND (var, 0);
7669 	    }
7670 	  orig_var = var;
7671 	  if (is_variable_sized (var))
7672 	    {
7673 	      gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
7674 	      var = DECL_VALUE_EXPR (var);
7675 	      gcc_assert (TREE_CODE (var) == INDIRECT_REF);
7676 	      var = TREE_OPERAND (var, 0);
7677 	      gcc_assert (DECL_P (var));
7678 	    }
7679 	}
7680       new_var = lookup_decl (var, ctx);
7681       if (var == OMP_CLAUSE_DECL (c)
7682 	  && omp_privatize_by_reference (var))
7683 	new_var = build_simple_mem_ref_loc (clause_loc, new_var);
7684       ref = build_outer_var_ref (var, ctx, ccode);
7685       code = OMP_CLAUSE_REDUCTION_CODE (c);
7686 
7687       /* reduction(-:var) sums up the partial results, so it acts
7688 	 identically to reduction(+:var).  */
7689       if (code == MINUS_EXPR)
7690         code = PLUS_EXPR;
7691 
7692       bool is_truth_op = (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR);
7693       if (count == 1)
7694 	{
7695 	  tree addr = build_fold_addr_expr_loc (clause_loc, ref);
7696 
7697 	  addr = save_expr (addr);
7698 	  ref = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (addr)), addr);
7699 	  tree new_var2 = new_var;
7700 	  tree ref2 = ref;
7701 	  if (is_truth_op)
7702 	    {
7703 	      tree zero = build_zero_cst (TREE_TYPE (new_var));
7704 	      new_var2 = fold_build2_loc (clause_loc, NE_EXPR,
7705 					  boolean_type_node, new_var, zero);
7706 	      ref2 = fold_build2_loc (clause_loc, NE_EXPR, boolean_type_node,
7707 				      ref, zero);
7708 	    }
7709 	  x = fold_build2_loc (clause_loc, code, TREE_TYPE (new_var2), ref2,
7710 			       new_var2);
7711 	  if (is_truth_op)
7712 	    x = fold_convert (TREE_TYPE (new_var), x);
7713 	  x = build2 (OMP_ATOMIC, void_type_node, addr, x);
7714 	  OMP_ATOMIC_MEMORY_ORDER (x) = OMP_MEMORY_ORDER_RELAXED;
7715 	  gimplify_and_add (x, stmt_seqp);
7716 	  return;
7717 	}
7718       else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF)
7719 	{
7720 	  tree d = OMP_CLAUSE_DECL (c);
7721 	  tree type = TREE_TYPE (d);
7722 	  tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
7723 	  tree i = create_tmp_var (TREE_TYPE (v));
7724 	  tree ptype = build_pointer_type (TREE_TYPE (type));
7725 	  tree bias = TREE_OPERAND (d, 1);
7726 	  d = TREE_OPERAND (d, 0);
7727 	  if (TREE_CODE (d) == POINTER_PLUS_EXPR)
7728 	    {
7729 	      tree b = TREE_OPERAND (d, 1);
7730 	      b = maybe_lookup_decl (b, ctx);
7731 	      if (b == NULL)
7732 		{
7733 		  b = TREE_OPERAND (d, 1);
7734 		  b = maybe_lookup_decl_in_outer_ctx (b, ctx);
7735 		}
7736 	      if (integer_zerop (bias))
7737 		bias = b;
7738 	      else
7739 		{
7740 		  bias = fold_convert_loc (clause_loc, TREE_TYPE (b), bias);
7741 		  bias = fold_build2_loc (clause_loc, PLUS_EXPR,
7742 					  TREE_TYPE (b), b, bias);
7743 		}
7744 	      d = TREE_OPERAND (d, 0);
7745 	    }
7746 	  /* For ref build_outer_var_ref already performs this, so
7747 	     only new_var needs a dereference.  */
7748 	  if (TREE_CODE (d) == INDIRECT_REF)
7749 	    {
7750 	      new_var = build_simple_mem_ref_loc (clause_loc, new_var);
7751 	      gcc_assert (omp_privatize_by_reference (var)
7752 			  && var == orig_var);
7753 	    }
7754 	  else if (TREE_CODE (d) == ADDR_EXPR)
7755 	    {
7756 	      if (orig_var == var)
7757 		{
7758 		  new_var = build_fold_addr_expr (new_var);
7759 		  ref = build_fold_addr_expr (ref);
7760 		}
7761 	    }
7762 	  else
7763 	    {
7764 	      gcc_assert (orig_var == var);
7765 	      if (omp_privatize_by_reference (var))
7766 		ref = build_fold_addr_expr (ref);
7767 	    }
7768 	  if (DECL_P (v))
7769 	    {
7770 	      tree t = maybe_lookup_decl (v, ctx);
7771 	      if (t)
7772 		v = t;
7773 	      else
7774 		v = maybe_lookup_decl_in_outer_ctx (v, ctx);
7775 	      gimplify_expr (&v, stmt_seqp, NULL, is_gimple_val, fb_rvalue);
7776 	    }
7777 	  if (!integer_zerop (bias))
7778 	    {
7779 	      bias = fold_convert_loc (clause_loc, sizetype, bias);
7780 	      new_var = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
7781 					 TREE_TYPE (new_var), new_var,
7782 					 unshare_expr (bias));
7783 	      ref = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
7784 					 TREE_TYPE (ref), ref, bias);
7785 	    }
7786 	  new_var = fold_convert_loc (clause_loc, ptype, new_var);
7787 	  ref = fold_convert_loc (clause_loc, ptype, ref);
7788 	  tree m = create_tmp_var (ptype);
7789 	  gimplify_assign (m, new_var, stmt_seqp);
7790 	  new_var = m;
7791 	  m = create_tmp_var (ptype);
7792 	  gimplify_assign (m, ref, stmt_seqp);
7793 	  ref = m;
7794 	  gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), stmt_seqp);
7795 	  tree body = create_artificial_label (UNKNOWN_LOCATION);
7796 	  tree end = create_artificial_label (UNKNOWN_LOCATION);
7797 	  gimple_seq_add_stmt (&sub_seq, gimple_build_label (body));
7798 	  tree priv = build_simple_mem_ref_loc (clause_loc, new_var);
7799 	  tree out = build_simple_mem_ref_loc (clause_loc, ref);
7800 	  if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
7801 	    {
7802 	      tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
7803 	      tree decl_placeholder
7804 		= OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
7805 	      SET_DECL_VALUE_EXPR (placeholder, out);
7806 	      DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
7807 	      SET_DECL_VALUE_EXPR (decl_placeholder, priv);
7808 	      DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
7809 	      lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
7810 	      gimple_seq_add_seq (&sub_seq,
7811 				  OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
7812 	      OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
7813 	      OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
7814 	      OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
7815 	    }
7816 	  else
7817 	    {
7818 	      tree out2 = out;
7819 	      tree priv2 = priv;
7820 	      if (is_truth_op)
7821 		{
7822 		  tree zero = build_zero_cst (TREE_TYPE (out));
7823 		  out2 = fold_build2_loc (clause_loc, NE_EXPR,
7824 					  boolean_type_node, out, zero);
7825 		  priv2 = fold_build2_loc (clause_loc, NE_EXPR,
7826 					   boolean_type_node, priv, zero);
7827 		}
7828 	      x = build2 (code, TREE_TYPE (out2), out2, priv2);
7829 	      if (is_truth_op)
7830 		x = fold_convert (TREE_TYPE (out), x);
7831 	      out = unshare_expr (out);
7832 	      gimplify_assign (out, x, &sub_seq);
7833 	    }
7834 	  gimple *g = gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
7835 					   TYPE_SIZE_UNIT (TREE_TYPE (type)));
7836 	  gimple_seq_add_stmt (&sub_seq, g);
7837 	  g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
7838 				   TYPE_SIZE_UNIT (TREE_TYPE (type)));
7839 	  gimple_seq_add_stmt (&sub_seq, g);
7840 	  g = gimple_build_assign (i, PLUS_EXPR, i,
7841 				   build_int_cst (TREE_TYPE (i), 1));
7842 	  gimple_seq_add_stmt (&sub_seq, g);
7843 	  g = gimple_build_cond (LE_EXPR, i, v, body, end);
7844 	  gimple_seq_add_stmt (&sub_seq, g);
7845 	  gimple_seq_add_stmt (&sub_seq, gimple_build_label (end));
7846 	}
7847       else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
7848 	{
7849 	  tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
7850 
7851 	  if (omp_privatize_by_reference (var)
7852 	      && !useless_type_conversion_p (TREE_TYPE (placeholder),
7853 					     TREE_TYPE (ref)))
7854 	    ref = build_fold_addr_expr_loc (clause_loc, ref);
7855 	  SET_DECL_VALUE_EXPR (placeholder, ref);
7856 	  DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
7857 	  lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
7858 	  gimple_seq_add_seq (&sub_seq, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
7859 	  OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
7860 	  OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
7861 	}
7862       else
7863 	{
7864 	  tree new_var2 = new_var;
7865 	  tree ref2 = ref;
7866 	  if (is_truth_op)
7867 	    {
7868 	      tree zero = build_zero_cst (TREE_TYPE (new_var));
7869 	      new_var2 = fold_build2_loc (clause_loc, NE_EXPR,
7870 					  boolean_type_node, new_var, zero);
7871 	      ref2 = fold_build2_loc (clause_loc, NE_EXPR, boolean_type_node,
7872 				      ref, zero);
7873 	    }
7874 	  x = build2 (code, TREE_TYPE (ref), ref2, new_var2);
7875 	  if (is_truth_op)
7876 	    x = fold_convert (TREE_TYPE (new_var), x);
7877 	  ref = build_outer_var_ref (var, ctx);
7878 	  gimplify_assign (ref, x, &sub_seq);
7879 	}
7880     }
7881 
7882   stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START),
7883 			    0);
7884   gimple_seq_add_stmt (stmt_seqp, stmt);
7885 
7886   gimple_seq_add_seq (stmt_seqp, sub_seq);
7887 
7888   if (clist)
7889     {
7890       gimple_seq_add_seq (stmt_seqp, *clist);
7891       *clist = NULL;
7892     }
7893 
7894   stmt = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END),
7895 			    0);
7896   gimple_seq_add_stmt (stmt_seqp, stmt);
7897 }
7898 
7899 
7900 /* Generate code to implement the COPYPRIVATE clauses.  */
7901 
7902 static void
lower_copyprivate_clauses(tree clauses,gimple_seq * slist,gimple_seq * rlist,omp_context * ctx)7903 lower_copyprivate_clauses (tree clauses, gimple_seq *slist, gimple_seq *rlist,
7904 			    omp_context *ctx)
7905 {
7906   tree c;
7907 
7908   for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
7909     {
7910       tree var, new_var, ref, x;
7911       bool by_ref;
7912       location_t clause_loc = OMP_CLAUSE_LOCATION (c);
7913 
7914       if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYPRIVATE)
7915 	continue;
7916 
7917       var = OMP_CLAUSE_DECL (c);
7918       by_ref = use_pointer_for_field (var, NULL);
7919 
7920       ref = build_sender_ref (var, ctx);
7921       x = new_var = lookup_decl_in_outer_ctx (var, ctx);
7922       if (by_ref)
7923 	{
7924 	  x = build_fold_addr_expr_loc (clause_loc, new_var);
7925 	  x = fold_convert_loc (clause_loc, TREE_TYPE (ref), x);
7926 	}
7927       gimplify_assign (ref, x, slist);
7928 
7929       ref = build_receiver_ref (var, false, ctx);
7930       if (by_ref)
7931 	{
7932 	  ref = fold_convert_loc (clause_loc,
7933 				  build_pointer_type (TREE_TYPE (new_var)),
7934 				  ref);
7935 	  ref = build_fold_indirect_ref_loc (clause_loc, ref);
7936 	}
7937       if (omp_privatize_by_reference (var))
7938 	{
7939 	  ref = fold_convert_loc (clause_loc, TREE_TYPE (new_var), ref);
7940 	  ref = build_simple_mem_ref_loc (clause_loc, ref);
7941 	  new_var = build_simple_mem_ref_loc (clause_loc, new_var);
7942 	}
7943       x = lang_hooks.decls.omp_clause_assign_op (c, new_var, ref);
7944       gimplify_and_add (x, rlist);
7945     }
7946 }
7947 
7948 
7949 /* Generate code to implement the clauses, FIRSTPRIVATE, COPYIN, LASTPRIVATE,
7950    and REDUCTION from the sender (aka parent) side.  */
7951 
static void
lower_send_clauses (tree clauses, gimple_seq *ilist, gimple_seq *olist,
    		    omp_context *ctx)
{
  tree c, t;
  int ignored_looptemp = 0;
  bool is_taskloop = false;

  /* For taskloop, ignore first two _looptemp_ clauses, those are initialized
     by GOMP_taskloop.  */
  if (is_task_ctx (ctx) && gimple_omp_task_taskloop_p (ctx->stmt))
    {
      ignored_looptemp = 2;
      is_taskloop = true;
    }

  for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
    {
      tree val, ref, x, var;
      bool by_ref, do_in = false, do_out = false;
      location_t clause_loc = OMP_CLAUSE_LOCATION (c);

      /* First filter: only the clause kinds listed below need sender-side
	 data movement; everything else is handled elsewhere and the loop
	 just moves on ('continue').  */
      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_PRIVATE:
	  if (OMP_CLAUSE_PRIVATE_OUTER_REF (c))
	    break;
	  continue;
	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_LASTPRIVATE:
	case OMP_CLAUSE_IN_REDUCTION:
	case OMP_CLAUSE__REDUCTEMP_:
	  break;
	case OMP_CLAUSE_REDUCTION:
	  /* Task reductions are lowered separately.  */
	  if (is_task_ctx (ctx) || OMP_CLAUSE_REDUCTION_TASK (c))
	    continue;
	  break;
	case OMP_CLAUSE_SHARED:
	  if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	    break;
	  continue;
	case OMP_CLAUSE__LOOPTEMP_:
	  /* Skip the looptemp clauses counted above (taskloop only).  */
	  if (ignored_looptemp)
	    {
	      ignored_looptemp--;
	      continue;
	    }
	  break;
	default:
	  continue;
	}

      val = OMP_CLAUSE_DECL (c);
      /* For array-section reductions the decl is a MEM_REF; peel back to
	 the underlying base variable.  */
      if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	   || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION)
	  && TREE_CODE (val) == MEM_REF)
	{
	  val = TREE_OPERAND (val, 0);
	  if (TREE_CODE (val) == POINTER_PLUS_EXPR)
	    val = TREE_OPERAND (val, 0);
	  if (TREE_CODE (val) == INDIRECT_REF
	      || TREE_CODE (val) == ADDR_EXPR)
	    val = TREE_OPERAND (val, 0);
	  if (is_variable_sized (val))
	    continue;
	}

      /* For OMP_CLAUSE_SHARED_FIRSTPRIVATE, look beyond the
	 outer taskloop region.  */
      omp_context *ctx_for_o = ctx;
      if (is_taskloop
	  && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
	  && OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
	ctx_for_o = ctx->outer;

      var = lookup_decl_in_outer_ctx (val, ctx_for_o);

      /* Global variables (other than COPYIN ones) generally need no
	 sender-side copy; the exceptions involve task contexts with
	 pointer/reference-to-pointer typed decls.  */
      if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_COPYIN
	  && is_global_var (var)
	  && (val == OMP_CLAUSE_DECL (c)
	      || !is_task_ctx (ctx)
	      || (TREE_CODE (TREE_TYPE (val)) != POINTER_TYPE
		  && (TREE_CODE (TREE_TYPE (val)) != REFERENCE_TYPE
		      || (TREE_CODE (TREE_TYPE (TREE_TYPE (val)))
			  != POINTER_TYPE)))))
	continue;

      /* Member accesses are represented by dummy vars; substitute the
	 DECL_VALUE_EXPR, remapping the dummy to its outer copy.  */
      t = omp_member_access_dummy_var (var);
      if (t)
	{
	  var = DECL_VALUE_EXPR (var);
	  tree o = maybe_lookup_decl_in_outer_ctx (t, ctx_for_o);
	  if (o != t)
	    var = unshare_and_remap (var, t, o);
	  else
	    var = unshare_expr (var);
	}

      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED)
	{
	  /* Handle taskloop firstprivate/lastprivate, where the
	     lastprivate on GIMPLE_OMP_TASK is represented as
	     OMP_CLAUSE_SHARED_FIRSTPRIVATE.  */
	  tree f = lookup_sfield ((splay_tree_key) &DECL_UID (val), ctx);
	  x = omp_build_component_ref (ctx->sender_decl, f);
	  if (use_pointer_for_field (val, ctx))
	    var = build_fold_addr_expr (var);
	  gimplify_assign (x, var, ilist);
	  DECL_ABSTRACT_ORIGIN (f) = NULL;
	  continue;
	}

      if (((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
	    && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IN_REDUCTION)
	   || val == OMP_CLAUSE_DECL (c))
	  && is_variable_sized (val))
	continue;
      by_ref = use_pointer_for_field (val, NULL);

      /* Second dispatch: decide the direction of the copy.  DO_IN copies
	 into the record before the region (ILIST); DO_OUT copies back out
	 after it (OLIST).  */
      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_FIRSTPRIVATE:
	  if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c)
	      && !by_ref
	      && is_task_ctx (ctx))
	    suppress_warning (var);
	  do_in = true;
	  break;

	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE__LOOPTEMP_:
	case OMP_CLAUSE__REDUCTEMP_:
	  do_in = true;
	  break;

	case OMP_CLAUSE_LASTPRIVATE:
	  if (by_ref || omp_privatize_by_reference (val))
	    {
	      if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
		continue;
	      do_in = true;
	    }
	  else
	    {
	      do_out = true;
	      if (lang_hooks.decls.omp_private_outer_ref (val))
		do_in = true;
	    }
	  break;

	case OMP_CLAUSE_REDUCTION:
	case OMP_CLAUSE_IN_REDUCTION:
	  do_in = true;
	  if (val == OMP_CLAUSE_DECL (c))
	    {
	      if (is_task_ctx (ctx))
		by_ref = use_pointer_for_field (val, ctx);
	      else
		do_out = !(by_ref || omp_privatize_by_reference (val));
	    }
	  else
	    by_ref = TREE_CODE (TREE_TYPE (val)) == ARRAY_TYPE;
	  break;

	default:
	  gcc_unreachable ();
	}

      if (do_in)
	{
	  ref = build_sender_ref (val, ctx);
	  x = by_ref ? build_fold_addr_expr_loc (clause_loc, var) : var;
	  gimplify_assign (ref, x, ilist);
	  if (is_task_ctx (ctx))
	    DECL_ABSTRACT_ORIGIN (TREE_OPERAND (ref, 1)) = NULL;
	}

      if (do_out)
	{
	  ref = build_sender_ref (val, ctx);
	  gimplify_assign (var, ref, olist);
	}
    }
}
8138 
8139 /* Generate code to implement SHARED from the sender (aka parent)
8140    side.  This is trickier, since GIMPLE_OMP_PARALLEL_CLAUSES doesn't
8141    list things that got automatically shared.  */
8142 
static void
lower_send_shared_vars (gimple_seq *ilist, gimple_seq *olist, omp_context *ctx)
{
  tree var, ovar, nvar, t, f, x, record_type;

  if (ctx->record_type == NULL)
    return;

  /* Walk the fields of the (sender) record; each field's abstract origin
     points back at the original shared variable.  */
  record_type = ctx->srecord_type ? ctx->srecord_type : ctx->record_type;
  for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
    {
      ovar = DECL_ABSTRACT_ORIGIN (f);
      /* Skip fields without an origin decl (or whose origin is itself a
	 field, i.e. not a shared variable).  */
      if (!ovar || TREE_CODE (ovar) == FIELD_DECL)
	continue;

      /* Skip variables that were not remapped in this context, and those
	 handled via the allocate clause map.  */
      nvar = maybe_lookup_decl (ovar, ctx);
      if (!nvar
	  || !DECL_HAS_VALUE_EXPR_P (nvar)
	  || (ctx->allocate_map
	      && ctx->allocate_map->get (ovar)))
	continue;

      /* If CTX is a nested parallel directive.  Find the immediately
	 enclosing parallel or workshare construct that contains a
	 mapping for OVAR.  */
      var = lookup_decl_in_outer_ctx (ovar, ctx);

      /* Member accesses are represented by dummy vars; substitute the
	 DECL_VALUE_EXPR, remapping the dummy to its outer copy.  */
      t = omp_member_access_dummy_var (var);
      if (t)
	{
	  var = DECL_VALUE_EXPR (var);
	  tree o = maybe_lookup_decl_in_outer_ctx (t, ctx);
	  if (o != t)
	    var = unshare_and_remap (var, t, o);
	  else
	    var = unshare_expr (var);
	}

      if (use_pointer_for_field (ovar, ctx))
	{
	  x = build_sender_ref (ovar, ctx);
	  if (TREE_CODE (TREE_TYPE (f)) == ARRAY_TYPE
	      && TREE_TYPE (f) == TREE_TYPE (ovar))
	    {
	      gcc_assert (is_parallel_ctx (ctx)
			  && DECL_ARTIFICIAL (ovar));
	      /* _condtemp_ clause.  */
	      var = build_constructor (TREE_TYPE (x), NULL);
	    }
	  else
	    var = build_fold_addr_expr (var);
	  gimplify_assign (x, var, ilist);
	}
      else
	{
	  /* Copy the value in before the region, and (when legal) copy it
	     back out afterwards.  */
	  x = build_sender_ref (ovar, ctx);
	  gimplify_assign (x, var, ilist);

	  if (!TREE_READONLY (var)
	      /* We don't need to receive a new reference to a result
	         or parm decl.  In fact we may not store to it as we will
		 invalidate any pending RSO and generate wrong gimple
		 during inlining.  */
	      && !((TREE_CODE (var) == RESULT_DECL
		    || TREE_CODE (var) == PARM_DECL)
		   && DECL_BY_REFERENCE (var)))
	    {
	      x = build_sender_ref (ovar, ctx);
	      gimplify_assign (var, x, olist);
	    }
	}
    }
}
8216 
/* Emit an OpenACC head marker call, encapsulating the partitioning and
   other information that must be processed by the target compiler.
   Return the maximum number of dimensions the associated loop might
   be partitioned over.  */
8221 
static unsigned
lower_oacc_head_mark (location_t loc, tree ddvar, tree clauses,
		      gimple_seq *seq, omp_context *ctx)
{
  unsigned levels = 0;
  unsigned tag = 0;
  tree gang_static = NULL_TREE;
  auto_vec<tree, 5> args;

  /* The head-mark IFN_UNIQUE call takes: the marker kind, the data
     dependency variable, the level count, the OLF_* tag, and optionally
     the gang static argument.  */
  args.quick_push (build_int_cst
		   (integer_type_node, IFN_UNIQUE_OACC_HEAD_MARK));
  args.quick_push (ddvar);
  /* Accumulate partitioning flags (and the level count) from the loop
     clauses.  */
  for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
    {
      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_GANG:
	  tag |= OLF_DIM_GANG;
	  gang_static = OMP_CLAUSE_GANG_STATIC_EXPR (c);
	  /* static:* is represented by -1, and we can ignore it, as
	     scheduling is always static.  */
	  if (gang_static && integer_minus_onep (gang_static))
	    gang_static = NULL_TREE;
	  levels++;
	  break;

	case OMP_CLAUSE_WORKER:
	  tag |= OLF_DIM_WORKER;
	  levels++;
	  break;

	case OMP_CLAUSE_VECTOR:
	  tag |= OLF_DIM_VECTOR;
	  levels++;
	  break;

	case OMP_CLAUSE_SEQ:
	  tag |= OLF_SEQ;
	  break;

	case OMP_CLAUSE_AUTO:
	  tag |= OLF_AUTO;
	  break;

	case OMP_CLAUSE_INDEPENDENT:
	  tag |= OLF_INDEPENDENT;
	  break;

	case OMP_CLAUSE_TILE:
	  tag |= OLF_TILE;
	  break;

	case OMP_CLAUSE_REDUCTION:
	  tag |= OLF_REDUCTION;
	  break;

	default:
	  continue;
	}
    }

  if (gang_static)
    {
      if (DECL_P (gang_static))
	gang_static = build_outer_var_ref (gang_static, ctx);
      tag |= OLF_GANG_STATIC;
    }

  /* Sanity-check the kind of enclosing offload region; only parallel,
     serial, no target at all, or a decomposed part of a 'kernels'
     region is expected here.  */
  omp_context *tgt = enclosing_target_ctx (ctx);
  if (!tgt || is_oacc_parallel_or_serial (tgt))
    ;
  else if (is_oacc_kernels (tgt))
    /* Not using this loops handling inside OpenACC 'kernels' regions.  */
    gcc_unreachable ();
  else if (is_oacc_kernels_decomposed_part (tgt))
    ;
  else
    gcc_unreachable ();

  /* In a parallel region, loops are implicitly INDEPENDENT.  */
  if (!tgt || is_oacc_parallel_or_serial (tgt))
    tag |= OLF_INDEPENDENT;

  /* Loops inside OpenACC 'kernels' decomposed parts' regions are expected to
     have an explicit 'seq' or 'independent' clause, and no 'auto' clause.  */
  if (tgt && is_oacc_kernels_decomposed_part (tgt))
    {
      gcc_assert (tag & (OLF_SEQ | OLF_INDEPENDENT));
      gcc_assert (!(tag & OLF_AUTO));
    }

  if (tag & OLF_TILE)
    /* Tiling could use all 3 levels.  */
    levels = 3;
  else
    {
      /* A loop lacking SEQ, GANG, WORKER and/or VECTOR could be AUTO.
	 Ensure at least one level, or 2 for possible auto
	 partitioning */
      bool maybe_auto = !(tag & (((GOMP_DIM_MASK (GOMP_DIM_MAX) - 1)
				  << OLF_DIM_BASE) | OLF_SEQ));

      if (levels < 1u + maybe_auto)
	levels = 1u + maybe_auto;
    }

  args.quick_push (build_int_cst (integer_type_node, levels));
  args.quick_push (build_int_cst (integer_type_node, tag));
  if (gang_static)
    args.quick_push (gang_static);

  gcall *call = gimple_build_call_internal_vec (IFN_UNIQUE, args);
  gimple_set_location (call, loc);
  gimple_set_lhs (call, ddvar);
  gimple_seq_add_stmt (seq, call);

  return levels;
}
8340 
/* Emit an OpenACC loop head or tail marker to SEQ.  LEVEL is the
   partitioning level of the enclosed region.  */
8343 
8344 static void
lower_oacc_loop_marker(location_t loc,tree ddvar,bool head,tree tofollow,gimple_seq * seq)8345 lower_oacc_loop_marker (location_t loc, tree ddvar, bool head,
8346 			tree tofollow, gimple_seq *seq)
8347 {
8348   int marker_kind = (head ? IFN_UNIQUE_OACC_HEAD_MARK
8349 		     : IFN_UNIQUE_OACC_TAIL_MARK);
8350   tree marker = build_int_cst (integer_type_node, marker_kind);
8351   int nargs = 2 + (tofollow != NULL_TREE);
8352   gcall *call = gimple_build_call_internal (IFN_UNIQUE, nargs,
8353 					    marker, ddvar, tofollow);
8354   gimple_set_location (call, loc);
8355   gimple_set_lhs (call, ddvar);
8356   gimple_seq_add_stmt (seq, call);
8357 }
8358 
8359 /* Generate the before and after OpenACC loop sequences.  CLAUSES are
8360    the loop clauses, from which we extract reductions.  Initialize
8361    HEAD and TAIL.  */
8362 
static void
lower_oacc_head_tail (location_t loc, tree clauses, gcall *private_marker,
		      gimple_seq *head, gimple_seq *tail, omp_context *ctx)
{
  bool inner = false;
  /* DDVAR threads a data dependency through all the marker calls so the
     optimizers cannot reorder them.  */
  tree ddvar = create_tmp_var (integer_type_node, ".data_dep");
  gimple_seq_add_stmt (head, gimple_build_assign (ddvar, integer_zero_node));

  unsigned count = lower_oacc_head_mark (loc, ddvar, clauses, head, ctx);

  if (private_marker)
    {
      gimple_set_location (private_marker, loc);
      gimple_call_set_lhs (private_marker, ddvar);
      gimple_call_set_arg (private_marker, 1, ddvar);
    }

  tree fork_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_FORK);
  tree join_kind = build_int_cst (unsigned_type_node, IFN_UNIQUE_OACC_JOIN);

  gcc_assert (count);
  /* Emit one fork/join pair per partitioning level; forks accumulate onto
     HEAD in order, joins are prepended to TAIL so they nest correctly.  */
  for (unsigned done = 1; count; count--, done++)
    {
      gimple_seq fork_seq = NULL;
      gimple_seq join_seq = NULL;

      /* The actual partitioning axis (-1 here) is filled in later by the
	 oacc device lowering.  */
      tree place = build_int_cst (integer_type_node, -1);
      gcall *fork = gimple_build_call_internal (IFN_UNIQUE, 3,
						fork_kind, ddvar, place);
      gimple_set_location (fork, loc);
      gimple_set_lhs (fork, ddvar);

      gcall *join = gimple_build_call_internal (IFN_UNIQUE, 3,
						join_kind, ddvar, place);
      gimple_set_location (join, loc);
      gimple_set_lhs (join, ddvar);

      /* Mark the beginning of this level sequence.  */
      if (inner)
	lower_oacc_loop_marker (loc, ddvar, true,
				build_int_cst (integer_type_node, count),
				&fork_seq);
      lower_oacc_loop_marker (loc, ddvar, false,
			      build_int_cst (integer_type_node, done),
			      &join_seq);

      lower_oacc_reductions (loc, clauses, place, inner,
			     fork, (count == 1) ? private_marker : NULL,
			     join, &fork_seq, &join_seq,  ctx);

      /* Append this level to head. */
      gimple_seq_add_seq (head, fork_seq);
      /* Prepend it to tail.  */
      gimple_seq_add_seq (&join_seq, *tail);
      *tail = join_seq;

      inner = true;
    }

  /* Mark the end of the sequence.  */
  lower_oacc_loop_marker (loc, ddvar, true, NULL_TREE, head);
  lower_oacc_loop_marker (loc, ddvar, false, NULL_TREE, tail);
}
8426 
8427 /* If exceptions are enabled, wrap the statements in BODY in a MUST_NOT_THROW
8428    catch handler and return it.  This prevents programs from violating the
8429    structured block semantics with throws.  */
8430 
8431 static gimple_seq
maybe_catch_exception(gimple_seq body)8432 maybe_catch_exception (gimple_seq body)
8433 {
8434   gimple *g;
8435   tree decl;
8436 
8437   if (!flag_exceptions)
8438     return body;
8439 
8440   if (lang_hooks.eh_protect_cleanup_actions != NULL)
8441     decl = lang_hooks.eh_protect_cleanup_actions ();
8442   else
8443     decl = builtin_decl_explicit (BUILT_IN_TRAP);
8444 
8445   g = gimple_build_eh_must_not_throw (decl);
8446   g = gimple_build_try (body, gimple_seq_alloc_with_stmt (g),
8447       			GIMPLE_TRY_CATCH);
8448 
8449  return gimple_seq_alloc_with_stmt (g);
8450 }
8451 
8452 
8453 /* Routines to lower OMP directives into OMP-GIMPLE.  */
8454 
8455 /* If ctx is a worksharing context inside of a cancellable parallel
8456    region and it isn't nowait, add lhs to its GIMPLE_OMP_RETURN
8457    and conditional branch to parallel's cancel_label to handle
8458    cancellation in the implicit barrier.  */
8459 
static void
maybe_add_implicit_barrier_cancel (omp_context *ctx, gimple *omp_return,
				   gimple_seq *body)
{
  gcc_assert (gimple_code (omp_return) == GIMPLE_OMP_RETURN);
  /* Nowait regions have no implicit barrier, hence nothing to cancel.  */
  if (gimple_omp_return_nowait_p (omp_return))
    return;
  /* Walk outwards looking for a cancellable parallel; only taskgroup and
     scope constructs may be skipped over — any other construct in between
     ends the search.  */
  for (omp_context *outer = ctx->outer; outer; outer = outer->outer)
    if (gimple_code (outer->stmt) == GIMPLE_OMP_PARALLEL
	&& outer->cancellable)
      {
	/* Give the GIMPLE_OMP_RETURN an lhs that receives the barrier's
	   cancellation status, and branch to the parallel's cancel label
	   when it is set.  */
	tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_CANCEL);
	tree c_bool_type = TREE_TYPE (TREE_TYPE (fndecl));
	tree lhs = create_tmp_var (c_bool_type);
	gimple_omp_return_set_lhs (omp_return, lhs);
	tree fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
	gimple *g = gimple_build_cond (NE_EXPR, lhs,
				       fold_convert (c_bool_type,
						     boolean_false_node),
				       outer->cancel_label, fallthru_label);
	gimple_seq_add_stmt (body, g);
	gimple_seq_add_stmt (body, gimple_build_label (fallthru_label));
      }
    else if (gimple_code (outer->stmt) != GIMPLE_OMP_TASKGROUP
	     && gimple_code (outer->stmt) != GIMPLE_OMP_SCOPE)
      return;
}
8487 
8488 /* Find the first task_reduction or reduction clause or return NULL
8489    if there are none.  */
8490 
8491 static inline tree
omp_task_reductions_find_first(tree clauses,enum tree_code code,enum omp_clause_code ccode)8492 omp_task_reductions_find_first (tree clauses, enum tree_code code,
8493 				enum omp_clause_code ccode)
8494 {
8495   while (1)
8496     {
8497       clauses = omp_find_clause (clauses, ccode);
8498       if (clauses == NULL_TREE)
8499 	return NULL_TREE;
8500       if (ccode != OMP_CLAUSE_REDUCTION
8501 	  || code == OMP_TASKLOOP
8502 	  || OMP_CLAUSE_REDUCTION_TASK (clauses))
8503 	return clauses;
8504       clauses = OMP_CLAUSE_CHAIN (clauses);
8505     }
8506 }
8507 
8508 static void lower_omp_task_reductions (omp_context *, enum tree_code, tree,
8509 				       gimple_seq *, gimple_seq *);
8510 
8511 /* Lower the OpenMP sections directive in the current statement in GSI_P.
8512    CTX is the enclosing OMP context for the current statement.  */
8513 
static void
lower_omp_sections (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree block, control;
  gimple_stmt_iterator tgsi;
  gomp_sections *stmt;
  gimple *t;
  gbind *new_stmt, *bind;
  gimple_seq ilist, dlist, olist, tred_dlist = NULL, clist = NULL, new_body;

  stmt = as_a <gomp_sections *> (gsi_stmt (*gsi_p));

  push_gimplify_context ();

  dlist = NULL;
  ilist = NULL;

  /* If there is a task reduction, introduce a _reductemp_ clause holding
     a temporary that GOMP uses to communicate the reduction data.  */
  tree rclauses
    = omp_task_reductions_find_first (gimple_omp_sections_clauses (stmt),
				      OMP_SECTIONS, OMP_CLAUSE_REDUCTION);
  tree rtmp = NULL_TREE;
  if (rclauses)
    {
      tree type = build_pointer_type (pointer_sized_int_node);
      tree temp = create_tmp_var (type);
      tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
      OMP_CLAUSE_DECL (c) = temp;
      OMP_CLAUSE_CHAIN (c) = gimple_omp_sections_clauses (stmt);
      gimple_omp_sections_set_clauses (stmt, c);
      lower_omp_task_reductions (ctx, OMP_SECTIONS,
				 gimple_omp_sections_clauses (stmt),
				 &ilist, &tred_dlist);
      rclauses = c;
      rtmp = make_ssa_name (type);
      gimple_seq_add_stmt (&ilist, gimple_build_assign (rtmp, temp));
    }

  tree *clauses_ptr = gimple_omp_sections_clauses_ptr (stmt);
  lower_lastprivate_conditional_clauses (clauses_ptr, ctx);

  lower_rec_input_clauses (gimple_omp_sections_clauses (stmt),
      			   &ilist, &dlist, ctx, NULL);

  /* CONTROL carries the section selector between the sections switch and
     the continue statement.  */
  control = create_tmp_var (unsigned_type_node, ".section");
  gimple_omp_sections_set_control (stmt, control);

  /* Lower each GIMPLE_OMP_SECTION body in place, flattening it after the
     section statement.  */
  new_body = gimple_omp_body (stmt);
  gimple_omp_set_body (stmt, NULL);
  tgsi = gsi_start (new_body);
  for (; !gsi_end_p (tgsi); gsi_next (&tgsi))
    {
      omp_context *sctx;
      gimple *sec_start;

      sec_start = gsi_stmt (tgsi);
      sctx = maybe_lookup_ctx (sec_start);
      gcc_assert (sctx);

      lower_omp (gimple_omp_body_ptr (sec_start), sctx);
      gsi_insert_seq_after (&tgsi, gimple_omp_body (sec_start),
			    GSI_CONTINUE_LINKING);
      gimple_omp_set_body (sec_start, NULL);

      /* Lastprivate handling belongs at the end of the final section.  */
      if (gsi_one_before_end_p (tgsi))
	{
	  gimple_seq l = NULL;
	  lower_lastprivate_clauses (gimple_omp_sections_clauses (stmt), NULL,
				     &ilist, &l, &clist, ctx);
	  gsi_insert_seq_after (&tgsi, l, GSI_CONTINUE_LINKING);
	  gimple_omp_section_set_last (sec_start);
	}

      gsi_insert_after (&tgsi, gimple_build_omp_return (false),
			GSI_CONTINUE_LINKING);
    }

  block = make_node (BLOCK);
  bind = gimple_build_bind (NULL, new_body, block);

  olist = NULL;
  lower_reduction_clauses (gimple_omp_sections_clauses (stmt), &olist,
			   &clist, ctx);
  /* Conditional-lastprivate stores collected in CLIST must execute under
     the GOMP atomic lock.  */
  if (clist)
    {
      tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START);
      gcall *g = gimple_build_call (fndecl, 0);
      gimple_seq_add_stmt (&olist, g);
      gimple_seq_add_seq (&olist, clist);
      fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END);
      g = gimple_build_call (fndecl, 0);
      gimple_seq_add_stmt (&olist, g);
    }

  block = make_node (BLOCK);
  new_stmt = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, new_stmt, true);

  pop_gimplify_context (new_stmt);
  gimple_bind_append_vars (new_stmt, ctx->block_vars);
  BLOCK_VARS (block) = gimple_bind_vars (bind);
  if (BLOCK_VARS (block))
    TREE_USED (block) = 1;

  /* Assemble the replacement body: input clauses, the sections statement,
     the dispatch switch, the lowered bodies, the continue, output
     clauses, and finally the (possibly cancellable) return.  */
  new_body = NULL;
  gimple_seq_add_seq (&new_body, ilist);
  gimple_seq_add_stmt (&new_body, stmt);
  gimple_seq_add_stmt (&new_body, gimple_build_omp_sections_switch ());
  gimple_seq_add_stmt (&new_body, bind);

  t = gimple_build_omp_continue (control, control);
  gimple_seq_add_stmt (&new_body, t);

  gimple_seq_add_seq (&new_body, olist);
  if (ctx->cancellable)
    gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
  gimple_seq_add_seq (&new_body, dlist);

  new_body = maybe_catch_exception (new_body);

  bool nowait = omp_find_clause (gimple_omp_sections_clauses (stmt),
				 OMP_CLAUSE_NOWAIT) != NULL_TREE;
  t = gimple_build_omp_return (nowait);
  gimple_seq_add_stmt (&new_body, t);
  gimple_seq_add_seq (&new_body, tred_dlist);
  maybe_add_implicit_barrier_cancel (ctx, t, &new_body);

  if (rclauses)
    OMP_CLAUSE_DECL (rclauses) = rtmp;

  gimple_bind_set_body (new_stmt, new_body);
}
8645 
8646 
8647 /* A subroutine of lower_omp_single.  Expand the simple form of
8648    a GIMPLE_OMP_SINGLE, without a copyprivate clause:
8649 
8650      	if (GOMP_single_start ())
8651 	  BODY;
8652 	[ GOMP_barrier (); ]	-> unless 'nowait' is present.
8653 
8654   FIXME.  It may be better to delay expanding the logic of this until
8655   pass_expand_omp.  The expanded logic may make the job more difficult
8656   to a synchronization analysis pass.  */
8657 
8658 static void
lower_omp_single_simple(gomp_single * single_stmt,gimple_seq * pre_p)8659 lower_omp_single_simple (gomp_single *single_stmt, gimple_seq *pre_p)
8660 {
8661   location_t loc = gimple_location (single_stmt);
8662   tree tlabel = create_artificial_label (loc);
8663   tree flabel = create_artificial_label (loc);
8664   gimple *call, *cond;
8665   tree lhs, decl;
8666 
8667   decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_START);
8668   lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (decl)));
8669   call = gimple_build_call (decl, 0);
8670   gimple_call_set_lhs (call, lhs);
8671   gimple_seq_add_stmt (pre_p, call);
8672 
8673   cond = gimple_build_cond (EQ_EXPR, lhs,
8674 			    fold_convert_loc (loc, TREE_TYPE (lhs),
8675 					      boolean_true_node),
8676 			    tlabel, flabel);
8677   gimple_seq_add_stmt (pre_p, cond);
8678   gimple_seq_add_stmt (pre_p, gimple_build_label (tlabel));
8679   gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));
8680   gimple_seq_add_stmt (pre_p, gimple_build_label (flabel));
8681 }
8682 
8683 
8684 /* A subroutine of lower_omp_single.  Expand the simple form of
8685    a GIMPLE_OMP_SINGLE, with a copyprivate clause:
8686 
8687 	#pragma omp single copyprivate (a, b, c)
8688 
8689    Create a new structure to hold copies of 'a', 'b' and 'c' and emit:
8690 
8691       {
8692 	if ((copyout_p = GOMP_single_copy_start ()) == NULL)
8693 	  {
8694 	    BODY;
8695 	    copyout.a = a;
8696 	    copyout.b = b;
8697 	    copyout.c = c;
8698 	    GOMP_single_copy_end (&copyout);
8699 	  }
8700 	else
8701 	  {
8702 	    a = copyout_p->a;
8703 	    b = copyout_p->b;
8704 	    c = copyout_p->c;
8705 	  }
8706 	GOMP_barrier ();
8707       }
8708 
8709   FIXME.  It may be better to delay expanding the logic of this until
8710   pass_expand_omp.  The expanded logic may make the job more difficult
8711   to a synchronization analysis pass.  */
8712 
static void
lower_omp_single_copy (gomp_single *single_stmt, gimple_seq *pre_p,
		       omp_context *ctx)
{
  tree ptr_type, t, l0, l1, l2, bfn_decl;
  gimple_seq copyin_seq;
  location_t loc = gimple_location (single_stmt);

  /* SENDER_DECL is the copyout record filled by the executing thread;
     RECEIVER_DECL points at it from the other threads' perspective.  */
  ctx->sender_decl = create_tmp_var (ctx->record_type, ".omp_copy_o");

  ptr_type = build_pointer_type (ctx->record_type);
  ctx->receiver_decl = create_tmp_var (ptr_type, ".omp_copy_i");

  /* l0: executing thread's body + copyout; l1: other threads' copyin;
     l2: join point.  */
  l0 = create_artificial_label (loc);
  l1 = create_artificial_label (loc);
  l2 = create_artificial_label (loc);

  /* receiver = GOMP_single_copy_start (); NULL means "I am the single
     thread".  */
  bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_START);
  t = build_call_expr_loc (loc, bfn_decl, 0);
  t = fold_convert_loc (loc, ptr_type, t);
  gimplify_assign (ctx->receiver_decl, t, pre_p);

  t = build2 (EQ_EXPR, boolean_type_node, ctx->receiver_decl,
	      build_int_cst (ptr_type, 0));
  t = build3 (COND_EXPR, void_type_node, t,
	      build_and_jump (&l0), build_and_jump (&l1));
  gimplify_and_add (t, pre_p);

  gimple_seq_add_stmt (pre_p, gimple_build_label (l0));

  gimple_seq_add_seq (pre_p, gimple_omp_body (single_stmt));

  /* The sender-side stores go into PRE_P directly (after the body);
     the receiver-side loads are collected for the else branch.  */
  copyin_seq = NULL;
  lower_copyprivate_clauses (gimple_omp_single_clauses (single_stmt), pre_p,
			      &copyin_seq, ctx);

  /* GOMP_single_copy_end (&copyout); publishes the record and releases
     the waiting threads.  */
  t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
  bfn_decl = builtin_decl_explicit (BUILT_IN_GOMP_SINGLE_COPY_END);
  t = build_call_expr_loc (loc, bfn_decl, 1, t);
  gimplify_and_add (t, pre_p);

  t = build_and_jump (&l2);
  gimplify_and_add (t, pre_p);

  gimple_seq_add_stmt (pre_p, gimple_build_label (l1));

  gimple_seq_add_seq (pre_p, copyin_seq);

  gimple_seq_add_stmt (pre_p, gimple_build_label (l2));
}
8763 
8764 
8765 /* Expand code for an OpenMP single directive.  */
8766 
8767 static void
lower_omp_single(gimple_stmt_iterator * gsi_p,omp_context * ctx)8768 lower_omp_single (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8769 {
8770   tree block;
8771   gomp_single *single_stmt = as_a <gomp_single *> (gsi_stmt (*gsi_p));
8772   gbind *bind;
8773   gimple_seq bind_body, bind_body_tail = NULL, dlist;
8774 
8775   push_gimplify_context ();
8776 
8777   block = make_node (BLOCK);
8778   bind = gimple_build_bind (NULL, NULL, block);
8779   gsi_replace (gsi_p, bind, true);
8780   bind_body = NULL;
8781   dlist = NULL;
8782   lower_rec_input_clauses (gimple_omp_single_clauses (single_stmt),
8783 			   &bind_body, &dlist, ctx, NULL);
8784   lower_omp (gimple_omp_body_ptr (single_stmt), ctx);
8785 
8786   gimple_seq_add_stmt (&bind_body, single_stmt);
8787 
8788   if (ctx->record_type)
8789     lower_omp_single_copy (single_stmt, &bind_body, ctx);
8790   else
8791     lower_omp_single_simple (single_stmt, &bind_body);
8792 
8793   gimple_omp_set_body (single_stmt, NULL);
8794 
8795   gimple_seq_add_seq (&bind_body, dlist);
8796 
8797   bind_body = maybe_catch_exception (bind_body);
8798 
8799   bool nowait = omp_find_clause (gimple_omp_single_clauses (single_stmt),
8800 				 OMP_CLAUSE_NOWAIT) != NULL_TREE;
8801   gimple *g = gimple_build_omp_return (nowait);
8802   gimple_seq_add_stmt (&bind_body_tail, g);
8803   maybe_add_implicit_barrier_cancel (ctx, g, &bind_body_tail);
8804   if (ctx->record_type)
8805     {
8806       gimple_stmt_iterator gsi = gsi_start (bind_body_tail);
8807       tree clobber = build_clobber (ctx->record_type);
8808       gsi_insert_after (&gsi, gimple_build_assign (ctx->sender_decl,
8809 						   clobber), GSI_SAME_STMT);
8810     }
8811   gimple_seq_add_seq (&bind_body, bind_body_tail);
8812   gimple_bind_set_body (bind, bind_body);
8813 
8814   pop_gimplify_context (bind);
8815 
8816   gimple_bind_append_vars (bind, ctx->block_vars);
8817   BLOCK_VARS (block) = ctx->block_vars;
8818   if (BLOCK_VARS (block))
8819     TREE_USED (block) = 1;
8820 }
8821 
8822 
8823 /* Lower code for an OMP scope directive.  */
8824 
8825 static void
lower_omp_scope(gimple_stmt_iterator * gsi_p,omp_context * ctx)8826 lower_omp_scope (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8827 {
8828   tree block;
8829   gimple *scope_stmt = gsi_stmt (*gsi_p);
8830   gbind *bind;
8831   gimple_seq bind_body, bind_body_tail = NULL, dlist;
8832   gimple_seq tred_dlist = NULL;
8833 
8834   push_gimplify_context ();
8835 
8836   block = make_node (BLOCK);
8837   bind = gimple_build_bind (NULL, NULL, block);
8838   gsi_replace (gsi_p, bind, true);
8839   bind_body = NULL;
8840   dlist = NULL;
8841 
8842   tree rclauses
8843     = omp_task_reductions_find_first (gimple_omp_scope_clauses (scope_stmt),
8844 				      OMP_SCOPE, OMP_CLAUSE_REDUCTION);
8845   if (rclauses)
8846     {
8847       tree type = build_pointer_type (pointer_sized_int_node);
8848       tree temp = create_tmp_var (type);
8849       tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
8850       OMP_CLAUSE_DECL (c) = temp;
8851       OMP_CLAUSE_CHAIN (c) = gimple_omp_scope_clauses (scope_stmt);
8852       gimple_omp_scope_set_clauses (scope_stmt, c);
8853       lower_omp_task_reductions (ctx, OMP_SCOPE,
8854 				 gimple_omp_scope_clauses (scope_stmt),
8855 				 &bind_body, &tred_dlist);
8856       rclauses = c;
8857       tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_SCOPE_START);
8858       gimple *stmt = gimple_build_call (fndecl, 1, temp);
8859       gimple_seq_add_stmt (&bind_body, stmt);
8860     }
8861 
8862   lower_rec_input_clauses (gimple_omp_scope_clauses (scope_stmt),
8863 			   &bind_body, &dlist, ctx, NULL);
8864   lower_omp (gimple_omp_body_ptr (scope_stmt), ctx);
8865 
8866   gimple_seq_add_stmt (&bind_body, scope_stmt);
8867 
8868   gimple_seq_add_seq (&bind_body, gimple_omp_body (scope_stmt));
8869 
8870   gimple_omp_set_body (scope_stmt, NULL);
8871 
8872   gimple_seq clist = NULL;
8873   lower_reduction_clauses (gimple_omp_scope_clauses (scope_stmt),
8874 			   &bind_body, &clist, ctx);
8875   if (clist)
8876     {
8877       tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START);
8878       gcall *g = gimple_build_call (fndecl, 0);
8879       gimple_seq_add_stmt (&bind_body, g);
8880       gimple_seq_add_seq (&bind_body, clist);
8881       fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END);
8882       g = gimple_build_call (fndecl, 0);
8883       gimple_seq_add_stmt (&bind_body, g);
8884     }
8885 
8886   gimple_seq_add_seq (&bind_body, dlist);
8887 
8888   bind_body = maybe_catch_exception (bind_body);
8889 
8890   bool nowait = omp_find_clause (gimple_omp_scope_clauses (scope_stmt),
8891 				 OMP_CLAUSE_NOWAIT) != NULL_TREE;
8892   gimple *g = gimple_build_omp_return (nowait);
8893   gimple_seq_add_stmt (&bind_body_tail, g);
8894   gimple_seq_add_seq (&bind_body_tail, tred_dlist);
8895   maybe_add_implicit_barrier_cancel (ctx, g, &bind_body_tail);
8896   if (ctx->record_type)
8897     {
8898       gimple_stmt_iterator gsi = gsi_start (bind_body_tail);
8899       tree clobber = build_clobber (ctx->record_type);
8900       gsi_insert_after (&gsi, gimple_build_assign (ctx->sender_decl,
8901 						   clobber), GSI_SAME_STMT);
8902     }
8903   gimple_seq_add_seq (&bind_body, bind_body_tail);
8904 
8905   gimple_bind_set_body (bind, bind_body);
8906 
8907   pop_gimplify_context (bind);
8908 
8909   gimple_bind_append_vars (bind, ctx->block_vars);
8910   BLOCK_VARS (block) = ctx->block_vars;
8911   if (BLOCK_VARS (block))
8912     TREE_USED (block) = 1;
8913 }
8914 /* Expand code for an OpenMP master or masked directive.  */
8915 
8916 static void
lower_omp_master(gimple_stmt_iterator * gsi_p,omp_context * ctx)8917 lower_omp_master (gimple_stmt_iterator *gsi_p, omp_context *ctx)
8918 {
8919   tree block, lab = NULL, x, bfn_decl;
8920   gimple *stmt = gsi_stmt (*gsi_p);
8921   gbind *bind;
8922   location_t loc = gimple_location (stmt);
8923   gimple_seq tseq;
8924   tree filter = integer_zero_node;
8925 
8926   push_gimplify_context ();
8927 
8928   if (gimple_code (stmt) == GIMPLE_OMP_MASKED)
8929     {
8930       filter = omp_find_clause (gimple_omp_masked_clauses (stmt),
8931 				OMP_CLAUSE_FILTER);
8932       if (filter)
8933 	filter = fold_convert (integer_type_node,
8934 			       OMP_CLAUSE_FILTER_EXPR (filter));
8935       else
8936 	filter = integer_zero_node;
8937     }
8938   block = make_node (BLOCK);
8939   bind = gimple_build_bind (NULL, NULL, block);
8940   gsi_replace (gsi_p, bind, true);
8941   gimple_bind_add_stmt (bind, stmt);
8942 
8943   bfn_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
8944   x = build_call_expr_loc (loc, bfn_decl, 0);
8945   x = build2 (EQ_EXPR, boolean_type_node, x, filter);
8946   x = build3 (COND_EXPR, void_type_node, x, NULL, build_and_jump (&lab));
8947   tseq = NULL;
8948   gimplify_and_add (x, &tseq);
8949   gimple_bind_add_seq (bind, tseq);
8950 
8951   lower_omp (gimple_omp_body_ptr (stmt), ctx);
8952   gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
8953   gimple_bind_add_seq (bind, gimple_omp_body (stmt));
8954   gimple_omp_set_body (stmt, NULL);
8955 
8956   gimple_bind_add_stmt (bind, gimple_build_label (lab));
8957 
8958   gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
8959 
8960   pop_gimplify_context (bind);
8961 
8962   gimple_bind_append_vars (bind, ctx->block_vars);
8963   BLOCK_VARS (block) = ctx->block_vars;
8964 }
8965 
8966 /* Helper function for lower_omp_task_reductions.  For a specific PASS
8967    find out the current clause it should be processed, or return false
8968    if all have been processed already.  */
8969 
8970 static inline bool
omp_task_reduction_iterate(int pass,enum tree_code code,enum omp_clause_code ccode,tree * c,tree * decl,tree * type,tree * next)8971 omp_task_reduction_iterate (int pass, enum tree_code code,
8972 			    enum omp_clause_code ccode, tree *c, tree *decl,
8973 			    tree *type, tree *next)
8974 {
8975   for (; *c; *c = omp_find_clause (OMP_CLAUSE_CHAIN (*c), ccode))
8976     {
8977       if (ccode == OMP_CLAUSE_REDUCTION
8978 	  && code != OMP_TASKLOOP
8979 	  && !OMP_CLAUSE_REDUCTION_TASK (*c))
8980 	continue;
8981       *decl = OMP_CLAUSE_DECL (*c);
8982       *type = TREE_TYPE (*decl);
8983       if (TREE_CODE (*decl) == MEM_REF)
8984 	{
8985 	  if (pass != 1)
8986 	    continue;
8987 	}
8988       else
8989 	{
8990 	  if (omp_privatize_by_reference (*decl))
8991 	    *type = TREE_TYPE (*type);
8992 	  if (pass != (!TREE_CONSTANT (TYPE_SIZE_UNIT (*type))))
8993 	    continue;
8994 	}
8995       *next = omp_find_clause (OMP_CLAUSE_CHAIN (*c), ccode);
8996       return true;
8997     }
8998   *decl = NULL_TREE;
8999   *type = NULL_TREE;
9000   *next = NULL_TREE;
9001   return false;
9002 }
9003 
/* Lower task_reduction and reduction clauses (the latter unless CODE is
   OMP_TASKGROUP only with task modifier).  Register the mapping of those
   in the START sequence; reduce and unregister them in the END
   sequence.  */
9008 static void
lower_omp_task_reductions(omp_context * ctx,enum tree_code code,tree clauses,gimple_seq * start,gimple_seq * end)9009 lower_omp_task_reductions (omp_context *ctx, enum tree_code code, tree clauses,
9010 			   gimple_seq *start, gimple_seq *end)
9011 {
9012   enum omp_clause_code ccode
9013     = (code == OMP_TASKGROUP
9014        ? OMP_CLAUSE_TASK_REDUCTION : OMP_CLAUSE_REDUCTION);
9015   tree cancellable = NULL_TREE;
9016   clauses = omp_task_reductions_find_first (clauses, code, ccode);
9017   if (clauses == NULL_TREE)
9018     return;
9019   if (code == OMP_FOR || code == OMP_SECTIONS || code == OMP_SCOPE)
9020     {
9021       for (omp_context *outer = ctx->outer; outer; outer = outer->outer)
9022 	if (gimple_code (outer->stmt) == GIMPLE_OMP_PARALLEL
9023 	    && outer->cancellable)
9024 	  {
9025 	    cancellable = error_mark_node;
9026 	    break;
9027 	  }
9028 	else if (gimple_code (outer->stmt) != GIMPLE_OMP_TASKGROUP
9029 		 && gimple_code (outer->stmt) != GIMPLE_OMP_SCOPE)
9030 	  break;
9031     }
9032   tree record_type = lang_hooks.types.make_type (RECORD_TYPE);
9033   tree *last = &TYPE_FIELDS (record_type);
9034   unsigned cnt = 0;
9035   if (cancellable)
9036     {
9037       tree field = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
9038 			       ptr_type_node);
9039       tree ifield = build_decl (UNKNOWN_LOCATION, FIELD_DECL, NULL_TREE,
9040 				integer_type_node);
9041       *last = field;
9042       DECL_CHAIN (field) = ifield;
9043       last = &DECL_CHAIN (ifield);
9044       DECL_CONTEXT (field) = record_type;
9045       if (TYPE_ALIGN (record_type) < DECL_ALIGN (field))
9046 	SET_TYPE_ALIGN (record_type, DECL_ALIGN (field));
9047       DECL_CONTEXT (ifield) = record_type;
9048       if (TYPE_ALIGN (record_type) < DECL_ALIGN (ifield))
9049 	SET_TYPE_ALIGN (record_type, DECL_ALIGN (ifield));
9050     }
9051   for (int pass = 0; pass < 2; pass++)
9052     {
9053       tree decl, type, next;
9054       for (tree c = clauses;
9055 	   omp_task_reduction_iterate (pass, code, ccode,
9056 				       &c, &decl, &type, &next); c = next)
9057 	{
9058 	  ++cnt;
9059 	  tree new_type = type;
9060 	  if (ctx->outer)
9061 	    new_type = remap_type (type, &ctx->outer->cb);
9062 	  tree field
9063 	    = build_decl (OMP_CLAUSE_LOCATION (c), FIELD_DECL,
9064 			  DECL_P (decl) ? DECL_NAME (decl) : NULL_TREE,
9065 			  new_type);
9066 	  if (DECL_P (decl) && type == TREE_TYPE (decl))
9067 	    {
9068 	      SET_DECL_ALIGN (field, DECL_ALIGN (decl));
9069 	      DECL_USER_ALIGN (field) = DECL_USER_ALIGN (decl);
9070 	      TREE_THIS_VOLATILE (field) = TREE_THIS_VOLATILE (decl);
9071 	    }
9072 	  else
9073 	    SET_DECL_ALIGN (field, TYPE_ALIGN (type));
9074 	  DECL_CONTEXT (field) = record_type;
9075 	  if (TYPE_ALIGN (record_type) < DECL_ALIGN (field))
9076 	    SET_TYPE_ALIGN (record_type, DECL_ALIGN (field));
9077 	  *last = field;
9078 	  last = &DECL_CHAIN (field);
9079 	  tree bfield
9080 	    = build_decl (OMP_CLAUSE_LOCATION (c), FIELD_DECL, NULL_TREE,
9081 			  boolean_type_node);
9082 	  DECL_CONTEXT (bfield) = record_type;
9083 	  if (TYPE_ALIGN (record_type) < DECL_ALIGN (bfield))
9084 	    SET_TYPE_ALIGN (record_type, DECL_ALIGN (bfield));
9085 	  *last = bfield;
9086 	  last = &DECL_CHAIN (bfield);
9087 	}
9088     }
9089   *last = NULL_TREE;
9090   layout_type (record_type);
9091 
9092   /* Build up an array which registers with the runtime all the reductions
9093      and deregisters them at the end.  Format documented in libgomp/task.c.  */
9094   tree atype = build_array_type_nelts (pointer_sized_int_node, 7 + cnt * 3);
9095   tree avar = create_tmp_var_raw (atype);
9096   gimple_add_tmp_var (avar);
9097   TREE_ADDRESSABLE (avar) = 1;
9098   tree r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_zero_node,
9099 		   NULL_TREE, NULL_TREE);
9100   tree t = build_int_cst (pointer_sized_int_node, cnt);
9101   gimple_seq_add_stmt (start, gimple_build_assign (r, t));
9102   gimple_seq seq = NULL;
9103   tree sz = fold_convert (pointer_sized_int_node,
9104 			  TYPE_SIZE_UNIT (record_type));
9105   int cachesz = 64;
9106   sz = fold_build2 (PLUS_EXPR, pointer_sized_int_node, sz,
9107 		    build_int_cst (pointer_sized_int_node, cachesz - 1));
9108   sz = fold_build2 (BIT_AND_EXPR, pointer_sized_int_node, sz,
9109 		    build_int_cst (pointer_sized_int_node, ~(cachesz - 1)));
9110   ctx->task_reductions.create (1 + cnt);
9111   ctx->task_reduction_map = new hash_map<tree, unsigned>;
9112   ctx->task_reductions.quick_push (TREE_CODE (sz) == INTEGER_CST
9113 				   ? sz : NULL_TREE);
9114   sz = force_gimple_operand (sz, &seq, true, NULL_TREE);
9115   gimple_seq_add_seq (start, seq);
9116   r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_one_node,
9117 	      NULL_TREE, NULL_TREE);
9118   gimple_seq_add_stmt (start, gimple_build_assign (r, sz));
9119   r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (2),
9120 	      NULL_TREE, NULL_TREE);
9121   t = build_int_cst (pointer_sized_int_node,
9122 		     MAX (TYPE_ALIGN_UNIT (record_type), (unsigned) cachesz));
9123   gimple_seq_add_stmt (start, gimple_build_assign (r, t));
9124   r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (3),
9125 	      NULL_TREE, NULL_TREE);
9126   t = build_int_cst (pointer_sized_int_node, -1);
9127   gimple_seq_add_stmt (start, gimple_build_assign (r, t));
9128   r = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (4),
9129 	      NULL_TREE, NULL_TREE);
9130   t = build_int_cst (pointer_sized_int_node, 0);
9131   gimple_seq_add_stmt (start, gimple_build_assign (r, t));
9132 
9133   /* In end, build a loop that iterates from 0 to < omp_get_num_threads ()
9134      and for each task reduction checks a bool right after the private variable
9135      within that thread's chunk; if the bool is clear, it hasn't been
9136      initialized and thus isn't going to be reduced nor destructed, otherwise
9137      reduce and destruct it.  */
9138   tree idx = create_tmp_var (size_type_node);
9139   gimple_seq_add_stmt (end, gimple_build_assign (idx, size_zero_node));
9140   tree num_thr_sz = create_tmp_var (size_type_node);
9141   tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
9142   tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
9143   tree lab3 = NULL_TREE, lab7 = NULL_TREE;
9144   gimple *g;
9145   if (code == OMP_FOR || code == OMP_SECTIONS || code == OMP_SCOPE)
9146     {
9147       /* For worksharing constructs or scope, only perform it in the master
9148 	 thread, with the exception of cancelled implicit barriers - then only
9149 	 handle the current thread.  */
9150       tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
9151       t = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
9152       tree thr_num = create_tmp_var (integer_type_node);
9153       g = gimple_build_call (t, 0);
9154       gimple_call_set_lhs (g, thr_num);
9155       gimple_seq_add_stmt (end, g);
9156       if (cancellable)
9157 	{
9158 	  tree c;
9159 	  tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
9160 	  tree lab6 = create_artificial_label (UNKNOWN_LOCATION);
9161 	  lab3 = create_artificial_label (UNKNOWN_LOCATION);
9162 	  if (code == OMP_FOR)
9163 	    c = gimple_omp_for_clauses (ctx->stmt);
9164 	  else if (code == OMP_SECTIONS)
9165 	    c = gimple_omp_sections_clauses (ctx->stmt);
9166 	  else /* if (code == OMP_SCOPE) */
9167 	    c = gimple_omp_scope_clauses (ctx->stmt);
9168 	  c = OMP_CLAUSE_DECL (omp_find_clause (c, OMP_CLAUSE__REDUCTEMP_));
9169 	  cancellable = c;
9170 	  g = gimple_build_cond (NE_EXPR, c, build_zero_cst (TREE_TYPE (c)),
9171 				 lab5, lab6);
9172 	  gimple_seq_add_stmt (end, g);
9173 	  gimple_seq_add_stmt (end, gimple_build_label (lab5));
9174 	  g = gimple_build_assign (idx, NOP_EXPR, thr_num);
9175 	  gimple_seq_add_stmt (end, g);
9176 	  g = gimple_build_assign (num_thr_sz, PLUS_EXPR, idx,
9177 				   build_one_cst (TREE_TYPE (idx)));
9178 	  gimple_seq_add_stmt (end, g);
9179 	  gimple_seq_add_stmt (end, gimple_build_goto (lab3));
9180 	  gimple_seq_add_stmt (end, gimple_build_label (lab6));
9181 	}
9182       g = gimple_build_cond (NE_EXPR, thr_num, integer_zero_node, lab2, lab4);
9183       gimple_seq_add_stmt (end, g);
9184       gimple_seq_add_stmt (end, gimple_build_label (lab4));
9185     }
9186   if (code != OMP_PARALLEL)
9187     {
9188       t = builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS);
9189       tree num_thr = create_tmp_var (integer_type_node);
9190       g = gimple_build_call (t, 0);
9191       gimple_call_set_lhs (g, num_thr);
9192       gimple_seq_add_stmt (end, g);
9193       g = gimple_build_assign (num_thr_sz, NOP_EXPR, num_thr);
9194       gimple_seq_add_stmt (end, g);
9195       if (cancellable)
9196 	gimple_seq_add_stmt (end, gimple_build_label (lab3));
9197     }
9198   else
9199     {
9200       tree c = omp_find_clause (gimple_omp_parallel_clauses (ctx->stmt),
9201 				OMP_CLAUSE__REDUCTEMP_);
9202       t = fold_convert (pointer_sized_int_node, OMP_CLAUSE_DECL (c));
9203       t = fold_convert (size_type_node, t);
9204       gimplify_assign (num_thr_sz, t, end);
9205     }
9206   t = build4 (ARRAY_REF, pointer_sized_int_node, avar, size_int (2),
9207 	      NULL_TREE, NULL_TREE);
9208   tree data = create_tmp_var (pointer_sized_int_node);
9209   gimple_seq_add_stmt (end, gimple_build_assign (data, t));
9210   if (code == OMP_TASKLOOP)
9211     {
9212       lab7 = create_artificial_label (UNKNOWN_LOCATION);
9213       g = gimple_build_cond (NE_EXPR, data,
9214 			     build_zero_cst (pointer_sized_int_node),
9215 			     lab1, lab7);
9216       gimple_seq_add_stmt (end, g);
9217     }
9218   gimple_seq_add_stmt (end, gimple_build_label (lab1));
9219   tree ptr;
9220   if (TREE_CODE (TYPE_SIZE_UNIT (record_type)) == INTEGER_CST)
9221     ptr = create_tmp_var (build_pointer_type (record_type));
9222   else
9223     ptr = create_tmp_var (ptr_type_node);
9224   gimple_seq_add_stmt (end, gimple_build_assign (ptr, NOP_EXPR, data));
9225 
9226   tree field = TYPE_FIELDS (record_type);
9227   cnt = 0;
9228   if (cancellable)
9229     field = DECL_CHAIN (DECL_CHAIN (field));
9230   for (int pass = 0; pass < 2; pass++)
9231     {
9232       tree decl, type, next;
9233       for (tree c = clauses;
9234 	   omp_task_reduction_iterate (pass, code, ccode,
9235 				       &c, &decl, &type, &next); c = next)
9236 	{
9237 	  tree var = decl, ref;
9238 	  if (TREE_CODE (decl) == MEM_REF)
9239 	    {
9240 	      var = TREE_OPERAND (var, 0);
9241 	      if (TREE_CODE (var) == POINTER_PLUS_EXPR)
9242 		var = TREE_OPERAND (var, 0);
9243 	      tree v = var;
9244 	      if (TREE_CODE (var) == ADDR_EXPR)
9245 		var = TREE_OPERAND (var, 0);
9246 	      else if (TREE_CODE (var) == INDIRECT_REF)
9247 		var = TREE_OPERAND (var, 0);
9248 	      tree orig_var = var;
9249 	      if (is_variable_sized (var))
9250 		{
9251 		  gcc_assert (DECL_HAS_VALUE_EXPR_P (var));
9252 		  var = DECL_VALUE_EXPR (var);
9253 		  gcc_assert (TREE_CODE (var) == INDIRECT_REF);
9254 		  var = TREE_OPERAND (var, 0);
9255 		  gcc_assert (DECL_P (var));
9256 		}
9257 	      t = ref = maybe_lookup_decl_in_outer_ctx (var, ctx);
9258 	      if (orig_var != var)
9259 		gcc_assert (TREE_CODE (v) == ADDR_EXPR);
9260 	      else if (TREE_CODE (v) == ADDR_EXPR)
9261 		t = build_fold_addr_expr (t);
9262 	      else if (TREE_CODE (v) == INDIRECT_REF)
9263 		t = build_fold_indirect_ref (t);
9264 	      if (TREE_CODE (TREE_OPERAND (decl, 0)) == POINTER_PLUS_EXPR)
9265 		{
9266 		  tree b = TREE_OPERAND (TREE_OPERAND (decl, 0), 1);
9267 		  b = maybe_lookup_decl_in_outer_ctx (b, ctx);
9268 		  t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, b);
9269 		}
9270 	      if (!integer_zerop (TREE_OPERAND (decl, 1)))
9271 		t = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t,
9272 				 fold_convert (size_type_node,
9273 					       TREE_OPERAND (decl, 1)));
9274 	    }
9275 	  else
9276 	    {
9277 	      t = ref = maybe_lookup_decl_in_outer_ctx (var, ctx);
9278 	      if (!omp_privatize_by_reference (decl))
9279 		t = build_fold_addr_expr (t);
9280 	    }
9281 	  t = fold_convert (pointer_sized_int_node, t);
9282 	  seq = NULL;
9283 	  t = force_gimple_operand (t, &seq, true, NULL_TREE);
9284 	  gimple_seq_add_seq (start, seq);
9285 	  r = build4 (ARRAY_REF, pointer_sized_int_node, avar,
9286 		      size_int (7 + cnt * 3), NULL_TREE, NULL_TREE);
9287 	  gimple_seq_add_stmt (start, gimple_build_assign (r, t));
9288 	  t = unshare_expr (byte_position (field));
9289 	  t = fold_convert (pointer_sized_int_node, t);
9290 	  ctx->task_reduction_map->put (c, cnt);
9291 	  ctx->task_reductions.quick_push (TREE_CODE (t) == INTEGER_CST
9292 					   ? t : NULL_TREE);
9293 	  seq = NULL;
9294 	  t = force_gimple_operand (t, &seq, true, NULL_TREE);
9295 	  gimple_seq_add_seq (start, seq);
9296 	  r = build4 (ARRAY_REF, pointer_sized_int_node, avar,
9297 		      size_int (7 + cnt * 3 + 1), NULL_TREE, NULL_TREE);
9298 	  gimple_seq_add_stmt (start, gimple_build_assign (r, t));
9299 
9300 	  tree bfield = DECL_CHAIN (field);
9301 	  tree cond;
9302 	  if (code == OMP_PARALLEL
9303 	      || code == OMP_FOR
9304 	      || code == OMP_SECTIONS
9305 	      || code == OMP_SCOPE)
9306 	    /* In parallel, worksharing or scope all threads unconditionally
9307 	       initialize all their task reduction private variables.  */
9308 	    cond = boolean_true_node;
9309 	  else if (TREE_TYPE (ptr) == ptr_type_node)
9310 	    {
9311 	      cond = build2 (POINTER_PLUS_EXPR, ptr_type_node, ptr,
9312 			     unshare_expr (byte_position (bfield)));
9313 	      seq = NULL;
9314 	      cond = force_gimple_operand (cond, &seq, true, NULL_TREE);
9315 	      gimple_seq_add_seq (end, seq);
9316 	      tree pbool = build_pointer_type (TREE_TYPE (bfield));
9317 	      cond = build2 (MEM_REF, TREE_TYPE (bfield), cond,
9318 			     build_int_cst (pbool, 0));
9319 	    }
9320 	  else
9321 	    cond = build3 (COMPONENT_REF, TREE_TYPE (bfield),
9322 			   build_simple_mem_ref (ptr), bfield, NULL_TREE);
9323 	  tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
9324 	  tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
9325 	  tree condv = create_tmp_var (boolean_type_node);
9326 	  gimple_seq_add_stmt (end, gimple_build_assign (condv, cond));
9327 	  g = gimple_build_cond (NE_EXPR, condv, boolean_false_node,
9328 				 lab3, lab4);
9329 	  gimple_seq_add_stmt (end, g);
9330 	  gimple_seq_add_stmt (end, gimple_build_label (lab3));
9331 	  if (cancellable && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) == NULL_TREE)
9332 	    {
9333 	      /* If this reduction doesn't need destruction and parallel
9334 		 has been cancelled, there is nothing to do for this
9335 		 reduction, so jump around the merge operation.  */
9336 	      tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
9337 	      g = gimple_build_cond (NE_EXPR, cancellable,
9338 				     build_zero_cst (TREE_TYPE (cancellable)),
9339 				     lab4, lab5);
9340 	      gimple_seq_add_stmt (end, g);
9341 	      gimple_seq_add_stmt (end, gimple_build_label (lab5));
9342 	    }
9343 
9344 	  tree new_var;
9345 	  if (TREE_TYPE (ptr) == ptr_type_node)
9346 	    {
9347 	      new_var = build2 (POINTER_PLUS_EXPR, ptr_type_node, ptr,
9348 				unshare_expr (byte_position (field)));
9349 	      seq = NULL;
9350 	      new_var = force_gimple_operand (new_var, &seq, true, NULL_TREE);
9351 	      gimple_seq_add_seq (end, seq);
9352 	      tree pbool = build_pointer_type (TREE_TYPE (field));
9353 	      new_var = build2 (MEM_REF, TREE_TYPE (field), new_var,
9354 				build_int_cst (pbool, 0));
9355 	    }
9356 	  else
9357 	    new_var = build3 (COMPONENT_REF, TREE_TYPE (field),
9358 			      build_simple_mem_ref (ptr), field, NULL_TREE);
9359 
9360 	  enum tree_code rcode = OMP_CLAUSE_REDUCTION_CODE (c);
9361 	  if (TREE_CODE (decl) != MEM_REF
9362 	      && omp_privatize_by_reference (decl))
9363 	    ref = build_simple_mem_ref (ref);
9364 	  /* reduction(-:var) sums up the partial results, so it acts
9365 	     identically to reduction(+:var).  */
9366 	  if (rcode == MINUS_EXPR)
9367 	    rcode = PLUS_EXPR;
9368 	  if (TREE_CODE (decl) == MEM_REF)
9369 	    {
9370 	      tree type = TREE_TYPE (new_var);
9371 	      tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
9372 	      tree i = create_tmp_var (TREE_TYPE (v));
9373 	      tree ptype = build_pointer_type (TREE_TYPE (type));
9374 	      if (DECL_P (v))
9375 		{
9376 		  v = maybe_lookup_decl_in_outer_ctx (v, ctx);
9377 		  tree vv = create_tmp_var (TREE_TYPE (v));
9378 		  gimplify_assign (vv, v, start);
9379 		  v = vv;
9380 		}
9381 	      ref = build4 (ARRAY_REF, pointer_sized_int_node, avar,
9382 			    size_int (7 + cnt * 3), NULL_TREE, NULL_TREE);
9383 	      new_var = build_fold_addr_expr (new_var);
9384 	      new_var = fold_convert (ptype, new_var);
9385 	      ref = fold_convert (ptype, ref);
9386 	      tree m = create_tmp_var (ptype);
9387 	      gimplify_assign (m, new_var, end);
9388 	      new_var = m;
9389 	      m = create_tmp_var (ptype);
9390 	      gimplify_assign (m, ref, end);
9391 	      ref = m;
9392 	      gimplify_assign (i, build_int_cst (TREE_TYPE (v), 0), end);
9393 	      tree body = create_artificial_label (UNKNOWN_LOCATION);
9394 	      tree endl = create_artificial_label (UNKNOWN_LOCATION);
9395 	      gimple_seq_add_stmt (end, gimple_build_label (body));
9396 	      tree priv = build_simple_mem_ref (new_var);
9397 	      tree out = build_simple_mem_ref (ref);
9398 	      if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
9399 		{
9400 		  tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
9401 		  tree decl_placeholder
9402 		    = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c);
9403 		  tree lab6 = NULL_TREE;
9404 		  if (cancellable)
9405 		    {
9406 		      /* If this reduction needs destruction and parallel
9407 			 has been cancelled, jump around the merge operation
9408 			 to the destruction.  */
9409 		      tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
9410 		      lab6 = create_artificial_label (UNKNOWN_LOCATION);
9411 		      tree zero = build_zero_cst (TREE_TYPE (cancellable));
9412 		      g = gimple_build_cond (NE_EXPR, cancellable, zero,
9413 					     lab6, lab5);
9414 		      gimple_seq_add_stmt (end, g);
9415 		      gimple_seq_add_stmt (end, gimple_build_label (lab5));
9416 		    }
9417 		  SET_DECL_VALUE_EXPR (placeholder, out);
9418 		  DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
9419 		  SET_DECL_VALUE_EXPR (decl_placeholder, priv);
9420 		  DECL_HAS_VALUE_EXPR_P (decl_placeholder) = 1;
9421 		  lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
9422 		  gimple_seq_add_seq (end,
9423 				      OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
9424 		  OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
9425 		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
9426 		    {
9427 		      OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
9428 		      OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c) = NULL;
9429 		    }
9430 		  if (cancellable)
9431 		    gimple_seq_add_stmt (end, gimple_build_label (lab6));
9432 		  tree x = lang_hooks.decls.omp_clause_dtor (c, priv);
9433 		  if (x)
9434 		    {
9435 		      gimple_seq tseq = NULL;
9436 		      gimplify_stmt (&x, &tseq);
9437 		      gimple_seq_add_seq (end, tseq);
9438 		    }
9439 		}
9440 	      else
9441 		{
9442 		  tree x = build2 (rcode, TREE_TYPE (out), out, priv);
9443 		  out = unshare_expr (out);
9444 		  gimplify_assign (out, x, end);
9445 		}
9446 	      gimple *g
9447 		= gimple_build_assign (new_var, POINTER_PLUS_EXPR, new_var,
9448 				       TYPE_SIZE_UNIT (TREE_TYPE (type)));
9449 	      gimple_seq_add_stmt (end, g);
9450 	      g = gimple_build_assign (ref, POINTER_PLUS_EXPR, ref,
9451 				       TYPE_SIZE_UNIT (TREE_TYPE (type)));
9452 	      gimple_seq_add_stmt (end, g);
9453 	      g = gimple_build_assign (i, PLUS_EXPR, i,
9454 				       build_int_cst (TREE_TYPE (i), 1));
9455 	      gimple_seq_add_stmt (end, g);
9456 	      g = gimple_build_cond (LE_EXPR, i, v, body, endl);
9457 	      gimple_seq_add_stmt (end, g);
9458 	      gimple_seq_add_stmt (end, gimple_build_label (endl));
9459 	    }
9460 	  else if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
9461 	    {
9462 	      tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
9463 	      tree oldv = NULL_TREE;
9464 	      tree lab6 = NULL_TREE;
9465 	      if (cancellable)
9466 		{
9467 		  /* If this reduction needs destruction and parallel
9468 		     has been cancelled, jump around the merge operation
9469 		     to the destruction.  */
9470 		  tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
9471 		  lab6 = create_artificial_label (UNKNOWN_LOCATION);
9472 		  tree zero = build_zero_cst (TREE_TYPE (cancellable));
9473 		  g = gimple_build_cond (NE_EXPR, cancellable, zero,
9474 					 lab6, lab5);
9475 		  gimple_seq_add_stmt (end, g);
9476 		  gimple_seq_add_stmt (end, gimple_build_label (lab5));
9477 		}
9478 	      if (omp_privatize_by_reference (decl)
9479 		  && !useless_type_conversion_p (TREE_TYPE (placeholder),
9480 						 TREE_TYPE (ref)))
9481 		ref = build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c), ref);
9482 	      ref = build_fold_addr_expr_loc (OMP_CLAUSE_LOCATION (c), ref);
9483 	      tree refv = create_tmp_var (TREE_TYPE (ref));
9484 	      gimplify_assign (refv, ref, end);
9485 	      ref = build_simple_mem_ref_loc (OMP_CLAUSE_LOCATION (c), refv);
9486 	      SET_DECL_VALUE_EXPR (placeholder, ref);
9487 	      DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
9488 	      tree d = maybe_lookup_decl (decl, ctx);
9489 	      gcc_assert (d);
9490 	      if (DECL_HAS_VALUE_EXPR_P (d))
9491 		oldv = DECL_VALUE_EXPR (d);
9492 	      if (omp_privatize_by_reference (var))
9493 		{
9494 		  tree v = fold_convert (TREE_TYPE (d),
9495 					 build_fold_addr_expr (new_var));
9496 		  SET_DECL_VALUE_EXPR (d, v);
9497 		}
9498 	      else
9499 		SET_DECL_VALUE_EXPR (d, new_var);
9500 	      DECL_HAS_VALUE_EXPR_P (d) = 1;
9501 	      lower_omp (&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c), ctx);
9502 	      if (oldv)
9503 		SET_DECL_VALUE_EXPR (d, oldv);
9504 	      else
9505 		{
9506 		  SET_DECL_VALUE_EXPR (d, NULL_TREE);
9507 		  DECL_HAS_VALUE_EXPR_P (d) = 0;
9508 		}
9509 	      gimple_seq_add_seq (end, OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
9510 	      OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
9511 	      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
9512 		OMP_CLAUSE_REDUCTION_PLACEHOLDER (c) = NULL;
9513 	      if (cancellable)
9514 		gimple_seq_add_stmt (end, gimple_build_label (lab6));
9515 	      tree x = lang_hooks.decls.omp_clause_dtor (c, new_var);
9516 	      if (x)
9517 		{
9518 		  gimple_seq tseq = NULL;
9519 		  gimplify_stmt (&x, &tseq);
9520 		  gimple_seq_add_seq (end, tseq);
9521 		}
9522 	    }
9523 	  else
9524 	    {
9525 	      tree x = build2 (rcode, TREE_TYPE (ref), ref, new_var);
9526 	      ref = unshare_expr (ref);
9527 	      gimplify_assign (ref, x, end);
9528 	    }
9529 	  gimple_seq_add_stmt (end, gimple_build_label (lab4));
9530 	  ++cnt;
9531 	  field = DECL_CHAIN (bfield);
9532 	}
9533     }
9534 
9535   if (code == OMP_TASKGROUP)
9536     {
9537       t = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_REGISTER);
9538       g = gimple_build_call (t, 1, build_fold_addr_expr (avar));
9539       gimple_seq_add_stmt (start, g);
9540     }
9541   else
9542     {
9543       tree c;
9544       if (code == OMP_FOR)
9545 	c = gimple_omp_for_clauses (ctx->stmt);
9546       else if (code == OMP_SECTIONS)
9547 	c = gimple_omp_sections_clauses (ctx->stmt);
9548       else if (code == OMP_SCOPE)
9549 	c = gimple_omp_scope_clauses (ctx->stmt);
9550       else
9551 	c = gimple_omp_taskreg_clauses (ctx->stmt);
9552       c = omp_find_clause (c, OMP_CLAUSE__REDUCTEMP_);
9553       t = fold_convert (TREE_TYPE (OMP_CLAUSE_DECL (c)),
9554 			build_fold_addr_expr (avar));
9555       gimplify_assign (OMP_CLAUSE_DECL (c), t, start);
9556     }
9557 
9558   gimple_seq_add_stmt (end, gimple_build_assign (data, PLUS_EXPR, data, sz));
9559   gimple_seq_add_stmt (end, gimple_build_assign (idx, PLUS_EXPR, idx,
9560 						 size_one_node));
9561   g = gimple_build_cond (NE_EXPR, idx, num_thr_sz, lab1, lab2);
9562   gimple_seq_add_stmt (end, g);
9563   gimple_seq_add_stmt (end, gimple_build_label (lab2));
9564   if (code == OMP_FOR || code == OMP_SECTIONS || code == OMP_SCOPE)
9565     {
9566       enum built_in_function bfn
9567 	= BUILT_IN_GOMP_WORKSHARE_TASK_REDUCTION_UNREGISTER;
9568       t = builtin_decl_explicit (bfn);
9569       tree c_bool_type = TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (t)));
9570       tree arg;
9571       if (cancellable)
9572 	{
9573 	  arg = create_tmp_var (c_bool_type);
9574 	  gimple_seq_add_stmt (end, gimple_build_assign (arg, NOP_EXPR,
9575 							 cancellable));
9576 	}
9577       else
9578 	arg = build_int_cst (c_bool_type, 0);
9579       g = gimple_build_call (t, 1, arg);
9580     }
9581   else
9582     {
9583       t = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_REDUCTION_UNREGISTER);
9584       g = gimple_build_call (t, 1, build_fold_addr_expr (avar));
9585     }
9586   gimple_seq_add_stmt (end, g);
9587   if (lab7)
9588     gimple_seq_add_stmt (end, gimple_build_label (lab7));
9589   t = build_constructor (atype, NULL);
9590   TREE_THIS_VOLATILE (t) = 1;
9591   gimple_seq_add_stmt (end, gimple_build_assign (avar, t));
9592 }
9593 
9594 /* Expand code for an OpenMP taskgroup directive.  */
9595 
9596 static void
lower_omp_taskgroup(gimple_stmt_iterator * gsi_p,omp_context * ctx)9597 lower_omp_taskgroup (gimple_stmt_iterator *gsi_p, omp_context *ctx)
9598 {
9599   gimple *stmt = gsi_stmt (*gsi_p);
9600   gcall *x;
9601   gbind *bind;
9602   gimple_seq dseq = NULL;
9603   tree block = make_node (BLOCK);
9604 
9605   bind = gimple_build_bind (NULL, NULL, block);
9606   gsi_replace (gsi_p, bind, true);
9607   gimple_bind_add_stmt (bind, stmt);
9608 
9609   push_gimplify_context ();
9610 
9611   x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_START),
9612 			 0);
9613   gimple_bind_add_stmt (bind, x);
9614 
9615   lower_omp_task_reductions (ctx, OMP_TASKGROUP,
9616 			     gimple_omp_taskgroup_clauses (stmt),
9617 			     gimple_bind_body_ptr (bind), &dseq);
9618 
9619   lower_omp (gimple_omp_body_ptr (stmt), ctx);
9620   gimple_bind_add_seq (bind, gimple_omp_body (stmt));
9621   gimple_omp_set_body (stmt, NULL);
9622 
9623   gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
9624   gimple_bind_add_seq (bind, dseq);
9625 
9626   pop_gimplify_context (bind);
9627 
9628   gimple_bind_append_vars (bind, ctx->block_vars);
9629   BLOCK_VARS (block) = ctx->block_vars;
9630 }
9631 
9632 
/* Fold the OMP_ORDERED_CLAUSES for the OMP_ORDERED in STMT if possible.

   Adjacent `#pragma omp ordered depend(sink:...)' constructs are first
   merged into a single construct, then all their sink vectors are
   folded into one canonical vector (GCD of the first elements, lexical
   minimum of the remaining ones) as described in the big block comment
   in the middle of this function.  */

static void
lower_omp_ordered_clauses (gimple_stmt_iterator *gsi_p, gomp_ordered *ord_stmt,
			   omp_context *ctx)
{
  struct omp_for_data fd;
  /* Only applies to an ordered construct nested directly in an OMP
     loop.  */
  if (!ctx->outer || gimple_code (ctx->outer->stmt) != GIMPLE_OMP_FOR)
    return;

  unsigned int len = gimple_omp_for_collapse (ctx->outer->stmt);
  struct omp_for_data_loop *loops = XALLOCAVEC (struct omp_for_data_loop, len);
  omp_extract_for_data (as_a <gomp_for *> (ctx->outer->stmt), &fd, loops);
  /* Nothing to do unless the enclosing loop has an ordered(n) clause.  */
  if (!fd.ordered)
    return;

  tree *list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
  tree c = gimple_omp_ordered_clauses (ord_stmt);
  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
      && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
    {
      /* Merge depend clauses from multiple adjacent
	 #pragma omp ordered depend(sink:...) constructs
	 into one #pragma omp ordered depend(sink:...), so that
	 we can optimize them together.  */
      gimple_stmt_iterator gsi = *gsi_p;
      gsi_next (&gsi);
      while (!gsi_end_p (gsi))
	{
	  gimple *stmt = gsi_stmt (gsi);
	  /* Skip over debug statements and nops between the ordered
	     constructs.  */
	  if (is_gimple_debug (stmt)
	      || gimple_code (stmt) == GIMPLE_NOP)
	    {
	      gsi_next (&gsi);
	      continue;
	    }
	  if (gimple_code (stmt) != GIMPLE_OMP_ORDERED)
	    break;
	  gomp_ordered *ord_stmt2 = as_a <gomp_ordered *> (stmt);
	  c = gimple_omp_ordered_clauses (ord_stmt2);
	  /* Stop at the first following ordered construct that is not
	     a depend(sink:...) one.  */
	  if (c == NULL_TREE
	      || OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND
	      || OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
	    break;
	  /* Splice that construct's clauses onto the end of our clause
	     chain and remove the construct itself.  */
	  while (*list_p)
	    list_p = &OMP_CLAUSE_CHAIN (*list_p);
	  *list_p = c;
	  gsi_remove (&gsi, true);
	}
    }

  /* Canonicalize sink dependence clauses into one folded clause if
     possible.

     The basic algorithm is to create a sink vector whose first
     element is the GCD of all the first elements, and whose remaining
     elements are the minimum of the subsequent columns.

     We ignore dependence vectors whose first element is zero because
     such dependencies are known to be executed by the same thread.

     We take into account the direction of the loop, so a minimum
     becomes a maximum if the loop is iterating forwards.  We also
     ignore sink clauses where the loop direction is unknown, or where
     the offsets are clearly invalid because they are not a multiple
     of the loop increment.

     For example:

	#pragma omp for ordered(2)
	for (i=0; i < N; ++i)
	  for (j=0; j < M; ++j)
	    {
	      #pragma omp ordered \
		depend(sink:i-8,j-2) \
		depend(sink:i,j-1) \	// Completely ignored because i+0.
		depend(sink:i-4,j-3) \
		depend(sink:i-6,j-4)
	      #pragma omp ordered depend(source)
	    }

     Folded clause is:

	depend(sink:-gcd(8,4,6),-min(2,3,4))
	  -or-
	depend(sink:-2,-2)
  */

  /* FIXME: Computing GCD's where the first element is zero is
     non-trivial in the presence of collapsed loops.  Do this later.  */
  if (fd.collapse > 1)
    return;

  wide_int *folded_deps = XALLOCAVEC (wide_int, 2 * len - 1);

  /* wide_int is not a POD so it must be default-constructed.  */
  for (unsigned i = 0; i != 2 * len - 1; ++i)
    new (static_cast<void*>(folded_deps + i)) wide_int ();

  /* The clause whose vector is the current folding candidate; its
     TREE_PURPOSE is rewritten at the end if folding succeeded.  */
  tree folded_dep = NULL_TREE;
  /* TRUE if the first dimension's offset is negative.  */
  bool neg_offset_p = false;

  list_p = gimple_omp_ordered_clauses_ptr (ord_stmt);
  unsigned int i;
  /* Walk all (merged) clauses, folding sink vectors into FOLDED_DEPS
     and removing the ones that have been folded in or are invalid.  */
  while ((c = *list_p) != NULL)
    {
      bool remove = false;

      gcc_assert (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND);
      if (OMP_CLAUSE_DEPEND_KIND (c) != OMP_CLAUSE_DEPEND_SINK)
	goto next_ordered_clause;

      tree vec;
      for (vec = OMP_CLAUSE_DECL (c), i = 0;
	   vec && TREE_CODE (vec) == TREE_LIST;
	   vec = TREE_CHAIN (vec), ++i)
	{
	  gcc_assert (i < len);

	  /* omp_extract_for_data has canonicalized the condition.  */
	  gcc_assert (fd.loops[i].cond_code == LT_EXPR
		      || fd.loops[i].cond_code == GT_EXPR);
	  bool forward = fd.loops[i].cond_code == LT_EXPR;
	  bool maybe_lexically_later = true;

	  /* While the committee makes up its mind, bail if we have any
	     non-constant steps.  */
	  if (TREE_CODE (fd.loops[i].step) != INTEGER_CST)
	    goto lower_omp_ordered_ret;

	  tree itype = TREE_TYPE (TREE_VALUE (vec));
	  if (POINTER_TYPE_P (itype))
	    itype = sizetype;
	  wide_int offset = wide_int::from (wi::to_wide (TREE_PURPOSE (vec)),
					    TYPE_PRECISION (itype),
					    TYPE_SIGN (itype));

	  /* Ignore invalid offsets that are not multiples of the step.  */
	  if (!wi::multiple_of_p (wi::abs (offset),
				  wi::abs (wi::to_wide (fd.loops[i].step)),
				  UNSIGNED))
	    {
	      warning_at (OMP_CLAUSE_LOCATION (c), 0,
			  "ignoring sink clause with offset that is not "
			  "a multiple of the loop step");
	      remove = true;
	      goto next_ordered_clause;
	    }

	  /* Calculate the first dimension.  The first dimension of
	     the folded dependency vector is the GCD of the first
	     elements, while ignoring any first elements whose offset
	     is 0.  */
	  if (i == 0)
	    {
	      /* Ignore dependence vectors whose first dimension is 0.  */
	      if (offset == 0)
		{
		  remove = true;
		  goto next_ordered_clause;
		}
	      else
		{
		  if (!TYPE_UNSIGNED (itype) && (forward ^ wi::neg_p (offset)))
		    {
		      error_at (OMP_CLAUSE_LOCATION (c),
				"first offset must be in opposite direction "
				"of loop iterations");
		      goto lower_omp_ordered_ret;
		    }
		  /* Fold on absolute values; NEG_OFFSET_P records the
		     sign to restore at the end.  */
		  if (forward)
		    offset = -offset;
		  neg_offset_p = forward;
		  /* Initialize the first time around.  */
		  if (folded_dep == NULL_TREE)
		    {
		      folded_dep = c;
		      folded_deps[0] = offset;
		    }
		  else
		    folded_deps[0] = wi::gcd (folded_deps[0],
					      offset, UNSIGNED);
		}
	    }
	  /* Calculate minimum for the remaining dimensions.  */
	  else
	    {
	      /* FOLDED_DEPS[LEN + I - 1] caches this clause's offset so
		 it can be copied wholesale if it becomes the new
		 lexically-latest candidate below.  */
	      folded_deps[len + i - 1] = offset;
	      if (folded_dep == c)
		folded_deps[i] = offset;
	      else if (maybe_lexically_later
		       && !wi::eq_p (folded_deps[i], offset))
		{
		  if (forward ^ wi::gts_p (folded_deps[i], offset))
		    {
		      unsigned int j;
		      folded_dep = c;
		      for (j = 1; j <= i; j++)
			folded_deps[j] = folded_deps[len + j - 1];
		    }
		  else
		    maybe_lexically_later = false;
		}
	    }
	}
      gcc_assert (i == len);

      /* The whole vector has been folded; the clause itself is no
	 longer needed.  */
      remove = true;

    next_ordered_clause:
      if (remove)
	*list_p = OMP_CLAUSE_CHAIN (c);
      else
	list_p = &OMP_CLAUSE_CHAIN (c);
    }

  /* Commit the folded vector: rewrite FOLDED_DEP's first offset and
     make it the sole remaining clause of the ordered construct.  */
  if (folded_dep)
    {
      if (neg_offset_p)
	folded_deps[0] = -folded_deps[0];

      tree itype = TREE_TYPE (TREE_VALUE (OMP_CLAUSE_DECL (folded_dep)));
      if (POINTER_TYPE_P (itype))
	itype = sizetype;

      TREE_PURPOSE (OMP_CLAUSE_DECL (folded_dep))
	= wide_int_to_tree (itype, folded_deps[0]);
      OMP_CLAUSE_CHAIN (folded_dep) = gimple_omp_ordered_clauses (ord_stmt);
      *gimple_omp_ordered_clauses_ptr (ord_stmt) = folded_dep;
    }

 lower_omp_ordered_ret:

  /* Ordered without clauses is #pragma omp ordered threads, while we want
     a nop instead if we remove all clauses.  */
  if (gimple_omp_ordered_clauses (ord_stmt) == NULL_TREE)
    gsi_replace (gsi_p, gimple_build_nop (), true);
}
9872 
9873 
/* Expand code for an OpenMP ordered directive.  The region is
   bracketed by ordered-start/ordered-end runtime or internal-fn calls;
   with a simd clause the GOMP_SIMD_ORDERED_{START,END} internal
   functions are used, and under SIMT the body is additionally
   serialized across lanes.  */

static void
lower_omp_ordered (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree block;
  gimple *stmt = gsi_stmt (*gsi_p), *g;
  gomp_ordered *ord_stmt = as_a <gomp_ordered *> (stmt);
  gcall *x;
  gbind *bind;
  bool simd = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
			       OMP_CLAUSE_SIMD);
  /* FIXME: this should check presence of OMP_CLAUSE__SIMT_ on the enclosing
     loop.  */
  bool maybe_simt
    = simd && omp_maybe_offloaded_ctx (ctx) && omp_max_simt_vf () > 1;
  bool threads = omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
				  OMP_CLAUSE_THREADS);

  if (omp_find_clause (gimple_omp_ordered_clauses (ord_stmt),
		       OMP_CLAUSE_DEPEND))
    {
      /* FIXME: This needs to be moved to the expansion to verify various
	 conditions only testable on cfg with dominators computed, and also
	 all the depend clauses to be merged still might need to be available
	 for the runtime checks.  */
      if (0)
	lower_omp_ordered_clauses (gsi_p, ord_stmt, ctx);
      return;
    }

  push_gimplify_context ();

  /* Wrap the ordered statement in a bind that will hold the start/end
     calls around its body.  */
  block = make_node (BLOCK);
  bind = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, bind, true);
  gimple_bind_add_stmt (bind, stmt);

  if (simd)
    {
      /* The THREADS flag is passed down to the SIMD lowering via the
	 internal fn's argument.  */
      x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_START, 1,
				      build_int_cst (NULL_TREE, threads));
      cfun->has_simduid_loops = true;
    }
  else
    x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_START),
			   0);
  gimple_bind_add_stmt (bind, x);

  /* Under SIMT the lanes must run the ordered body one at a time:
     loop over the lane counter, letting only the lane whose
     GOMP_SIMT_ORDERED_PRED matches fall through into the body on each
     iteration.  */
  tree counter = NULL_TREE, test = NULL_TREE, body = NULL_TREE;
  if (maybe_simt)
    {
      counter = create_tmp_var (integer_type_node);
      g = gimple_build_call_internal (IFN_GOMP_SIMT_LANE, 0);
      gimple_call_set_lhs (g, counter);
      gimple_bind_add_stmt (bind, g);

      body = create_artificial_label (UNKNOWN_LOCATION);
      test = create_artificial_label (UNKNOWN_LOCATION);
      gimple_bind_add_stmt (bind, gimple_build_label (body));

      tree simt_pred = create_tmp_var (integer_type_node);
      g = gimple_build_call_internal (IFN_GOMP_SIMT_ORDERED_PRED, 1, counter);
      gimple_call_set_lhs (g, simt_pred);
      gimple_bind_add_stmt (bind, g);

      /* Only execute the body when the predicate is zero; otherwise
	 jump straight to the loop test.  */
      tree t = create_artificial_label (UNKNOWN_LOCATION);
      g = gimple_build_cond (EQ_EXPR, simt_pred, integer_zero_node, t, test);
      gimple_bind_add_stmt (bind, g);

      gimple_bind_add_stmt (bind, gimple_build_label (t));
    }
  lower_omp (gimple_omp_body_ptr (stmt), ctx);
  gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
  gimple_bind_add_seq (bind, gimple_omp_body (stmt));
  gimple_omp_set_body (stmt, NULL);

  if (maybe_simt)
    {
      /* Decrement the lane counter and loop back to BODY while any
	 lane (as determined by GOMP_SIMT_VOTE_ANY) still has a
	 non-negative counter.  */
      gimple_bind_add_stmt (bind, gimple_build_label (test));
      g = gimple_build_assign (counter, MINUS_EXPR, counter, integer_one_node);
      gimple_bind_add_stmt (bind, g);

      tree c = build2 (GE_EXPR, boolean_type_node, counter, integer_zero_node);
      tree nonneg = create_tmp_var (integer_type_node);
      gimple_seq tseq = NULL;
      gimplify_assign (nonneg, fold_convert (integer_type_node, c), &tseq);
      gimple_bind_add_seq (bind, tseq);

      g = gimple_build_call_internal (IFN_GOMP_SIMT_VOTE_ANY, 1, nonneg);
      gimple_call_set_lhs (g, nonneg);
      gimple_bind_add_stmt (bind, g);

      tree end = create_artificial_label (UNKNOWN_LOCATION);
      g = gimple_build_cond (NE_EXPR, nonneg, integer_zero_node, body, end);
      gimple_bind_add_stmt (bind, g);

      gimple_bind_add_stmt (bind, gimple_build_label (end));
    }
  if (simd)
    x = gimple_build_call_internal (IFN_GOMP_SIMD_ORDERED_END, 1,
				    build_int_cst (NULL_TREE, threads));
  else
    x = gimple_build_call (builtin_decl_explicit (BUILT_IN_GOMP_ORDERED_END),
			   0);
  gimple_bind_add_stmt (bind, x);

  gimple_bind_add_stmt (bind, gimple_build_omp_return (true));

  pop_gimplify_context (bind);

  gimple_bind_append_vars (bind, ctx->block_vars);
  BLOCK_VARS (block) = gimple_bind_vars (bind);
}
9988 
9989 
/* Expand code for an OpenMP scan directive and the structured block
   before the scan directive.  */

static void
lower_omp_scan (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  gimple *stmt = gsi_stmt (*gsi_p);
  bool has_clauses
    = gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt)) != NULL;
  tree lane = NULL_TREE;
  /* Statements to emit ahead of the scan separator.  */
  gimple_seq before = NULL;
  omp_context *octx = ctx->outer;
  gcc_assert (octx);
  if (octx->scan_exclusive && !has_clauses)
    {
      gimple_stmt_iterator gsi2 = *gsi_p;
      gsi_next (&gsi2);
      gimple *stmt2 = gsi_stmt (gsi2);
      /* For exclusive scan, swap GIMPLE_OMP_SCAN without clauses
	 with following GIMPLE_OMP_SCAN with clauses, so that input_phase,
	 the one with exclusive clause(s), comes first.  */
      if (stmt2
	  && gimple_code (stmt2) == GIMPLE_OMP_SCAN
	  && gimple_omp_scan_clauses (as_a <gomp_scan *> (stmt2)) != NULL)
	{
	  gsi_remove (gsi_p, false);
	  gsi_insert_after (gsi_p, stmt, GSI_SAME_STMT);
	  ctx = maybe_lookup_ctx (stmt2);
	  gcc_assert (ctx);
	  lower_omp_scan (gsi_p, ctx);
	  return;
	}
    }

  /* True when lowering the input phase, i.e. when exactly one of
     HAS_CLAUSES and OCTX->scan_inclusive holds.  */
  bool input_phase = has_clauses ^ octx->scan_inclusive;
  bool is_simd = (gimple_code (octx->stmt) == GIMPLE_OMP_FOR
		  && gimple_omp_for_kind (octx->stmt) == GF_OMP_FOR_KIND_SIMD);
  bool is_for = (gimple_code (octx->stmt) == GIMPLE_OMP_FOR
		 && gimple_omp_for_kind (octx->stmt) == GF_OMP_FOR_KIND_FOR
		 && !gimple_omp_for_combined_p (octx->stmt));
  bool is_for_simd = is_simd && gimple_omp_for_combined_into_p (octx->stmt);
  if (is_for_simd && octx->for_simd_scan_phase)
    is_simd = false;
  if (is_simd)
    /* For a simd loop with a _simduid_ clause, obtain the current lane
       from the SIMD lowering; the second argument distinguishes the
       phase: 1 input phase, 2 inclusive scan, 3 exclusive scan.  */
    if (tree c = omp_find_clause (gimple_omp_for_clauses (octx->stmt),
				  OMP_CLAUSE__SIMDUID_))
      {
	tree uid = OMP_CLAUSE__SIMDUID__DECL (c);
	lane = create_tmp_var (unsigned_type_node);
	tree t = build_int_cst (integer_type_node,
				input_phase ? 1
				: octx->scan_inclusive ? 2 : 3);
	gimple *g
	  = gimple_build_call_internal (IFN_GOMP_SIMD_LANE, 2, uid, t);
	gimple_call_set_lhs (g, lane);
	gimple_seq_add_stmt (&before, g);
      }

  if (is_simd || is_for)
    {
      /* Process every inscan reduction clause of the enclosing loop.  */
      for (tree c = gimple_omp_for_clauses (octx->stmt);
	   c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
	    && OMP_CLAUSE_REDUCTION_INSCAN (c))
	  {
	    location_t clause_loc = OMP_CLAUSE_LOCATION (c);
	    tree var = OMP_CLAUSE_DECL (c);
	    tree new_var = lookup_decl (var, octx);
	    tree val = new_var;
	    /* NOTE(review): from the uses below, VAR2 appears to be the
	       accumulator holding the scan result, VAR3 a separately
	       allocated identity element (if any), and VAR4 a helper
	       copy used for exclusive scans — confirm against the
	       corresponding setup in the input-clause lowering.  */
	    tree var2 = NULL_TREE;
	    tree var3 = NULL_TREE;
	    tree var4 = NULL_TREE;
	    tree lane0 = NULL_TREE;
	    tree new_vard = new_var;
	    if (omp_privatize_by_reference (var))
	      {
		new_var = build_simple_mem_ref_loc (clause_loc, new_var);
		val = new_var;
	      }
	    if (DECL_HAS_VALUE_EXPR_P (new_vard))
	      {
		/* The privatized var already has a value expr; when it
		   indexes an "omp simd array", redirect it to the
		   current LANE element.  */
		val = DECL_VALUE_EXPR (new_vard);
		if (new_vard != new_var)
		  {
		    gcc_assert (TREE_CODE (val) == ADDR_EXPR);
		    val = TREE_OPERAND (val, 0);
		  }
		if (TREE_CODE (val) == ARRAY_REF
		    && VAR_P (TREE_OPERAND (val, 0)))
		  {
		    tree v = TREE_OPERAND (val, 0);
		    if (lookup_attribute ("omp simd array",
					  DECL_ATTRIBUTES (v)))
		      {
			val = unshare_expr (val);
			/* Remember the original lane index for the
			   exclusive-scan fixup at the end.  */
			lane0 = TREE_OPERAND (val, 1);
			TREE_OPERAND (val, 1) = lane;
			var2 = lookup_decl (v, octx);
			if (octx->scan_exclusive)
			  var4 = lookup_decl (var2, octx);
			if (input_phase
			    && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
			  var3 = maybe_lookup_decl (var4 ? var4 : var2, octx);
			if (!input_phase)
			  {
			    var2 = build4 (ARRAY_REF, TREE_TYPE (val),
					   var2, lane, NULL_TREE, NULL_TREE);
			    TREE_THIS_NOTRAP (var2) = 1;
			    if (octx->scan_exclusive)
			      {
				var4 = build4 (ARRAY_REF, TREE_TYPE (val),
					       var4, lane, NULL_TREE,
					       NULL_TREE);
				TREE_THIS_NOTRAP (var4) = 1;
			      }
			  }
			else
			  var2 = val;
		      }
		  }
		gcc_assert (var2);
	      }
	    else
	      {
		var2 = build_outer_var_ref (var, octx);
		if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
		  {
		    var3 = maybe_lookup_decl (new_vard, octx);
		    if (var3 == new_vard || var3 == NULL_TREE)
		      var3 = NULL_TREE;
		    else if (is_simd && octx->scan_exclusive && !input_phase)
		      {
			var4 = maybe_lookup_decl (var3, octx);
			if (var4 == var3 || var4 == NULL_TREE)
			  {
			    if (TREE_ADDRESSABLE (TREE_TYPE (new_var)))
			      {
				var4 = var3;
				var3 = NULL_TREE;
			      }
			    else
			      var4 = NULL_TREE;
			  }
		      }
		  }
		if (is_simd
		    && octx->scan_exclusive
		    && !input_phase
		    && var4 == NULL_TREE)
		  var4 = create_tmp_var (TREE_TYPE (val));
	      }
	    if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
	      {
		/* User-defined reduction: lower the init/merge sequences
		   with the placeholder redirected appropriately.  */
		tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
		if (input_phase)
		  {
		    if (var3)
		      {
			/* If we've added a separate identity element
			   variable, copy it over into val.  */
			tree x = lang_hooks.decls.omp_clause_assign_op (c, val,
									var3);
			gimplify_and_add (x, &before);
		      }
		    else if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
		      {
			/* Otherwise, assign to it the identity element.  */
			gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
			if (is_for)
			  tseq = copy_gimple_seq_and_replace_locals (tseq);
			tree ref = build_outer_var_ref (var, octx);
			tree x = (DECL_HAS_VALUE_EXPR_P (new_vard)
				  ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
			if (x)
			  {
			    if (new_vard != new_var)
			      val = build_fold_addr_expr_loc (clause_loc, val);
			    SET_DECL_VALUE_EXPR (new_vard, val);
			  }
			SET_DECL_VALUE_EXPR (placeholder, ref);
			DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
			lower_omp (&tseq, octx);
			/* Restore the original value expr after
			   lowering.  */
			if (x)
			  SET_DECL_VALUE_EXPR (new_vard, x);
			SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
			DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
			gimple_seq_add_seq (&before, tseq);
			if (is_simd)
			  OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
		      }
		  }
		else if (is_simd)
		  {
		    tree x;
		    if (octx->scan_exclusive)
		      {
			/* Exclusive scan: save the pre-merge value of
			   VAR2 into VAR4 first.  */
			tree v4 = unshare_expr (var4);
			tree v2 = unshare_expr (var2);
			x = lang_hooks.decls.omp_clause_assign_op (c, v4, v2);
			gimplify_and_add (x, &before);
		      }
		    gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
		    x = (DECL_HAS_VALUE_EXPR_P (new_vard)
			 ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
		    tree vexpr = val;
		    if (x && new_vard != new_var)
		      vexpr = build_fold_addr_expr_loc (clause_loc, val);
		    if (x)
		      SET_DECL_VALUE_EXPR (new_vard, vexpr);
		    SET_DECL_VALUE_EXPR (placeholder, var2);
		    DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
		    lower_omp (&tseq, octx);
		    gimple_seq_add_seq (&before, tseq);
		    OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
		    if (x)
		      SET_DECL_VALUE_EXPR (new_vard, x);
		    SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
		    DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
		    if (octx->scan_inclusive)
		      {
			x = lang_hooks.decls.omp_clause_assign_op (c, val,
								   var2);
			gimplify_and_add (x, &before);
		      }
		    else if (lane0 == NULL_TREE)
		      {
			x = lang_hooks.decls.omp_clause_assign_op (c, val,
								   var4);
			gimplify_and_add (x, &before);
		      }
		  }
	      }
	    else
	      {
		if (input_phase)
		  {
		    /* input phase.  Set val to initializer before
		       the body.  */
		    tree x = omp_reduction_init (c, TREE_TYPE (new_var));
		    gimplify_assign (val, x, &before);
		  }
		else if (is_simd)
		  {
		    /* scan phase.  */
		    enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
		    if (code == MINUS_EXPR)
		      code = PLUS_EXPR;

		    tree x = build2 (code, TREE_TYPE (var2),
				     unshare_expr (var2), unshare_expr (val));
		    if (octx->scan_inclusive)
		      {
			gimplify_assign (unshare_expr (var2), x, &before);
			gimplify_assign (val, var2, &before);
		      }
		    else
		      {
			/* Exclusive scan: hand back the value from
			   before the merge.  */
			gimplify_assign (unshare_expr (var4),
					 unshare_expr (var2), &before);
			gimplify_assign (var2, x, &before);
			if (lane0 == NULL_TREE)
			  gimplify_assign (val, var4, &before);
		      }
		  }
	      }
	    if (octx->scan_exclusive && !input_phase && lane0)
	      {
		/* Redirect the value expr back to the original lane's
		   element of the helper array.  */
		tree vexpr = unshare_expr (var4);
		TREE_OPERAND (vexpr, 1) = lane0;
		if (new_vard != new_var)
		  vexpr = build_fold_addr_expr_loc (clause_loc, vexpr);
		SET_DECL_VALUE_EXPR (new_vard, vexpr);
	      }
	  }
    }
  if (is_simd && !is_for_simd)
    {
      /* Plain simd: expand inline — BEFORE followed by the lowered
	 body replaces the GIMPLE_OMP_SCAN statement itself.  */
      gsi_insert_seq_after (gsi_p, gimple_omp_body (stmt), GSI_SAME_STMT);
      gsi_insert_seq_after (gsi_p, before, GSI_SAME_STMT);
      gsi_replace (gsi_p, gimple_build_nop (), true);
      return;
    }
  lower_omp (gimple_omp_body_ptr (stmt), octx);
  if (before)
    {
      /* Emit BEFORE at the start of the lowered body.  */
      gimple_stmt_iterator gsi = gsi_start_1 (gimple_omp_body_ptr (stmt));
      gsi_insert_seq_before (&gsi, before, GSI_SAME_STMT);
    }
}
10279 
10280 
10281 /* Gimplify a GIMPLE_OMP_CRITICAL statement.  This is a relatively simple
10282    substitution of a couple of function calls.  But in the NAMED case,
10283    requires that languages coordinate a symbol name.  It is therefore
10284    best put here in common code.  */
10285 
10286 static GTY(()) hash_map<tree, tree> *critical_name_mutexes;
10287 
10288 static void
lower_omp_critical(gimple_stmt_iterator * gsi_p,omp_context * ctx)10289 lower_omp_critical (gimple_stmt_iterator *gsi_p, omp_context *ctx)
10290 {
10291   tree block;
10292   tree name, lock, unlock;
10293   gomp_critical *stmt = as_a <gomp_critical *> (gsi_stmt (*gsi_p));
10294   gbind *bind;
10295   location_t loc = gimple_location (stmt);
10296   gimple_seq tbody;
10297 
10298   name = gimple_omp_critical_name (stmt);
10299   if (name)
10300     {
10301       tree decl;
10302 
10303       if (!critical_name_mutexes)
10304 	critical_name_mutexes = hash_map<tree, tree>::create_ggc (10);
10305 
10306       tree *n = critical_name_mutexes->get (name);
10307       if (n == NULL)
10308 	{
10309 	  char *new_str;
10310 
10311 	  decl = create_tmp_var_raw (ptr_type_node);
10312 
10313 	  new_str = ACONCAT ((".gomp_critical_user_",
10314 			      IDENTIFIER_POINTER (name), NULL));
10315 	  DECL_NAME (decl) = get_identifier (new_str);
10316 	  TREE_PUBLIC (decl) = 1;
10317 	  TREE_STATIC (decl) = 1;
10318 	  DECL_COMMON (decl) = 1;
10319 	  DECL_ARTIFICIAL (decl) = 1;
10320 	  DECL_IGNORED_P (decl) = 1;
10321 
10322 	  varpool_node::finalize_decl (decl);
10323 
10324 	  critical_name_mutexes->put (name, decl);
10325 	}
10326       else
10327 	decl = *n;
10328 
10329       /* If '#pragma omp critical' is inside offloaded region or
10330 	 inside function marked as offloadable, the symbol must be
10331 	 marked as offloadable too.  */
10332       omp_context *octx;
10333       if (cgraph_node::get (current_function_decl)->offloadable)
10334 	varpool_node::get_create (decl)->offloadable = 1;
10335       else
10336 	for (octx = ctx->outer; octx; octx = octx->outer)
10337 	  if (is_gimple_omp_offloaded (octx->stmt))
10338 	    {
10339 	      varpool_node::get_create (decl)->offloadable = 1;
10340 	      break;
10341 	    }
10342 
10343       lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_START);
10344       lock = build_call_expr_loc (loc, lock, 1,
10345 				  build_fold_addr_expr_loc (loc, decl));
10346 
10347       unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_NAME_END);
10348       unlock = build_call_expr_loc (loc, unlock, 1,
10349 				build_fold_addr_expr_loc (loc, decl));
10350     }
10351   else
10352     {
10353       lock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_START);
10354       lock = build_call_expr_loc (loc, lock, 0);
10355 
10356       unlock = builtin_decl_explicit (BUILT_IN_GOMP_CRITICAL_END);
10357       unlock = build_call_expr_loc (loc, unlock, 0);
10358     }
10359 
10360   push_gimplify_context ();
10361 
10362   block = make_node (BLOCK);
10363   bind = gimple_build_bind (NULL, NULL, block);
10364   gsi_replace (gsi_p, bind, true);
10365   gimple_bind_add_stmt (bind, stmt);
10366 
10367   tbody = gimple_bind_body (bind);
10368   gimplify_and_add (lock, &tbody);
10369   gimple_bind_set_body (bind, tbody);
10370 
10371   lower_omp (gimple_omp_body_ptr (stmt), ctx);
10372   gimple_omp_set_body (stmt, maybe_catch_exception (gimple_omp_body (stmt)));
10373   gimple_bind_add_seq (bind, gimple_omp_body (stmt));
10374   gimple_omp_set_body (stmt, NULL);
10375 
10376   tbody = gimple_bind_body (bind);
10377   gimplify_and_add (unlock, &tbody);
10378   gimple_bind_set_body (bind, tbody);
10379 
10380   gimple_bind_add_stmt (bind, gimple_build_omp_return (true));
10381 
10382   pop_gimplify_context (bind);
10383   gimple_bind_append_vars (bind, ctx->block_vars);
10384   BLOCK_VARS (block) = gimple_bind_vars (bind);
10385 }
10386 
/* A subroutine of lower_omp_for.  Generate code to emit the predicate
   for a lastprivate clause.  Given a loop control predicate of (V
   cond N2), we gate the clause on (!(V cond N2)).  The lowered form
   is appended to *DLIST, iterator initialization is appended to
   *BODY_P.  *CLIST is for lastprivate(conditional:) code that needs
   to be emitted in a critical section.  */

static void
lower_omp_for_lastprivate (struct omp_for_data *fd, gimple_seq *body_p,
			   gimple_seq *dlist, gimple_seq *clist,
			   struct omp_context *ctx)
{
  tree clauses, cond, vinit;
  enum tree_code cond_code;
  gimple_seq stmts;

  /* Invert the loop's continuation condition: lastprivate copy-out
     must fire only once the loop has run to completion, i.e. when
     (V cond N2) no longer holds.  */
  cond_code = fd->loop.cond_code;
  cond_code = cond_code == LT_EXPR ? GE_EXPR : LE_EXPR;

  /* When possible, use a strict equality expression.  This can let VRP
     type optimizations deduce the value and remove a copy.  */
  if (tree_fits_shwi_p (fd->loop.step))
    {
      HOST_WIDE_INT step = tree_to_shwi (fd->loop.step);
      if (step == 1 || step == -1)
	cond_code = EQ_EXPR;
    }

  /* For a collapsed loop combined into an outer worksharing/taskreg
     construct with a non-constant end value, the end value the
     predicate must test against is carried in a _LOOPTEMP_ clause on
     the outer construct; locate it and use it instead of FD's N2.  */
  tree n2 = fd->loop.n2;
  if (fd->collapse > 1
      && TREE_CODE (n2) != INTEGER_CST
      && gimple_omp_for_combined_into_p (fd->for_stmt))
    {
      struct omp_context *taskreg_ctx = NULL;
      if (gimple_code (ctx->outer->stmt) == GIMPLE_OMP_FOR)
	{
	  gomp_for *gfor = as_a <gomp_for *> (ctx->outer->stmt);
	  if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_FOR
	      || gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_DISTRIBUTE)
	    {
	      if (gimple_omp_for_combined_into_p (gfor))
		{
		  /* The outer FOR/DISTRIBUTE is itself combined into a
		     parallel; the _LOOPTEMP_ clauses live there.  */
		  gcc_assert (ctx->outer->outer
			      && is_parallel_ctx (ctx->outer->outer));
		  taskreg_ctx = ctx->outer->outer;
		}
	      else
		{
		  /* Stand-alone outer loop: its own extracted end value
		     is the bound to test against.  */
		  struct omp_for_data outer_fd;
		  omp_extract_for_data (gfor, &outer_fd, NULL);
		  n2 = fold_convert (TREE_TYPE (n2), outer_fd.loop.n2);
		}
	    }
	  else if (gimple_omp_for_kind (gfor) == GF_OMP_FOR_KIND_TASKLOOP)
	    taskreg_ctx = ctx->outer->outer;
	}
      else if (is_taskreg_ctx (ctx->outer))
	taskreg_ctx = ctx->outer;
      if (taskreg_ctx)
	{
	  int i;
	  tree taskreg_clauses
	    = gimple_omp_taskreg_clauses (taskreg_ctx->stmt);
	  tree innerc = omp_find_clause (taskreg_clauses,
					 OMP_CLAUSE__LOOPTEMP_);
	  gcc_assert (innerc);
	  /* Skip the per-dimension _LOOPTEMP_ temporaries to reach the
	     one carrying the end value.  */
	  int count = fd->collapse;
	  if (fd->non_rect
	      && fd->last_nonrect == fd->first_nonrect + 1)
	    if (tree v = gimple_omp_for_index (fd->for_stmt, fd->last_nonrect))
	      if (!TYPE_UNSIGNED (TREE_TYPE (v)))
		/* Such non-rectangular loops with a signed inner
		   iterator use 4 extra _LOOPTEMP_ slots; skip those
		   too.  */
		count += 4;
	  for (i = 0; i < count; i++)
	    {
	      innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
					OMP_CLAUSE__LOOPTEMP_);
	      gcc_assert (innerc);
	    }
	  innerc = omp_find_clause (OMP_CLAUSE_CHAIN (innerc),
				    OMP_CLAUSE__LOOPTEMP_);
	  if (innerc)
	    n2 = fold_convert (TREE_TYPE (n2),
			       lookup_decl (OMP_CLAUSE_DECL (innerc),
					    taskreg_ctx));
	}
    }
  cond = build2 (cond_code, boolean_type_node, fd->loop.v, n2);

  clauses = gimple_omp_for_clauses (fd->for_stmt);
  stmts = NULL;
  lower_lastprivate_clauses (clauses, cond, body_p, &stmts, clist, ctx);
  if (!gimple_seq_empty_p (stmts))
    {
      /* Emit the guarded lastprivate code ahead of whatever is already
	 queued on *DLIST.  */
      gimple_seq_add_seq (&stmts, *dlist);
      *dlist = stmts;

      /* Optimize: v = 0; is usually cheaper than v = some_other_constant.  */
      vinit = fd->loop.n1;
      if (cond_code == EQ_EXPR
	  && tree_fits_shwi_p (fd->loop.n2)
	  && ! integer_zerop (fd->loop.n2))
	vinit = build_int_cst (TREE_TYPE (fd->loop.v), 0);
      else
	vinit = unshare_expr (vinit);

      /* Initialize the iterator variable, so that threads that don't execute
	 any iterations don't execute the lastprivate clauses by accident.  */
      gimplify_assign (fd->loop.v, vinit, body_p);
    }
}
10497 
10498 /* OpenACC privatization.
10499 
10500    Or, in other words, *sharing* at the respective OpenACC level of
10501    parallelism.
10502 
10503    From a correctness perspective, a non-addressable variable can't be accessed
10504    outside the current thread, so it can go in a (faster than shared memory)
10505    register -- though that register may need to be broadcast in some
10506    circumstances.  A variable can only meaningfully be "shared" across workers
10507    or vector lanes if its address is taken, e.g. by a call to an atomic
10508    builtin.
10509 
10510    From an optimisation perspective, the answer might be fuzzier: maybe
10511    sometimes, using shared memory directly would be faster than
10512    broadcasting.  */
10513 
10514 static void
oacc_privatization_begin_diagnose_var(const dump_flags_t l_dump_flags,const location_t loc,const tree c,const tree decl)10515 oacc_privatization_begin_diagnose_var (const dump_flags_t l_dump_flags,
10516 				       const location_t loc, const tree c,
10517 				       const tree decl)
10518 {
10519   const dump_user_location_t d_u_loc
10520     = dump_user_location_t::from_location_t (loc);
10521 /* PR100695 "Format decoder, quoting in 'dump_printf' etc." */
10522 #if __GNUC__ >= 10
10523 # pragma GCC diagnostic push
10524 # pragma GCC diagnostic ignored "-Wformat"
10525 #endif
10526   dump_printf_loc (l_dump_flags, d_u_loc,
10527 		   "variable %<%T%> ", decl);
10528 #if __GNUC__ >= 10
10529 # pragma GCC diagnostic pop
10530 #endif
10531   if (c)
10532     dump_printf (l_dump_flags,
10533 		 "in %qs clause ",
10534 		 omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
10535   else
10536     dump_printf (l_dump_flags,
10537 		 "declared in block ");
10538 }
10539 
10540 static bool
oacc_privatization_candidate_p(const location_t loc,const tree c,const tree decl)10541 oacc_privatization_candidate_p (const location_t loc, const tree c,
10542 				const tree decl)
10543 {
10544   dump_flags_t l_dump_flags = get_openacc_privatization_dump_flags ();
10545 
10546   /* There is some differentiation depending on block vs. clause.  */
10547   bool block = !c;
10548 
10549   bool res = true;
10550 
10551   if (res && !VAR_P (decl))
10552     {
10553       res = false;
10554 
10555       if (dump_enabled_p ())
10556 	{
10557 	  oacc_privatization_begin_diagnose_var (l_dump_flags, loc, c, decl);
10558 	  dump_printf (l_dump_flags,
10559 		       "potentially has improper OpenACC privatization level: %qs\n",
10560 		       get_tree_code_name (TREE_CODE (decl)));
10561 	}
10562     }
10563 
10564   if (res && block && TREE_STATIC (decl))
10565     {
10566       res = false;
10567 
10568       if (dump_enabled_p ())
10569 	{
10570 	  oacc_privatization_begin_diagnose_var (l_dump_flags, loc, c, decl);
10571 	  dump_printf (l_dump_flags,
10572 		       "isn%'t candidate for adjusting OpenACC privatization level: %s\n",
10573 		       "static");
10574 	}
10575     }
10576 
10577   if (res && block && DECL_EXTERNAL (decl))
10578     {
10579       res = false;
10580 
10581       if (dump_enabled_p ())
10582 	{
10583 	  oacc_privatization_begin_diagnose_var (l_dump_flags, loc, c, decl);
10584 	  dump_printf (l_dump_flags,
10585 		       "isn%'t candidate for adjusting OpenACC privatization level: %s\n",
10586 		       "external");
10587 	}
10588     }
10589 
10590   if (res && !TREE_ADDRESSABLE (decl))
10591     {
10592       res = false;
10593 
10594       if (dump_enabled_p ())
10595 	{
10596 	  oacc_privatization_begin_diagnose_var (l_dump_flags, loc, c, decl);
10597 	  dump_printf (l_dump_flags,
10598 		       "isn%'t candidate for adjusting OpenACC privatization level: %s\n",
10599 		       "not addressable");
10600 	}
10601     }
10602 
10603   if (res)
10604     {
10605       if (dump_enabled_p ())
10606 	{
10607 	  oacc_privatization_begin_diagnose_var (l_dump_flags, loc, c, decl);
10608 	  dump_printf (l_dump_flags,
10609 		       "is candidate for adjusting OpenACC privatization level\n");
10610 	}
10611     }
10612 
10613   if (dump_file && (dump_flags & TDF_DETAILS))
10614     {
10615       print_generic_decl (dump_file, decl, dump_flags);
10616       fprintf (dump_file, "\n");
10617     }
10618 
10619   return res;
10620 }
10621 
10622 /* Scan CLAUSES for candidates for adjusting OpenACC privatization level in
10623    CTX.  */
10624 
10625 static void
oacc_privatization_scan_clause_chain(omp_context * ctx,tree clauses)10626 oacc_privatization_scan_clause_chain (omp_context *ctx, tree clauses)
10627 {
10628   for (tree c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
10629     if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_PRIVATE)
10630       {
10631 	tree decl = OMP_CLAUSE_DECL (c);
10632 
10633 	if (!oacc_privatization_candidate_p (OMP_CLAUSE_LOCATION (c), c, decl))
10634 	  continue;
10635 
10636 	gcc_checking_assert (!ctx->oacc_privatization_candidates.contains (decl));
10637 	ctx->oacc_privatization_candidates.safe_push (decl);
10638       }
10639 }
10640 
10641 /* Scan DECLS for candidates for adjusting OpenACC privatization level in
10642    CTX.  */
10643 
10644 static void
oacc_privatization_scan_decl_chain(omp_context * ctx,tree decls)10645 oacc_privatization_scan_decl_chain (omp_context *ctx, tree decls)
10646 {
10647   for (tree decl = decls; decl; decl = DECL_CHAIN (decl))
10648     {
10649       if (!oacc_privatization_candidate_p (gimple_location (ctx->stmt), NULL, decl))
10650 	continue;
10651 
10652       gcc_checking_assert (!ctx->oacc_privatization_candidates.contains (decl));
10653       ctx->oacc_privatization_candidates.safe_push (decl);
10654     }
10655 }
10656 
10657 /* Callback for walk_gimple_seq.  Find #pragma omp scan statement.  */
10658 
10659 static tree
omp_find_scan(gimple_stmt_iterator * gsi_p,bool * handled_ops_p,struct walk_stmt_info * wi)10660 omp_find_scan (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
10661 	       struct walk_stmt_info *wi)
10662 {
10663   gimple *stmt = gsi_stmt (*gsi_p);
10664 
10665   *handled_ops_p = true;
10666   switch (gimple_code (stmt))
10667     {
10668     WALK_SUBSTMTS;
10669 
10670     case GIMPLE_OMP_FOR:
10671       if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_SIMD
10672 	  && gimple_omp_for_combined_into_p (stmt))
10673 	*handled_ops_p = false;
10674       break;
10675 
10676     case GIMPLE_OMP_SCAN:
10677       *(gimple_stmt_iterator *) (wi->info) = *gsi_p;
10678       return integer_zero_node;
10679     default:
10680       break;
10681     }
10682   return NULL;
10683 }
10684 
10685 /* Helper function for lower_omp_for, add transformations for a worksharing
10686    loop with scan directives inside of it.
10687    For worksharing loop not combined with simd, transform:
10688    #pragma omp for reduction(inscan,+:r) private(i)
10689    for (i = 0; i < n; i = i + 1)
10690      {
10691        {
10692 	 update (r);
10693        }
10694        #pragma omp scan inclusive(r)
10695        {
10696 	 use (r);
10697        }
10698      }
10699 
10700    into two worksharing loops + code to merge results:
10701 
10702    num_threads = omp_get_num_threads ();
10703    thread_num = omp_get_thread_num ();
10704    if (thread_num == 0) goto <D.2099>; else goto <D.2100>;
10705    <D.2099>:
10706    var2 = r;
10707    goto <D.2101>;
10708    <D.2100>:
10709    // For UDRs this is UDR init, or if ctors are needed, copy from
10710    // var3 that has been constructed to contain the neutral element.
10711    var2 = 0;
10712    <D.2101>:
10713    ivar = 0;
10714    // The _scantemp_ clauses will arrange for rpriva to be initialized to
10715    // a shared array with num_threads elements and rprivb to a local array
10716    // number of elements equal to the number of (contiguous) iterations the
10717    // current thread will perform.  controlb and controlp variables are
10718    // temporaries to handle deallocation of rprivb at the end of second
10719    // GOMP_FOR.
10720    #pragma omp for _scantemp_(rpriva) _scantemp_(rprivb) _scantemp_(controlb) \
10721      _scantemp_(controlp) reduction(inscan,+:r) private(i) nowait
10722    for (i = 0; i < n; i = i + 1)
10723      {
10724        {
10725 	 // For UDRs this is UDR init or copy from var3.
10726 	 r = 0;
10727 	 // This is the input phase from user code.
10728 	 update (r);
10729        }
10730        {
10731 	 // For UDRs this is UDR merge.
10732 	 var2 = var2 + r;
10733 	 // Rather than handing it over to the user, save to local thread's
10734 	 // array.
10735 	 rprivb[ivar] = var2;
10736 	 // For exclusive scan, the above two statements are swapped.
10737 	 ivar = ivar + 1;
10738        }
10739      }
10740    // And remember the final value from this thread's into the shared
10741    // rpriva array.
10742    rpriva[(sizetype) thread_num] = var2;
10743    // If more than one thread, compute using Work-Efficient prefix sum
10744    // the inclusive parallel scan of the rpriva array.
10745    if (num_threads > 1) goto <D.2102>; else goto <D.2103>;
10746    <D.2102>:
10747    GOMP_barrier ();
10748    down = 0;
10749    k = 1;
10750    num_threadsu = (unsigned int) num_threads;
10751    thread_numup1 = (unsigned int) thread_num + 1;
10752    <D.2108>:
10753    twok = k << 1;
10754    if (twok > num_threadsu) goto <D.2110>; else goto <D.2111>;
10755    <D.2110>:
10756    down = 4294967295;
10757    k = k >> 1;
10758    if (k == num_threadsu) goto <D.2112>; else goto <D.2111>;
10759    <D.2112>:
10760    k = k >> 1;
10761    <D.2111>:
10762    twok = k << 1;
10763    cplx = .MUL_OVERFLOW (thread_nump1, twok);
10764    mul = REALPART_EXPR <cplx>;
10765    ovf = IMAGPART_EXPR <cplx>;
10766    if (ovf == 0) goto <D.2116>; else goto <D.2117>;
10767    <D.2116>:
10768    andv = k & down;
10769    andvm1 = andv + 4294967295;
10770    l = mul + andvm1;
10771    if (l < num_threadsu) goto <D.2120>; else goto <D.2117>;
10772    <D.2120>:
10773    // For UDRs this is UDR merge, performed using var2 variable as temporary,
10774    // i.e. var2 = rpriva[l - k]; UDR merge (var2, rpriva[l]); rpriva[l] = var2;
10775    rpriva[l] = rpriva[l - k] + rpriva[l];
10776    <D.2117>:
10777    if (down == 0) goto <D.2121>; else goto <D.2122>;
10778    <D.2121>:
10779    k = k << 1;
10780    goto <D.2123>;
10781    <D.2122>:
10782    k = k >> 1;
10783    <D.2123>:
10784    GOMP_barrier ();
10785    if (k != 0) goto <D.2108>; else goto <D.2103>;
10786    <D.2103>:
10787    if (thread_num == 0) goto <D.2124>; else goto <D.2125>;
10788    <D.2124>:
10789    // For UDRs this is UDR init or copy from var3.
10790    var2 = 0;
10791    goto <D.2126>;
10792    <D.2125>:
10793    var2 = rpriva[thread_num - 1];
10794    <D.2126>:
10795    ivar = 0;
10796    #pragma omp for _scantemp_(controlb) _scantemp_(controlp) \
10797      reduction(inscan,+:r) private(i)
10798    for (i = 0; i < n; i = i + 1)
10799      {
10800        {
10801 	 // For UDRs, this is r = var2; UDR merge (r, rprivb[ivar]);
10802 	 r = var2 + rprivb[ivar];
10803        }
10804        {
10805 	 // This is the scan phase from user code.
10806 	 use (r);
10807 	 // Plus a bump of the iterator.
10808 	 ivar = ivar + 1;
10809        }
10810      }  */
10811 
10812 static void
lower_omp_for_scan(gimple_seq * body_p,gimple_seq * dlist,gomp_for * stmt,struct omp_for_data * fd,omp_context * ctx)10813 lower_omp_for_scan (gimple_seq *body_p, gimple_seq *dlist, gomp_for *stmt,
10814 		    struct omp_for_data *fd, omp_context *ctx)
10815 {
10816   bool is_for_simd = gimple_omp_for_combined_p (stmt);
10817   gcc_assert (ctx->scan_inclusive || ctx->scan_exclusive);
10818 
10819   gimple_seq body = gimple_omp_body (stmt);
10820   gimple_stmt_iterator input1_gsi = gsi_none ();
10821   struct walk_stmt_info wi;
10822   memset (&wi, 0, sizeof (wi));
10823   wi.val_only = true;
10824   wi.info = (void *) &input1_gsi;
10825   walk_gimple_seq_mod (&body, omp_find_scan, NULL, &wi);
10826   gcc_assert (!gsi_end_p (input1_gsi));
10827 
10828   gimple *input_stmt1 = gsi_stmt (input1_gsi);
10829   gimple_stmt_iterator gsi = input1_gsi;
10830   gsi_next (&gsi);
10831   gimple_stmt_iterator scan1_gsi = gsi;
10832   gimple *scan_stmt1 = gsi_stmt (gsi);
10833   gcc_assert (scan_stmt1 && gimple_code (scan_stmt1) == GIMPLE_OMP_SCAN);
10834 
10835   gimple_seq input_body = gimple_omp_body (input_stmt1);
10836   gimple_seq scan_body = gimple_omp_body (scan_stmt1);
10837   gimple_omp_set_body (input_stmt1, NULL);
10838   gimple_omp_set_body (scan_stmt1, NULL);
10839   gimple_omp_set_body (stmt, NULL);
10840 
10841   gomp_for *new_stmt = as_a <gomp_for *> (gimple_copy (stmt));
10842   gimple_seq new_body = copy_gimple_seq_and_replace_locals (body);
10843   gimple_omp_set_body (stmt, body);
10844   gimple_omp_set_body (input_stmt1, input_body);
10845 
10846   gimple_stmt_iterator input2_gsi = gsi_none ();
10847   memset (&wi, 0, sizeof (wi));
10848   wi.val_only = true;
10849   wi.info = (void *) &input2_gsi;
10850   walk_gimple_seq_mod (&new_body, omp_find_scan, NULL, &wi);
10851   gcc_assert (!gsi_end_p (input2_gsi));
10852 
10853   gimple *input_stmt2 = gsi_stmt (input2_gsi);
10854   gsi = input2_gsi;
10855   gsi_next (&gsi);
10856   gimple_stmt_iterator scan2_gsi = gsi;
10857   gimple *scan_stmt2 = gsi_stmt (gsi);
10858   gcc_assert (scan_stmt2 && gimple_code (scan_stmt2) == GIMPLE_OMP_SCAN);
10859   gimple_omp_set_body (scan_stmt2, scan_body);
10860 
10861   gimple_stmt_iterator input3_gsi = gsi_none ();
10862   gimple_stmt_iterator scan3_gsi = gsi_none ();
10863   gimple_stmt_iterator input4_gsi = gsi_none ();
10864   gimple_stmt_iterator scan4_gsi = gsi_none ();
10865   gimple *input_stmt3 = NULL, *scan_stmt3 = NULL;
10866   gimple *input_stmt4 = NULL, *scan_stmt4 = NULL;
10867   omp_context *input_simd_ctx = NULL, *scan_simd_ctx = NULL;
10868   if (is_for_simd)
10869     {
10870       memset (&wi, 0, sizeof (wi));
10871       wi.val_only = true;
10872       wi.info = (void *) &input3_gsi;
10873       walk_gimple_seq_mod (&input_body, omp_find_scan, NULL, &wi);
10874       gcc_assert (!gsi_end_p (input3_gsi));
10875 
10876       input_stmt3 = gsi_stmt (input3_gsi);
10877       gsi = input3_gsi;
10878       gsi_next (&gsi);
10879       scan3_gsi = gsi;
10880       scan_stmt3 = gsi_stmt (gsi);
10881       gcc_assert (scan_stmt3 && gimple_code (scan_stmt3) == GIMPLE_OMP_SCAN);
10882 
10883       memset (&wi, 0, sizeof (wi));
10884       wi.val_only = true;
10885       wi.info = (void *) &input4_gsi;
10886       walk_gimple_seq_mod (&scan_body, omp_find_scan, NULL, &wi);
10887       gcc_assert (!gsi_end_p (input4_gsi));
10888 
10889       input_stmt4 = gsi_stmt (input4_gsi);
10890       gsi = input4_gsi;
10891       gsi_next (&gsi);
10892       scan4_gsi = gsi;
10893       scan_stmt4 = gsi_stmt (gsi);
10894       gcc_assert (scan_stmt4 && gimple_code (scan_stmt4) == GIMPLE_OMP_SCAN);
10895 
10896       input_simd_ctx = maybe_lookup_ctx (input_stmt3)->outer;
10897       scan_simd_ctx = maybe_lookup_ctx (input_stmt4)->outer;
10898     }
10899 
10900   tree num_threads = create_tmp_var (integer_type_node);
10901   tree thread_num = create_tmp_var (integer_type_node);
10902   tree nthreads_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_NUM_THREADS);
10903   tree threadnum_decl = builtin_decl_explicit (BUILT_IN_OMP_GET_THREAD_NUM);
10904   gimple *g = gimple_build_call (nthreads_decl, 0);
10905   gimple_call_set_lhs (g, num_threads);
10906   gimple_seq_add_stmt (body_p, g);
10907   g = gimple_build_call (threadnum_decl, 0);
10908   gimple_call_set_lhs (g, thread_num);
10909   gimple_seq_add_stmt (body_p, g);
10910 
10911   tree ivar = create_tmp_var (sizetype);
10912   tree new_clauses1 = NULL_TREE, new_clauses2 = NULL_TREE;
10913   tree *cp1 = &new_clauses1, *cp2 = &new_clauses2;
10914   tree k = create_tmp_var (unsigned_type_node);
10915   tree l = create_tmp_var (unsigned_type_node);
10916 
10917   gimple_seq clist = NULL, mdlist = NULL;
10918   gimple_seq thr01_list = NULL, thrn1_list = NULL;
10919   gimple_seq thr02_list = NULL, thrn2_list = NULL;
10920   gimple_seq scan1_list = NULL, input2_list = NULL;
10921   gimple_seq last_list = NULL, reduc_list = NULL;
10922   for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
10923     if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
10924 	&& OMP_CLAUSE_REDUCTION_INSCAN (c))
10925       {
10926 	location_t clause_loc = OMP_CLAUSE_LOCATION (c);
10927 	tree var = OMP_CLAUSE_DECL (c);
10928 	tree new_var = lookup_decl (var, ctx);
10929 	tree var3 = NULL_TREE;
10930 	tree new_vard = new_var;
10931 	if (omp_privatize_by_reference (var))
10932 	  new_var = build_simple_mem_ref_loc (clause_loc, new_var);
10933 	if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
10934 	  {
10935 	    var3 = maybe_lookup_decl (new_vard, ctx);
10936 	    if (var3 == new_vard)
10937 	      var3 = NULL_TREE;
10938 	  }
10939 
10940 	tree ptype = build_pointer_type (TREE_TYPE (new_var));
10941 	tree rpriva = create_tmp_var (ptype);
10942 	tree nc = build_omp_clause (clause_loc, OMP_CLAUSE__SCANTEMP_);
10943 	OMP_CLAUSE_DECL (nc) = rpriva;
10944 	*cp1 = nc;
10945 	cp1 = &OMP_CLAUSE_CHAIN (nc);
10946 
10947 	tree rprivb = create_tmp_var (ptype);
10948 	nc = build_omp_clause (clause_loc, OMP_CLAUSE__SCANTEMP_);
10949 	OMP_CLAUSE_DECL (nc) = rprivb;
10950 	OMP_CLAUSE__SCANTEMP__ALLOC (nc) = 1;
10951 	*cp1 = nc;
10952 	cp1 = &OMP_CLAUSE_CHAIN (nc);
10953 
10954 	tree var2 = create_tmp_var_raw (TREE_TYPE (new_var));
10955 	if (new_vard != new_var)
10956 	  TREE_ADDRESSABLE (var2) = 1;
10957 	gimple_add_tmp_var (var2);
10958 
10959 	tree x = fold_convert_loc (clause_loc, sizetype, thread_num);
10960 	x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
10961 			     TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
10962 	x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
10963 	tree rpriva_ref = build_simple_mem_ref_loc (clause_loc, x);
10964 
10965 	x = fold_build2_loc (clause_loc, PLUS_EXPR, integer_type_node,
10966 			     thread_num, integer_minus_one_node);
10967 	x = fold_convert_loc (clause_loc, sizetype, x);
10968 	x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
10969 			     TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
10970 	x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
10971 	tree rprivam1_ref = build_simple_mem_ref_loc (clause_loc, x);
10972 
10973 	x = fold_convert_loc (clause_loc, sizetype, l);
10974 	x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
10975 			     TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
10976 	x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
10977 	tree rprival_ref = build_simple_mem_ref_loc (clause_loc, x);
10978 
10979 	x = fold_build2_loc (clause_loc, MINUS_EXPR, unsigned_type_node, l, k);
10980 	x = fold_convert_loc (clause_loc, sizetype, x);
10981 	x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, x,
10982 			     TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
10983 	x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rpriva), rpriva, x);
10984 	tree rprivalmk_ref = build_simple_mem_ref_loc (clause_loc, x);
10985 
10986 	x = fold_build2_loc (clause_loc, MULT_EXPR, sizetype, ivar,
10987 			     TYPE_SIZE_UNIT (TREE_TYPE (ptype)));
10988 	x = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (rprivb), rprivb, x);
10989 	tree rprivb_ref = build_simple_mem_ref_loc (clause_loc, x);
10990 
10991 	tree var4 = is_for_simd ? new_var : var2;
10992 	tree var5 = NULL_TREE, var6 = NULL_TREE;
10993 	if (is_for_simd)
10994 	  {
10995 	    var5 = lookup_decl (var, input_simd_ctx);
10996 	    var6 = lookup_decl (var, scan_simd_ctx);
10997 	    if (new_vard != new_var)
10998 	      {
10999 		var5 = build_simple_mem_ref_loc (clause_loc, var5);
11000 		var6 = build_simple_mem_ref_loc (clause_loc, var6);
11001 	      }
11002 	  }
11003 	if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
11004 	  {
11005 	    tree placeholder = OMP_CLAUSE_REDUCTION_PLACEHOLDER (c);
11006 	    tree val = var2;
11007 
11008 	    x = lang_hooks.decls.omp_clause_default_ctor
11009 		    (c, var2, build_outer_var_ref (var, ctx));
11010 	    if (x)
11011 	      gimplify_and_add (x, &clist);
11012 
11013 	    x = build_outer_var_ref (var, ctx);
11014 	    x = lang_hooks.decls.omp_clause_assign_op (c, unshare_expr (var4),
11015 						       x);
11016 	    gimplify_and_add (x, &thr01_list);
11017 
11018 	    tree y = (DECL_HAS_VALUE_EXPR_P (new_vard)
11019 		      ? DECL_VALUE_EXPR (new_vard) : NULL_TREE);
11020 	    if (var3)
11021 	      {
11022 		x = unshare_expr (var4);
11023 		x = lang_hooks.decls.omp_clause_assign_op (c, x, var3);
11024 		gimplify_and_add (x, &thrn1_list);
11025 		x = unshare_expr (var4);
11026 		x = lang_hooks.decls.omp_clause_assign_op (c, x, var3);
11027 		gimplify_and_add (x, &thr02_list);
11028 	      }
11029 	    else if (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c))
11030 	      {
11031 		/* Otherwise, assign to it the identity element.  */
11032 		gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
11033 		tseq = copy_gimple_seq_and_replace_locals (tseq);
11034 		if (!is_for_simd)
11035 		  {
11036 		    if (new_vard != new_var)
11037 		      val = build_fold_addr_expr_loc (clause_loc, val);
11038 		    SET_DECL_VALUE_EXPR (new_vard, val);
11039 		    DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
11040 		  }
11041 		SET_DECL_VALUE_EXPR (placeholder, error_mark_node);
11042 		DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
11043 		lower_omp (&tseq, ctx);
11044 		gimple_seq_add_seq (&thrn1_list, tseq);
11045 		tseq = OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c);
11046 		lower_omp (&tseq, ctx);
11047 		gimple_seq_add_seq (&thr02_list, tseq);
11048 		SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
11049 		DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
11050 		OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
11051 		if (y)
11052 		  SET_DECL_VALUE_EXPR (new_vard, y);
11053 		else
11054 		  {
11055 		    DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
11056 		    SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
11057 		  }
11058 	      }
11059 
11060 	    x = unshare_expr (var4);
11061 	    x = lang_hooks.decls.omp_clause_assign_op (c, x, rprivam1_ref);
11062 	    gimplify_and_add (x, &thrn2_list);
11063 
11064 	    if (is_for_simd)
11065 	      {
11066 		x = unshare_expr (rprivb_ref);
11067 		x = lang_hooks.decls.omp_clause_assign_op (c, x, var5);
11068 		gimplify_and_add (x, &scan1_list);
11069 	      }
11070 	    else
11071 	      {
11072 		if (ctx->scan_exclusive)
11073 		  {
11074 		    x = unshare_expr (rprivb_ref);
11075 		    x = lang_hooks.decls.omp_clause_assign_op (c, x, var2);
11076 		    gimplify_and_add (x, &scan1_list);
11077 		  }
11078 
11079 		gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
11080 		tseq = copy_gimple_seq_and_replace_locals (tseq);
11081 		SET_DECL_VALUE_EXPR (placeholder, var2);
11082 		DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
11083 		lower_omp (&tseq, ctx);
11084 		gimple_seq_add_seq (&scan1_list, tseq);
11085 
11086 		if (ctx->scan_inclusive)
11087 		  {
11088 		    x = unshare_expr (rprivb_ref);
11089 		    x = lang_hooks.decls.omp_clause_assign_op (c, x, var2);
11090 		    gimplify_and_add (x, &scan1_list);
11091 		  }
11092 	      }
11093 
11094 	    x = unshare_expr (rpriva_ref);
11095 	    x = lang_hooks.decls.omp_clause_assign_op (c, x,
11096 						       unshare_expr (var4));
11097 	    gimplify_and_add (x, &mdlist);
11098 
11099 	    x = unshare_expr (is_for_simd ? var6 : new_var);
11100 	    x = lang_hooks.decls.omp_clause_assign_op (c, x, var4);
11101 	    gimplify_and_add (x, &input2_list);
11102 
11103 	    val = rprivb_ref;
11104 	    if (new_vard != new_var)
11105 	      val = build_fold_addr_expr_loc (clause_loc, val);
11106 
11107 	    gimple_seq tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
11108 	    tseq = copy_gimple_seq_and_replace_locals (tseq);
11109 	    SET_DECL_VALUE_EXPR (new_vard, val);
11110 	    DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
11111 	    if (is_for_simd)
11112 	      {
11113 		SET_DECL_VALUE_EXPR (placeholder, var6);
11114 		DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
11115 	      }
11116 	    else
11117 	      DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
11118 	    lower_omp (&tseq, ctx);
11119 	    if (y)
11120 	      SET_DECL_VALUE_EXPR (new_vard, y);
11121 	    else
11122 	      {
11123 		DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
11124 		SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
11125 	      }
11126 	    if (!is_for_simd)
11127 	      {
11128 		SET_DECL_VALUE_EXPR (placeholder, new_var);
11129 		DECL_HAS_VALUE_EXPR_P (placeholder) = 1;
11130 		lower_omp (&tseq, ctx);
11131 	      }
11132 	    gimple_seq_add_seq (&input2_list, tseq);
11133 
11134 	    x = build_outer_var_ref (var, ctx);
11135 	    x = lang_hooks.decls.omp_clause_assign_op (c, x, rpriva_ref);
11136 	    gimplify_and_add (x, &last_list);
11137 
11138 	    x = lang_hooks.decls.omp_clause_assign_op (c, var2, rprivalmk_ref);
11139 	    gimplify_and_add (x, &reduc_list);
11140 	    tseq = OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c);
11141 	    tseq = copy_gimple_seq_and_replace_locals (tseq);
11142 	    val = rprival_ref;
11143 	    if (new_vard != new_var)
11144 	      val = build_fold_addr_expr_loc (clause_loc, val);
11145 	    SET_DECL_VALUE_EXPR (new_vard, val);
11146 	    DECL_HAS_VALUE_EXPR_P (new_vard) = 1;
11147 	    SET_DECL_VALUE_EXPR (placeholder, var2);
11148 	    lower_omp (&tseq, ctx);
11149 	    OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
11150 	    SET_DECL_VALUE_EXPR (placeholder, NULL_TREE);
11151 	    DECL_HAS_VALUE_EXPR_P (placeholder) = 0;
11152 	    if (y)
11153 	      SET_DECL_VALUE_EXPR (new_vard, y);
11154 	    else
11155 	      {
11156 		DECL_HAS_VALUE_EXPR_P (new_vard) = 0;
11157 		SET_DECL_VALUE_EXPR (new_vard, NULL_TREE);
11158 	      }
11159 	    gimple_seq_add_seq (&reduc_list, tseq);
11160 	    x = lang_hooks.decls.omp_clause_assign_op (c, rprival_ref, var2);
11161 	    gimplify_and_add (x, &reduc_list);
11162 
11163 	    x = lang_hooks.decls.omp_clause_dtor (c, var2);
11164 	    if (x)
11165 	      gimplify_and_add (x, dlist);
11166 	  }
11167 	else
11168 	  {
11169 	    x = build_outer_var_ref (var, ctx);
11170 	    gimplify_assign (unshare_expr (var4), x, &thr01_list);
11171 
11172 	    x = omp_reduction_init (c, TREE_TYPE (new_var));
11173 	    gimplify_assign (unshare_expr (var4), unshare_expr (x),
11174 			     &thrn1_list);
11175 	    gimplify_assign (unshare_expr (var4), x, &thr02_list);
11176 
11177 	    gimplify_assign (unshare_expr (var4), rprivam1_ref, &thrn2_list);
11178 
11179 	    enum tree_code code = OMP_CLAUSE_REDUCTION_CODE (c);
11180 	    if (code == MINUS_EXPR)
11181 	      code = PLUS_EXPR;
11182 
11183 	    if (is_for_simd)
11184 	      gimplify_assign (unshare_expr (rprivb_ref), var5, &scan1_list);
11185 	    else
11186 	      {
11187 		if (ctx->scan_exclusive)
11188 		  gimplify_assign (unshare_expr (rprivb_ref), var2,
11189 				   &scan1_list);
11190 		x = build2 (code, TREE_TYPE (new_var), var2, new_var);
11191 		gimplify_assign (var2, x, &scan1_list);
11192 		if (ctx->scan_inclusive)
11193 		  gimplify_assign (unshare_expr (rprivb_ref), var2,
11194 				   &scan1_list);
11195 	      }
11196 
11197 	    gimplify_assign (unshare_expr (rpriva_ref), unshare_expr (var4),
11198 			     &mdlist);
11199 
11200 	    x = build2 (code, TREE_TYPE (new_var), var4, rprivb_ref);
11201 	    gimplify_assign (is_for_simd ? var6 : new_var, x, &input2_list);
11202 
11203 	    gimplify_assign (build_outer_var_ref (var, ctx), rpriva_ref,
11204 			     &last_list);
11205 
11206 	    x = build2 (code, TREE_TYPE (new_var), rprivalmk_ref,
11207 			unshare_expr (rprival_ref));
11208 	    gimplify_assign (rprival_ref, x, &reduc_list);
11209 	  }
11210       }
11211 
11212   g = gimple_build_assign (ivar, PLUS_EXPR, ivar, size_one_node);
11213   gimple_seq_add_stmt (&scan1_list, g);
11214   g = gimple_build_assign (ivar, PLUS_EXPR, ivar, size_one_node);
11215   gimple_seq_add_stmt (gimple_omp_body_ptr (is_for_simd
11216 					    ? scan_stmt4 : scan_stmt2), g);
11217 
11218   tree controlb = create_tmp_var (boolean_type_node);
11219   tree controlp = create_tmp_var (ptr_type_node);
11220   tree nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
11221   OMP_CLAUSE_DECL (nc) = controlb;
11222   OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
11223   *cp1 = nc;
11224   cp1 = &OMP_CLAUSE_CHAIN (nc);
11225   nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
11226   OMP_CLAUSE_DECL (nc) = controlp;
11227   OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
11228   *cp1 = nc;
11229   cp1 = &OMP_CLAUSE_CHAIN (nc);
11230   nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
11231   OMP_CLAUSE_DECL (nc) = controlb;
11232   OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
11233   *cp2 = nc;
11234   cp2 = &OMP_CLAUSE_CHAIN (nc);
11235   nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__SCANTEMP_);
11236   OMP_CLAUSE_DECL (nc) = controlp;
11237   OMP_CLAUSE__SCANTEMP__CONTROL (nc) = 1;
11238   *cp2 = nc;
11239   cp2 = &OMP_CLAUSE_CHAIN (nc);
11240 
11241   *cp1 = gimple_omp_for_clauses (stmt);
11242   gimple_omp_for_set_clauses (stmt, new_clauses1);
11243   *cp2 = gimple_omp_for_clauses (new_stmt);
11244   gimple_omp_for_set_clauses (new_stmt, new_clauses2);
11245 
11246   if (is_for_simd)
11247     {
11248       gimple_seq_add_seq (gimple_omp_body_ptr (scan_stmt3), scan1_list);
11249       gimple_seq_add_seq (gimple_omp_body_ptr (input_stmt4), input2_list);
11250 
11251       gsi_insert_seq_after (&input3_gsi, gimple_omp_body (input_stmt3),
11252 			    GSI_SAME_STMT);
11253       gsi_remove (&input3_gsi, true);
11254       gsi_insert_seq_after (&scan3_gsi, gimple_omp_body (scan_stmt3),
11255 			    GSI_SAME_STMT);
11256       gsi_remove (&scan3_gsi, true);
11257       gsi_insert_seq_after (&input4_gsi, gimple_omp_body (input_stmt4),
11258 			    GSI_SAME_STMT);
11259       gsi_remove (&input4_gsi, true);
11260       gsi_insert_seq_after (&scan4_gsi, gimple_omp_body (scan_stmt4),
11261 			    GSI_SAME_STMT);
11262       gsi_remove (&scan4_gsi, true);
11263     }
11264   else
11265     {
11266       gimple_omp_set_body (scan_stmt1, scan1_list);
11267       gimple_omp_set_body (input_stmt2, input2_list);
11268     }
11269 
11270   gsi_insert_seq_after (&input1_gsi, gimple_omp_body (input_stmt1),
11271 			GSI_SAME_STMT);
11272   gsi_remove (&input1_gsi, true);
11273   gsi_insert_seq_after (&scan1_gsi, gimple_omp_body (scan_stmt1),
11274 			GSI_SAME_STMT);
11275   gsi_remove (&scan1_gsi, true);
11276   gsi_insert_seq_after (&input2_gsi, gimple_omp_body (input_stmt2),
11277 			GSI_SAME_STMT);
11278   gsi_remove (&input2_gsi, true);
11279   gsi_insert_seq_after (&scan2_gsi, gimple_omp_body (scan_stmt2),
11280 			GSI_SAME_STMT);
11281   gsi_remove (&scan2_gsi, true);
11282 
11283   gimple_seq_add_seq (body_p, clist);
11284 
11285   tree lab1 = create_artificial_label (UNKNOWN_LOCATION);
11286   tree lab2 = create_artificial_label (UNKNOWN_LOCATION);
11287   tree lab3 = create_artificial_label (UNKNOWN_LOCATION);
11288   g = gimple_build_cond (EQ_EXPR, thread_num, integer_zero_node, lab1, lab2);
11289   gimple_seq_add_stmt (body_p, g);
11290   g = gimple_build_label (lab1);
11291   gimple_seq_add_stmt (body_p, g);
11292   gimple_seq_add_seq (body_p, thr01_list);
11293   g = gimple_build_goto (lab3);
11294   gimple_seq_add_stmt (body_p, g);
11295   g = gimple_build_label (lab2);
11296   gimple_seq_add_stmt (body_p, g);
11297   gimple_seq_add_seq (body_p, thrn1_list);
11298   g = gimple_build_label (lab3);
11299   gimple_seq_add_stmt (body_p, g);
11300 
11301   g = gimple_build_assign (ivar, size_zero_node);
11302   gimple_seq_add_stmt (body_p, g);
11303 
11304   gimple_seq_add_stmt (body_p, stmt);
11305   gimple_seq_add_seq (body_p, body);
11306   gimple_seq_add_stmt (body_p, gimple_build_omp_continue (fd->loop.v,
11307 							  fd->loop.v));
11308 
11309   g = gimple_build_omp_return (true);
11310   gimple_seq_add_stmt (body_p, g);
11311   gimple_seq_add_seq (body_p, mdlist);
11312 
11313   lab1 = create_artificial_label (UNKNOWN_LOCATION);
11314   lab2 = create_artificial_label (UNKNOWN_LOCATION);
11315   g = gimple_build_cond (GT_EXPR, num_threads, integer_one_node, lab1, lab2);
11316   gimple_seq_add_stmt (body_p, g);
11317   g = gimple_build_label (lab1);
11318   gimple_seq_add_stmt (body_p, g);
11319 
11320   g = omp_build_barrier (NULL);
11321   gimple_seq_add_stmt (body_p, g);
11322 
11323   tree down = create_tmp_var (unsigned_type_node);
11324   g = gimple_build_assign (down, build_zero_cst (unsigned_type_node));
11325   gimple_seq_add_stmt (body_p, g);
11326 
11327   g = gimple_build_assign (k, build_one_cst (unsigned_type_node));
11328   gimple_seq_add_stmt (body_p, g);
11329 
11330   tree num_threadsu = create_tmp_var (unsigned_type_node);
11331   g = gimple_build_assign (num_threadsu, NOP_EXPR, num_threads);
11332   gimple_seq_add_stmt (body_p, g);
11333 
11334   tree thread_numu = create_tmp_var (unsigned_type_node);
11335   g = gimple_build_assign (thread_numu, NOP_EXPR, thread_num);
11336   gimple_seq_add_stmt (body_p, g);
11337 
11338   tree thread_nump1 = create_tmp_var (unsigned_type_node);
11339   g = gimple_build_assign (thread_nump1, PLUS_EXPR, thread_numu,
11340 			   build_int_cst (unsigned_type_node, 1));
11341   gimple_seq_add_stmt (body_p, g);
11342 
11343   lab3 = create_artificial_label (UNKNOWN_LOCATION);
11344   g = gimple_build_label (lab3);
11345   gimple_seq_add_stmt (body_p, g);
11346 
11347   tree twok = create_tmp_var (unsigned_type_node);
11348   g = gimple_build_assign (twok, LSHIFT_EXPR, k, integer_one_node);
11349   gimple_seq_add_stmt (body_p, g);
11350 
11351   tree lab4 = create_artificial_label (UNKNOWN_LOCATION);
11352   tree lab5 = create_artificial_label (UNKNOWN_LOCATION);
11353   tree lab6 = create_artificial_label (UNKNOWN_LOCATION);
11354   g = gimple_build_cond (GT_EXPR, twok, num_threadsu, lab4, lab5);
11355   gimple_seq_add_stmt (body_p, g);
11356   g = gimple_build_label (lab4);
11357   gimple_seq_add_stmt (body_p, g);
11358   g = gimple_build_assign (down, build_all_ones_cst (unsigned_type_node));
11359   gimple_seq_add_stmt (body_p, g);
11360   g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
11361   gimple_seq_add_stmt (body_p, g);
11362 
11363   g = gimple_build_cond (EQ_EXPR, k, num_threadsu, lab6, lab5);
11364   gimple_seq_add_stmt (body_p, g);
11365   g = gimple_build_label (lab6);
11366   gimple_seq_add_stmt (body_p, g);
11367 
11368   g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
11369   gimple_seq_add_stmt (body_p, g);
11370 
11371   g = gimple_build_label (lab5);
11372   gimple_seq_add_stmt (body_p, g);
11373 
11374   g = gimple_build_assign (twok, LSHIFT_EXPR, k, integer_one_node);
11375   gimple_seq_add_stmt (body_p, g);
11376 
11377   tree cplx = create_tmp_var (build_complex_type (unsigned_type_node, false));
11378   g = gimple_build_call_internal (IFN_MUL_OVERFLOW, 2, thread_nump1, twok);
11379   gimple_call_set_lhs (g, cplx);
11380   gimple_seq_add_stmt (body_p, g);
11381   tree mul = create_tmp_var (unsigned_type_node);
11382   g = gimple_build_assign (mul, REALPART_EXPR,
11383 			   build1 (REALPART_EXPR, unsigned_type_node, cplx));
11384   gimple_seq_add_stmt (body_p, g);
11385   tree ovf = create_tmp_var (unsigned_type_node);
11386   g = gimple_build_assign (ovf, IMAGPART_EXPR,
11387 			   build1 (IMAGPART_EXPR, unsigned_type_node, cplx));
11388   gimple_seq_add_stmt (body_p, g);
11389 
11390   tree lab7 = create_artificial_label (UNKNOWN_LOCATION);
11391   tree lab8 = create_artificial_label (UNKNOWN_LOCATION);
11392   g = gimple_build_cond (EQ_EXPR, ovf, build_zero_cst (unsigned_type_node),
11393 			 lab7, lab8);
11394   gimple_seq_add_stmt (body_p, g);
11395   g = gimple_build_label (lab7);
11396   gimple_seq_add_stmt (body_p, g);
11397 
11398   tree andv = create_tmp_var (unsigned_type_node);
11399   g = gimple_build_assign (andv, BIT_AND_EXPR, k, down);
11400   gimple_seq_add_stmt (body_p, g);
11401   tree andvm1 = create_tmp_var (unsigned_type_node);
11402   g = gimple_build_assign (andvm1, PLUS_EXPR, andv,
11403 			   build_minus_one_cst (unsigned_type_node));
11404   gimple_seq_add_stmt (body_p, g);
11405 
11406   g = gimple_build_assign (l, PLUS_EXPR, mul, andvm1);
11407   gimple_seq_add_stmt (body_p, g);
11408 
11409   tree lab9 = create_artificial_label (UNKNOWN_LOCATION);
11410   g = gimple_build_cond (LT_EXPR, l, num_threadsu, lab9, lab8);
11411   gimple_seq_add_stmt (body_p, g);
11412   g = gimple_build_label (lab9);
11413   gimple_seq_add_stmt (body_p, g);
11414   gimple_seq_add_seq (body_p, reduc_list);
11415   g = gimple_build_label (lab8);
11416   gimple_seq_add_stmt (body_p, g);
11417 
11418   tree lab10 = create_artificial_label (UNKNOWN_LOCATION);
11419   tree lab11 = create_artificial_label (UNKNOWN_LOCATION);
11420   tree lab12 = create_artificial_label (UNKNOWN_LOCATION);
11421   g = gimple_build_cond (EQ_EXPR, down, build_zero_cst (unsigned_type_node),
11422 			 lab10, lab11);
11423   gimple_seq_add_stmt (body_p, g);
11424   g = gimple_build_label (lab10);
11425   gimple_seq_add_stmt (body_p, g);
11426   g = gimple_build_assign (k, LSHIFT_EXPR, k, integer_one_node);
11427   gimple_seq_add_stmt (body_p, g);
11428   g = gimple_build_goto (lab12);
11429   gimple_seq_add_stmt (body_p, g);
11430   g = gimple_build_label (lab11);
11431   gimple_seq_add_stmt (body_p, g);
11432   g = gimple_build_assign (k, RSHIFT_EXPR, k, integer_one_node);
11433   gimple_seq_add_stmt (body_p, g);
11434   g = gimple_build_label (lab12);
11435   gimple_seq_add_stmt (body_p, g);
11436 
11437   g = omp_build_barrier (NULL);
11438   gimple_seq_add_stmt (body_p, g);
11439 
11440   g = gimple_build_cond (NE_EXPR, k, build_zero_cst (unsigned_type_node),
11441 			 lab3, lab2);
11442   gimple_seq_add_stmt (body_p, g);
11443 
11444   g = gimple_build_label (lab2);
11445   gimple_seq_add_stmt (body_p, g);
11446 
11447   lab1 = create_artificial_label (UNKNOWN_LOCATION);
11448   lab2 = create_artificial_label (UNKNOWN_LOCATION);
11449   lab3 = create_artificial_label (UNKNOWN_LOCATION);
11450   g = gimple_build_cond (EQ_EXPR, thread_num, integer_zero_node, lab1, lab2);
11451   gimple_seq_add_stmt (body_p, g);
11452   g = gimple_build_label (lab1);
11453   gimple_seq_add_stmt (body_p, g);
11454   gimple_seq_add_seq (body_p, thr02_list);
11455   g = gimple_build_goto (lab3);
11456   gimple_seq_add_stmt (body_p, g);
11457   g = gimple_build_label (lab2);
11458   gimple_seq_add_stmt (body_p, g);
11459   gimple_seq_add_seq (body_p, thrn2_list);
11460   g = gimple_build_label (lab3);
11461   gimple_seq_add_stmt (body_p, g);
11462 
11463   g = gimple_build_assign (ivar, size_zero_node);
11464   gimple_seq_add_stmt (body_p, g);
11465   gimple_seq_add_stmt (body_p, new_stmt);
11466   gimple_seq_add_seq (body_p, new_body);
11467 
11468   gimple_seq new_dlist = NULL;
11469   lab1 = create_artificial_label (UNKNOWN_LOCATION);
11470   lab2 = create_artificial_label (UNKNOWN_LOCATION);
11471   tree num_threadsm1 = create_tmp_var (integer_type_node);
11472   g = gimple_build_assign (num_threadsm1, PLUS_EXPR, num_threads,
11473 			   integer_minus_one_node);
11474   gimple_seq_add_stmt (&new_dlist, g);
11475   g = gimple_build_cond (EQ_EXPR, thread_num, num_threadsm1, lab1, lab2);
11476   gimple_seq_add_stmt (&new_dlist, g);
11477   g = gimple_build_label (lab1);
11478   gimple_seq_add_stmt (&new_dlist, g);
11479   gimple_seq_add_seq (&new_dlist, last_list);
11480   g = gimple_build_label (lab2);
11481   gimple_seq_add_stmt (&new_dlist, g);
11482   gimple_seq_add_seq (&new_dlist, *dlist);
11483   *dlist = new_dlist;
11484 }
11485 
11486 /* Build an internal UNIQUE function with type IFN_UNIQUE_OACC_PRIVATE listing
11487    the addresses of variables to be made private at the surrounding
11488    parallelism level.  Such functions appear in the gimple code stream in two
11489    forms, e.g. for a partitioned loop:
11490 
11491       .data_dep.6 = .UNIQUE (OACC_HEAD_MARK, .data_dep.6, 1, 68);
11492       .data_dep.6 = .UNIQUE (OACC_PRIVATE, .data_dep.6, -1, &w);
11493       .data_dep.6 = .UNIQUE (OACC_FORK, .data_dep.6, -1);
11494       .data_dep.6 = .UNIQUE (OACC_HEAD_MARK, .data_dep.6);
11495 
11496    or alternatively, OACC_PRIVATE can appear at the top level of a parallel,
11497    not as part of a HEAD_MARK sequence:
11498 
11499       .UNIQUE (OACC_PRIVATE, 0, 0, &w);
11500 
11501    For such stand-alone appearances, the 3rd argument is always 0, denoting
11502    gang partitioning.  */
11503 
11504 static gcall *
lower_oacc_private_marker(omp_context * ctx)11505 lower_oacc_private_marker (omp_context *ctx)
11506 {
11507   if (ctx->oacc_privatization_candidates.length () == 0)
11508     return NULL;
11509 
11510   auto_vec<tree, 5> args;
11511 
11512   args.quick_push (build_int_cst (integer_type_node, IFN_UNIQUE_OACC_PRIVATE));
11513   args.quick_push (integer_zero_node);
11514   args.quick_push (integer_minus_one_node);
11515 
11516   int i;
11517   tree decl;
11518   FOR_EACH_VEC_ELT (ctx->oacc_privatization_candidates, i, decl)
11519     {
11520       for (omp_context *thisctx = ctx; thisctx; thisctx = thisctx->outer)
11521 	{
11522 	  tree inner_decl = maybe_lookup_decl (decl, thisctx);
11523 	  if (inner_decl)
11524 	    {
11525 	      decl = inner_decl;
11526 	      break;
11527 	    }
11528 	}
11529       gcc_checking_assert (decl);
11530 
11531       tree addr = build_fold_addr_expr (decl);
11532       args.safe_push (addr);
11533     }
11534 
11535   return gimple_build_call_internal_vec (IFN_UNIQUE, args);
11536 }
11537 
11538 /* Lower code for an OMP loop directive.  */
11539 
static void
lower_omp_for (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  tree *rhs_p, block;
  struct omp_for_data fd, *fdp = NULL;
  gomp_for *stmt = as_a <gomp_for *> (gsi_stmt (*gsi_p));
  gbind *new_stmt;
  gimple_seq omp_for_body, body, dlist, tred_ilist = NULL, tred_dlist = NULL;
  gimple_seq cnt_list = NULL, clist = NULL;
  gimple_seq oacc_head = NULL, oacc_tail = NULL;
  size_t i;

  push_gimplify_context ();

  /* For OpenACC constructs, record privatization candidates found in the
     directive's clause chain before any lowering happens.  */
  if (is_gimple_omp_oacc (ctx->stmt))
    oacc_privatization_scan_clause_chain (ctx, gimple_omp_for_clauses (stmt));

  lower_omp (gimple_omp_for_pre_body_ptr (stmt), ctx);

  /* The lowered construct is wrapped in a new GIMPLE_BIND with a fresh
     BLOCK, which replaces STMT at GSI_P.  */
  block = make_node (BLOCK);
  new_stmt = gimple_build_bind (NULL, NULL, block);
  /* Replace at gsi right away, so that 'stmt' is no member
     of a sequence anymore as we're going to add to a different
     one below.  */
  gsi_replace (gsi_p, new_stmt, true);

  /* Move declaration of temporaries in the loop body before we make
     it go away.  */
  omp_for_body = gimple_omp_body (stmt);
  if (!gimple_seq_empty_p (omp_for_body)
      && gimple_code (gimple_seq_first_stmt (omp_for_body)) == GIMPLE_BIND)
    {
      gbind *inner_bind
	= as_a <gbind *> (gimple_seq_first_stmt (omp_for_body));
      tree vars = gimple_bind_vars (inner_bind);
      /* The moved decls are also candidates for OpenACC privatization.  */
      if (is_gimple_omp_oacc (ctx->stmt))
	oacc_privatization_scan_decl_chain (ctx, vars);
      gimple_bind_append_vars (new_stmt, vars);
      /* bind_vars/BLOCK_VARS are being moved to new_stmt/block, don't
	 keep them on the inner_bind and it's block.  */
      gimple_bind_set_vars (inner_bind, NULL_TREE);
      if (gimple_bind_block (inner_bind))
	BLOCK_VARS (gimple_bind_block (inner_bind)) = NULL_TREE;
    }

  if (gimple_omp_for_combined_into_p (stmt))
    {
      omp_extract_for_data (stmt, &fd, NULL);
      fdp = &fd;

      /* We need two temporaries with fd.loop.v type (istart/iend)
	 and then (fd.collapse - 1) temporaries with the same
	 type for count2 ... countN-1 vars if not constant.  */
      size_t count = 2;
      tree type = fd.iter_type;
      if (fd.collapse > 1
	  && TREE_CODE (fd.loop.n2) != INTEGER_CST)
	count += fd.collapse - 1;
      size_t count2 = 0;
      tree type2 = NULL_TREE;
      bool taskreg_for
	= (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR
	   || gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_TASKLOOP);
      tree outerc = NULL, *pc = gimple_omp_for_clauses_ptr (stmt);
      tree simtc = NULL;
      tree clauses = *pc;
      /* A non-rectangular collapse with a pair of adjacent non-rect loops
	 and a signed inner IV needs three extra temporaries of the inner
	 IV's type.  */
      if (fd.collapse > 1
	  && fd.non_rect
	  && fd.last_nonrect == fd.first_nonrect + 1
	  && TREE_CODE (fd.loop.n2) != INTEGER_CST)
	if (tree v = gimple_omp_for_index (stmt, fd.last_nonrect))
	  if (!TYPE_UNSIGNED (TREE_TYPE (v)))
	    {
	      v = gimple_omp_for_index (stmt, fd.first_nonrect);
	      type2 = TREE_TYPE (v);
	      count++;
	      count2 = 3;
	    }
      /* For worksharing/taskloop inside parallel/task, reuse the
	 _looptemp_ decls the enclosing construct already created.  */
      if (taskreg_for)
	outerc
	  = omp_find_clause (gimple_omp_taskreg_clauses (ctx->outer->stmt),
			     OMP_CLAUSE__LOOPTEMP_);
      if (ctx->simt_stmt)
	simtc = omp_find_clause (gimple_omp_for_clauses (ctx->simt_stmt),
				 OMP_CLAUSE__LOOPTEMP_);
      for (i = 0; i < count + count2; i++)
	{
	  tree temp;
	  if (taskreg_for)
	    {
	      gcc_assert (outerc);
	      temp = lookup_decl (OMP_CLAUSE_DECL (outerc), ctx->outer);
	      outerc = omp_find_clause (OMP_CLAUSE_CHAIN (outerc),
					OMP_CLAUSE__LOOPTEMP_);
	    }
	  else
	    {
	      /* If there are 2 adjacent SIMD stmts, one with _simt_
		 clause, another without, make sure they have the same
		 decls in _looptemp_ clauses, because the outer stmt
		 they are combined into will look up just one inner_stmt.  */
	      if (ctx->simt_stmt)
		temp = OMP_CLAUSE_DECL (simtc);
	      else
		temp = create_tmp_var (i >= count ? type2 : type);
	      insert_decl_map (&ctx->outer->cb, temp, temp);
	    }
	  /* Prepend a _looptemp_ clause carrying TEMP.  */
	  *pc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__LOOPTEMP_);
	  OMP_CLAUSE_DECL (*pc) = temp;
	  pc = &OMP_CLAUSE_CHAIN (*pc);
	  if (ctx->simt_stmt)
	    simtc = omp_find_clause (OMP_CLAUSE_CHAIN (simtc),
				     OMP_CLAUSE__LOOPTEMP_);
	}
      /* Reattach the original clause chain after the new _looptemp_s.  */
      *pc = clauses;
    }

  /* The pre-body and input clauses go before the lowered GIMPLE_OMP_FOR.  */
  dlist = NULL;
  body = NULL;
  /* Task reductions: if present, introduce a _reductemp_ clause and build
     the init (tred_ilist) and destruction (tred_dlist) sequences.  */
  tree rclauses
    = omp_task_reductions_find_first (gimple_omp_for_clauses (stmt), OMP_FOR,
				      OMP_CLAUSE_REDUCTION);
  tree rtmp = NULL_TREE;
  if (rclauses)
    {
      tree type = build_pointer_type (pointer_sized_int_node);
      tree temp = create_tmp_var (type);
      tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__REDUCTEMP_);
      OMP_CLAUSE_DECL (c) = temp;
      OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (stmt);
      gimple_omp_for_set_clauses (stmt, c);
      lower_omp_task_reductions (ctx, OMP_FOR,
				 gimple_omp_for_clauses (stmt),
				 &tred_ilist, &tred_dlist);
      rclauses = c;
      rtmp = make_ssa_name (type);
      gimple_seq_add_stmt (&body, gimple_build_assign (rtmp, temp));
    }

  lower_lastprivate_conditional_clauses (gimple_omp_for_clauses_ptr (stmt),
					 ctx);

  lower_rec_input_clauses (gimple_omp_for_clauses (stmt), &body, &dlist, ctx,
			   fdp);
  /* With task reductions the pre-body must run after the reduction init,
     so it goes onto tred_ilist instead of body.  */
  gimple_seq_add_seq (rclauses ? &tred_ilist : &body,
		      gimple_omp_for_pre_body (stmt));

  lower_omp (gimple_omp_body_ptr (stmt), ctx);

  gcall *private_marker = NULL;
  if (is_gimple_omp_oacc (ctx->stmt)
      && !gimple_seq_empty_p (omp_for_body))
    private_marker = lower_oacc_private_marker (ctx);

  /* Lower the header expressions.  At this point, we can assume that
     the header is of the form:

     	#pragma omp for (V = VAL1; V {<|>|<=|>=} VAL2; V = V [+-] VAL3)

     We just need to make sure that VAL1, VAL2 and VAL3 are lowered
     using the .omp_data_s mapping, if needed.  */
  for (i = 0; i < gimple_omp_for_collapse (stmt); i++)
    {
      rhs_p = gimple_omp_for_initial_ptr (stmt, i);
      /* A TREE_VEC initial/final value holds the operands of a
	 non-rectangular bound; lower elements 1 and 2 individually.  */
      if (TREE_CODE (*rhs_p) == TREE_VEC)
	{
	  if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 1)))
	    TREE_VEC_ELT (*rhs_p, 1)
	      = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 1), &cnt_list);
	  if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 2)))
	    TREE_VEC_ELT (*rhs_p, 2)
	      = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 2), &cnt_list);
	}
      else if (!is_gimple_min_invariant (*rhs_p))
	*rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
      else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
	recompute_tree_invariant_for_addr_expr (*rhs_p);

      rhs_p = gimple_omp_for_final_ptr (stmt, i);
      if (TREE_CODE (*rhs_p) == TREE_VEC)
	{
	  if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 1)))
	    TREE_VEC_ELT (*rhs_p, 1)
	      = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 1), &cnt_list);
	  if (!is_gimple_min_invariant (TREE_VEC_ELT (*rhs_p, 2)))
	    TREE_VEC_ELT (*rhs_p, 2)
	      = get_formal_tmp_var (TREE_VEC_ELT (*rhs_p, 2), &cnt_list);
	}
      else if (!is_gimple_min_invariant (*rhs_p))
	*rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
      else if (TREE_CODE (*rhs_p) == ADDR_EXPR)
	recompute_tree_invariant_for_addr_expr (*rhs_p);

      rhs_p = &TREE_OPERAND (gimple_omp_for_incr (stmt, i), 1);
      if (!is_gimple_min_invariant (*rhs_p))
	*rhs_p = get_formal_tmp_var (*rhs_p, &cnt_list);
    }
  if (rclauses)
    gimple_seq_add_seq (&tred_ilist, cnt_list);
  else
    gimple_seq_add_seq (&body, cnt_list);

  /* Once lowered, extract the bounds and clauses.  */
  omp_extract_for_data (stmt, &fd, NULL);

  if (is_gimple_omp_oacc (ctx->stmt)
      && !ctx_in_oacc_kernels_region (ctx))
    lower_oacc_head_tail (gimple_location (stmt),
			  gimple_omp_for_clauses (stmt), private_marker,
			  &oacc_head, &oacc_tail, ctx);

  /* Add OpenACC partitioning and reduction markers just before the loop.  */
  if (oacc_head)
    gimple_seq_add_seq (&body, oacc_head);

  lower_omp_for_lastprivate (&fd, &body, &dlist, &clist, ctx);

  /* Remap linear clause decls (and their steps, if they are decls) into
     this context for worksharing loops with copy-in.  */
  if (gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
    for (tree c = gimple_omp_for_clauses (stmt); c; c = OMP_CLAUSE_CHAIN (c))
      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
	  && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
	{
	  OMP_CLAUSE_DECL (c) = lookup_decl (OMP_CLAUSE_DECL (c), ctx);
	  if (DECL_P (OMP_CLAUSE_LINEAR_STEP (c)))
	    OMP_CLAUSE_LINEAR_STEP (c)
	      = maybe_lookup_decl_in_outer_ctx (OMP_CLAUSE_LINEAR_STEP (c),
						ctx);
	}

  /* Inclusive/exclusive scans take a dedicated lowering path; otherwise
     emit the loop statement followed by its lowered body.  */
  if ((ctx->scan_inclusive || ctx->scan_exclusive)
      && gimple_omp_for_kind (stmt) == GF_OMP_FOR_KIND_FOR)
    lower_omp_for_scan (&body, &dlist, stmt, &fd, ctx);
  else
    {
      gimple_seq_add_stmt (&body, stmt);
      gimple_seq_add_seq (&body, gimple_omp_body (stmt));
    }

  gimple_seq_add_stmt (&body, gimple_build_omp_continue (fd.loop.v,
							 fd.loop.v));

  /* After the loop, add exit clauses.  */
  lower_reduction_clauses (gimple_omp_for_clauses (stmt), &body, &clist, ctx);

  /* CLIST holds reduction merges that must run atomically; bracket them
     with GOMP_atomic_start/GOMP_atomic_end.  */
  if (clist)
    {
      tree fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_START);
      gcall *g = gimple_build_call (fndecl, 0);
      gimple_seq_add_stmt (&body, g);
      gimple_seq_add_seq (&body, clist);
      fndecl = builtin_decl_explicit (BUILT_IN_GOMP_ATOMIC_END);
      g = gimple_build_call (fndecl, 0);
      gimple_seq_add_stmt (&body, g);
    }

  if (ctx->cancellable)
    gimple_seq_add_stmt (&body, gimple_build_label (ctx->cancel_label));

  gimple_seq_add_seq (&body, dlist);

  /* With task reductions, the whole body is nested inside the reduction
     init sequence.  */
  if (rclauses)
    {
      gimple_seq_add_seq (&tred_ilist, body);
      body = tred_ilist;
    }

  body = maybe_catch_exception (body);

  /* Region exit marker goes at the end of the loop body.  */
  gimple *g = gimple_build_omp_return (fd.have_nowait);
  gimple_seq_add_stmt (&body, g);

  gimple_seq_add_seq (&body, tred_dlist);

  maybe_add_implicit_barrier_cancel (ctx, g, &body);

  if (rclauses)
    OMP_CLAUSE_DECL (rclauses) = rtmp;

  /* Add OpenACC joining and reduction markers just after the loop.  */
  if (oacc_tail)
    gimple_seq_add_seq (&body, oacc_tail);

  pop_gimplify_context (new_stmt);

  gimple_bind_append_vars (new_stmt, ctx->block_vars);
  maybe_remove_omp_member_access_dummy_vars (new_stmt);
  BLOCK_VARS (block) = gimple_bind_vars (new_stmt);
  if (BLOCK_VARS (block))
    TREE_USED (block) = 1;

  /* Install the lowered sequence into the replacement bind and detach the
     now-consumed body/pre-body from the original statement.  */
  gimple_bind_set_body (new_stmt, body);
  gimple_omp_set_body (stmt, NULL);
  gimple_omp_for_set_pre_body (stmt, NULL);
}
11836 
11837 /* Callback for walk_stmts.  Check if the current statement only contains
11838    GIMPLE_OMP_FOR or GIMPLE_OMP_SECTIONS.  */
11839 
11840 static tree
check_combined_parallel(gimple_stmt_iterator * gsi_p,bool * handled_ops_p,struct walk_stmt_info * wi)11841 check_combined_parallel (gimple_stmt_iterator *gsi_p,
11842     			 bool *handled_ops_p,
11843     			 struct walk_stmt_info *wi)
11844 {
11845   int *info = (int *) wi->info;
11846   gimple *stmt = gsi_stmt (*gsi_p);
11847 
11848   *handled_ops_p = true;
11849   switch (gimple_code (stmt))
11850     {
11851     WALK_SUBSTMTS;
11852 
11853     case GIMPLE_DEBUG:
11854       break;
11855     case GIMPLE_OMP_FOR:
11856     case GIMPLE_OMP_SECTIONS:
11857       *info = *info == 0 ? 1 : -1;
11858       break;
11859     default:
11860       *info = -1;
11861       break;
11862     }
11863   return NULL;
11864 }
11865 
struct omp_taskcopy_context
{
  /* This field must be at the beginning, as we do "inheritance": Some
     callback functions for tree-inline.c (e.g., omp_copy_decl)
     receive a copy_body_data pointer that is up-casted to an
     omp_context pointer.  */
  copy_body_data cb;
  /* The OMP context of the task whose copy function is being built.  */
  omp_context *ctx;
};
11875 
11876 static tree
task_copyfn_copy_decl(tree var,copy_body_data * cb)11877 task_copyfn_copy_decl (tree var, copy_body_data *cb)
11878 {
11879   struct omp_taskcopy_context *tcctx = (struct omp_taskcopy_context *) cb;
11880 
11881   if (splay_tree_lookup (tcctx->ctx->sfield_map, (splay_tree_key) var))
11882     return create_tmp_var (TREE_TYPE (var));
11883 
11884   return var;
11885 }
11886 
11887 static tree
task_copyfn_remap_type(struct omp_taskcopy_context * tcctx,tree orig_type)11888 task_copyfn_remap_type (struct omp_taskcopy_context *tcctx, tree orig_type)
11889 {
11890   tree name, new_fields = NULL, type, f;
11891 
11892   type = lang_hooks.types.make_type (RECORD_TYPE);
11893   name = DECL_NAME (TYPE_NAME (orig_type));
11894   name = build_decl (gimple_location (tcctx->ctx->stmt),
11895 		     TYPE_DECL, name, type);
11896   TYPE_NAME (type) = name;
11897 
11898   for (f = TYPE_FIELDS (orig_type); f ; f = TREE_CHAIN (f))
11899     {
11900       tree new_f = copy_node (f);
11901       DECL_CONTEXT (new_f) = type;
11902       TREE_TYPE (new_f) = remap_type (TREE_TYPE (f), &tcctx->cb);
11903       TREE_CHAIN (new_f) = new_fields;
11904       walk_tree (&DECL_SIZE (new_f), copy_tree_body_r, &tcctx->cb, NULL);
11905       walk_tree (&DECL_SIZE_UNIT (new_f), copy_tree_body_r, &tcctx->cb, NULL);
11906       walk_tree (&DECL_FIELD_OFFSET (new_f), copy_tree_body_r,
11907 		 &tcctx->cb, NULL);
11908       new_fields = new_f;
11909       tcctx->cb.decl_map->put (f, new_f);
11910     }
11911   TYPE_FIELDS (type) = nreverse (new_fields);
11912   layout_type (type);
11913   return type;
11914 }
11915 
11916 /* Create task copyfn.  */
11917 
11918 static void
create_task_copyfn(gomp_task * task_stmt,omp_context * ctx)11919 create_task_copyfn (gomp_task *task_stmt, omp_context *ctx)
11920 {
11921   struct function *child_cfun;
11922   tree child_fn, t, c, src, dst, f, sf, arg, sarg, decl;
11923   tree record_type, srecord_type, bind, list;
11924   bool record_needs_remap = false, srecord_needs_remap = false;
11925   splay_tree_node n;
11926   struct omp_taskcopy_context tcctx;
11927   location_t loc = gimple_location (task_stmt);
11928   size_t looptempno = 0;
11929 
11930   child_fn = gimple_omp_task_copy_fn (task_stmt);
11931   child_cfun = DECL_STRUCT_FUNCTION (child_fn);
11932   gcc_assert (child_cfun->cfg == NULL);
11933   DECL_SAVED_TREE (child_fn) = alloc_stmt_list ();
11934 
11935   /* Reset DECL_CONTEXT on function arguments.  */
11936   for (t = DECL_ARGUMENTS (child_fn); t; t = DECL_CHAIN (t))
11937     DECL_CONTEXT (t) = child_fn;
11938 
11939   /* Populate the function.  */
11940   push_gimplify_context ();
11941   push_cfun (child_cfun);
11942 
11943   bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
11944   TREE_SIDE_EFFECTS (bind) = 1;
11945   list = NULL;
11946   DECL_SAVED_TREE (child_fn) = bind;
11947   DECL_SOURCE_LOCATION (child_fn) = gimple_location (task_stmt);
11948 
11949   /* Remap src and dst argument types if needed.  */
11950   record_type = ctx->record_type;
11951   srecord_type = ctx->srecord_type;
11952   for (f = TYPE_FIELDS (record_type); f ; f = DECL_CHAIN (f))
11953     if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
11954       {
11955 	record_needs_remap = true;
11956 	break;
11957       }
11958   for (f = TYPE_FIELDS (srecord_type); f ; f = DECL_CHAIN (f))
11959     if (variably_modified_type_p (TREE_TYPE (f), ctx->cb.src_fn))
11960       {
11961 	srecord_needs_remap = true;
11962 	break;
11963       }
11964 
11965   if (record_needs_remap || srecord_needs_remap)
11966     {
11967       memset (&tcctx, '\0', sizeof (tcctx));
11968       tcctx.cb.src_fn = ctx->cb.src_fn;
11969       tcctx.cb.dst_fn = child_fn;
11970       tcctx.cb.src_node = cgraph_node::get (tcctx.cb.src_fn);
11971       gcc_checking_assert (tcctx.cb.src_node);
11972       tcctx.cb.dst_node = tcctx.cb.src_node;
11973       tcctx.cb.src_cfun = ctx->cb.src_cfun;
11974       tcctx.cb.copy_decl = task_copyfn_copy_decl;
11975       tcctx.cb.eh_lp_nr = 0;
11976       tcctx.cb.transform_call_graph_edges = CB_CGE_MOVE;
11977       tcctx.cb.decl_map = new hash_map<tree, tree>;
11978       tcctx.ctx = ctx;
11979 
11980       if (record_needs_remap)
11981 	record_type = task_copyfn_remap_type (&tcctx, record_type);
11982       if (srecord_needs_remap)
11983 	srecord_type = task_copyfn_remap_type (&tcctx, srecord_type);
11984     }
11985   else
11986     tcctx.cb.decl_map = NULL;
11987 
11988   arg = DECL_ARGUMENTS (child_fn);
11989   TREE_TYPE (arg) = build_pointer_type (record_type);
11990   sarg = DECL_CHAIN (arg);
11991   TREE_TYPE (sarg) = build_pointer_type (srecord_type);
11992 
11993   /* First pass: initialize temporaries used in record_type and srecord_type
11994      sizes and field offsets.  */
11995   if (tcctx.cb.decl_map)
11996     for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
11997       if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
11998 	{
11999 	  tree *p;
12000 
12001 	  decl = OMP_CLAUSE_DECL (c);
12002 	  p = tcctx.cb.decl_map->get (decl);
12003 	  if (p == NULL)
12004 	    continue;
12005 	  n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
12006 	  sf = (tree) n->value;
12007 	  sf = *tcctx.cb.decl_map->get (sf);
12008 	  src = build_simple_mem_ref_loc (loc, sarg);
12009 	  src = omp_build_component_ref (src, sf);
12010 	  t = build2 (MODIFY_EXPR, TREE_TYPE (*p), *p, src);
12011 	  append_to_statement_list (t, &list);
12012 	}
12013 
12014   /* Second pass: copy shared var pointers and copy construct non-VLA
12015      firstprivate vars.  */
12016   for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
12017     switch (OMP_CLAUSE_CODE (c))
12018       {
12019 	splay_tree_key key;
12020       case OMP_CLAUSE_SHARED:
12021 	decl = OMP_CLAUSE_DECL (c);
12022 	key = (splay_tree_key) decl;
12023 	if (OMP_CLAUSE_SHARED_FIRSTPRIVATE (c))
12024 	  key = (splay_tree_key) &DECL_UID (decl);
12025 	n = splay_tree_lookup (ctx->field_map, key);
12026 	if (n == NULL)
12027 	  break;
12028 	f = (tree) n->value;
12029 	if (tcctx.cb.decl_map)
12030 	  f = *tcctx.cb.decl_map->get (f);
12031 	n = splay_tree_lookup (ctx->sfield_map, key);
12032 	sf = (tree) n->value;
12033 	if (tcctx.cb.decl_map)
12034 	  sf = *tcctx.cb.decl_map->get (sf);
12035 	src = build_simple_mem_ref_loc (loc, sarg);
12036 	src = omp_build_component_ref (src, sf);
12037 	dst = build_simple_mem_ref_loc (loc, arg);
12038 	dst = omp_build_component_ref (dst, f);
12039 	t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
12040 	append_to_statement_list (t, &list);
12041 	break;
12042       case OMP_CLAUSE_REDUCTION:
12043       case OMP_CLAUSE_IN_REDUCTION:
12044 	decl = OMP_CLAUSE_DECL (c);
12045 	if (TREE_CODE (decl) == MEM_REF)
12046 	  {
12047 	    decl = TREE_OPERAND (decl, 0);
12048 	    if (TREE_CODE (decl) == POINTER_PLUS_EXPR)
12049 	      decl = TREE_OPERAND (decl, 0);
12050 	    if (TREE_CODE (decl) == INDIRECT_REF
12051 		|| TREE_CODE (decl) == ADDR_EXPR)
12052 	      decl = TREE_OPERAND (decl, 0);
12053 	  }
12054 	key = (splay_tree_key) decl;
12055 	n = splay_tree_lookup (ctx->field_map, key);
12056 	if (n == NULL)
12057 	  break;
12058 	f = (tree) n->value;
12059 	if (tcctx.cb.decl_map)
12060 	  f = *tcctx.cb.decl_map->get (f);
12061 	n = splay_tree_lookup (ctx->sfield_map, key);
12062 	sf = (tree) n->value;
12063 	if (tcctx.cb.decl_map)
12064 	  sf = *tcctx.cb.decl_map->get (sf);
12065 	src = build_simple_mem_ref_loc (loc, sarg);
12066 	src = omp_build_component_ref (src, sf);
12067 	if (decl != OMP_CLAUSE_DECL (c)
12068 	    && TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
12069 	    && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE)
12070 	  src = build_simple_mem_ref_loc (loc, src);
12071 	dst = build_simple_mem_ref_loc (loc, arg);
12072 	dst = omp_build_component_ref (dst, f);
12073 	t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
12074 	append_to_statement_list (t, &list);
12075 	break;
12076       case OMP_CLAUSE__LOOPTEMP_:
12077 	/* Fields for first two _looptemp_ clauses are initialized by
12078 	   GOMP_taskloop*, the rest are handled like firstprivate.  */
12079         if (looptempno < 2)
12080 	  {
12081 	    looptempno++;
12082 	    break;
12083 	  }
12084 	/* FALLTHRU */
12085       case OMP_CLAUSE__REDUCTEMP_:
12086       case OMP_CLAUSE_FIRSTPRIVATE:
12087 	decl = OMP_CLAUSE_DECL (c);
12088 	if (is_variable_sized (decl))
12089 	  break;
12090 	n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
12091 	if (n == NULL)
12092 	  break;
12093 	f = (tree) n->value;
12094 	if (tcctx.cb.decl_map)
12095 	  f = *tcctx.cb.decl_map->get (f);
12096 	n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
12097 	if (n != NULL)
12098 	  {
12099 	    sf = (tree) n->value;
12100 	    if (tcctx.cb.decl_map)
12101 	      sf = *tcctx.cb.decl_map->get (sf);
12102 	    src = build_simple_mem_ref_loc (loc, sarg);
12103 	    src = omp_build_component_ref (src, sf);
12104 	    if (use_pointer_for_field (decl, NULL)
12105 		|| omp_privatize_by_reference (decl))
12106 	      src = build_simple_mem_ref_loc (loc, src);
12107 	  }
12108 	else
12109 	  src = decl;
12110 	dst = build_simple_mem_ref_loc (loc, arg);
12111 	dst = omp_build_component_ref (dst, f);
12112 	if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FIRSTPRIVATE)
12113 	  t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
12114 	else
12115 	  {
12116 	    if (ctx->allocate_map)
12117 	      if (tree *allocatorp = ctx->allocate_map->get (decl))
12118 		{
12119 		  tree allocator = *allocatorp;
12120 		  HOST_WIDE_INT ialign = 0;
12121 		  if (TREE_CODE (allocator) == TREE_LIST)
12122 		    {
12123 		      ialign = tree_to_uhwi (TREE_VALUE (allocator));
12124 		      allocator = TREE_PURPOSE (allocator);
12125 		    }
12126 		  if (TREE_CODE (allocator) != INTEGER_CST)
12127 		    {
12128 		      n = splay_tree_lookup (ctx->sfield_map,
12129 					     (splay_tree_key) allocator);
12130 		      allocator = (tree) n->value;
12131 		      if (tcctx.cb.decl_map)
12132 			allocator = *tcctx.cb.decl_map->get (allocator);
12133 		      tree a = build_simple_mem_ref_loc (loc, sarg);
12134 		      allocator = omp_build_component_ref (a, allocator);
12135 		    }
12136 		  allocator = fold_convert (pointer_sized_int_node, allocator);
12137 		  tree a = builtin_decl_explicit (BUILT_IN_GOMP_ALLOC);
12138 		  tree align = build_int_cst (size_type_node,
12139 					      MAX (ialign,
12140 						   DECL_ALIGN_UNIT (decl)));
12141 		  tree sz = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (dst)));
12142 		  tree ptr = build_call_expr_loc (loc, a, 3, align, sz,
12143 						  allocator);
12144 		  ptr = fold_convert (TREE_TYPE (dst), ptr);
12145 		  t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, ptr);
12146 		  append_to_statement_list (t, &list);
12147 		  dst = build_simple_mem_ref_loc (loc, dst);
12148 		}
12149 	    t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
12150 	  }
12151 	append_to_statement_list (t, &list);
12152 	break;
12153       case OMP_CLAUSE_PRIVATE:
12154 	if (! OMP_CLAUSE_PRIVATE_OUTER_REF (c))
12155 	  break;
12156 	decl = OMP_CLAUSE_DECL (c);
12157 	n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
12158 	f = (tree) n->value;
12159 	if (tcctx.cb.decl_map)
12160 	  f = *tcctx.cb.decl_map->get (f);
12161 	n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
12162 	if (n != NULL)
12163 	  {
12164 	    sf = (tree) n->value;
12165 	    if (tcctx.cb.decl_map)
12166 	      sf = *tcctx.cb.decl_map->get (sf);
12167 	    src = build_simple_mem_ref_loc (loc, sarg);
12168 	    src = omp_build_component_ref (src, sf);
12169 	    if (use_pointer_for_field (decl, NULL))
12170 	      src = build_simple_mem_ref_loc (loc, src);
12171 	  }
12172 	else
12173 	  src = decl;
12174 	dst = build_simple_mem_ref_loc (loc, arg);
12175 	dst = omp_build_component_ref (dst, f);
12176 	t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
12177 	append_to_statement_list (t, &list);
12178 	break;
12179       default:
12180 	break;
12181       }
12182 
12183   /* Last pass: handle VLA firstprivates.  */
12184   if (tcctx.cb.decl_map)
12185     for (c = gimple_omp_task_clauses (task_stmt); c; c = OMP_CLAUSE_CHAIN (c))
12186       if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
12187 	{
12188 	  tree ind, ptr, df;
12189 
12190 	  decl = OMP_CLAUSE_DECL (c);
12191 	  if (!is_variable_sized (decl))
12192 	    continue;
12193 	  n = splay_tree_lookup (ctx->field_map, (splay_tree_key) decl);
12194 	  if (n == NULL)
12195 	    continue;
12196 	  f = (tree) n->value;
12197 	  f = *tcctx.cb.decl_map->get (f);
12198 	  gcc_assert (DECL_HAS_VALUE_EXPR_P (decl));
12199 	  ind = DECL_VALUE_EXPR (decl);
12200 	  gcc_assert (TREE_CODE (ind) == INDIRECT_REF);
12201 	  gcc_assert (DECL_P (TREE_OPERAND (ind, 0)));
12202 	  n = splay_tree_lookup (ctx->sfield_map,
12203 				 (splay_tree_key) TREE_OPERAND (ind, 0));
12204 	  sf = (tree) n->value;
12205 	  sf = *tcctx.cb.decl_map->get (sf);
12206 	  src = build_simple_mem_ref_loc (loc, sarg);
12207 	  src = omp_build_component_ref (src, sf);
12208 	  src = build_simple_mem_ref_loc (loc, src);
12209 	  dst = build_simple_mem_ref_loc (loc, arg);
12210 	  dst = omp_build_component_ref (dst, f);
12211 	  t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
12212 	  append_to_statement_list (t, &list);
12213 	  n = splay_tree_lookup (ctx->field_map,
12214 				 (splay_tree_key) TREE_OPERAND (ind, 0));
12215 	  df = (tree) n->value;
12216 	  df = *tcctx.cb.decl_map->get (df);
12217 	  ptr = build_simple_mem_ref_loc (loc, arg);
12218 	  ptr = omp_build_component_ref (ptr, df);
12219 	  t = build2 (MODIFY_EXPR, TREE_TYPE (ptr), ptr,
12220 		      build_fold_addr_expr_loc (loc, dst));
12221 	  append_to_statement_list (t, &list);
12222 	}
12223 
12224   t = build1 (RETURN_EXPR, void_type_node, NULL);
12225   append_to_statement_list (t, &list);
12226 
12227   if (tcctx.cb.decl_map)
12228     delete tcctx.cb.decl_map;
12229   pop_gimplify_context (NULL);
12230   BIND_EXPR_BODY (bind) = list;
12231   pop_cfun ();
12232 }
12233 
/* Lower OMP_CLAUSE_DEPEND clauses found in *PCLAUSES into the flat
   pointer-array representation consumed by the libgomp task routines.
   Initialization statements for the array are appended to *ISEQ and a
   clobber ending the array's lifetime is appended to *OSEQ.  A fresh
   OMP_CLAUSE_DEPEND clause with kind OMP_CLAUSE_DEPEND_LAST, whose decl
   is the address of the array, is prepended to *PCLAUSES so later
   expansion can find the already-lowered form.  */

12234 static void
lower_depend_clauses(tree * pclauses,gimple_seq * iseq,gimple_seq * oseq)12235 lower_depend_clauses (tree *pclauses, gimple_seq *iseq, gimple_seq *oseq)
12236 {
12237   tree c, clauses;
12238   gimple *g;
  /* CNT counts dependences by kind: cnt[0] = out/inout, cnt[1] =
     mutexinoutset, cnt[2] = in, cnt[3] = depobj.  IDX is the number of
     header slots at the start of the array: 2 for the legacy layout,
     bumped to 5 below when mutexinoutset or depobj entries require the
     extended layout.  */
12239   size_t cnt[4] = { 0, 0, 0, 0 }, idx = 2, i;
12240 
12241   clauses = omp_find_clause (*pclauses, OMP_CLAUSE_DEPEND);
12242   gcc_assert (clauses);
  /* First pass: tally the depend clauses by kind.  */
12243   for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
12244     if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
12245       switch (OMP_CLAUSE_DEPEND_KIND (c))
12246 	{
12247 	case OMP_CLAUSE_DEPEND_LAST:
12248 	  /* Lowering already done at gimplification.  */
12249 	  return;
12250 	case OMP_CLAUSE_DEPEND_IN:
12251 	  cnt[2]++;
12252 	  break;
12253 	case OMP_CLAUSE_DEPEND_OUT:
12254 	case OMP_CLAUSE_DEPEND_INOUT:
12255 	  cnt[0]++;
12256 	  break;
12257 	case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
12258 	  cnt[1]++;
12259 	  break;
12260 	case OMP_CLAUSE_DEPEND_DEPOBJ:
12261 	  cnt[3]++;
12262 	  break;
12263 	case OMP_CLAUSE_DEPEND_SOURCE:
12264 	case OMP_CLAUSE_DEPEND_SINK:
	  /* source/sink dependences belong to ordered constructs and must
	     not reach this point.  */
12265 	  /* FALLTHRU */
12266 	default:
12267 	  gcc_unreachable ();
12268 	}
  /* mutexinoutset/depobj kinds cannot be expressed in the 2-slot header,
     so switch to the extended 5-slot layout.  */
12269   if (cnt[1] || cnt[3])
12270     idx = 5;
12271   size_t total = cnt[0] + cnt[1] + cnt[2] + cnt[3];
  /* Build the temporary array: IDX header slots followed by one pointer
     per dependence.  It must be addressable because its address is what
     gets passed on to the runtime (see the OMP_CLAUSE_DECL below).  */
12272   tree type = build_array_type_nelts (ptr_type_node, total + idx);
12273   tree array = create_tmp_var (type);
12274   TREE_ADDRESSABLE (array) = 1;
12275   tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
12276 		   NULL_TREE);
  /* Extended layout: array[0] = 0 marks the new format, array[1] holds
     the total.  Legacy layout stores the total directly in array[0].  */
12277   if (idx == 5)
12278     {
12279       g = gimple_build_assign (r, build_int_cst (ptr_type_node, 0));
12280       gimple_seq_add_stmt (iseq, g);
12281       r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
12282 		  NULL_TREE);
12283     }
12284   g = gimple_build_assign (r, build_int_cst (ptr_type_node, total));
12285   gimple_seq_add_stmt (iseq, g);
  /* Store the per-kind counts: legacy layout records only cnt[0]
     (out/inout) in array[1]; extended layout records cnt[0..2] in
     array[2..4].  */
12286   for (i = 0; i < (idx == 5 ? 3 : 1); i++)
12287     {
12288       r = build4 (ARRAY_REF, ptr_type_node, array,
12289 		  size_int (i + 1 + (idx == 5)), NULL_TREE, NULL_TREE);
12290       g = gimple_build_assign (r, build_int_cst (ptr_type_node, cnt[i]));
12291       gimple_seq_add_stmt (iseq, g);
12292     }
  /* Second pass: emit the dependence addresses grouped by kind, in the
     order out/inout, mutexinoutset, in, depobj.  IDX advances past the
     header as entries are written.  */
12293   for (i = 0; i < 4; i++)
12294     {
12295       if (cnt[i] == 0)
12296 	continue;
12297       for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
12298 	if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_DEPEND)
12299 	  continue;
12300 	else
12301 	  {
	    /* Skip clauses whose kind does not match group I; the
	       'continue's here advance the inner clause loop.  */
12302 	    switch (OMP_CLAUSE_DEPEND_KIND (c))
12303 	      {
12304 	      case OMP_CLAUSE_DEPEND_IN:
12305 		if (i != 2)
12306 		  continue;
12307 		break;
12308 	      case OMP_CLAUSE_DEPEND_OUT:
12309 	      case OMP_CLAUSE_DEPEND_INOUT:
12310 		if (i != 0)
12311 		  continue;
12312 		break;
12313 	      case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
12314 		if (i != 1)
12315 		  continue;
12316 		break;
12317 	      case OMP_CLAUSE_DEPEND_DEPOBJ:
12318 		if (i != 3)
12319 		  continue;
12320 		break;
12321 	      default:
12322 		gcc_unreachable ();
12323 	      }
12324 	    tree t = OMP_CLAUSE_DECL (c);
12325 	    t = fold_convert (ptr_type_node, t);
12326 	    gimplify_expr (&t, iseq, NULL, is_gimple_val, fb_rvalue);
12327 	    r = build4 (ARRAY_REF, ptr_type_node, array, size_int (idx++),
12328 			NULL_TREE, NULL_TREE);
12329 	    g = gimple_build_assign (r, t);
12330 	    gimple_seq_add_stmt (iseq, g);
12331 	  }
12332     }
  /* Replace the original depend clauses logically by prepending a
     DEPEND_LAST clause carrying the array's address; gimplification-time
     consumers recognize that kind as already lowered (see the early
     return above).  */
12333   c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
12334   OMP_CLAUSE_DEPEND_KIND (c) = OMP_CLAUSE_DEPEND_LAST;
12335   OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
12336   OMP_CLAUSE_CHAIN (c) = *pclauses;
12337   *pclauses = c;
  /* End the array's lifetime after the construct.  */
12338   tree clobber = build_clobber (type);
12339   g = gimple_build_assign (array, clobber);
12340   gimple_seq_add_stmt (oseq, g);
12341 }
12342 
12343 /* Lower the OpenMP parallel or task directive in the current statement
12344    in GSI_P.  CTX holds context information for the directive.  */
12345 
12346 static void
lower_omp_taskreg(gimple_stmt_iterator * gsi_p,omp_context * ctx)12347 lower_omp_taskreg (gimple_stmt_iterator *gsi_p, omp_context *ctx)
12348 {
12349   tree clauses;
12350   tree child_fn, t;
12351   gimple *stmt = gsi_stmt (*gsi_p);
12352   gbind *par_bind, *bind, *dep_bind = NULL;
12353   gimple_seq par_body;
12354   location_t loc = gimple_location (stmt);
12355 
12356   clauses = gimple_omp_taskreg_clauses (stmt);
  /* A taskwait-style GIMPLE_OMP_TASK carries no body to lower.  */
12357   if (gimple_code (stmt) == GIMPLE_OMP_TASK
12358       && gimple_omp_task_taskwait_p (stmt))
12359     {
12360       par_bind = NULL;
12361       par_body = NULL;
12362     }
12363   else
12364     {
12365       par_bind
12366 	= as_a <gbind *> (gimple_seq_first_stmt (gimple_omp_body (stmt)));
12367       par_body = gimple_bind_body (par_bind);
12368     }
12369   child_fn = ctx->cb.dst_fn;
  /* For a plain parallel, walk the body with check_combined_parallel;
     if exactly one match is found, mark the region as a combined
     parallel (enables the combined parallel+workshare runtime entry
     points at expansion time).  */
12370   if (gimple_code (stmt) == GIMPLE_OMP_PARALLEL
12371       && !gimple_omp_parallel_combined_p (stmt))
12372     {
12373       struct walk_stmt_info wi;
12374       int ws_num = 0;
12375 
12376       memset (&wi, 0, sizeof (wi));
12377       wi.info = &ws_num;
12378       wi.val_only = true;
12379       walk_gimple_seq (par_body, check_combined_parallel, NULL, &wi);
12380       if (ws_num == 1)
12381 	gimple_omp_parallel_set_combined_p (stmt, true);
12382     }
  /* Lower any depend clauses on a task into the libgomp pointer-array
     form; DEP_BIND will wrap the whole construct so the array's
     initialization (dep_ilist) and clobber (dep_olist) bracket it.  */
12383   gimple_seq dep_ilist = NULL;
12384   gimple_seq dep_olist = NULL;
12385   if (gimple_code (stmt) == GIMPLE_OMP_TASK
12386       && omp_find_clause (clauses, OMP_CLAUSE_DEPEND))
12387     {
12388       push_gimplify_context ();
12389       dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
12390       lower_depend_clauses (gimple_omp_task_clauses_ptr (stmt),
12391 			    &dep_ilist, &dep_olist);
12392     }
12393 
  /* Taskwait-style tasks are done once their depend clauses are lowered:
     just wrap the statement between the dependence setup/teardown.  */
12394   if (gimple_code (stmt) == GIMPLE_OMP_TASK
12395       && gimple_omp_task_taskwait_p (stmt))
12396     {
12397       if (dep_bind)
12398 	{
12399 	  gsi_replace (gsi_p, dep_bind, true);
12400 	  gimple_bind_add_seq (dep_bind, dep_ilist);
12401 	  gimple_bind_add_stmt (dep_bind, stmt);
12402 	  gimple_bind_add_seq (dep_bind, dep_olist);
12403 	  pop_gimplify_context (dep_bind);
12404 	}
12405       return;
12406     }
12407 
  /* A task with an srecord_type needs the helper function that copies
     firstprivate etc. data into the task's data block.  */
12408   if (ctx->srecord_type)
12409     create_task_copyfn (as_a <gomp_task *> (stmt), ctx);
12410 
  /* Set up task reductions for taskloop-with-reduction or a parallel
     carrying a _REDUCTEMP_ clause; the resulting setup/teardown
     sequences go into DEP_BIND alongside the dependence code.  */
12411   gimple_seq tskred_ilist = NULL;
12412   gimple_seq tskred_olist = NULL;
12413   if ((is_task_ctx (ctx)
12414        && gimple_omp_task_taskloop_p (ctx->stmt)
12415        && omp_find_clause (gimple_omp_task_clauses (ctx->stmt),
12416 			   OMP_CLAUSE_REDUCTION))
12417       || (is_parallel_ctx (ctx)
12418 	  && omp_find_clause (gimple_omp_parallel_clauses (stmt),
12419 			      OMP_CLAUSE__REDUCTEMP_)))
12420     {
12421       if (dep_bind == NULL)
12422 	{
12423 	  push_gimplify_context ();
12424 	  dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
12425 	}
12426       lower_omp_task_reductions (ctx, is_task_ctx (ctx) ? OMP_TASKLOOP
12427 							: OMP_PARALLEL,
12428 				 gimple_omp_taskreg_clauses (ctx->stmt),
12429 				 &tskred_ilist, &tskred_olist);
12430     }
12431 
12432   push_gimplify_context ();
12433 
  /* Lower the data-sharing clauses (par_ilist/par_olist bracket the
     body) and recursively lower the region body itself.  Reduction
     clauses are handled here only for parallel; task reductions were
     dealt with above.  */
12434   gimple_seq par_olist = NULL;
12435   gimple_seq par_ilist = NULL;
12436   gimple_seq par_rlist = NULL;
12437   lower_rec_input_clauses (clauses, &par_ilist, &par_olist, ctx, NULL);
12438   lower_omp (&par_body, ctx);
12439   if (gimple_code (stmt) != GIMPLE_OMP_TASK)
12440     lower_reduction_clauses (clauses, &par_rlist, NULL, ctx);
12441 
12442   /* Declare all the variables created by mapping and the variables
12443      declared in the scope of the parallel body.  */
12444   record_vars_into (ctx->block_vars, child_fn);
12445   maybe_remove_omp_member_access_dummy_vars (par_bind);
12446   record_vars_into (gimple_bind_vars (par_bind), child_fn);
12447 
  /* The sender decl (.omp_data_o) is the block of data marshalled from
     the parent to the child function; it becomes the construct's data
     argument.  */
12448   if (ctx->record_type)
12449     {
12450       ctx->sender_decl
12451 	= create_tmp_var (ctx->srecord_type ? ctx->srecord_type
12452 			  : ctx->record_type, ".omp_data_o");
12453       DECL_NAMELESS (ctx->sender_decl) = 1;
12454       TREE_ADDRESSABLE (ctx->sender_decl) = 1;
12455       gimple_omp_taskreg_set_data_arg (stmt, ctx->sender_decl);
12456     }
12457 
  /* ILIST/OLIST fill and read back the sender block around the
     construct in the parent function.  */
12458   gimple_seq olist = NULL;
12459   gimple_seq ilist = NULL;
12460   lower_send_clauses (clauses, &ilist, &olist, ctx);
12461   lower_send_shared_vars (&ilist, &olist, ctx);
12462 
  /* Once the region is over, the sender block is dead; clobber it.  */
12463   if (ctx->record_type)
12464     {
12465       tree clobber = build_clobber (TREE_TYPE (ctx->sender_decl));
12466       gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
12467 							clobber));
12468     }
12469 
12470   /* Once all the expansions are done, sequence all the different
12471      fragments inside gimple_omp_body.  */
12472 
12473   gimple_seq new_body = NULL;
12474 
12475   if (ctx->record_type)
12476     {
12477       t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
12478       /* fixup_child_record_type might have changed receiver_decl's type.  */
12479       t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
12480       gimple_seq_add_stmt (&new_body,
12481 	  		   gimple_build_assign (ctx->receiver_decl, t));
12482     }
12483 
  /* Child body: input-clause setup, the lowered body, reduction
     combination, the cancellation landing label if any, then the
     output-clause finalization.  */
12484   gimple_seq_add_seq (&new_body, par_ilist);
12485   gimple_seq_add_seq (&new_body, par_body);
12486   gimple_seq_add_seq (&new_body, par_rlist);
12487   if (ctx->cancellable)
12488     gimple_seq_add_stmt (&new_body, gimple_build_label (ctx->cancel_label));
12489   gimple_seq_add_seq (&new_body, par_olist);
12490   new_body = maybe_catch_exception (new_body);
12491   if (gimple_code (stmt) == GIMPLE_OMP_TASK)
12492     gimple_seq_add_stmt (&new_body,
12493 			 gimple_build_omp_continue (integer_zero_node,
12494 						    integer_zero_node));
12495   gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
12496   gimple_omp_set_body (stmt, new_body);
12497 
  /* Wrap the construct together with its marshalling code (ILIST/OLIST)
     in a bind, reusing par_bind's BLOCK when there is one.  */
12498   if (dep_bind && gimple_bind_block (par_bind) == NULL_TREE)
12499     bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
12500   else
12501     bind = gimple_build_bind (NULL, NULL, gimple_bind_block (par_bind));
12502   gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
12503   gimple_bind_add_seq (bind, ilist);
12504   gimple_bind_add_stmt (bind, stmt);
12505   gimple_bind_add_seq (bind, olist);
12506 
12507   pop_gimplify_context (NULL);
12508 
  /* If depend clauses or task reductions were present, nest BIND inside
     DEP_BIND between their respective setup and teardown sequences.  */
12509   if (dep_bind)
12510     {
12511       gimple_bind_add_seq (dep_bind, dep_ilist);
12512       gimple_bind_add_seq (dep_bind, tskred_ilist);
12513       gimple_bind_add_stmt (dep_bind, bind);
12514       gimple_bind_add_seq (dep_bind, tskred_olist);
12515       gimple_bind_add_seq (dep_bind, dep_olist);
12516       pop_gimplify_context (dep_bind);
12517     }
12518 }
12519 
12520 /* Lower the GIMPLE_OMP_TARGET in the current statement
12521    in GSI_P.  CTX holds context information for the directive.  */
12522 
12523 static void
lower_omp_target(gimple_stmt_iterator * gsi_p,omp_context * ctx)12524 lower_omp_target (gimple_stmt_iterator *gsi_p, omp_context *ctx)
12525 {
12526   tree clauses;
12527   tree child_fn, t, c;
12528   gomp_target *stmt = as_a <gomp_target *> (gsi_stmt (*gsi_p));
12529   gbind *tgt_bind, *bind, *dep_bind = NULL;
12530   gimple_seq tgt_body, olist, ilist, fplist, new_body;
12531   location_t loc = gimple_location (stmt);
12532   bool offloaded, data_region;
12533   unsigned int map_cnt = 0;
12534   tree in_reduction_clauses = NULL_TREE;
12535 
12536   offloaded = is_gimple_omp_offloaded (stmt);
12537   switch (gimple_omp_target_kind (stmt))
12538     {
12539     case GF_OMP_TARGET_KIND_REGION:
12540       tree *p, *q;
12541       q = &in_reduction_clauses;
12542       for (p = gimple_omp_target_clauses_ptr (stmt); *p; )
12543 	if (OMP_CLAUSE_CODE (*p) == OMP_CLAUSE_IN_REDUCTION)
12544 	  {
12545 	    *q = *p;
12546 	    q = &OMP_CLAUSE_CHAIN (*q);
12547 	    *p = OMP_CLAUSE_CHAIN (*p);
12548 	  }
12549 	else
12550 	  p = &OMP_CLAUSE_CHAIN (*p);
12551       *q = NULL_TREE;
12552       *p = in_reduction_clauses;
12553       /* FALLTHRU */
12554     case GF_OMP_TARGET_KIND_UPDATE:
12555     case GF_OMP_TARGET_KIND_ENTER_DATA:
12556     case GF_OMP_TARGET_KIND_EXIT_DATA:
12557     case GF_OMP_TARGET_KIND_OACC_PARALLEL:
12558     case GF_OMP_TARGET_KIND_OACC_KERNELS:
12559     case GF_OMP_TARGET_KIND_OACC_SERIAL:
12560     case GF_OMP_TARGET_KIND_OACC_UPDATE:
12561     case GF_OMP_TARGET_KIND_OACC_ENTER_DATA:
12562     case GF_OMP_TARGET_KIND_OACC_EXIT_DATA:
12563     case GF_OMP_TARGET_KIND_OACC_DECLARE:
12564     case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_PARALLELIZED:
12565     case GF_OMP_TARGET_KIND_OACC_PARALLEL_KERNELS_GANG_SINGLE:
12566       data_region = false;
12567       break;
12568     case GF_OMP_TARGET_KIND_DATA:
12569     case GF_OMP_TARGET_KIND_OACC_DATA:
12570     case GF_OMP_TARGET_KIND_OACC_HOST_DATA:
12571     case GF_OMP_TARGET_KIND_OACC_DATA_KERNELS:
12572       data_region = true;
12573       break;
12574     default:
12575       gcc_unreachable ();
12576     }
12577 
12578   clauses = gimple_omp_target_clauses (stmt);
12579 
12580   gimple_seq dep_ilist = NULL;
12581   gimple_seq dep_olist = NULL;
12582   bool has_depend = omp_find_clause (clauses, OMP_CLAUSE_DEPEND) != NULL_TREE;
12583   if (has_depend || in_reduction_clauses)
12584     {
12585       push_gimplify_context ();
12586       dep_bind = gimple_build_bind (NULL, NULL, make_node (BLOCK));
12587       if (has_depend)
12588 	lower_depend_clauses (gimple_omp_target_clauses_ptr (stmt),
12589 			      &dep_ilist, &dep_olist);
12590       if (in_reduction_clauses)
12591 	lower_rec_input_clauses (in_reduction_clauses, &dep_ilist, &dep_olist,
12592 				 ctx, NULL);
12593     }
12594 
12595   tgt_bind = NULL;
12596   tgt_body = NULL;
12597   if (offloaded)
12598     {
12599       tgt_bind = gimple_seq_first_stmt_as_a_bind (gimple_omp_body (stmt));
12600       tgt_body = gimple_bind_body (tgt_bind);
12601     }
12602   else if (data_region)
12603     tgt_body = gimple_omp_body (stmt);
12604   child_fn = ctx->cb.dst_fn;
12605 
12606   push_gimplify_context ();
12607   fplist = NULL;
12608 
12609   for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
12610     switch (OMP_CLAUSE_CODE (c))
12611       {
12612 	tree var, x;
12613 
12614       default:
12615 	break;
12616       case OMP_CLAUSE_MAP:
12617 #if CHECKING_P
12618 	/* First check what we're prepared to handle in the following.  */
12619 	switch (OMP_CLAUSE_MAP_KIND (c))
12620 	  {
12621 	  case GOMP_MAP_ALLOC:
12622 	  case GOMP_MAP_TO:
12623 	  case GOMP_MAP_FROM:
12624 	  case GOMP_MAP_TOFROM:
12625 	  case GOMP_MAP_POINTER:
12626 	  case GOMP_MAP_TO_PSET:
12627 	  case GOMP_MAP_DELETE:
12628 	  case GOMP_MAP_RELEASE:
12629 	  case GOMP_MAP_ALWAYS_TO:
12630 	  case GOMP_MAP_ALWAYS_FROM:
12631 	  case GOMP_MAP_ALWAYS_TOFROM:
12632 	  case GOMP_MAP_FIRSTPRIVATE_POINTER:
12633 	  case GOMP_MAP_FIRSTPRIVATE_REFERENCE:
12634 	  case GOMP_MAP_STRUCT:
12635 	  case GOMP_MAP_ALWAYS_POINTER:
12636 	  case GOMP_MAP_ATTACH:
12637 	  case GOMP_MAP_DETACH:
12638 	    break;
12639 	  case GOMP_MAP_IF_PRESENT:
12640 	  case GOMP_MAP_FORCE_ALLOC:
12641 	  case GOMP_MAP_FORCE_TO:
12642 	  case GOMP_MAP_FORCE_FROM:
12643 	  case GOMP_MAP_FORCE_TOFROM:
12644 	  case GOMP_MAP_FORCE_PRESENT:
12645 	  case GOMP_MAP_FORCE_DEVICEPTR:
12646 	  case GOMP_MAP_DEVICE_RESIDENT:
12647 	  case GOMP_MAP_LINK:
12648 	  case GOMP_MAP_FORCE_DETACH:
12649 	    gcc_assert (is_gimple_omp_oacc (stmt));
12650 	    break;
12651 	  default:
12652 	    gcc_unreachable ();
12653 	  }
12654 #endif
12655 	  /* FALLTHRU */
12656       case OMP_CLAUSE_TO:
12657       case OMP_CLAUSE_FROM:
12658       oacc_firstprivate:
12659 	var = OMP_CLAUSE_DECL (c);
12660 	if (!DECL_P (var))
12661 	  {
12662 	    if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_MAP
12663 		|| (!OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
12664 		    && (OMP_CLAUSE_MAP_KIND (c)
12665 			!= GOMP_MAP_FIRSTPRIVATE_POINTER)))
12666 	      map_cnt++;
12667 	    continue;
12668 	  }
12669 
12670 	if (DECL_SIZE (var)
12671 	    && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
12672 	  {
12673 	    tree var2 = DECL_VALUE_EXPR (var);
12674 	    gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
12675 	    var2 = TREE_OPERAND (var2, 0);
12676 	    gcc_assert (DECL_P (var2));
12677 	    var = var2;
12678 	  }
12679 
12680 	if (offloaded
12681 	    && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12682 	    && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
12683 		|| OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
12684 	  {
12685 	    if (TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
12686 	      {
12687 		if (is_global_var (maybe_lookup_decl_in_outer_ctx (var, ctx))
12688 		    && varpool_node::get_create (var)->offloadable)
12689 		  continue;
12690 
12691 		tree type = build_pointer_type (TREE_TYPE (var));
12692 		tree new_var = lookup_decl (var, ctx);
12693 		x = create_tmp_var_raw (type, get_name (new_var));
12694 		gimple_add_tmp_var (x);
12695 		x = build_simple_mem_ref (x);
12696 		SET_DECL_VALUE_EXPR (new_var, x);
12697 		DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12698 	      }
12699 	    continue;
12700 	  }
12701 
12702 	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12703 	    && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
12704 		|| OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)
12705 	    && is_omp_target (stmt))
12706 	  {
12707 	    gcc_assert (maybe_lookup_field (c, ctx));
12708 	    map_cnt++;
12709 	    continue;
12710 	  }
12711 
12712 	if (!maybe_lookup_field (var, ctx))
12713 	  continue;
12714 
12715 	/* Don't remap compute constructs' reduction variables, because the
12716 	   intermediate result must be local to each gang.  */
12717 	if (offloaded && !(OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12718 			   && is_gimple_omp_oacc (ctx->stmt)
12719 			   && OMP_CLAUSE_MAP_IN_REDUCTION (c)))
12720 	  {
12721 	    x = build_receiver_ref (var, true, ctx);
12722 	    tree new_var = lookup_decl (var, ctx);
12723 
12724 	    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12725 		&& OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
12726 		&& !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
12727 		&& TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
12728 	      x = build_simple_mem_ref (x);
12729 	    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
12730 	      {
12731 		gcc_assert (is_gimple_omp_oacc (ctx->stmt));
12732 		if (omp_privatize_by_reference (new_var)
12733 		    && (TREE_CODE (TREE_TYPE (new_var)) != POINTER_TYPE
12734 		        || DECL_BY_REFERENCE (var)))
12735 		  {
12736 		    /* Create a local object to hold the instance
12737 		       value.  */
12738 		    tree type = TREE_TYPE (TREE_TYPE (new_var));
12739 		    const char *id = IDENTIFIER_POINTER (DECL_NAME (new_var));
12740 		    tree inst = create_tmp_var (type, id);
12741 		    gimplify_assign (inst, fold_indirect_ref (x), &fplist);
12742 		    x = build_fold_addr_expr (inst);
12743 		  }
12744 		gimplify_assign (new_var, x, &fplist);
12745 	      }
12746 	    else if (DECL_P (new_var))
12747 	      {
12748 		SET_DECL_VALUE_EXPR (new_var, x);
12749 		DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12750 	      }
12751 	    else
12752 	      gcc_unreachable ();
12753 	  }
12754 	map_cnt++;
12755 	break;
12756 
12757       case OMP_CLAUSE_FIRSTPRIVATE:
12758 	gcc_checking_assert (offloaded);
12759 	if (is_gimple_omp_oacc (ctx->stmt))
12760 	  {
12761 	    /* No 'firstprivate' clauses on OpenACC 'kernels'.  */
12762 	    gcc_checking_assert (!is_oacc_kernels (ctx));
12763 	    /* Likewise, on OpenACC 'kernels' decomposed parts.  */
12764 	    gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx));
12765 
12766 	    goto oacc_firstprivate;
12767 	  }
12768 	map_cnt++;
12769 	var = OMP_CLAUSE_DECL (c);
12770 	if (!omp_privatize_by_reference (var)
12771 	    && !is_gimple_reg_type (TREE_TYPE (var)))
12772 	  {
12773 	    tree new_var = lookup_decl (var, ctx);
12774 	    if (is_variable_sized (var))
12775 	      {
12776 		tree pvar = DECL_VALUE_EXPR (var);
12777 		gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
12778 		pvar = TREE_OPERAND (pvar, 0);
12779 		gcc_assert (DECL_P (pvar));
12780 		tree new_pvar = lookup_decl (pvar, ctx);
12781 		x = build_fold_indirect_ref (new_pvar);
12782 		TREE_THIS_NOTRAP (x) = 1;
12783 	      }
12784 	    else
12785 	      x = build_receiver_ref (var, true, ctx);
12786 	    SET_DECL_VALUE_EXPR (new_var, x);
12787 	    DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12788 	  }
12789 	break;
12790 
12791       case OMP_CLAUSE_PRIVATE:
12792 	gcc_checking_assert (offloaded);
12793 	if (is_gimple_omp_oacc (ctx->stmt))
12794 	  {
12795 	    /* No 'private' clauses on OpenACC 'kernels'.  */
12796 	    gcc_checking_assert (!is_oacc_kernels (ctx));
12797 	    /* Likewise, on OpenACC 'kernels' decomposed parts.  */
12798 	    gcc_checking_assert (!is_oacc_kernels_decomposed_part (ctx));
12799 
12800 	    break;
12801 	  }
12802 	var = OMP_CLAUSE_DECL (c);
12803 	if (is_variable_sized (var))
12804 	  {
12805 	    tree new_var = lookup_decl (var, ctx);
12806 	    tree pvar = DECL_VALUE_EXPR (var);
12807 	    gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
12808 	    pvar = TREE_OPERAND (pvar, 0);
12809 	    gcc_assert (DECL_P (pvar));
12810 	    tree new_pvar = lookup_decl (pvar, ctx);
12811 	    x = build_fold_indirect_ref (new_pvar);
12812 	    TREE_THIS_NOTRAP (x) = 1;
12813 	    SET_DECL_VALUE_EXPR (new_var, x);
12814 	    DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12815 	  }
12816 	break;
12817 
12818       case OMP_CLAUSE_USE_DEVICE_PTR:
12819       case OMP_CLAUSE_USE_DEVICE_ADDR:
12820       case OMP_CLAUSE_IS_DEVICE_PTR:
12821 	var = OMP_CLAUSE_DECL (c);
12822 	map_cnt++;
12823 	if (is_variable_sized (var))
12824 	  {
12825 	    tree new_var = lookup_decl (var, ctx);
12826 	    tree pvar = DECL_VALUE_EXPR (var);
12827 	    gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
12828 	    pvar = TREE_OPERAND (pvar, 0);
12829 	    gcc_assert (DECL_P (pvar));
12830 	    tree new_pvar = lookup_decl (pvar, ctx);
12831 	    x = build_fold_indirect_ref (new_pvar);
12832 	    TREE_THIS_NOTRAP (x) = 1;
12833 	    SET_DECL_VALUE_EXPR (new_var, x);
12834 	    DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12835 	  }
12836 	else if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
12837 		  && !omp_privatize_by_reference (var)
12838 		  && !omp_is_allocatable_or_ptr (var)
12839 		  && !lang_hooks.decls.omp_array_data (var, true))
12840 		 || TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
12841 	  {
12842 	    tree new_var = lookup_decl (var, ctx);
12843 	    tree type = build_pointer_type (TREE_TYPE (var));
12844 	    x = create_tmp_var_raw (type, get_name (new_var));
12845 	    gimple_add_tmp_var (x);
12846 	    x = build_simple_mem_ref (x);
12847 	    SET_DECL_VALUE_EXPR (new_var, x);
12848 	    DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12849 	  }
12850 	else
12851 	  {
12852 	    tree new_var = lookup_decl (var, ctx);
12853 	    x = create_tmp_var_raw (TREE_TYPE (new_var), get_name (new_var));
12854 	    gimple_add_tmp_var (x);
12855 	    SET_DECL_VALUE_EXPR (new_var, x);
12856 	    DECL_HAS_VALUE_EXPR_P (new_var) = 1;
12857 	  }
12858 	break;
12859       }
12860 
12861   if (offloaded)
12862     {
12863       target_nesting_level++;
12864       lower_omp (&tgt_body, ctx);
12865       target_nesting_level--;
12866     }
12867   else if (data_region)
12868     lower_omp (&tgt_body, ctx);
12869 
12870   if (offloaded)
12871     {
12872       /* Declare all the variables created by mapping and the variables
12873 	 declared in the scope of the target body.  */
12874       record_vars_into (ctx->block_vars, child_fn);
12875       maybe_remove_omp_member_access_dummy_vars (tgt_bind);
12876       record_vars_into (gimple_bind_vars (tgt_bind), child_fn);
12877     }
12878 
12879   olist = NULL;
12880   ilist = NULL;
12881   if (ctx->record_type)
12882     {
12883       ctx->sender_decl
12884 	= create_tmp_var (ctx->record_type, ".omp_data_arr");
12885       DECL_NAMELESS (ctx->sender_decl) = 1;
12886       TREE_ADDRESSABLE (ctx->sender_decl) = 1;
12887       t = make_tree_vec (3);
12888       TREE_VEC_ELT (t, 0) = ctx->sender_decl;
12889       TREE_VEC_ELT (t, 1)
12890 	= create_tmp_var (build_array_type_nelts (size_type_node, map_cnt),
12891 			  ".omp_data_sizes");
12892       DECL_NAMELESS (TREE_VEC_ELT (t, 1)) = 1;
12893       TREE_ADDRESSABLE (TREE_VEC_ELT (t, 1)) = 1;
12894       TREE_STATIC (TREE_VEC_ELT (t, 1)) = 1;
12895       tree tkind_type = short_unsigned_type_node;
12896       int talign_shift = 8;
12897       TREE_VEC_ELT (t, 2)
12898 	= create_tmp_var (build_array_type_nelts (tkind_type, map_cnt),
12899 			  ".omp_data_kinds");
12900       DECL_NAMELESS (TREE_VEC_ELT (t, 2)) = 1;
12901       TREE_ADDRESSABLE (TREE_VEC_ELT (t, 2)) = 1;
12902       TREE_STATIC (TREE_VEC_ELT (t, 2)) = 1;
12903       gimple_omp_target_set_data_arg (stmt, t);
12904 
12905       vec<constructor_elt, va_gc> *vsize;
12906       vec<constructor_elt, va_gc> *vkind;
12907       vec_alloc (vsize, map_cnt);
12908       vec_alloc (vkind, map_cnt);
12909       unsigned int map_idx = 0;
12910 
12911       for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
12912 	switch (OMP_CLAUSE_CODE (c))
12913 	  {
12914 	    tree ovar, nc, s, purpose, var, x, type;
12915 	    unsigned int talign;
12916 
12917 	  default:
12918 	    break;
12919 
12920 	  case OMP_CLAUSE_MAP:
12921 	  case OMP_CLAUSE_TO:
12922 	  case OMP_CLAUSE_FROM:
12923 	  oacc_firstprivate_map:
12924 	    nc = c;
12925 	    ovar = OMP_CLAUSE_DECL (c);
12926 	    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12927 		&& (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
12928 		    || (OMP_CLAUSE_MAP_KIND (c)
12929 			== GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
12930 	      break;
12931 	    if (!DECL_P (ovar))
12932 	      {
12933 		if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12934 		    && OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c))
12935 		  {
12936 		    nc = OMP_CLAUSE_CHAIN (c);
12937 		    gcc_checking_assert (OMP_CLAUSE_DECL (nc)
12938 					 == get_base_address (ovar));
12939 		    ovar = OMP_CLAUSE_DECL (nc);
12940 		  }
12941 		else
12942 		  {
12943 		    tree x = build_sender_ref (ovar, ctx);
12944 		    tree v = ovar;
12945 		    if (in_reduction_clauses
12946 			&& OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12947 			&& OMP_CLAUSE_MAP_IN_REDUCTION (c))
12948 		      {
12949 			v = unshare_expr (v);
12950 			tree *p = &v;
12951 			while (handled_component_p (*p)
12952 			       || TREE_CODE (*p) == INDIRECT_REF
12953 			       || TREE_CODE (*p) == ADDR_EXPR
12954 			       || TREE_CODE (*p) == MEM_REF
12955 			       || TREE_CODE (*p) == NON_LVALUE_EXPR)
12956 			  p = &TREE_OPERAND (*p, 0);
12957 			tree d = *p;
12958 			if (is_variable_sized (d))
12959 			  {
12960 			    gcc_assert (DECL_HAS_VALUE_EXPR_P (d));
12961 			    d = DECL_VALUE_EXPR (d);
12962 			    gcc_assert (TREE_CODE (d) == INDIRECT_REF);
12963 			    d = TREE_OPERAND (d, 0);
12964 			    gcc_assert (DECL_P (d));
12965 			  }
12966 			splay_tree_key key
12967 			  = (splay_tree_key) &DECL_CONTEXT (d);
12968 			tree nd = (tree) splay_tree_lookup (ctx->field_map,
12969 							    key)->value;
12970 			if (d == *p)
12971 			  *p = nd;
12972 			else
12973 			  *p = build_fold_indirect_ref (nd);
12974 		      }
12975 		    v = build_fold_addr_expr_with_type (v, ptr_type_node);
12976 		    gimplify_assign (x, v, &ilist);
12977 		    nc = NULL_TREE;
12978 		  }
12979 	      }
12980 	    else
12981 	      {
12982 		if (DECL_SIZE (ovar)
12983 		    && TREE_CODE (DECL_SIZE (ovar)) != INTEGER_CST)
12984 		  {
12985 		    tree ovar2 = DECL_VALUE_EXPR (ovar);
12986 		    gcc_assert (TREE_CODE (ovar2) == INDIRECT_REF);
12987 		    ovar2 = TREE_OPERAND (ovar2, 0);
12988 		    gcc_assert (DECL_P (ovar2));
12989 		    ovar = ovar2;
12990 		  }
12991 		if (!maybe_lookup_field (ovar, ctx)
12992 		    && !(OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
12993 			 && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
12994 			     || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)))
12995 		  continue;
12996 	      }
12997 
12998 	    talign = TYPE_ALIGN_UNIT (TREE_TYPE (ovar));
12999 	    if (DECL_P (ovar) && DECL_ALIGN_UNIT (ovar) > talign)
13000 	      talign = DECL_ALIGN_UNIT (ovar);
13001 
13002 	    var = NULL_TREE;
13003 	    if (nc)
13004 	      {
13005 		if (in_reduction_clauses
13006 		    && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
13007 		    && OMP_CLAUSE_MAP_IN_REDUCTION (c))
13008 		  {
13009 		    tree d = ovar;
13010 		    if (is_variable_sized (d))
13011 		      {
13012 			gcc_assert (DECL_HAS_VALUE_EXPR_P (d));
13013 			d = DECL_VALUE_EXPR (d);
13014 			gcc_assert (TREE_CODE (d) == INDIRECT_REF);
13015 			d = TREE_OPERAND (d, 0);
13016 			gcc_assert (DECL_P (d));
13017 		      }
13018 		    splay_tree_key key
13019 		      = (splay_tree_key) &DECL_CONTEXT (d);
13020 		    tree nd = (tree) splay_tree_lookup (ctx->field_map,
13021 							key)->value;
13022 		    if (d == ovar)
13023 		      var = nd;
13024 		    else
13025 		      var = build_fold_indirect_ref (nd);
13026 		  }
13027 		else
13028 		  var = lookup_decl_in_outer_ctx (ovar, ctx);
13029 	      }
13030 	    if (nc
13031 		&& OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
13032 		&& (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
13033 		    || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH)
13034 		&& is_omp_target (stmt))
13035 	      {
13036 		x = build_sender_ref (c, ctx);
13037 		gimplify_assign (x, build_fold_addr_expr (var), &ilist);
13038 	      }
13039 	    else if (nc)
13040 	      {
13041 		x = build_sender_ref (ovar, ctx);
13042 
13043 		if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
13044 		    && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
13045 		    && !OMP_CLAUSE_MAP_ZERO_BIAS_ARRAY_SECTION (c)
13046 		    && TREE_CODE (TREE_TYPE (ovar)) == ARRAY_TYPE)
13047 		  {
13048 		    gcc_assert (offloaded);
13049 		    tree avar
13050 		      = create_tmp_var (TREE_TYPE (TREE_TYPE (x)));
13051 		    mark_addressable (avar);
13052 		    gimplify_assign (avar, build_fold_addr_expr (var), &ilist);
13053 		    talign = DECL_ALIGN_UNIT (avar);
13054 		    avar = build_fold_addr_expr (avar);
13055 		    gimplify_assign (x, avar, &ilist);
13056 		  }
13057 		else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
13058 		  {
13059 		    gcc_assert (is_gimple_omp_oacc (ctx->stmt));
13060 		    if (!omp_privatize_by_reference (var))
13061 		      {
13062 			if (is_gimple_reg (var)
13063 			    && OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
13064 			  suppress_warning (var);
13065 			var = build_fold_addr_expr (var);
13066 		      }
13067 		    else
13068 		      talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
13069 		    gimplify_assign (x, var, &ilist);
13070 		  }
13071 		else if (is_gimple_reg (var))
13072 		  {
13073 		    gcc_assert (offloaded);
13074 		    tree avar = create_tmp_var (TREE_TYPE (var));
13075 		    mark_addressable (avar);
13076 		    enum gomp_map_kind map_kind = OMP_CLAUSE_MAP_KIND (c);
13077 		    if (GOMP_MAP_COPY_TO_P (map_kind)
13078 			|| map_kind == GOMP_MAP_POINTER
13079 			|| map_kind == GOMP_MAP_TO_PSET
13080 			|| map_kind == GOMP_MAP_FORCE_DEVICEPTR)
13081 		      {
13082 			/* If we need to initialize a temporary
13083 			   with VAR because it is not addressable, and
13084 			   the variable hasn't been initialized yet, then
13085 			   we'll get a warning for the store to avar.
13086 			   Don't warn in that case, the mapping might
13087 			   be implicit.  */
13088 			suppress_warning (var, OPT_Wuninitialized);
13089 			gimplify_assign (avar, var, &ilist);
13090 		      }
13091 		    avar = build_fold_addr_expr (avar);
13092 		    gimplify_assign (x, avar, &ilist);
13093 		    if ((GOMP_MAP_COPY_FROM_P (map_kind)
13094 			 || map_kind == GOMP_MAP_FORCE_DEVICEPTR)
13095 			&& !TYPE_READONLY (TREE_TYPE (var)))
13096 		      {
13097 			x = unshare_expr (x);
13098 			x = build_simple_mem_ref (x);
13099 			gimplify_assign (var, x, &olist);
13100 		      }
13101 		  }
13102 		else
13103 		  {
		    /* While MAP is handled explicitly by the FE,
		       for 'target update', only the identifier is passed.  */
13106 		    if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FROM
13107 			 || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TO)
13108 			&& (omp_is_allocatable_or_ptr (var)
13109 			    && omp_check_optional_argument (var, false)))
13110 		      var = build_fold_indirect_ref (var);
13111 		    else if ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_FROM
13112 			      && OMP_CLAUSE_CODE (c) != OMP_CLAUSE_TO)
13113 			     || (!omp_is_allocatable_or_ptr (var)
13114 				 && !omp_check_optional_argument (var, false)))
13115 		      var = build_fold_addr_expr (var);
13116 		    gimplify_assign (x, var, &ilist);
13117 		  }
13118 	      }
13119 	    s = NULL_TREE;
13120 	    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
13121 	      {
13122 		gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
13123 		s = TREE_TYPE (ovar);
13124 		if (TREE_CODE (s) == REFERENCE_TYPE
13125 		    || omp_check_optional_argument (ovar, false))
13126 		  s = TREE_TYPE (s);
13127 		s = TYPE_SIZE_UNIT (s);
13128 	      }
13129 	    else
13130 	      s = OMP_CLAUSE_SIZE (c);
13131 	    if (s == NULL_TREE)
13132 	      s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
13133 	    s = fold_convert (size_type_node, s);
13134 	    purpose = size_int (map_idx++);
13135 	    CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
13136 	    if (TREE_CODE (s) != INTEGER_CST)
13137 	      TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
13138 
13139 	    unsigned HOST_WIDE_INT tkind, tkind_zero;
13140 	    switch (OMP_CLAUSE_CODE (c))
13141 	      {
13142 	      case OMP_CLAUSE_MAP:
13143 		tkind = OMP_CLAUSE_MAP_KIND (c);
13144 		tkind_zero = tkind;
13145 		if (OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c))
13146 		  switch (tkind)
13147 		    {
13148 		    case GOMP_MAP_ALLOC:
13149 		    case GOMP_MAP_IF_PRESENT:
13150 		    case GOMP_MAP_TO:
13151 		    case GOMP_MAP_FROM:
13152 		    case GOMP_MAP_TOFROM:
13153 		    case GOMP_MAP_ALWAYS_TO:
13154 		    case GOMP_MAP_ALWAYS_FROM:
13155 		    case GOMP_MAP_ALWAYS_TOFROM:
13156 		    case GOMP_MAP_RELEASE:
13157 		    case GOMP_MAP_FORCE_TO:
13158 		    case GOMP_MAP_FORCE_FROM:
13159 		    case GOMP_MAP_FORCE_TOFROM:
13160 		    case GOMP_MAP_FORCE_PRESENT:
13161 		      tkind_zero = GOMP_MAP_ZERO_LEN_ARRAY_SECTION;
13162 		      break;
13163 		    case GOMP_MAP_DELETE:
13164 		      tkind_zero = GOMP_MAP_DELETE_ZERO_LEN_ARRAY_SECTION;
13165 		    default:
13166 		      break;
13167 		    }
13168 		if (tkind_zero != tkind)
13169 		  {
13170 		    if (integer_zerop (s))
13171 		      tkind = tkind_zero;
13172 		    else if (integer_nonzerop (s))
13173 		      tkind_zero = tkind;
13174 		  }
13175 		if (tkind_zero == tkind
13176 		    && OMP_CLAUSE_MAP_RUNTIME_IMPLICIT_P (c)
13177 		    && (((tkind & GOMP_MAP_FLAG_SPECIAL_BITS)
13178 			 & ~GOMP_MAP_IMPLICIT)
13179 			== 0))
13180 		  {
		    /* If this is an implicit map, and the GOMP_MAP_IMPLICIT
		       bits are not interfered with by other special bit
		       encodings, then turn the GOMP_IMPLICIT_BIT flag on for
		       the runtime to see.  */
13185 		    tkind |= GOMP_MAP_IMPLICIT;
13186 		    tkind_zero = tkind;
13187 		  }
13188 		break;
13189 	      case OMP_CLAUSE_FIRSTPRIVATE:
13190 		gcc_checking_assert (is_gimple_omp_oacc (ctx->stmt));
13191 		tkind = GOMP_MAP_TO;
13192 		tkind_zero = tkind;
13193 		break;
13194 	      case OMP_CLAUSE_TO:
13195 		tkind = GOMP_MAP_TO;
13196 		tkind_zero = tkind;
13197 		break;
13198 	      case OMP_CLAUSE_FROM:
13199 		tkind = GOMP_MAP_FROM;
13200 		tkind_zero = tkind;
13201 		break;
13202 	      default:
13203 		gcc_unreachable ();
13204 	      }
13205 	    gcc_checking_assert (tkind
13206 				 < (HOST_WIDE_INT_C (1U) << talign_shift));
13207 	    gcc_checking_assert (tkind_zero
13208 				 < (HOST_WIDE_INT_C (1U) << talign_shift));
13209 	    talign = ceil_log2 (talign);
13210 	    tkind |= talign << talign_shift;
13211 	    tkind_zero |= talign << talign_shift;
13212 	    gcc_checking_assert (tkind
13213 				 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
13214 	    gcc_checking_assert (tkind_zero
13215 				 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
13216 	    if (tkind == tkind_zero)
13217 	      x = build_int_cstu (tkind_type, tkind);
13218 	    else
13219 	      {
13220 		TREE_STATIC (TREE_VEC_ELT (t, 2)) = 0;
13221 		x = build3 (COND_EXPR, tkind_type,
13222 			    fold_build2 (EQ_EXPR, boolean_type_node,
13223 					 unshare_expr (s), size_zero_node),
13224 			    build_int_cstu (tkind_type, tkind_zero),
13225 			    build_int_cstu (tkind_type, tkind));
13226 	      }
13227 	    CONSTRUCTOR_APPEND_ELT (vkind, purpose, x);
13228 	    if (nc && nc != c)
13229 	      c = nc;
13230 	    break;
13231 
13232 	  case OMP_CLAUSE_FIRSTPRIVATE:
13233 	    if (is_gimple_omp_oacc (ctx->stmt))
13234 	      goto oacc_firstprivate_map;
13235 	    ovar = OMP_CLAUSE_DECL (c);
13236 	    if (omp_privatize_by_reference (ovar))
13237 	      talign = TYPE_ALIGN_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
13238 	    else
13239 	      talign = DECL_ALIGN_UNIT (ovar);
13240 	    var = lookup_decl_in_outer_ctx (ovar, ctx);
13241 	    x = build_sender_ref (ovar, ctx);
13242 	    tkind = GOMP_MAP_FIRSTPRIVATE;
13243 	    type = TREE_TYPE (ovar);
13244 	    if (omp_privatize_by_reference (ovar))
13245 	      type = TREE_TYPE (type);
13246 	    if ((INTEGRAL_TYPE_P (type)
13247 		 && TYPE_PRECISION (type) <= POINTER_SIZE)
13248 		|| TREE_CODE (type) == POINTER_TYPE)
13249 	      {
13250 		tkind = GOMP_MAP_FIRSTPRIVATE_INT;
13251 		tree t = var;
13252 		if (omp_privatize_by_reference (var))
13253 		  t = build_simple_mem_ref (var);
13254 		else if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
13255 		  suppress_warning (var);
13256 		if (TREE_CODE (type) != POINTER_TYPE)
13257 		  t = fold_convert (pointer_sized_int_node, t);
13258 		t = fold_convert (TREE_TYPE (x), t);
13259 		gimplify_assign (x, t, &ilist);
13260 	      }
13261 	    else if (omp_privatize_by_reference (var))
13262 	      gimplify_assign (x, var, &ilist);
13263 	    else if (is_gimple_reg (var))
13264 	      {
13265 		tree avar = create_tmp_var (TREE_TYPE (var));
13266 		mark_addressable (avar);
13267 		if (OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (c))
13268 		  suppress_warning (var);
13269 		gimplify_assign (avar, var, &ilist);
13270 		avar = build_fold_addr_expr (avar);
13271 		gimplify_assign (x, avar, &ilist);
13272 	      }
13273 	    else
13274 	      {
13275 		var = build_fold_addr_expr (var);
13276 		gimplify_assign (x, var, &ilist);
13277 	      }
13278 	    if (tkind == GOMP_MAP_FIRSTPRIVATE_INT)
13279 	      s = size_int (0);
13280 	    else if (omp_privatize_by_reference (ovar))
13281 	      s = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ovar)));
13282 	    else
13283 	      s = TYPE_SIZE_UNIT (TREE_TYPE (ovar));
13284 	    s = fold_convert (size_type_node, s);
13285 	    purpose = size_int (map_idx++);
13286 	    CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
13287 	    if (TREE_CODE (s) != INTEGER_CST)
13288 	      TREE_STATIC (TREE_VEC_ELT (t, 1)) = 0;
13289 
13290 	    gcc_checking_assert (tkind
13291 				 < (HOST_WIDE_INT_C (1U) << talign_shift));
13292 	    talign = ceil_log2 (talign);
13293 	    tkind |= talign << talign_shift;
13294 	    gcc_checking_assert (tkind
13295 				 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
13296 	    CONSTRUCTOR_APPEND_ELT (vkind, purpose,
13297 				    build_int_cstu (tkind_type, tkind));
13298 	    break;
13299 
13300 	  case OMP_CLAUSE_USE_DEVICE_PTR:
13301 	  case OMP_CLAUSE_USE_DEVICE_ADDR:
13302 	  case OMP_CLAUSE_IS_DEVICE_PTR:
13303 	    ovar = OMP_CLAUSE_DECL (c);
13304 	    var = lookup_decl_in_outer_ctx (ovar, ctx);
13305 
13306 	    if (lang_hooks.decls.omp_array_data (ovar, true))
13307 	      {
13308 		tkind = (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IS_DEVICE_PTR
13309 			 ? GOMP_MAP_USE_DEVICE_PTR : GOMP_MAP_FIRSTPRIVATE_INT);
13310 		x = build_sender_ref ((splay_tree_key) &DECL_NAME (ovar), ctx);
13311 	      }
13312 	    else if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IS_DEVICE_PTR)
13313 	      {
13314 		tkind = GOMP_MAP_USE_DEVICE_PTR;
13315 		x = build_sender_ref ((splay_tree_key) &DECL_UID (ovar), ctx);
13316 	      }
13317 	    else
13318 	      {
13319 		tkind = GOMP_MAP_FIRSTPRIVATE_INT;
13320 		x = build_sender_ref (ovar, ctx);
13321 	      }
13322 
13323 	    if (is_gimple_omp_oacc (ctx->stmt))
13324 	      {
13325 		gcc_assert (tkind == GOMP_MAP_USE_DEVICE_PTR);
13326 
13327 		if (OMP_CLAUSE_USE_DEVICE_PTR_IF_PRESENT (c))
13328 		  tkind = GOMP_MAP_USE_DEVICE_PTR_IF_PRESENT;
13329 	      }
13330 
13331 	    type = TREE_TYPE (ovar);
13332 	    if (lang_hooks.decls.omp_array_data (ovar, true))
13333 	      var = lang_hooks.decls.omp_array_data (ovar, false);
13334 	    else if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
13335 		      && !omp_privatize_by_reference (ovar)
13336 		      && !omp_is_allocatable_or_ptr (ovar))
13337 		     || TREE_CODE (type) == ARRAY_TYPE)
13338 	      var = build_fold_addr_expr (var);
13339 	    else
13340 	      {
13341 		if (omp_privatize_by_reference (ovar)
13342 		    || omp_check_optional_argument (ovar, false)
13343 		    || omp_is_allocatable_or_ptr (ovar))
13344 		  {
13345 		    type = TREE_TYPE (type);
13346 		    if (POINTER_TYPE_P (type)
13347 			&& TREE_CODE (type) != ARRAY_TYPE
13348 			&& ((OMP_CLAUSE_CODE (c) != OMP_CLAUSE_USE_DEVICE_ADDR
13349 			    && !omp_is_allocatable_or_ptr (ovar))
13350 			   || (omp_privatize_by_reference (ovar)
13351 			       && omp_is_allocatable_or_ptr (ovar))))
13352 		      var = build_simple_mem_ref (var);
13353 		    var = fold_convert (TREE_TYPE (x), var);
13354 		  }
13355 	      }
13356 	    tree present;
13357 	    present = omp_check_optional_argument (ovar, true);
13358 	    if (present)
13359 	      {
13360 		tree null_label = create_artificial_label (UNKNOWN_LOCATION);
13361 		tree notnull_label = create_artificial_label (UNKNOWN_LOCATION);
13362 		tree opt_arg_label = create_artificial_label (UNKNOWN_LOCATION);
13363 		tree new_x = unshare_expr (x);
13364 		gimplify_expr (&present, &ilist, NULL, is_gimple_val,
13365 			       fb_rvalue);
13366 		gcond *cond = gimple_build_cond_from_tree (present,
13367 							   notnull_label,
13368 							   null_label);
13369 		gimple_seq_add_stmt (&ilist, cond);
13370 		gimple_seq_add_stmt (&ilist, gimple_build_label (null_label));
13371 		gimplify_assign (new_x, null_pointer_node, &ilist);
13372 		gimple_seq_add_stmt (&ilist, gimple_build_goto (opt_arg_label));
13373 		gimple_seq_add_stmt (&ilist,
13374 				     gimple_build_label (notnull_label));
13375 		gimplify_assign (x, var, &ilist);
13376 		gimple_seq_add_stmt (&ilist,
13377 				     gimple_build_label (opt_arg_label));
13378 	      }
13379 	    else
13380 	      gimplify_assign (x, var, &ilist);
13381 	    s = size_int (0);
13382 	    purpose = size_int (map_idx++);
13383 	    CONSTRUCTOR_APPEND_ELT (vsize, purpose, s);
13384 	    gcc_checking_assert (tkind
13385 				 < (HOST_WIDE_INT_C (1U) << talign_shift));
13386 	    gcc_checking_assert (tkind
13387 				 <= tree_to_uhwi (TYPE_MAX_VALUE (tkind_type)));
13388 	    CONSTRUCTOR_APPEND_ELT (vkind, purpose,
13389 				    build_int_cstu (tkind_type, tkind));
13390 	    break;
13391 	  }
13392 
13393       gcc_assert (map_idx == map_cnt);
13394 
13395       DECL_INITIAL (TREE_VEC_ELT (t, 1))
13396 	= build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 1)), vsize);
13397       DECL_INITIAL (TREE_VEC_ELT (t, 2))
13398 	= build_constructor (TREE_TYPE (TREE_VEC_ELT (t, 2)), vkind);
13399       for (int i = 1; i <= 2; i++)
13400 	if (!TREE_STATIC (TREE_VEC_ELT (t, i)))
13401 	  {
13402 	    gimple_seq initlist = NULL;
13403 	    force_gimple_operand (build1 (DECL_EXPR, void_type_node,
13404 					  TREE_VEC_ELT (t, i)),
13405 				  &initlist, true, NULL_TREE);
13406 	    gimple_seq_add_seq (&ilist, initlist);
13407 
13408 	    tree clobber = build_clobber (TREE_TYPE (TREE_VEC_ELT (t, i)));
13409 	    gimple_seq_add_stmt (&olist,
13410 				 gimple_build_assign (TREE_VEC_ELT (t, i),
13411 						      clobber));
13412 	  }
13413 	else if (omp_maybe_offloaded_ctx (ctx->outer))
13414 	  {
13415 	    tree id = get_identifier ("omp declare target");
13416 	    tree decl = TREE_VEC_ELT (t, i);
13417 	    DECL_ATTRIBUTES (decl)
13418 	      = tree_cons (id, NULL_TREE, DECL_ATTRIBUTES (decl));
13419 	    varpool_node *node = varpool_node::get (decl);
13420 	    if (node)
13421 	      {
13422 		node->offloadable = 1;
13423 		if (ENABLE_OFFLOADING)
13424 		  {
13425 		    g->have_offload = true;
13426 		    vec_safe_push (offload_vars, t);
13427 		  }
13428 	      }
13429 	  }
13430 
13431       tree clobber = build_clobber (ctx->record_type);
13432       gimple_seq_add_stmt (&olist, gimple_build_assign (ctx->sender_decl,
13433 							clobber));
13434     }
13435 
13436   /* Once all the expansions are done, sequence all the different
13437      fragments inside gimple_omp_body.  */
13438 
13439   new_body = NULL;
13440 
13441   if (offloaded
13442       && ctx->record_type)
13443     {
13444       t = build_fold_addr_expr_loc (loc, ctx->sender_decl);
13445       /* fixup_child_record_type might have changed receiver_decl's type.  */
13446       t = fold_convert_loc (loc, TREE_TYPE (ctx->receiver_decl), t);
13447       gimple_seq_add_stmt (&new_body,
13448 	  		   gimple_build_assign (ctx->receiver_decl, t));
13449     }
13450   gimple_seq_add_seq (&new_body, fplist);
13451 
13452   if (offloaded || data_region)
13453     {
13454       tree prev = NULL_TREE;
13455       for (c = clauses; c ; c = OMP_CLAUSE_CHAIN (c))
13456 	switch (OMP_CLAUSE_CODE (c))
13457 	  {
13458 	    tree var, x;
13459 	  default:
13460 	    break;
13461 	  case OMP_CLAUSE_FIRSTPRIVATE:
13462 	    if (is_gimple_omp_oacc (ctx->stmt))
13463 	      break;
13464 	    var = OMP_CLAUSE_DECL (c);
13465 	    if (omp_privatize_by_reference (var)
13466 		|| is_gimple_reg_type (TREE_TYPE (var)))
13467 	      {
13468 		tree new_var = lookup_decl (var, ctx);
13469 		tree type;
13470 		type = TREE_TYPE (var);
13471 		if (omp_privatize_by_reference (var))
13472 		  type = TREE_TYPE (type);
13473 		if ((INTEGRAL_TYPE_P (type)
13474 		     && TYPE_PRECISION (type) <= POINTER_SIZE)
13475 		    || TREE_CODE (type) == POINTER_TYPE)
13476 		  {
13477 		    x = build_receiver_ref (var, false, ctx);
13478 		    if (TREE_CODE (type) != POINTER_TYPE)
13479 		      x = fold_convert (pointer_sized_int_node, x);
13480 		    x = fold_convert (type, x);
13481 		    gimplify_expr (&x, &new_body, NULL, is_gimple_val,
13482 				   fb_rvalue);
13483 		    if (omp_privatize_by_reference (var))
13484 		      {
13485 			tree v = create_tmp_var_raw (type, get_name (var));
13486 			gimple_add_tmp_var (v);
13487 			TREE_ADDRESSABLE (v) = 1;
13488 			gimple_seq_add_stmt (&new_body,
13489 					     gimple_build_assign (v, x));
13490 			x = build_fold_addr_expr (v);
13491 		      }
13492 		    gimple_seq_add_stmt (&new_body,
13493 					 gimple_build_assign (new_var, x));
13494 		  }
13495 		else
13496 		  {
13497 		    bool by_ref = !omp_privatize_by_reference (var);
13498 		    x = build_receiver_ref (var, by_ref, ctx);
13499 		    gimplify_expr (&x, &new_body, NULL, is_gimple_val,
13500 				   fb_rvalue);
13501 		    gimple_seq_add_stmt (&new_body,
13502 					 gimple_build_assign (new_var, x));
13503 		  }
13504 	      }
13505 	    else if (is_variable_sized (var))
13506 	      {
13507 		tree pvar = DECL_VALUE_EXPR (var);
13508 		gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
13509 		pvar = TREE_OPERAND (pvar, 0);
13510 		gcc_assert (DECL_P (pvar));
13511 		tree new_var = lookup_decl (pvar, ctx);
13512 		x = build_receiver_ref (var, false, ctx);
13513 		gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
13514 		gimple_seq_add_stmt (&new_body,
13515 				     gimple_build_assign (new_var, x));
13516 	      }
13517 	    break;
13518 	  case OMP_CLAUSE_PRIVATE:
13519 	    if (is_gimple_omp_oacc (ctx->stmt))
13520 	      break;
13521 	    var = OMP_CLAUSE_DECL (c);
13522 	    if (omp_privatize_by_reference (var))
13523 	      {
13524 		location_t clause_loc = OMP_CLAUSE_LOCATION (c);
13525 		tree new_var = lookup_decl (var, ctx);
13526 		x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
13527 		if (TREE_CONSTANT (x))
13528 		  {
13529 		    x = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (new_var)),
13530 					    get_name (var));
13531 		    gimple_add_tmp_var (x);
13532 		    TREE_ADDRESSABLE (x) = 1;
13533 		    x = build_fold_addr_expr_loc (clause_loc, x);
13534 		  }
13535 		else
13536 		  break;
13537 
13538 		x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
13539 		gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
13540 		gimple_seq_add_stmt (&new_body,
13541 				     gimple_build_assign (new_var, x));
13542 	      }
13543 	    break;
13544 	  case OMP_CLAUSE_USE_DEVICE_PTR:
13545 	  case OMP_CLAUSE_USE_DEVICE_ADDR:
13546 	  case OMP_CLAUSE_IS_DEVICE_PTR:
13547 	    tree new_var;
13548 	    gimple_seq assign_body;
13549 	    bool is_array_data;
13550 	    bool do_optional_check;
13551 	    assign_body = NULL;
13552 	    do_optional_check = false;
13553 	    var = OMP_CLAUSE_DECL (c);
13554 	    is_array_data = lang_hooks.decls.omp_array_data (var, true) != NULL;
13555 
13556 	    if (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_IS_DEVICE_PTR)
13557 	      x = build_sender_ref (is_array_data
13558 				    ? (splay_tree_key) &DECL_NAME (var)
13559 				    : (splay_tree_key) &DECL_UID (var), ctx);
13560 	    else
13561 	      x = build_receiver_ref (var, false, ctx);
13562 
13563 	    if (is_array_data)
13564 	      {
13565 		bool is_ref = omp_privatize_by_reference (var);
13566 		do_optional_check = true;
13567 		/* First, we copy the descriptor data from the host; then
13568 		   we update its data to point to the target address.  */
13569 		new_var = lookup_decl (var, ctx);
13570 		new_var = DECL_VALUE_EXPR (new_var);
13571 		tree v = new_var;
13572 
13573 		if (is_ref)
13574 		  {
13575 		    var = build_fold_indirect_ref (var);
13576 		    gimplify_expr (&var, &assign_body, NULL, is_gimple_val,
13577 				   fb_rvalue);
13578 		    v = create_tmp_var_raw (TREE_TYPE (var), get_name (var));
13579 		    gimple_add_tmp_var (v);
13580 		    TREE_ADDRESSABLE (v) = 1;
13581 		    gimple_seq_add_stmt (&assign_body,
13582 					 gimple_build_assign (v, var));
13583 		    tree rhs = build_fold_addr_expr (v);
13584 		    gimple_seq_add_stmt (&assign_body,
13585 					 gimple_build_assign (new_var, rhs));
13586 		  }
13587 		else
13588 		  gimple_seq_add_stmt (&assign_body,
13589 				       gimple_build_assign (new_var, var));
13590 
13591 		tree v2 = lang_hooks.decls.omp_array_data (unshare_expr (v), false);
13592 		gcc_assert (v2);
13593 		gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
13594 		gimple_seq_add_stmt (&assign_body,
13595 				     gimple_build_assign (v2, x));
13596 	      }
13597 	    else if (is_variable_sized (var))
13598 	      {
13599 		tree pvar = DECL_VALUE_EXPR (var);
13600 		gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
13601 		pvar = TREE_OPERAND (pvar, 0);
13602 		gcc_assert (DECL_P (pvar));
13603 		new_var = lookup_decl (pvar, ctx);
13604 		gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
13605 		gimple_seq_add_stmt (&assign_body,
13606 				     gimple_build_assign (new_var, x));
13607 	      }
13608 	    else if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_ADDR
13609 		      && !omp_privatize_by_reference (var)
13610 		      && !omp_is_allocatable_or_ptr (var))
13611 		     || TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE)
13612 	      {
13613 		new_var = lookup_decl (var, ctx);
13614 		new_var = DECL_VALUE_EXPR (new_var);
13615 		gcc_assert (TREE_CODE (new_var) == MEM_REF);
13616 		new_var = TREE_OPERAND (new_var, 0);
13617 		gcc_assert (DECL_P (new_var));
13618 		gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
13619 		gimple_seq_add_stmt (&assign_body,
13620 				     gimple_build_assign (new_var, x));
13621 	      }
13622 	    else
13623 	      {
13624 		tree type = TREE_TYPE (var);
13625 		new_var = lookup_decl (var, ctx);
13626 		if (omp_privatize_by_reference (var))
13627 		  {
13628 		    type = TREE_TYPE (type);
13629 		    if (POINTER_TYPE_P (type)
13630 			&& TREE_CODE (type) != ARRAY_TYPE
13631 			&& (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_USE_DEVICE_ADDR
13632 			    || (omp_privatize_by_reference (var)
13633 				&& omp_is_allocatable_or_ptr (var))))
13634 		      {
13635 			tree v = create_tmp_var_raw (type, get_name (var));
13636 			gimple_add_tmp_var (v);
13637 			TREE_ADDRESSABLE (v) = 1;
13638 			x = fold_convert (type, x);
13639 			gimplify_expr (&x, &assign_body, NULL, is_gimple_val,
13640 				       fb_rvalue);
13641 			gimple_seq_add_stmt (&assign_body,
13642 					     gimple_build_assign (v, x));
13643 			x = build_fold_addr_expr (v);
13644 			do_optional_check = true;
13645 		      }
13646 		  }
13647 		new_var = DECL_VALUE_EXPR (new_var);
13648 		x = fold_convert (TREE_TYPE (new_var), x);
13649 		gimplify_expr (&x, &assign_body, NULL, is_gimple_val, fb_rvalue);
13650 		gimple_seq_add_stmt (&assign_body,
13651 				     gimple_build_assign (new_var, x));
13652 	      }
13653 	    tree present;
13654 	    present = (do_optional_check
13655 		       ? omp_check_optional_argument (OMP_CLAUSE_DECL (c), true)
13656 		       : NULL_TREE);
13657 	    if (present)
13658 	      {
13659 		tree null_label = create_artificial_label (UNKNOWN_LOCATION);
13660 		tree notnull_label = create_artificial_label (UNKNOWN_LOCATION);
13661 		tree opt_arg_label = create_artificial_label (UNKNOWN_LOCATION);
13662 		glabel *null_glabel = gimple_build_label (null_label);
13663 		glabel *notnull_glabel = gimple_build_label (notnull_label);
13664 		ggoto *opt_arg_ggoto = gimple_build_goto (opt_arg_label);
13665 		gimplify_expr (&x, &new_body, NULL, is_gimple_val,
13666 					   fb_rvalue);
13667 		gimplify_expr (&present, &new_body, NULL, is_gimple_val,
13668 			       fb_rvalue);
13669 		gcond *cond = gimple_build_cond_from_tree (present,
13670 							   notnull_label,
13671 							   null_label);
13672 		gimple_seq_add_stmt (&new_body, cond);
13673 		gimple_seq_add_stmt (&new_body, null_glabel);
13674 		gimplify_assign (new_var, null_pointer_node, &new_body);
13675 		gimple_seq_add_stmt (&new_body, opt_arg_ggoto);
13676 		gimple_seq_add_stmt (&new_body, notnull_glabel);
13677 		gimple_seq_add_seq (&new_body, assign_body);
13678 		gimple_seq_add_stmt (&new_body,
13679 				     gimple_build_label (opt_arg_label));
13680 	      }
13681 	    else
13682 	      gimple_seq_add_seq (&new_body, assign_body);
13683 	    break;
13684 	  }
13685       /* Handle GOMP_MAP_FIRSTPRIVATE_{POINTER,REFERENCE} in second pass,
13686 	 so that firstprivate vars holding OMP_CLAUSE_SIZE if needed
13687 	 are already handled.  Similarly OMP_CLAUSE_PRIVATE for VLAs
13688 	 or references to VLAs.  */
13689       for (c = clauses; c; c = OMP_CLAUSE_CHAIN (c))
13690 	switch (OMP_CLAUSE_CODE (c))
13691 	  {
13692 	    tree var;
13693 	  default:
13694 	    break;
13695 	  case OMP_CLAUSE_MAP:
13696 	    if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
13697 		|| OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
13698 	      {
13699 		location_t clause_loc = OMP_CLAUSE_LOCATION (c);
13700 		poly_int64 offset = 0;
13701 		gcc_assert (prev);
13702 		var = OMP_CLAUSE_DECL (c);
13703 		if (DECL_P (var)
13704 		    && TREE_CODE (TREE_TYPE (var)) == ARRAY_TYPE
13705 		    && is_global_var (maybe_lookup_decl_in_outer_ctx (var,
13706 								      ctx))
13707 		    && varpool_node::get_create (var)->offloadable)
13708 		  break;
13709 		if (TREE_CODE (var) == INDIRECT_REF
13710 		    && TREE_CODE (TREE_OPERAND (var, 0)) == COMPONENT_REF)
13711 		  var = TREE_OPERAND (var, 0);
13712 		if (TREE_CODE (var) == COMPONENT_REF)
13713 		  {
13714 		    var = get_addr_base_and_unit_offset (var, &offset);
13715 		    gcc_assert (var != NULL_TREE && DECL_P (var));
13716 		  }
13717 		else if (DECL_SIZE (var)
13718 			 && TREE_CODE (DECL_SIZE (var)) != INTEGER_CST)
13719 		  {
13720 		    tree var2 = DECL_VALUE_EXPR (var);
13721 		    gcc_assert (TREE_CODE (var2) == INDIRECT_REF);
13722 		    var2 = TREE_OPERAND (var2, 0);
13723 		    gcc_assert (DECL_P (var2));
13724 		    var = var2;
13725 		  }
13726 		tree new_var = lookup_decl (var, ctx), x;
13727 		tree type = TREE_TYPE (new_var);
13728 		bool is_ref;
13729 		if (TREE_CODE (OMP_CLAUSE_DECL (c)) == INDIRECT_REF
13730 		    && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
13731 			== COMPONENT_REF))
13732 		  {
13733 		    type = TREE_TYPE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0));
13734 		    is_ref = true;
13735 		    new_var = build2 (MEM_REF, type,
13736 				      build_fold_addr_expr (new_var),
13737 				      build_int_cst (build_pointer_type (type),
13738 						     offset));
13739 		  }
13740 		else if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPONENT_REF)
13741 		  {
13742 		    type = TREE_TYPE (OMP_CLAUSE_DECL (c));
13743 		    is_ref = TREE_CODE (type) == REFERENCE_TYPE;
13744 		    new_var = build2 (MEM_REF, type,
13745 				      build_fold_addr_expr (new_var),
13746 				      build_int_cst (build_pointer_type (type),
13747 						     offset));
13748 		  }
13749 		else
13750 		  is_ref = omp_privatize_by_reference (var);
13751 		if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE)
13752 		  is_ref = false;
13753 		bool ref_to_array = false;
13754 		if (is_ref)
13755 		  {
13756 		    type = TREE_TYPE (type);
13757 		    if (TREE_CODE (type) == ARRAY_TYPE)
13758 		      {
13759 			type = build_pointer_type (type);
13760 			ref_to_array = true;
13761 		      }
13762 		  }
13763 		else if (TREE_CODE (type) == ARRAY_TYPE)
13764 		  {
13765 		    tree decl2 = DECL_VALUE_EXPR (new_var);
13766 		    gcc_assert (TREE_CODE (decl2) == MEM_REF);
13767 		    decl2 = TREE_OPERAND (decl2, 0);
13768 		    gcc_assert (DECL_P (decl2));
13769 		    new_var = decl2;
13770 		    type = TREE_TYPE (new_var);
13771 		  }
13772 		x = build_receiver_ref (OMP_CLAUSE_DECL (prev), false, ctx);
13773 		x = fold_convert_loc (clause_loc, type, x);
13774 		if (!integer_zerop (OMP_CLAUSE_SIZE (c)))
13775 		  {
13776 		    tree bias = OMP_CLAUSE_SIZE (c);
13777 		    if (DECL_P (bias))
13778 		      bias = lookup_decl (bias, ctx);
13779 		    bias = fold_convert_loc (clause_loc, sizetype, bias);
13780 		    bias = fold_build1_loc (clause_loc, NEGATE_EXPR, sizetype,
13781 					    bias);
13782 		    x = fold_build2_loc (clause_loc, POINTER_PLUS_EXPR,
13783 					 TREE_TYPE (x), x, bias);
13784 		  }
13785 		if (ref_to_array)
13786 		  x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
13787 		gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
13788 		if (is_ref && !ref_to_array)
13789 		  {
13790 		    tree t = create_tmp_var_raw (type, get_name (var));
13791 		    gimple_add_tmp_var (t);
13792 		    TREE_ADDRESSABLE (t) = 1;
13793 		    gimple_seq_add_stmt (&new_body,
13794 					 gimple_build_assign (t, x));
13795 		    x = build_fold_addr_expr_loc (clause_loc, t);
13796 		  }
13797 		gimple_seq_add_stmt (&new_body,
13798 				     gimple_build_assign (new_var, x));
13799 		prev = NULL_TREE;
13800 	      }
13801 	    else if (OMP_CLAUSE_CHAIN (c)
13802 		     && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c))
13803 			== OMP_CLAUSE_MAP
13804 		     && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
13805 			 == GOMP_MAP_FIRSTPRIVATE_POINTER
13806 			 || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
13807 			     == GOMP_MAP_FIRSTPRIVATE_REFERENCE)))
13808 	      prev = c;
13809 	    break;
13810 	  case OMP_CLAUSE_PRIVATE:
13811 	    var = OMP_CLAUSE_DECL (c);
13812 	    if (is_variable_sized (var))
13813 	      {
13814 		location_t clause_loc = OMP_CLAUSE_LOCATION (c);
13815 		tree new_var = lookup_decl (var, ctx);
13816 		tree pvar = DECL_VALUE_EXPR (var);
13817 		gcc_assert (TREE_CODE (pvar) == INDIRECT_REF);
13818 		pvar = TREE_OPERAND (pvar, 0);
13819 		gcc_assert (DECL_P (pvar));
13820 		tree new_pvar = lookup_decl (pvar, ctx);
13821 		tree atmp = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
13822 		tree al = size_int (DECL_ALIGN (var));
13823 		tree x = TYPE_SIZE_UNIT (TREE_TYPE (new_var));
13824 		x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
13825 		x = fold_convert_loc (clause_loc, TREE_TYPE (new_pvar), x);
13826 		gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
13827 		gimple_seq_add_stmt (&new_body,
13828 				     gimple_build_assign (new_pvar, x));
13829 	      }
13830 	    else if (omp_privatize_by_reference (var)
13831 		     && !is_gimple_omp_oacc (ctx->stmt))
13832 	      {
13833 		location_t clause_loc = OMP_CLAUSE_LOCATION (c);
13834 		tree new_var = lookup_decl (var, ctx);
13835 		tree x = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (new_var)));
13836 		if (TREE_CONSTANT (x))
13837 		  break;
13838 		else
13839 		  {
13840 		    tree atmp
13841 		      = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
13842 		    tree rtype = TREE_TYPE (TREE_TYPE (new_var));
13843 		    tree al = size_int (TYPE_ALIGN (rtype));
13844 		    x = build_call_expr_loc (clause_loc, atmp, 2, x, al);
13845 		  }
13846 
13847 		x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
13848 		gimplify_expr (&x, &new_body, NULL, is_gimple_val, fb_rvalue);
13849 		gimple_seq_add_stmt (&new_body,
13850 				     gimple_build_assign (new_var, x));
13851 	      }
13852 	    break;
13853 	  }
13854 
13855       gimple_seq fork_seq = NULL;
13856       gimple_seq join_seq = NULL;
13857 
13858       if (offloaded && is_gimple_omp_oacc (ctx->stmt))
13859 	{
13860 	  /* If there are reductions on the offloaded region itself, treat
13861 	     them as a dummy GANG loop.  */
13862 	  tree level = build_int_cst (integer_type_node, GOMP_DIM_GANG);
13863 
13864 	  gcall *private_marker = lower_oacc_private_marker (ctx);
13865 
13866 	  if (private_marker)
13867 	    gimple_call_set_arg (private_marker, 2, level);
13868 
13869 	  lower_oacc_reductions (gimple_location (ctx->stmt), clauses, level,
13870 				 false, NULL, private_marker, NULL, &fork_seq,
13871 				 &join_seq, ctx);
13872 	}
13873 
13874       gimple_seq_add_seq (&new_body, fork_seq);
13875       gimple_seq_add_seq (&new_body, tgt_body);
13876       gimple_seq_add_seq (&new_body, join_seq);
13877 
13878       if (offloaded)
13879 	{
13880 	  new_body = maybe_catch_exception (new_body);
13881 	  gimple_seq_add_stmt (&new_body, gimple_build_omp_return (false));
13882 	}
13883       gimple_omp_set_body (stmt, new_body);
13884     }
13885 
13886   bind = gimple_build_bind (NULL, NULL,
13887 			    tgt_bind ? gimple_bind_block (tgt_bind)
13888 				     : NULL_TREE);
13889   gsi_replace (gsi_p, dep_bind ? dep_bind : bind, true);
13890   gimple_bind_add_seq (bind, ilist);
13891   gimple_bind_add_stmt (bind, stmt);
13892   gimple_bind_add_seq (bind, olist);
13893 
13894   pop_gimplify_context (NULL);
13895 
13896   if (dep_bind)
13897     {
13898       gimple_bind_add_seq (dep_bind, dep_ilist);
13899       gimple_bind_add_stmt (dep_bind, bind);
13900       gimple_bind_add_seq (dep_bind, dep_olist);
13901       pop_gimplify_context (dep_bind);
13902     }
13903 }
13904 
13905 /* Expand code for an OpenMP teams directive.  */
13906 
static void
lower_omp_teams (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  gomp_teams *teams_stmt = as_a <gomp_teams *> (gsi_stmt (*gsi_p));
  push_gimplify_context ();

  /* Replace the teams statement with a GIMPLE_BIND we build the lowered
     sequence into.  */
  tree block = make_node (BLOCK);
  gbind *bind = gimple_build_bind (NULL, NULL, block);
  gsi_replace (gsi_p, bind, true);
  gimple_seq bind_body = NULL;
  gimple_seq dlist = NULL;
  gimple_seq olist = NULL;

  /* Evaluate the num_teams clause, if present; a value of 0 is passed to
     the runtime when the clause is absent.  */
  tree num_teams = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
				    OMP_CLAUSE_NUM_TEAMS);
  tree num_teams_lower = NULL_TREE;
  if (num_teams == NULL_TREE)
    num_teams = build_int_cst (unsigned_type_node, 0);
  else
    {
      num_teams_lower = OMP_CLAUSE_NUM_TEAMS_LOWER_EXPR (num_teams);
      if (num_teams_lower)
	{
	  num_teams_lower = fold_convert (unsigned_type_node, num_teams_lower);
	  gimplify_expr (&num_teams_lower, &bind_body, NULL, is_gimple_val,
			 fb_rvalue);
	}
      num_teams = OMP_CLAUSE_NUM_TEAMS_UPPER_EXPR (num_teams);
      num_teams = fold_convert (unsigned_type_node, num_teams);
      gimplify_expr (&num_teams, &bind_body, NULL, is_gimple_val, fb_rvalue);
    }
  /* With no lower bound given, the lower bound equals the upper bound.  */
  if (num_teams_lower == NULL_TREE)
    num_teams_lower = num_teams;
  /* Likewise evaluate thread_limit; 0 again means "no clause".  */
  tree thread_limit = omp_find_clause (gimple_omp_teams_clauses (teams_stmt),
				       OMP_CLAUSE_THREAD_LIMIT);
  if (thread_limit == NULL_TREE)
    thread_limit = build_int_cst (unsigned_type_node, 0);
  else
    {
      thread_limit = OMP_CLAUSE_THREAD_LIMIT_EXPR (thread_limit);
      thread_limit = fold_convert (unsigned_type_node, thread_limit);
      gimplify_expr (&thread_limit, &bind_body, NULL, is_gimple_val,
		     fb_rvalue);
    }
  /* Emit a retry loop around the GOMP_teams4 runtime call:

       first = 1;
     llabel:
       temp = GOMP_teams4 (num_teams_lower, num_teams, thread_limit, first);
       if (temp != 0) goto tlabel; else goto flabel;
     tlabel:
       first = 0;
       <teams body>
       goto llabel;
     flabel:

     i.e. the body is re-executed as long as the call returns nonzero,
     with FIRST distinguishing the initial call from the later ones.  */
  location_t loc = gimple_location (teams_stmt);
  tree decl = builtin_decl_explicit (BUILT_IN_GOMP_TEAMS4);
  tree rettype = TREE_TYPE (TREE_TYPE (decl));
  tree first = create_tmp_var (rettype);
  gimple_seq_add_stmt (&bind_body,
		       gimple_build_assign (first, build_one_cst (rettype)));
  tree llabel = create_artificial_label (loc);
  gimple_seq_add_stmt (&bind_body, gimple_build_label (llabel));
  gimple *call
    = gimple_build_call (decl, 4, num_teams_lower, num_teams, thread_limit,
			 first);
  gimple_set_location (call, loc);
  tree temp = create_tmp_var (rettype);
  gimple_call_set_lhs (call, temp);
  gimple_seq_add_stmt (&bind_body, call);

  tree tlabel = create_artificial_label (loc);
  tree flabel = create_artificial_label (loc);
  gimple *cond = gimple_build_cond (NE_EXPR, temp, build_zero_cst (rettype),
				    tlabel, flabel);
  gimple_seq_add_stmt (&bind_body, cond);
  gimple_seq_add_stmt (&bind_body, gimple_build_label (tlabel));
  gimple_seq_add_stmt (&bind_body,
		       gimple_build_assign (first, build_zero_cst (rettype)));

  /* Lower data sharing clauses, the teams body, and reduction clauses.  */
  lower_rec_input_clauses (gimple_omp_teams_clauses (teams_stmt),
			   &bind_body, &dlist, ctx, NULL);
  lower_omp (gimple_omp_body_ptr (teams_stmt), ctx);
  lower_reduction_clauses (gimple_omp_teams_clauses (teams_stmt), &olist,
			   NULL, ctx);
  gimple_seq_add_stmt (&bind_body, teams_stmt);

  gimple_seq_add_seq (&bind_body, gimple_omp_body (teams_stmt));
  gimple_omp_set_body (teams_stmt, NULL);
  gimple_seq_add_seq (&bind_body, olist);
  gimple_seq_add_seq (&bind_body, dlist);
  gimple_seq_add_stmt (&bind_body, gimple_build_omp_return (true));
  /* Branch back for another GOMP_teams4 call.  */
  gimple_seq_add_stmt (&bind_body, gimple_build_goto (llabel));
  gimple_seq_add_stmt (&bind_body, gimple_build_label (flabel));
  gimple_bind_set_body (bind, bind_body);

  pop_gimplify_context (bind);

  gimple_bind_append_vars (bind, ctx->block_vars);
  BLOCK_VARS (block) = ctx->block_vars;
  if (BLOCK_VARS (block))
    TREE_USED (block) = 1;
}
13999 
14000 /* Callback for lower_omp_1.  Return non-NULL if *tp needs to be
14001    regimplified.  If DATA is non-NULL, lower_omp_1 is outside
14002    of OMP context, but with task_shared_vars set.  */
14003 
14004 static tree
lower_omp_regimplify_p(tree * tp,int * walk_subtrees,void * data)14005 lower_omp_regimplify_p (tree *tp, int *walk_subtrees,
14006     			void *data)
14007 {
14008   tree t = *tp;
14009 
14010   /* Any variable with DECL_VALUE_EXPR needs to be regimplified.  */
14011   if ((VAR_P (t) || TREE_CODE (t) == PARM_DECL || TREE_CODE (t) == RESULT_DECL)
14012       && data == NULL
14013       && DECL_HAS_VALUE_EXPR_P (t))
14014     return t;
14015 
14016   if (task_shared_vars
14017       && DECL_P (t)
14018       && bitmap_bit_p (task_shared_vars, DECL_UID (t)))
14019     return t;
14020 
14021   /* If a global variable has been privatized, TREE_CONSTANT on
14022      ADDR_EXPR might be wrong.  */
14023   if (data == NULL && TREE_CODE (t) == ADDR_EXPR)
14024     recompute_tree_invariant_for_addr_expr (t);
14025 
14026   *walk_subtrees = !IS_TYPE_OR_DECL_P (t);
14027   return NULL_TREE;
14028 }
14029 
14030 /* Data to be communicated between lower_omp_regimplify_operands and
14031    lower_omp_regimplify_operands_p.  */
14032 
struct lower_omp_regimplify_operands_data
{
  /* Context of the statement being regimplified.  */
  omp_context *ctx;
  /* Flat list of <saved DECL_VALUE_EXPR, decl> pairs recorded by
     lower_omp_regimplify_operands_p so the caller can restore the
     original DECL_VALUE_EXPRs afterwards.  */
  vec<tree> *decls;
};
14038 
14039 /* Helper function for lower_omp_regimplify_operands.  Find
14040    omp_member_access_dummy_var vars and adjust temporarily their
14041    DECL_VALUE_EXPRs if needed.  */
14042 
14043 static tree
lower_omp_regimplify_operands_p(tree * tp,int * walk_subtrees,void * data)14044 lower_omp_regimplify_operands_p (tree *tp, int *walk_subtrees,
14045 				 void *data)
14046 {
14047   tree t = omp_member_access_dummy_var (*tp);
14048   if (t)
14049     {
14050       struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
14051       lower_omp_regimplify_operands_data *ldata
14052 	= (lower_omp_regimplify_operands_data *) wi->info;
14053       tree o = maybe_lookup_decl (t, ldata->ctx);
14054       if (o != t)
14055 	{
14056 	  ldata->decls->safe_push (DECL_VALUE_EXPR (*tp));
14057 	  ldata->decls->safe_push (*tp);
14058 	  tree v = unshare_and_remap (DECL_VALUE_EXPR (*tp), t, o);
14059 	  SET_DECL_VALUE_EXPR (*tp, v);
14060 	}
14061     }
14062   *walk_subtrees = !IS_TYPE_OR_DECL_P (*tp);
14063   return NULL_TREE;
14064 }
14065 
14066 /* Wrapper around gimple_regimplify_operands that adjusts DECL_VALUE_EXPRs
14067    of omp_member_access_dummy_var vars during regimplification.  */
14068 
14069 static void
lower_omp_regimplify_operands(omp_context * ctx,gimple * stmt,gimple_stmt_iterator * gsi_p)14070 lower_omp_regimplify_operands (omp_context *ctx, gimple *stmt,
14071 			       gimple_stmt_iterator *gsi_p)
14072 {
14073   auto_vec<tree, 10> decls;
14074   if (ctx)
14075     {
14076       struct walk_stmt_info wi;
14077       memset (&wi, '\0', sizeof (wi));
14078       struct lower_omp_regimplify_operands_data data;
14079       data.ctx = ctx;
14080       data.decls = &decls;
14081       wi.info = &data;
14082       walk_gimple_op (stmt, lower_omp_regimplify_operands_p, &wi);
14083     }
14084   gimple_regimplify_operands (stmt, gsi_p);
14085   while (!decls.is_empty ())
14086     {
14087       tree t = decls.pop ();
14088       tree v = decls.pop ();
14089       SET_DECL_VALUE_EXPR (t, v);
14090     }
14091 }
14092 
/* Lower the statement at *GSI_P within OMP context CTX (NULL when the
   statement is outside of any OMP context, in which case only
   task_shared_vars-driven regimplification applies).  */

static void
lower_omp_1 (gimple_stmt_iterator *gsi_p, omp_context *ctx)
{
  gimple *stmt = gsi_stmt (*gsi_p);
  struct walk_stmt_info wi;
  gcall *call_stmt;

  if (gimple_has_location (stmt))
    input_location = gimple_location (stmt);

  /* WI is only consulted (passed as the DATA argument marking the
     outside-of-context case) when task_shared_vars is set.  */
  if (task_shared_vars)
    memset (&wi, '\0', sizeof (wi));

  /* If we have issued syntax errors, avoid doing any heavy lifting.
     Just replace the OMP directives with a NOP to avoid
     confusing RTL expansion.  */
  if (seen_error () && is_gimple_omp (stmt))
    {
      gsi_replace (gsi_p, gimple_build_nop (), true);
      return;
    }

  switch (gimple_code (stmt))
    {
    case GIMPLE_COND:
      {
	/* Regimplify condition operands that mention privatized vars
	   (DECL_VALUE_EXPR) or task shared vars.  */
	gcond *cond_stmt = as_a <gcond *> (stmt);
	if ((ctx || task_shared_vars)
	    && (walk_tree (gimple_cond_lhs_ptr (cond_stmt),
			   lower_omp_regimplify_p,
			   ctx ? NULL : &wi, NULL)
		|| walk_tree (gimple_cond_rhs_ptr (cond_stmt),
			      lower_omp_regimplify_p,
			      ctx ? NULL : &wi, NULL)))
	  lower_omp_regimplify_operands (ctx, cond_stmt, gsi_p);
      }
      break;
    case GIMPLE_CATCH:
      /* Recurse into nested statement sequences.  */
      lower_omp (gimple_catch_handler_ptr (as_a <gcatch *> (stmt)), ctx);
      break;
    case GIMPLE_EH_FILTER:
      lower_omp (gimple_eh_filter_failure_ptr (stmt), ctx);
      break;
    case GIMPLE_TRY:
      lower_omp (gimple_try_eval_ptr (stmt), ctx);
      lower_omp (gimple_try_cleanup_ptr (stmt), ctx);
      break;
    case GIMPLE_TRANSACTION:
      lower_omp (gimple_transaction_body_ptr (as_a <gtransaction *> (stmt)),
		 ctx);
      break;
    case GIMPLE_BIND:
      if (ctx && is_gimple_omp_oacc (ctx->stmt))
	{
	  /* Inside OpenACC constructs, record bind-local decls as
	     privatization candidates.  */
	  tree vars = gimple_bind_vars (as_a <gbind *> (stmt));
	  oacc_privatization_scan_decl_chain (ctx, vars);
	}
      lower_omp (gimple_bind_body_ptr (as_a <gbind *> (stmt)), ctx);
      maybe_remove_omp_member_access_dummy_vars (as_a <gbind *> (stmt));
      break;
    /* The remaining GIMPLE_OMP_* cases dispatch to the per-construct
       lowering routines; cancellable regions get a label the lowered
       cancellation checks can branch to.  */
    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      if (ctx->cancellable)
	ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
      lower_omp_taskreg (gsi_p, ctx);
      break;
    case GIMPLE_OMP_FOR:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      if (ctx->cancellable)
	ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
      lower_omp_for (gsi_p, ctx);
      break;
    case GIMPLE_OMP_SECTIONS:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      if (ctx->cancellable)
	ctx->cancel_label = create_artificial_label (UNKNOWN_LOCATION);
      lower_omp_sections (gsi_p, ctx);
      break;
    case GIMPLE_OMP_SCOPE:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_scope (gsi_p, ctx);
      break;
    case GIMPLE_OMP_SINGLE:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_single (gsi_p, ctx);
      break;
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_MASKED:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_master (gsi_p, ctx);
      break;
    case GIMPLE_OMP_TASKGROUP:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_taskgroup (gsi_p, ctx);
      break;
    case GIMPLE_OMP_ORDERED:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_ordered (gsi_p, ctx);
      break;
    case GIMPLE_OMP_SCAN:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_scan (gsi_p, ctx);
      break;
    case GIMPLE_OMP_CRITICAL:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_critical (gsi_p, ctx);
      break;
    case GIMPLE_OMP_ATOMIC_LOAD:
      /* Like GIMPLE_COND, regimplify the loaded address if needed.  */
      if ((ctx || task_shared_vars)
	  && walk_tree (gimple_omp_atomic_load_rhs_ptr (
			  as_a <gomp_atomic_load *> (stmt)),
			lower_omp_regimplify_p, ctx ? NULL : &wi, NULL))
	lower_omp_regimplify_operands (ctx, stmt, gsi_p);
      break;
    case GIMPLE_OMP_TARGET:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      lower_omp_target (gsi_p, ctx);
      break;
    case GIMPLE_OMP_TEAMS:
      ctx = maybe_lookup_ctx (stmt);
      gcc_assert (ctx);
      /* Host teams constructs are lowered like parallel/task regions.  */
      if (gimple_omp_teams_host (as_a <gomp_teams *> (stmt)))
	lower_omp_taskreg (gsi_p, ctx);
      else
	lower_omp_teams (gsi_p, ctx);
      break;
    case GIMPLE_CALL:
      /* Rewrite GOMP barrier/cancel/cancellation-point calls inside
	 cancellable regions into their *_CANCEL variants and add a
	 conditional branch to the region's cancel label.  */
      tree fndecl;
      call_stmt = as_a <gcall *> (stmt);
      fndecl = gimple_call_fndecl (call_stmt);
      if (fndecl
	  && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
	switch (DECL_FUNCTION_CODE (fndecl))
	  {
	  case BUILT_IN_GOMP_BARRIER:
	    if (ctx == NULL)
	      break;
	    /* FALLTHRU */
	  case BUILT_IN_GOMP_CANCEL:
	  case BUILT_IN_GOMP_CANCELLATION_POINT:
	    omp_context *cctx;
	    cctx = ctx;
	    if (gimple_code (cctx->stmt) == GIMPLE_OMP_SECTION)
	      cctx = cctx->outer;
	    gcc_assert (gimple_call_lhs (call_stmt) == NULL_TREE);
	    if (!cctx->cancellable)
	      {
		/* In a non-cancellable region a cancellation point is a
		   no-op; barriers and cancel calls stay as they are.  */
		if (DECL_FUNCTION_CODE (fndecl)
		    == BUILT_IN_GOMP_CANCELLATION_POINT)
		  {
		    stmt = gimple_build_nop ();
		    gsi_replace (gsi_p, stmt, false);
		  }
		break;
	      }
	    if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_GOMP_BARRIER)
	      {
		fndecl = builtin_decl_explicit (BUILT_IN_GOMP_BARRIER_CANCEL);
		gimple_call_set_fndecl (call_stmt, fndecl);
		gimple_call_set_fntype (call_stmt, TREE_TYPE (fndecl));
	      }
	    /* Capture the call's result and branch to the cancel label
	       when it is nonzero.  */
	    tree lhs;
	    lhs = create_tmp_var (TREE_TYPE (TREE_TYPE (fndecl)));
	    gimple_call_set_lhs (call_stmt, lhs);
	    tree fallthru_label;
	    fallthru_label = create_artificial_label (UNKNOWN_LOCATION);
	    gimple *g;
	    g = gimple_build_label (fallthru_label);
	    gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
	    g = gimple_build_cond (NE_EXPR, lhs,
				   fold_convert (TREE_TYPE (lhs),
						 boolean_false_node),
				   cctx->cancel_label, fallthru_label);
	    gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
	    break;
	  default:
	    break;
	  }
      goto regimplify;

    case GIMPLE_ASSIGN:
      /* For lastprivate (conditional) handling, record in the clause's
	 _condtemp_ iteration variable that the conditional var was
	 assigned in this iteration.  */
      for (omp_context *up = ctx; up; up = up->outer)
	{
	  if (gimple_code (up->stmt) == GIMPLE_OMP_ORDERED
	      || gimple_code (up->stmt) == GIMPLE_OMP_CRITICAL
	      || gimple_code (up->stmt) == GIMPLE_OMP_TASKGROUP
	      || gimple_code (up->stmt) == GIMPLE_OMP_SCOPE
	      || gimple_code (up->stmt) == GIMPLE_OMP_SECTION
	      || gimple_code (up->stmt) == GIMPLE_OMP_SCAN
	      || (gimple_code (up->stmt) == GIMPLE_OMP_TARGET
		  && (gimple_omp_target_kind (up->stmt)
		      == GF_OMP_TARGET_KIND_DATA)))
	    continue;
	  else if (!up->lastprivate_conditional_map)
	    break;
	  tree lhs = get_base_address (gimple_assign_lhs (stmt));
	  if (TREE_CODE (lhs) == MEM_REF
	      && DECL_P (TREE_OPERAND (lhs, 0))
	      && TREE_CODE (TREE_TYPE (TREE_OPERAND (lhs,
						     0))) == REFERENCE_TYPE)
	    lhs = TREE_OPERAND (lhs, 0);
	  if (DECL_P (lhs))
	    if (tree *v = up->lastprivate_conditional_map->get (lhs))
	      {
		tree clauses;
		if (up->combined_into_simd_safelen1)
		  {
		    up = up->outer;
		    if (gimple_code (up->stmt) == GIMPLE_OMP_SCAN)
		      up = up->outer;
		  }
		if (gimple_code (up->stmt) == GIMPLE_OMP_FOR)
		  clauses = gimple_omp_for_clauses (up->stmt);
		else
		  clauses = gimple_omp_sections_clauses (up->stmt);
		tree c = omp_find_clause (clauses, OMP_CLAUSE__CONDTEMP_);
		if (!OMP_CLAUSE__CONDTEMP__ITER (c))
		  c = omp_find_clause (OMP_CLAUSE_CHAIN (c),
				       OMP_CLAUSE__CONDTEMP_);
		gcc_assert (OMP_CLAUSE__CONDTEMP__ITER (c));
		gimple *g = gimple_build_assign (*v, OMP_CLAUSE_DECL (c));
		gsi_insert_after (gsi_p, g, GSI_SAME_STMT);
	      }
	}
      /* FALLTHRU */

    default:
    regimplify:
      if ((ctx || task_shared_vars)
	  && walk_gimple_op (stmt, lower_omp_regimplify_p,
			     ctx ? NULL : &wi))
	{
	  /* Just remove clobbers, this should happen only if we have
	     "privatized" local addressable variables in SIMD regions,
	     the clobber isn't needed in that case and gimplifying address
	     of the ARRAY_REF into a pointer and creating MEM_REF based
	     clobber would create worse code than we get with the clobber
	     dropped.  */
	  if (gimple_clobber_p (stmt))
	    {
	      gsi_replace (gsi_p, gimple_build_nop (), true);
	      break;
	    }
	  lower_omp_regimplify_operands (ctx, stmt, gsi_p);
	}
      break;
    }
}
14353 
/* Lower all statements in the sequence *BODY within OMP context CTX
   (NULL for the outermost sequence).  */

static void
lower_omp (gimple_seq *body, omp_context *ctx)
{
  location_t saved_location = input_location;
  gimple_stmt_iterator gsi;
  for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
    lower_omp_1 (&gsi, ctx);
  /* During gimplification, we haven't folded statements inside offloading
     or taskreg regions (gimplify.c:maybe_fold_stmt); do that now.  */
  if (target_nesting_level || taskreg_nesting_level)
    for (gsi = gsi_start (*body); !gsi_end_p (gsi); gsi_next (&gsi))
      fold_stmt (&gsi);
  input_location = saved_location;
}
14368 
14369 /* Main entry point.  */
14370 
static unsigned int
execute_lower_omp (void)
{
  gimple_seq body;
  int i;
  omp_context *ctx;

  /* This pass always runs, to provide PROP_gimple_lomp.
     But often, there is nothing to do.  */
  if (flag_openacc == 0 && flag_openmp == 0
      && flag_openmp_simd == 0)
    return 0;

  all_contexts = splay_tree_new (splay_tree_compare_pointers, 0,
				 delete_omp_context);

  body = gimple_body (current_function_decl);

  /* Scan phase: build an omp_context for every OMP region and finish
     the record types for parallel/task regions.  */
  scan_omp (&body, NULL);
  gcc_assert (taskreg_nesting_level == 0);
  FOR_EACH_VEC_ELT (taskreg_contexts, i, ctx)
    finish_taskreg_scan (ctx);
  taskreg_contexts.release ();

  /* Lowering phase: only needed when scanning found any OMP regions.  */
  if (all_contexts->root)
    {
      if (task_shared_vars)
	push_gimplify_context ();
      lower_omp (&body, NULL);
      if (task_shared_vars)
	pop_gimplify_context (NULL);
    }

  /* Release pass-global state.  */
  if (all_contexts)
    {
      splay_tree_delete (all_contexts);
      all_contexts = NULL;
    }
  BITMAP_FREE (task_shared_vars);
  BITMAP_FREE (global_nonaddressable_vars);

  /* If current function is a method, remove artificial dummy VAR_DECL created
     for non-static data member privatization, they aren't needed for
     debuginfo nor anything else, have been already replaced everywhere in the
     IL and cause problems with LTO.  */
  if (DECL_ARGUMENTS (current_function_decl)
      && DECL_ARTIFICIAL (DECL_ARGUMENTS (current_function_decl))
      && (TREE_CODE (TREE_TYPE (DECL_ARGUMENTS (current_function_decl)))
	  == POINTER_TYPE))
    remove_member_access_dummy_vars (DECL_INITIAL (current_function_decl));
  return 0;
}
14423 
namespace {

/* Pass descriptor for the "omplower" gimple pass.  */

const pass_data pass_data_lower_omp =
{
  GIMPLE_PASS, /* type */
  "omplower", /* name */
  OPTGROUP_OMP, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  PROP_gimple_lomp | PROP_gimple_lomp_dev, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

/* Pass-manager wrapper that forwards to execute_lower_omp.  */

class pass_lower_omp : public gimple_opt_pass
{
public:
  pass_lower_omp (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_lower_omp, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *) { return execute_lower_omp (); }

}; // class pass_lower_omp

} // anon namespace
14452 
14453 gimple_opt_pass *
make_pass_lower_omp(gcc::context * ctxt)14454 make_pass_lower_omp (gcc::context *ctxt)
14455 {
14456   return new pass_lower_omp (ctxt);
14457 }
14458 
14459 /* The following is a utility to diagnose structured block violations.
14460    It is not part of the "omplower" pass, as that's invoked too late.  It
14461    should be invoked by the respective front ends after gimplification.  */
14462 
14463 static splay_tree all_labels;
14464 
14465 /* Check for mismatched contexts and generate an error if needed.  Return
14466    true if an error is detected.  */
14467 
static bool
diagnose_sb_0 (gimple_stmt_iterator *gsi_p,
	       gimple *branch_ctx, gimple *label_ctx)
{
  gcc_checking_assert (!branch_ctx || is_gimple_omp (branch_ctx));
  gcc_checking_assert (!label_ctx || is_gimple_omp (label_ctx));

  /* A branch whose source and destination share the same (possibly
     NULL) OMP context is structurally valid.  */
  if (label_ctx == branch_ctx)
    return false;

  const char* kind = NULL;

  /* Decide whether to blame OpenACC or OpenMP in the diagnostic.  */
  if (flag_openacc)
    {
      if ((branch_ctx && is_gimple_omp_oacc (branch_ctx))
	  || (label_ctx && is_gimple_omp_oacc (label_ctx)))
	{
	  gcc_checking_assert (kind == NULL);
	  kind = "OpenACC";
	}
    }
  if (kind == NULL)
    {
      gcc_checking_assert (flag_openmp || flag_openmp_simd);
      kind = "OpenMP";
    }

  /* Previously we kept track of the label's entire context in diagnose_sb_[12]
     so we could traverse it and issue a correct "exit" or "enter" error
     message upon a structured block violation.

     We built the context by building a list with tree_cons'ing, but there is
     no easy counterpart in gimple tuples.  It seems like far too much work
     for issuing exit/enter error messages.  If someone really misses the
     distinct error message... patches welcome.  */

#if 0
  /* Try to avoid confusing the user by producing and error message
     with correct "exit" or "enter" verbiage.  We prefer "exit"
     unless we can show that LABEL_CTX is nested within BRANCH_CTX.  */
  if (branch_ctx == NULL)
    exit_p = false;
  else
    {
      while (label_ctx)
	{
	  if (TREE_VALUE (label_ctx) == branch_ctx)
	    {
	      exit_p = false;
	      break;
	    }
	  label_ctx = TREE_CHAIN (label_ctx);
	}
    }

  if (exit_p)
    error ("invalid exit from %s structured block", kind);
  else
    error ("invalid entry to %s structured block", kind);
#endif

  /* If it's obvious we have an invalid entry, be specific about the error.  */
  if (branch_ctx == NULL)
    error ("invalid entry to %s structured block", kind);
  else
    {
      /* Otherwise, be vague and lazy, but efficient.  */
      error ("invalid branch to/from %s structured block", kind);
    }

  /* Replace the offending branch with a NOP so later passes do not see
     the invalid control flow.  */
  gsi_replace (gsi_p, gimple_build_nop (), false);
  return true;
}
14541 
14542 /* Pass 1: Create a minimal tree of structured blocks, and record
14543    where each label is found.  */
14544 
14545 static tree
diagnose_sb_1(gimple_stmt_iterator * gsi_p,bool * handled_ops_p,struct walk_stmt_info * wi)14546 diagnose_sb_1 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
14547     	       struct walk_stmt_info *wi)
14548 {
14549   gimple *context = (gimple *) wi->info;
14550   gimple *inner_context;
14551   gimple *stmt = gsi_stmt (*gsi_p);
14552 
14553   *handled_ops_p = true;
14554 
14555   switch (gimple_code (stmt))
14556     {
14557     WALK_SUBSTMTS;
14558 
14559     case GIMPLE_OMP_PARALLEL:
14560     case GIMPLE_OMP_TASK:
14561     case GIMPLE_OMP_SCOPE:
14562     case GIMPLE_OMP_SECTIONS:
14563     case GIMPLE_OMP_SINGLE:
14564     case GIMPLE_OMP_SECTION:
14565     case GIMPLE_OMP_MASTER:
14566     case GIMPLE_OMP_MASKED:
14567     case GIMPLE_OMP_ORDERED:
14568     case GIMPLE_OMP_SCAN:
14569     case GIMPLE_OMP_CRITICAL:
14570     case GIMPLE_OMP_TARGET:
14571     case GIMPLE_OMP_TEAMS:
14572     case GIMPLE_OMP_TASKGROUP:
14573       /* The minimal context here is just the current OMP construct.  */
14574       inner_context = stmt;
14575       wi->info = inner_context;
14576       walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
14577       wi->info = context;
14578       break;
14579 
14580     case GIMPLE_OMP_FOR:
14581       inner_context = stmt;
14582       wi->info = inner_context;
14583       /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
14584 	 walk them.  */
14585       walk_gimple_seq (gimple_omp_for_pre_body (stmt),
14586 	  	       diagnose_sb_1, NULL, wi);
14587       walk_gimple_seq (gimple_omp_body (stmt), diagnose_sb_1, NULL, wi);
14588       wi->info = context;
14589       break;
14590 
14591     case GIMPLE_LABEL:
14592       splay_tree_insert (all_labels,
14593 			 (splay_tree_key) gimple_label_label (
14594 					    as_a <glabel *> (stmt)),
14595 			 (splay_tree_value) context);
14596       break;
14597 
14598     default:
14599       break;
14600     }
14601 
14602   return NULL_TREE;
14603 }
14604 
/* Pass 2: Check each branch and see if its context differs from that of
   the destination label's context.  Callback for walk_gimple_seq_mod;
   WI->info holds the innermost enclosing OMP construct (NULL at
   toplevel).  Relies on all_labels having been filled in by
   diagnose_sb_1.  */

static tree
diagnose_sb_2 (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
    	       struct walk_stmt_info *wi)
{
  gimple *context = (gimple *) wi->info;
  splay_tree_node n;
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;

  switch (gimple_code (stmt))
    {
    WALK_SUBSTMTS;

    case GIMPLE_OMP_PARALLEL:
    case GIMPLE_OMP_TASK:
    case GIMPLE_OMP_SCOPE:
    case GIMPLE_OMP_SECTIONS:
    case GIMPLE_OMP_SINGLE:
    case GIMPLE_OMP_SECTION:
    case GIMPLE_OMP_MASTER:
    case GIMPLE_OMP_MASKED:
    case GIMPLE_OMP_ORDERED:
    case GIMPLE_OMP_SCAN:
    case GIMPLE_OMP_CRITICAL:
    case GIMPLE_OMP_TARGET:
    case GIMPLE_OMP_TEAMS:
    case GIMPLE_OMP_TASKGROUP:
      /* Recurse into the body with this construct as the new context,
	 mirroring the traversal done in diagnose_sb_1.  */
      wi->info = stmt;
      walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_OMP_FOR:
      wi->info = stmt;
      /* gimple_omp_for_{index,initial,final} are all DECLs; no need to
	 walk them.  */
      walk_gimple_seq_mod (gimple_omp_for_pre_body_ptr (stmt),
			   diagnose_sb_2, NULL, wi);
      walk_gimple_seq_mod (gimple_omp_body_ptr (stmt), diagnose_sb_2, NULL, wi);
      wi->info = context;
      break;

    case GIMPLE_COND:
	{
	  gcond *cond_stmt = as_a <gcond *> (stmt);
	  /* Check each of the two outgoing edges against the context of
	     its destination label, where a label is present.  */
	  tree lab = gimple_cond_true_label (cond_stmt);
	  if (lab)
	    {
	      n = splay_tree_lookup (all_labels,
				     (splay_tree_key) lab);
	      diagnose_sb_0 (gsi_p, context,
			     n ? (gimple *) n->value : NULL);
	    }
	  lab = gimple_cond_false_label (cond_stmt);
	  if (lab)
	    {
	      n = splay_tree_lookup (all_labels,
				     (splay_tree_key) lab);
	      diagnose_sb_0 (gsi_p, context,
			     n ? (gimple *) n->value : NULL);
	    }
	}
      break;

    case GIMPLE_GOTO:
      {
	tree lab = gimple_goto_dest (stmt);
	/* Computed gotos have a non-LABEL_DECL destination; nothing we
	   can check statically in that case.  */
	if (TREE_CODE (lab) != LABEL_DECL)
	  break;

	n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
	diagnose_sb_0 (gsi_p, context, n ? (gimple *) n->value : NULL);
      }
      break;

    case GIMPLE_SWITCH:
      {
	gswitch *switch_stmt = as_a <gswitch *> (stmt);
	unsigned int i;
	for (i = 0; i < gimple_switch_num_labels (switch_stmt); ++i)
	  {
	    tree lab = CASE_LABEL (gimple_switch_label (switch_stmt, i));
	    n = splay_tree_lookup (all_labels, (splay_tree_key) lab);
	    /* One diagnostic per switch is enough; diagnose_sb_0 has
	       already replaced the statement when it returns true.  */
	    if (n && diagnose_sb_0 (gsi_p, context, (gimple *) n->value))
	      break;
	  }
      }
      break;

    case GIMPLE_RETURN:
      /* A return from inside an OMP construct exits the structured
	 block; a NULL label context triggers the diagnostic whenever
	 CONTEXT is non-NULL.  */
      diagnose_sb_0 (gsi_p, context, NULL);
      break;

    default:
      break;
    }

  return NULL_TREE;
}
14708 
14709 static unsigned int
diagnose_omp_structured_block_errors(void)14710 diagnose_omp_structured_block_errors (void)
14711 {
14712   struct walk_stmt_info wi;
14713   gimple_seq body = gimple_body (current_function_decl);
14714 
14715   all_labels = splay_tree_new (splay_tree_compare_pointers, 0, 0);
14716 
14717   memset (&wi, 0, sizeof (wi));
14718   walk_gimple_seq (body, diagnose_sb_1, NULL, &wi);
14719 
14720   memset (&wi, 0, sizeof (wi));
14721   wi.want_locations = true;
14722   walk_gimple_seq_mod (&body, diagnose_sb_2, NULL, &wi);
14723 
14724   gimple_set_body (current_function_decl, body);
14725 
14726   splay_tree_delete (all_labels);
14727   all_labels = NULL;
14728 
14729   return 0;
14730 }
14731 
14732 namespace {
14733 
/* Pass descriptor for the structured-block diagnostic pass.  The leading
   '*' in the name keeps it out of -fdump-tree-all output.  */

const pass_data pass_data_diagnose_omp_blocks =
{
  GIMPLE_PASS, /* type */
  "*diagnose_omp_blocks", /* name */
  OPTGROUP_OMP, /* optinfo_flags */
  TV_NONE, /* tv_id */
  PROP_gimple_any, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
14746 
14747 class pass_diagnose_omp_blocks : public gimple_opt_pass
14748 {
14749 public:
pass_diagnose_omp_blocks(gcc::context * ctxt)14750   pass_diagnose_omp_blocks (gcc::context *ctxt)
14751     : gimple_opt_pass (pass_data_diagnose_omp_blocks, ctxt)
14752   {}
14753 
14754   /* opt_pass methods: */
gate(function *)14755   virtual bool gate (function *)
14756   {
14757     return flag_openacc || flag_openmp || flag_openmp_simd;
14758   }
execute(function *)14759   virtual unsigned int execute (function *)
14760     {
14761       return diagnose_omp_structured_block_errors ();
14762     }
14763 
14764 }; // class pass_diagnose_omp_blocks
14765 
14766 } // anon namespace
14767 
14768 gimple_opt_pass *
make_pass_diagnose_omp_blocks(gcc::context * ctxt)14769 make_pass_diagnose_omp_blocks (gcc::context *ctxt)
14770 {
14771   return new pass_diagnose_omp_blocks (ctxt);
14772 }
14773 
14774 
14775 #include "gt-omp-low.h"
14776