xref: /netbsd/external/gpl3/gcc.old/dist/gcc/gimplify.c (revision ec02198a)
1 /* Tree lowering pass.  This pass converts the GENERIC functions-as-trees
2    tree representation into the GIMPLE form.
3    Copyright (C) 2002-2020 Free Software Foundation, Inc.
4    Major work done by Sebastian Pop <s.pop@laposte.net>,
5    Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.
6 
7 This file is part of GCC.
8 
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13 
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
17 for more details.
18 
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3.  If not see
21 <http://www.gnu.org/licenses/>.  */
22 
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "backend.h"
27 #include "target.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "memmodel.h"
31 #include "tm_p.h"
32 #include "gimple.h"
33 #include "gimple-predict.h"
34 #include "tree-pass.h"		/* FIXME: only for PROP_gimple_any */
35 #include "ssa.h"
36 #include "cgraph.h"
37 #include "tree-pretty-print.h"
38 #include "diagnostic-core.h"
39 #include "alias.h"
40 #include "fold-const.h"
41 #include "calls.h"
42 #include "varasm.h"
43 #include "stmt.h"
44 #include "expr.h"
45 #include "gimple-fold.h"
46 #include "tree-eh.h"
47 #include "gimplify.h"
48 #include "gimple-iterator.h"
49 #include "stor-layout.h"
50 #include "print-tree.h"
51 #include "tree-iterator.h"
52 #include "tree-inline.h"
53 #include "langhooks.h"
54 #include "tree-cfg.h"
55 #include "tree-ssa.h"
56 #include "omp-general.h"
57 #include "omp-low.h"
58 #include "gimple-low.h"
59 #include "gomp-constants.h"
60 #include "splay-tree.h"
61 #include "gimple-walk.h"
62 #include "langhooks-def.h"	/* FIXME: for lhd_set_decl_assembler_name */
63 #include "builtins.h"
64 #include "stringpool.h"
65 #include "attribs.h"
66 #include "asan.h"
67 #include "dbgcnt.h"
68 #include "omp-offload.h"
69 #include "context.h"
70 
/* Hash set of poisoned variables in a bind expr.  NOTE(review): presumably
   tracks variables that AddressSanitizer poisons/unpoisons at scope
   boundaries (asan.h is included above) — confirm against the users.  */
static hash_set<tree> *asan_poisoned_variables = NULL;
73 
/* Per-variable data-sharing flags recorded while gimplifying OMP/OpenACC
   regions.  The low bits are mutually-independent single-bit flags; several
   of the high bits are modifiers that only make sense combined with one of
   the base classes (see the individual comments).  Every enumerator must
   occupy a distinct bit.  */
enum gimplify_omp_var_data
{
  GOVD_SEEN = 0x000001,
  GOVD_EXPLICIT = 0x000002,
  GOVD_SHARED = 0x000004,
  GOVD_PRIVATE = 0x000008,
  GOVD_FIRSTPRIVATE = 0x000010,
  GOVD_LASTPRIVATE = 0x000020,
  GOVD_REDUCTION = 0x000040,
  GOVD_LOCAL = 0x000080,
  GOVD_MAP = 0x000100,
  GOVD_DEBUG_PRIVATE = 0x000200,
  GOVD_PRIVATE_OUTER_REF = 0x000400,
  GOVD_LINEAR = 0x000800,
  GOVD_ALIGNED = 0x001000,

  /* Flag for GOVD_MAP: don't copy back.  */
  GOVD_MAP_TO_ONLY = 0x002000,

  /* Flag for GOVD_LINEAR or GOVD_LASTPRIVATE: no outer reference.  */
  GOVD_LINEAR_LASTPRIVATE_NO_OUTER = 0x004000,

  GOVD_MAP_0LEN_ARRAY = 0x008000,

  /* Flag for GOVD_MAP, if it is always, to or always, tofrom mapping.  */
  GOVD_MAP_ALWAYS_TO = 0x010000,

  /* Flag for shared vars that are or might be stored to in the region.  */
  GOVD_WRITTEN = 0x020000,

  /* Flag for GOVD_MAP, if it is a forced mapping.  */
  GOVD_MAP_FORCE = 0x040000,

  /* Flag for GOVD_MAP: must be present already.  */
  GOVD_MAP_FORCE_PRESENT = 0x080000,

  /* Flag for GOVD_MAP: only allocate.  */
  GOVD_MAP_ALLOC_ONLY = 0x100000,

  /* Flag for GOVD_MAP: only copy back.  */
  GOVD_MAP_FROM_ONLY = 0x200000,

  GOVD_NONTEMPORAL = 0x400000,

  /* Flag for GOVD_LASTPRIVATE: conditional modifier.  */
  GOVD_LASTPRIVATE_CONDITIONAL = 0x800000,

  GOVD_CONDTEMP = 0x1000000,

  /* Flag for GOVD_REDUCTION: inscan seen in {in,ex}clusive clause.  */
  GOVD_REDUCTION_INSCAN = 0x2000000,

  /* Flag for GOVD_MAP: (struct) vars that have pointer attachments for
     fields.  Was 8388608 (== 0x800000), which collided with
     GOVD_LASTPRIVATE_CONDITIONAL; use the next free bit instead.  */
  GOVD_MAP_HAS_ATTACHMENTS = 0x4000000,

  GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE
			   | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LINEAR
			   | GOVD_LOCAL)
};
134 

/* Kinds of OMP/OpenACC regions currently being gimplified.  The base
   region kinds occupy distinct high bits; the low bits (| 1, | 2, | 4)
   distinguish combined/untied/etc. variants of the same base kind.  */
enum omp_region_type
{
  ORT_WORKSHARE = 0x00,
  ORT_TASKGROUP = 0x01,
  ORT_SIMD 	= 0x04,

  ORT_PARALLEL	= 0x08,
  ORT_COMBINED_PARALLEL = ORT_PARALLEL | 1,

  ORT_TASK	= 0x10,
  ORT_UNTIED_TASK = ORT_TASK | 1,
  ORT_TASKLOOP  = ORT_TASK | 2,
  ORT_UNTIED_TASKLOOP = ORT_UNTIED_TASK | 2,

  ORT_TEAMS	= 0x20,
  ORT_COMBINED_TEAMS = ORT_TEAMS | 1,
  ORT_HOST_TEAMS = ORT_TEAMS | 2,
  ORT_COMBINED_HOST_TEAMS = ORT_COMBINED_TEAMS | 2,

  /* Data region.  */
  ORT_TARGET_DATA = 0x40,

  /* Data region with offloading.  */
  ORT_TARGET	= 0x80,
  ORT_COMBINED_TARGET = ORT_TARGET | 1,
  ORT_IMPLICIT_TARGET = ORT_TARGET | 2,

  /* OpenACC variants.  */
  ORT_ACC	= 0x100,  /* A generic OpenACC region.  */
  ORT_ACC_DATA	= ORT_ACC | ORT_TARGET_DATA, /* Data construct.  */
  ORT_ACC_PARALLEL = ORT_ACC | ORT_TARGET,  /* Parallel construct */
  ORT_ACC_KERNELS  = ORT_ACC | ORT_TARGET | 2,  /* Kernels construct.  */
  ORT_ACC_SERIAL   = ORT_ACC | ORT_TARGET | 4,  /* Serial construct.  */
  ORT_ACC_HOST_DATA = ORT_ACC | ORT_TARGET_DATA | 2,  /* Host data.  */

  /* Dummy OpenMP region, used to disable expansion of
     DECL_VALUE_EXPRs in taskloop pre body.  */
  ORT_NONE	= 0x200
};
175 
/* Gimplify hashtable helper: hash/equality traits over elt_t (a
   value/temporary pair) for the formal-temporary table in gimplify_ctx.
   Inherits storage management from free_ptr_hash.  */

struct gimplify_hasher : free_ptr_hash <elt_t>
{
  static inline hashval_t hash (const elt_t *);
  static inline bool equal (const elt_t *, const elt_t *);
};
183 
/* State for one level of gimplification; levels are linked through
   prev_context and pooled by ctx_alloc/ctx_free below.  */
struct gimplify_ctx
{
  struct gimplify_ctx *prev_context;

  /* Stack of GIMPLE_BINDs currently open; see gimple_push_bind_expr.  */
  vec<gbind *> bind_expr_stack;
  /* Chain of temporaries created in this context (DECL_CHAIN-linked).  */
  tree temps;
  /* Cleanups seen while inside a COND_EXPR; flushed when the condition
     nesting count drops back to zero (gimple_pop_condition).  */
  gimple_seq conditional_cleanups;
  tree exit_label;
  tree return_temp;

  vec<tree> case_labels;
  hash_set<tree> *live_switch_vars;
  /* The formal temporary table.  Should this be persistent?  */
  hash_table<gimplify_hasher> *temp_htab;

  /* COND_EXPR nesting depth; > 0 means we are in conditional context.  */
  int conditions;
  unsigned into_ssa : 1;
  unsigned allow_rhs_cond_expr : 1;
  unsigned in_cleanup_point_expr : 1;
  unsigned keep_stack : 1;
  unsigned save_stack : 1;
  unsigned in_switch_expr : 1;
};
207 
/* Categories of variables for the OMP defaultmap clause; used to index
   gimplify_omp_ctx::defaultmap.  */
enum gimplify_defaultmap_kind
{
  GDMK_SCALAR,
  GDMK_AGGREGATE,
  GDMK_ALLOCATABLE,
  GDMK_POINTER
};
215 
/* Per-OMP-region gimplification state; regions nest through
   outer_context.  Created by new_omp_context, freed by
   delete_omp_context.  */
struct gimplify_omp_ctx
{
  struct gimplify_omp_ctx *outer_context;
  /* Map DECL -> gimplify_omp_var_data flags, keyed by DECL_UID.  */
  splay_tree variables;
  hash_set<tree> *privatized_types;
  tree clauses;
  /* Iteration variables in an OMP_FOR.  */
  vec<tree> loop_iter_var;
  location_t location;
  enum omp_clause_default_kind default_kind;
  enum omp_region_type region_type;
  enum tree_code code;
  bool combined_loop;
  bool distribute;
  bool target_firstprivatize_array_bases;
  bool add_safelen1;
  bool order_concurrent;
  bool has_depend;
  bool in_for_exprs;
  /* Indexed by gimplify_defaultmap_kind (4 categories).  */
  int defaultmap[4];
};
237 
/* The current (innermost) gimplification context.  */
static struct gimplify_ctx *gimplify_ctxp;
/* The current (innermost) OMP-region context, or NULL outside OMP.  */
static struct gimplify_omp_ctx *gimplify_omp_ctxp;
/* NOTE(review): presumably true while gimplifying inside an OMP
   construct — confirm against the setters, which are outside this
   chunk.  */
static bool in_omp_construct;

/* Forward declaration.  */
static enum gimplify_status gimplify_compound_expr (tree *, gimple_seq *, bool);
/* NOTE(review): appears to map OpenACC 'declare'd decls to returned
   clauses; users are outside this chunk — verify.  */
static hash_map<tree, tree> *oacc_declare_returns;
static enum gimplify_status gimplify_expr (tree *, gimple_seq *, gimple_seq *,
					   bool (*) (tree), fallback_t, bool);
247 
/* Shorter alias name for the above function for use in gimplify.c
   only.  Appends GS to *SEQ_P without scanning operands, since def/use
   information does not exist yet during gimplification.  */

static inline void
gimplify_seq_add_stmt (gimple_seq *seq_p, gimple *gs)
{
  gimple_seq_add_stmt_without_update (seq_p, gs);
}
256 
257 /* Append sequence SRC to the end of sequence *DST_P.  If *DST_P is
258    NULL, a new sequence is allocated.   This function is
259    similar to gimple_seq_add_seq, but does not scan the operands.
260    During gimplification, we need to manipulate statement sequences
261    before the def/use vectors have been constructed.  */
262 
263 static void
gimplify_seq_add_seq(gimple_seq * dst_p,gimple_seq src)264 gimplify_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
265 {
266   gimple_stmt_iterator si;
267 
268   if (src == NULL)
269     return;
270 
271   si = gsi_last (*dst_p);
272   gsi_insert_seq_after_without_update (&si, src, GSI_NEW_STMT);
273 }
274 

/* Pointer to a list of allocated gimplify_ctx structs to be used for pushing
   and popping gimplify contexts.  Freed contexts are chained here through
   their prev_context field rather than being returned to the heap.  */

static struct gimplify_ctx *ctx_pool = NULL;
280 
281 /* Return a gimplify context struct from the pool.  */
282 
283 static inline struct gimplify_ctx *
ctx_alloc(void)284 ctx_alloc (void)
285 {
286   struct gimplify_ctx * c = ctx_pool;
287 
288   if (c)
289     ctx_pool = c->prev_context;
290   else
291     c = XNEW (struct gimplify_ctx);
292 
293   memset (c, '\0', sizeof (*c));
294   return c;
295 }
296 
/* Put gimplify context C back into the pool.  The memory is not
   released; it will be zeroed on the next ctx_alloc.  */

static inline void
ctx_free (struct gimplify_ctx *c)
{
  c->prev_context = ctx_pool;
  ctx_pool = c;
}
305 
306 /* Free allocated ctx stack memory.  */
307 
308 void
free_gimplify_stack(void)309 free_gimplify_stack (void)
310 {
311   struct gimplify_ctx *c;
312 
313   while ((c = ctx_pool))
314     {
315       ctx_pool = c->prev_context;
316       free (c);
317     }
318 }
319 
320 
321 /* Set up a context for the gimplifier.  */
322 
323 void
push_gimplify_context(bool in_ssa,bool rhs_cond_ok)324 push_gimplify_context (bool in_ssa, bool rhs_cond_ok)
325 {
326   struct gimplify_ctx *c = ctx_alloc ();
327 
328   c->prev_context = gimplify_ctxp;
329   gimplify_ctxp = c;
330   gimplify_ctxp->into_ssa = in_ssa;
331   gimplify_ctxp->allow_rhs_cond_expr = rhs_cond_ok;
332 }
333 
/* Tear down a context for the gimplifier.  If BODY is non-null, then
   put the temporaries into the outer BIND_EXPR.  Otherwise, put them
   in the local_decls.

   BODY is not a sequence, but the first tuple in a sequence.  */

void
pop_gimplify_context (gimple *body)
{
  struct gimplify_ctx *c = gimplify_ctxp;

  /* All GIMPLE_BINDs opened in this context must have been closed.  */
  gcc_assert (c
              && (!c->bind_expr_stack.exists ()
		  || c->bind_expr_stack.is_empty ()));
  c->bind_expr_stack.release ();
  gimplify_ctxp = c->prev_context;

  if (body)
    declare_vars (c->temps, body, false);
  else
    record_vars (c->temps);

  /* Drop the formal-temporary table; clear the pointer so the pooled
     struct does not carry a dangling reference.  */
  delete c->temp_htab;
  c->temp_htab = NULL;
  ctx_free (c);
}
360 
/* Push a GIMPLE_BIND tuple onto the stack of bindings.  The reserve
   pre-grows the vector so the common shallow nesting avoids repeated
   reallocation.  */

static void
gimple_push_bind_expr (gbind *bind_stmt)
{
  gimplify_ctxp->bind_expr_stack.reserve (8);
  gimplify_ctxp->bind_expr_stack.safe_push (bind_stmt);
}
369 
/* Pop the first element off the stack of bindings.  The caller must
   ensure the stack is non-empty.  */

static void
gimple_pop_bind_expr (void)
{
  gimplify_ctxp->bind_expr_stack.pop ();
}
377 
/* Return the first element of the stack of bindings, i.e. the
   innermost currently-open GIMPLE_BIND.  */

gbind *
gimple_current_bind_expr (void)
{
  return gimplify_ctxp->bind_expr_stack.last ();
}
385 
/* Return the stack of bindings created during gimplification.
   Returned by value; the vec shares storage with the context.  */

vec<gbind *>
gimple_bind_expr_stack (void)
{
  return gimplify_ctxp->bind_expr_stack;
}
393 
394 /* Return true iff there is a COND_EXPR between us and the innermost
395    CLEANUP_POINT_EXPR.  This info is used by gimple_push_cleanup.  */
396 
397 static bool
gimple_conditional_context(void)398 gimple_conditional_context (void)
399 {
400   return gimplify_ctxp->conditions > 0;
401 }
402 
/* Note that we've entered a COND_EXPR by bumping the nesting counter.  */

static void
gimple_push_condition (void)
{
#ifdef ENABLE_GIMPLE_CHECKING
  /* At the outermost level no conditional cleanups may be pending.  */
  if (gimplify_ctxp->conditions == 0)
    gcc_assert (gimple_seq_empty_p (gimplify_ctxp->conditional_cleanups));
#endif
  ++(gimplify_ctxp->conditions);
}
414 
/* Note that we've left a COND_EXPR.  If we're back at unconditional scope
   now, add any conditional cleanups we've seen to the prequeue.  */

static void
gimple_pop_condition (gimple_seq *pre_p)
{
  int conds = --(gimplify_ctxp->conditions);

  gcc_assert (conds >= 0);
  if (conds == 0)
    {
      /* Flush accumulated cleanups into PRE_P and reset the queue.  */
      gimplify_seq_add_seq (pre_p, gimplify_ctxp->conditional_cleanups);
      gimplify_ctxp->conditional_cleanups = NULL;
    }
}
430 
431 /* A stable comparison routine for use with splay trees and DECLs.  */
432 
433 static int
splay_tree_compare_decl_uid(splay_tree_key xa,splay_tree_key xb)434 splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
435 {
436   tree a = (tree) xa;
437   tree b = (tree) xb;
438 
439   return DECL_UID (a) - DECL_UID (b);
440 }
441 
/* Create a new omp construct that deals with variable remapping.
   REGION_TYPE identifies the kind of region; the new context nests
   inside the current gimplify_omp_ctxp (which is NOT updated here).  */

static struct gimplify_omp_ctx *
new_omp_context (enum omp_region_type region_type)
{
  struct gimplify_omp_ctx *c;

  c = XCNEW (struct gimplify_omp_ctx);
  c->outer_context = gimplify_omp_ctxp;
  c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
  c->privatized_types = new hash_set<tree>;
  c->location = input_location;
  c->region_type = region_type;
  /* Tasks (and taskloops) leave the default data-sharing unspecified;
     everything else defaults to shared.  */
  if ((region_type & ORT_TASK) == 0)
    c->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
  else
    c->default_kind = OMP_CLAUSE_DEFAULT_UNSPECIFIED;
  /* Every defaultmap category starts out as plain GOVD_MAP.  */
  c->defaultmap[GDMK_SCALAR] = GOVD_MAP;
  c->defaultmap[GDMK_AGGREGATE] = GOVD_MAP;
  c->defaultmap[GDMK_ALLOCATABLE] = GOVD_MAP;
  c->defaultmap[GDMK_POINTER] = GOVD_MAP;

  return c;
}
466 
/* Destroy an omp construct that deals with variable remapping,
   releasing the splay tree, the privatized-types set and the
   iteration-variable vector before freeing C itself.  */

static void
delete_omp_context (struct gimplify_omp_ctx *c)
{
  splay_tree_delete (c->variables);
  delete c->privatized_types;
  c->loop_iter_var.release ();
  XDELETE (c);
}
477 
static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int);
static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool);

/* Both gimplify the statement T and append it to *SEQ_P.  This function
   behaves exactly as gimplify_stmt, but you don't have to pass T as a
   reference.  */

void
gimplify_and_add (tree t, gimple_seq *seq_p)
{
  gimplify_stmt (&t, seq_p);
}
490 
/* Gimplify statement T into sequence *SEQ_P, and return the first
   tuple in the sequence of generated tuples for this statement.
   Return NULL if gimplifying T produced no tuples.  */

static gimple *
gimplify_and_return_first (tree t, gimple_seq *seq_p)
{
  /* Remember where the sequence ended before gimplifying T, so we can
     find the first statement T contributed.  */
  gimple_stmt_iterator last = gsi_last (*seq_p);

  gimplify_and_add (t, seq_p);

  if (!gsi_end_p (last))
    {
      /* The sequence was non-empty: T's first tuple is the one right
	 after the old tail.  */
      gsi_next (&last);
      return gsi_stmt (last);
    }
  else
    /* The sequence was empty: T's first tuple (if any) heads it.  */
    return gimple_seq_first_stmt (*seq_p);
}
510 
511 /* Returns true iff T is a valid RHS for an assignment to an un-renamed
512    LHS, or for a call argument.  */
513 
514 static bool
is_gimple_mem_rhs(tree t)515 is_gimple_mem_rhs (tree t)
516 {
517   /* If we're dealing with a renamable type, either source or dest must be
518      a renamed variable.  */
519   if (is_gimple_reg_type (TREE_TYPE (t)))
520     return is_gimple_val (t);
521   else
522     return is_gimple_val (t) || is_gimple_lvalue (t);
523 }
524 
525 /* Return true if T is a CALL_EXPR or an expression that can be
526    assigned to a temporary.  Note that this predicate should only be
527    used during gimplification.  See the rationale for this in
528    gimplify_modify_expr.  */
529 
530 static bool
is_gimple_reg_rhs_or_call(tree t)531 is_gimple_reg_rhs_or_call (tree t)
532 {
533   return (get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS
534 	  || TREE_CODE (t) == CALL_EXPR);
535 }
536 
537 /* Return true if T is a valid memory RHS or a CALL_EXPR.  Note that
538    this predicate should only be used during gimplification.  See the
539    rationale for this in gimplify_modify_expr.  */
540 
541 static bool
is_gimple_mem_rhs_or_call(tree t)542 is_gimple_mem_rhs_or_call (tree t)
543 {
544   /* If we're dealing with a renamable type, either source or dest must be
545      a renamed variable.  */
546   if (is_gimple_reg_type (TREE_TYPE (t)))
547     return is_gimple_val (t);
548   else
549     return (is_gimple_val (t)
550 	    || is_gimple_lvalue (t)
551 	    || TREE_CLOBBER_P (t)
552 	    || TREE_CODE (t) == CALL_EXPR);
553 }
554 
/* Create a temporary with a name derived from VAL.  Subroutine of
   lookup_tmp_var; nobody else should call this function.  */

static inline tree
create_tmp_from_val (tree val)
{
  /* Drop all qualifiers and address-space information from the value type.  */
  tree type = TYPE_MAIN_VARIANT (TREE_TYPE (val));
  tree var = create_tmp_var (type, get_name (val));
  /* Complex and vector temporaries are marked as GIMPLE registers.  */
  if (TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
      || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE)
    DECL_GIMPLE_REG_P (var) = 1;
  return var;
}
569 
/* Create a temporary to hold the value of VAL.  If IS_FORMAL, try to reuse
   an existing expression temporary by looking VAL up in (and inserting it
   into) the context's formal-temporary hash table.  */

static tree
lookup_tmp_var (tree val, bool is_formal)
{
  tree ret;

  /* If not optimizing, never really reuse a temporary.  local-alloc
     won't allocate any variable that is used in more than one basic
     block, which means it will go into memory, causing much extra
     work in reload and final and poorer code generation, outweighing
     the extra memory allocation here.  */
  if (!optimize || !is_formal || TREE_SIDE_EFFECTS (val))
    ret = create_tmp_from_val (val);
  else
    {
      elt_t elt, *elt_p;
      elt_t **slot;

      elt.val = val;
      /* The table is created lazily on first use.  */
      if (!gimplify_ctxp->temp_htab)
        gimplify_ctxp->temp_htab = new hash_table<gimplify_hasher> (1000);
      slot = gimplify_ctxp->temp_htab->find_slot (&elt, INSERT);
      if (*slot == NULL)
	{
	  /* First time we see VAL: create and record a temporary.  */
	  elt_p = XNEW (elt_t);
	  elt_p->val = val;
	  elt_p->temp = ret = create_tmp_from_val (val);
	  *slot = elt_p;
	}
      else
	{
	  /* VAL already has a temporary; reuse it.  */
	  elt_p = *slot;
          ret = elt_p->temp;
	}
    }

  return ret;
}
610 
/* Helper for get_formal_tmp_var and get_initialized_tmp_var.  Gimplify
   VAL into a fresh temporary (an SSA name when ALLOW_SSA and the context
   gimplifies into SSA), emit the initializing assignment into *PRE_P,
   and return the temporary.  */

static tree
internal_get_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
                      bool is_formal, bool allow_ssa)
{
  tree t, mod;

  /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we
     can create an INIT_EXPR and convert it into a GIMPLE_CALL below.  */
  gimplify_expr (&val, pre_p, post_p, is_gimple_reg_rhs_or_call,
		 fb_rvalue);

  if (allow_ssa
      && gimplify_ctxp->into_ssa
      && is_gimple_reg_type (TREE_TYPE (val)))
    {
      t = make_ssa_name (TYPE_MAIN_VARIANT (TREE_TYPE (val)));
      if (! gimple_in_ssa_p (cfun))
	{
	  /* Give the SSA name a recognizable identifier derived from
	     VAL's name, for readable dumps.  */
	  const char *name = get_name (val);
	  if (name)
	    SET_SSA_NAME_VAR_OR_IDENTIFIER (t, create_tmp_var_name (name));
	}
    }
  else
    t = lookup_tmp_var (val, is_formal);

  mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val));

  SET_EXPR_LOCATION (mod, EXPR_LOC_OR_LOC (val, input_location));

  /* gimplify_modify_expr might want to reduce this further.  */
  gimplify_and_add (mod, pre_p);
  ggc_free (mod);

  return t;
}
649 
/* Return a formal temporary variable initialized with VAL.  PRE_P is as
   in gimplify_expr.  Only use this function if:

   1) The value of the unfactored expression represented by VAL will not
      change between the initialization and use of the temporary, and
   2) The temporary will not be otherwise modified.

   For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
   and #2 means it is inappropriate for && temps.

   For other cases, use get_initialized_tmp_var instead.  */

tree
get_formal_tmp_var (tree val, gimple_seq *pre_p)
{
  /* is_formal=true enables temporary reuse; allow_ssa=true.  */
  return internal_get_tmp_var (val, pre_p, NULL, true, true);
}
667 
/* Return a temporary variable initialized with VAL.  PRE_P and POST_P
   are as in gimplify_expr.  Unlike get_formal_tmp_var, the temporary is
   never shared with other expressions (is_formal=false).  */

tree
get_initialized_tmp_var (tree val, gimple_seq *pre_p,
			 gimple_seq *post_p /* = NULL */,
			 bool allow_ssa /* = true */)
{
  return internal_get_tmp_var (val, pre_p, post_p, false, allow_ssa);
}
678 
/* Declare all the variables in VARS in SCOPE.  If DEBUG_INFO is true,
   generate debug info for them; otherwise don't.  GS must be a
   GIMPLE_BIND.  VARS is a DECL_CHAIN-linked list in reverse order.  */

void
declare_vars (tree vars, gimple *gs, bool debug_info)
{
  tree last = vars;
  if (last)
    {
      tree temps, block;

      gbind *scope = as_a <gbind *> (gs);

      /* VARS arrives newest-first; put it in declaration order.  After
	 nreverse, LAST is the tail of TEMPS.  */
      temps = nreverse (last);

      block = gimple_bind_block (scope);
      gcc_assert (!block || TREE_CODE (block) == BLOCK);
      if (!block || !debug_info)
	{
	  /* Splice TEMPS in front of the bind's existing variables.  */
	  DECL_CHAIN (last) = gimple_bind_vars (scope);
	  gimple_bind_set_vars (scope, temps);
	}
      else
	{
	  /* We need to attach the nodes both to the BIND_EXPR and to its
	     associated BLOCK for debugging purposes.  The key point here
	     is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
	     is a subchain of the BIND_EXPR_VARS of the BIND_EXPR.  */
	  if (BLOCK_VARS (block))
	    BLOCK_VARS (block) = chainon (BLOCK_VARS (block), temps);
	  else
	    {
	      gimple_bind_set_vars (scope,
	      			    chainon (gimple_bind_vars (scope), temps));
	      BLOCK_VARS (block) = temps;
	    }
	}
    }
}
718 
/* For VAR a VAR_DECL of variable size, try to find a constant upper bound
   for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly.  Abort if
   no such upper bound can be obtained.  */

static void
force_constant_size (tree var)
{
  /* The only attempt we make is by querying the maximum size of objects
     of the variable's type.  */

  HOST_WIDE_INT max_size;

  gcc_assert (VAR_P (var));

  max_size = max_int_size_in_bytes (TREE_TYPE (var));

  /* A negative result means no upper bound exists; we cannot proceed.  */
  gcc_assert (max_size >= 0);

  DECL_SIZE_UNIT (var)
    = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var)), max_size);
  DECL_SIZE (var)
    = build_int_cst (TREE_TYPE (DECL_SIZE (var)), max_size * BITS_PER_UNIT);
}
742 
/* Push the temporary variable TMP into the local decls of function FN
   (rather than the current gimplification context).  */

void
gimple_add_tmp_var_fn (struct function *fn, tree tmp)
{
  /* TMP must be a fresh decl: not chained anywhere, not yet seen in a
     BIND_EXPR.  */
  gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));

  /* Later processing assumes that the object size is constant, which might
     not be true at this point.  Force the use of a constant upper bound in
     this case.  */
  if (!tree_fits_poly_uint64_p (DECL_SIZE_UNIT (tmp)))
    force_constant_size (tmp);

  DECL_CONTEXT (tmp) = fn->decl;
  DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;

  record_vars_into (tmp, fn->decl);
}
761 
/* Push the temporary variable TMP into the current binding.  Depending
   on context, TMP is recorded in the gimplify context's temps chain, in
   cfun's local decls, or directly in the outermost GIMPLE_BIND (nested
   functions).  Inside OMP regions, TMP is also registered with the
   appropriate data-sharing flags.  */

void
gimple_add_tmp_var (tree tmp)
{
  gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));

  /* Later processing assumes that the object size is constant, which might
     not be true at this point.  Force the use of a constant upper bound in
     this case.  */
  if (!tree_fits_poly_uint64_p (DECL_SIZE_UNIT (tmp)))
    force_constant_size (tmp);

  DECL_CONTEXT (tmp) = current_function_decl;
  DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;

  if (gimplify_ctxp)
    {
      DECL_CHAIN (tmp) = gimplify_ctxp->temps;
      gimplify_ctxp->temps = tmp;

      /* Mark temporaries local within the nearest enclosing parallel.  */
      if (gimplify_omp_ctxp)
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
	  int flag = GOVD_LOCAL | GOVD_SEEN;
	  /* Walk outward past region kinds that do not own variables.  */
	  while (ctx
		 && (ctx->region_type == ORT_WORKSHARE
		     || ctx->region_type == ORT_TASKGROUP
		     || ctx->region_type == ORT_SIMD
		     || ctx->region_type == ORT_ACC))
	    {
	      /* An addressable non-static temporary inside a SIMD region
		 is privatized there instead (or, if its size is not
		 constant, the region gets safelen(1)).  */
	      if (ctx->region_type == ORT_SIMD
		  && TREE_ADDRESSABLE (tmp)
		  && !TREE_STATIC (tmp))
		{
		  if (TREE_CODE (DECL_SIZE_UNIT (tmp)) != INTEGER_CST)
		    ctx->add_safelen1 = true;
		  else if (ctx->in_for_exprs)
		    flag = GOVD_PRIVATE;
		  else
		    flag = GOVD_PRIVATE | GOVD_SEEN;
		  break;
		}
	      ctx = ctx->outer_context;
	    }
	  if (ctx)
	    omp_add_variable (ctx, tmp, flag);
	}
    }
  else if (cfun)
    record_vars (tmp);
  else
    {
      gimple_seq body_seq;

      /* This case is for nested functions.  We need to expose the locals
	 they create.  */
      body_seq = gimple_body (current_function_decl);
      declare_vars (tmp, gimple_seq_first_stmt (body_seq), false);
    }
}
824 
825 
826 
827 /* This page contains routines to unshare tree nodes, i.e. to duplicate tree
828    nodes that are referenced more than once in GENERIC functions.  This is
829    necessary because gimplification (translation into GIMPLE) is performed
830    by modifying tree nodes in-place, so gimplification of a shared node in a
831    first context could generate an invalid GIMPLE form in a second context.
832 
833    This is achieved with a simple mark/copy/unmark algorithm that walks the
834    GENERIC representation top-down, marks nodes with TREE_VISITED the first
835    time it encounters them, duplicates them if they already have TREE_VISITED
836    set, and finally removes the TREE_VISITED marks it has set.
837 
838    The algorithm works only at the function level, i.e. it generates a GENERIC
839    representation of a function with no nodes shared within the function when
840    passed a GENERIC function (except for nodes that are allowed to be shared).
841 
842    At the global level, it is also necessary to unshare tree nodes that are
843    referenced in more than one function, for the same aforementioned reason.
844    This requires some cooperation from the front-end.  There are 2 strategies:
845 
846      1. Manual unsharing.  The front-end needs to call unshare_expr on every
847         expression that might end up being shared across functions.
848 
849      2. Deep unsharing.  This is an extension of regular unsharing.  Instead
850         of calling unshare_expr on expressions that might be shared across
851         functions, the front-end pre-marks them with TREE_VISITED.  This will
852         ensure that they are unshared on the first reference within functions
853         when the regular unsharing algorithm runs.  The counterpart is that
854         this algorithm must look deeper than for manual unsharing, which is
855         specified by LANG_HOOKS_DEEP_UNSHARING.
856 
857   If there are only few specific cases of node sharing across functions, it is
858   probably easier for a front-end to unshare the expressions manually.  On the
859   contrary, if the expressions generated at the global level are as widespread
860   as expressions generated within functions, deep unsharing is very likely the
861   way to go.  */
862 
/* Similar to copy_tree_r but do not copy SAVE_EXPR or TARGET_EXPR nodes.
   These nodes model computations that must be done once.  If we were to
   unshare something like SAVE_EXPR(i++), the gimplification process would
   create wrong code.  However, if DATA is non-null, it must hold a pointer
   set that is used to unshare the subtrees of these nodes.  */

static tree
mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;
  enum tree_code code = TREE_CODE (t);

  /* Do not copy SAVE_EXPR, TARGET_EXPR or BIND_EXPR nodes themselves, but
     copy their subtrees if we can make sure to do it only once.  The
     pointer set in DATA records which of these nodes we have already
     descended into.  */
  if (code == SAVE_EXPR || code == TARGET_EXPR || code == BIND_EXPR)
    {
      if (data && !((hash_set<tree> *)data)->add (t))
	;  /* First visit: walk the subtrees (but don't copy T itself).  */
      else
	*walk_subtrees = 0;
    }

  /* Stop at types, decls, constants like copy_tree_r.  */
  else if (TREE_CODE_CLASS (code) == tcc_type
	   || TREE_CODE_CLASS (code) == tcc_declaration
	   || TREE_CODE_CLASS (code) == tcc_constant)
    *walk_subtrees = 0;

  /* Cope with the statement expression extension.  */
  else if (code == STATEMENT_LIST)
    ;

  /* Leave the bulk of the work to copy_tree_r itself.  */
  else
    copy_tree_r (tp, walk_subtrees, NULL);

  return NULL_TREE;
}
901 
/* Callback for walk_tree to unshare most of the shared trees rooted at *TP.
   If *TP has been visited already, then *TP is deeply copied by calling
   mostly_copy_tree_r.  DATA is passed to mostly_copy_tree_r unmodified.  */

static tree
copy_if_shared_r (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;
  enum tree_code code = TREE_CODE (t);

  /* Skip types, decls, and constants.  But we do want to look at their
     types and the bounds of types.  Mark them as visited so we properly
     unmark their subtrees on the unmark pass.  If we've already seen them,
     don't look down further.  */
  if (TREE_CODE_CLASS (code) == tcc_type
      || TREE_CODE_CLASS (code) == tcc_declaration
      || TREE_CODE_CLASS (code) == tcc_constant)
    {
      if (TREE_VISITED (t))
	*walk_subtrees = 0;
      else
	TREE_VISITED (t) = 1;
    }

  /* If this node has been visited already, unshare it and don't look
     any deeper.  */
  else if (TREE_VISITED (t))
    {
      /* The copy pass handles the subtrees itself.  */
      walk_tree (tp, mostly_copy_tree_r, data, NULL);
      *walk_subtrees = 0;
    }

  /* Otherwise, mark the node as visited and keep looking.  */
  else
    TREE_VISITED (t) = 1;

  return NULL_TREE;
}
940 
/* Unshare most of the shared trees rooted at *TP.  DATA is passed to the
   copy_if_shared_r callback unmodified.  Leaves TREE_VISITED marks set;
   callers are expected to clear them afterwards (see unmark_visited).  */

void
copy_if_shared (tree *tp, void *data)
{
  walk_tree (tp, copy_if_shared_r, data, NULL);
}
949 
950 /* Unshare all the trees in the body of FNDECL, as well as in the bodies of
951    any nested functions.  */
952 
953 static void
unshare_body(tree fndecl)954 unshare_body (tree fndecl)
955 {
956   struct cgraph_node *cgn = cgraph_node::get (fndecl);
957   /* If the language requires deep unsharing, we need a pointer set to make
958      sure we don't repeatedly unshare subtrees of unshareable nodes.  */
959   hash_set<tree> *visited
960     = lang_hooks.deep_unsharing ? new hash_set<tree> : NULL;
961 
962   copy_if_shared (&DECL_SAVED_TREE (fndecl), visited);
963   copy_if_shared (&DECL_SIZE (DECL_RESULT (fndecl)), visited);
964   copy_if_shared (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)), visited);
965 
966   delete visited;
967 
968   if (cgn)
969     for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
970       unshare_body (cgn->decl);
971 }
972 
973 /* Callback for walk_tree to unmark the visited trees rooted at *TP.
974    Subtrees are walked until the first unvisited node is encountered.  */
975 
976 static tree
unmark_visited_r(tree * tp,int * walk_subtrees,void * data ATTRIBUTE_UNUSED)977 unmark_visited_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
978 {
979   tree t = *tp;
980 
981   /* If this node has been visited, unmark it and keep looking.  */
982   if (TREE_VISITED (t))
983     TREE_VISITED (t) = 0;
984 
985   /* Otherwise, don't look any deeper.  */
986   else
987     *walk_subtrees = 0;
988 
989   return NULL_TREE;
990 }
991 
992 /* Unmark the visited trees rooted at *TP.  */
993 
994 static inline void
unmark_visited(tree * tp)995 unmark_visited (tree *tp)
996 {
997   walk_tree (tp, unmark_visited_r, NULL, NULL);
998 }
999 
1000 /* Likewise, but mark all trees as not visited.  */
1001 
1002 static void
unvisit_body(tree fndecl)1003 unvisit_body (tree fndecl)
1004 {
1005   struct cgraph_node *cgn = cgraph_node::get (fndecl);
1006 
1007   unmark_visited (&DECL_SAVED_TREE (fndecl));
1008   unmark_visited (&DECL_SIZE (DECL_RESULT (fndecl)));
1009   unmark_visited (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)));
1010 
1011   if (cgn)
1012     for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
1013       unvisit_body (cgn->decl);
1014 }
1015 
1016 /* Unconditionally make an unshared copy of EXPR.  This is used when using
1017    stored expressions which span multiple functions, such as BINFO_VTABLE,
1018    as the normal unsharing process can't tell that they're shared.  */
1019 
1020 tree
unshare_expr(tree expr)1021 unshare_expr (tree expr)
1022 {
1023   walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
1024   return expr;
1025 }
1026 
1027 /* Worker for unshare_expr_without_location.  */
1028 
1029 static tree
prune_expr_location(tree * tp,int * walk_subtrees,void *)1030 prune_expr_location (tree *tp, int *walk_subtrees, void *)
1031 {
1032   if (EXPR_P (*tp))
1033     SET_EXPR_LOCATION (*tp, UNKNOWN_LOCATION);
1034   else
1035     *walk_subtrees = 0;
1036   return NULL_TREE;
1037 }
1038 
1039 /* Similar to unshare_expr but also prune all expression locations
1040    from EXPR.  */
1041 
1042 tree
unshare_expr_without_location(tree expr)1043 unshare_expr_without_location (tree expr)
1044 {
1045   walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
1046   if (EXPR_P (expr))
1047     walk_tree (&expr, prune_expr_location, NULL, NULL);
1048   return expr;
1049 }
1050 
1051 /* Return the EXPR_LOCATION of EXPR, if it (maybe recursively) has
1052    one, OR_ELSE otherwise.  The location of a STATEMENT_LISTs
1053    comprising at least one DEBUG_BEGIN_STMT followed by exactly one
1054    EXPR is the location of the EXPR.  */
1055 
1056 static location_t
1057 rexpr_location (tree expr, location_t or_else = UNKNOWN_LOCATION)
1058 {
1059   if (!expr)
1060     return or_else;
1061 
1062   if (EXPR_HAS_LOCATION (expr))
1063     return EXPR_LOCATION (expr);
1064 
1065   if (TREE_CODE (expr) != STATEMENT_LIST)
1066     return or_else;
1067 
1068   tree_stmt_iterator i = tsi_start (expr);
1069 
1070   bool found = false;
1071   while (!tsi_end_p (i) && TREE_CODE (tsi_stmt (i)) == DEBUG_BEGIN_STMT)
1072     {
1073       found = true;
1074       tsi_next (&i);
1075     }
1076 
1077   if (!found || !tsi_one_before_end_p (i))
1078     return or_else;
1079 
1080   return rexpr_location (tsi_stmt (i), or_else);
1081 }
1082 
1083 /* Return TRUE iff EXPR (maybe recursively) has a location; see
1084    rexpr_location for the potential recursion.  */
1085 
1086 static inline bool
rexpr_has_location(tree expr)1087 rexpr_has_location (tree expr)
1088 {
1089   return rexpr_location (expr) != UNKNOWN_LOCATION;
1090 }
1091 
1092 
1093 /* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
1094    contain statements and have a value.  Assign its value to a temporary
1095    and give it void_type_node.  Return the temporary, or NULL_TREE if
1096    WRAPPER was already void.  */
1097 
1098 tree
voidify_wrapper_expr(tree wrapper,tree temp)1099 voidify_wrapper_expr (tree wrapper, tree temp)
1100 {
1101   tree type = TREE_TYPE (wrapper);
1102   if (type && !VOID_TYPE_P (type))
1103     {
1104       tree *p;
1105 
1106       /* Set p to point to the body of the wrapper.  Loop until we find
1107 	 something that isn't a wrapper.  */
1108       for (p = &wrapper; p && *p; )
1109 	{
1110 	  switch (TREE_CODE (*p))
1111 	    {
1112 	    case BIND_EXPR:
1113 	      TREE_SIDE_EFFECTS (*p) = 1;
1114 	      TREE_TYPE (*p) = void_type_node;
1115 	      /* For a BIND_EXPR, the body is operand 1.  */
1116 	      p = &BIND_EXPR_BODY (*p);
1117 	      break;
1118 
	    /* These wrappers carry their value in operand 0.  */
1119 	    case CLEANUP_POINT_EXPR:
1120 	    case TRY_FINALLY_EXPR:
1121 	    case TRY_CATCH_EXPR:
1122 	      TREE_SIDE_EFFECTS (*p) = 1;
1123 	      TREE_TYPE (*p) = void_type_node;
1124 	      p = &TREE_OPERAND (*p, 0);
1125 	      break;
1126 
	    /* A statement list's value is that of its last statement;
	       an empty list yields no value at all.  */
1127 	    case STATEMENT_LIST:
1128 	      {
1129 		tree_stmt_iterator i = tsi_last (*p);
1130 		TREE_SIDE_EFFECTS (*p) = 1;
1131 		TREE_TYPE (*p) = void_type_node;
1132 		p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i);
1133 	      }
1134 	      break;
1135 
1136 	    case COMPOUND_EXPR:
1137 	      /* Advance to the last statement.  Set all container types to
1138 		 void.  */
1139 	      for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1))
1140 		{
1141 		  TREE_SIDE_EFFECTS (*p) = 1;
1142 		  TREE_TYPE (*p) = void_type_node;
1143 		}
1144 	      break;
1145 
1146 	    case TRANSACTION_EXPR:
1147 	      TREE_SIDE_EFFECTS (*p) = 1;
1148 	      TREE_TYPE (*p) = void_type_node;
1149 	      p = &TRANSACTION_EXPR_BODY (*p);
1150 	      break;
1151 
1152 	    default:
1153 	      /* Assume that any tree upon which voidify_wrapper_expr is
1154 		 directly called is a wrapper, and that its body is op0.  */
1155 	      if (p == &wrapper)
1156 		{
1157 		  TREE_SIDE_EFFECTS (*p) = 1;
1158 		  TREE_TYPE (*p) = void_type_node;
1159 		  p = &TREE_OPERAND (*p, 0);
1160 		  break;
1161 		}
1162 	      goto out;
1163 	    }
1164 	}
1165 
1166     out:
      /* Here P points at the innermost value-producing expression, or is
	 NULL/empty when the wrapper produces no usable value.  */
1167       if (p == NULL || IS_EMPTY_STMT (*p))
1168 	temp = NULL_TREE;
1169       else if (temp)
1170 	{
1171 	  /* The wrapper is on the RHS of an assignment that we're pushing
1172 	     down.  */
1173 	  gcc_assert (TREE_CODE (temp) == INIT_EXPR
1174 		      || TREE_CODE (temp) == MODIFY_EXPR);
1175 	  TREE_OPERAND (temp, 1) = *p;
1176 	  *p = temp;
1177 	}
1178       else
1179 	{
	  /* Otherwise create a fresh temporary and initialize it in place
	     from the value expression.  */
1180 	  temp = create_tmp_var (type, "retval");
1181 	  *p = build2 (INIT_EXPR, type, temp, *p);
1182 	}
1183 
1184       return temp;
1185     }
1186 
1187   return NULL_TREE;
1188 }
1189 
1190 /* Prepare calls to builtins to SAVE and RESTORE the stack as well as
1191    a temporary through which they communicate.  */
1192 
1193 static void
build_stack_save_restore(gcall ** save,gcall ** restore)1194 build_stack_save_restore (gcall **save, gcall **restore)
1195 {
1196   tree tmp_var;
1197 
1198   *save = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_SAVE), 0);
1199   tmp_var = create_tmp_var (ptr_type_node, "saved_stack");
1200   gimple_call_set_lhs (*save, tmp_var);
1201 
1202   *restore
1203     = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_RESTORE),
1204 			 1, tmp_var);
1205 }
1206 
1207 /* Generate IFN_ASAN_MARK call that poisons shadow of a for DECL variable.  */
1208 
1209 static tree
build_asan_poison_call_expr(tree decl)1210 build_asan_poison_call_expr (tree decl)
1211 {
1212   /* Do not poison variables that have size equal to zero.  */
1213   tree unit_size = DECL_SIZE_UNIT (decl);
1214   if (zerop (unit_size))
1215     return NULL_TREE;
1216 
1217   tree base = build_fold_addr_expr (decl);
1218 
1219   return build_call_expr_internal_loc (UNKNOWN_LOCATION, IFN_ASAN_MARK,
1220 				       void_type_node, 3,
1221 				       build_int_cst (integer_type_node,
1222 						      ASAN_MARK_POISON),
1223 				       base, unit_size);
1224 }
1225 
1226 /* Generate IFN_ASAN_MARK call that would poison or unpoison, depending
1227    on POISON flag, shadow memory of a DECL variable.  The call will be
1228    put on location identified by IT iterator, where BEFORE flag drives
1229    position where the stmt will be put.  */
1230 
1231 static void
asan_poison_variable(tree decl,bool poison,gimple_stmt_iterator * it,bool before)1232 asan_poison_variable (tree decl, bool poison, gimple_stmt_iterator *it,
1233 		      bool before)
1234 {
1235   tree unit_size = DECL_SIZE_UNIT (decl);
1236   tree base = build_fold_addr_expr (decl);
1237 
1238   /* Do not poison variables that have size equal to zero.  */
1239   if (zerop (unit_size))
1240     return;
1241 
1242   /* It's necessary to have all stack variables aligned to ASAN granularity
1243      bytes.  */
1244   if (DECL_ALIGN_UNIT (decl) <= ASAN_SHADOW_GRANULARITY)
1245     SET_DECL_ALIGN (decl, BITS_PER_UNIT * ASAN_SHADOW_GRANULARITY);
1246 
1247   HOST_WIDE_INT flags = poison ? ASAN_MARK_POISON : ASAN_MARK_UNPOISON;
1248 
1249   gimple *g
1250     = gimple_build_call_internal (IFN_ASAN_MARK, 3,
1251 				  build_int_cst (integer_type_node, flags),
1252 				  base, unit_size);
1253 
1254   if (before)
1255     gsi_insert_before (it, g, GSI_NEW_STMT);
1256   else
1257     gsi_insert_after (it, g, GSI_NEW_STMT);
1258 }
1259 
1260 /* Generate IFN_ASAN_MARK internal call that depending on POISON flag
1261    either poisons or unpoisons a DECL.  Created statement is appended
1262    to SEQ_P gimple sequence.  */
1263 
1264 static void
asan_poison_variable(tree decl,bool poison,gimple_seq * seq_p)1265 asan_poison_variable (tree decl, bool poison, gimple_seq *seq_p)
1266 {
1267   gimple_stmt_iterator it = gsi_last (*seq_p);
1268   bool before = false;
1269 
1270   if (gsi_end_p (it))
1271     before = true;
1272 
1273   asan_poison_variable (decl, poison, &it, before);
1274 }
1275 
1276 /* Sort pair of VAR_DECLs A and B by DECL_UID.  */
1277 
1278 static int
sort_by_decl_uid(const void * a,const void * b)1279 sort_by_decl_uid (const void *a, const void *b)
1280 {
1281   const tree *t1 = (const tree *)a;
1282   const tree *t2 = (const tree *)b;
1283 
1284   int uid1 = DECL_UID (*t1);
1285   int uid2 = DECL_UID (*t2);
1286 
1287   if (uid1 < uid2)
1288     return -1;
1289   else if (uid1 > uid2)
1290     return 1;
1291   else
1292     return 0;
1293 }
1294 
1295 /* Generate IFN_ASAN_MARK internal call for all VARIABLES
1296    depending on POISON flag.  Created statement is appended
1297    to SEQ_P gimple sequence.  */
1298 
1299 static void
asan_poison_variables(hash_set<tree> * variables,bool poison,gimple_seq * seq_p)1300 asan_poison_variables (hash_set<tree> *variables, bool poison, gimple_seq *seq_p)
1301 {
1302   unsigned c = variables->elements ();
1303   if (c == 0)
1304     return;
1305 
1306   auto_vec<tree> sorted_variables (c);
1307 
1308   for (hash_set<tree>::iterator it = variables->begin ();
1309        it != variables->end (); ++it)
1310     sorted_variables.safe_push (*it);
1311 
1312   sorted_variables.qsort (sort_by_decl_uid);
1313 
1314   unsigned i;
1315   tree var;
1316   FOR_EACH_VEC_ELT (sorted_variables, i, var)
1317     {
1318       asan_poison_variable (var, poison, seq_p);
1319 
1320       /* Add use_after_scope_memory attribute for the variable in order
1321 	 to prevent re-written into SSA.  */
1322       if (!lookup_attribute (ASAN_USE_AFTER_SCOPE_ATTRIBUTE,
1323 			     DECL_ATTRIBUTES (var)))
1324 	DECL_ATTRIBUTES (var)
1325 	  = tree_cons (get_identifier (ASAN_USE_AFTER_SCOPE_ATTRIBUTE),
1326 		       integer_one_node,
1327 		       DECL_ATTRIBUTES (var));
1328     }
1329 }
1330 
1331 /* Gimplify a BIND_EXPR.  Just voidify and recurse.  */
1332 
1333 static enum gimplify_status
gimplify_bind_expr(tree * expr_p,gimple_seq * pre_p)1334 gimplify_bind_expr (tree *expr_p, gimple_seq *pre_p)
1335 {
1336   tree bind_expr = *expr_p;
1337   bool old_keep_stack = gimplify_ctxp->keep_stack;
1338   bool old_save_stack = gimplify_ctxp->save_stack;
1339   tree t;
1340   gbind *bind_stmt;
1341   gimple_seq body, cleanup;
1342   gcall *stack_save;
1343   location_t start_locus = 0, end_locus = 0;
1344   tree ret_clauses = NULL;
1345 
  /* If the BIND_EXPR yields a value, divert it into TEMP and make the
     wrapper itself void (see voidify_wrapper_expr).  */
1346   tree temp = voidify_wrapper_expr (bind_expr, NULL);
1347 
1348   /* Mark variables seen in this bind expr.  */
1349   for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
1350     {
1351       if (VAR_P (t))
1352 	{
1353 	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
1354 
1355 	  /* Mark variable as local.  */
1356 	  if (ctx && ctx->region_type != ORT_NONE && !DECL_EXTERNAL (t))
1357 	    {
1358 	      if (! DECL_SEEN_IN_BIND_EXPR_P (t)
1359 		  || splay_tree_lookup (ctx->variables,
1360 					(splay_tree_key) t) == NULL)
1361 		{
1362 		  int flag = GOVD_LOCAL;
1363 		  if (ctx->region_type == ORT_SIMD
1364 		      && TREE_ADDRESSABLE (t)
1365 		      && !TREE_STATIC (t))
1366 		    {
1367 		      if (TREE_CODE (DECL_SIZE_UNIT (t)) != INTEGER_CST)
1368 			ctx->add_safelen1 = true;
1369 		      else
1370 			flag = GOVD_PRIVATE;
1371 		    }
1372 		  omp_add_variable (ctx, t, flag | GOVD_SEEN);
1373 		}
1374 	      /* Static locals inside of target construct or offloaded
1375 		 routines need to be "omp declare target".  */
1376 	      if (TREE_STATIC (t))
1377 		for (; ctx; ctx = ctx->outer_context)
1378 		  if ((ctx->region_type & ORT_TARGET) != 0)
1379 		    {
1380 		      if (!lookup_attribute ("omp declare target",
1381 					     DECL_ATTRIBUTES (t)))
1382 			{
1383 			  tree id = get_identifier ("omp declare target");
1384 			  DECL_ATTRIBUTES (t)
1385 			    = tree_cons (id, NULL_TREE, DECL_ATTRIBUTES (t));
1386 			  varpool_node *node = varpool_node::get (t);
1387 			  if (node)
1388 			    {
1389 			      node->offloadable = 1;
1390 			      if (ENABLE_OFFLOADING && !DECL_EXTERNAL (t))
1391 				{
1392 				  g->have_offload = true;
1393 				  if (!in_lto_p)
1394 				    vec_safe_push (offload_vars, t);
1395 				}
1396 			    }
1397 			}
1398 		      break;
1399 		    }
1400 	    }
1401 
1402 	  DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
1403 
1404 	  if (DECL_HARD_REGISTER (t) && !is_global_var (t) && cfun)
1405 	    cfun->has_local_explicit_reg_vars = true;
1406 	}
1407 
1408       /* Preliminarily mark non-addressed complex variables as eligible
1409 	 for promotion to gimple registers.  We'll transform their uses
1410 	 as we find them.  */
1411       if ((TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
1412 	   || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
1413 	  && !TREE_THIS_VOLATILE (t)
1414 	  && (VAR_P (t) && !DECL_HARD_REGISTER (t))
1415 	  && !needs_to_live_in_memory (t))
1416 	DECL_GIMPLE_REG_P (t) = 1;
1417     }
1418 
1419   bind_stmt = gimple_build_bind (BIND_EXPR_VARS (bind_expr), NULL,
1420 				 BIND_EXPR_BLOCK (bind_expr));
1421   gimple_push_bind_expr (bind_stmt);
1422 
  /* Reset the flags so we can tell whether THIS bind's body saves the
     stack (VLAs) or insists on keeping it (alloca); restored below.  */
1423   gimplify_ctxp->keep_stack = false;
1424   gimplify_ctxp->save_stack = false;
1425 
1426   /* Gimplify the body into the GIMPLE_BIND tuple's body.  */
1427   body = NULL;
1428   gimplify_stmt (&BIND_EXPR_BODY (bind_expr), &body);
1429   gimple_bind_set_body (bind_stmt, body);
1430 
1431   /* Source location wise, the cleanup code (stack_restore and clobbers)
1432      belongs to the end of the block, so propagate what we have.  The
1433      stack_save operation belongs to the beginning of block, which we can
1434      infer from the bind_expr directly if the block has no explicit
1435      assignment.  */
1436   if (BIND_EXPR_BLOCK (bind_expr))
1437     {
1438       end_locus = BLOCK_SOURCE_END_LOCATION (BIND_EXPR_BLOCK (bind_expr));
1439       start_locus = BLOCK_SOURCE_LOCATION (BIND_EXPR_BLOCK (bind_expr));
1440     }
1441   if (start_locus == 0)
1442     start_locus = EXPR_LOCATION (bind_expr);
1443 
1444   cleanup = NULL;
1445   stack_save = NULL;
1446 
1447   /* If the code both contains VLAs and calls alloca, then we cannot reclaim
1448      the stack space allocated to the VLAs.  */
1449   if (gimplify_ctxp->save_stack && !gimplify_ctxp->keep_stack)
1450     {
1451       gcall *stack_restore;
1452 
1453       /* Save stack on entry and restore it on exit.  Add a try_finally
1454 	 block to achieve this.  */
1455       build_stack_save_restore (&stack_save, &stack_restore);
1456 
1457       gimple_set_location (stack_save, start_locus);
1458       gimple_set_location (stack_restore, end_locus);
1459 
1460       gimplify_seq_add_stmt (&cleanup, stack_restore);
1461     }
1462 
1463   /* Add clobbers for all variables that go out of scope.  */
1464   for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
1465     {
1466       if (VAR_P (t)
1467 	  && !is_global_var (t)
1468 	  && DECL_CONTEXT (t) == current_function_decl)
1469 	{
1470 	  if (!DECL_HARD_REGISTER (t)
1471 	      && !TREE_THIS_VOLATILE (t)
1472 	      && !DECL_HAS_VALUE_EXPR_P (t)
1473 	      /* Only care for variables that have to be in memory.  Others
1474 		 will be rewritten into SSA names, hence moved to the
1475 		 top-level.  */
1476 	      && !is_gimple_reg (t)
1477 	      && flag_stack_reuse != SR_NONE)
1478 	    {
1479 	      tree clobber = build_clobber (TREE_TYPE (t));
1480 	      gimple *clobber_stmt;
1481 	      clobber_stmt = gimple_build_assign (t, clobber);
1482 	      gimple_set_location (clobber_stmt, end_locus);
1483 	      gimplify_seq_add_stmt (&cleanup, clobber_stmt);
1484 	    }
1485 
1486 	  if (flag_openacc && oacc_declare_returns != NULL)
1487 	    {
1488 	      tree *c = oacc_declare_returns->get (t);
1489 	      if (c != NULL)
1490 		{
1491 		  if (ret_clauses)
1492 		    OMP_CLAUSE_CHAIN (*c) = ret_clauses;
1493 
1494 		  ret_clauses = *c;
1495 
1496 		  oacc_declare_returns->remove (t);
1497 
1498 		  if (oacc_declare_returns->is_empty ())
1499 		    {
1500 		      delete oacc_declare_returns;
1501 		      oacc_declare_returns = NULL;
1502 		    }
1503 		}
1504 	    }
1505 	}
1506 
      /* Re-poison the variable's shadow memory as it leaves scope.  */
1507       if (asan_poisoned_variables != NULL
1508 	  && asan_poisoned_variables->contains (t))
1509 	{
1510 	  asan_poisoned_variables->remove (t);
1511 	  asan_poison_variable (t, true, &cleanup);
1512 	}
1513 
1514       if (gimplify_ctxp->live_switch_vars != NULL
1515 	  && gimplify_ctxp->live_switch_vars->contains (t))
1516 	gimplify_ctxp->live_switch_vars->remove (t);
1517     }
1518 
  /* Any accumulated OpenACC "declare returns" clauses are wrapped in an
     OACC_DECLARE target stmt placed at the head of the cleanup sequence.  */
1519   if (ret_clauses)
1520     {
1521       gomp_target *stmt;
1522       gimple_stmt_iterator si = gsi_start (cleanup);
1523 
1524       stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
1525 				      ret_clauses);
1526       gsi_insert_seq_before_without_update (&si, stmt, GSI_NEW_STMT);
1527     }
1528 
  /* Run the cleanups (stack restore, clobbers, re-poisoning) on every
     exit path by wrapping the body in a GIMPLE_TRY_FINALLY.  */
1529   if (cleanup)
1530     {
1531       gtry *gs;
1532       gimple_seq new_body;
1533 
1534       new_body = NULL;
1535       gs = gimple_build_try (gimple_bind_body (bind_stmt), cleanup,
1536 	  		     GIMPLE_TRY_FINALLY);
1537 
1538       if (stack_save)
1539 	gimplify_seq_add_stmt (&new_body, stack_save);
1540       gimplify_seq_add_stmt (&new_body, gs);
1541       gimple_bind_set_body (bind_stmt, new_body);
1542     }
1543 
1544   /* keep_stack propagates all the way up to the outermost BIND_EXPR.  */
1545   if (!gimplify_ctxp->keep_stack)
1546     gimplify_ctxp->keep_stack = old_keep_stack;
1547   gimplify_ctxp->save_stack = old_save_stack;
1548 
1549   gimple_pop_bind_expr ();
1550 
1551   gimplify_seq_add_stmt (pre_p, bind_stmt);
1552 
1553   if (temp)
1554     {
1555       *expr_p = temp;
1556       return GS_OK;
1557     }
1558 
1559   *expr_p = NULL_TREE;
1560   return GS_ALL_DONE;
1561 }
1562 
1563 /* Maybe add early return predict statement to PRE_P sequence.  */
1564 
1565 static void
maybe_add_early_return_predict_stmt(gimple_seq * pre_p)1566 maybe_add_early_return_predict_stmt (gimple_seq *pre_p)
1567 {
1568   /* If we are not in a conditional context, add PREDICT statement.  */
1569   if (gimple_conditional_context ())
1570     {
1571       gimple *predict = gimple_build_predict (PRED_TREE_EARLY_RETURN,
1572 					      NOT_TAKEN);
1573       gimplify_seq_add_stmt (pre_p, predict);
1574     }
1575 }
1576 
1577 /* Gimplify a RETURN_EXPR.  If the expression to be returned is not a
1578    GIMPLE value, it is assigned to a new temporary and the statement is
1579    re-written to return the temporary.
1580 
1581    PRE_P points to the sequence where side effects that must happen before
1582    STMT should be stored.  */
1583 
1584 static enum gimplify_status
gimplify_return_expr(tree stmt,gimple_seq * pre_p)1585 gimplify_return_expr (tree stmt, gimple_seq *pre_p)
1586 {
1587   greturn *ret;
1588   tree ret_expr = TREE_OPERAND (stmt, 0);
1589   tree result_decl, result;
1590 
1591   if (ret_expr == error_mark_node)
1592     return GS_ERROR;
1593 
  /* Trivial case: "return;" or "return <RESULT_DECL>;" needs no
     temporary at all.  */
1594   if (!ret_expr
1595       || TREE_CODE (ret_expr) == RESULT_DECL)
1596     {
1597       maybe_add_early_return_predict_stmt (pre_p);
      /* NOTE(review): this RET shadows the outer declaration above;
	 harmless since the outer one is unused on this path, but worth
	 cleaning up.  */
1598       greturn *ret = gimple_build_return (ret_expr);
1599       gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
1600       gimplify_seq_add_stmt (pre_p, ret);
1601       return GS_ALL_DONE;
1602     }
1603 
1604   if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
1605     result_decl = NULL_TREE;
1606   else if (TREE_CODE (ret_expr) == COMPOUND_EXPR)
1607     {
1608       /* Used in C++ for handling EH cleanup of the return value if a local
1609 	 cleanup throws.  Assume the front-end knows what it's doing.  */
1610       result_decl = DECL_RESULT (current_function_decl);
1611       /* But crash if we end up trying to modify ret_expr below.  */
1612       ret_expr = NULL_TREE;
1613     }
1614   else
1615     {
      /* ret_expr is a MODIFY_EXPR/INIT_EXPR storing into the RESULT_DECL
	 (possibly through an INDIRECT_REF for return-by-reference).  */
1616       result_decl = TREE_OPERAND (ret_expr, 0);
1617 
1618       /* See through a return by reference.  */
1619       if (TREE_CODE (result_decl) == INDIRECT_REF)
1620 	result_decl = TREE_OPERAND (result_decl, 0);
1621 
1622       gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR
1623 		   || TREE_CODE (ret_expr) == INIT_EXPR)
1624 		  && TREE_CODE (result_decl) == RESULT_DECL);
1625     }
1626 
1627   /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
1628      Recall that aggregate_value_p is FALSE for any aggregate type that is
1629      returned in registers.  If we're returning values in registers, then
1630      we don't want to extend the lifetime of the RESULT_DECL, particularly
1631      across another call.  In addition, for those aggregates for which
1632      hard_function_value generates a PARALLEL, we'll die during normal
1633      expansion of structure assignments; there's special code in expand_return
1634      to handle this case that does not exist in expand_expr.  */
1635   if (!result_decl)
1636     result = NULL_TREE;
1637   else if (aggregate_value_p (result_decl, TREE_TYPE (current_function_decl)))
1638     {
1639       if (!poly_int_tree_p (DECL_SIZE (result_decl)))
1640 	{
1641 	  if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (result_decl)))
1642 	    gimplify_type_sizes (TREE_TYPE (result_decl), pre_p);
1643 	  /* Note that we don't use gimplify_vla_decl because the RESULT_DECL
1644 	     should be effectively allocated by the caller, i.e. all calls to
1645 	     this function must be subject to the Return Slot Optimization.  */
1646 	  gimplify_one_sizepos (&DECL_SIZE (result_decl), pre_p);
1647 	  gimplify_one_sizepos (&DECL_SIZE_UNIT (result_decl), pre_p);
1648 	}
1649       result = result_decl;
1650     }
1651   else if (gimplify_ctxp->return_temp)
1652     result = gimplify_ctxp->return_temp;
1653   else
1654     {
      /* First value-returning return seen in this function: create a
	 per-function temporary that all later returns reuse.  */
1655       result = create_tmp_reg (TREE_TYPE (result_decl));
1656 
1657       /* ??? With complex control flow (usually involving abnormal edges),
1658 	 we can wind up warning about an uninitialized value for this.  Due
1659 	 to how this variable is constructed and initialized, this is never
1660 	 true.  Give up and never warn.  */
1661       TREE_NO_WARNING (result) = 1;
1662 
1663       gimplify_ctxp->return_temp = result;
1664     }
1665 
1666   /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
1667      Then gimplify the whole thing.  */
1668   if (result != result_decl)
1669     TREE_OPERAND (ret_expr, 0) = result;
1670 
1671   gimplify_and_add (TREE_OPERAND (stmt, 0), pre_p);
1672 
1673   maybe_add_early_return_predict_stmt (pre_p);
1674   ret = gimple_build_return (result);
1675   gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
1676   gimplify_seq_add_stmt (pre_p, ret);
1677 
1678   return GS_ALL_DONE;
1679 }
1680 
1681 /* Gimplify a variable-length array DECL.  */
1682 
1683 static void
gimplify_vla_decl(tree decl,gimple_seq * seq_p)1684 gimplify_vla_decl (tree decl, gimple_seq *seq_p)
1685 {
1686   /* This is a variable-sized decl.  Simplify its size and mark it
1687      for deferred expansion.  */
1688   tree t, addr, ptr_type;
1689 
1690   gimplify_one_sizepos (&DECL_SIZE (decl), seq_p);
1691   gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), seq_p);
1692 
1693   /* Don't mess with a DECL_VALUE_EXPR set by the front-end.  */
1694   if (DECL_HAS_VALUE_EXPR_P (decl))
1695     return;
1696 
1697   /* All occurrences of this decl in final gimplified code will be
1698      replaced by indirection.  Setting DECL_VALUE_EXPR does two
1699      things: First, it lets the rest of the gimplifier know what
1700      replacement to use.  Second, it lets the debug info know
1701      where to find the value.  */
1702   ptr_type = build_pointer_type (TREE_TYPE (decl));
1703   addr = create_tmp_var (ptr_type, get_name (decl));
1704   DECL_IGNORED_P (addr) = 0;
1705   t = build_fold_indirect_ref (addr);
1706   TREE_THIS_NOTRAP (t) = 1;
1707   SET_DECL_VALUE_EXPR (decl, t);
1708   DECL_HAS_VALUE_EXPR_P (decl) = 1;
1709 
1710   t = build_alloca_call_expr (DECL_SIZE_UNIT (decl), DECL_ALIGN (decl),
1711 			      max_int_size_in_bytes (TREE_TYPE (decl)));
1712   /* The call has been built for a variable-sized object.  */
1713   CALL_ALLOCA_FOR_VAR_P (t) = 1;
1714   t = fold_convert (ptr_type, t);
1715   t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
1716 
1717   gimplify_and_add (t, seq_p);
1718 
1719   /* Record the dynamic allocation associated with DECL if requested.  */
1720   if (flag_callgraph_info & CALLGRAPH_INFO_DYNAMIC_ALLOC)
1721     record_dynamic_alloc (decl);
1722 }
1723 
1724 /* A helper function to be called via walk_tree.  Mark all labels under *TP
1725    as being forced.  To be called for DECL_INITIAL of static variables.  */
1726 
1727 static tree
force_labels_r(tree * tp,int * walk_subtrees,void * data ATTRIBUTE_UNUSED)1728 force_labels_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1729 {
1730   if (TYPE_P (*tp))
1731     *walk_subtrees = 0;
1732   if (TREE_CODE (*tp) == LABEL_DECL)
1733     {
1734       FORCED_LABEL (*tp) = 1;
1735       cfun->has_forced_label_in_static = 1;
1736     }
1737 
1738   return NULL_TREE;
1739 }
1740 
1741 /* Gimplify a DECL_EXPR node *STMT_P by making any necessary allocation
1742    and initialization explicit.  */
1743 
1744 static enum gimplify_status
gimplify_decl_expr(tree * stmt_p,gimple_seq * seq_p)1745 gimplify_decl_expr (tree *stmt_p, gimple_seq *seq_p)
1746 {
1747   tree stmt = *stmt_p;
1748   tree decl = DECL_EXPR_DECL (stmt);
1749 
  /* The DECL_EXPR itself is consumed here and never survives.  */
1750   *stmt_p = NULL_TREE;
1751 
1752   if (TREE_TYPE (decl) == error_mark_node)
1753     return GS_ERROR;
1754 
1755   if ((TREE_CODE (decl) == TYPE_DECL
1756        || VAR_P (decl))
1757       && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl)))
1758     {
1759       gimplify_type_sizes (TREE_TYPE (decl), seq_p);
1760       if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE)
1761 	gimplify_type_sizes (TREE_TYPE (TREE_TYPE (decl)), seq_p);
1762     }
1763 
1764   /* ??? DECL_ORIGINAL_TYPE is streamed for LTO so it needs to be gimplified
1765      in case its size expressions contain problematic nodes like CALL_EXPR.  */
1766   if (TREE_CODE (decl) == TYPE_DECL
1767       && DECL_ORIGINAL_TYPE (decl)
1768       && !TYPE_SIZES_GIMPLIFIED (DECL_ORIGINAL_TYPE (decl)))
1769     {
1770       gimplify_type_sizes (DECL_ORIGINAL_TYPE (decl), seq_p);
1771       if (TREE_CODE (DECL_ORIGINAL_TYPE (decl)) == REFERENCE_TYPE)
1772 	gimplify_type_sizes (TREE_TYPE (DECL_ORIGINAL_TYPE (decl)), seq_p);
1773     }
1774 
1775   if (VAR_P (decl) && !DECL_EXTERNAL (decl))
1776     {
1777       tree init = DECL_INITIAL (decl);
1778       bool is_vla = false;
1779 
      /* Treat the decl as a VLA when its size is not a (poly-)constant,
	 or, under -fstack-check=generic, when it exceeds the checking
	 threshold; either way allocation becomes an explicit alloca.  */
1780       poly_uint64 size;
1781       if (!poly_int_tree_p (DECL_SIZE_UNIT (decl), &size)
1782 	  || (!TREE_STATIC (decl)
1783 	      && flag_stack_check == GENERIC_STACK_CHECK
1784 	      && maybe_gt (size,
1785 			   (unsigned HOST_WIDE_INT) STACK_CHECK_MAX_VAR_SIZE)))
1786 	{
1787 	  gimplify_vla_decl (decl, seq_p);
1788 	  is_vla = true;
1789 	}
1790 
      /* Under ASan use-after-scope, unpoison the variable's shadow here
	 at its declaration point; it is poisoned again when it goes out
	 of scope in gimplify_bind_expr.  */
1791       if (asan_poisoned_variables
1792 	  && !is_vla
1793 	  && TREE_ADDRESSABLE (decl)
1794 	  && !TREE_STATIC (decl)
1795 	  && !DECL_HAS_VALUE_EXPR_P (decl)
1796 	  && DECL_ALIGN (decl) <= MAX_SUPPORTED_STACK_ALIGNMENT
1797 	  && dbg_cnt (asan_use_after_scope)
1798 	  && !gimplify_omp_ctxp)
1799 	{
1800 	  asan_poisoned_variables->add (decl);
1801 	  asan_poison_variable (decl, false, seq_p);
1802 	  if (!DECL_ARTIFICIAL (decl) && gimplify_ctxp->live_switch_vars)
1803 	    gimplify_ctxp->live_switch_vars->add (decl);
1804 	}
1805 
1806       /* Some front ends do not explicitly declare all anonymous
1807 	 artificial variables.  We compensate here by declaring the
1808 	 variables, though it would be better if the front ends would
1809 	 explicitly declare them.  */
1810       if (!DECL_SEEN_IN_BIND_EXPR_P (decl)
1811 	  && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
1812 	gimple_add_tmp_var (decl);
1813 
1814       if (init && init != error_mark_node)
1815 	{
1816 	  if (!TREE_STATIC (decl))
1817 	    {
	      /* Turn the initializer into an explicit INIT_EXPR statement;
		 the original DECL_INITIAL is dropped and its node freed.  */
1818 	      DECL_INITIAL (decl) = NULL_TREE;
1819 	      init = build2 (INIT_EXPR, void_type_node, decl, init);
1820 	      gimplify_and_add (init, seq_p);
1821 	      ggc_free (init);
1822 	    }
1823 	  else
1824 	    /* We must still examine initializers for static variables
1825 	       as they may contain a label address.  */
1826 	    walk_tree (&init, force_labels_r, NULL, NULL);
1827 	}
1828     }
1829 
1830   return GS_ALL_DONE;
1831 }
1832 
1833 /* Gimplify a LOOP_EXPR.  Normally this just involves gimplifying the body
1834    and replacing the LOOP_EXPR with goto, but if the loop contains an
1835    EXIT_EXPR, we need to append a label for it to jump to.  */
1836 
1837 static enum gimplify_status
gimplify_loop_expr(tree * expr_p,gimple_seq * pre_p)1838 gimplify_loop_expr (tree *expr_p, gimple_seq *pre_p)
1839 {
1840   tree saved_label = gimplify_ctxp->exit_label;
1841   tree start_label = create_artificial_label (UNKNOWN_LOCATION);
1842 
1843   gimplify_seq_add_stmt (pre_p, gimple_build_label (start_label));
1844 
1845   gimplify_ctxp->exit_label = NULL_TREE;
1846 
1847   gimplify_and_add (LOOP_EXPR_BODY (*expr_p), pre_p);
1848 
1849   gimplify_seq_add_stmt (pre_p, gimple_build_goto (start_label));
1850 
1851   if (gimplify_ctxp->exit_label)
1852     gimplify_seq_add_stmt (pre_p,
1853 			   gimple_build_label (gimplify_ctxp->exit_label));
1854 
1855   gimplify_ctxp->exit_label = saved_label;
1856 
1857   *expr_p = NULL;
1858   return GS_ALL_DONE;
1859 }
1860 
1861 /* Gimplify a statement list onto a sequence.  These may be created either
1862    by an enlightened front-end, or by shortcut_cond_expr.  */
1863 
1864 static enum gimplify_status
gimplify_statement_list(tree * expr_p,gimple_seq * pre_p)1865 gimplify_statement_list (tree *expr_p, gimple_seq *pre_p)
1866 {
1867   tree temp = voidify_wrapper_expr (*expr_p, NULL);
1868 
1869   tree_stmt_iterator i = tsi_start (*expr_p);
1870 
1871   while (!tsi_end_p (i))
1872     {
1873       gimplify_stmt (tsi_stmt_ptr (i), pre_p);
1874       tsi_delink (&i);
1875     }
1876 
1877   if (temp)
1878     {
1879       *expr_p = temp;
1880       return GS_OK;
1881     }
1882 
1883   return GS_ALL_DONE;
1884 }
1885 
1886 /* Callback for walk_gimple_seq.  */
1887 
1888 static tree
warn_switch_unreachable_r(gimple_stmt_iterator * gsi_p,bool * handled_ops_p,struct walk_stmt_info * wi)1889 warn_switch_unreachable_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
1890 			   struct walk_stmt_info *wi)
1891 {
1892   gimple *stmt = gsi_stmt (*gsi_p);
1893 
1894   *handled_ops_p = true;
1895   switch (gimple_code (stmt))
1896     {
1897     case GIMPLE_TRY:
1898       /* A compiler-generated cleanup or a user-written try block.
1899 	 If it's empty, don't dive into it--that would result in
1900 	 worse location info.  */
1901       if (gimple_try_eval (stmt) == NULL)
1902 	{
1903 	  wi->info = stmt;
1904 	  return integer_zero_node;
1905 	}
1906       /* Fall through.  */
1907     case GIMPLE_BIND:
1908     case GIMPLE_CATCH:
1909     case GIMPLE_EH_FILTER:
1910     case GIMPLE_TRANSACTION:
1911       /* Walk the sub-statements.  */
1912       *handled_ops_p = false;
1913       break;
1914 
1915     case GIMPLE_DEBUG:
1916       /* Ignore these.  We may generate them before declarations that
1917 	 are never executed.  If there's something to warn about,
1918 	 there will be non-debug stmts too, and we'll catch those.  */
1919       break;
1920 
1921     case GIMPLE_CALL:
1922       if (gimple_call_internal_p (stmt, IFN_ASAN_MARK))
1923 	{
1924 	  *handled_ops_p = false;
1925 	  break;
1926 	}
1927       /* Fall through.  */
1928     default:
1929       /* Save the first "real" statement (not a decl/lexical scope/...).  */
1930       wi->info = stmt;
1931       return integer_zero_node;
1932     }
1933   return NULL_TREE;
1934 }
1935 
1936 /* Possibly warn about unreachable statements between switch's controlling
1937    expression and the first case.  SEQ is the body of a switch expression.  */
1938 
1939 static void
maybe_warn_switch_unreachable(gimple_seq seq)1940 maybe_warn_switch_unreachable (gimple_seq seq)
1941 {
1942   if (!warn_switch_unreachable
1943       /* This warning doesn't play well with Fortran when optimizations
1944 	 are on.  */
1945       || lang_GNU_Fortran ()
1946       || seq == NULL)
1947     return;
1948 
1949   struct walk_stmt_info wi;
1950   memset (&wi, 0, sizeof (wi));
1951   walk_gimple_seq (seq, warn_switch_unreachable_r, NULL, &wi);
1952   gimple *stmt = (gimple *) wi.info;
1953 
1954   if (stmt && gimple_code (stmt) != GIMPLE_LABEL)
1955     {
1956       if (gimple_code (stmt) == GIMPLE_GOTO
1957 	  && TREE_CODE (gimple_goto_dest (stmt)) == LABEL_DECL
1958 	  && DECL_ARTIFICIAL (gimple_goto_dest (stmt)))
1959 	/* Don't warn for compiler-generated gotos.  These occur
1960 	   in Duff's devices, for example.  */;
1961       else
1962 	warning_at (gimple_location (stmt), OPT_Wswitch_unreachable,
1963 		    "statement will never be executed");
1964     }
1965 }
1966 
1967 
/* A label entry that pairs a label and a location, so that fall-through
   diagnostics can point at the spot where the label was encountered.  */
struct label_entry
{
  tree label;		/* The LABEL_DECL.  */
  location_t loc;	/* Location to use when warning about LABEL.  */
};
1974 
1975 /* Find LABEL in vector of label entries VEC.  */
1976 
1977 static struct label_entry *
find_label_entry(const auto_vec<struct label_entry> * vec,tree label)1978 find_label_entry (const auto_vec<struct label_entry> *vec, tree label)
1979 {
1980   unsigned int i;
1981   struct label_entry *l;
1982 
1983   FOR_EACH_VEC_ELT (*vec, i, l)
1984     if (l->label == label)
1985       return l;
1986   return NULL;
1987 }
1988 
1989 /* Return true if LABEL, a LABEL_DECL, represents a case label
1990    in a vector of labels CASES.  */
1991 
1992 static bool
case_label_p(const vec<tree> * cases,tree label)1993 case_label_p (const vec<tree> *cases, tree label)
1994 {
1995   unsigned int i;
1996   tree l;
1997 
1998   FOR_EACH_VEC_ELT (*cases, i, l)
1999     if (CASE_LABEL (l) == label)
2000       return true;
2001   return false;
2002 }
2003 
2004 /* Find the last nondebug statement in a scope STMT.  */
2005 
static gimple *
last_stmt_in_scope (gimple *stmt)
{
  /* The recursion bottoms out on an empty sequence.  */
  if (!stmt)
    return NULL;

  switch (gimple_code (stmt))
    {
    case GIMPLE_BIND:
      {
	/* A lexical scope: its last statement is the last non-debug
	   statement of its body, recursively unwrapped.  */
	gbind *bind = as_a <gbind *> (stmt);
	stmt = gimple_seq_last_nondebug_stmt (gimple_bind_body (bind));
	return last_stmt_in_scope (stmt);
      }

    case GIMPLE_TRY:
      {
	gtry *try_stmt = as_a <gtry *> (stmt);
	stmt = gimple_seq_last_nondebug_stmt (gimple_try_eval (try_stmt));
	gimple *last_eval = last_stmt_in_scope (stmt);
	/* For a try/finally whose body may fall through (and does not end
	   in an explicit IFN_FALLTHROUGH marker), control proceeds into
	   the cleanup sequence, so look for the last statement there.  */
	if (gimple_stmt_may_fallthru (last_eval)
	    && (last_eval == NULL
		|| !gimple_call_internal_p (last_eval, IFN_FALLTHROUGH))
	    && gimple_try_kind (try_stmt) == GIMPLE_TRY_FINALLY)
	  {
	    stmt = gimple_seq_last_nondebug_stmt (gimple_try_cleanup (try_stmt));
	    return last_stmt_in_scope (stmt);
	  }
	else
	  return last_eval;
      }

    case GIMPLE_DEBUG:
      /* Debug statements have already been skipped by the
	 gimple_seq_last_nondebug_stmt calls above.  */
      gcc_unreachable ();

    default:
      return stmt;
    }
}
2045 
2046 /* Collect interesting labels in LABELS and return the statement preceding
2047    another case label, or a user-defined label.  Store a location useful
2048    to give warnings at *PREVLOC (usually the location of the returned
2049    statement or of its surrounding scope).  */
2050 
static gimple *
collect_fallthrough_labels (gimple_stmt_iterator *gsi_p,
			    auto_vec <struct label_entry> *labels,
			    location_t *prevloc)
{
  /* PREV tracks the last interesting (warnable) statement seen so far.  */
  gimple *prev = NULL;

  *prevloc = UNKNOWN_LOCATION;
  do
    {
      if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_BIND)
	{
	  /* Recognize the special GIMPLE_BIND added by gimplify_switch_expr,
	     which starts on a GIMPLE_SWITCH and ends with a break label.
	     Handle that as a single statement that can fall through.  */
	  gbind *bind = as_a <gbind *> (gsi_stmt (*gsi_p));
	  gimple *first = gimple_seq_first_stmt (gimple_bind_body (bind));
	  gimple *last = gimple_seq_last_stmt (gimple_bind_body (bind));
	  if (last
	      && gimple_code (first) == GIMPLE_SWITCH
	      && gimple_code (last) == GIMPLE_LABEL)
	    {
	      tree label = gimple_label_label (as_a <glabel *> (last));
	      if (SWITCH_BREAK_LABEL_P (label))
		{
		  prev = bind;
		  gsi_next (gsi_p);
		  continue;
		}
	    }
	}
      if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_BIND
	  || gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_TRY)
	{
	  /* Nested scope.  Only look at the last statement of
	     the innermost scope.  */
	  location_t bind_loc = gimple_location (gsi_stmt (*gsi_p));
	  gimple *last = last_stmt_in_scope (gsi_stmt (*gsi_p));
	  if (last)
	    {
	      prev = last;
	      /* It might be a label without a location.  Use the
		 location of the scope then.  */
	      if (!gimple_has_location (prev))
		*prevloc = bind_loc;
	    }
	  gsi_next (gsi_p);
	  continue;
	}

      /* Ifs are tricky.  */
      if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_COND)
	{
	  gcond *cond_stmt = as_a <gcond *> (gsi_stmt (*gsi_p));
	  tree false_lab = gimple_cond_false_label (cond_stmt);
	  location_t if_loc = gimple_location (cond_stmt);

	  /* If we have e.g.
	       if (i > 1) goto <D.2259>; else goto D;
	     we can't do much with the else-branch.  */
	  if (!DECL_ARTIFICIAL (false_lab))
	    break;

	  /* Go on until the false label, then one step back.  */
	  for (; !gsi_end_p (*gsi_p); gsi_next (gsi_p))
	    {
	      gimple *stmt = gsi_stmt (*gsi_p);
	      if (gimple_code (stmt) == GIMPLE_LABEL
		  && gimple_label_label (as_a <glabel *> (stmt)) == false_lab)
		break;
	    }

	  /* Not found?  Oops.  */
	  if (gsi_end_p (*gsi_p))
	    break;

	  /* The else-branch label falls through; record it with the
	     location of the 'if' so a warning can point there.  */
	  struct label_entry l = { false_lab, if_loc };
	  labels->safe_push (l);

	  /* Go to the last statement of the then branch.  */
	  gsi_prev (gsi_p);

	  /* if (i != 0) goto <D.1759>; else goto <D.1760>;
	     <D.1759>:
	     <stmt>;
	     goto <D.1761>;
	     <D.1760>:
	   */
	  if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_GOTO
	      && !gimple_has_location (gsi_stmt (*gsi_p)))
	    {
	      /* Look at the statement before, it might be
		 attribute fallthrough, in which case don't warn.  */
	      gsi_prev (gsi_p);
	      bool fallthru_before_dest
		= gimple_call_internal_p (gsi_stmt (*gsi_p), IFN_FALLTHROUGH);
	      gsi_next (gsi_p);
	      tree goto_dest = gimple_goto_dest (gsi_stmt (*gsi_p));
	      if (!fallthru_before_dest)
		{
		  struct label_entry l = { goto_dest, if_loc };
		  labels->safe_push (l);
		}
	    }
	  /* And move back.  */
	  gsi_next (gsi_p);
	}

      /* Remember the last statement.  Skip labels that are of no interest
	 to us.  */
      if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_LABEL)
	{
	  tree label = gimple_label_label (as_a <glabel *> (gsi_stmt (*gsi_p)));
	  if (find_label_entry (labels, label))
	    prev = gsi_stmt (*gsi_p);
	}
      /* ASAN instrumentation and branch predictors are not "real"
	 statements for the purpose of this warning.  */
      else if (gimple_call_internal_p (gsi_stmt (*gsi_p), IFN_ASAN_MARK))
	;
      else if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_PREDICT)
	;
      else if (!is_gimple_debug (gsi_stmt (*gsi_p)))
	prev = gsi_stmt (*gsi_p);
      gsi_next (gsi_p);
    }
  while (!gsi_end_p (*gsi_p)
	 /* Stop if we find a case or a user-defined label.  */
	 && (gimple_code (gsi_stmt (*gsi_p)) != GIMPLE_LABEL
	     || !gimple_has_location (gsi_stmt (*gsi_p))));

  if (prev && gimple_has_location (prev))
    *prevloc = gimple_location (prev);
  return prev;
}
2184 
2185 /* Return true if the switch fallthough warning should occur.  LABEL is
2186    the label statement that we're falling through to.  */
2187 
static bool
should_warn_for_implicit_fallthrough (gimple_stmt_iterator *gsi_p, tree label)
{
  /* Work on a copy so the caller's iterator is left untouched.  */
  gimple_stmt_iterator gsi = *gsi_p;

  /* Don't warn if the label is marked with a "falls through" comment.  */
  if (FALLTHROUGH_LABEL_P (label))
    return false;

  /* Don't warn for non-case labels followed by a statement:
       case 0:
	 foo ();
       label:
	 bar ();
     as these are likely intentional.  */
  if (!case_label_p (&gimplify_ctxp->case_labels, label))
    {
      tree l;
      /* Scan forward over any run of further non-case labels; only if it
	 ends at a case label (or another label) do we keep warning.  */
      while (!gsi_end_p (gsi)
	     && gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL
	     && (l = gimple_label_label (as_a <glabel *> (gsi_stmt (gsi))))
	     && !case_label_p (&gimplify_ctxp->case_labels, l))
	gsi_next_nondebug (&gsi);
      if (gsi_end_p (gsi) || gimple_code (gsi_stmt (gsi)) != GIMPLE_LABEL)
	return false;
    }

  /* Don't warn for terminated branches, i.e. when the subsequent case labels
     immediately breaks.  */
  gsi = *gsi_p;

  /* Skip all immediately following labels.  */
  while (!gsi_end_p (gsi)
	 && (gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL
	     || gimple_code (gsi_stmt (gsi)) == GIMPLE_PREDICT))
    gsi_next_nondebug (&gsi);

  /* { ... something; default:; } */
  if (gsi_end_p (gsi)
      /* { ... something; default: break; } or
	 { ... something; default: goto L; } */
      || gimple_code (gsi_stmt (gsi)) == GIMPLE_GOTO
      /* { ... something; default: return; } */
      || gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
    return false;

  return true;
}
2236 
2237 /* Callback for walk_gimple_seq.  */
2238 
static tree
warn_implicit_fallthrough_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
			     struct walk_stmt_info *)
{
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;
  switch (gimple_code (stmt))
    {
    case GIMPLE_TRY:
    case GIMPLE_BIND:
    case GIMPLE_CATCH:
    case GIMPLE_EH_FILTER:
    case GIMPLE_TRANSACTION:
      /* Walk the sub-statements.  */
      *handled_ops_p = false;
      break;

    /* Find a sequence of form:

       GIMPLE_LABEL
       [...]
       <may fallthru stmt>
       GIMPLE_LABEL

       and possibly warn.  */
    case GIMPLE_LABEL:
      {
	/* Found a label.  Skip all immediately following labels.  */
	while (!gsi_end_p (*gsi_p)
	       && gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_LABEL)
	  gsi_next_nondebug (gsi_p);

	/* There might be no more statements.  */
	if (gsi_end_p (*gsi_p))
	  return integer_zero_node;

	/* Vector of labels that fall through.  */
	auto_vec <struct label_entry> labels;
	location_t prevloc;
	/* PREV is the last statement before the next case/user label,
	   i.e. the one that may fall through.  */
	gimple *prev = collect_fallthrough_labels (gsi_p, &labels, &prevloc);

	/* There might be no more statements.  */
	if (gsi_end_p (*gsi_p))
	  return integer_zero_node;

	gimple *next = gsi_stmt (*gsi_p);
	tree label;
	/* If what follows is a label, then we may have a fallthrough.  */
	if (gimple_code (next) == GIMPLE_LABEL
	    && gimple_has_location (next)
	    && (label = gimple_label_label (as_a <glabel *> (next)))
	    && prev != NULL)
	  {
	    struct label_entry *l;
	    bool warned_p = false;
	    auto_diagnostic_group d;
	    if (!should_warn_for_implicit_fallthrough (gsi_p, label))
	      /* Quiet.  */;
	    else if (gimple_code (prev) == GIMPLE_LABEL
		     && (label = gimple_label_label (as_a <glabel *> (prev)))
		     && (l = find_label_entry (&labels, label)))
	      /* An empty case whose label was recorded as falling through:
		 warn at the location stored for that label.  */
	      warned_p = warning_at (l->loc, OPT_Wimplicit_fallthrough_,
				     "this statement may fall through");
	    else if (!gimple_call_internal_p (prev, IFN_FALLTHROUGH)
		     /* Try to be clever and don't warn when the statement
			can't actually fall through.  */
		     && gimple_stmt_may_fallthru (prev)
		     && prevloc != UNKNOWN_LOCATION)
	      warned_p = warning_at (prevloc,
				     OPT_Wimplicit_fallthrough_,
				     "this statement may fall through");
	    if (warned_p)
	      inform (gimple_location (next), "here");

	    /* Mark this label as processed so as to prevent multiple
	       warnings in nested switches.  */
	    FALLTHROUGH_LABEL_P (label) = true;

	    /* So that next warn_implicit_fallthrough_r will start looking for
	       a new sequence starting with this label.  */
	    gsi_prev (gsi_p);
	  }
      }
      break;
    default:
      break;
    }
  return NULL_TREE;
}
2329 
2330 /* Warn when a switch case falls through.  */
2331 
2332 static void
maybe_warn_implicit_fallthrough(gimple_seq seq)2333 maybe_warn_implicit_fallthrough (gimple_seq seq)
2334 {
2335   if (!warn_implicit_fallthrough)
2336     return;
2337 
2338   /* This warning is meant for C/C++/ObjC/ObjC++ only.  */
2339   if (!(lang_GNU_C ()
2340 	|| lang_GNU_CXX ()
2341 	|| lang_GNU_OBJC ()))
2342     return;
2343 
2344   struct walk_stmt_info wi;
2345   memset (&wi, 0, sizeof (wi));
2346   walk_gimple_seq (seq, warn_implicit_fallthrough_r, NULL, &wi);
2347 }
2348 
2349 /* Callback for walk_gimple_seq.  */
2350 
static tree
expand_FALLTHROUGH_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
		      struct walk_stmt_info *wi)
{
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;
  switch (gimple_code (stmt))
    {
    case GIMPLE_TRY:
    case GIMPLE_BIND:
    case GIMPLE_CATCH:
    case GIMPLE_EH_FILTER:
    case GIMPLE_TRANSACTION:
      /* Walk the sub-statements.  */
      *handled_ops_p = false;
      break;
    case GIMPLE_CALL:
      if (gimple_call_internal_p (stmt, IFN_FALLTHROUGH))
	{
	  /* Drop the marker call itself; what remains to check is whether
	     it immediately precedes a case or default label.  */
	  gsi_remove (gsi_p, true);
	  if (gsi_end_p (*gsi_p))
	    {
	      /* Marker at the very end of the sequence: report its
		 location back to expand_FALLTHROUGH via wi->info and
		 stop the walk.  */
	      *static_cast<location_t *>(wi->info) = gimple_location (stmt);
	      return integer_zero_node;
	    }

	  bool found = false;
	  location_t loc = gimple_location (stmt);

	  gimple_stmt_iterator gsi2 = *gsi_p;
	  stmt = gsi_stmt (gsi2);
	  if (gimple_code (stmt) == GIMPLE_GOTO && !gimple_has_location (stmt))
	    {
	      /* Go on until the artificial label.  */
	      tree goto_dest = gimple_goto_dest (stmt);
	      for (; !gsi_end_p (gsi2); gsi_next (&gsi2))
		{
		  if (gimple_code (gsi_stmt (gsi2)) == GIMPLE_LABEL
		      && gimple_label_label (as_a <glabel *> (gsi_stmt (gsi2)))
			   == goto_dest)
		    break;
		}

	      /* Not found?  Stop.  */
	      if (gsi_end_p (gsi2))
		break;

	      /* Look one past it.  */
	      gsi_next (&gsi2);
	    }

	  /* We're looking for a case label or default label here.  */
	  while (!gsi_end_p (gsi2))
	    {
	      stmt = gsi_stmt (gsi2);
	      if (gimple_code (stmt) == GIMPLE_LABEL)
		{
		  tree label = gimple_label_label (as_a <glabel *> (stmt));
		  if (gimple_has_location (stmt) && DECL_ARTIFICIAL (label))
		    {
		      found = true;
		      break;
		    }
		}
	      else if (gimple_call_internal_p (stmt, IFN_ASAN_MARK))
		/* ASAN instrumentation doesn't separate the marker from
		   its label.  */;
	      else if (!is_gimple_debug (stmt))
		/* Anything else is not expected.  */
		break;
	      gsi_next (&gsi2);
	    }
	  if (!found)
	    pedwarn (loc, 0, "attribute %<fallthrough%> not preceding "
		     "a case label or default label");
	}
      break;
    default:
      break;
    }
  return NULL_TREE;
}
2433 
2434 /* Expand all FALLTHROUGH () calls in SEQ.  */
2435 
2436 static void
expand_FALLTHROUGH(gimple_seq * seq_p)2437 expand_FALLTHROUGH (gimple_seq *seq_p)
2438 {
2439   struct walk_stmt_info wi;
2440   location_t loc;
2441   memset (&wi, 0, sizeof (wi));
2442   wi.info = (void *) &loc;
2443   walk_gimple_seq_mod (seq_p, expand_FALLTHROUGH_r, NULL, &wi);
2444   if (wi.callback_result == integer_zero_node)
2445     /* We've found [[fallthrough]]; at the end of a switch, which the C++
2446        standard says is ill-formed; see [dcl.attr.fallthrough].  */
2447     pedwarn (loc, 0, "attribute %<fallthrough%> not preceding "
2448 	     "a case label or default label");
2449 }
2450 
2451 
2452 /* Gimplify a SWITCH_EXPR, and collect the vector of labels it can
2453    branch to.  */
2454 
static enum gimplify_status
gimplify_switch_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree switch_expr = *expr_p;
  gimple_seq switch_body_seq = NULL;
  enum gimplify_status ret;
  tree index_type = TREE_TYPE (switch_expr);
  if (index_type == NULL_TREE)
    index_type = TREE_TYPE (SWITCH_COND (switch_expr));

  /* The controlling expression must be a gimple value.  */
  ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL, is_gimple_val,
                       fb_rvalue);
  if (ret == GS_ERROR || ret == GS_UNHANDLED)
    return ret;

  if (SWITCH_BODY (switch_expr))
    {
      vec<tree> labels;
      vec<tree> saved_labels;
      hash_set<tree> *saved_live_switch_vars = NULL;
      tree default_case = NULL_TREE;
      gswitch *switch_stmt;

      /* Save old labels, get new ones from body, then restore the old
         labels.  Save all the things from the switch body to append after.  */
      saved_labels = gimplify_ctxp->case_labels;
      gimplify_ctxp->case_labels.create (8);

      /* Do not create live_switch_vars if SWITCH_BODY is not a BIND_EXPR.  */
      saved_live_switch_vars = gimplify_ctxp->live_switch_vars;
      tree_code body_type = TREE_CODE (SWITCH_BODY (switch_expr));
      if (body_type == BIND_EXPR || body_type == STATEMENT_LIST)
	gimplify_ctxp->live_switch_vars = new hash_set<tree> (4);
      else
	gimplify_ctxp->live_switch_vars = NULL;

      /* Remember whether we were already inside a switch: nested switches
	 must not run expand_FALLTHROUGH prematurely (see below).  */
      bool old_in_switch_expr = gimplify_ctxp->in_switch_expr;
      gimplify_ctxp->in_switch_expr = true;

      gimplify_stmt (&SWITCH_BODY (switch_expr), &switch_body_seq);

      gimplify_ctxp->in_switch_expr = old_in_switch_expr;
      maybe_warn_switch_unreachable (switch_body_seq);
      maybe_warn_implicit_fallthrough (switch_body_seq);
      /* Only do this for the outermost GIMPLE_SWITCH.  */
      if (!gimplify_ctxp->in_switch_expr)
	expand_FALLTHROUGH (&switch_body_seq);

      labels = gimplify_ctxp->case_labels;
      gimplify_ctxp->case_labels = saved_labels;

      if (gimplify_ctxp->live_switch_vars)
	{
	  /* Every variable added while gimplifying the body must have
	     been removed again when its scope ended.  */
	  gcc_assert (gimplify_ctxp->live_switch_vars->is_empty ());
	  delete gimplify_ctxp->live_switch_vars;
	}
      gimplify_ctxp->live_switch_vars = saved_live_switch_vars;

      preprocess_case_label_vec_for_gimple (labels, index_type,
					    &default_case);

      bool add_bind = false;
      if (!default_case)
	{
	  glabel *new_default;

	  /* No user-written default: synthesize one jumping past the body.  */
	  default_case
	    = build_case_label (NULL_TREE, NULL_TREE,
				create_artificial_label (UNKNOWN_LOCATION));
	  if (old_in_switch_expr)
	    {
	      SWITCH_BREAK_LABEL_P (CASE_LABEL (default_case)) = 1;
	      add_bind = true;
	    }
	  new_default = gimple_build_label (CASE_LABEL (default_case));
	  gimplify_seq_add_stmt (&switch_body_seq, new_default);
	}
      else if (old_in_switch_expr)
	{
	  gimple *last = gimple_seq_last_stmt (switch_body_seq);
	  if (last && gimple_code (last) == GIMPLE_LABEL)
	    {
	      tree label = gimple_label_label (as_a <glabel *> (last));
	      if (SWITCH_BREAK_LABEL_P (label))
		add_bind = true;
	    }
	}

      switch_stmt = gimple_build_switch (SWITCH_COND (switch_expr),
					 default_case, labels);
      /* For the benefit of -Wimplicit-fallthrough, if switch_body_seq
	 ends with a GIMPLE_LABEL holding SWITCH_BREAK_LABEL_P LABEL_DECL,
	 wrap the GIMPLE_SWITCH up to that GIMPLE_LABEL into a GIMPLE_BIND,
	 so that we can easily find the start and end of the switch
	 statement.  */
      if (add_bind)
	{
	  gimple_seq bind_body = NULL;
	  gimplify_seq_add_stmt (&bind_body, switch_stmt);
	  gimple_seq_add_seq (&bind_body, switch_body_seq);
	  gbind *bind = gimple_build_bind (NULL_TREE, bind_body, NULL_TREE);
	  gimple_set_location (bind, EXPR_LOCATION (switch_expr));
	  gimplify_seq_add_stmt (pre_p, bind);
	}
      else
	{
	  gimplify_seq_add_stmt (pre_p, switch_stmt);
	  gimplify_seq_add_seq (pre_p, switch_body_seq);
	}
      labels.release ();
    }
  else
    gcc_unreachable ();

  return GS_ALL_DONE;
}
2571 
2572 /* Gimplify the LABEL_EXPR pointed to by EXPR_P.  */
2573 
2574 static enum gimplify_status
gimplify_label_expr(tree * expr_p,gimple_seq * pre_p)2575 gimplify_label_expr (tree *expr_p, gimple_seq *pre_p)
2576 {
2577   gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p))
2578 	      == current_function_decl);
2579 
2580   tree label = LABEL_EXPR_LABEL (*expr_p);
2581   glabel *label_stmt = gimple_build_label (label);
2582   gimple_set_location (label_stmt, EXPR_LOCATION (*expr_p));
2583   gimplify_seq_add_stmt (pre_p, label_stmt);
2584 
2585   if (lookup_attribute ("cold", DECL_ATTRIBUTES (label)))
2586     gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_COLD_LABEL,
2587 						      NOT_TAKEN));
2588   else if (lookup_attribute ("hot", DECL_ATTRIBUTES (label)))
2589     gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_HOT_LABEL,
2590 						      TAKEN));
2591 
2592   return GS_ALL_DONE;
2593 }
2594 
2595 /* Gimplify the CASE_LABEL_EXPR pointed to by EXPR_P.  */
2596 
2597 static enum gimplify_status
gimplify_case_label_expr(tree * expr_p,gimple_seq * pre_p)2598 gimplify_case_label_expr (tree *expr_p, gimple_seq *pre_p)
2599 {
2600   struct gimplify_ctx *ctxp;
2601   glabel *label_stmt;
2602 
2603   /* Invalid programs can play Duff's Device type games with, for example,
2604      #pragma omp parallel.  At least in the C front end, we don't
2605      detect such invalid branches until after gimplification, in the
2606      diagnose_omp_blocks pass.  */
2607   for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context)
2608     if (ctxp->case_labels.exists ())
2609       break;
2610 
2611   tree label = CASE_LABEL (*expr_p);
2612   label_stmt = gimple_build_label (label);
2613   gimple_set_location (label_stmt, EXPR_LOCATION (*expr_p));
2614   ctxp->case_labels.safe_push (*expr_p);
2615   gimplify_seq_add_stmt (pre_p, label_stmt);
2616 
2617   if (lookup_attribute ("cold", DECL_ATTRIBUTES (label)))
2618     gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_COLD_LABEL,
2619 						      NOT_TAKEN));
2620   else if (lookup_attribute ("hot", DECL_ATTRIBUTES (label)))
2621     gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_HOT_LABEL,
2622 						      TAKEN));
2623 
2624   return GS_ALL_DONE;
2625 }
2626 
2627 /* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
2628    if necessary.  */
2629 
2630 tree
build_and_jump(tree * label_p)2631 build_and_jump (tree *label_p)
2632 {
2633   if (label_p == NULL)
2634     /* If there's nowhere to jump, just fall through.  */
2635     return NULL_TREE;
2636 
2637   if (*label_p == NULL_TREE)
2638     {
2639       tree label = create_artificial_label (UNKNOWN_LOCATION);
2640       *label_p = label;
2641     }
2642 
2643   return build1 (GOTO_EXPR, void_type_node, *label_p);
2644 }
2645 
2646 /* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
2647    This also involves building a label to jump to and communicating it to
2648    gimplify_loop_expr through gimplify_ctxp->exit_label.  */
2649 
2650 static enum gimplify_status
gimplify_exit_expr(tree * expr_p)2651 gimplify_exit_expr (tree *expr_p)
2652 {
2653   tree cond = TREE_OPERAND (*expr_p, 0);
2654   tree expr;
2655 
2656   expr = build_and_jump (&gimplify_ctxp->exit_label);
2657   expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE);
2658   *expr_p = expr;
2659 
2660   return GS_OK;
2661 }
2662 
2663 /* *EXPR_P is a COMPONENT_REF being used as an rvalue.  If its type is
2664    different from its canonical type, wrap the whole thing inside a
2665    NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
2666    type.
2667 
2668    The canonical type of a COMPONENT_REF is the type of the field being
2669    referenced--unless the field is a bit-field which can be read directly
2670    in a smaller mode, in which case the canonical type is the
2671    sign-appropriate type corresponding to that mode.  */
2672 
2673 static void
canonicalize_component_ref(tree * expr_p)2674 canonicalize_component_ref (tree *expr_p)
2675 {
2676   tree expr = *expr_p;
2677   tree type;
2678 
2679   gcc_assert (TREE_CODE (expr) == COMPONENT_REF);
2680 
2681   if (INTEGRAL_TYPE_P (TREE_TYPE (expr)))
2682     type = TREE_TYPE (get_unwidened (expr, NULL_TREE));
2683   else
2684     type = TREE_TYPE (TREE_OPERAND (expr, 1));
2685 
2686   /* One could argue that all the stuff below is not necessary for
2687      the non-bitfield case and declare it a FE error if type
2688      adjustment would be needed.  */
2689   if (TREE_TYPE (expr) != type)
2690     {
2691 #ifdef ENABLE_TYPES_CHECKING
2692       tree old_type = TREE_TYPE (expr);
2693 #endif
2694       int type_quals;
2695 
2696       /* We need to preserve qualifiers and propagate them from
2697 	 operand 0.  */
2698       type_quals = TYPE_QUALS (type)
2699 	| TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr, 0)));
2700       if (TYPE_QUALS (type) != type_quals)
2701 	type = build_qualified_type (TYPE_MAIN_VARIANT (type), type_quals);
2702 
2703       /* Set the type of the COMPONENT_REF to the underlying type.  */
2704       TREE_TYPE (expr) = type;
2705 
2706 #ifdef ENABLE_TYPES_CHECKING
2707       /* It is now a FE error, if the conversion from the canonical
2708 	 type to the original expression type is not useless.  */
2709       gcc_assert (useless_type_conversion_p (old_type, type));
2710 #endif
2711     }
2712 }
2713 
2714 /* If a NOP conversion is changing a pointer to array of foo to a pointer
2715    to foo, embed that change in the ADDR_EXPR by converting
2716       T array[U];
2717       (T *)&array
2718    ==>
2719       &array[L]
2720    where L is the lower bound.  For simplicity, only do this for constant
2721    lower bound.
2722    The constraint is that the type of &array[L] is trivially convertible
2723    to T *.  */
2724 
static void
canonicalize_addr_expr (tree *expr_p)
{
  tree expr = *expr_p;
  tree addr_expr = TREE_OPERAND (expr, 0);
  /* DATYPE is the array type pointed to, DDATYPE its element type,
     PDDATYPE a pointer to that element type.  */
  tree datype, ddatype, pddatype;

  /* We simplify only conversions from an ADDR_EXPR to a pointer type.  */
  if (!POINTER_TYPE_P (TREE_TYPE (expr))
      || TREE_CODE (addr_expr) != ADDR_EXPR)
    return;

  /* The addr_expr type should be a pointer to an array.  */
  datype = TREE_TYPE (TREE_TYPE (addr_expr));
  if (TREE_CODE (datype) != ARRAY_TYPE)
    return;

  /* The pointer to element type shall be trivially convertible to
     the expression pointer type.  */
  ddatype = TREE_TYPE (datype);
  pddatype = build_pointer_type (ddatype);
  if (!useless_type_conversion_p (TYPE_MAIN_VARIANT (TREE_TYPE (expr)),
				  pddatype))
    return;

  /* The lower bound and element sizes must be constant.  */
  if (!TYPE_SIZE_UNIT (ddatype)
      || TREE_CODE (TYPE_SIZE_UNIT (ddatype)) != INTEGER_CST
      || !TYPE_DOMAIN (datype) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype))
      || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype))) != INTEGER_CST)
    return;

  /* All checks succeeded.  Build a new node to merge the cast:
     &array[low_bound] in place of (T *)&array.  */
  *expr_p = build4 (ARRAY_REF, ddatype, TREE_OPERAND (addr_expr, 0),
		    TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
		    NULL_TREE, NULL_TREE);
  *expr_p = build1 (ADDR_EXPR, pddatype, *expr_p);

  /* We can have stripped a required restrict qualifier above.  Re-apply
     the original pointer type via fold_convert if the new type is not
     directly usable.  */
  if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
    *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
}
2767 
2768 /* *EXPR_P is a NOP_EXPR or CONVERT_EXPR.  Remove it and/or other conversions
2769    underneath as appropriate.  */
2770 
static enum gimplify_status
gimplify_conversion (tree *expr_p)
{
  location_t loc = EXPR_LOCATION (*expr_p);
  gcc_assert (CONVERT_EXPR_P (*expr_p));

  /* Then strip away all but the outermost conversion.  */
  STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p, 0));

  /* And remove the outermost conversion if it's useless.  */
  if (tree_ssa_useless_type_conversion (*expr_p))
    *expr_p = TREE_OPERAND (*expr_p, 0);

  /* If we still have a conversion at the toplevel,
     then canonicalize some constructs.  */
  if (CONVERT_EXPR_P (*expr_p))
    {
      tree sub = TREE_OPERAND (*expr_p, 0);

      /* If a NOP conversion is changing the type of a COMPONENT_REF
	 expression, then canonicalize its type now in order to expose more
	 redundant conversions.  */
      if (TREE_CODE (sub) == COMPONENT_REF)
	canonicalize_component_ref (&TREE_OPERAND (*expr_p, 0));

      /* If a NOP conversion is changing a pointer to array of foo
	 to a pointer to foo, embed that change in the ADDR_EXPR.  */
      else if (TREE_CODE (sub) == ADDR_EXPR)
	canonicalize_addr_expr (expr_p);
    }

  /* If we have a conversion to a non-register type force the
     use of a VIEW_CONVERT_EXPR instead.  GIMPLE only allows plain
     NOP/CONVERT casts between register types.  */
  if (CONVERT_EXPR_P (*expr_p) && !is_gimple_reg_type (TREE_TYPE (*expr_p)))
    *expr_p = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (*expr_p),
			       TREE_OPERAND (*expr_p, 0));

  /* Canonicalize CONVERT_EXPR to NOP_EXPR so later passes only need to
     handle one conversion code.  */
  if (TREE_CODE (*expr_p) == CONVERT_EXPR)
    TREE_SET_CODE (*expr_p, NOP_EXPR);

  return GS_OK;
}
2814 
2815 /* Gimplify a VAR_DECL or PARM_DECL.  Return GS_OK if we expanded a
2816    DECL_VALUE_EXPR, and it's worth re-examining things.  */
2817 
static enum gimplify_status
gimplify_var_or_parm_decl (tree *expr_p)
{
  tree decl = *expr_p;

  /* ??? If this is a local variable, and it has not been seen in any
     outer BIND_EXPR, then it's probably the result of a duplicate
     declaration, for which we've already issued an error.  It would
     be really nice if the front end wouldn't leak these at all.
     Currently the only known culprit is C++ destructors, as seen
     in g++.old-deja/g++.jason/binding.C.  */
  if (VAR_P (decl)
      && !DECL_SEEN_IN_BIND_EXPR_P (decl)
      && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
      && decl_function_context (decl) == current_function_decl)
    {
      gcc_assert (seen_error ());
      return GS_ERROR;
    }

  /* When within an OMP context, notice uses of variables.  If the
     variable needs privatization/mapping handling, stop here and
     leave the DECL as-is.  */
  if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true))
    return GS_ALL_DONE;

  /* If the decl is an alias for another expression, substitute it now.
     Return GS_OK so the caller re-examines the substituted expression
     (it may expose further COMPONENT_REFs etc.).  */
  if (DECL_HAS_VALUE_EXPR_P (decl))
    {
      *expr_p = unshare_expr (DECL_VALUE_EXPR (decl));
      return GS_OK;
    }

  return GS_ALL_DONE;
}
2851 
2852 /* Recalculate the value of the TREE_SIDE_EFFECTS flag for T.  */
2853 
2854 static void
recalculate_side_effects(tree t)2855 recalculate_side_effects (tree t)
2856 {
2857   enum tree_code code = TREE_CODE (t);
2858   int len = TREE_OPERAND_LENGTH (t);
2859   int i;
2860 
2861   switch (TREE_CODE_CLASS (code))
2862     {
2863     case tcc_expression:
2864       switch (code)
2865 	{
2866 	case INIT_EXPR:
2867 	case MODIFY_EXPR:
2868 	case VA_ARG_EXPR:
2869 	case PREDECREMENT_EXPR:
2870 	case PREINCREMENT_EXPR:
2871 	case POSTDECREMENT_EXPR:
2872 	case POSTINCREMENT_EXPR:
2873 	  /* All of these have side-effects, no matter what their
2874 	     operands are.  */
2875 	  return;
2876 
2877 	default:
2878 	  break;
2879 	}
2880       /* Fall through.  */
2881 
2882     case tcc_comparison:  /* a comparison expression */
2883     case tcc_unary:       /* a unary arithmetic expression */
2884     case tcc_binary:      /* a binary arithmetic expression */
2885     case tcc_reference:   /* a reference */
2886     case tcc_vl_exp:        /* a function call */
2887       TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t);
2888       for (i = 0; i < len; ++i)
2889 	{
2890 	  tree op = TREE_OPERAND (t, i);
2891 	  if (op && TREE_SIDE_EFFECTS (op))
2892 	    TREE_SIDE_EFFECTS (t) = 1;
2893 	}
2894       break;
2895 
2896     case tcc_constant:
2897       /* No side-effects.  */
2898       return;
2899 
2900     default:
2901       gcc_unreachable ();
2902    }
2903 }
2904 
2905 /* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
2906    node *EXPR_P.
2907 
2908       compound_lval
2909 	      : min_lval '[' val ']'
2910 	      | min_lval '.' ID
2911 	      | compound_lval '[' val ']'
2912 	      | compound_lval '.' ID
2913 
2914    This is not part of the original SIMPLE definition, which separates
2915    array and member references, but it seems reasonable to handle them
2916    together.  Also, this way we don't run into problems with union
2917    aliasing; gcc requires that for accesses through a union to alias, the
2918    union reference must be explicit, which was not always the case when we
2919    were splitting up array and member refs.
2920 
2921    PRE_P points to the sequence where side effects that must happen before
2922      *EXPR_P should be stored.
2923 
2924    POST_P points to the sequence where side effects that must happen after
2925      *EXPR_P should be stored.  */
2926 
static enum gimplify_status
gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
			fallback_t fallback)
{
  tree *p;
  enum gimplify_status ret = GS_ALL_DONE, tret;
  int i;
  location_t loc = EXPR_LOCATION (*expr_p);
  tree expr = *expr_p;

  /* Create a stack of the subexpressions so later we can walk them in
     order from inner to outer.  */
  auto_vec<tree, 10> expr_stack;

  /* We can handle anything that get_inner_reference can deal with.  */
  for (p = expr_p; ; p = &TREE_OPERAND (*p, 0))
    {
    restart:
      /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs.  */
      if (TREE_CODE (*p) == INDIRECT_REF)
	*p = fold_indirect_ref_loc (loc, *p);

      if (handled_component_p (*p))
	;
      /* Expand DECL_VALUE_EXPR now.  In some cases that may expose
	 additional COMPONENT_REFs.  GS_OK means a substitution was
	 made, so re-examine the same position.  */
      else if ((VAR_P (*p) || TREE_CODE (*p) == PARM_DECL)
	       && gimplify_var_or_parm_decl (p) == GS_OK)
	goto restart;
      else
	break;

      expr_stack.safe_push (*p);
    }

  gcc_assert (expr_stack.length ());

  /* Now EXPR_STACK is a stack of pointers to all the refs we've
     walked through and P points to the innermost expression.

     Java requires that we elaborated nodes in source order.  That
     means we must gimplify the inner expression followed by each of
     the indices, in order.  But we can't gimplify the inner
     expression until we deal with any variable bounds, sizes, or
     positions in order to deal with PLACEHOLDER_EXPRs.

     So we do this in three steps.  First we deal with the annotations
     for any variables in the components, then we gimplify the base,
     then we gimplify any indices, from left to right.  */
  for (i = expr_stack.length () - 1; i >= 0; i--)
    {
      tree t = expr_stack[i];

      if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
	{
	  /* Gimplify the low bound and element type size and put them into
	     the ARRAY_REF.  If these values are set, they have already been
	     gimplified.  */
	  if (TREE_OPERAND (t, 2) == NULL_TREE)
	    {
	      tree low = unshare_expr (array_ref_low_bound (t));
	      /* Invariant bounds stay implicit (operand 2 left NULL).  */
	      if (!is_gimple_min_invariant (low))
		{
		  TREE_OPERAND (t, 2) = low;
		  tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
					post_p, is_gimple_reg,
					fb_rvalue);
		  ret = MIN (ret, tret);
		}
	    }
	  else
	    {
	      tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
				    is_gimple_reg, fb_rvalue);
	      ret = MIN (ret, tret);
	    }

	  if (TREE_OPERAND (t, 3) == NULL_TREE)
	    {
	      tree elmt_size = array_ref_element_size (t);
	      if (!is_gimple_min_invariant (elmt_size))
		{
		  elmt_size = unshare_expr (elmt_size);
		  tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0)));
		  tree factor = size_int (TYPE_ALIGN_UNIT (elmt_type));

		  /* Divide the element size by the alignment of the element
		     type (above).  Operand 3 is stored in units of that
		     alignment by convention.  */
		  elmt_size = size_binop_loc (loc, EXACT_DIV_EXPR,
					      elmt_size, factor);

		  TREE_OPERAND (t, 3) = elmt_size;
		  tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p,
					post_p, is_gimple_reg,
					fb_rvalue);
		  ret = MIN (ret, tret);
		}
	    }
	  else
	    {
	      tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p, post_p,
				    is_gimple_reg, fb_rvalue);
	      ret = MIN (ret, tret);
	    }
	}
      else if (TREE_CODE (t) == COMPONENT_REF)
	{
	  /* Set the field offset into T and gimplify it.  */
	  if (TREE_OPERAND (t, 2) == NULL_TREE)
	    {
	      tree offset = component_ref_field_offset (t);
	      if (!is_gimple_min_invariant (offset))
		{
		  offset = unshare_expr (offset);
		  tree field = TREE_OPERAND (t, 1);
		  tree factor
		    = size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT);

		  /* Divide the offset by its alignment.  Like ARRAY_REF
		     operand 3, operand 2 is stored in alignment units.  */
		  offset = size_binop_loc (loc, EXACT_DIV_EXPR,
					   offset, factor);

		  TREE_OPERAND (t, 2) = offset;
		  tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
					post_p, is_gimple_reg,
					fb_rvalue);
		  ret = MIN (ret, tret);
		}
	    }
	  else
	    {
	      tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
				    is_gimple_reg, fb_rvalue);
	      ret = MIN (ret, tret);
	    }
	}
    }

  /* Step 2 is to gimplify the base expression.  Make sure lvalue is set
     so as to match the min_lval predicate.  Failure to do so may result
     in the creation of large aggregate temporaries.  */
  tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval,
			fallback | fb_lvalue);
  ret = MIN (ret, tret);

  /* And finally, the indices and operands of ARRAY_REF.  During this
     loop we also remove any useless conversions.  */
  for (; expr_stack.length () > 0; )
    {
      tree t = expr_stack.pop ();

      if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
	{
	  /* Gimplify the dimension.  */
	  if (!is_gimple_min_invariant (TREE_OPERAND (t, 1)))
	    {
	      tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
				    is_gimple_val, fb_rvalue);
	      ret = MIN (ret, tret);
	    }
	}

      STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t, 0));

      /* The innermost expression P may have originally had
	 TREE_SIDE_EFFECTS set which would have caused all the outer
	 expressions in *EXPR_P leading to P to also have had
	 TREE_SIDE_EFFECTS set.  */
      recalculate_side_effects (t);
    }

  /* If the outermost expression is a COMPONENT_REF, canonicalize its type.  */
  if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF)
    {
      canonicalize_component_ref (expr_p);
    }

  expr_stack.release ();

  /* If nothing changed, we must not claim GS_ALL_DONE was downgraded.  */
  gcc_assert (*expr_p == expr || ret != GS_ALL_DONE);

  return ret;
}
3110 
3111 /*  Gimplify the self modifying expression pointed to by EXPR_P
3112     (++, --, +=, -=).
3113 
3114     PRE_P points to the list where side effects that must happen before
3115 	*EXPR_P should be stored.
3116 
3117     POST_P points to the list where side effects that must happen after
3118 	*EXPR_P should be stored.
3119 
3120     WANT_VALUE is nonzero iff we want to use the value of this expression
3121 	in another expression.
3122 
3123     ARITH_TYPE is the type the computation should be performed in.  */
3124 
enum gimplify_status
gimplify_self_mod_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
			bool want_value, tree arith_type)
{
  enum tree_code code;
  tree lhs, lvalue, rhs, t1;
  gimple_seq post = NULL, *orig_post_p = post_p;
  bool postfix;
  enum tree_code arith_code;
  enum gimplify_status ret;
  location_t loc = EXPR_LOCATION (*expr_p);

  code = TREE_CODE (*expr_p);

  gcc_assert (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR
	      || code == PREINCREMENT_EXPR || code == PREDECREMENT_EXPR);

  /* Prefix or postfix?  */
  if (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR)
    /* Faster to treat as prefix if result is not used.  */
    postfix = want_value;
  else
    postfix = false;

  /* For postfix, make sure the inner expression's post side effects
     are executed after side effects from this expression.  We use a
     local POST sequence and splice it into ORIG_POST_P at the end.  */
  if (postfix)
    post_p = &post;

  /* Add or subtract?  */
  if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
    arith_code = PLUS_EXPR;
  else
    arith_code = MINUS_EXPR;

  /* Gimplify the LHS into a GIMPLE lvalue.  */
  lvalue = TREE_OPERAND (*expr_p, 0);
  ret = gimplify_expr (&lvalue, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
  if (ret == GS_ERROR)
    return ret;

  /* Extract the operands to the arithmetic operation.  */
  lhs = lvalue;
  rhs = TREE_OPERAND (*expr_p, 1);

  /* For postfix operator, we evaluate the LHS to an rvalue and then use
     that as the result value and in the postqueue operation.  */
  if (postfix)
    {
      ret = gimplify_expr (&lhs, pre_p, post_p, is_gimple_val, fb_rvalue);
      if (ret == GS_ERROR)
	return ret;

      /* Capture the pre-modification value in a temporary.  */
      lhs = get_initialized_tmp_var (lhs, pre_p);
    }

  /* For POINTERs increment, use POINTER_PLUS_EXPR.  A decrement becomes
     an addition of the negated (ptrofftype) offset.  */
  if (POINTER_TYPE_P (TREE_TYPE (lhs)))
    {
      rhs = convert_to_ptrofftype_loc (loc, rhs);
      if (arith_code == MINUS_EXPR)
	rhs = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (rhs), rhs);
      t1 = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (*expr_p), lhs, rhs);
    }
  else
    t1 = fold_convert (TREE_TYPE (*expr_p),
		       fold_build2 (arith_code, arith_type,
				    fold_convert (arith_type, lhs),
				    fold_convert (arith_type, rhs)));

  if (postfix)
    {
      /* Emit the store, then the inner expression's post queue, and
	 make the saved pre-modification value the result.  */
      gimplify_assign (lvalue, t1, pre_p);
      gimplify_seq_add_seq (orig_post_p, post);
      *expr_p = lhs;
      return GS_ALL_DONE;
    }
  else
    {
      /* Prefix: leave a MODIFY_EXPR for the caller to re-gimplify.  */
      *expr_p = build2 (MODIFY_EXPR, TREE_TYPE (lvalue), lvalue, t1);
      return GS_OK;
    }
}
3208 
3209 /* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR.  */
3210 
3211 static void
maybe_with_size_expr(tree * expr_p)3212 maybe_with_size_expr (tree *expr_p)
3213 {
3214   tree expr = *expr_p;
3215   tree type = TREE_TYPE (expr);
3216   tree size;
3217 
3218   /* If we've already wrapped this or the type is error_mark_node, we can't do
3219      anything.  */
3220   if (TREE_CODE (expr) == WITH_SIZE_EXPR
3221       || type == error_mark_node)
3222     return;
3223 
3224   /* If the size isn't known or is a constant, we have nothing to do.  */
3225   size = TYPE_SIZE_UNIT (type);
3226   if (!size || poly_int_tree_p (size))
3227     return;
3228 
3229   /* Otherwise, make a WITH_SIZE_EXPR.  */
3230   size = unshare_expr (size);
3231   size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, expr);
3232   *expr_p = build2 (WITH_SIZE_EXPR, type, expr, size);
3233 }
3234 
3235 /* Helper for gimplify_call_expr.  Gimplify a single argument *ARG_P
3236    Store any side-effects in PRE_P.  CALL_LOCATION is the location of
3237    the CALL_EXPR.  If ALLOW_SSA is set the actual parameter may be
3238    gimplified to an SSA name.  */
3239 
3240 enum gimplify_status
gimplify_arg(tree * arg_p,gimple_seq * pre_p,location_t call_location,bool allow_ssa)3241 gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location,
3242 	      bool allow_ssa)
3243 {
3244   bool (*test) (tree);
3245   fallback_t fb;
3246 
3247   /* In general, we allow lvalues for function arguments to avoid
3248      extra overhead of copying large aggregates out of even larger
3249      aggregates into temporaries only to copy the temporaries to
3250      the argument list.  Make optimizers happy by pulling out to
3251      temporaries those types that fit in registers.  */
3252   if (is_gimple_reg_type (TREE_TYPE (*arg_p)))
3253     test = is_gimple_val, fb = fb_rvalue;
3254   else
3255     {
3256       test = is_gimple_lvalue, fb = fb_either;
3257       /* Also strip a TARGET_EXPR that would force an extra copy.  */
3258       if (TREE_CODE (*arg_p) == TARGET_EXPR)
3259 	{
3260 	  tree init = TARGET_EXPR_INITIAL (*arg_p);
3261 	  if (init
3262 	      && !VOID_TYPE_P (TREE_TYPE (init)))
3263 	    *arg_p = init;
3264 	}
3265     }
3266 
3267   /* If this is a variable sized type, we must remember the size.  */
3268   maybe_with_size_expr (arg_p);
3269 
3270   /* FIXME diagnostics: This will mess up gcc.dg/Warray-bounds.c.  */
3271   /* Make sure arguments have the same location as the function call
3272      itself.  */
3273   protected_set_expr_location (*arg_p, call_location);
3274 
3275   /* There is a sequence point before a function call.  Side effects in
3276      the argument list must occur before the actual call. So, when
3277      gimplifying arguments, force gimplify_expr to use an internal
3278      post queue which is then appended to the end of PRE_P.  */
3279   return gimplify_expr (arg_p, pre_p, NULL, test, fb, allow_ssa);
3280 }
3281 
3282 /* Don't fold inside offloading or taskreg regions: it can break code by
3283    adding decl references that weren't in the source.  We'll do it during
3284    omplower pass instead.  */
3285 
3286 static bool
maybe_fold_stmt(gimple_stmt_iterator * gsi)3287 maybe_fold_stmt (gimple_stmt_iterator *gsi)
3288 {
3289   struct gimplify_omp_ctx *ctx;
3290   for (ctx = gimplify_omp_ctxp; ctx; ctx = ctx->outer_context)
3291     if ((ctx->region_type & (ORT_TARGET | ORT_PARALLEL | ORT_TASK)) != 0)
3292       return false;
3293     else if ((ctx->region_type & ORT_HOST_TEAMS) == ORT_HOST_TEAMS)
3294       return false;
3295   /* Delay folding of builtins until the IL is in consistent state
3296      so the diagnostic machinery can do a better job.  */
3297   if (gimple_call_builtin_p (gsi_stmt (*gsi)))
3298     return false;
3299   return fold_stmt (gsi);
3300 }
3301 
3302 /* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P.
3303    WANT_VALUE is true if the result of the call is desired.  */
3304 
static enum gimplify_status
gimplify_call_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
{
  tree fndecl, parms, p, fnptrtype;
  enum gimplify_status ret;
  int i, nargs;
  gcall *call;
  bool builtin_va_start_p = false;
  location_t loc = EXPR_LOCATION (*expr_p);

  gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR);

  /* For reliable diagnostics during inlining, it is necessary that
     every call_expr be annotated with file and line.  */
  if (! EXPR_HAS_LOCATION (*expr_p))
    SET_EXPR_LOCATION (*expr_p, input_location);

  /* Gimplify internal functions created in the FEs.  These have a NULL
     CALL_EXPR_FN and are identified by CALL_EXPR_IFN instead.  */
  if (CALL_EXPR_FN (*expr_p) == NULL_TREE)
    {
      if (want_value)
	return GS_ALL_DONE;

      nargs = call_expr_nargs (*expr_p);
      enum internal_fn ifn = CALL_EXPR_IFN (*expr_p);
      auto_vec<tree> vargs (nargs);

      for (i = 0; i < nargs; i++)
	{
	  gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
			EXPR_LOCATION (*expr_p));
	  vargs.quick_push (CALL_EXPR_ARG (*expr_p, i));
	}

      gcall *call = gimple_build_call_internal_vec (ifn, vargs);
      gimple_call_set_nothrow (call, TREE_NOTHROW (*expr_p));
      gimplify_seq_add_stmt (pre_p, call);
      return GS_ALL_DONE;
    }

  /* This may be a call to a builtin function.

     Builtin function calls may be transformed into different
     (and more efficient) builtin function calls under certain
     circumstances.  Unfortunately, gimplification can muck things
     up enough that the builtin expanders are not aware that certain
     transformations are still valid.

     So we attempt transformation/gimplification of the call before
     we gimplify the CALL_EXPR.  At this time we do not manage to
     transform all calls in the same manner as the expanders do, but
     we do transform most of them.  */
  fndecl = get_callee_fndecl (*expr_p);
  if (fndecl && fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
    switch (DECL_FUNCTION_CODE (fndecl))
      {
      CASE_BUILT_IN_ALLOCA:
	/* If the call has been built for a variable-sized object, then we
	   want to restore the stack level when the enclosing BIND_EXPR is
	   exited to reclaim the allocated space; otherwise, we precisely
	   need to do the opposite and preserve the latest stack level.  */
	if (CALL_ALLOCA_FOR_VAR_P (*expr_p))
	  gimplify_ctxp->save_stack = true;
	else
	  gimplify_ctxp->keep_stack = true;
	break;

      case BUILT_IN_VA_START:
        {
	  builtin_va_start_p = TRUE;
	  if (call_expr_nargs (*expr_p) < 2)
	    {
	      error ("too few arguments to function %<va_start%>");
	      *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
	      return GS_OK;
	    }

	  if (fold_builtin_next_arg (*expr_p, true))
	    {
	      *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
	      return GS_OK;
	    }
	  break;
	}

      case BUILT_IN_EH_RETURN:
	cfun->calls_eh_return = true;
	break;

      default:
        ;
      }
  /* Attempt to fold the builtin call before gimplifying its operands;
     if a cheaper equivalent is found, hand it back for re-processing.  */
  if (fndecl && fndecl_built_in_p (fndecl))
    {
      tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
      if (new_tree && new_tree != *expr_p)
	{
	  /* There was a transformation of this call which computes the
	     same value, but in a more efficient way.  Return and try
	     again.  */
	  *expr_p = new_tree;
	  return GS_OK;
	}
    }

  /* Remember the original function pointer type.  */
  fnptrtype = TREE_TYPE (CALL_EXPR_FN (*expr_p));

  /* For OpenMP, redirect the call to a matching declare-variant
     alternative before the IL goes to GIMPLE.  */
  if (flag_openmp
      && fndecl
      && cfun
      && (cfun->curr_properties & PROP_gimple_any) == 0)
    {
      tree variant = omp_resolve_declare_variant (fndecl);
      if (variant != fndecl)
	CALL_EXPR_FN (*expr_p) = build1 (ADDR_EXPR, fnptrtype, variant);
    }

  /* There is a sequence point before the call, so any side effects in
     the calling expression must occur before the actual call.  Force
     gimplify_expr to use an internal post queue.  */
  ret = gimplify_expr (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
		       is_gimple_call_addr, fb_rvalue);

  nargs = call_expr_nargs (*expr_p);

  /* Get argument types for verification.  */
  fndecl = get_callee_fndecl (*expr_p);
  parms = NULL_TREE;
  if (fndecl)
    parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  else
    parms = TYPE_ARG_TYPES (TREE_TYPE (fnptrtype));

  if (fndecl && DECL_ARGUMENTS (fndecl))
    p = DECL_ARGUMENTS (fndecl);
  else if (parms)
    p = parms;
  else
    p = NULL_TREE;
  /* Advance P past the named parameters covered by the actual
     arguments; P becomes NULL iff all arguments are named.  */
  for (i = 0; i < nargs && p; i++, p = TREE_CHAIN (p))
    ;

  /* If the last argument is __builtin_va_arg_pack () and it is not
     passed as a named argument, decrease the number of CALL_EXPR
     arguments and set instead the CALL_EXPR_VA_ARG_PACK flag.  */
  if (!p
      && i < nargs
      && TREE_CODE (CALL_EXPR_ARG (*expr_p, nargs - 1)) == CALL_EXPR)
    {
      tree last_arg = CALL_EXPR_ARG (*expr_p, nargs - 1);
      tree last_arg_fndecl = get_callee_fndecl (last_arg);

      if (last_arg_fndecl
	  && fndecl_built_in_p (last_arg_fndecl, BUILT_IN_VA_ARG_PACK))
	{
	  tree call = *expr_p;

	  --nargs;
	  *expr_p = build_call_array_loc (loc, TREE_TYPE (call),
					  CALL_EXPR_FN (call),
					  nargs, CALL_EXPR_ARGP (call));

	  /* Copy all CALL_EXPR flags, location and block, except
	     CALL_EXPR_VA_ARG_PACK flag.  */
	  CALL_EXPR_STATIC_CHAIN (*expr_p) = CALL_EXPR_STATIC_CHAIN (call);
	  CALL_EXPR_TAILCALL (*expr_p) = CALL_EXPR_TAILCALL (call);
	  CALL_EXPR_RETURN_SLOT_OPT (*expr_p)
	    = CALL_EXPR_RETURN_SLOT_OPT (call);
	  CALL_FROM_THUNK_P (*expr_p) = CALL_FROM_THUNK_P (call);
	  SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (call));

	  /* Set CALL_EXPR_VA_ARG_PACK.  */
	  CALL_EXPR_VA_ARG_PACK (*expr_p) = 1;
	}
    }

  /* If the call returns twice then after building the CFG the call
     argument computations will no longer dominate the call because
     we add an abnormal incoming edge to the call.  So do not use SSA
     vars there.  */
  bool returns_twice = call_expr_flags (*expr_p) & ECF_RETURNS_TWICE;

  /* Gimplify the function arguments.  */
  if (nargs > 0)
    {
      for (i = (PUSH_ARGS_REVERSED ? nargs - 1 : 0);
           PUSH_ARGS_REVERSED ? i >= 0 : i < nargs;
           PUSH_ARGS_REVERSED ? i-- : i++)
        {
          enum gimplify_status t;

          /* Avoid gimplifying the second argument to va_start, which needs to
             be the plain PARM_DECL.  */
          if ((i != 1) || !builtin_va_start_p)
            {
              t = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
				EXPR_LOCATION (*expr_p), ! returns_twice);

              if (t == GS_ERROR)
                ret = GS_ERROR;
            }
        }
    }

  /* Gimplify the static chain.  */
  if (CALL_EXPR_STATIC_CHAIN (*expr_p))
    {
      if (fndecl && !DECL_STATIC_CHAIN (fndecl))
	CALL_EXPR_STATIC_CHAIN (*expr_p) = NULL;
      else
	{
	  enum gimplify_status t;
	  t = gimplify_arg (&CALL_EXPR_STATIC_CHAIN (*expr_p), pre_p,
			    EXPR_LOCATION (*expr_p), ! returns_twice);
	  if (t == GS_ERROR)
	    ret = GS_ERROR;
	}
    }

  /* Verify the function result.  */
  if (want_value && fndecl
      && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fnptrtype))))
    {
      error_at (loc, "using result of function returning %<void%>");
      ret = GS_ERROR;
    }

  /* Try this again in case gimplification exposed something.  */
  if (ret != GS_ERROR)
    {
      tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);

      if (new_tree && new_tree != *expr_p)
	{
	  /* There was a transformation of this call which computes the
	     same value, but in a more efficient way.  Return and try
	     again.  */
	  *expr_p = new_tree;
	  return GS_OK;
	}
    }
  else
    {
      *expr_p = error_mark_node;
      return GS_ERROR;
    }

  /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
     decl.  This allows us to eliminate redundant or useless
     calls to "const" functions.  */
  if (TREE_CODE (*expr_p) == CALL_EXPR)
    {
      int flags = call_expr_flags (*expr_p);
      if (flags & (ECF_CONST | ECF_PURE)
	  /* An infinite loop is considered a side effect.  */
	  && !(flags & (ECF_LOOPING_CONST_OR_PURE)))
	TREE_SIDE_EFFECTS (*expr_p) = 0;
    }

  /* If the value is not needed by the caller, emit a new GIMPLE_CALL
     and clear *EXPR_P.  Otherwise, leave *EXPR_P in its gimplified
     form and delegate the creation of a GIMPLE_CALL to
     gimplify_modify_expr.  This is always possible because when
     WANT_VALUE is true, the caller wants the result of this call into
     a temporary, which means that we will emit an INIT_EXPR in
     internal_get_tmp_var which will then be handled by
     gimplify_modify_expr.  */
  if (!want_value)
    {
      /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we
	 have to do is replicate it as a GIMPLE_CALL tuple.  */
      gimple_stmt_iterator gsi;
      call = gimple_build_call_from_tree (*expr_p, fnptrtype);
      notice_special_calls (call);
      gimplify_seq_add_stmt (pre_p, call);
      gsi = gsi_last (*pre_p);
      maybe_fold_stmt (&gsi);
      *expr_p = NULL_TREE;
    }
  else
    /* Remember the original function type.  */
    CALL_EXPR_FN (*expr_p) = build1 (NOP_EXPR, fnptrtype,
				     CALL_EXPR_FN (*expr_p));

  return ret;
}
3592 
3593 /* Handle shortcut semantics in the predicate operand of a COND_EXPR by
3594    rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.
3595 
3596    TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
3597    condition is true or false, respectively.  If null, we should generate
3598    our own to skip over the evaluation of this specific expression.
3599 
3600    LOCUS is the source location of the COND_EXPR.
3601 
3602    This function is the tree equivalent of do_jump.
3603 
3604    shortcut_cond_r should only be called by shortcut_cond_expr.  */
3605 
static tree
shortcut_cond_r (tree pred, tree *true_label_p, tree *false_label_p,
		 location_t locus)
{
  /* Label created locally when the caller passed a NULL label pointer;
     it is emitted at the end as the join point of this subexpression.  */
  tree local_label = NULL_TREE;
  tree t, expr = NULL;

  /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
     retain the shortcut semantics.  Just insert the gotos here;
     shortcut_cond_expr will append the real blocks later.  */
  if (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
    {
      location_t new_locus;

      /* Turn if (a && b) into

	 if (a); else goto no;
	 if (b) goto yes; else goto no;
	 (no:) */

      if (false_label_p == NULL)
	false_label_p = &local_label;

      /* Keep the original source location on the first 'if'.  */
      t = shortcut_cond_r (TREE_OPERAND (pred, 0), NULL, false_label_p, locus);
      append_to_statement_list (t, &expr);

      /* Set the source location of the && on the second 'if'.  */
      new_locus = rexpr_location (pred, locus);
      t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
			   new_locus);
      append_to_statement_list (t, &expr);
    }
  else if (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
    {
      location_t new_locus;

      /* Turn if (a || b) into

	 if (a) goto yes;
	 if (b) goto yes; else goto no;
	 (yes:) */

      if (true_label_p == NULL)
	true_label_p = &local_label;

      /* Keep the original source location on the first 'if'.  */
      t = shortcut_cond_r (TREE_OPERAND (pred, 0), true_label_p, NULL, locus);
      append_to_statement_list (t, &expr);

      /* Set the source location of the || on the second 'if'.  */
      new_locus = rexpr_location (pred, locus);
      t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
			   new_locus);
      append_to_statement_list (t, &expr);
    }
  else if (TREE_CODE (pred) == COND_EXPR
	   && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 1)))
	   && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 2))))
    {
      location_t new_locus;

      /* As long as we're messing with gotos, turn if (a ? b : c) into
	 if (a)
	   if (b) goto yes; else goto no;
	 else
	   if (c) goto yes; else goto no;

	 Don't do this if one of the arms has void type, which can happen
	 in C++ when the arm is throw.  */

      /* Keep the original source location on the first 'if'.  Set the source
	 location of the ? on the second 'if'.  */
      new_locus = rexpr_location (pred, locus);
      expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (pred, 0),
		     shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
				      false_label_p, locus),
		     shortcut_cond_r (TREE_OPERAND (pred, 2), true_label_p,
				      false_label_p, new_locus));
    }
  else
    {
      /* Base case: a simple predicate.  Emit a single conditional jump;
	 build_and_jump creates the label on demand when the pointee of
	 *true_label_p / *false_label_p is still NULL.  */
      expr = build3 (COND_EXPR, void_type_node, pred,
		     build_and_jump (true_label_p),
		     build_and_jump (false_label_p));
      SET_EXPR_LOCATION (expr, locus);
    }

  /* If we synthesized a label above, it marks the point execution reaches
     when the short-circuit is taken; emit it after the jumps.  */
  if (local_label)
    {
      t = build1 (LABEL_EXPR, void_type_node, local_label);
      append_to_statement_list (t, &expr);
    }

  return expr;
}
3702 
3703 /* If EXPR is a GOTO_EXPR, return it.  If it is a STATEMENT_LIST, skip
3704    any of its leading DEBUG_BEGIN_STMTS and recurse on the subsequent
3705    statement, if it is the last one.  Otherwise, return NULL.  */
3706 
3707 static tree
find_goto(tree expr)3708 find_goto (tree expr)
3709 {
3710   if (!expr)
3711     return NULL_TREE;
3712 
3713   if (TREE_CODE (expr) == GOTO_EXPR)
3714     return expr;
3715 
3716   if (TREE_CODE (expr) != STATEMENT_LIST)
3717     return NULL_TREE;
3718 
3719   tree_stmt_iterator i = tsi_start (expr);
3720 
3721   while (!tsi_end_p (i) && TREE_CODE (tsi_stmt (i)) == DEBUG_BEGIN_STMT)
3722     tsi_next (&i);
3723 
3724   if (!tsi_one_before_end_p (i))
3725     return NULL_TREE;
3726 
3727   return find_goto (tsi_stmt (i));
3728 }
3729 
3730 /* Same as find_goto, except that it returns NULL if the destination
3731    is not a LABEL_DECL.  */
3732 
3733 static inline tree
find_goto_label(tree expr)3734 find_goto_label (tree expr)
3735 {
3736   tree dest = find_goto (expr);
3737   if (dest && TREE_CODE (GOTO_DESTINATION (dest)) == LABEL_DECL)
3738     return dest;
3739   return NULL_TREE;
3740 }
3741 
3742 /* Given a conditional expression EXPR with short-circuit boolean
3743    predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the
3744    predicate apart into the equivalent sequence of conditionals.  */
3745 
static tree
shortcut_cond_expr (tree expr)
{
  tree pred = TREE_OPERAND (expr, 0);
  tree then_ = TREE_OPERAND (expr, 1);
  tree else_ = TREE_OPERAND (expr, 2);
  tree true_label, false_label, end_label, t;
  tree *true_label_p;
  tree *false_label_p;
  bool emit_end, emit_false, jump_over_else;
  /* Whether each arm contains anything worth emitting.  */
  bool then_se = then_ && TREE_SIDE_EFFECTS (then_);
  bool else_se = else_ && TREE_SIDE_EFFECTS (else_);

  /* First do simple transformations.  */
  if (!else_se)
    {
      /* If there is no 'else', turn
	   if (a && b) then c
	 into
	   if (a) if (b) then c.  */
      while (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
	{
	  /* Keep the original source location on the first 'if'.  */
	  location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
	  TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
	  /* Set the source location of the && on the second 'if'.  */
	  if (rexpr_has_location (pred))
	    SET_EXPR_LOCATION (expr, rexpr_location (pred));
	  /* Recursively process the inner 'if (b) c', which becomes the
	     new then-arm guarded by 'a'.  */
	  then_ = shortcut_cond_expr (expr);
	  then_se = then_ && TREE_SIDE_EFFECTS (then_);
	  pred = TREE_OPERAND (pred, 0);
	  expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE);
	  SET_EXPR_LOCATION (expr, locus);
	}
    }

  if (!then_se)
    {
      /* If there is no 'then', turn
	   if (a || b); else d
	 into
	   if (a); else if (b); else d.  */
      while (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
	{
	  /* Keep the original source location on the first 'if'.  */
	  location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
	  TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
	  /* Set the source location of the || on the second 'if'.  */
	  if (rexpr_has_location (pred))
	    SET_EXPR_LOCATION (expr, rexpr_location (pred));
	  /* Recursively process the inner 'if (b); else d', which becomes
	     the new else-arm guarded by 'a'.  */
	  else_ = shortcut_cond_expr (expr);
	  else_se = else_ && TREE_SIDE_EFFECTS (else_);
	  pred = TREE_OPERAND (pred, 0);
	  expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_);
	  SET_EXPR_LOCATION (expr, locus);
	}
    }

  /* If we're done, great.  */
  if (TREE_CODE (pred) != TRUTH_ANDIF_EXPR
      && TREE_CODE (pred) != TRUTH_ORIF_EXPR)
    return expr;

  /* Otherwise we need to mess with gotos.  Change
       if (a) c; else d;
     to
       if (a); else goto no;
       c; goto end;
       no: d; end:
     and recursively gimplify the condition.  */

  true_label = false_label = end_label = NULL_TREE;

  /* If our arms just jump somewhere, hijack those labels so we don't
     generate jumps to jumps.  */

  if (tree then_goto = find_goto_label (then_))
    {
      true_label = GOTO_DESTINATION (then_goto);
      then_ = NULL;
      then_se = false;
    }

  if (tree else_goto = find_goto_label (else_))
    {
      false_label = GOTO_DESTINATION (else_goto);
      else_ = NULL;
      else_se = false;
    }

  /* If we aren't hijacking a label for the 'then' branch, it falls through.  */
  if (true_label)
    true_label_p = &true_label;
  else
    true_label_p = NULL;

  /* The 'else' branch also needs a label if it contains interesting code.  */
  if (false_label || else_se)
    false_label_p = &false_label;
  else
    false_label_p = NULL;

  /* If there was nothing else in our arms, just forward the label(s).  */
  if (!then_se && !else_se)
    return shortcut_cond_r (pred, true_label_p, false_label_p,
			    EXPR_LOC_OR_LOC (expr, input_location));

  /* If our last subexpression already has a terminal label, reuse it.  */
  if (else_se)
    t = expr_last (else_);
  else if (then_se)
    t = expr_last (then_);
  else
    t = NULL;
  if (t && TREE_CODE (t) == LABEL_EXPR)
    end_label = LABEL_EXPR_LABEL (t);

  /* If we don't care about jumping to the 'else' branch, jump to the end
     if the condition is false.  */
  if (!false_label_p)
    false_label_p = &end_label;

  /* We only want to emit these labels if we aren't hijacking them.  */
  emit_end = (end_label == NULL_TREE);
  emit_false = (false_label == NULL_TREE);

  /* We only emit the jump over the else clause if we have to--if the
     then clause may fall through.  Otherwise we can wind up with a
     useless jump and a useless label at the end of gimplified code,
     which will cause us to think that this conditional as a whole
     falls through even if it doesn't.  If we then inline a function
     which ends with such a condition, that can cause us to issue an
     inappropriate warning about control reaching the end of a
     non-void function.  */
  jump_over_else = block_may_fallthru (then_);

  /* Expand the short-circuit predicate into jumps; shortcut_cond_r may
     fill in true_label / false_label / end_label via the pointers.  */
  pred = shortcut_cond_r (pred, true_label_p, false_label_p,
			  EXPR_LOC_OR_LOC (expr, input_location));

  /* Assemble the final statement list: predicate jumps, then-arm,
     optional jump over the else-arm, else label + else-arm, end label.  */
  expr = NULL;
  append_to_statement_list (pred, &expr);

  append_to_statement_list (then_, &expr);
  if (else_se)
    {
      if (jump_over_else)
	{
	  tree last = expr_last (expr);
	  t = build_and_jump (&end_label);
	  if (rexpr_has_location (last))
	    SET_EXPR_LOCATION (t, rexpr_location (last));
	  append_to_statement_list (t, &expr);
	}
      if (emit_false)
	{
	  t = build1 (LABEL_EXPR, void_type_node, false_label);
	  append_to_statement_list (t, &expr);
	}
      append_to_statement_list (else_, &expr);
    }
  if (emit_end && end_label)
    {
      t = build1 (LABEL_EXPR, void_type_node, end_label);
      append_to_statement_list (t, &expr);
    }

  return expr;
}
3914 
3915 /* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE.  */
3916 
tree
gimple_boolify (tree expr)
{
  tree type = TREE_TYPE (expr);
  location_t loc = EXPR_LOCATION (expr);

  /* Special-case 'call != 0' where the call is __builtin_expect, so we can
     boolify its first argument when it is itself a truth value.  */
  if (TREE_CODE (expr) == NE_EXPR
      && TREE_CODE (TREE_OPERAND (expr, 0)) == CALL_EXPR
      && integer_zerop (TREE_OPERAND (expr, 1)))
    {
      tree call = TREE_OPERAND (expr, 0);
      tree fn = get_callee_fndecl (call);

      /* For __builtin_expect ((long) (x), y) recurse into x as well
	 if x is truth_value_p.  */
      if (fn
	  && fndecl_built_in_p (fn, BUILT_IN_EXPECT)
	  && call_expr_nargs (call) == 2)
	{
	  tree arg = CALL_EXPR_ARG (call, 0);
	  if (arg)
	    {
	      /* Look through the (long) cast the front end wraps around
		 the first argument.  */
	      if (TREE_CODE (arg) == NOP_EXPR
		  && TREE_TYPE (arg) == TREE_TYPE (call))
		arg = TREE_OPERAND (arg, 0);
	      if (truth_value_p (TREE_CODE (arg)))
		{
		  arg = gimple_boolify (arg);
		  CALL_EXPR_ARG (call, 0)
		    = fold_convert_loc (loc, TREE_TYPE (call), arg);
		}
	    }
	}
    }

  switch (TREE_CODE (expr))
    {
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
      /* Also boolify the arguments of truth exprs.  */
      TREE_OPERAND (expr, 1) = gimple_boolify (TREE_OPERAND (expr, 1));
      /* FALLTHRU */

    case TRUTH_NOT_EXPR:
      TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));

      /* These expressions always produce boolean results.  */
      if (TREE_CODE (type) != BOOLEAN_TYPE)
	TREE_TYPE (expr) = boolean_type_node;
      return expr;

    case ANNOTATE_EXPR:
      /* Loop annotations wrap a condition; boolify the wrapped operand
	 and retype the annotation itself.  */
      switch ((enum annot_expr_kind) TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)))
	{
	case annot_expr_ivdep_kind:
	case annot_expr_unroll_kind:
	case annot_expr_no_vector_kind:
	case annot_expr_vector_kind:
	case annot_expr_parallel_kind:
	  TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
	  if (TREE_CODE (type) != BOOLEAN_TYPE)
	    TREE_TYPE (expr) = boolean_type_node;
	  return expr;
	default:
	  gcc_unreachable ();
	}

    default:
      if (COMPARISON_CLASS_P (expr))
	{
	  /* These expressions always produce boolean results.  */
	  if (TREE_CODE (type) != BOOLEAN_TYPE)
	    TREE_TYPE (expr) = boolean_type_node;
	  return expr;
	}
      /* Other expressions that get here must have boolean values, but
	 might need to be converted to the appropriate mode.  */
      if (TREE_CODE (type) == BOOLEAN_TYPE)
	return expr;
      return fold_convert_loc (loc, boolean_type_node, expr);
    }
}
4002 
4003 /* Given a conditional expression *EXPR_P without side effects, gimplify
4004    its operands.  New statements are inserted to PRE_P.  */
4005 
4006 static enum gimplify_status
gimplify_pure_cond_expr(tree * expr_p,gimple_seq * pre_p)4007 gimplify_pure_cond_expr (tree *expr_p, gimple_seq *pre_p)
4008 {
4009   tree expr = *expr_p, cond;
4010   enum gimplify_status ret, tret;
4011   enum tree_code code;
4012 
4013   cond = gimple_boolify (COND_EXPR_COND (expr));
4014 
4015   /* We need to handle && and || specially, as their gimplification
4016      creates pure cond_expr, thus leading to an infinite cycle otherwise.  */
4017   code = TREE_CODE (cond);
4018   if (code == TRUTH_ANDIF_EXPR)
4019     TREE_SET_CODE (cond, TRUTH_AND_EXPR);
4020   else if (code == TRUTH_ORIF_EXPR)
4021     TREE_SET_CODE (cond, TRUTH_OR_EXPR);
4022   ret = gimplify_expr (&cond, pre_p, NULL, is_gimple_condexpr, fb_rvalue);
4023   COND_EXPR_COND (*expr_p) = cond;
4024 
4025   tret = gimplify_expr (&COND_EXPR_THEN (expr), pre_p, NULL,
4026 				   is_gimple_val, fb_rvalue);
4027   ret = MIN (ret, tret);
4028   tret = gimplify_expr (&COND_EXPR_ELSE (expr), pre_p, NULL,
4029 				   is_gimple_val, fb_rvalue);
4030 
4031   return MIN (ret, tret);
4032 }
4033 
4034 /* Return true if evaluating EXPR could trap.
4035    EXPR is GENERIC, while tree_could_trap_p can be called
4036    only on GIMPLE.  */
4037 
4038 bool
generic_expr_could_trap_p(tree expr)4039 generic_expr_could_trap_p (tree expr)
4040 {
4041   unsigned i, n;
4042 
4043   if (!expr || is_gimple_val (expr))
4044     return false;
4045 
4046   if (!EXPR_P (expr) || tree_could_trap_p (expr))
4047     return true;
4048 
4049   n = TREE_OPERAND_LENGTH (expr);
4050   for (i = 0; i < n; i++)
4051     if (generic_expr_could_trap_p (TREE_OPERAND (expr, i)))
4052       return true;
4053 
4054   return false;
4055 }
4056 
4057 /*  Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
4058     into
4059 
4060     if (p)			if (p)
4061       t1 = a;			  a;
4062     else		or	else
4063       t1 = b;			  b;
4064     t1;
4065 
4066     The second form is used when *EXPR_P is of type void.
4067 
4068     PRE_P points to the list where side effects that must happen before
4069       *EXPR_P should be stored.  */
4070 
static enum gimplify_status
gimplify_cond_expr (tree *expr_p, gimple_seq *pre_p, fallback_t fallback)
{
  tree expr = *expr_p;
  tree type = TREE_TYPE (expr);
  location_t loc = EXPR_LOCATION (expr);
  tree tmp, arm1, arm2;
  enum gimplify_status ret;
  tree label_true, label_false, label_cont;
  bool have_then_clause_p, have_else_clause_p;
  gcond *cond_stmt;
  enum tree_code pred_code;
  gimple_seq seq = NULL;

  /* If this COND_EXPR has a value, copy the values into a temporary within
     the arms.  */
  if (!VOID_TYPE_P (type))
    {
      tree then_ = TREE_OPERAND (expr, 1), else_ = TREE_OPERAND (expr, 2);
      tree result;

      /* If either an rvalue is ok or we do not require an lvalue, create the
	 temporary.  But we cannot do that if the type is addressable.  */
      if (((fallback & fb_rvalue) || !(fallback & fb_lvalue))
	  && !TREE_ADDRESSABLE (type))
	{
	  if (gimplify_ctxp->allow_rhs_cond_expr
	      /* If either branch has side effects or could trap, it can't be
		 evaluated unconditionally.  */
	      && !TREE_SIDE_EFFECTS (then_)
	      && !generic_expr_could_trap_p (then_)
	      && !TREE_SIDE_EFFECTS (else_)
	      && !generic_expr_could_trap_p (else_))
	    return gimplify_pure_cond_expr (expr_p, pre_p);

	  tmp = create_tmp_var (type, "iftmp");
	  result = tmp;
	}

      /* Otherwise, only create and copy references to the values.  */
      else
	{
	  /* An lvalue is required: take the address of each arm and
	     dereference the chosen one afterwards.  */
	  type = build_pointer_type (type);

	  if (!VOID_TYPE_P (TREE_TYPE (then_)))
	    then_ = build_fold_addr_expr_loc (loc, then_);

	  if (!VOID_TYPE_P (TREE_TYPE (else_)))
	    else_ = build_fold_addr_expr_loc (loc, else_);

	  expr
	    = build3 (COND_EXPR, type, TREE_OPERAND (expr, 0), then_, else_);

	  tmp = create_tmp_var (type, "iftmp");
	  result = build_simple_mem_ref_loc (loc, tmp);
	}

      /* Build the new then clause, `tmp = then_;'.  But don't build the
	 assignment if the value is void; in C++ it can be if it's a throw.  */
      if (!VOID_TYPE_P (TREE_TYPE (then_)))
	TREE_OPERAND (expr, 1) = build2 (INIT_EXPR, type, tmp, then_);

      /* Similarly, build the new else clause, `tmp = else_;'.  */
      if (!VOID_TYPE_P (TREE_TYPE (else_)))
	TREE_OPERAND (expr, 2) = build2 (INIT_EXPR, type, tmp, else_);

      TREE_TYPE (expr) = void_type_node;
      recalculate_side_effects (expr);

      /* Move the COND_EXPR to the prequeue.  */
      gimplify_stmt (&expr, pre_p);

      *expr_p = result;
      return GS_ALL_DONE;
    }

  /* Remove any COMPOUND_EXPR so the following cases will be caught.  */
  STRIP_TYPE_NOPS (TREE_OPERAND (expr, 0));
  if (TREE_CODE (TREE_OPERAND (expr, 0)) == COMPOUND_EXPR)
    gimplify_compound_expr (&TREE_OPERAND (expr, 0), pre_p, true);

  /* Make sure the condition has BOOLEAN_TYPE.  */
  TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));

  /* Break apart && and || conditions.  */
  if (TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ANDIF_EXPR
      || TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ORIF_EXPR)
    {
      expr = shortcut_cond_expr (expr);

      /* shortcut_cond_expr returns the original expr unchanged when no
	 rewriting was needed; only re-gimplify if it produced new code.  */
      if (expr != *expr_p)
	{
	  *expr_p = expr;

	  /* We can't rely on gimplify_expr to re-gimplify the expanded
	     form properly, as cleanups might cause the target labels to be
	     wrapped in a TRY_FINALLY_EXPR.  To prevent that, we need to
	     set up a conditional context.  */
	  gimple_push_condition ();
	  gimplify_stmt (expr_p, &seq);
	  gimple_pop_condition (pre_p);
	  gimple_seq_add_seq (pre_p, seq);

	  return GS_ALL_DONE;
	}
    }

  /* Now do the normal gimplification.  */

  /* Gimplify condition.  */
  ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL,
		       is_gimple_condexpr_for_cond, fb_rvalue);
  if (ret == GS_ERROR)
    return GS_ERROR;
  gcc_assert (TREE_OPERAND (expr, 0) != NULL_TREE);

  gimple_push_condition ();

  /* When an arm is just 'goto label', reuse that label as the branch
     target instead of creating an artificial one and jumping to a jump.  */
  have_then_clause_p = have_else_clause_p = false;
  label_true = find_goto_label (TREE_OPERAND (expr, 1));
  if (label_true
      && DECL_CONTEXT (GOTO_DESTINATION (label_true)) == current_function_decl
      /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
	 have different locations, otherwise we end up with incorrect
	 location information on the branches.  */
      && (optimize
	  || !EXPR_HAS_LOCATION (expr)
	  || !rexpr_has_location (label_true)
	  || EXPR_LOCATION (expr) == rexpr_location (label_true)))
    {
      have_then_clause_p = true;
      label_true = GOTO_DESTINATION (label_true);
    }
  else
    label_true = create_artificial_label (UNKNOWN_LOCATION);
  label_false = find_goto_label (TREE_OPERAND (expr, 2));
  if (label_false
      && DECL_CONTEXT (GOTO_DESTINATION (label_false)) == current_function_decl
      /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
	 have different locations, otherwise we end up with incorrect
	 location information on the branches.  */
      && (optimize
	  || !EXPR_HAS_LOCATION (expr)
	  || !rexpr_has_location (label_false)
	  || EXPR_LOCATION (expr) == rexpr_location (label_false)))
    {
      have_else_clause_p = true;
      label_false = GOTO_DESTINATION (label_false);
    }
  else
    label_false = create_artificial_label (UNKNOWN_LOCATION);

  /* Decompose the boolean condition into a comparison code and two
     operands, then emit the GIMPLE_COND.  */
  gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr), &pred_code, &arm1,
				 &arm2);
  cond_stmt = gimple_build_cond (pred_code, arm1, arm2, label_true,
				 label_false);
  gimple_set_no_warning (cond_stmt, TREE_NO_WARNING (COND_EXPR_COND (expr)));
  gimplify_seq_add_stmt (&seq, cond_stmt);
  gimple_stmt_iterator gsi = gsi_last (seq);
  maybe_fold_stmt (&gsi);

  label_cont = NULL_TREE;
  if (!have_then_clause_p)
    {
      /* For if (...) {} else { code; } put label_true after
	 the else block.  */
      if (TREE_OPERAND (expr, 1) == NULL_TREE
	  && !have_else_clause_p
	  && TREE_OPERAND (expr, 2) != NULL_TREE)
	label_cont = label_true;
      else
	{
	  gimplify_seq_add_stmt (&seq, gimple_build_label (label_true));
	  have_then_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 1), &seq);
	  /* For if (...) { code; } else {} or
	     if (...) { code; } else goto label; or
	     if (...) { code; return; } else { ... }
	     label_cont isn't needed.  */
	  if (!have_else_clause_p
	      && TREE_OPERAND (expr, 2) != NULL_TREE
	      && gimple_seq_may_fallthru (seq))
	    {
	      gimple *g;
	      label_cont = create_artificial_label (UNKNOWN_LOCATION);

	      g = gimple_build_goto (label_cont);

	      /* GIMPLE_COND's are very low level; they have embedded
		 gotos.  This particular embedded goto should not be marked
		 with the location of the original COND_EXPR, as it would
		 correspond to the COND_EXPR's condition, not the ELSE or the
		 THEN arms.  To avoid marking it with the wrong location, flag
		 it as "no location".  */
	      gimple_set_do_not_emit_location (g);

	      gimplify_seq_add_stmt (&seq, g);
	    }
	}
    }
  if (!have_else_clause_p)
    {
      gimplify_seq_add_stmt (&seq, gimple_build_label (label_false));
      have_else_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 2), &seq);
    }
  if (label_cont)
    gimplify_seq_add_stmt (&seq, gimple_build_label (label_cont));

  gimple_pop_condition (pre_p);
  gimple_seq_add_seq (pre_p, seq);

  if (ret == GS_ERROR)
    ; /* Do nothing.  */
  else if (have_then_clause_p || have_else_clause_p)
    ret = GS_ALL_DONE;
  else
    {
      /* Both arms are empty; replace the COND_EXPR with its predicate.  */
      expr = TREE_OPERAND (expr, 0);
      gimplify_stmt (&expr, pre_p);
    }

  *expr_p = NULL;
  return ret;
}
4295 
4296 /* Prepare the node pointed to by EXPR_P, an is_gimple_addressable expression,
4297    to be marked addressable.
4298 
4299    We cannot rely on such an expression being directly markable if a temporary
4300    has been created by the gimplification.  In this case, we create another
4301    temporary and initialize it with a copy, which will become a store after we
4302    mark it addressable.  This can happen if the front-end passed us something
4303    that it could not mark addressable yet, like a Fortran pass-by-reference
4304    parameter (int) floatvar.  */
4305 
4306 static void
prepare_gimple_addressable(tree * expr_p,gimple_seq * seq_p)4307 prepare_gimple_addressable (tree *expr_p, gimple_seq *seq_p)
4308 {
4309   while (handled_component_p (*expr_p))
4310     expr_p = &TREE_OPERAND (*expr_p, 0);
4311   if (is_gimple_reg (*expr_p))
4312     {
4313       /* Do not allow an SSA name as the temporary.  */
4314       tree var = get_initialized_tmp_var (*expr_p, seq_p, NULL, false);
4315       DECL_GIMPLE_REG_P (var) = 0;
4316       *expr_p = var;
4317     }
4318 }
4319 
4320 /* A subroutine of gimplify_modify_expr.  Replace a MODIFY_EXPR with
4321    a call to __builtin_memcpy.  */
4322 
4323 static enum gimplify_status
gimplify_modify_expr_to_memcpy(tree * expr_p,tree size,bool want_value,gimple_seq * seq_p)4324 gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value,
4325     				gimple_seq *seq_p)
4326 {
4327   tree t, to, to_ptr, from, from_ptr;
4328   gcall *gs;
4329   location_t loc = EXPR_LOCATION (*expr_p);
4330 
4331   to = TREE_OPERAND (*expr_p, 0);
4332   from = TREE_OPERAND (*expr_p, 1);
4333 
4334   /* Mark the RHS addressable.  Beware that it may not be possible to do so
4335      directly if a temporary has been created by the gimplification.  */
4336   prepare_gimple_addressable (&from, seq_p);
4337 
4338   mark_addressable (from);
4339   from_ptr = build_fold_addr_expr_loc (loc, from);
4340   gimplify_arg (&from_ptr, seq_p, loc);
4341 
4342   mark_addressable (to);
4343   to_ptr = build_fold_addr_expr_loc (loc, to);
4344   gimplify_arg (&to_ptr, seq_p, loc);
4345 
4346   t = builtin_decl_implicit (BUILT_IN_MEMCPY);
4347 
4348   gs = gimple_build_call (t, 3, to_ptr, from_ptr, size);
4349 
4350   if (want_value)
4351     {
4352       /* tmp = memcpy() */
4353       t = create_tmp_var (TREE_TYPE (to_ptr));
4354       gimple_call_set_lhs (gs, t);
4355       gimplify_seq_add_stmt (seq_p, gs);
4356 
4357       *expr_p = build_simple_mem_ref (t);
4358       return GS_ALL_DONE;
4359     }
4360 
4361   gimplify_seq_add_stmt (seq_p, gs);
4362   *expr_p = NULL;
4363   return GS_ALL_DONE;
4364 }
4365 
4366 /* A subroutine of gimplify_modify_expr.  Replace a MODIFY_EXPR with
4367    a call to __builtin_memset.  In this case we know that the RHS is
4368    a CONSTRUCTOR with an empty element list.  */
4369 
4370 static enum gimplify_status
gimplify_modify_expr_to_memset(tree * expr_p,tree size,bool want_value,gimple_seq * seq_p)4371 gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value,
4372     				gimple_seq *seq_p)
4373 {
4374   tree t, from, to, to_ptr;
4375   gcall *gs;
4376   location_t loc = EXPR_LOCATION (*expr_p);
4377 
4378   /* Assert our assumptions, to abort instead of producing wrong code
4379      silently if they are not met.  Beware that the RHS CONSTRUCTOR might
4380      not be immediately exposed.  */
4381   from = TREE_OPERAND (*expr_p, 1);
4382   if (TREE_CODE (from) == WITH_SIZE_EXPR)
4383     from = TREE_OPERAND (from, 0);
4384 
4385   gcc_assert (TREE_CODE (from) == CONSTRUCTOR
4386 	      && vec_safe_is_empty (CONSTRUCTOR_ELTS (from)));
4387 
4388   /* Now proceed.  */
4389   to = TREE_OPERAND (*expr_p, 0);
4390 
4391   to_ptr = build_fold_addr_expr_loc (loc, to);
4392   gimplify_arg (&to_ptr, seq_p, loc);
4393   t = builtin_decl_implicit (BUILT_IN_MEMSET);
4394 
4395   gs = gimple_build_call (t, 3, to_ptr, integer_zero_node, size);
4396 
4397   if (want_value)
4398     {
4399       /* tmp = memset() */
4400       t = create_tmp_var (TREE_TYPE (to_ptr));
4401       gimple_call_set_lhs (gs, t);
4402       gimplify_seq_add_stmt (seq_p, gs);
4403 
4404       *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t);
4405       return GS_ALL_DONE;
4406     }
4407 
4408   gimplify_seq_add_stmt (seq_p, gs);
4409   *expr_p = NULL;
4410   return GS_ALL_DONE;
4411 }
4412 
4413 /* A subroutine of gimplify_init_ctor_preeval.  Called via walk_tree,
4414    determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
4415    assignment.  Return non-null if we detect a potential overlap.  */
4416 
struct gimplify_init_ctor_preeval_data
{
  /* The base decl of the lhs object.  May be NULL, in which case we
     have to assume the lhs is indirect (and so any aliased indirect
     reference in the constructor may overlap it).  */
  tree lhs_base_decl;

  /* The alias set of the lhs object, used to test for conflicts with
     indirect constructor components.  */
  alias_set_type lhs_alias_set;
};
4426 
4427 static tree
gimplify_init_ctor_preeval_1(tree * tp,int * walk_subtrees,void * xdata)4428 gimplify_init_ctor_preeval_1 (tree *tp, int *walk_subtrees, void *xdata)
4429 {
4430   struct gimplify_init_ctor_preeval_data *data
4431     = (struct gimplify_init_ctor_preeval_data *) xdata;
4432   tree t = *tp;
4433 
4434   /* If we find the base object, obviously we have overlap.  */
4435   if (data->lhs_base_decl == t)
4436     return t;
4437 
4438   /* If the constructor component is indirect, determine if we have a
4439      potential overlap with the lhs.  The only bits of information we
4440      have to go on at this point are addressability and alias sets.  */
4441   if ((INDIRECT_REF_P (t)
4442        || TREE_CODE (t) == MEM_REF)
4443       && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
4444       && alias_sets_conflict_p (data->lhs_alias_set, get_alias_set (t)))
4445     return t;
4446 
4447   /* If the constructor component is a call, determine if it can hide a
4448      potential overlap with the lhs through an INDIRECT_REF like above.
4449      ??? Ugh - this is completely broken.  In fact this whole analysis
4450      doesn't look conservative.  */
4451   if (TREE_CODE (t) == CALL_EXPR)
4452     {
4453       tree type, fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t)));
4454 
4455       for (type = TYPE_ARG_TYPES (fntype); type; type = TREE_CHAIN (type))
4456 	if (POINTER_TYPE_P (TREE_VALUE (type))
4457 	    && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
4458 	    && alias_sets_conflict_p (data->lhs_alias_set,
4459 				      get_alias_set
4460 				        (TREE_TYPE (TREE_VALUE (type)))))
4461 	  return t;
4462     }
4463 
4464   if (IS_TYPE_OR_DECL_P (t))
4465     *walk_subtrees = 0;
4466   return NULL;
4467 }
4468 
/* A subroutine of gimplify_init_constructor.  Pre-evaluate EXPR,
   force values that overlap with the lhs (as described by *DATA)
   into temporaries.

   *EXPR_P is a constructor element value (possibly itself a nested
   CONSTRUCTOR, which is handled recursively).  Gimplified statements
   are appended to PRE_P/POST_P.  On gimplification failure *EXPR_P is
   set to NULL_TREE; callers treat a NULL element value as "skip".  */

static void
gimplify_init_ctor_preeval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
			    struct gimplify_init_ctor_preeval_data *data)
{
  enum gimplify_status one;

  /* If the value is constant, then there's nothing to pre-evaluate.  */
  if (TREE_CONSTANT (*expr_p))
    {
      /* Ensure it does not have side effects, it might contain a reference to
	 the object we're initializing.  */
      gcc_assert (!TREE_SIDE_EFFECTS (*expr_p));
      return;
    }

  /* If the type has non-trivial constructors, we can't pre-evaluate.  */
  if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p)))
    return;

  /* Recurse for nested constructors.  */
  if (TREE_CODE (*expr_p) == CONSTRUCTOR)
    {
      unsigned HOST_WIDE_INT ix;
      constructor_elt *ce;
      vec<constructor_elt, va_gc> *v = CONSTRUCTOR_ELTS (*expr_p);

      FOR_EACH_VEC_SAFE_ELT (v, ix, ce)
	gimplify_init_ctor_preeval (&ce->value, pre_p, post_p, data);

      return;
    }

  /* If this is a variable sized type, we must remember the size.  */
  maybe_with_size_expr (expr_p);

  /* Gimplify the constructor element to something appropriate for the rhs
     of a MODIFY_EXPR.  Given that we know the LHS is an aggregate, we know
     the gimplifier will consider this a store to memory.  Doing this
     gimplification now means that we won't have to deal with complicated
     language-specific trees, nor trees like SAVE_EXPR that can induce
     exponential search behavior.  */
  one = gimplify_expr (expr_p, pre_p, post_p, is_gimple_mem_rhs, fb_rvalue);
  if (one == GS_ERROR)
    {
      /* Signal the failure to the caller by clearing the element.  */
      *expr_p = NULL;
      return;
    }

  /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
     with the lhs, since "a = { .x=a }" doesn't make sense.  This will
     always be true for all scalars, since is_gimple_mem_rhs insists on a
     temporary variable for them.  */
  if (DECL_P (*expr_p))
    return;

  /* If this is of variable size, we have no choice but to assume it doesn't
     overlap since we can't make a temporary for it.  */
  if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p))) != INTEGER_CST)
    return;

  /* Otherwise, we must search for overlap ...  */
  if (!walk_tree (expr_p, gimplify_init_ctor_preeval_1, data, NULL))
    return;

  /* ... and if found, force the value into a temporary.  */
  *expr_p = get_formal_tmp_var (*expr_p, pre_p);
}
4540 
4541 /* A subroutine of gimplify_init_ctor_eval.  Create a loop for
4542    a RANGE_EXPR in a CONSTRUCTOR for an array.
4543 
4544       var = lower;
4545     loop_entry:
4546       object[var] = value;
4547       if (var == upper)
4548 	goto loop_exit;
4549       var = var + 1;
4550       goto loop_entry;
4551     loop_exit:
4552 
4553    We increment var _after_ the loop exit check because we might otherwise
4554    fail if upper == TYPE_MAX_VALUE (type for upper).
4555 
4556    Note that we never have to deal with SAVE_EXPRs here, because this has
4557    already been taken care of for us, in gimplify_init_ctor_preeval().  */
4558 
4559 static void gimplify_init_ctor_eval (tree, vec<constructor_elt, va_gc> *,
4560 				     gimple_seq *, bool);
4561 
/* Emit into PRE_P a loop that stores VALUE into every element of the
   array OBJECT (element type ARRAY_ELT_TYPE) with index in the range
   [LOWER, UPPER].  CLEARED is forwarded to gimplify_init_ctor_eval
   when VALUE is itself a CONSTRUCTOR.  The shape of the generated
   loop is described in the block comment above.  */

static void
gimplify_init_ctor_eval_range (tree object, tree lower, tree upper,
			       tree value, tree array_elt_type,
			       gimple_seq *pre_p, bool cleared)
{
  tree loop_entry_label, loop_exit_label, fall_thru_label;
  tree var, var_type, cref, tmp;

  loop_entry_label = create_artificial_label (UNKNOWN_LOCATION);
  loop_exit_label = create_artificial_label (UNKNOWN_LOCATION);
  fall_thru_label = create_artificial_label (UNKNOWN_LOCATION);

  /* Create and initialize the index variable.  Its type is taken from
     the upper bound so the EQ_EXPR exit test below is type-correct.  */
  var_type = TREE_TYPE (upper);
  var = create_tmp_var (var_type);
  gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, lower));

  /* Add the loop entry label.  */
  gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_entry_label));

  /* Build the reference.  */
  cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
		 var, NULL_TREE, NULL_TREE);

  /* If we are a constructor, just call gimplify_init_ctor_eval to do
     the store.  Otherwise just assign value to the reference.  */

  if (TREE_CODE (value) == CONSTRUCTOR)
    /* NB we might have to call ourself recursively through
       gimplify_init_ctor_eval if the value is a constructor.  */
    gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
			     pre_p, cleared);
  else
    {
      /* On gimplification failure no store is emitted; the element is
	 simply left without an explicit assignment.  */
      if (gimplify_expr (&value, pre_p, NULL, is_gimple_val, fb_rvalue)
	  != GS_ERROR)
	gimplify_seq_add_stmt (pre_p, gimple_build_assign (cref, value));
    }

  /* We exit the loop when the index var is equal to the upper bound.  */
  gimplify_seq_add_stmt (pre_p,
			 gimple_build_cond (EQ_EXPR, var, upper,
					    loop_exit_label, fall_thru_label));

  gimplify_seq_add_stmt (pre_p, gimple_build_label (fall_thru_label));

  /* Otherwise, increment the index var...  */
  tmp = build2 (PLUS_EXPR, var_type, var,
		fold_convert (var_type, integer_one_node));
  gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, tmp));

  /* ...and jump back to the loop entry.  */
  gimplify_seq_add_stmt (pre_p, gimple_build_goto (loop_entry_label));

  /* Add the loop exit label.  */
  gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_exit_label));
}
4619 
4620 /* Return true if FDECL is accessing a field that is zero sized.  */
4621 
4622 static bool
zero_sized_field_decl(const_tree fdecl)4623 zero_sized_field_decl (const_tree fdecl)
4624 {
4625   if (TREE_CODE (fdecl) == FIELD_DECL && DECL_SIZE (fdecl)
4626       && integer_zerop (DECL_SIZE (fdecl)))
4627     return true;
4628   return false;
4629 }
4630 
4631 /* Return true if TYPE is zero sized.  */
4632 
4633 static bool
zero_sized_type(const_tree type)4634 zero_sized_type (const_tree type)
4635 {
4636   if (AGGREGATE_TYPE_P (type) && TYPE_SIZE (type)
4637       && integer_zerop (TYPE_SIZE (type)))
4638     return true;
4639   return false;
4640 }
4641 
/* A subroutine of gimplify_init_constructor.  Generate individual
   MODIFY_EXPRs for a CONSTRUCTOR.  OBJECT is the LHS against which the
   assignments should happen.  ELTS is the CONSTRUCTOR_ELTS of the
   CONSTRUCTOR.  CLEARED is true if the entire LHS object has been
   zeroed first.  Generated statements are appended to PRE_P.  */

static void
gimplify_init_ctor_eval (tree object, vec<constructor_elt, va_gc> *elts,
			 gimple_seq *pre_p, bool cleared)
{
  tree array_elt_type = NULL;
  unsigned HOST_WIDE_INT ix;
  tree purpose, value;

  /* For arrays, each element store is an ARRAY_REF of this type.  */
  if (TREE_CODE (TREE_TYPE (object)) == ARRAY_TYPE)
    array_elt_type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object)));

  FOR_EACH_CONSTRUCTOR_ELT (elts, ix, purpose, value)
    {
      tree cref;

      /* NULL values are created above for gimplification errors.  */
      if (value == NULL)
	continue;

      /* A zero store is redundant if the object was already cleared.  */
      if (cleared && initializer_zerop (value))
	continue;

      /* ??? Here's to hoping the front end fills in all of the indices,
	 so we don't have to figure out what's missing ourselves.  */
      gcc_assert (purpose);

      /* Skip zero-sized fields, unless value has side-effects.  This can
	 happen with calls to functions returning a zero-sized type, which
	 we shouldn't discard.  As a number of downstream passes don't
	 expect sets of zero-sized fields, we rely on the gimplification of
	 the MODIFY_EXPR we make below to drop the assignment statement.  */
      if (! TREE_SIDE_EFFECTS (value) && zero_sized_field_decl (purpose))
	continue;

      /* If we have a RANGE_EXPR, we have to build a loop to assign the
	 whole range.  */
      if (TREE_CODE (purpose) == RANGE_EXPR)
	{
	  tree lower = TREE_OPERAND (purpose, 0);
	  tree upper = TREE_OPERAND (purpose, 1);

	  /* If the lower bound is equal to upper, just treat it as if
	     upper was the index.  */
	  if (simple_cst_equal (lower, upper))
	    purpose = upper;
	  else
	    {
	      gimplify_init_ctor_eval_range (object, lower, upper, value,
					     array_elt_type, pre_p, cleared);
	      continue;
	    }
	}

      if (array_elt_type)
	{
	  /* Do not use bitsizetype for ARRAY_REF indices.  */
	  if (TYPE_DOMAIN (TREE_TYPE (object)))
	    purpose
	      = fold_convert (TREE_TYPE (TYPE_DOMAIN (TREE_TYPE (object))),
			      purpose);
	  cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
			 purpose, NULL_TREE, NULL_TREE);
	}
      else
	{
	  gcc_assert (TREE_CODE (purpose) == FIELD_DECL);
	  cref = build3 (COMPONENT_REF, TREE_TYPE (purpose),
			 unshare_expr (object), purpose, NULL_TREE);
	}

      /* Recurse for nested constructors, except vector constructors,
	 which are assigned whole.  */
      if (TREE_CODE (value) == CONSTRUCTOR
	  && TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE)
	gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
				 pre_p, cleared);
      else
	{
	  tree init = build2 (INIT_EXPR, TREE_TYPE (cref), cref, value);
	  gimplify_and_add (init, pre_p);
	  /* The INIT_EXPR node is not referenced after gimplification,
	     so return it to the GC allocator eagerly.  */
	  ggc_free (init);
	}
    }
}
4730 
4731 /* Return the appropriate RHS predicate for this LHS.  */
4732 
4733 gimple_predicate
rhs_predicate_for(tree lhs)4734 rhs_predicate_for (tree lhs)
4735 {
4736   if (is_gimple_reg (lhs))
4737     return is_gimple_reg_rhs_or_call;
4738   else
4739     return is_gimple_mem_rhs_or_call;
4740 }
4741 
4742 /* Return the initial guess for an appropriate RHS predicate for this LHS,
4743    before the LHS has been gimplified.  */
4744 
4745 static gimple_predicate
initial_rhs_predicate_for(tree lhs)4746 initial_rhs_predicate_for (tree lhs)
4747 {
4748   if (is_gimple_reg_type (TREE_TYPE (lhs)))
4749     return is_gimple_reg_rhs_or_call;
4750   else
4751     return is_gimple_mem_rhs_or_call;
4752 }
4753 
/* Gimplify a C99 compound literal expression.  This just means adding
   the DECL_EXPR before the current statement and using its anonymous
   decl instead.

   GIMPLE_TEST_F is the predicate the caller requires the result to
   satisfy, and FALLBACK describes what kind of value (rvalue and/or
   lvalue) the caller can accept.  Statements go to PRE_P.  */

static enum gimplify_status
gimplify_compound_literal_expr (tree *expr_p, gimple_seq *pre_p,
				bool (*gimple_test_f) (tree),
				fallback_t fallback)
{
  tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (*expr_p);
  tree decl = DECL_EXPR_DECL (decl_s);
  tree init = DECL_INITIAL (decl);
  /* Mark the decl as addressable if the compound literal
     expression is addressable now, otherwise it is marked too late
     after we gimplify the initialization expression.  */
  if (TREE_ADDRESSABLE (*expr_p))
    TREE_ADDRESSABLE (decl) = 1;
  /* Otherwise, if we don't need an lvalue and have a literal directly
     substitute it.  Check if it matches the gimple predicate, as
     otherwise we'd generate a new temporary, and we can as well just
     use the decl we already have.  */
  else if (!TREE_ADDRESSABLE (decl)
	   && !TREE_THIS_VOLATILE (decl)
	   && init
	   && (fallback & fb_lvalue) == 0
	   && gimple_test_f (init))
    {
      *expr_p = init;
      return GS_OK;
    }

  /* Preliminarily mark non-addressed complex variables as eligible
     for promotion to gimple registers.  We'll transform their uses
     as we find them.  */
  if ((TREE_CODE (TREE_TYPE (decl)) == COMPLEX_TYPE
       || TREE_CODE (TREE_TYPE (decl)) == VECTOR_TYPE)
      && !TREE_THIS_VOLATILE (decl)
      && !needs_to_live_in_memory (decl))
    DECL_GIMPLE_REG_P (decl) = 1;

  /* If the decl is not addressable, then it is being used in some
     expression or on the right hand side of a statement, and it can
     be put into a readonly data section.  */
  if (!TREE_ADDRESSABLE (decl) && (fallback & fb_lvalue) == 0)
    TREE_READONLY (decl) = 1;

  /* This decl isn't mentioned in the enclosing block, so add it to the
     list of temps.  FIXME it seems a bit of a kludge to say that
     anonymous artificial vars aren't pushed, but everything else is.  */
  if (DECL_NAME (decl) == NULL_TREE && !DECL_SEEN_IN_BIND_EXPR_P (decl))
    gimple_add_tmp_var (decl);

  /* Emit the DECL_EXPR (which gimplifies the initializer) and replace
     the compound literal with its decl.  */
  gimplify_and_add (decl_s, pre_p);
  *expr_p = decl;
  return GS_OK;
}
4810 
/* Optimize embedded COMPOUND_LITERAL_EXPRs within a CONSTRUCTOR,
   return a new CONSTRUCTOR if something changed.  ORIG_CTOR itself is
   never modified; a copy is made lazily, the first time a replacement
   value is actually found (copy-on-write).  */

static tree
optimize_compound_literals_in_ctor (tree orig_ctor)
{
  tree ctor = orig_ctor;
  vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (ctor);
  unsigned int idx, num = vec_safe_length (elts);

  for (idx = 0; idx < num; idx++)
    {
      tree value = (*elts)[idx].value;
      tree newval = value;
      /* Recurse into nested constructors.  */
      if (TREE_CODE (value) == CONSTRUCTOR)
	newval = optimize_compound_literals_in_ctor (value);
      else if (TREE_CODE (value) == COMPOUND_LITERAL_EXPR)
	{
	  tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (value);
	  tree decl = DECL_EXPR_DECL (decl_s);
	  tree init = DECL_INITIAL (decl);

	  /* Replace the compound literal with (the optimized form of)
	     its constructor initializer, but only when neither the
	     literal nor its decl has its address taken.  */
	  if (!TREE_ADDRESSABLE (value)
	      && !TREE_ADDRESSABLE (decl)
	      && init
	      && TREE_CODE (init) == CONSTRUCTOR)
	    newval = optimize_compound_literals_in_ctor (init);
	}
      if (newval == value)
	continue;

      /* First change found: make a private copy of the constructor and
	 its element vector, so ORIG_CTOR stays untouched.  */
      if (ctor == orig_ctor)
	{
	  ctor = copy_node (orig_ctor);
	  CONSTRUCTOR_ELTS (ctor) = vec_safe_copy (elts);
	  elts = CONSTRUCTOR_ELTS (ctor);
	}
      (*elts)[idx].value = newval;
    }
  return ctor;
}
4852 
/* A subroutine of gimplify_modify_expr.  Break out elements of a
   CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.

   Note that we still need to clear any elements that don't have explicit
   initializers, so if not all elements are initialized we keep the
   original MODIFY_EXPR, we just remove all of the constructor elements.

   If NOTIFY_TEMP_CREATION is true, do not gimplify, just return
   GS_ERROR if we would have to create a temporary when gimplifying
   this constructor.  Otherwise, return GS_OK.

   If NOTIFY_TEMP_CREATION is false, just do the gimplification.

   WANT_VALUE is true when the caller uses the value of the assignment;
   in that case *EXPR_P is set to the initialized object on success.  */

static enum gimplify_status
gimplify_init_constructor (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
			   bool want_value, bool notify_temp_creation)
{
  tree object, ctor, type;
  enum gimplify_status ret;
  vec<constructor_elt, va_gc> *elts;

  gcc_assert (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == CONSTRUCTOR);

  /* In probing mode (notify_temp_creation) the LHS is left alone.  */
  if (!notify_temp_creation)
    {
      ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
			   is_gimple_lvalue, fb_lvalue);
      if (ret == GS_ERROR)
	return ret;
    }

  object = TREE_OPERAND (*expr_p, 0);
  ctor = TREE_OPERAND (*expr_p, 1)
    = optimize_compound_literals_in_ctor (TREE_OPERAND (*expr_p, 1));
  type = TREE_TYPE (ctor);
  elts = CONSTRUCTOR_ELTS (ctor);
  ret = GS_ALL_DONE;

  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
    case ARRAY_TYPE:
      {
	struct gimplify_init_ctor_preeval_data preeval_data;
	HOST_WIDE_INT num_ctor_elements, num_nonzero_elements;
	HOST_WIDE_INT num_unique_nonzero_elements;
	bool cleared, complete_p, valid_const_initializer;
	/* Use readonly data for initializers of this or smaller size
	   regardless of the num_nonzero_elements / num_unique_nonzero_elements
	   ratio.  */
	const HOST_WIDE_INT min_unique_size = 64;
	/* If num_nonzero_elements / num_unique_nonzero_elements ratio
	   is smaller than this, use readonly data.  */
	const int unique_nonzero_ratio = 8;

	/* Aggregate types must lower constructors to initialization of
	   individual elements.  The exception is that a CONSTRUCTOR node
	   with no elements indicates zero-initialization of the whole.  */
	if (vec_safe_is_empty (elts))
	  {
	    if (notify_temp_creation)
	      return GS_OK;
	    break;
	  }

	/* Fetch information about the constructor to direct later processing.
	   We might want to make static versions of it in various cases, and
	   can only do so if it known to be a valid constant initializer.  */
	valid_const_initializer
	  = categorize_ctor_elements (ctor, &num_nonzero_elements,
				      &num_unique_nonzero_elements,
				      &num_ctor_elements, &complete_p);

	/* If a const aggregate variable is being initialized, then it
	   should never be a lose to promote the variable to be static.  */
	if (valid_const_initializer
	    && num_nonzero_elements > 1
	    && TREE_READONLY (object)
	    && VAR_P (object)
	    && !DECL_REGISTER (object)
	    && (flag_merge_constants >= 2 || !TREE_ADDRESSABLE (object))
	    /* For ctors that have many repeated nonzero elements
	       represented through RANGE_EXPRs, prefer initializing
	       those through runtime loops over copies of large amounts
	       of data from readonly data section.  */
	    && (num_unique_nonzero_elements
		> num_nonzero_elements / unique_nonzero_ratio
		|| ((unsigned HOST_WIDE_INT) int_size_in_bytes (type)
		    <= (unsigned HOST_WIDE_INT) min_unique_size)))
	  {
	    if (notify_temp_creation)
	      return GS_ERROR;
	    DECL_INITIAL (object) = ctor;
	    TREE_STATIC (object) = 1;
	    if (!DECL_NAME (object))
	      DECL_NAME (object) = create_tmp_var_name ("C");
	    walk_tree (&DECL_INITIAL (object), force_labels_r, NULL, NULL);

	    /* ??? C++ doesn't automatically append a .<number> to the
	       assembler name, and even when it does, it looks at FE private
	       data structures to figure out what that number should be,
	       which are not set for this variable.  I suppose this is
	       important for local statics for inline functions, which aren't
	       "local" in the object file sense.  So in order to get a unique
	       TU-local symbol, we must invoke the lhd version now.  */
	    lhd_set_decl_assembler_name (object);

	    *expr_p = NULL_TREE;
	    break;
	  }

	/* If there are "lots" of initialized elements, even discounting
	   those that are not address constants (and thus *must* be
	   computed at runtime), then partition the constructor into
	   constant and non-constant parts.  Block copy the constant
	   parts in, then generate code for the non-constant parts.  */
	/* TODO.  There's code in cp/typeck.c to do this.  */

	if (int_size_in_bytes (TREE_TYPE (ctor)) < 0)
	  /* store_constructor will ignore the clearing of variable-sized
	     objects.  Initializers for such objects must explicitly set
	     every field that needs to be set.  */
	  cleared = false;
	else if (!complete_p)
	  /* If the constructor isn't complete, clear the whole object
	     beforehand, unless CONSTRUCTOR_NO_CLEARING is set on it.

	     ??? This ought not to be needed.  For any element not present
	     in the initializer, we should simply set them to zero.  Except
	     we'd need to *find* the elements that are not present, and that
	     requires trickery to avoid quadratic compile-time behavior in
	     large cases or excessive memory use in small cases.  */
	  cleared = !CONSTRUCTOR_NO_CLEARING (ctor);
	else if (num_ctor_elements - num_nonzero_elements
		 > CLEAR_RATIO (optimize_function_for_speed_p (cfun))
		 && num_nonzero_elements < num_ctor_elements / 4)
	  /* If there are "lots" of zeros, it's more efficient to clear
	     the memory and then set the nonzero elements.  */
	  cleared = true;
	else
	  cleared = false;

	/* If there are "lots" of initialized elements, and all of them
	   are valid address constants, then the entire initializer can
	   be dropped to memory, and then memcpy'd out.  Don't do this
	   for sparse arrays, though, as it's more efficient to follow
	   the standard CONSTRUCTOR behavior of memset followed by
	   individual element initialization.  Also don't do this for small
	   all-zero initializers (which aren't big enough to merit
	   clearing), and don't try to make bitwise copies of
	   TREE_ADDRESSABLE types.  */

	if (valid_const_initializer
	    && !(cleared || num_nonzero_elements == 0)
	    && !TREE_ADDRESSABLE (type))
	  {
	    HOST_WIDE_INT size = int_size_in_bytes (type);
	    unsigned int align;

	    /* ??? We can still get unbounded array types, at least
	       from the C++ front end.  This seems wrong, but attempt
	       to work around it for now.  */
	    if (size < 0)
	      {
		size = int_size_in_bytes (TREE_TYPE (object));
		if (size >= 0)
		  TREE_TYPE (ctor) = type = TREE_TYPE (object);
	      }

	    /* Find the maximum alignment we can assume for the object.  */
	    /* ??? Make use of DECL_OFFSET_ALIGN.  */
	    if (DECL_P (object))
	      align = DECL_ALIGN (object);
	    else
	      align = TYPE_ALIGN (type);

	    /* Do a block move either if the size is so small as to make
	       each individual move a sub-unit move on average, or if it
	       is so large as to make individual moves inefficient.  */
	    if (size > 0
		&& num_nonzero_elements > 1
		/* For ctors that have many repeated nonzero elements
		   represented through RANGE_EXPRs, prefer initializing
		   those through runtime loops over copies of large amounts
		   of data from readonly data section.  */
		&& (num_unique_nonzero_elements
		    > num_nonzero_elements / unique_nonzero_ratio
		    || size <= min_unique_size)
		&& (size < num_nonzero_elements
		    || !can_move_by_pieces (size, align)))
	      {
		if (notify_temp_creation)
		  return GS_ERROR;

		walk_tree (&ctor, force_labels_r, NULL, NULL);
		ctor = tree_output_constant_def (ctor);
		if (!useless_type_conversion_p (type, TREE_TYPE (ctor)))
		  ctor = build1 (VIEW_CONVERT_EXPR, type, ctor);
		TREE_OPERAND (*expr_p, 1) = ctor;

		/* This is no longer an assignment of a CONSTRUCTOR, but
		   we still may have processing to do on the LHS.  So
		   pretend we didn't do anything here to let that happen.  */
		return GS_UNHANDLED;
	      }
	  }

	/* If the target is volatile, we have non-zero elements and more than
	   one field to assign, initialize the target from a temporary.  */
	if (TREE_THIS_VOLATILE (object)
	    && !TREE_ADDRESSABLE (type)
	    && (num_nonzero_elements > 0 || !cleared)
	    && vec_safe_length (elts) > 1)
	  {
	    tree temp = create_tmp_var (TYPE_MAIN_VARIANT (type));
	    TREE_OPERAND (*expr_p, 0) = temp;
	    *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
			      *expr_p,
			      build2 (MODIFY_EXPR, void_type_node,
				      object, temp));
	    return GS_OK;
	  }

	if (notify_temp_creation)
	  return GS_OK;

	/* If there are nonzero elements and if needed, pre-evaluate to capture
	   elements overlapping with the lhs into temporaries.  We must do this
	   before clearing to fetch the values before they are zeroed-out.  */
	if (num_nonzero_elements > 0 && TREE_CODE (*expr_p) != INIT_EXPR)
	  {
	    preeval_data.lhs_base_decl = get_base_address (object);
	    if (!DECL_P (preeval_data.lhs_base_decl))
	      preeval_data.lhs_base_decl = NULL;
	    preeval_data.lhs_alias_set = get_alias_set (object);

	    gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p, 1),
					pre_p, post_p, &preeval_data);
	  }

	bool ctor_has_side_effects_p
	  = TREE_SIDE_EFFECTS (TREE_OPERAND (*expr_p, 1));

	if (cleared)
	  {
	    /* Zap the CONSTRUCTOR element list, which simplifies this case.
	       Note that we still have to gimplify, in order to handle the
	       case of variable sized types.  Avoid shared tree structures.  */
	    CONSTRUCTOR_ELTS (ctor) = NULL;
	    TREE_SIDE_EFFECTS (ctor) = 0;
	    object = unshare_expr (object);
	    gimplify_stmt (expr_p, pre_p);
	  }

	/* If we have not block cleared the object, or if there are nonzero
	   elements in the constructor, or if the constructor has side effects,
	   add assignments to the individual scalar fields of the object.  */
	if (!cleared
	    || num_nonzero_elements > 0
	    || ctor_has_side_effects_p)
	  gimplify_init_ctor_eval (object, elts, pre_p, cleared);

	*expr_p = NULL_TREE;
      }
      break;

    case COMPLEX_TYPE:
      {
	tree r, i;

	if (notify_temp_creation)
	  return GS_OK;

	/* Extract the real and imaginary parts out of the ctor.  */
	gcc_assert (elts->length () == 2);
	r = (*elts)[0].value;
	i = (*elts)[1].value;
	if (r == NULL || i == NULL)
	  {
	    /* Missing parts (dropped by gimplification errors) are
	       replaced by zero.  */
	    tree zero = build_zero_cst (TREE_TYPE (type));
	    if (r == NULL)
	      r = zero;
	    if (i == NULL)
	      i = zero;
	  }

	/* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
	   represent creation of a complex value.  */
	if (TREE_CONSTANT (r) && TREE_CONSTANT (i))
	  {
	    ctor = build_complex (type, r, i);
	    TREE_OPERAND (*expr_p, 1) = ctor;
	  }
	else
	  {
	    ctor = build2 (COMPLEX_EXPR, type, r, i);
	    TREE_OPERAND (*expr_p, 1) = ctor;
	    ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1),
				 pre_p,
				 post_p,
				 rhs_predicate_for (TREE_OPERAND (*expr_p, 0)),
				 fb_rvalue);
	  }
      }
      break;

    case VECTOR_TYPE:
      {
	unsigned HOST_WIDE_INT ix;
	constructor_elt *ce;

	if (notify_temp_creation)
	  return GS_OK;

	/* Go ahead and simplify constant constructors to VECTOR_CST.  */
	if (TREE_CONSTANT (ctor))
	  {
	    bool constant_p = true;
	    tree value;

	    /* Even when ctor is constant, it might contain non-*_CST
	       elements, such as addresses or trapping values like
	       1.0/0.0 - 1.0/0.0.  Such expressions don't belong
	       in VECTOR_CST nodes.  */
	    FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
	      if (!CONSTANT_CLASS_P (value))
		{
		  constant_p = false;
		  break;
		}

	    if (constant_p)
	      {
		TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts);
		break;
	      }

	    TREE_CONSTANT (ctor) = 0;
	  }

	/* Vector types use CONSTRUCTOR all the way through gimple
	   compilation as a general initializer.  */
	FOR_EACH_VEC_SAFE_ELT (elts, ix, ce)
	  {
	    enum gimplify_status tret;
	    tret = gimplify_expr (&ce->value, pre_p, post_p, is_gimple_val,
				  fb_rvalue);
	    if (tret == GS_ERROR)
	      ret = GS_ERROR;
	    else if (TREE_STATIC (ctor)
		     && !initializer_constant_valid_p (ce->value,
						       TREE_TYPE (ce->value)))
	      TREE_STATIC (ctor) = 0;
	  }
	if (!is_gimple_reg (TREE_OPERAND (*expr_p, 0)))
	  TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (ctor, pre_p);
      }
      break;

    default:
      /* So how did we get a CONSTRUCTOR for a scalar type?  */
      gcc_unreachable ();
    }

  if (ret == GS_ERROR)
    return GS_ERROR;
  /* If we have gimplified both sides of the initializer but have
     not emitted an assignment, do so now.  */
  if (*expr_p)
    {
      tree lhs = TREE_OPERAND (*expr_p, 0);
      tree rhs = TREE_OPERAND (*expr_p, 1);
      if (want_value && object == lhs)
	lhs = unshare_expr (lhs);
      gassign *init = gimple_build_assign (lhs, rhs);
      gimplify_seq_add_stmt (pre_p, init);
    }
  if (want_value)
    {
      *expr_p = object;
      return GS_OK;
    }
  else
    {
      *expr_p = NULL;
      return GS_ALL_DONE;
    }
}
5243 
5244 /* Given a pointer value OP0, return a simplified version of an
5245    indirection through OP0, or NULL_TREE if no simplification is
5246    possible.  This may only be applied to a rhs of an expression.
5247    Note that the resulting type may be different from the type pointed
5248    to in the sense that it is still compatible from the langhooks
5249    point of view. */
5250 
5251 static tree
gimple_fold_indirect_ref_rhs(tree t)5252 gimple_fold_indirect_ref_rhs (tree t)
5253 {
5254   return gimple_fold_indirect_ref (t);
5255 }
5256 
/* Subroutine of gimplify_modify_expr to do simplifications of
   MODIFY_EXPRs based on the code of the RHS.  We loop for as long as
   something changes.

   EXPR_P points to the whole MODIFY_EXPR/INIT_EXPR; FROM_P and TO_P
   point to its RHS and LHS operands.  PRE_P and POST_P receive side
   effects that must happen before/after *EXPR_P.  WANT_VALUE is true
   iff the value of the assignment is used in an enclosing expression.
   Returns GS_UNHANDLED when no simplification applied.  */

static enum gimplify_status
gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p,
			  gimple_seq *pre_p, gimple_seq *post_p,
			  bool want_value)
{
  enum gimplify_status ret = GS_UNHANDLED;
  bool changed;

  do
    {
      changed = false;
      switch (TREE_CODE (*from_p))
	{
	case VAR_DECL:
	  /* If we're assigning from a read-only variable initialized with
	     a constructor, do the direct assignment from the constructor,
	     but only if neither source nor target are volatile since this
	     latter assignment might end up being done on a per-field basis.  */
	  if (DECL_INITIAL (*from_p)
	      && TREE_READONLY (*from_p)
	      && !TREE_THIS_VOLATILE (*from_p)
	      && !TREE_THIS_VOLATILE (*to_p)
	      && TREE_CODE (DECL_INITIAL (*from_p)) == CONSTRUCTOR)
	    {
	      tree old_from = *from_p;
	      enum gimplify_status subret;

	      /* Move the constructor into the RHS.  */
	      *from_p = unshare_expr (DECL_INITIAL (*from_p));

	      /* Let's see if gimplify_init_constructor will need to put
		 it in memory.  */
	      subret = gimplify_init_constructor (expr_p, NULL, NULL,
						  false, true);
	      if (subret == GS_ERROR)
		{
		  /* If so, revert the change.  */
		  *from_p = old_from;
		}
	      else
		{
		  ret = GS_OK;
		  changed = true;
		}
	    }
	  break;
	case INDIRECT_REF:
	  {
	    /* If we have code like

	     *(const A*)(A*)&x

	     where the type of "x" is a (possibly cv-qualified variant
	     of "A"), treat the entire expression as identical to "x".
	     This kind of code arises in C++ when an object is bound
	     to a const reference, and if "x" is a TARGET_EXPR we want
	     to take advantage of the optimization below.  */
	    bool volatile_p = TREE_THIS_VOLATILE (*from_p);
	    tree t = gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0));
	    if (t)
	      {
		if (TREE_THIS_VOLATILE (t) != volatile_p)
		  {
		    /* Folding may have lost the volatile qualifier of the
		       original reference; re-apply it, wrapping a bare decl
		       in a MEM_REF so the flag has somewhere to live.  */
		    if (DECL_P (t))
		      t = build_simple_mem_ref_loc (EXPR_LOCATION (*from_p),
						    build_fold_addr_expr (t));
		    if (REFERENCE_CLASS_P (t))
		      TREE_THIS_VOLATILE (t) = volatile_p;
		  }
		*from_p = t;
		ret = GS_OK;
		changed = true;
	      }
	    break;
	  }

	case TARGET_EXPR:
	  {
	    /* If we are initializing something from a TARGET_EXPR, strip the
	       TARGET_EXPR and initialize it directly, if possible.  This can't
	       be done if the initializer is void, since that implies that the
	       temporary is set in some non-trivial way.

	       ??? What about code that pulls out the temp and uses it
	       elsewhere? I think that such code never uses the TARGET_EXPR as
	       an initializer.  If I'm wrong, we'll die because the temp won't
	       have any RTL.  In that case, I guess we'll need to replace
	       references somehow.  */
	    tree init = TARGET_EXPR_INITIAL (*from_p);

	    if (init
		&& (TREE_CODE (*expr_p) != MODIFY_EXPR
		    || !TARGET_EXPR_NO_ELIDE (*from_p))
		&& !VOID_TYPE_P (TREE_TYPE (init)))
	      {
		*from_p = init;
		ret = GS_OK;
		changed = true;
	      }
	  }
	  break;

	case COMPOUND_EXPR:
	  /* Remove any COMPOUND_EXPR in the RHS so the following cases will be
	     caught.  */
	  gimplify_compound_expr (from_p, pre_p, true);
	  ret = GS_OK;
	  changed = true;
	  break;

	case CONSTRUCTOR:
	  /* If we already made some changes, let the front end have a
	     crack at this before we break it down.  */
	  if (ret != GS_UNHANDLED)
	    break;
	  /* If we're initializing from a CONSTRUCTOR, break this into
	     individual MODIFY_EXPRs.  */
	  return gimplify_init_constructor (expr_p, pre_p, post_p, want_value,
					    false);

	case COND_EXPR:
	  /* If we're assigning to a non-register type, push the assignment
	     down into the branches.  This is mandatory for ADDRESSABLE types,
	     since we cannot generate temporaries for such, but it saves a
	     copy in other cases as well.  */
	  if (!is_gimple_reg_type (TREE_TYPE (*from_p)))
	    {
	      /* This code should mirror the code in gimplify_cond_expr. */
	      enum tree_code code = TREE_CODE (*expr_p);
	      tree cond = *from_p;
	      tree result = *to_p;

	      ret = gimplify_expr (&result, pre_p, post_p,
				   is_gimple_lvalue, fb_lvalue);
	      if (ret != GS_ERROR)
		ret = GS_OK;

	      /* If we are going to write RESULT more than once, clear
		 TREE_READONLY flag, otherwise we might incorrectly promote
		 the variable to static const and initialize it at compile
		 time in one of the branches.  */
	      if (VAR_P (result)
		  && TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node
		  && TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
		TREE_READONLY (result) = 0;
	      if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node)
		TREE_OPERAND (cond, 1)
		  = build2 (code, void_type_node, result,
			    TREE_OPERAND (cond, 1));
	      if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
		TREE_OPERAND (cond, 2)
		  = build2 (code, void_type_node, unshare_expr (result),
			    TREE_OPERAND (cond, 2));

	      TREE_TYPE (cond) = void_type_node;
	      recalculate_side_effects (cond);

	      if (want_value)
		{
		  gimplify_and_add (cond, pre_p);
		  *expr_p = unshare_expr (result);
		}
	      else
		*expr_p = cond;
	      return ret;
	    }
	  break;

	case CALL_EXPR:
	  /* For calls that return in memory, give *to_p as the CALL_EXPR's
	     return slot so that we don't generate a temporary.  */
	  if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p)
	      && aggregate_value_p (*from_p, *from_p))
	    {
	      bool use_target;

	      if (!(rhs_predicate_for (*to_p))(*from_p))
		/* If we need a temporary, *to_p isn't accurate.  */
		use_target = false;
	      /* It's OK to use the return slot directly unless it's an NRV. */
	      else if (TREE_CODE (*to_p) == RESULT_DECL
		       && DECL_NAME (*to_p) == NULL_TREE
		       && needs_to_live_in_memory (*to_p))
		use_target = true;
	      else if (is_gimple_reg_type (TREE_TYPE (*to_p))
		       || (DECL_P (*to_p) && DECL_REGISTER (*to_p)))
		/* Don't force regs into memory.  */
		use_target = false;
	      else if (TREE_CODE (*expr_p) == INIT_EXPR)
		/* It's OK to use the target directly if it's being
		   initialized. */
		use_target = true;
	      else if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (*to_p)))
		       != INTEGER_CST)
		/* Always use the target and thus RSO for variable-sized types.
		   GIMPLE cannot deal with a variable-sized assignment
		   embedded in a call statement.  */
		use_target = true;
	      else if (TREE_CODE (*to_p) != SSA_NAME
		      && (!is_gimple_variable (*to_p)
			  || needs_to_live_in_memory (*to_p)))
		/* Don't use the original target if it's already addressable;
		   if its address escapes, and the called function uses the
		   NRV optimization, a conforming program could see *to_p
		   change before the called function returns; see c++/19317.
		   When optimizing, the return_slot pass marks more functions
		   as safe after we have escape info.  */
		use_target = false;
	      else
		use_target = true;

	      if (use_target)
		{
		  CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1;
		  mark_addressable (*to_p);
		}
	    }
	  break;

	case WITH_SIZE_EXPR:
	  /* Likewise for calls that return an aggregate of non-constant size,
	     since we would not be able to generate a temporary at all.  */
	  if (TREE_CODE (TREE_OPERAND (*from_p, 0)) == CALL_EXPR)
	    {
	      *from_p = TREE_OPERAND (*from_p, 0);
	      /* We don't change ret in this case because the
		 WITH_SIZE_EXPR might have been added in
		 gimplify_modify_expr, so returning GS_OK would lead to an
		 infinite loop.  */
	      changed = true;
	    }
	  break;

	  /* If we're initializing from a container, push the initialization
	     inside it.  */
	case CLEANUP_POINT_EXPR:
	case BIND_EXPR:
	case STATEMENT_LIST:
	  {
	    tree wrap = *from_p;
	    tree t;

	    ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_min_lval,
				 fb_lvalue);
	    if (ret != GS_ERROR)
	      ret = GS_OK;

	    t = voidify_wrapper_expr (wrap, *expr_p);
	    gcc_assert (t == *expr_p);

	    if (want_value)
	      {
		gimplify_and_add (wrap, pre_p);
		*expr_p = unshare_expr (*to_p);
	      }
	    else
	      *expr_p = wrap;
	    return GS_OK;
	  }

	case COMPOUND_LITERAL_EXPR:
	  {
	    tree complit = TREE_OPERAND (*expr_p, 1);
	    tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (complit);
	    tree decl = DECL_EXPR_DECL (decl_s);
	    tree init = DECL_INITIAL (decl);

	    /* struct T x = (struct T) { 0, 1, 2 } can be optimized
	       into struct T x = { 0, 1, 2 } if the address of the
	       compound literal has never been taken.  */
	    if (!TREE_ADDRESSABLE (complit)
		&& !TREE_ADDRESSABLE (decl)
		&& init)
	      {
		*expr_p = copy_node (*expr_p);
		TREE_OPERAND (*expr_p, 1) = init;
		return GS_OK;
	      }
	  }
	  /* FALLTHRU */

	default:
	  break;
	}
    }
  while (changed);

  return ret;
}
5549 
5550 
5551 /* Return true if T looks like a valid GIMPLE statement.  */
5552 
5553 static bool
is_gimple_stmt(tree t)5554 is_gimple_stmt (tree t)
5555 {
5556   const enum tree_code code = TREE_CODE (t);
5557 
5558   switch (code)
5559     {
5560     case NOP_EXPR:
5561       /* The only valid NOP_EXPR is the empty statement.  */
5562       return IS_EMPTY_STMT (t);
5563 
5564     case BIND_EXPR:
5565     case COND_EXPR:
5566       /* These are only valid if they're void.  */
5567       return TREE_TYPE (t) == NULL || VOID_TYPE_P (TREE_TYPE (t));
5568 
5569     case SWITCH_EXPR:
5570     case GOTO_EXPR:
5571     case RETURN_EXPR:
5572     case LABEL_EXPR:
5573     case CASE_LABEL_EXPR:
5574     case TRY_CATCH_EXPR:
5575     case TRY_FINALLY_EXPR:
5576     case EH_FILTER_EXPR:
5577     case CATCH_EXPR:
5578     case ASM_EXPR:
5579     case STATEMENT_LIST:
5580     case OACC_PARALLEL:
5581     case OACC_KERNELS:
5582     case OACC_SERIAL:
5583     case OACC_DATA:
5584     case OACC_HOST_DATA:
5585     case OACC_DECLARE:
5586     case OACC_UPDATE:
5587     case OACC_ENTER_DATA:
5588     case OACC_EXIT_DATA:
5589     case OACC_CACHE:
5590     case OMP_PARALLEL:
5591     case OMP_FOR:
5592     case OMP_SIMD:
5593     case OMP_DISTRIBUTE:
5594     case OMP_LOOP:
5595     case OACC_LOOP:
5596     case OMP_SCAN:
5597     case OMP_SECTIONS:
5598     case OMP_SECTION:
5599     case OMP_SINGLE:
5600     case OMP_MASTER:
5601     case OMP_TASKGROUP:
5602     case OMP_ORDERED:
5603     case OMP_CRITICAL:
5604     case OMP_TASK:
5605     case OMP_TARGET:
5606     case OMP_TARGET_DATA:
5607     case OMP_TARGET_UPDATE:
5608     case OMP_TARGET_ENTER_DATA:
5609     case OMP_TARGET_EXIT_DATA:
5610     case OMP_TASKLOOP:
5611     case OMP_TEAMS:
5612       /* These are always void.  */
5613       return true;
5614 
5615     case CALL_EXPR:
5616     case MODIFY_EXPR:
5617     case PREDICT_EXPR:
5618       /* These are valid regardless of their type.  */
5619       return true;
5620 
5621     default:
5622       return false;
5623     }
5624 }
5625 
5626 
5627 /* Promote partial stores to COMPLEX variables to total stores.  *EXPR_P is
5628    a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a variable with
5629    DECL_GIMPLE_REG_P set.
5630 
5631    IMPORTANT NOTE: This promotion is performed by introducing a load of the
5632    other, unmodified part of the complex object just before the total store.
5633    As a consequence, if the object is still uninitialized, an undefined value
5634    will be loaded into a register, which may result in a spurious exception
5635    if the register is floating-point and the value happens to be a signaling
5636    NaN for example.  Then the fully-fledged complex operations lowering pass
5637    followed by a DCE pass are necessary in order to fix things up.  */
5638 
5639 static enum gimplify_status
gimplify_modify_expr_complex_part(tree * expr_p,gimple_seq * pre_p,bool want_value)5640 gimplify_modify_expr_complex_part (tree *expr_p, gimple_seq *pre_p,
5641                                    bool want_value)
5642 {
5643   enum tree_code code, ocode;
5644   tree lhs, rhs, new_rhs, other, realpart, imagpart;
5645 
5646   lhs = TREE_OPERAND (*expr_p, 0);
5647   rhs = TREE_OPERAND (*expr_p, 1);
5648   code = TREE_CODE (lhs);
5649   lhs = TREE_OPERAND (lhs, 0);
5650 
5651   ocode = code == REALPART_EXPR ? IMAGPART_EXPR : REALPART_EXPR;
5652   other = build1 (ocode, TREE_TYPE (rhs), lhs);
5653   TREE_NO_WARNING (other) = 1;
5654   other = get_formal_tmp_var (other, pre_p);
5655 
5656   realpart = code == REALPART_EXPR ? rhs : other;
5657   imagpart = code == REALPART_EXPR ? other : rhs;
5658 
5659   if (TREE_CONSTANT (realpart) && TREE_CONSTANT (imagpart))
5660     new_rhs = build_complex (TREE_TYPE (lhs), realpart, imagpart);
5661   else
5662     new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart);
5663 
5664   gimplify_seq_add_stmt (pre_p, gimple_build_assign (lhs, new_rhs));
5665   *expr_p = (want_value) ? rhs : NULL_TREE;
5666 
5667   return GS_ALL_DONE;
5668 }
5669 
/* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.

      modify_expr
	      : varname '=' rhs
	      | '*' ID '=' rhs

    PRE_P points to the list where side effects that must happen before
	*EXPR_P should be stored.

    POST_P points to the list where side effects that must happen after
	*EXPR_P should be stored.

    WANT_VALUE is nonzero iff we want to use the value of this expression
	in another expression.

    Returns GS_OK when *EXPR_P was replaced by the value of the
    assignment (WANT_VALUE), GS_ALL_DONE when it was fully lowered into
    *PRE_P, or GS_ERROR on failure.  */

static enum gimplify_status
gimplify_modify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
		      bool want_value)
{
  tree *from_p = &TREE_OPERAND (*expr_p, 1);
  tree *to_p = &TREE_OPERAND (*expr_p, 0);
  enum gimplify_status ret = GS_UNHANDLED;
  gimple *assign;
  location_t loc = EXPR_LOCATION (*expr_p);
  gimple_stmt_iterator gsi;

  gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR
	      || TREE_CODE (*expr_p) == INIT_EXPR);

  /* Trying to simplify a clobber using normal logic doesn't work,
     so handle it here.  */
  if (TREE_CLOBBER_P (*from_p))
    {
      ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
      if (ret == GS_ERROR)
	return ret;
      gcc_assert (!want_value);
      /* If the LHS is neither a variable nor a MEM_REF, take its
	 address into a temporary and clobber through that.  */
      if (!VAR_P (*to_p) && TREE_CODE (*to_p) != MEM_REF)
	{
	  tree addr = get_initialized_tmp_var (build_fold_addr_expr (*to_p),
					       pre_p, post_p);
	  *to_p = build_simple_mem_ref_loc (EXPR_LOCATION (*to_p), addr);
	}
      gimplify_seq_add_stmt (pre_p, gimple_build_assign (*to_p, *from_p));
      *expr_p = NULL;
      return GS_ALL_DONE;
    }

  /* Insert pointer conversions required by the middle-end that are not
     required by the frontend.  This fixes middle-end type checking for
     for example gcc.dg/redecl-6.c.  */
  if (POINTER_TYPE_P (TREE_TYPE (*to_p)))
    {
      STRIP_USELESS_TYPE_CONVERSION (*from_p);
      if (!useless_type_conversion_p (TREE_TYPE (*to_p), TREE_TYPE (*from_p)))
	*from_p = fold_convert_loc (loc, TREE_TYPE (*to_p), *from_p);
    }

  /* See if any simplifications can be done based on what the RHS is.  */
  ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
				  want_value);
  if (ret != GS_UNHANDLED)
    return ret;

  /* For zero sized types only gimplify the left hand side and right hand
     side as statements and throw away the assignment.  Do this after
     gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable
     types properly.  */
  if (zero_sized_type (TREE_TYPE (*from_p))
      && !want_value
      /* Don't do this for calls that return addressable types, expand_call
	 relies on those having a lhs.  */
      && !(TREE_ADDRESSABLE (TREE_TYPE (*from_p))
	   && TREE_CODE (*from_p) == CALL_EXPR))
    {
      gimplify_stmt (from_p, pre_p);
      gimplify_stmt (to_p, pre_p);
      *expr_p = NULL_TREE;
      return GS_ALL_DONE;
    }

  /* If the value being copied is of variable width, compute the length
     of the copy into a WITH_SIZE_EXPR.   Note that we need to do this
     before gimplifying any of the operands so that we can resolve any
     PLACEHOLDER_EXPRs in the size.  Also note that the RTL expander uses
     the size of the expression to be copied, not of the destination, so
     that is what we must do here.  */
  maybe_with_size_expr (from_p);

  /* As a special case, we have to temporarily allow for assignments
     with a CALL_EXPR on the RHS.  Since in GIMPLE a function call is
     a toplevel statement, when gimplifying the GENERIC expression
     MODIFY_EXPR <a, CALL_EXPR <foo>>, we cannot create the tuple
     GIMPLE_ASSIGN <a, GIMPLE_CALL <foo>>.

     Instead, we need to create the tuple GIMPLE_CALL <a, foo>.  To
     prevent gimplify_expr from trying to create a new temporary for
     foo's LHS, we tell it that it should only gimplify until it
     reaches the CALL_EXPR.  On return from gimplify_expr, the newly
     created GIMPLE_CALL <foo> will be the last statement in *PRE_P
     and all we need to do here is set 'a' to be its LHS.  */

  /* Gimplify the RHS first for C++17 and bug 71104.  */
  gimple_predicate initial_pred = initial_rhs_predicate_for (*to_p);
  ret = gimplify_expr (from_p, pre_p, post_p, initial_pred, fb_rvalue);
  if (ret == GS_ERROR)
    return ret;

  /* Then gimplify the LHS.  */
  /* If we gimplified the RHS to a CALL_EXPR and that call may return
     twice we have to make sure to gimplify into non-SSA as otherwise
     the abnormal edge added later will make those defs not dominate
     their uses.
     ???  Technically this applies only to the registers used in the
     resulting non-register *TO_P.  */
  bool saved_into_ssa = gimplify_ctxp->into_ssa;
  if (saved_into_ssa
      && TREE_CODE (*from_p) == CALL_EXPR
      && call_expr_flags (*from_p) & ECF_RETURNS_TWICE)
    gimplify_ctxp->into_ssa = false;
  ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
  gimplify_ctxp->into_ssa = saved_into_ssa;
  if (ret == GS_ERROR)
    return ret;

  /* Now that the LHS is gimplified, re-gimplify the RHS if our initial
     guess for the predicate was wrong.  */
  gimple_predicate final_pred = rhs_predicate_for (*to_p);
  if (final_pred != initial_pred)
    {
      ret = gimplify_expr (from_p, pre_p, post_p, final_pred, fb_rvalue);
      if (ret == GS_ERROR)
	return ret;
    }

  /* In case of va_arg internal fn wrappped in a WITH_SIZE_EXPR, add the type
     size as argument to the call.  */
  if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
    {
      tree call = TREE_OPERAND (*from_p, 0);
      tree vlasize = TREE_OPERAND (*from_p, 1);

      if (TREE_CODE (call) == CALL_EXPR
	  && CALL_EXPR_IFN (call) == IFN_VA_ARG)
	{
	  int nargs = call_expr_nargs (call);
	  tree type = TREE_TYPE (call);
	  tree ap = CALL_EXPR_ARG (call, 0);
	  tree tag = CALL_EXPR_ARG (call, 1);
	  tree aptag = CALL_EXPR_ARG (call, 2);
	  /* Rebuild the IFN_VA_ARG call with the size appended as an
	     extra trailing argument.  */
	  tree newcall = build_call_expr_internal_loc (EXPR_LOCATION (call),
						       IFN_VA_ARG, type,
						       nargs + 1, ap, tag,
						       aptag, vlasize);
	  TREE_OPERAND (*from_p, 0) = newcall;
	}
    }

  /* Now see if the above changed *from_p to something we handle specially.  */
  ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
				  want_value);
  if (ret != GS_UNHANDLED)
    return ret;

  /* If we've got a variable sized assignment between two lvalues (i.e. does
     not involve a call), then we can make things a bit more straightforward
     by converting the assignment to memcpy or memset.  */
  if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
    {
      tree from = TREE_OPERAND (*from_p, 0);
      tree size = TREE_OPERAND (*from_p, 1);

      if (TREE_CODE (from) == CONSTRUCTOR)
	return gimplify_modify_expr_to_memset (expr_p, size, want_value, pre_p);

      if (is_gimple_addressable (from))
	{
	  *from_p = from;
	  return gimplify_modify_expr_to_memcpy (expr_p, size, want_value,
	      					 pre_p);
	}
    }

  /* Transform partial stores to non-addressable complex variables into
     total stores.  This allows us to use real instead of virtual operands
     for these variables, which improves optimization.  */
  if ((TREE_CODE (*to_p) == REALPART_EXPR
       || TREE_CODE (*to_p) == IMAGPART_EXPR)
      && is_gimple_reg (TREE_OPERAND (*to_p, 0)))
    return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value);

  /* Try to alleviate the effects of the gimplification creating artificial
     temporaries (see for example is_gimple_reg_rhs) on the debug info, but
     make sure not to create DECL_DEBUG_EXPR links across functions.  */
  if (!gimplify_ctxp->into_ssa
      && VAR_P (*from_p)
      && DECL_IGNORED_P (*from_p)
      && DECL_P (*to_p)
      && !DECL_IGNORED_P (*to_p)
      && decl_function_context (*to_p) == current_function_decl
      && decl_function_context (*from_p) == current_function_decl)
    {
      if (!DECL_NAME (*from_p) && DECL_NAME (*to_p))
	DECL_NAME (*from_p)
	  = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p)));
      DECL_HAS_DEBUG_EXPR_P (*from_p) = 1;
      SET_DECL_DEBUG_EXPR (*from_p, *to_p);
   }

  /* If the LHS is volatile, don't read it back to produce the value of
     the expression; evaluate the RHS into a temporary and use that
     (see the want_value handling at the end).  */
  if (want_value && TREE_THIS_VOLATILE (*to_p))
    *from_p = get_initialized_tmp_var (*from_p, pre_p, post_p);

  if (TREE_CODE (*from_p) == CALL_EXPR)
    {
      /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL
	 instead of a GIMPLE_ASSIGN.  */
      gcall *call_stmt;
      if (CALL_EXPR_FN (*from_p) == NULL_TREE)
	{
	  /* Gimplify internal functions created in the FEs.  */
	  int nargs = call_expr_nargs (*from_p), i;
	  enum internal_fn ifn = CALL_EXPR_IFN (*from_p);
	  auto_vec<tree> vargs (nargs);

	  for (i = 0; i < nargs; i++)
	    {
	      gimplify_arg (&CALL_EXPR_ARG (*from_p, i), pre_p,
			    EXPR_LOCATION (*from_p));
	      vargs.quick_push (CALL_EXPR_ARG (*from_p, i));
	    }
	  call_stmt = gimple_build_call_internal_vec (ifn, vargs);
	  gimple_call_set_nothrow (call_stmt, TREE_NOTHROW (*from_p));
	  gimple_set_location (call_stmt, EXPR_LOCATION (*expr_p));
	}
      else
	{
	  tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*from_p));
	  CALL_EXPR_FN (*from_p) = TREE_OPERAND (CALL_EXPR_FN (*from_p), 0);
	  STRIP_USELESS_TYPE_CONVERSION (CALL_EXPR_FN (*from_p));
	  tree fndecl = get_callee_fndecl (*from_p);
	  /* Lower a 3-argument __builtin_expect call to the internal
	     function form.  */
	  if (fndecl
	      && fndecl_built_in_p (fndecl, BUILT_IN_EXPECT)
	      && call_expr_nargs (*from_p) == 3)
	    call_stmt = gimple_build_call_internal (IFN_BUILTIN_EXPECT, 3,
						    CALL_EXPR_ARG (*from_p, 0),
						    CALL_EXPR_ARG (*from_p, 1),
						    CALL_EXPR_ARG (*from_p, 2));
	  else
	    {
	      call_stmt = gimple_build_call_from_tree (*from_p, fnptrtype);
	    }
	}
      notice_special_calls (call_stmt);
      if (!gimple_call_noreturn_p (call_stmt) || !should_remove_lhs_p (*to_p))
	gimple_call_set_lhs (call_stmt, *to_p);
      else if (TREE_CODE (*to_p) == SSA_NAME)
	/* The above is somewhat premature, avoid ICEing later for a
	   SSA name w/o a definition.  We may have uses in the GIMPLE IL.
	   ???  This doesn't make it a default-def.  */
	SSA_NAME_DEF_STMT (*to_p) = gimple_build_nop ();

      assign = call_stmt;
    }
  else
    {
      assign = gimple_build_assign (*to_p, *from_p);
      gimple_set_location (assign, EXPR_LOCATION (*expr_p));
      if (COMPARISON_CLASS_P (*from_p))
	gimple_set_no_warning (assign, TREE_NO_WARNING (*from_p));
    }

  if (gimplify_ctxp->into_ssa && is_gimple_reg (*to_p))
    {
      /* We should have got an SSA name from the start.  */
      gcc_assert (TREE_CODE (*to_p) == SSA_NAME
		  || ! gimple_in_ssa_p (cfun));
    }

  gimplify_seq_add_stmt (pre_p, assign);
  /* Give the just-emitted statement a chance to be folded.  */
  gsi = gsi_last (*pre_p);
  maybe_fold_stmt (&gsi);

  if (want_value)
    {
      *expr_p = TREE_THIS_VOLATILE (*to_p) ? *from_p : unshare_expr (*to_p);
      return GS_OK;
    }
  else
    *expr_p = NULL;

  return GS_ALL_DONE;
}
5962 
5963 /* Gimplify a comparison between two variable-sized objects.  Do this
5964    with a call to BUILT_IN_MEMCMP.  */
5965 
5966 static enum gimplify_status
gimplify_variable_sized_compare(tree * expr_p)5967 gimplify_variable_sized_compare (tree *expr_p)
5968 {
5969   location_t loc = EXPR_LOCATION (*expr_p);
5970   tree op0 = TREE_OPERAND (*expr_p, 0);
5971   tree op1 = TREE_OPERAND (*expr_p, 1);
5972   tree t, arg, dest, src, expr;
5973 
5974   arg = TYPE_SIZE_UNIT (TREE_TYPE (op0));
5975   arg = unshare_expr (arg);
5976   arg = SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg, op0);
5977   src = build_fold_addr_expr_loc (loc, op1);
5978   dest = build_fold_addr_expr_loc (loc, op0);
5979   t = builtin_decl_implicit (BUILT_IN_MEMCMP);
5980   t = build_call_expr_loc (loc, t, 3, dest, src, arg);
5981 
5982   expr
5983     = build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node);
5984   SET_EXPR_LOCATION (expr, loc);
5985   *expr_p = expr;
5986 
5987   return GS_OK;
5988 }
5989 
5990 /* Gimplify a comparison between two aggregate objects of integral scalar
5991    mode as a comparison between the bitwise equivalent scalar values.  */
5992 
5993 static enum gimplify_status
gimplify_scalar_mode_aggregate_compare(tree * expr_p)5994 gimplify_scalar_mode_aggregate_compare (tree *expr_p)
5995 {
5996   location_t loc = EXPR_LOCATION (*expr_p);
5997   tree op0 = TREE_OPERAND (*expr_p, 0);
5998   tree op1 = TREE_OPERAND (*expr_p, 1);
5999 
6000   tree type = TREE_TYPE (op0);
6001   tree scalar_type = lang_hooks.types.type_for_mode (TYPE_MODE (type), 1);
6002 
6003   op0 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op0);
6004   op1 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op1);
6005 
6006   *expr_p
6007     = fold_build2_loc (loc, TREE_CODE (*expr_p), TREE_TYPE (*expr_p), op0, op1);
6008 
6009   return GS_OK;
6010 }
6011 
6012 /* Gimplify an expression sequence.  This function gimplifies each
6013    expression and rewrites the original expression with the last
6014    expression of the sequence in GIMPLE form.
6015 
6016    PRE_P points to the list where the side effects for all the
6017        expressions in the sequence will be emitted.
6018 
6019    WANT_VALUE is true when the result of the last COMPOUND_EXPR is used.  */
6020 
6021 static enum gimplify_status
gimplify_compound_expr(tree * expr_p,gimple_seq * pre_p,bool want_value)6022 gimplify_compound_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
6023 {
6024   tree t = *expr_p;
6025 
6026   do
6027     {
6028       tree *sub_p = &TREE_OPERAND (t, 0);
6029 
6030       if (TREE_CODE (*sub_p) == COMPOUND_EXPR)
6031 	gimplify_compound_expr (sub_p, pre_p, false);
6032       else
6033 	gimplify_stmt (sub_p, pre_p);
6034 
6035       t = TREE_OPERAND (t, 1);
6036     }
6037   while (TREE_CODE (t) == COMPOUND_EXPR);
6038 
6039   *expr_p = t;
6040   if (want_value)
6041     return GS_OK;
6042   else
6043     {
6044       gimplify_stmt (expr_p, pre_p);
6045       return GS_ALL_DONE;
6046     }
6047 }
6048 
6049 /* Gimplify a SAVE_EXPR node.  EXPR_P points to the expression to
6050    gimplify.  After gimplification, EXPR_P will point to a new temporary
6051    that holds the original value of the SAVE_EXPR node.
6052 
6053    PRE_P points to the list where side effects that must happen before
6054    *EXPR_P should be stored.  */
6055 
6056 static enum gimplify_status
gimplify_save_expr(tree * expr_p,gimple_seq * pre_p,gimple_seq * post_p)6057 gimplify_save_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
6058 {
6059   enum gimplify_status ret = GS_ALL_DONE;
6060   tree val;
6061 
6062   gcc_assert (TREE_CODE (*expr_p) == SAVE_EXPR);
6063   val = TREE_OPERAND (*expr_p, 0);
6064 
6065   /* If the SAVE_EXPR has not been resolved, then evaluate it once.  */
6066   if (!SAVE_EXPR_RESOLVED_P (*expr_p))
6067     {
6068       /* The operand may be a void-valued expression.  It is
6069 	 being executed only for its side-effects.  */
6070       if (TREE_TYPE (val) == void_type_node)
6071 	{
6072 	  ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
6073 			       is_gimple_stmt, fb_none);
6074 	  val = NULL;
6075 	}
6076       else
6077 	/* The temporary may not be an SSA name as later abnormal and EH
6078 	   control flow may invalidate use/def domination.  When in SSA
6079 	   form then assume there are no such issues and SAVE_EXPRs only
6080 	   appear via GENERIC foldings.  */
6081 	val = get_initialized_tmp_var (val, pre_p, post_p,
6082 				       gimple_in_ssa_p (cfun));
6083 
6084       TREE_OPERAND (*expr_p, 0) = val;
6085       SAVE_EXPR_RESOLVED_P (*expr_p) = 1;
6086     }
6087 
6088   *expr_p = val;
6089 
6090   return ret;
6091 }
6092 
6093 /* Rewrite the ADDR_EXPR node pointed to by EXPR_P
6094 
6095       unary_expr
6096 	      : ...
6097 	      | '&' varname
6098 	      ...
6099 
6100     PRE_P points to the list where side effects that must happen before
6101 	*EXPR_P should be stored.
6102 
6103     POST_P points to the list where side effects that must happen after
6104 	*EXPR_P should be stored.  */
6105 
static enum gimplify_status
gimplify_addr_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  tree expr = *expr_p;
  tree op0 = TREE_OPERAND (expr, 0);
  enum gimplify_status ret;
  location_t loc = EXPR_LOCATION (*expr_p);

  /* Dispatch on the shape of the operand whose address is taken.  */
  switch (TREE_CODE (op0))
    {
    case INDIRECT_REF:
    do_indirect_ref:
      /* Check if we are dealing with an expression of the form '&*ptr'.
	 While the front end folds away '&*ptr' into 'ptr', these
	 expressions may be generated internally by the compiler (e.g.,
	 builtins like __builtin_va_end).  */
      /* Caution: the silent array decomposition semantics we allow for
	 ADDR_EXPR means we can't always discard the pair.  */
      /* Gimplification of the ADDR_EXPR operand may drop
	 cv-qualification conversions, so make sure we add them if
	 needed.  */
      {
	tree op00 = TREE_OPERAND (op0, 0);
	tree t_expr = TREE_TYPE (expr);
	tree t_op00 = TREE_TYPE (op00);

        if (!useless_type_conversion_p (t_expr, t_op00))
	  op00 = fold_convert_loc (loc, TREE_TYPE (expr), op00);
        *expr_p = op00;
        ret = GS_OK;
      }
      break;

    case VIEW_CONVERT_EXPR:
      /* Take the address of our operand and then convert it to the type of
	 this ADDR_EXPR.

	 ??? The interactions of VIEW_CONVERT_EXPR and aliasing is not at
	 all clear.  The impact of this transformation is even less clear.  */

      /* If the operand is a useless conversion, look through it.  Doing so
	 guarantees that the ADDR_EXPR and its operand will remain of the
	 same type.  */
      if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0, 0)))
	op0 = TREE_OPERAND (op0, 0);

      *expr_p = fold_convert_loc (loc, TREE_TYPE (expr),
				  build_fold_addr_expr_loc (loc,
							TREE_OPERAND (op0, 0)));
      ret = GS_OK;
      break;

    case MEM_REF:
      /* '&MEM[ptr, 0]' is equivalent to '&*ptr'; reuse that handling.  */
      if (integer_zerop (TREE_OPERAND (op0, 1)))
	goto do_indirect_ref;

      /* fall through */

    default:
      /* If we see a call to a declared builtin or see its address
	 being taken (we can unify those cases here) then we can mark
	 the builtin for implicit generation by GCC.  */
      if (TREE_CODE (op0) == FUNCTION_DECL
	  && fndecl_built_in_p (op0, BUILT_IN_NORMAL)
	  && builtin_decl_declared_p (DECL_FUNCTION_CODE (op0)))
	set_builtin_decl_implicit_p (DECL_FUNCTION_CODE (op0), true);

      /* We use fb_either here because the C frontend sometimes takes
	 the address of a call that returns a struct; see
	 gcc.dg/c99-array-lval-1.c.  The gimplifier will correctly make
	 the implied temporary explicit.  */

      /* Make the operand addressable.  */
      ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, post_p,
			   is_gimple_addressable, fb_either);
      if (ret == GS_ERROR)
	break;

      /* Then mark it.  Beware that it may not be possible to do so directly
	 if a temporary has been created by the gimplification.  */
      prepare_gimple_addressable (&TREE_OPERAND (expr, 0), pre_p);

      op0 = TREE_OPERAND (expr, 0);

      /* For various reasons, the gimplification of the expression
	 may have made a new INDIRECT_REF.  */
      if (TREE_CODE (op0) == INDIRECT_REF
	  || (TREE_CODE (op0) == MEM_REF
	      && integer_zerop (TREE_OPERAND (op0, 1))))
	goto do_indirect_ref;

      mark_addressable (TREE_OPERAND (expr, 0));

      /* The FEs may end up building ADDR_EXPRs early on a decl with
	 an incomplete type.  Re-build ADDR_EXPRs in canonical form
	 here.  */
      if (!types_compatible_p (TREE_TYPE (op0), TREE_TYPE (TREE_TYPE (expr))))
	*expr_p = build_fold_addr_expr (op0);

      /* Make sure TREE_CONSTANT and TREE_SIDE_EFFECTS are set properly.  */
      recompute_tree_invariant_for_addr_expr (*expr_p);

      /* If we re-built the ADDR_EXPR add a conversion to the original type
         if required.  */
      if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
	*expr_p = fold_convert (TREE_TYPE (expr), *expr_p);

      break;
    }

  return ret;
}
6218 
6219 /* Gimplify the operands of an ASM_EXPR.  Input operands should be a gimple
6220    value; output operands should be a gimple lvalue.  */
6221 
static enum gimplify_status
gimplify_asm_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  tree expr;
  int noutputs;
  const char **oconstraints;
  int i;
  tree link;
  const char *constraint;
  bool allows_mem, allows_reg, is_inout;
  enum gimplify_status ret, tret;
  gasm *stmt;
  vec<tree, va_gc> *inputs;
  vec<tree, va_gc> *outputs;
  vec<tree, va_gc> *clobbers;
  vec<tree, va_gc> *labels;
  tree link_next;

  expr = *expr_p;
  noutputs = list_length (ASM_OUTPUTS (expr));
  oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));

  inputs = NULL;
  outputs = NULL;
  clobbers = NULL;
  labels = NULL;

  ret = GS_ALL_DONE;
  link_next = NULL_TREE;
  /* First pass: gimplify every output operand.  Output constraints are
     remembered in OCONSTRAINTS so input constraints referring back to
     them (matching constraints) can be validated below.  */
  for (i = 0, link = ASM_OUTPUTS (expr); link; ++i, link = link_next)
    {
      bool ok;
      size_t constraint_len;

      link_next = TREE_CHAIN (link);

      oconstraints[i]
	= constraint
	= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      constraint_len = strlen (constraint);
      if (constraint_len == 0)
        continue;

      ok = parse_output_constraint (&constraint, i, 0, 0,
				    &allows_mem, &allows_reg, &is_inout);
      if (!ok)
	{
	  ret = GS_ERROR;
	  is_inout = false;
	}

      /* If we can't make copies, we can only accept memory.
	 Similarly for VLAs.  */
      tree outtype = TREE_TYPE (TREE_VALUE (link));
      if (outtype != error_mark_node
	  && (TREE_ADDRESSABLE (outtype)
	      || !COMPLETE_TYPE_P (outtype)
	      || !tree_fits_poly_uint64_p (TYPE_SIZE_UNIT (outtype))))
	{
	  if (allows_mem)
	    allows_reg = 0;
	  else
	    {
	      error ("impossible constraint in %<asm%>");
	      error ("non-memory output %d must stay in memory", i);
	      return GS_ERROR;
	    }
	}

      if (!allows_reg && allows_mem)
	mark_addressable (TREE_VALUE (link));

      tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
			    is_inout ? is_gimple_min_lval : is_gimple_lvalue,
			    fb_lvalue | fb_mayfail);
      if (tret == GS_ERROR)
	{
	  error ("invalid lvalue in %<asm%> output %d", i);
	  ret = tret;
	}

      /* If the constraint does not allow memory make sure we gimplify
         it to a register if it is not already but its base is.  This
	 happens for complex and vector components.  */
      if (!allows_mem)
	{
	  tree op = TREE_VALUE (link);
	  if (! is_gimple_val (op)
	      && is_gimple_reg_type (TREE_TYPE (op))
	      && is_gimple_reg (get_base_address (op)))
	    {
	      /* Route the operand through a fresh register temporary:
		 copy in before the asm for in/out operands, and always
		 copy the result back out afterwards.  */
	      tree tem = create_tmp_reg (TREE_TYPE (op));
	      tree ass;
	      if (is_inout)
		{
		  ass = build2 (MODIFY_EXPR, TREE_TYPE (tem),
				tem, unshare_expr (op));
		  gimplify_and_add (ass, pre_p);
		}
	      ass = build2 (MODIFY_EXPR, TREE_TYPE (tem), op, tem);
	      gimplify_and_add (ass, post_p);

	      TREE_VALUE (link) = tem;
	      tret = GS_OK;
	    }
	}

      vec_safe_push (outputs, link);
      TREE_CHAIN (link) = NULL_TREE;

      if (is_inout)
	{
	  /* An input/output operand.  To give the optimizers more
	     flexibility, split it into separate input and output
 	     operands.  */
	  tree input;
	  /* Buffer big enough to format a 32-bit UINT_MAX into.  */
	  char buf[11];

	  /* Turn the in/out constraint into an output constraint.  */
	  char *p = xstrdup (constraint);
	  p[0] = '=';
	  TREE_VALUE (TREE_PURPOSE (link)) = build_string (constraint_len, p);

	  /* And add a matching input constraint.  */
	  if (allows_reg)
	    {
	      sprintf (buf, "%u", i);

	      /* If there are multiple alternatives in the constraint,
		 handle each of them individually.  Those that allow register
		 will be replaced with operand number, the others will stay
		 unchanged.  */
	      if (strchr (p, ',') != NULL)
		{
		  size_t len = 0, buflen = strlen (buf);
		  char *beg, *end, *str, *dst;

		  /* First pass over the alternatives: compute an upper
		     bound on the rewritten constraint's length.  */
		  for (beg = p + 1;;)
		    {
		      end = strchr (beg, ',');
		      if (end == NULL)
			end = strchr (beg, '\0');
		      if ((size_t) (end - beg) < buflen)
			len += buflen + 1;
		      else
			len += end - beg + 1;
		      if (*end)
			beg = end + 1;
		      else
			break;
		    }

		  /* Second pass: copy each alternative, substituting the
		     operand number for alternatives that allow a
		     register.  */
		  str = (char *) alloca (len);
		  for (beg = p + 1, dst = str;;)
		    {
		      const char *tem;
		      bool mem_p, reg_p, inout_p;

		      end = strchr (beg, ',');
		      if (end)
			*end = '\0';
		      /* Temporarily prepend '=' so the alternative parses
			 as an output constraint.  */
		      beg[-1] = '=';
		      tem = beg - 1;
		      parse_output_constraint (&tem, i, 0, 0,
					       &mem_p, &reg_p, &inout_p);
		      if (dst != str)
			*dst++ = ',';
		      if (reg_p)
			{
			  memcpy (dst, buf, buflen);
			  dst += buflen;
			}
		      else
			{
			  if (end)
			    len = end - beg;
			  else
			    len = strlen (beg);
			  memcpy (dst, beg, len);
			  dst += len;
			}
		      if (end)
			beg = end + 1;
		      else
			break;
		    }
		  *dst = '\0';
		  input = build_string (dst - str, str);
		}
	      else
		input = build_string (strlen (buf), buf);
	    }
	  else
	    /* No register alternative: reuse the constraint with the
	       leading '+' stripped.  */
	    input = build_string (constraint_len - 1, constraint + 1);

	  free (p);

	  input = build_tree_list (build_tree_list (NULL_TREE, input),
				   unshare_expr (TREE_VALUE (link)));
	  ASM_INPUTS (expr) = chainon (ASM_INPUTS (expr), input);
	}
    }

  link_next = NULL_TREE;
  /* Second pass: gimplify the input operands.  Note I keeps counting up
     from the outputs so diagnostics report the overall operand number.  */
  for (link = ASM_INPUTS (expr); link; ++i, link = link_next)
    {
      link_next = TREE_CHAIN (link);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      parse_input_constraint (&constraint, 0, 0, noutputs, 0,
			      oconstraints, &allows_mem, &allows_reg);

      /* If we can't make copies, we can only accept memory.  */
      tree intype = TREE_TYPE (TREE_VALUE (link));
      if (intype != error_mark_node
	  && (TREE_ADDRESSABLE (intype)
	      || !COMPLETE_TYPE_P (intype)
	      || !tree_fits_poly_uint64_p (TYPE_SIZE_UNIT (intype))))
	{
	  if (allows_mem)
	    allows_reg = 0;
	  else
	    {
	      error ("impossible constraint in %<asm%>");
	      error ("non-memory input %d must stay in memory", i);
	      return GS_ERROR;
	    }
	}

      /* If the operand is a memory input, it should be an lvalue.  */
      if (!allows_reg && allows_mem)
	{
	  tree inputv = TREE_VALUE (link);
	  STRIP_NOPS (inputv);
	  /* Side-effecting expressions cannot serve as memory inputs;
	     mark them as errors so gimplify_expr fails below.  */
	  if (TREE_CODE (inputv) == PREDECREMENT_EXPR
	      || TREE_CODE (inputv) == PREINCREMENT_EXPR
	      || TREE_CODE (inputv) == POSTDECREMENT_EXPR
	      || TREE_CODE (inputv) == POSTINCREMENT_EXPR
	      || TREE_CODE (inputv) == MODIFY_EXPR)
	    TREE_VALUE (link) = error_mark_node;
	  tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
				is_gimple_lvalue, fb_lvalue | fb_mayfail);
	  if (tret != GS_ERROR)
	    {
	      /* Unlike output operands, memory inputs are not guaranteed
		 to be lvalues by the FE, and while the expressions are
		 marked addressable there, if it is e.g. a statement
		 expression, temporaries in it might not end up being
		 addressable.  They might be already used in the IL and thus
		 it is too late to make them addressable now though.  */
	      tree x = TREE_VALUE (link);
	      while (handled_component_p (x))
		x = TREE_OPERAND (x, 0);
	      if (TREE_CODE (x) == MEM_REF
		  && TREE_CODE (TREE_OPERAND (x, 0)) == ADDR_EXPR)
		x = TREE_OPERAND (TREE_OPERAND (x, 0), 0);
	      if ((VAR_P (x)
		   || TREE_CODE (x) == PARM_DECL
		   || TREE_CODE (x) == RESULT_DECL)
		  && !TREE_ADDRESSABLE (x)
		  && is_gimple_reg (x))
		{
		  warning_at (EXPR_LOC_OR_LOC (TREE_VALUE (link),
					       input_location), 0,
			      "memory input %d is not directly addressable",
			      i);
		  prepare_gimple_addressable (&TREE_VALUE (link), pre_p);
		}
	    }
	  mark_addressable (TREE_VALUE (link));
	  if (tret == GS_ERROR)
	    {
	      error_at (EXPR_LOC_OR_LOC (TREE_VALUE (link), input_location),
			"memory input %d is not directly addressable", i);
	      ret = tret;
	    }
	}
      else
	{
	  tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
				is_gimple_asm_val, fb_rvalue);
	  if (tret == GS_ERROR)
	    ret = tret;
	}

      TREE_CHAIN (link) = NULL_TREE;
      vec_safe_push (inputs, link);
    }

  /* Clobbers and goto labels need no gimplification; just detach each
     TREE_LIST node and collect it.  */
  link_next = NULL_TREE;
  for (link = ASM_CLOBBERS (expr); link; ++i, link = link_next)
    {
      link_next = TREE_CHAIN (link);
      TREE_CHAIN (link) = NULL_TREE;
      vec_safe_push (clobbers, link);
    }

  link_next = NULL_TREE;
  for (link = ASM_LABELS (expr); link; ++i, link = link_next)
    {
      link_next = TREE_CHAIN (link);
      TREE_CHAIN (link) = NULL_TREE;
      vec_safe_push (labels, link);
    }

  /* Do not add ASMs with errors to the gimple IL stream.  */
  if (ret != GS_ERROR)
    {
      stmt = gimple_build_asm_vec (TREE_STRING_POINTER (ASM_STRING (expr)),
				   inputs, outputs, clobbers, labels);

      /* An asm without outputs is implicitly treated as volatile.  */
      gimple_asm_set_volatile (stmt, ASM_VOLATILE_P (expr) || noutputs == 0);
      gimple_asm_set_input (stmt, ASM_INPUT_P (expr));
      gimple_asm_set_inline (stmt, ASM_INLINE_P (expr));

      gimplify_seq_add_stmt (pre_p, stmt);
    }

  return ret;
}
6542 
6543 /* Gimplify a CLEANUP_POINT_EXPR.  Currently this works by adding
6544    GIMPLE_WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
6545    gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
6546    return to this function.
6547 
6548    FIXME should we complexify the prequeue handling instead?  Or use flags
6549    for all the cleanups and let the optimizer tighten them up?  The current
6550    code seems pretty fragile; it will break on a cleanup within any
6551    non-conditional nesting.  But any such nesting would be broken, anyway;
6552    we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
6553    and continues out of it.  We can do that at the RTL level, though, so
6554    having an optimizer to tighten up try/finally regions would be a Good
6555    Thing.  */
6556 
static enum gimplify_status
gimplify_cleanup_point_expr (tree *expr_p, gimple_seq *pre_p)
{
  gimple_stmt_iterator iter;
  gimple_seq body_sequence = NULL;

  tree temp = voidify_wrapper_expr (*expr_p, NULL);

  /* We only care about the number of conditions between the innermost
     CLEANUP_POINT_EXPR and the cleanup.  So save and reset the count and
     any cleanups collected outside the CLEANUP_POINT_EXPR.  */
  int old_conds = gimplify_ctxp->conditions;
  gimple_seq old_cleanups = gimplify_ctxp->conditional_cleanups;
  bool old_in_cleanup_point_expr = gimplify_ctxp->in_cleanup_point_expr;
  gimplify_ctxp->conditions = 0;
  gimplify_ctxp->conditional_cleanups = NULL;
  gimplify_ctxp->in_cleanup_point_expr = true;

  /* Gimplify the body; cleanups appear in it as GIMPLE_WITH_CLEANUP_EXPR
     markers which are lowered in the loop below.  */
  gimplify_stmt (&TREE_OPERAND (*expr_p, 0), &body_sequence);

  gimplify_ctxp->conditions = old_conds;
  gimplify_ctxp->conditional_cleanups = old_cleanups;
  gimplify_ctxp->in_cleanup_point_expr = old_in_cleanup_point_expr;

  for (iter = gsi_start (body_sequence); !gsi_end_p (iter); )
    {
      gimple *wce = gsi_stmt (iter);

      if (gimple_code (wce) == GIMPLE_WITH_CLEANUP_EXPR)
	{
	  if (gsi_one_before_end_p (iter))
	    {
	      /* The WCE is the last statement: nothing follows that the
		 cleanup must protect, so just splice the cleanup in
		 place of the marker (unless it is EH-only).  */
              /* Note that gsi_insert_seq_before and gsi_remove do not
                 scan operands, unlike some other sequence mutators.  */
	      if (!gimple_wce_cleanup_eh_only (wce))
		gsi_insert_seq_before_without_update (&iter,
						      gimple_wce_cleanup (wce),
						      GSI_SAME_STMT);
	      gsi_remove (&iter, true);
	      break;
	    }
	  else
	    {
	      /* Statements follow the WCE: wrap them in a GIMPLE_TRY so
		 the cleanup runs after them (FINALLY) or only on
		 exception (CATCH).  */
	      gtry *gtry;
	      gimple_seq seq;
	      enum gimple_try_flags kind;

	      if (gimple_wce_cleanup_eh_only (wce))
		kind = GIMPLE_TRY_CATCH;
	      else
		kind = GIMPLE_TRY_FINALLY;
	      seq = gsi_split_seq_after (iter);

	      gtry = gimple_build_try (seq, gimple_wce_cleanup (wce), kind);
              /* Do not use gsi_replace here, as it may scan operands.
                 We want to do a simple structural modification only.  */
	      gsi_set_stmt (&iter, gtry);
	      /* Continue scanning inside the try body for nested WCEs.  */
	      iter = gsi_start (gtry->eval);
	    }
	}
      else
	gsi_next (&iter);
    }

  gimplify_seq_add_seq (pre_p, body_sequence);
  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }
  else
    {
      *expr_p = NULL;
      return GS_ALL_DONE;
    }
}
6633 
6634 /* Insert a cleanup marker for gimplify_cleanup_point_expr.  CLEANUP
6635    is the cleanup action required.  EH_ONLY is true if the cleanup should
6636    only be executed if an exception is thrown, not on normal exit.
6637    If FORCE_UNCOND is true perform the cleanup unconditionally;  this is
6638    only valid for clobbers.  */
6639 
static void
gimple_push_cleanup (tree var, tree cleanup, bool eh_only, gimple_seq *pre_p,
		     bool force_uncond = false)
{
  gimple *wce;
  gimple_seq cleanup_stmts = NULL;

  /* Errors can result in improperly nested cleanups.  Which results in
     confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR.  */
  if (seen_error ())
    return;

  if (gimple_conditional_context ())
    {
      /* If we're in a conditional context, this is more complex.  We only
	 want to run the cleanup if we actually ran the initialization that
	 necessitates it, but we want to run it after the end of the
	 conditional context.  So we wrap the try/finally around the
	 condition and use a flag to determine whether or not to actually
	 run the destructor.  Thus

	   test ? f(A()) : 0

	 becomes (approximately)

	   flag = 0;
	   try {
	     if (test) { A::A(temp); flag = 1; val = f(temp); }
	     else { val = 0; }
	   } finally {
	     if (flag) A::~A(temp);
	   }
	   val
      */
      if (force_uncond)
	{
	  /* Unconditional cleanup (only valid for clobbers, per the
	     function comment): no flag is needed, just queue the WCE on
	     the conditional-cleanups sequence.  */
	  gimplify_stmt (&cleanup, &cleanup_stmts);
	  wce = gimple_build_wce (cleanup_stmts);
	  gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
	}
      else
	{
	  /* Guard the cleanup with a flag that is set only when the
	     initialization actually runs (FTRUE goes into *PRE_P, i.e.
	     the conditional arm being gimplified).  */
	  tree flag = create_tmp_var (boolean_type_node, "cleanup");
	  gassign *ffalse = gimple_build_assign (flag, boolean_false_node);
	  gassign *ftrue = gimple_build_assign (flag, boolean_true_node);

	  cleanup = build3 (COND_EXPR, void_type_node, flag, cleanup, NULL);
	  gimplify_stmt (&cleanup, &cleanup_stmts);
	  wce = gimple_build_wce (cleanup_stmts);

	  gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, ffalse);
	  gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
	  gimplify_seq_add_stmt (pre_p, ftrue);

	  /* Because of this manipulation, and the EH edges that jump
	     threading cannot redirect, the temporary (VAR) will appear
	     to be used uninitialized.  Don't warn.  */
	  TREE_NO_WARNING (var) = 1;
	}
    }
  else
    {
      /* Unconditional context: emit the WCE directly; it is resolved
	 into a try/finally (or try/catch if EH_ONLY) by
	 gimplify_cleanup_point_expr.  */
      gimplify_stmt (&cleanup, &cleanup_stmts);
      wce = gimple_build_wce (cleanup_stmts);
      gimple_wce_set_cleanup_eh_only (wce, eh_only);
      gimplify_seq_add_stmt (pre_p, wce);
    }
}
6708 
6709 /* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR.  */
6710 
static enum gimplify_status
gimplify_target_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  tree targ = *expr_p;
  tree temp = TARGET_EXPR_SLOT (targ);
  tree init = TARGET_EXPR_INITIAL (targ);
  enum gimplify_status ret;

  bool unpoison_empty_seq = false;
  gimple_stmt_iterator unpoison_it;

  if (init)
    {
      tree cleanup = NULL_TREE;

      /* TARGET_EXPR temps aren't part of the enclosing block, so add it
	 to the temps list.  Handle also variable length TARGET_EXPRs.  */
      if (!poly_int_tree_p (DECL_SIZE (temp)))
	{
	  if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (temp)))
	    gimplify_type_sizes (TREE_TYPE (temp), pre_p);
	  gimplify_vla_decl (temp, pre_p);
	}
      else
	{
	  /* Save location where we need to place unpoisoning.  It's possible
	     that a variable will be converted to needs_to_live_in_memory.  */
	  unpoison_it = gsi_last (*pre_p);
	  unpoison_empty_seq = gsi_end_p (unpoison_it);

	  gimple_add_tmp_var (temp);
	}

      /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
	 expression is supposed to initialize the slot.  */
      if (VOID_TYPE_P (TREE_TYPE (init)))
	ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
      else
	{
	  /* Otherwise wrap the initializer in "temp = init" and gimplify
	     that; the INIT_EXPR node itself is freed afterwards since
	     gimplification consumed it.  */
	  tree init_expr = build2 (INIT_EXPR, void_type_node, temp, init);
	  init = init_expr;
	  ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
	  init = NULL;
	  ggc_free (init_expr);
	}
      if (ret == GS_ERROR)
	{
	  /* PR c++/28266 Make sure this is expanded only once. */
	  TARGET_EXPR_INITIAL (targ) = NULL_TREE;
	  return GS_ERROR;
	}
      if (init)
	gimplify_and_add (init, pre_p);

      /* If needed, push the cleanup for the temp.  */
      if (TARGET_EXPR_CLEANUP (targ))
	{
	  if (CLEANUP_EH_ONLY (targ))
	    gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ),
				 CLEANUP_EH_ONLY (targ), pre_p);
	  else
	    /* Defer pushing the non-EH cleanup so it ends up innermost,
	       after the clobber/asan cleanups pushed below.  */
	    cleanup = TARGET_EXPR_CLEANUP (targ);
	}

      /* Add a clobber for the temporary going out of scope, like
	 gimplify_bind_expr.  */
      if (gimplify_ctxp->in_cleanup_point_expr
	  && needs_to_live_in_memory (temp))
	{
	  if (flag_stack_reuse == SR_ALL)
	    {
	      tree clobber = build_clobber (TREE_TYPE (temp));
	      clobber = build2 (MODIFY_EXPR, TREE_TYPE (temp), temp, clobber);
	      /* Force the clobber even in conditional context; it is
		 harmless if the initialization did not run.  */
	      gimple_push_cleanup (temp, clobber, false, pre_p, true);
	    }
	  if (asan_poisoned_variables
	      && DECL_ALIGN (temp) <= MAX_SUPPORTED_STACK_ALIGNMENT
	      && !TREE_STATIC (temp)
	      && dbg_cnt (asan_use_after_scope)
	      && !gimplify_omp_ctxp)
	    {
	      tree asan_cleanup = build_asan_poison_call_expr (temp);
	      if (asan_cleanup)
		{
		  if (unpoison_empty_seq)
		    unpoison_it = gsi_start (*pre_p);

		  /* Unpoison at the point the temp comes into scope,
		     poison again via the pushed cleanup on exit.  */
		  asan_poison_variable (temp, false, &unpoison_it,
					unpoison_empty_seq);
		  gimple_push_cleanup (temp, asan_cleanup, false, pre_p);
		}
	    }
	}
      if (cleanup)
	gimple_push_cleanup (temp, cleanup, false, pre_p);

      /* Only expand this once.  */
      TREE_OPERAND (targ, 3) = init;
      TARGET_EXPR_INITIAL (targ) = NULL_TREE;
    }
  else
    /* We should have expanded this before.  */
    gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp));

  *expr_p = temp;
  return GS_OK;
}
6818 
6819 /* Gimplification of expression trees.  */
6820 
6821 /* Gimplify an expression which appears at statement context.  The
6822    corresponding GIMPLE statements are added to *SEQ_P.  If *SEQ_P is
6823    NULL, a new sequence is allocated.
6824 
6825    Return true if we actually added a statement to the queue.  */
6826 
6827 bool
gimplify_stmt(tree * stmt_p,gimple_seq * seq_p)6828 gimplify_stmt (tree *stmt_p, gimple_seq *seq_p)
6829 {
6830   gimple_seq_node last;
6831 
6832   last = gimple_seq_last (*seq_p);
6833   gimplify_expr (stmt_p, seq_p, NULL, is_gimple_stmt, fb_none);
6834   return last != gimple_seq_last (*seq_p);
6835 }
6836 
6837 /* Add FIRSTPRIVATE entries for DECL in the OpenMP the surrounding parallels
6838    to CTX.  If entries already exist, force them to be some flavor of private.
6839    If there is no enclosing parallel, do nothing.  */
6840 
6841 void
omp_firstprivatize_variable(struct gimplify_omp_ctx * ctx,tree decl)6842 omp_firstprivatize_variable (struct gimplify_omp_ctx *ctx, tree decl)
6843 {
6844   splay_tree_node n;
6845 
6846   if (decl == NULL || !DECL_P (decl) || ctx->region_type == ORT_NONE)
6847     return;
6848 
6849   do
6850     {
6851       n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
6852       if (n != NULL)
6853 	{
6854 	  if (n->value & GOVD_SHARED)
6855 	    n->value = GOVD_FIRSTPRIVATE | (n->value & GOVD_SEEN);
6856 	  else if (n->value & GOVD_MAP)
6857 	    n->value |= GOVD_MAP_TO_ONLY;
6858 	  else
6859 	    return;
6860 	}
6861       else if ((ctx->region_type & ORT_TARGET) != 0)
6862 	{
6863 	  if (ctx->defaultmap[GDMK_SCALAR] & GOVD_FIRSTPRIVATE)
6864 	    omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
6865 	  else
6866 	    omp_add_variable (ctx, decl, GOVD_MAP | GOVD_MAP_TO_ONLY);
6867 	}
6868       else if (ctx->region_type != ORT_WORKSHARE
6869 	       && ctx->region_type != ORT_TASKGROUP
6870 	       && ctx->region_type != ORT_SIMD
6871 	       && ctx->region_type != ORT_ACC
6872 	       && !(ctx->region_type & ORT_TARGET_DATA))
6873 	omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
6874 
6875       ctx = ctx->outer_context;
6876     }
6877   while (ctx);
6878 }
6879 
6880 /* Similarly for each of the type sizes of TYPE.  */
6881 
6882 static void
omp_firstprivatize_type_sizes(struct gimplify_omp_ctx * ctx,tree type)6883 omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
6884 {
6885   if (type == NULL || type == error_mark_node)
6886     return;
6887   type = TYPE_MAIN_VARIANT (type);
6888 
6889   if (ctx->privatized_types->add (type))
6890     return;
6891 
6892   switch (TREE_CODE (type))
6893     {
6894     case INTEGER_TYPE:
6895     case ENUMERAL_TYPE:
6896     case BOOLEAN_TYPE:
6897     case REAL_TYPE:
6898     case FIXED_POINT_TYPE:
6899       omp_firstprivatize_variable (ctx, TYPE_MIN_VALUE (type));
6900       omp_firstprivatize_variable (ctx, TYPE_MAX_VALUE (type));
6901       break;
6902 
6903     case ARRAY_TYPE:
6904       omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
6905       omp_firstprivatize_type_sizes (ctx, TYPE_DOMAIN (type));
6906       break;
6907 
6908     case RECORD_TYPE:
6909     case UNION_TYPE:
6910     case QUAL_UNION_TYPE:
6911       {
6912 	tree field;
6913 	for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
6914 	  if (TREE_CODE (field) == FIELD_DECL)
6915 	    {
6916 	      omp_firstprivatize_variable (ctx, DECL_FIELD_OFFSET (field));
6917 	      omp_firstprivatize_type_sizes (ctx, TREE_TYPE (field));
6918 	    }
6919       }
6920       break;
6921 
6922     case POINTER_TYPE:
6923     case REFERENCE_TYPE:
6924       omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
6925       break;
6926 
6927     default:
6928       break;
6929     }
6930 
6931   omp_firstprivatize_variable (ctx, TYPE_SIZE (type));
6932   omp_firstprivatize_variable (ctx, TYPE_SIZE_UNIT (type));
6933   lang_hooks.types.omp_firstprivatize_type_sizes (ctx, type);
6934 }
6935 
6936 /* Add an entry for DECL in the OMP context CTX with FLAGS.  */
6937 
6938 static void
omp_add_variable(struct gimplify_omp_ctx * ctx,tree decl,unsigned int flags)6939 omp_add_variable (struct gimplify_omp_ctx *ctx, tree decl, unsigned int flags)
6940 {
6941   splay_tree_node n;
6942   unsigned int nflags;
6943   tree t;
6944 
6945   if (error_operand_p (decl) || ctx->region_type == ORT_NONE)
6946     return;
6947 
6948   /* Never elide decls whose type has TREE_ADDRESSABLE set.  This means
6949      there are constructors involved somewhere.  Exception is a shared clause,
6950      there is nothing privatized in that case.  */
6951   if ((flags & GOVD_SHARED) == 0
6952       && (TREE_ADDRESSABLE (TREE_TYPE (decl))
6953 	  || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl))))
6954     flags |= GOVD_SEEN;
6955 
6956   n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
6957   if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
6958     {
6959       /* We shouldn't be re-adding the decl with the same data
6960 	 sharing class.  */
6961       gcc_assert ((n->value & GOVD_DATA_SHARE_CLASS & flags) == 0);
6962       nflags = n->value | flags;
6963       /* The only combination of data sharing classes we should see is
6964 	 FIRSTPRIVATE and LASTPRIVATE.  However, OpenACC permits
6965 	 reduction variables to be used in data sharing clauses.  */
6966       gcc_assert ((ctx->region_type & ORT_ACC) != 0
6967 		  || ((nflags & GOVD_DATA_SHARE_CLASS)
6968 		      == (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE))
6969 		  || (flags & GOVD_DATA_SHARE_CLASS) == 0);
6970       n->value = nflags;
6971       return;
6972     }
6973 
6974   /* When adding a variable-sized variable, we have to handle all sorts
6975      of additional bits of data: the pointer replacement variable, and
6976      the parameters of the type.  */
6977   if (DECL_SIZE (decl) && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
6978     {
6979       /* Add the pointer replacement variable as PRIVATE if the variable
6980 	 replacement is private, else FIRSTPRIVATE since we'll need the
6981 	 address of the original variable either for SHARED, or for the
6982 	 copy into or out of the context.  */
6983       if (!(flags & GOVD_LOCAL) && ctx->region_type != ORT_TASKGROUP)
6984 	{
6985 	  if (flags & GOVD_MAP)
6986 	    nflags = GOVD_MAP | GOVD_MAP_TO_ONLY | GOVD_EXPLICIT;
6987 	  else if (flags & GOVD_PRIVATE)
6988 	    nflags = GOVD_PRIVATE;
6989 	  else if (((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
6990 		    && (flags & GOVD_FIRSTPRIVATE))
6991 		   || (ctx->region_type == ORT_TARGET_DATA
6992 		       && (flags & GOVD_DATA_SHARE_CLASS) == 0))
6993 	    nflags = GOVD_PRIVATE | GOVD_EXPLICIT;
6994 	  else
6995 	    nflags = GOVD_FIRSTPRIVATE;
6996 	  nflags |= flags & GOVD_SEEN;
6997 	  t = DECL_VALUE_EXPR (decl);
6998 	  gcc_assert (TREE_CODE (t) == INDIRECT_REF);
6999 	  t = TREE_OPERAND (t, 0);
7000 	  gcc_assert (DECL_P (t));
7001 	  omp_add_variable (ctx, t, nflags);
7002 	}
7003 
7004       /* Add all of the variable and type parameters (which should have
7005 	 been gimplified to a formal temporary) as FIRSTPRIVATE.  */
7006       omp_firstprivatize_variable (ctx, DECL_SIZE_UNIT (decl));
7007       omp_firstprivatize_variable (ctx, DECL_SIZE (decl));
7008       omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
7009 
7010       /* The variable-sized variable itself is never SHARED, only some form
7011 	 of PRIVATE.  The sharing would take place via the pointer variable
7012 	 which we remapped above.  */
7013       if (flags & GOVD_SHARED)
7014 	flags = GOVD_SHARED | GOVD_DEBUG_PRIVATE
7015 		| (flags & (GOVD_SEEN | GOVD_EXPLICIT));
7016 
7017       /* We're going to make use of the TYPE_SIZE_UNIT at least in the
7018 	 alloca statement we generate for the variable, so make sure it
7019 	 is available.  This isn't automatically needed for the SHARED
7020 	 case, since we won't be allocating local storage then.
7021 	 For local variables TYPE_SIZE_UNIT might not be gimplified yet,
7022 	 in this case omp_notice_variable will be called later
7023 	 on when it is gimplified.  */
7024       else if (! (flags & (GOVD_LOCAL | GOVD_MAP))
7025 	       && DECL_P (TYPE_SIZE_UNIT (TREE_TYPE (decl))))
7026 	omp_notice_variable (ctx, TYPE_SIZE_UNIT (TREE_TYPE (decl)), true);
7027     }
7028   else if ((flags & (GOVD_MAP | GOVD_LOCAL)) == 0
7029 	   && lang_hooks.decls.omp_privatize_by_reference (decl))
7030     {
7031       omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
7032 
7033       /* Similar to the direct variable sized case above, we'll need the
7034 	 size of references being privatized.  */
7035       if ((flags & GOVD_SHARED) == 0)
7036 	{
7037 	  t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
7038 	  if (DECL_P (t))
7039 	    omp_notice_variable (ctx, t, true);
7040 	}
7041     }
7042 
7043   if (n != NULL)
7044     n->value |= flags;
7045   else
7046     splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags);
7047 
7048   /* For reductions clauses in OpenACC loop directives, by default create a
7049      copy clause on the enclosing parallel construct for carrying back the
7050      results.  */
7051   if (ctx->region_type == ORT_ACC && (flags & GOVD_REDUCTION))
7052     {
7053       struct gimplify_omp_ctx *outer_ctx = ctx->outer_context;
7054       while (outer_ctx)
7055 	{
7056 	  n = splay_tree_lookup (outer_ctx->variables, (splay_tree_key)decl);
7057 	  if (n != NULL)
7058 	    {
7059 	      /* Ignore local variables and explicitly declared clauses.  */
7060 	      if (n->value & (GOVD_LOCAL | GOVD_EXPLICIT))
7061 		break;
7062 	      else if (outer_ctx->region_type == ORT_ACC_KERNELS)
7063 		{
7064 		  /* According to the OpenACC spec, such a reduction variable
7065 		     should already have a copy map on a kernels construct,
7066 		     verify that here.  */
7067 		  gcc_assert (!(n->value & GOVD_FIRSTPRIVATE)
7068 			      && (n->value & GOVD_MAP));
7069 		}
7070 	      else if (outer_ctx->region_type == ORT_ACC_PARALLEL)
7071 		{
7072 		  /* Remove firstprivate and make it a copy map.  */
7073 		  n->value &= ~GOVD_FIRSTPRIVATE;
7074 		  n->value |= GOVD_MAP;
7075 		}
7076 	    }
7077 	  else if (outer_ctx->region_type == ORT_ACC_PARALLEL)
7078 	    {
7079 	      splay_tree_insert (outer_ctx->variables, (splay_tree_key)decl,
7080 				 GOVD_MAP | GOVD_SEEN);
7081 	      break;
7082 	    }
7083 	  outer_ctx = outer_ctx->outer_context;
7084 	}
7085     }
7086 }
7087 
7088 /* Notice a threadprivate variable DECL used in OMP context CTX.
7089    This just prints out diagnostics about threadprivate variable uses
7090    in untied tasks.  If DECL2 is non-NULL, prevent this warning
7091    on that variable.  */
7092 
7093 static bool
omp_notice_threadprivate_variable(struct gimplify_omp_ctx * ctx,tree decl,tree decl2)7094 omp_notice_threadprivate_variable (struct gimplify_omp_ctx *ctx, tree decl,
7095 				   tree decl2)
7096 {
7097   splay_tree_node n;
7098   struct gimplify_omp_ctx *octx;
7099 
7100   for (octx = ctx; octx; octx = octx->outer_context)
7101     if ((octx->region_type & ORT_TARGET) != 0
7102 	|| octx->order_concurrent)
7103       {
7104 	n = splay_tree_lookup (octx->variables, (splay_tree_key)decl);
7105 	if (n == NULL)
7106 	  {
7107 	    if (octx->order_concurrent)
7108 	      {
7109 		error ("threadprivate variable %qE used in a region with"
7110 		       " %<order(concurrent)%> clause", DECL_NAME (decl));
7111 		error_at (octx->location, "enclosing region");
7112 	      }
7113 	    else
7114 	      {
7115 		error ("threadprivate variable %qE used in target region",
7116 		       DECL_NAME (decl));
7117 		error_at (octx->location, "enclosing target region");
7118 	      }
7119 	    splay_tree_insert (octx->variables, (splay_tree_key)decl, 0);
7120 	  }
7121 	if (decl2)
7122 	  splay_tree_insert (octx->variables, (splay_tree_key)decl2, 0);
7123       }
7124 
7125   if (ctx->region_type != ORT_UNTIED_TASK)
7126     return false;
7127   n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
7128   if (n == NULL)
7129     {
7130       error ("threadprivate variable %qE used in untied task",
7131 	     DECL_NAME (decl));
7132       error_at (ctx->location, "enclosing task");
7133       splay_tree_insert (ctx->variables, (splay_tree_key)decl, 0);
7134     }
7135   if (decl2)
7136     splay_tree_insert (ctx->variables, (splay_tree_key)decl2, 0);
7137   return false;
7138 }
7139 
7140 /* Return true if global var DECL is device resident.  */
7141 
7142 static bool
device_resident_p(tree decl)7143 device_resident_p (tree decl)
7144 {
7145   tree attr = lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (decl));
7146 
7147   if (!attr)
7148     return false;
7149 
7150   for (tree t = TREE_VALUE (attr); t; t = TREE_PURPOSE (t))
7151     {
7152       tree c = TREE_VALUE (t);
7153       if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DEVICE_RESIDENT)
7154 	return true;
7155     }
7156 
7157   return false;
7158 }
7159 
7160 /* Return true if DECL has an ACC DECLARE attribute.  */
7161 
7162 static bool
is_oacc_declared(tree decl)7163 is_oacc_declared (tree decl)
7164 {
7165   tree t = TREE_CODE (decl) == MEM_REF ? TREE_OPERAND (decl, 0) : decl;
7166   tree declared = lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (t));
7167   return declared != NULL_TREE;
7168 }
7169 
7170 /* Determine outer default flags for DECL mentioned in an OMP region
7171    but not declared in an enclosing clause.
7172 
7173    ??? Some compiler-generated variables (like SAVE_EXPRs) could be
7174    remapped firstprivate instead of shared.  To some extent this is
7175    addressed in omp_firstprivatize_type_sizes, but not
7176    effectively.  */
7177 
static unsigned
omp_default_clause (struct gimplify_omp_ctx *ctx, tree decl,
		    bool in_code, unsigned flags)
{
  enum omp_clause_default_kind default_kind = ctx->default_kind;
  enum omp_clause_default_kind kind;

  /* A predetermined sharing from the language hook overrides the
     region's default clause; statics living in the constant pool are
     always treated as shared.  */
  kind = lang_hooks.decls.omp_predetermined_sharing (decl);
  if (kind != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
    default_kind = kind;
  else if (VAR_P (decl) && TREE_STATIC (decl) && DECL_IN_CONSTANT_POOL (decl))
    default_kind = OMP_CLAUSE_DEFAULT_SHARED;

  switch (default_kind)
    {
    case OMP_CLAUSE_DEFAULT_NONE:
      {
	const char *rtype;

	/* Name the enclosing construct kind for the diagnostic.
	   ORT_TASKLOOP is tested with a full mask because it shares
	   bits with ORT_TASK.  */
	if (ctx->region_type & ORT_PARALLEL)
	  rtype = "parallel";
	else if ((ctx->region_type & ORT_TASKLOOP) == ORT_TASKLOOP)
	  rtype = "taskloop";
	else if (ctx->region_type & ORT_TASK)
	  rtype = "task";
	else if (ctx->region_type & ORT_TEAMS)
	  rtype = "teams";
	else
	  gcc_unreachable ();

	error ("%qE not specified in enclosing %qs",
	       DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rtype);
	error_at (ctx->location, "enclosing %qs", rtype);
      }
      /* FALLTHRU */
    case OMP_CLAUSE_DEFAULT_SHARED:
      flags |= GOVD_SHARED;
      break;
    case OMP_CLAUSE_DEFAULT_PRIVATE:
      flags |= GOVD_PRIVATE;
      break;
    case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE:
      flags |= GOVD_FIRSTPRIVATE;
      break;
    case OMP_CLAUSE_DEFAULT_UNSPECIFIED:
      /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED.  */
      gcc_assert ((ctx->region_type & ORT_TASK) != 0);
      if (struct gimplify_omp_ctx *octx = ctx->outer_context)
	{
	  /* Record the use in the outer context first, then scan
	     outward for a context that decides the sharing.  */
	  omp_notice_variable (octx, decl, in_code);
	  for (; octx; octx = octx->outer_context)
	    {
	      splay_tree_node n2;

	      n2 = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
	      /* Skip target contexts where no data-sharing class has
		 been recorded for the decl.  */
	      if ((octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)) != 0
		  && (n2 == NULL || (n2->value & GOVD_DATA_SHARE_CLASS) == 0))
		continue;
	      /* Non-shared in an outer context => firstprivate here.  */
	      if (n2 && (n2->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED)
		{
		  flags |= GOVD_FIRSTPRIVATE;
		  goto found_outer;
		}
	      if ((octx->region_type & (ORT_PARALLEL | ORT_TEAMS)) != 0)
		{
		  flags |= GOVD_SHARED;
		  goto found_outer;
		}
	    }
	}

      /* No outer context decided: parameters and function-local
	 variables become firstprivate, everything else shared.  */
      if (TREE_CODE (decl) == PARM_DECL
	  || (!is_global_var (decl)
	      && DECL_CONTEXT (decl) == current_function_decl))
	flags |= GOVD_FIRSTPRIVATE;
      else
	flags |= GOVD_SHARED;
    found_outer:
      break;

    default:
      gcc_unreachable ();
    }

  return flags;
}
7264 
7265 
7266 /* Determine outer default flags for DECL mentioned in an OACC region
7267    but not declared in an enclosing clause.  */
7268 
static unsigned
oacc_default_clause (struct gimplify_omp_ctx *ctx, tree decl, unsigned flags)
{
  const char *rkind;
  bool on_device = false;
  bool is_private = false;
  bool declared = is_oacc_declared (decl);
  tree type = TREE_TYPE (decl);

  /* When privatizing by reference, classify by the referenced type.  */
  if (lang_hooks.decls.omp_privatize_by_reference (decl))
    type = TREE_TYPE (type);

  /* For Fortran COMMON blocks, only used variables in those blocks are
     transfered and remapped.  The block itself will have a private clause to
     avoid transfering the data twice.
     The hook evaluates to false by default.  For a variable in Fortran's COMMON
     or EQUIVALENCE block, returns 'true' (as we have shared=false) - as only
     the variables in such a COMMON/EQUIVALENCE block shall be privatized not
     the whole block.  For C++ and Fortran, it can also be true under certain
     other conditions, if DECL_HAS_VALUE_EXPR.  */
  if (RECORD_OR_UNION_TYPE_P (type))
    is_private = lang_hooks.decls.omp_disregard_value_expr (decl, false);

  /* Device-resident globals in compute regions only need a to-only
     mapping, unless they are being privatized.  */
  if ((ctx->region_type & (ORT_ACC_PARALLEL | ORT_ACC_KERNELS)) != 0
      && is_global_var (decl)
      && device_resident_p (decl)
      && !is_private)
    {
      on_device = true;
      flags |= GOVD_MAP_TO_ONLY;
    }

  switch (ctx->region_type)
    {
    case ORT_ACC_KERNELS:
      rkind = "kernels";

      if (is_private)
	flags |= GOVD_FIRSTPRIVATE;
      else if (AGGREGATE_TYPE_P (type))
	{
	  /* Aggregates default to 'present_or_copy', or 'present'.  */
	  if (ctx->default_kind != OMP_CLAUSE_DEFAULT_PRESENT)
	    flags |= GOVD_MAP;
	  else
	    flags |= GOVD_MAP | GOVD_MAP_FORCE_PRESENT;
	}
      else
	/* Scalars default to 'copy'.  */
	flags |= GOVD_MAP | GOVD_MAP_FORCE;

      break;

    case ORT_ACC_PARALLEL:
    case ORT_ACC_SERIAL:
      rkind = ctx->region_type == ORT_ACC_PARALLEL ? "parallel" : "serial";

      if (is_private)
	flags |= GOVD_FIRSTPRIVATE;
      else if (on_device || declared)
	flags |= GOVD_MAP;
      else if (AGGREGATE_TYPE_P (type))
	{
	  /* Aggregates default to 'present_or_copy', or 'present'.  */
	  if (ctx->default_kind != OMP_CLAUSE_DEFAULT_PRESENT)
	    flags |= GOVD_MAP;
	  else
	    flags |= GOVD_MAP | GOVD_MAP_FORCE_PRESENT;
	}
      else
	/* Scalars default to 'firstprivate'.  */
	flags |= GOVD_FIRSTPRIVATE;

      break;

    default:
      gcc_unreachable ();
    }

  if (DECL_ARTIFICIAL (decl))
    ; /* We can get compiler-generated decls, and should not complain
	 about them.  */
  else if (ctx->default_kind == OMP_CLAUSE_DEFAULT_NONE)
    {
      error ("%qE not specified in enclosing OpenACC %qs construct",
	     DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rkind);
      inform (ctx->location, "enclosing OpenACC %qs construct", rkind);
    }
  else if (ctx->default_kind == OMP_CLAUSE_DEFAULT_PRESENT)
    ; /* Handled above.  */
  else
    gcc_checking_assert (ctx->default_kind == OMP_CLAUSE_DEFAULT_SHARED);

  return flags;
}
7364 
7365 /* Record the fact that DECL was used within the OMP context CTX.
7366    IN_CODE is true when real code uses DECL, and false when we should
7367    merely emit default(none) errors.  Return true if DECL is going to
7368    be remapped and thus DECL shouldn't be gimplified into its
7369    DECL_VALUE_EXPR (if any).  */
7370 
static bool
omp_notice_variable (struct gimplify_omp_ctx *ctx, tree decl, bool in_code)
{
  splay_tree_node n;
  unsigned flags = in_code ? GOVD_SEEN : 0;
  bool ret = false, shared;

  if (error_operand_p (decl))
    return false;

  if (ctx->region_type == ORT_NONE)
    return lang_hooks.decls.omp_disregard_value_expr (decl, false);

  if (is_global_var (decl))
    {
      /* Threadprivate variables are predetermined.  */
      if (DECL_THREAD_LOCAL_P (decl))
	return omp_notice_threadprivate_variable (ctx, decl, NULL_TREE);

      if (DECL_HAS_VALUE_EXPR_P (decl))
	{
	  if (ctx->region_type & ORT_ACC)
	    /* For OpenACC, defer expansion of value to avoid transfering
	       privatized common block data instead of im-/explicitly transfered
	       variables which are in common blocks.  */
	    ;
	  else
	    {
	      /* If the value expr is based on a threadprivate variable,
		 diagnose through that variable instead.  */
	      tree value = get_base_address (DECL_VALUE_EXPR (decl));

	      if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value))
		return omp_notice_threadprivate_variable (ctx, decl, value);
	    }
	}

      /* In an OpenACC 'routine' function (no outer gimplify context),
	 global variables need a 'declare' directive; with a 'link'
	 clause they cannot be used at all.  */
      if (gimplify_omp_ctxp->outer_context == NULL
	  && VAR_P (decl)
	  && oacc_get_fn_attrib (current_function_decl))
	{
	  location_t loc = DECL_SOURCE_LOCATION (decl);

	  if (lookup_attribute ("omp declare target link",
				DECL_ATTRIBUTES (decl)))
	    {
	      error_at (loc,
			"%qE with %<link%> clause used in %<routine%> function",
			DECL_NAME (decl));
	      return false;
	    }
	  else if (!lookup_attribute ("omp declare target",
				      DECL_ATTRIBUTES (decl)))
	    {
	      error_at (loc,
			"%qE requires a %<declare%> directive for use "
			"in a %<routine%> function", DECL_NAME (decl));
	      return false;
	    }
	}
    }

  n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
  if ((ctx->region_type & ORT_TARGET) != 0)
    {
      if (ctx->region_type & ORT_ACC)
	/* For OpenACC, as remarked above, defer expansion.  */
	shared = false;
      else
	shared = true;

      ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
      if (n == NULL)
	{
	  /* DECL is not yet recorded in this target context; compute
	     the implicit flags it should get.  */
	  unsigned nflags = flags;
	  if ((ctx->region_type & ORT_ACC) == 0)
	    {
	      /* OpenMP target: apply the defaultmap classification,
		 unless the decl is an offloadable ("declare target")
		 global with no overriding outer data-sharing.  */
	      bool is_declare_target = false;
	      if (is_global_var (decl)
		  && varpool_node::get_create (decl)->offloadable)
		{
		  struct gimplify_omp_ctx *octx;
		  for (octx = ctx->outer_context;
		       octx; octx = octx->outer_context)
		    {
		      n = splay_tree_lookup (octx->variables,
					     (splay_tree_key)decl);
		      if (n
			  && (n->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED
			  && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
			break;
		    }
		  is_declare_target = octx == NULL;
		}
	      if (!is_declare_target)
		{
		  /* Classify the decl as pointer, scalar or aggregate
		     for defaultmap purposes.  */
		  int gdmk;
		  if (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
		      || (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
			  && (TREE_CODE (TREE_TYPE (TREE_TYPE (decl)))
			      == POINTER_TYPE)))
		    gdmk = GDMK_POINTER;
		  else if (lang_hooks.decls.omp_scalar_p (decl))
		    gdmk = GDMK_SCALAR;
		  else
		    gdmk = GDMK_AGGREGATE;
		  if (ctx->defaultmap[gdmk] == 0)
		    {
		      /* defaultmap(none) for this category.  */
		      tree d = lang_hooks.decls.omp_report_decl (decl);
		      error ("%qE not specified in enclosing %<target%>",
			     DECL_NAME (d));
		      error_at (ctx->location, "enclosing %<target%>");
		    }
		  else if (ctx->defaultmap[gdmk]
			   & (GOVD_MAP_0LEN_ARRAY | GOVD_FIRSTPRIVATE))
		    nflags |= ctx->defaultmap[gdmk];
		  else
		    {
		      gcc_assert (ctx->defaultmap[gdmk] & GOVD_MAP);
		      nflags |= ctx->defaultmap[gdmk] & ~GOVD_MAP;
		    }
		}
	    }

	  struct gimplify_omp_ctx *octx = ctx->outer_context;
	  if ((ctx->region_type & ORT_ACC) && octx)
	    {
	      /* Look in outer OpenACC contexts, to see if there's a
		 data attribute for this variable.  */
	      omp_notice_variable (octx, decl, in_code);

	      for (; octx; octx = octx->outer_context)
		{
		  if (!(octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)))
		    break;
		  splay_tree_node n2
		    = splay_tree_lookup (octx->variables,
					 (splay_tree_key) decl);
		  if (n2)
		    {
		      if (octx->region_type == ORT_ACC_HOST_DATA)
		        error ("variable %qE declared in enclosing "
			       "%<host_data%> region", DECL_NAME (decl));
		      nflags |= GOVD_MAP;
		      if (octx->region_type == ORT_ACC_DATA
			  && (n2->value & GOVD_MAP_0LEN_ARRAY))
			nflags |= GOVD_MAP_0LEN_ARRAY;
		      goto found_outer;
		    }
		}
	    }

	  /* If the flags above only added map modifiers (or nothing),
	     fall back to the default mapping for the region kind.  */
	  if ((nflags & ~(GOVD_MAP_TO_ONLY | GOVD_MAP_FROM_ONLY
			  | GOVD_MAP_ALLOC_ONLY)) == flags)
	    {
	      tree type = TREE_TYPE (decl);

	      if (gimplify_omp_ctxp->target_firstprivatize_array_bases
		  && lang_hooks.decls.omp_privatize_by_reference (decl))
		type = TREE_TYPE (type);
	      if (!lang_hooks.types.omp_mappable_type (type))
		{
		  error ("%qD referenced in target region does not have "
			 "a mappable type", decl);
		  nflags |= GOVD_MAP | GOVD_EXPLICIT;
		}
	      else
		{
		  if ((ctx->region_type & ORT_ACC) != 0)
		    nflags = oacc_default_clause (ctx, decl, flags);
		  else
		    nflags |= GOVD_MAP;
		}
	    }
	found_outer:
	  omp_add_variable (ctx, decl, nflags);
	}
      else
	{
	  /* If nothing changed, there's nothing left to do.  */
	  if ((n->value & flags) == flags)
	    return ret;
	  flags |= n->value;
	  n->value = flags;
	}
      goto do_outer;
    }

  if (n == NULL)
    {
      /* Region kinds without their own data-sharing defaults just
	 forward the notice to the outer context.  */
      if (ctx->region_type == ORT_WORKSHARE
	  || ctx->region_type == ORT_TASKGROUP
	  || ctx->region_type == ORT_SIMD
	  || ctx->region_type == ORT_ACC
	  || (ctx->region_type & ORT_TARGET_DATA) != 0)
	goto do_outer;

      flags = omp_default_clause (ctx, decl, in_code, flags);

      if ((flags & GOVD_PRIVATE)
	  && lang_hooks.decls.omp_private_outer_ref (decl))
	flags |= GOVD_PRIVATE_OUTER_REF;

      omp_add_variable (ctx, decl, flags);

      shared = (flags & GOVD_SHARED) != 0;
      ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
      goto do_outer;
    }

  /* Don't mark as GOVD_SEEN addressable temporaries seen only in simd
     lb, b or incr expressions, those shouldn't be turned into simd arrays.  */
  if (ctx->region_type == ORT_SIMD
      && ctx->in_for_exprs
      && ((n->value & (GOVD_PRIVATE | GOVD_SEEN | GOVD_EXPLICIT))
	  == GOVD_PRIVATE))
    flags &= ~GOVD_SEEN;

  /* First time a non-local use is seen: also mark the associated
     pointer replacement variable or the reference's size as seen.  */
  if ((n->value & (GOVD_SEEN | GOVD_LOCAL)) == 0
      && (flags & (GOVD_SEEN | GOVD_LOCAL)) == GOVD_SEEN
      && DECL_SIZE (decl))
    {
      if (TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
	{
	  /* Variable-sized decl: mark the remapped pointer variable
	     (from its DECL_VALUE_EXPR) as seen as well.  */
	  splay_tree_node n2;
	  tree t = DECL_VALUE_EXPR (decl);
	  gcc_assert (TREE_CODE (t) == INDIRECT_REF);
	  t = TREE_OPERAND (t, 0);
	  gcc_assert (DECL_P (t));
	  n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
	  n2->value |= GOVD_SEEN;
	}
      else if (lang_hooks.decls.omp_privatize_by_reference (decl)
	       && TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)))
	       && (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))))
		   != INTEGER_CST))
	{
	  /* Reference with non-constant referenced size: notice that
	     size decl too if already recorded in this context.  */
	  splay_tree_node n2;
	  tree t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
	  gcc_assert (DECL_P (t));
	  n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
	  if (n2)
	    omp_notice_variable (ctx, t, true);
	}
    }

  if (ctx->region_type & ORT_ACC)
    /* For OpenACC, as remarked above, defer expansion.  */
    shared = false;
  else
    shared = ((flags | n->value) & GOVD_SHARED) != 0;
  ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);

  /* If nothing changed, there's nothing left to do.  */
  if ((n->value & flags) == flags)
    return ret;
  flags |= n->value;
  n->value = flags;

 do_outer:
  /* If the variable is private in the current context, then we don't
     need to propagate anything to an outer context.  */
  if ((flags & GOVD_PRIVATE) && !(flags & GOVD_PRIVATE_OUTER_REF))
    return ret;
  if ((flags & (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
      == (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
    return ret;
  if ((flags & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
		| GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
      == (GOVD_LASTPRIVATE | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
    return ret;
  if (ctx->outer_context
      && omp_notice_variable (ctx->outer_context, decl, in_code))
    return true;
  return ret;
}
7645 
7646 /* Verify that DECL is private within CTX.  If there's specific information
7647    to the contrary in the innermost scope, generate an error.  */
7648 
static bool
omp_is_private (struct gimplify_omp_ctx *ctx, tree decl, int simd)
{
  splay_tree_node n;

  n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
  if (n != NULL)
    {
      if (n->value & GOVD_SHARED)
	{
	  if (ctx == gimplify_omp_ctxp)
	    {
	      /* Diagnose once, then force the iteration variable
		 private so the error is not repeated.  */
	      if (simd)
		error ("iteration variable %qE is predetermined linear",
		       DECL_NAME (decl));
	      else
		error ("iteration variable %qE should be private",
		       DECL_NAME (decl));
	      n->value = GOVD_PRIVATE;
	      return true;
	    }
	  else
	    return false;
	}
      else if ((n->value & GOVD_EXPLICIT) != 0
	       && (ctx == gimplify_omp_ctxp
		   || (ctx->region_type == ORT_COMBINED_PARALLEL
		       && gimplify_omp_ctxp->outer_context == ctx)))
	{
	  /* Explicit clauses incompatible with an iteration variable.
	     Linear is allowed when SIMD == 1.  */
	  if ((n->value & GOVD_FIRSTPRIVATE) != 0)
	    error ("iteration variable %qE should not be firstprivate",
		   DECL_NAME (decl));
	  else if ((n->value & GOVD_REDUCTION) != 0)
	    error ("iteration variable %qE should not be reduction",
		   DECL_NAME (decl));
	  else if (simd != 1 && (n->value & GOVD_LINEAR) != 0)
	    error ("iteration variable %qE should not be linear",
		   DECL_NAME (decl));
	}
      return (ctx == gimplify_omp_ctxp
	      || (ctx->region_type == ORT_COMBINED_PARALLEL
		  && gimplify_omp_ctxp->outer_context == ctx));
    }

  /* Not recorded in this context; only recurse outward for region
     kinds that do not establish their own data sharing.  */
  if (ctx->region_type != ORT_WORKSHARE
      && ctx->region_type != ORT_TASKGROUP
      && ctx->region_type != ORT_SIMD
      && ctx->region_type != ORT_ACC)
    return false;
  else if (ctx->outer_context)
    return omp_is_private (ctx->outer_context, decl, simd);
  return false;
}
7702 
7703 /* Return true if DECL is private within a parallel region
7704    that binds to the current construct's context or in parallel
7705    region's REDUCTION clause.  */
7706 
static bool
omp_check_private (struct gimplify_omp_ctx *ctx, tree decl, bool copyprivate)
{
  splay_tree_node n;

  /* Walk outward from CTX until a context records DECL or we run out
     of contexts.  */
  do
    {
      ctx = ctx->outer_context;
      if (ctx == NULL)
	{
	  if (is_global_var (decl))
	    return false;

	  /* References might be private, but might be shared too,
	     when checking for copyprivate, assume they might be
	     private, otherwise assume they might be shared.  */
	  if (copyprivate)
	    return true;

	  if (lang_hooks.decls.omp_privatize_by_reference (decl))
	    return false;

	  /* Treat C++ privatized non-static data members outside
	     of the privatization the same.  */
	  if (omp_member_access_dummy_var (decl))
	    return false;

	  return true;
	}

      n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);

      /* Target contexts with no data-sharing class recorded: mapped
	 decls are not private; otherwise keep looking outward.  */
      if ((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
	  && (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0))
	{
	  if ((ctx->region_type & ORT_TARGET_DATA) != 0
	      || n == NULL
	      || (n->value & GOVD_MAP) == 0)
	    continue;
	  return false;
	}

      if (n != NULL)
	{
	  if ((n->value & GOVD_LOCAL) != 0
	      && omp_member_access_dummy_var (decl))
	    return false;
	  /* Private iff not recorded as shared here.  */
	  return (n->value & GOVD_SHARED) == 0;
	}

      /* These region kinds don't establish data sharing; continue
	 with the next enclosing context.  */
      if (ctx->region_type == ORT_WORKSHARE
	  || ctx->region_type == ORT_TASKGROUP
	  || ctx->region_type == ORT_SIMD
	  || ctx->region_type == ORT_ACC)
	continue;

      break;
    }
  while (1);
  return false;
}
7768 
7769 /* Callback for walk_tree to find a DECL_EXPR for the given DECL.  */
7770 
7771 static tree
find_decl_expr(tree * tp,int * walk_subtrees,void * data)7772 find_decl_expr (tree *tp, int *walk_subtrees, void *data)
7773 {
7774   tree t = *tp;
7775 
7776   /* If this node has been visited, unmark it and keep looking.  */
7777   if (TREE_CODE (t) == DECL_EXPR && DECL_EXPR_DECL (t) == (tree) data)
7778     return t;
7779 
7780   if (IS_TYPE_OR_DECL_P (t))
7781     *walk_subtrees = 0;
7782   return NULL_TREE;
7783 }
7784 
7785 /* If *LIST_P contains any OpenMP depend clauses with iterators,
7786    lower all the depend clauses by populating corresponding depend
7787    array.  Returns 0 if there are no such depend clauses, or
7788    2 if all depend clauses should be removed, 1 otherwise.  */
7789 
7790 static int
gimplify_omp_depend(tree * list_p,gimple_seq * pre_p)7791 gimplify_omp_depend (tree *list_p, gimple_seq *pre_p)
7792 {
7793   tree c;
7794   gimple *g;
7795   size_t n[4] = { 0, 0, 0, 0 };
7796   bool unused[4];
7797   tree counts[4] = { NULL_TREE, NULL_TREE, NULL_TREE, NULL_TREE };
7798   tree last_iter = NULL_TREE, last_count = NULL_TREE;
7799   size_t i, j;
7800   location_t first_loc = UNKNOWN_LOCATION;
7801 
7802   for (c = *list_p; c; c = OMP_CLAUSE_CHAIN (c))
7803     if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
7804       {
7805 	switch (OMP_CLAUSE_DEPEND_KIND (c))
7806 	  {
7807 	  case OMP_CLAUSE_DEPEND_IN:
7808 	    i = 2;
7809 	    break;
7810 	  case OMP_CLAUSE_DEPEND_OUT:
7811 	  case OMP_CLAUSE_DEPEND_INOUT:
7812 	    i = 0;
7813 	    break;
7814 	  case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
7815 	    i = 1;
7816 	    break;
7817 	  case OMP_CLAUSE_DEPEND_DEPOBJ:
7818 	    i = 3;
7819 	    break;
7820 	  case OMP_CLAUSE_DEPEND_SOURCE:
7821 	  case OMP_CLAUSE_DEPEND_SINK:
7822 	    continue;
7823 	  default:
7824 	    gcc_unreachable ();
7825 	  }
7826 	tree t = OMP_CLAUSE_DECL (c);
7827 	if (first_loc == UNKNOWN_LOCATION)
7828 	  first_loc = OMP_CLAUSE_LOCATION (c);
7829 	if (TREE_CODE (t) == TREE_LIST
7830 	    && TREE_PURPOSE (t)
7831 	    && TREE_CODE (TREE_PURPOSE (t)) == TREE_VEC)
7832 	  {
7833 	    if (TREE_PURPOSE (t) != last_iter)
7834 	      {
7835 		tree tcnt = size_one_node;
7836 		for (tree it = TREE_PURPOSE (t); it; it = TREE_CHAIN (it))
7837 		  {
7838 		    if (gimplify_expr (&TREE_VEC_ELT (it, 1), pre_p, NULL,
7839 				       is_gimple_val, fb_rvalue) == GS_ERROR
7840 			|| gimplify_expr (&TREE_VEC_ELT (it, 2), pre_p, NULL,
7841 					  is_gimple_val, fb_rvalue) == GS_ERROR
7842 			|| gimplify_expr (&TREE_VEC_ELT (it, 3), pre_p, NULL,
7843 					  is_gimple_val, fb_rvalue) == GS_ERROR
7844 			|| (gimplify_expr (&TREE_VEC_ELT (it, 4), pre_p, NULL,
7845 					   is_gimple_val, fb_rvalue)
7846 			    == GS_ERROR))
7847 		      return 2;
7848 		    tree var = TREE_VEC_ELT (it, 0);
7849 		    tree begin = TREE_VEC_ELT (it, 1);
7850 		    tree end = TREE_VEC_ELT (it, 2);
7851 		    tree step = TREE_VEC_ELT (it, 3);
7852 		    tree orig_step = TREE_VEC_ELT (it, 4);
7853 		    tree type = TREE_TYPE (var);
7854 		    tree stype = TREE_TYPE (step);
7855 		    location_t loc = DECL_SOURCE_LOCATION (var);
7856 		    tree endmbegin;
7857 		    /* Compute count for this iterator as
7858 		       orig_step > 0
7859 		       ? (begin < end ? (end - begin + (step - 1)) / step : 0)
7860 		       : (begin > end ? (end - begin + (step + 1)) / step : 0)
7861 		       and compute product of those for the entire depend
7862 		       clause.  */
7863 		    if (POINTER_TYPE_P (type))
7864 		      endmbegin = fold_build2_loc (loc, POINTER_DIFF_EXPR,
7865 						   stype, end, begin);
7866 		    else
7867 		      endmbegin = fold_build2_loc (loc, MINUS_EXPR, type,
7868 						   end, begin);
7869 		    tree stepm1 = fold_build2_loc (loc, MINUS_EXPR, stype,
7870 						   step,
7871 						   build_int_cst (stype, 1));
7872 		    tree stepp1 = fold_build2_loc (loc, PLUS_EXPR, stype, step,
7873 						   build_int_cst (stype, 1));
7874 		    tree pos = fold_build2_loc (loc, PLUS_EXPR, stype,
7875 						unshare_expr (endmbegin),
7876 						stepm1);
7877 		    pos = fold_build2_loc (loc, TRUNC_DIV_EXPR, stype,
7878 					   pos, step);
7879 		    tree neg = fold_build2_loc (loc, PLUS_EXPR, stype,
7880 						endmbegin, stepp1);
7881 		    if (TYPE_UNSIGNED (stype))
7882 		      {
7883 			neg = fold_build1_loc (loc, NEGATE_EXPR, stype, neg);
7884 			step = fold_build1_loc (loc, NEGATE_EXPR, stype, step);
7885 		      }
7886 		    neg = fold_build2_loc (loc, TRUNC_DIV_EXPR, stype,
7887 					   neg, step);
7888 		    step = NULL_TREE;
7889 		    tree cond = fold_build2_loc (loc, LT_EXPR,
7890 						 boolean_type_node,
7891 						 begin, end);
7892 		    pos = fold_build3_loc (loc, COND_EXPR, stype, cond, pos,
7893 					   build_int_cst (stype, 0));
7894 		    cond = fold_build2_loc (loc, LT_EXPR, boolean_type_node,
7895 					    end, begin);
7896 		    neg = fold_build3_loc (loc, COND_EXPR, stype, cond, neg,
7897 					   build_int_cst (stype, 0));
7898 		    tree osteptype = TREE_TYPE (orig_step);
7899 		    cond = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
7900 					    orig_step,
7901 					    build_int_cst (osteptype, 0));
7902 		    tree cnt = fold_build3_loc (loc, COND_EXPR, stype,
7903 						cond, pos, neg);
7904 		    cnt = fold_convert_loc (loc, sizetype, cnt);
7905 		    if (gimplify_expr (&cnt, pre_p, NULL, is_gimple_val,
7906 				       fb_rvalue) == GS_ERROR)
7907 		      return 2;
7908 		    tcnt = size_binop_loc (loc, MULT_EXPR, tcnt, cnt);
7909 		  }
7910 		if (gimplify_expr (&tcnt, pre_p, NULL, is_gimple_val,
7911 				   fb_rvalue) == GS_ERROR)
7912 		  return 2;
7913 		last_iter = TREE_PURPOSE (t);
7914 		last_count = tcnt;
7915 	      }
7916 	    if (counts[i] == NULL_TREE)
7917 	      counts[i] = last_count;
7918 	    else
7919 	      counts[i] = size_binop_loc (OMP_CLAUSE_LOCATION (c),
7920 					  PLUS_EXPR, counts[i], last_count);
7921 	  }
7922 	else
7923 	  n[i]++;
7924       }
7925   for (i = 0; i < 4; i++)
7926     if (counts[i])
7927       break;
7928   if (i == 4)
7929     return 0;
7930 
7931   tree total = size_zero_node;
7932   for (i = 0; i < 4; i++)
7933     {
7934       unused[i] = counts[i] == NULL_TREE && n[i] == 0;
7935       if (counts[i] == NULL_TREE)
7936 	counts[i] = size_zero_node;
7937       if (n[i])
7938 	counts[i] = size_binop (PLUS_EXPR, counts[i], size_int (n[i]));
7939       if (gimplify_expr (&counts[i], pre_p, NULL, is_gimple_val,
7940 			 fb_rvalue) == GS_ERROR)
7941 	return 2;
7942       total = size_binop (PLUS_EXPR, total, counts[i]);
7943     }
7944 
7945   if (gimplify_expr (&total, pre_p, NULL, is_gimple_val, fb_rvalue)
7946       == GS_ERROR)
7947     return 2;
7948   bool is_old = unused[1] && unused[3];
7949   tree totalpx = size_binop (PLUS_EXPR, unshare_expr (total),
7950 			     size_int (is_old ? 1 : 4));
7951   tree type = build_array_type (ptr_type_node, build_index_type (totalpx));
7952   tree array = create_tmp_var_raw (type);
7953   TREE_ADDRESSABLE (array) = 1;
7954   if (!poly_int_tree_p (totalpx))
7955     {
7956       if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (array)))
7957 	gimplify_type_sizes (TREE_TYPE (array), pre_p);
7958       if (gimplify_omp_ctxp)
7959 	{
7960 	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
7961 	  while (ctx
7962 		 && (ctx->region_type == ORT_WORKSHARE
7963 		     || ctx->region_type == ORT_TASKGROUP
7964 		     || ctx->region_type == ORT_SIMD
7965 		     || ctx->region_type == ORT_ACC))
7966 	    ctx = ctx->outer_context;
7967 	  if (ctx)
7968 	    omp_add_variable (ctx, array, GOVD_LOCAL | GOVD_SEEN);
7969 	}
7970       gimplify_vla_decl (array, pre_p);
7971     }
7972   else
7973     gimple_add_tmp_var (array);
7974   tree r = build4 (ARRAY_REF, ptr_type_node, array, size_int (0), NULL_TREE,
7975 		   NULL_TREE);
7976   tree tem;
7977   if (!is_old)
7978     {
7979       tem = build2 (MODIFY_EXPR, void_type_node, r,
7980 		    build_int_cst (ptr_type_node, 0));
7981       gimplify_and_add (tem, pre_p);
7982       r = build4 (ARRAY_REF, ptr_type_node, array, size_int (1), NULL_TREE,
7983 		  NULL_TREE);
7984     }
7985   tem = build2 (MODIFY_EXPR, void_type_node, r,
7986 		fold_convert (ptr_type_node, total));
7987   gimplify_and_add (tem, pre_p);
7988   for (i = 1; i < (is_old ? 2 : 4); i++)
7989     {
7990       r = build4 (ARRAY_REF, ptr_type_node, array, size_int (i + !is_old),
7991 		  NULL_TREE, NULL_TREE);
7992       tem = build2 (MODIFY_EXPR, void_type_node, r, counts[i - 1]);
7993       gimplify_and_add (tem, pre_p);
7994     }
7995 
7996   tree cnts[4];
7997   for (j = 4; j; j--)
7998     if (!unused[j - 1])
7999       break;
8000   for (i = 0; i < 4; i++)
8001     {
8002       if (i && (i >= j || unused[i - 1]))
8003 	{
8004 	  cnts[i] = cnts[i - 1];
8005 	  continue;
8006 	}
8007       cnts[i] = create_tmp_var (sizetype);
8008       if (i == 0)
8009 	g = gimple_build_assign (cnts[i], size_int (is_old ? 2 : 5));
8010       else
8011 	{
8012 	  tree t;
8013 	  if (is_old)
8014 	    t = size_binop (PLUS_EXPR, counts[0], size_int (2));
8015 	  else
8016 	    t = size_binop (PLUS_EXPR, cnts[i - 1], counts[i - 1]);
8017 	  if (gimplify_expr (&t, pre_p, NULL, is_gimple_val, fb_rvalue)
8018 	      == GS_ERROR)
8019 	    return 2;
8020 	  g = gimple_build_assign (cnts[i], t);
8021 	}
8022       gimple_seq_add_stmt (pre_p, g);
8023     }
8024 
8025   last_iter = NULL_TREE;
8026   tree last_bind = NULL_TREE;
8027   tree *last_body = NULL;
8028   for (c = *list_p; c; c = OMP_CLAUSE_CHAIN (c))
8029     if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND)
8030       {
8031 	switch (OMP_CLAUSE_DEPEND_KIND (c))
8032 	  {
8033 	  case OMP_CLAUSE_DEPEND_IN:
8034 	    i = 2;
8035 	    break;
8036 	  case OMP_CLAUSE_DEPEND_OUT:
8037 	  case OMP_CLAUSE_DEPEND_INOUT:
8038 	    i = 0;
8039 	    break;
8040 	  case OMP_CLAUSE_DEPEND_MUTEXINOUTSET:
8041 	    i = 1;
8042 	    break;
8043 	  case OMP_CLAUSE_DEPEND_DEPOBJ:
8044 	    i = 3;
8045 	    break;
8046 	  case OMP_CLAUSE_DEPEND_SOURCE:
8047 	  case OMP_CLAUSE_DEPEND_SINK:
8048 	    continue;
8049 	  default:
8050 	    gcc_unreachable ();
8051 	  }
8052 	tree t = OMP_CLAUSE_DECL (c);
8053 	if (TREE_CODE (t) == TREE_LIST
8054 	    && TREE_PURPOSE (t)
8055 	    && TREE_CODE (TREE_PURPOSE (t)) == TREE_VEC)
8056 	  {
8057 	    if (TREE_PURPOSE (t) != last_iter)
8058 	      {
8059 		if (last_bind)
8060 		  gimplify_and_add (last_bind, pre_p);
8061 		tree block = TREE_VEC_ELT (TREE_PURPOSE (t), 5);
8062 		last_bind = build3 (BIND_EXPR, void_type_node,
8063 				    BLOCK_VARS (block), NULL, block);
8064 		TREE_SIDE_EFFECTS (last_bind) = 1;
8065 		SET_EXPR_LOCATION (last_bind, OMP_CLAUSE_LOCATION (c));
8066 		tree *p = &BIND_EXPR_BODY (last_bind);
8067 		for (tree it = TREE_PURPOSE (t); it; it = TREE_CHAIN (it))
8068 		  {
8069 		    tree var = TREE_VEC_ELT (it, 0);
8070 		    tree begin = TREE_VEC_ELT (it, 1);
8071 		    tree end = TREE_VEC_ELT (it, 2);
8072 		    tree step = TREE_VEC_ELT (it, 3);
8073 		    tree orig_step = TREE_VEC_ELT (it, 4);
8074 		    tree type = TREE_TYPE (var);
8075 		    location_t loc = DECL_SOURCE_LOCATION (var);
8076 		    /* Emit:
8077 		       var = begin;
8078 		       goto cond_label;
8079 		       beg_label:
8080 		       ...
8081 		       var = var + step;
8082 		       cond_label:
8083 		       if (orig_step > 0) {
8084 			 if (var < end) goto beg_label;
8085 		       } else {
8086 			 if (var > end) goto beg_label;
8087 		       }
8088 		       for each iterator, with inner iterators added to
8089 		       the ... above.  */
8090 		    tree beg_label = create_artificial_label (loc);
8091 		    tree cond_label = NULL_TREE;
8092 		    tem = build2_loc (loc, MODIFY_EXPR, void_type_node,
8093 				      var, begin);
8094 		    append_to_statement_list_force (tem, p);
8095 		    tem = build_and_jump (&cond_label);
8096 		    append_to_statement_list_force (tem, p);
8097 		    tem = build1 (LABEL_EXPR, void_type_node, beg_label);
8098 		    append_to_statement_list (tem, p);
8099 		    tree bind = build3 (BIND_EXPR, void_type_node, NULL_TREE,
8100 					NULL_TREE, NULL_TREE);
8101 		    TREE_SIDE_EFFECTS (bind) = 1;
8102 		    SET_EXPR_LOCATION (bind, loc);
8103 		    append_to_statement_list_force (bind, p);
8104 		    if (POINTER_TYPE_P (type))
8105 		      tem = build2_loc (loc, POINTER_PLUS_EXPR, type,
8106 					var, fold_convert_loc (loc, sizetype,
8107 							       step));
8108 		    else
8109 		      tem = build2_loc (loc, PLUS_EXPR, type, var, step);
8110 		    tem = build2_loc (loc, MODIFY_EXPR, void_type_node,
8111 				      var, tem);
8112 		    append_to_statement_list_force (tem, p);
8113 		    tem = build1 (LABEL_EXPR, void_type_node, cond_label);
8114 		    append_to_statement_list (tem, p);
8115 		    tree cond = fold_build2_loc (loc, LT_EXPR,
8116 						 boolean_type_node,
8117 						 var, end);
8118 		    tree pos
8119 		      = fold_build3_loc (loc, COND_EXPR, void_type_node,
8120 					 cond, build_and_jump (&beg_label),
8121 					 void_node);
8122 		    cond = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
8123 					    var, end);
8124 		    tree neg
8125 		      = fold_build3_loc (loc, COND_EXPR, void_type_node,
8126 					 cond, build_and_jump (&beg_label),
8127 					 void_node);
8128 		    tree osteptype = TREE_TYPE (orig_step);
8129 		    cond = fold_build2_loc (loc, GT_EXPR, boolean_type_node,
8130 					    orig_step,
8131 					    build_int_cst (osteptype, 0));
8132 		    tem = fold_build3_loc (loc, COND_EXPR, void_type_node,
8133 					   cond, pos, neg);
8134 		    append_to_statement_list_force (tem, p);
8135 		    p = &BIND_EXPR_BODY (bind);
8136 		  }
8137 		last_body = p;
8138 	      }
8139 	    last_iter = TREE_PURPOSE (t);
8140 	    if (TREE_CODE (TREE_VALUE (t)) == COMPOUND_EXPR)
8141 	      {
8142 		append_to_statement_list (TREE_OPERAND (TREE_VALUE (t),
8143 					  0), last_body);
8144 		TREE_VALUE (t) = TREE_OPERAND (TREE_VALUE (t), 1);
8145 	      }
8146 	    if (error_operand_p (TREE_VALUE (t)))
8147 	      return 2;
8148 	    TREE_VALUE (t) = build_fold_addr_expr (TREE_VALUE (t));
8149 	    r = build4 (ARRAY_REF, ptr_type_node, array, cnts[i],
8150 			NULL_TREE, NULL_TREE);
8151 	    tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
8152 			      void_type_node, r, TREE_VALUE (t));
8153 	    append_to_statement_list_force (tem, last_body);
8154 	    tem = build2_loc (OMP_CLAUSE_LOCATION (c), MODIFY_EXPR,
8155 			      void_type_node, cnts[i],
8156 			      size_binop (PLUS_EXPR, cnts[i], size_int (1)));
8157 	    append_to_statement_list_force (tem, last_body);
8158 	    TREE_VALUE (t) = null_pointer_node;
8159 	  }
8160 	else
8161 	  {
8162 	    if (last_bind)
8163 	      {
8164 		gimplify_and_add (last_bind, pre_p);
8165 		last_bind = NULL_TREE;
8166 	      }
8167 	    if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
8168 	      {
8169 		gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
8170 			       NULL, is_gimple_val, fb_rvalue);
8171 		OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
8172 	      }
8173 	    if (error_operand_p (OMP_CLAUSE_DECL (c)))
8174 	      return 2;
8175 	    OMP_CLAUSE_DECL (c) = build_fold_addr_expr (OMP_CLAUSE_DECL (c));
8176 	    if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
8177 			       is_gimple_val, fb_rvalue) == GS_ERROR)
8178 	      return 2;
8179 	    r = build4 (ARRAY_REF, ptr_type_node, array, cnts[i],
8180 			NULL_TREE, NULL_TREE);
8181 	    tem = build2 (MODIFY_EXPR, void_type_node, r, OMP_CLAUSE_DECL (c));
8182 	    gimplify_and_add (tem, pre_p);
8183 	    g = gimple_build_assign (cnts[i], size_binop (PLUS_EXPR, cnts[i],
8184 							  size_int (1)));
8185 	    gimple_seq_add_stmt (pre_p, g);
8186 	  }
8187       }
8188   if (last_bind)
8189     gimplify_and_add (last_bind, pre_p);
8190   tree cond = boolean_false_node;
8191   if (is_old)
8192     {
8193       if (!unused[0])
8194 	cond = build2_loc (first_loc, NE_EXPR, boolean_type_node, cnts[0],
8195 			   size_binop_loc (first_loc, PLUS_EXPR, counts[0],
8196 					   size_int (2)));
8197       if (!unused[2])
8198 	cond = build2_loc (first_loc, TRUTH_OR_EXPR, boolean_type_node, cond,
8199 			   build2_loc (first_loc, NE_EXPR, boolean_type_node,
8200 				       cnts[2],
8201 				       size_binop_loc (first_loc, PLUS_EXPR,
8202 						       totalpx,
8203 						       size_int (1))));
8204     }
8205   else
8206     {
8207       tree prev = size_int (5);
8208       for (i = 0; i < 4; i++)
8209 	{
8210 	  if (unused[i])
8211 	    continue;
8212 	  prev = size_binop_loc (first_loc, PLUS_EXPR, counts[i], prev);
8213 	  cond = build2_loc (first_loc, TRUTH_OR_EXPR, boolean_type_node, cond,
8214 			     build2_loc (first_loc, NE_EXPR, boolean_type_node,
8215 					 cnts[i], unshare_expr (prev)));
8216 	}
8217     }
8218   tem = build3_loc (first_loc, COND_EXPR, void_type_node, cond,
8219 		    build_call_expr_loc (first_loc,
8220 					 builtin_decl_explicit (BUILT_IN_TRAP),
8221 					 0), void_node);
8222   gimplify_and_add (tem, pre_p);
8223   c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_DEPEND);
8224   OMP_CLAUSE_DEPEND_KIND (c) = OMP_CLAUSE_DEPEND_LAST;
8225   OMP_CLAUSE_DECL (c) = build_fold_addr_expr (array);
8226   OMP_CLAUSE_CHAIN (c) = *list_p;
8227   *list_p = c;
8228   return 1;
8229 }
8230 
8231 /* Insert a GOMP_MAP_ALLOC or GOMP_MAP_RELEASE node following a
8232    GOMP_MAP_STRUCT mapping.  C is an always_pointer mapping.  STRUCT_NODE is
8233    the struct node to insert the new mapping after (when the struct node is
8234    initially created).  PREV_NODE is the first of two or three mappings for a
8235    pointer, and is either:
8236      - the node before C, when a pair of mappings is used, e.g. for a C/C++
8237        array section.
8238      - not the node before C.  This is true when we have a reference-to-pointer
8239        type (with a mapping for the reference and for the pointer), or for
8240        Fortran derived-type mappings with a GOMP_MAP_TO_PSET.
8241    If SCP is non-null, the new node is inserted before *SCP.
8242    If SCP is null, the new node is inserted before PREV_NODE.
8243    The return value is:
8244      - PREV_NODE, if SCP is non-null.
8245      - The newly-created ALLOC or RELEASE node, if SCP is null.
8246      - The second newly-created ALLOC or RELEASE node, if we are mapping a
8247        reference to a pointer.  */
8248 
8249 static tree
insert_struct_comp_map(enum tree_code code,tree c,tree struct_node,tree prev_node,tree * scp)8250 insert_struct_comp_map (enum tree_code code, tree c, tree struct_node,
8251 			tree prev_node, tree *scp)
8252 {
  /* On "exit data" constructs (OpenMP target exit data / OpenACC exit data)
     the synthesized node must release the mapped storage; on every other
     construct it only needs to allocate it.  */
8253   enum gomp_map_kind mkind
8254     = (code == OMP_TARGET_EXIT_DATA || code == OACC_EXIT_DATA)
8255       ? GOMP_MAP_RELEASE : GOMP_MAP_ALLOC;
8256 
8257   tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c), OMP_CLAUSE_MAP);
  /* CL is the value returned to the caller: PREV_NODE when SCP is non-null,
     otherwise the newly created node (possibly replaced by C3 below).  */
8258   tree cl = scp ? prev_node : c2;
8259   OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
  /* The new node maps the same object as C, with its own unshared copy of
     the decl expression.  */
8260   OMP_CLAUSE_DECL (c2) = unshare_expr (OMP_CLAUSE_DECL (c));
  /* Splice C2 in before *SCP when SCP is given, else before PREV_NODE.  */
8261   OMP_CLAUSE_CHAIN (c2) = scp ? *scp : prev_node;
  /* If PREV_NODE is followed by a GOMP_MAP_TO_PSET mapping (Fortran array
     descriptor), the new node must cover the descriptor's size; otherwise
     it covers a single pointer.  */
8262   if (OMP_CLAUSE_CHAIN (prev_node) != c
8263       && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (prev_node)) == OMP_CLAUSE_MAP
8264       && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (prev_node))
8265 	  == GOMP_MAP_TO_PSET))
8266     OMP_CLAUSE_SIZE (c2) = OMP_CLAUSE_SIZE (OMP_CLAUSE_CHAIN (prev_node));
8267   else
8268     OMP_CLAUSE_SIZE (c2) = TYPE_SIZE_UNIT (ptr_type_node);
  /* When the struct node was just created, hang C2 directly off it.  */
8269   if (struct_node)
8270     OMP_CLAUSE_CHAIN (struct_node) = c2;
8271 
8272   /* We might need to create an additional mapping if we have a reference to a
8273      pointer (in C++).  Don't do this if we have something other than a
8274      GOMP_MAP_ALWAYS_POINTER though, i.e. a GOMP_MAP_TO_PSET.  */
8275   if (OMP_CLAUSE_CHAIN (prev_node) != c
8276       && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (prev_node)) == OMP_CLAUSE_MAP
8277       && ((OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (prev_node))
8278 	   == GOMP_MAP_ALWAYS_POINTER)
8279 	  || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (prev_node))
8280 	      == GOMP_MAP_ATTACH_DETACH)))
8281     {
      /* C4 is the mapping for the pointer itself; C3 is a second ALLOC or
	 RELEASE node for it, chained in ahead of PREV_NODE.  */
8282       tree c4 = OMP_CLAUSE_CHAIN (prev_node);
8283       tree c3 = build_omp_clause (OMP_CLAUSE_LOCATION (c), OMP_CLAUSE_MAP);
8284       OMP_CLAUSE_SET_MAP_KIND (c3, mkind);
8285       OMP_CLAUSE_DECL (c3) = unshare_expr (OMP_CLAUSE_DECL (c4));
8286       OMP_CLAUSE_SIZE (c3) = TYPE_SIZE_UNIT (ptr_type_node);
8287       OMP_CLAUSE_CHAIN (c3) = prev_node;
      /* Without SCP, C3 goes right after C2; with SCP, C3 becomes the
	 returned node instead (see function comment).  */
8288       if (!scp)
8289 	OMP_CLAUSE_CHAIN (c2) = c3;
8290       else
8291 	cl = c3;
8292     }
8293 
  /* Report the insertion point back to the caller.  */
8294   if (scp)
8295     *scp = c2;
8296 
8297   return cl;
8298 }
8299 
8300 /* Strip ARRAY_REFS or an indirect ref off BASE, find the containing object,
8301    and set *BITPOSP and *POFFSETP to the bit offset of the access.
8302    If BASE_REF is non-NULL and the containing object is a reference, set
8303    *BASE_REF to that reference before dereferencing the object.
8304    If BASE_REF is NULL, check that the containing object is a COMPONENT_REF or
8305    has array type, else return NULL.  */
8306 
8307 static tree
extract_base_bit_offset(tree base,tree * base_ref,poly_int64 * bitposp,poly_offset_int * poffsetp)8308 extract_base_bit_offset (tree base, tree *base_ref, poly_int64 *bitposp,
8309 			 poly_offset_int *poffsetp)
8310 {
8311   tree offset;
8312   poly_int64 bitsize, bitpos;
8313   machine_mode mode;
8314   int unsignedp, reversep, volatilep = 0;
8315   poly_offset_int poffset;
8316 
  /* With BASE_REF requested, unconditionally strip trailing ARRAY_REFs and
     one INDIRECT_REF so get_inner_reference sees the containing object.  */
8317   if (base_ref)
8318     {
8319       *base_ref = NULL_TREE;
8320 
8321       while (TREE_CODE (base) == ARRAY_REF)
8322 	base = TREE_OPERAND (base, 0);
8323 
8324       if (TREE_CODE (base) == INDIRECT_REF)
8325 	base = TREE_OPERAND (base, 0);
8326     }
8327   else
8328     {
      /* Without BASE_REF, only accept an array section whose underlying
	 object is a COMPONENT_REF of array type; anything else fails.  */
8329       if (TREE_CODE (base) == ARRAY_REF)
8330 	{
8331 	  while (TREE_CODE (base) == ARRAY_REF)
8332 	    base = TREE_OPERAND (base, 0);
8333 	  if (TREE_CODE (base) != COMPONENT_REF
8334 	      || TREE_CODE (TREE_TYPE (base)) != ARRAY_TYPE)
8335 	    return NULL_TREE;
8336 	}
      /* Look through a dereference of a reference-typed member
	 (e.g. C++ reference members).  */
8337       else if (TREE_CODE (base) == INDIRECT_REF
8338 	       && TREE_CODE (TREE_OPERAND (base, 0)) == COMPONENT_REF
8339 	       && (TREE_CODE (TREE_TYPE (TREE_OPERAND (base, 0)))
8340 		   == REFERENCE_TYPE))
8341 	base = TREE_OPERAND (base, 0);
8342     }
8343 
  /* Decompose the access into its innermost base plus a bit position and
     optional variable byte offset.  */
8344   base = get_inner_reference (base, &bitsize, &bitpos, &offset, &mode,
8345 			      &unsignedp, &reversep, &volatilep);
8346 
8347   tree orig_base = base;
8348 
  /* If the base is a dereference of a reference-typed decl, use the decl
     itself as the base; ORIG_BASE keeps the dereferenced form so it can be
     reported via *BASE_REF below.  */
8349   if ((TREE_CODE (base) == INDIRECT_REF
8350        || (TREE_CODE (base) == MEM_REF
8351 	   && integer_zerop (TREE_OPERAND (base, 1))))
8352       && DECL_P (TREE_OPERAND (base, 0))
8353       && TREE_CODE (TREE_TYPE (TREE_OPERAND (base, 0))) == REFERENCE_TYPE)
8354     base = TREE_OPERAND (base, 0);
8355 
  /* Callers only pass accesses whose offset is a compile-time poly_int.  */
8356   gcc_assert (offset == NULL_TREE || poly_int_tree_p (offset));
8357 
8358   if (offset)
8359     poffset = wi::to_poly_offset (offset);
8360   else
8361     poffset = 0;
8362 
  /* Fold the whole-byte part of the bit position into the byte offset;
     any sub-byte remainder stays in *BITPOSP.  */
8363   if (maybe_ne (bitpos, 0))
8364     poffset += bits_to_bytes_round_down (bitpos);
8365 
8366   *bitposp = bitpos;
8367   *poffsetp = poffset;
8368 
8369   /* Set *BASE_REF if BASE was a dereferenced reference variable.  */
8370   if (base_ref && orig_base != base)
8371     *base_ref = orig_base;
8372 
8373   return base;
8374 }
8375 
8376 /* Scan the OMP clauses in *LIST_P, installing mappings into a new
8377    and previous omp contexts.  */
8378 
8379 static void
gimplify_scan_omp_clauses(tree * list_p,gimple_seq * pre_p,enum omp_region_type region_type,enum tree_code code)8380 gimplify_scan_omp_clauses (tree *list_p, gimple_seq *pre_p,
8381 			   enum omp_region_type region_type,
8382 			   enum tree_code code)
8383 {
8384   struct gimplify_omp_ctx *ctx, *outer_ctx;
8385   tree c;
8386   hash_map<tree, tree> *struct_map_to_clause = NULL;
8387   hash_set<tree> *struct_deref_set = NULL;
8388   tree *prev_list_p = NULL, *orig_list_p = list_p;
8389   int handled_depend_iterators = -1;
8390   int nowait = -1;
8391 
8392   ctx = new_omp_context (region_type);
8393   ctx->code = code;
8394   outer_ctx = ctx->outer_context;
8395   if (code == OMP_TARGET)
8396     {
8397       if (!lang_GNU_Fortran ())
8398 	ctx->defaultmap[GDMK_POINTER] = GOVD_MAP | GOVD_MAP_0LEN_ARRAY;
8399       ctx->defaultmap[GDMK_SCALAR] = GOVD_FIRSTPRIVATE;
8400     }
8401   if (!lang_GNU_Fortran ())
8402     switch (code)
8403       {
8404       case OMP_TARGET:
8405       case OMP_TARGET_DATA:
8406       case OMP_TARGET_ENTER_DATA:
8407       case OMP_TARGET_EXIT_DATA:
8408       case OACC_DECLARE:
8409       case OACC_HOST_DATA:
8410       case OACC_PARALLEL:
8411       case OACC_KERNELS:
8412 	ctx->target_firstprivatize_array_bases = true;
8413       default:
8414 	break;
8415       }
8416 
8417   while ((c = *list_p) != NULL)
8418     {
8419       bool remove = false;
8420       bool notice_outer = true;
8421       const char *check_non_private = NULL;
8422       unsigned int flags;
8423       tree decl;
8424 
8425       switch (OMP_CLAUSE_CODE (c))
8426 	{
8427 	case OMP_CLAUSE_PRIVATE:
8428 	  flags = GOVD_PRIVATE | GOVD_EXPLICIT;
8429 	  if (lang_hooks.decls.omp_private_outer_ref (OMP_CLAUSE_DECL (c)))
8430 	    {
8431 	      flags |= GOVD_PRIVATE_OUTER_REF;
8432 	      OMP_CLAUSE_PRIVATE_OUTER_REF (c) = 1;
8433 	    }
8434 	  else
8435 	    notice_outer = false;
8436 	  goto do_add;
8437 	case OMP_CLAUSE_SHARED:
8438 	  flags = GOVD_SHARED | GOVD_EXPLICIT;
8439 	  goto do_add;
8440 	case OMP_CLAUSE_FIRSTPRIVATE:
8441 	  flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
8442 	  check_non_private = "firstprivate";
8443 	  goto do_add;
8444 	case OMP_CLAUSE_LASTPRIVATE:
8445 	  if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
8446 	    switch (code)
8447 	      {
8448 	      case OMP_DISTRIBUTE:
8449 		error_at (OMP_CLAUSE_LOCATION (c),
8450 			  "conditional %<lastprivate%> clause on "
8451 			  "%qs construct", "distribute");
8452 		OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) = 0;
8453 		break;
8454 	      case OMP_TASKLOOP:
8455 		error_at (OMP_CLAUSE_LOCATION (c),
8456 			  "conditional %<lastprivate%> clause on "
8457 			  "%qs construct", "taskloop");
8458 		OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) = 0;
8459 		break;
8460 	      default:
8461 		break;
8462 	      }
8463 	  flags = GOVD_LASTPRIVATE | GOVD_SEEN | GOVD_EXPLICIT;
8464 	  if (code != OMP_LOOP)
8465 	    check_non_private = "lastprivate";
8466 	  decl = OMP_CLAUSE_DECL (c);
8467 	  if (error_operand_p (decl))
8468 	    goto do_add;
8469 	  if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c)
8470 	      && !lang_hooks.decls.omp_scalar_p (decl))
8471 	    {
8472 	      error_at (OMP_CLAUSE_LOCATION (c),
8473 			"non-scalar variable %qD in conditional "
8474 			"%<lastprivate%> clause", decl);
8475 	      OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) = 0;
8476 	    }
8477 	  if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
8478 	    flags |= GOVD_LASTPRIVATE_CONDITIONAL;
8479 	  if (outer_ctx
8480 	      && (outer_ctx->region_type == ORT_COMBINED_PARALLEL
8481 		  || ((outer_ctx->region_type & ORT_COMBINED_TEAMS)
8482 		      == ORT_COMBINED_TEAMS))
8483 	      && splay_tree_lookup (outer_ctx->variables,
8484 				    (splay_tree_key) decl) == NULL)
8485 	    {
8486 	      omp_add_variable (outer_ctx, decl, GOVD_SHARED | GOVD_SEEN);
8487 	      if (outer_ctx->outer_context)
8488 		omp_notice_variable (outer_ctx->outer_context, decl, true);
8489 	    }
8490 	  else if (outer_ctx
8491 		   && (outer_ctx->region_type & ORT_TASK) != 0
8492 		   && outer_ctx->combined_loop
8493 		   && splay_tree_lookup (outer_ctx->variables,
8494 					 (splay_tree_key) decl) == NULL)
8495 	    {
8496 	      omp_add_variable (outer_ctx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
8497 	      if (outer_ctx->outer_context)
8498 		omp_notice_variable (outer_ctx->outer_context, decl, true);
8499 	    }
8500 	  else if (outer_ctx
8501 		   && (outer_ctx->region_type == ORT_WORKSHARE
8502 		       || outer_ctx->region_type == ORT_ACC)
8503 		   && outer_ctx->combined_loop
8504 		   && splay_tree_lookup (outer_ctx->variables,
8505 					 (splay_tree_key) decl) == NULL
8506 		   && !omp_check_private (outer_ctx, decl, false))
8507 	    {
8508 	      omp_add_variable (outer_ctx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
8509 	      if (outer_ctx->outer_context
8510 		  && (outer_ctx->outer_context->region_type
8511 		      == ORT_COMBINED_PARALLEL)
8512 		  && splay_tree_lookup (outer_ctx->outer_context->variables,
8513 					(splay_tree_key) decl) == NULL)
8514 		{
8515 		  struct gimplify_omp_ctx *octx = outer_ctx->outer_context;
8516 		  omp_add_variable (octx, decl, GOVD_SHARED | GOVD_SEEN);
8517 		  if (octx->outer_context)
8518 		    {
8519 		      octx = octx->outer_context;
8520 		      if (octx->region_type == ORT_WORKSHARE
8521 			  && octx->combined_loop
8522 			  && splay_tree_lookup (octx->variables,
8523 						(splay_tree_key) decl) == NULL
8524 			  && !omp_check_private (octx, decl, false))
8525 			{
8526 			  omp_add_variable (octx, decl,
8527 					    GOVD_LASTPRIVATE | GOVD_SEEN);
8528 			  octx = octx->outer_context;
8529 			  if (octx
8530 			      && ((octx->region_type & ORT_COMBINED_TEAMS)
8531 				  == ORT_COMBINED_TEAMS)
8532 			      && (splay_tree_lookup (octx->variables,
8533 						     (splay_tree_key) decl)
8534 				  == NULL))
8535 			    {
8536 			      omp_add_variable (octx, decl,
8537 						GOVD_SHARED | GOVD_SEEN);
8538 			      octx = octx->outer_context;
8539 			    }
8540 			}
8541 		      if (octx)
8542 			omp_notice_variable (octx, decl, true);
8543 		    }
8544 		}
8545 	      else if (outer_ctx->outer_context)
8546 		omp_notice_variable (outer_ctx->outer_context, decl, true);
8547 	    }
8548 	  goto do_add;
8549 	case OMP_CLAUSE_REDUCTION:
8550 	  if (OMP_CLAUSE_REDUCTION_TASK (c))
8551 	    {
8552 	      if (region_type == ORT_WORKSHARE)
8553 		{
8554 		  if (nowait == -1)
8555 		    nowait = omp_find_clause (*list_p,
8556 					      OMP_CLAUSE_NOWAIT) != NULL_TREE;
8557 		  if (nowait
8558 		      && (outer_ctx == NULL
8559 			  || outer_ctx->region_type != ORT_COMBINED_PARALLEL))
8560 		    {
8561 		      error_at (OMP_CLAUSE_LOCATION (c),
8562 				"%<task%> reduction modifier on a construct "
8563 				"with a %<nowait%> clause");
8564 		      OMP_CLAUSE_REDUCTION_TASK (c) = 0;
8565 		    }
8566 		}
8567 	      else if ((region_type & ORT_PARALLEL) != ORT_PARALLEL)
8568 		{
8569 		  error_at (OMP_CLAUSE_LOCATION (c),
8570 			    "invalid %<task%> reduction modifier on construct "
8571 			    "other than %<parallel%>, %<for%> or %<sections%>");
8572 		  OMP_CLAUSE_REDUCTION_TASK (c) = 0;
8573 		}
8574 	    }
8575 	  if (OMP_CLAUSE_REDUCTION_INSCAN (c))
8576 	    switch (code)
8577 	      {
8578 	      case OMP_SECTIONS:
8579 		error_at (OMP_CLAUSE_LOCATION (c),
8580 			  "%<inscan%> %<reduction%> clause on "
8581 			  "%qs construct", "sections");
8582 		OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
8583 		break;
8584 	      case OMP_PARALLEL:
8585 		error_at (OMP_CLAUSE_LOCATION (c),
8586 			  "%<inscan%> %<reduction%> clause on "
8587 			  "%qs construct", "parallel");
8588 		OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
8589 		break;
8590 	      case OMP_TEAMS:
8591 		error_at (OMP_CLAUSE_LOCATION (c),
8592 			  "%<inscan%> %<reduction%> clause on "
8593 			  "%qs construct", "teams");
8594 		OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
8595 		break;
8596 	      case OMP_TASKLOOP:
8597 		error_at (OMP_CLAUSE_LOCATION (c),
8598 			  "%<inscan%> %<reduction%> clause on "
8599 			  "%qs construct", "taskloop");
8600 		OMP_CLAUSE_REDUCTION_INSCAN (c) = 0;
8601 		break;
8602 	      default:
8603 		break;
8604 	      }
8605 	  /* FALLTHRU */
8606 	case OMP_CLAUSE_IN_REDUCTION:
8607 	case OMP_CLAUSE_TASK_REDUCTION:
8608 	  flags = GOVD_REDUCTION | GOVD_SEEN | GOVD_EXPLICIT;
8609 	  /* OpenACC permits reductions on private variables.  */
8610 	  if (!(region_type & ORT_ACC)
8611 	      /* taskgroup is actually not a worksharing region.  */
8612 	      && code != OMP_TASKGROUP)
8613 	    check_non_private = omp_clause_code_name[OMP_CLAUSE_CODE (c)];
8614 	  decl = OMP_CLAUSE_DECL (c);
8615 	  if (TREE_CODE (decl) == MEM_REF)
8616 	    {
8617 	      tree type = TREE_TYPE (decl);
8618 	      bool saved_into_ssa = gimplify_ctxp->into_ssa;
8619 	      gimplify_ctxp->into_ssa = false;
8620 	      if (gimplify_expr (&TYPE_MAX_VALUE (TYPE_DOMAIN (type)), pre_p,
8621 				 NULL, is_gimple_val, fb_rvalue, false)
8622 		  == GS_ERROR)
8623 		{
8624 		  gimplify_ctxp->into_ssa = saved_into_ssa;
8625 		  remove = true;
8626 		  break;
8627 		}
8628 	      gimplify_ctxp->into_ssa = saved_into_ssa;
8629 	      tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
8630 	      if (DECL_P (v))
8631 		{
8632 		  omp_firstprivatize_variable (ctx, v);
8633 		  omp_notice_variable (ctx, v, true);
8634 		}
8635 	      decl = TREE_OPERAND (decl, 0);
8636 	      if (TREE_CODE (decl) == POINTER_PLUS_EXPR)
8637 		{
8638 		  gimplify_ctxp->into_ssa = false;
8639 		  if (gimplify_expr (&TREE_OPERAND (decl, 1), pre_p,
8640 				     NULL, is_gimple_val, fb_rvalue, false)
8641 		      == GS_ERROR)
8642 		    {
8643 		      gimplify_ctxp->into_ssa = saved_into_ssa;
8644 		      remove = true;
8645 		      break;
8646 		    }
8647 		  gimplify_ctxp->into_ssa = saved_into_ssa;
8648 		  v = TREE_OPERAND (decl, 1);
8649 		  if (DECL_P (v))
8650 		    {
8651 		      omp_firstprivatize_variable (ctx, v);
8652 		      omp_notice_variable (ctx, v, true);
8653 		    }
8654 		  decl = TREE_OPERAND (decl, 0);
8655 		}
8656 	      if (TREE_CODE (decl) == ADDR_EXPR
8657 		  || TREE_CODE (decl) == INDIRECT_REF)
8658 		decl = TREE_OPERAND (decl, 0);
8659 	    }
8660 	  goto do_add_decl;
8661 	case OMP_CLAUSE_LINEAR:
8662 	  if (gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c), pre_p, NULL,
8663 			     is_gimple_val, fb_rvalue) == GS_ERROR)
8664 	    {
8665 	      remove = true;
8666 	      break;
8667 	    }
8668 	  else
8669 	    {
8670 	      if (code == OMP_SIMD
8671 		  && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
8672 		{
8673 		  struct gimplify_omp_ctx *octx = outer_ctx;
8674 		  if (octx
8675 		      && octx->region_type == ORT_WORKSHARE
8676 		      && octx->combined_loop
8677 		      && !octx->distribute)
8678 		    {
8679 		      if (octx->outer_context
8680 			  && (octx->outer_context->region_type
8681 			      == ORT_COMBINED_PARALLEL))
8682 			octx = octx->outer_context->outer_context;
8683 		      else
8684 			octx = octx->outer_context;
8685 		    }
8686 		  if (octx
8687 		      && octx->region_type == ORT_WORKSHARE
8688 		      && octx->combined_loop
8689 		      && octx->distribute)
8690 		    {
8691 		      error_at (OMP_CLAUSE_LOCATION (c),
8692 				"%<linear%> clause for variable other than "
8693 				"loop iterator specified on construct "
8694 				"combined with %<distribute%>");
8695 		      remove = true;
8696 		      break;
8697 		    }
8698 		}
8699 	      /* For combined #pragma omp parallel for simd, need to put
8700 		 lastprivate and perhaps firstprivate too on the
8701 		 parallel.  Similarly for #pragma omp for simd.  */
8702 	      struct gimplify_omp_ctx *octx = outer_ctx;
8703 	      decl = NULL_TREE;
8704 	      do
8705 		{
8706 		  if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
8707 		      && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
8708 		    break;
8709 		  decl = OMP_CLAUSE_DECL (c);
8710 		  if (error_operand_p (decl))
8711 		    {
8712 		      decl = NULL_TREE;
8713 		      break;
8714 		    }
8715 		  flags = GOVD_SEEN;
8716 		  if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
8717 		    flags |= GOVD_FIRSTPRIVATE;
8718 		  if (!OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
8719 		    flags |= GOVD_LASTPRIVATE;
8720 		  if (octx
8721 		      && octx->region_type == ORT_WORKSHARE
8722 		      && octx->combined_loop)
8723 		    {
8724 		      if (octx->outer_context
8725 			  && (octx->outer_context->region_type
8726 			      == ORT_COMBINED_PARALLEL))
8727 			octx = octx->outer_context;
8728 		      else if (omp_check_private (octx, decl, false))
8729 			break;
8730 		    }
8731 		  else if (octx
8732 			   && (octx->region_type & ORT_TASK) != 0
8733 			   && octx->combined_loop)
8734 		    ;
8735 		  else if (octx
8736 			   && octx->region_type == ORT_COMBINED_PARALLEL
8737 			   && ctx->region_type == ORT_WORKSHARE
8738 			   && octx == outer_ctx)
8739 		    flags = GOVD_SEEN | GOVD_SHARED;
8740 		  else if (octx
8741 			   && ((octx->region_type & ORT_COMBINED_TEAMS)
8742 			       == ORT_COMBINED_TEAMS))
8743 		    flags = GOVD_SEEN | GOVD_SHARED;
8744 		  else if (octx
8745 			   && octx->region_type == ORT_COMBINED_TARGET)
8746 		    {
8747 		      flags &= ~GOVD_LASTPRIVATE;
8748 		      if (flags == GOVD_SEEN)
8749 			break;
8750 		    }
8751 		  else
8752 		    break;
8753 		  splay_tree_node on
8754 		    = splay_tree_lookup (octx->variables,
8755 					 (splay_tree_key) decl);
8756 		  if (on && (on->value & GOVD_DATA_SHARE_CLASS) != 0)
8757 		    {
8758 		      octx = NULL;
8759 		      break;
8760 		    }
8761 		  omp_add_variable (octx, decl, flags);
8762 		  if (octx->outer_context == NULL)
8763 		    break;
8764 		  octx = octx->outer_context;
8765 		}
8766 	      while (1);
8767 	      if (octx
8768 		  && decl
8769 		  && (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
8770 		      || !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
8771 		omp_notice_variable (octx, decl, true);
8772 	    }
8773 	  flags = GOVD_LINEAR | GOVD_EXPLICIT;
8774 	  if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
8775 	      && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
8776 	    {
8777 	      notice_outer = false;
8778 	      flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
8779 	    }
8780 	  goto do_add;
8781 
8782 	case OMP_CLAUSE_MAP:
8783 	  decl = OMP_CLAUSE_DECL (c);
8784 	  if (error_operand_p (decl))
8785 	    remove = true;
8786 	  switch (code)
8787 	    {
8788 	    case OMP_TARGET:
8789 	      break;
8790 	    case OACC_DATA:
8791 	      if (TREE_CODE (TREE_TYPE (decl)) != ARRAY_TYPE)
8792 		break;
8793 	      /* FALLTHRU */
8794 	    case OMP_TARGET_DATA:
8795 	    case OMP_TARGET_ENTER_DATA:
8796 	    case OMP_TARGET_EXIT_DATA:
8797 	    case OACC_ENTER_DATA:
8798 	    case OACC_EXIT_DATA:
8799 	    case OACC_HOST_DATA:
8800 	      if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
8801 		  || (OMP_CLAUSE_MAP_KIND (c)
8802 		      == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
8803 		/* For target {,enter ,exit }data only the array slice is
8804 		   mapped, but not the pointer to it.  */
8805 		remove = true;
8806 	      break;
8807 	    default:
8808 	      break;
8809 	    }
8810 	  /* For Fortran, not only the pointer to the data is mapped but also
8811 	     the address of the pointer, the array descriptor etc.; for
8812 	     'exit data' - and in particular for 'delete:' - having an 'alloc:'
8813 	     does not make sense.  Likewise, for 'update' only transferring the
8814 	     data itself is needed as the rest has been handled in previous
8815 	     directives.  However, for 'exit data', the array descriptor needs
8816 	     to be delete; hence, we turn the MAP_TO_PSET into a MAP_DELETE.
8817 
8818 	     NOTE: Generally, it is not safe to perform "enter data" operations
8819 	     on arrays where the data *or the descriptor* may go out of scope
8820 	     before a corresponding "exit data" operation -- and such a
8821 	     descriptor may be synthesized temporarily, e.g. to pass an
8822 	     explicit-shape array to a function expecting an assumed-shape
8823 	     argument.  Performing "enter data" inside the called function
8824 	     would thus be problematic.  */
8825 	  if (code == OMP_TARGET_EXIT_DATA
8826 	      && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_TO_PSET)
8827 	    OMP_CLAUSE_SET_MAP_KIND (c, OMP_CLAUSE_MAP_KIND (*prev_list_p)
8828 					== GOMP_MAP_DELETE
8829 					? GOMP_MAP_DELETE : GOMP_MAP_RELEASE);
8830 	  else if ((code == OMP_TARGET_EXIT_DATA || code == OMP_TARGET_UPDATE)
8831 		   && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_POINTER
8832 		       || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_TO_PSET))
8833 	    remove = true;
8834 
8835 	  if (remove)
8836 	    break;
8837 	  if (DECL_P (decl) && outer_ctx && (region_type & ORT_ACC))
8838 	    {
8839 	      struct gimplify_omp_ctx *octx;
8840 	      for (octx = outer_ctx; octx; octx = octx->outer_context)
8841 	        {
8842 		  if (octx->region_type != ORT_ACC_HOST_DATA)
8843 		    break;
8844 		  splay_tree_node n2
8845 		    = splay_tree_lookup (octx->variables,
8846 					 (splay_tree_key) decl);
8847 		  if (n2)
8848 		    error_at (OMP_CLAUSE_LOCATION (c), "variable %qE "
8849 			      "declared in enclosing %<host_data%> region",
8850 			      DECL_NAME (decl));
8851 		}
8852 	    }
8853 	  if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
8854 	    OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
8855 				  : TYPE_SIZE_UNIT (TREE_TYPE (decl));
8856 	  if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
8857 			     NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
8858 	    {
8859 	      remove = true;
8860 	      break;
8861 	    }
8862 	  else if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
8863 		    || (OMP_CLAUSE_MAP_KIND (c)
8864 			== GOMP_MAP_FIRSTPRIVATE_REFERENCE))
8865 		   && TREE_CODE (OMP_CLAUSE_SIZE (c)) != INTEGER_CST)
8866 	    {
8867 	      OMP_CLAUSE_SIZE (c)
8868 		= get_initialized_tmp_var (OMP_CLAUSE_SIZE (c), pre_p, NULL,
8869 					   false);
8870 	      omp_add_variable (ctx, OMP_CLAUSE_SIZE (c),
8871 				GOVD_FIRSTPRIVATE | GOVD_SEEN);
8872 	    }
8873 	  if (!DECL_P (decl))
8874 	    {
8875 	      tree d = decl, *pd;
8876 	      if (TREE_CODE (d) == ARRAY_REF)
8877 		{
8878 		  while (TREE_CODE (d) == ARRAY_REF)
8879 		    d = TREE_OPERAND (d, 0);
8880 		  if (TREE_CODE (d) == COMPONENT_REF
8881 		      && TREE_CODE (TREE_TYPE (d)) == ARRAY_TYPE)
8882 		    decl = d;
8883 		}
8884 	      pd = &OMP_CLAUSE_DECL (c);
8885 	      if (d == decl
8886 		  && TREE_CODE (decl) == INDIRECT_REF
8887 		  && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
8888 		  && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
8889 		      == REFERENCE_TYPE))
8890 		{
8891 		  pd = &TREE_OPERAND (decl, 0);
8892 		  decl = TREE_OPERAND (decl, 0);
8893 		}
8894 	      bool indir_p = false;
8895 	      tree orig_decl = decl;
8896 	      tree decl_ref = NULL_TREE;
8897 	      if ((region_type & ORT_ACC) != 0
8898 		  && TREE_CODE (*pd) == COMPONENT_REF
8899 		  && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH
8900 		  && code != OACC_UPDATE)
8901 		{
8902 		  while (TREE_CODE (decl) == COMPONENT_REF)
8903 		    {
8904 		      decl = TREE_OPERAND (decl, 0);
8905 		      if ((TREE_CODE (decl) == MEM_REF
8906 			   && integer_zerop (TREE_OPERAND (decl, 1)))
8907 			  || INDIRECT_REF_P (decl))
8908 			{
8909 			  indir_p = true;
8910 			  decl = TREE_OPERAND (decl, 0);
8911 			}
8912 		      if (TREE_CODE (decl) == INDIRECT_REF
8913 			  && DECL_P (TREE_OPERAND (decl, 0))
8914 			  && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
8915 			      == REFERENCE_TYPE))
8916 			{
8917 			  decl_ref = decl;
8918 			  decl = TREE_OPERAND (decl, 0);
8919 			}
8920 		    }
8921 		}
8922 	      else if (TREE_CODE (decl) == COMPONENT_REF)
8923 		{
8924 		  while (TREE_CODE (decl) == COMPONENT_REF)
8925 		    decl = TREE_OPERAND (decl, 0);
8926 		  if (TREE_CODE (decl) == INDIRECT_REF
8927 		      && DECL_P (TREE_OPERAND (decl, 0))
8928 		      && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
8929 			  == REFERENCE_TYPE))
8930 		    decl = TREE_OPERAND (decl, 0);
8931 		}
8932 	      if (decl != orig_decl && DECL_P (decl) && indir_p)
8933 		{
8934 		  gomp_map_kind k = (code == OACC_EXIT_DATA) ? GOMP_MAP_DETACH
8935 							     : GOMP_MAP_ATTACH;
8936 		  /* We have a dereference of a struct member.  Make this an
8937 		     attach/detach operation, and ensure the base pointer is
8938 		     mapped as a FIRSTPRIVATE_POINTER.  */
8939 		  OMP_CLAUSE_SET_MAP_KIND (c, k);
8940 		  flags = GOVD_MAP | GOVD_SEEN | GOVD_EXPLICIT;
8941 		  tree next_clause = OMP_CLAUSE_CHAIN (c);
8942 		  if (k == GOMP_MAP_ATTACH
8943 		      && code != OACC_ENTER_DATA
8944 		      && (!next_clause
8945 			   || (OMP_CLAUSE_CODE (next_clause) != OMP_CLAUSE_MAP)
8946 			   || (OMP_CLAUSE_MAP_KIND (next_clause)
8947 			       != GOMP_MAP_POINTER)
8948 			   || OMP_CLAUSE_DECL (next_clause) != decl)
8949 		      && (!struct_deref_set
8950 			  || !struct_deref_set->contains (decl)))
8951 		    {
8952 		      if (!struct_deref_set)
8953 			struct_deref_set = new hash_set<tree> ();
8954 		      /* As well as the attach, we also need a
8955 			 FIRSTPRIVATE_POINTER clause to properly map the
8956 			 pointer to the struct base.  */
8957 		      tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
8958 						  OMP_CLAUSE_MAP);
8959 		      OMP_CLAUSE_SET_MAP_KIND (c2, GOMP_MAP_ALLOC);
8960 		      OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (c2)
8961 			= 1;
8962 		      tree charptr_zero
8963 			= build_int_cst (build_pointer_type (char_type_node),
8964 					 0);
8965 		      OMP_CLAUSE_DECL (c2)
8966 			= build2 (MEM_REF, char_type_node,
8967 				  decl_ref ? decl_ref : decl, charptr_zero);
8968 		      OMP_CLAUSE_SIZE (c2) = size_zero_node;
8969 		      tree c3 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
8970 						  OMP_CLAUSE_MAP);
8971 		      OMP_CLAUSE_SET_MAP_KIND (c3,
8972 					       GOMP_MAP_FIRSTPRIVATE_POINTER);
8973 		      OMP_CLAUSE_DECL (c3) = decl;
8974 		      OMP_CLAUSE_SIZE (c3) = size_zero_node;
8975 		      tree mapgrp = *prev_list_p;
8976 		      *prev_list_p = c2;
8977 		      OMP_CLAUSE_CHAIN (c3) = mapgrp;
8978 		      OMP_CLAUSE_CHAIN (c2) = c3;
8979 
8980 		      struct_deref_set->add (decl);
8981 		    }
8982 		  goto do_add_decl;
8983 		}
8984 	      /* An "attach/detach" operation on an update directive should
8985 		 behave as a GOMP_MAP_ALWAYS_POINTER.  Beware that
8986 		 unlike attach or detach map kinds, GOMP_MAP_ALWAYS_POINTER
8987 		 depends on the previous mapping.  */
8988 	      if (code == OACC_UPDATE
8989 		  && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH)
8990 		OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_ALWAYS_POINTER);
8991 	      if (gimplify_expr (pd, pre_p, NULL, is_gimple_lvalue, fb_lvalue)
8992 		  == GS_ERROR)
8993 		{
8994 		  remove = true;
8995 		  break;
8996 		}
8997 	      if (DECL_P (decl)
8998 		  && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_TO_PSET
8999 		  && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ATTACH
9000 		  && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_DETACH
9001 		  && code != OACC_UPDATE)
9002 		{
9003 		  if (error_operand_p (decl))
9004 		    {
9005 		      remove = true;
9006 		      break;
9007 		    }
9008 
9009 		  tree stype = TREE_TYPE (decl);
9010 		  if (TREE_CODE (stype) == REFERENCE_TYPE)
9011 		    stype = TREE_TYPE (stype);
9012 		  if (TYPE_SIZE_UNIT (stype) == NULL
9013 		      || TREE_CODE (TYPE_SIZE_UNIT (stype)) != INTEGER_CST)
9014 		    {
9015 		      error_at (OMP_CLAUSE_LOCATION (c),
9016 				"mapping field %qE of variable length "
9017 				"structure", OMP_CLAUSE_DECL (c));
9018 		      remove = true;
9019 		      break;
9020 		    }
9021 
9022 		  if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_POINTER
9023 		      || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH_DETACH)
9024 		    {
9025 		      /* Error recovery.  */
9026 		      if (prev_list_p == NULL)
9027 			{
9028 			  remove = true;
9029 			  break;
9030 			}
9031 		      if (OMP_CLAUSE_CHAIN (*prev_list_p) != c)
9032 			{
9033 			  tree ch = OMP_CLAUSE_CHAIN (*prev_list_p);
9034 			  if (ch == NULL_TREE || OMP_CLAUSE_CHAIN (ch) != c)
9035 			    {
9036 			      remove = true;
9037 			      break;
9038 			    }
9039 			}
9040 		    }
9041 
9042 		  poly_offset_int offset1;
9043 		  poly_int64 bitpos1;
9044 		  tree base_ref;
9045 
9046 		  tree base
9047 		    = extract_base_bit_offset (OMP_CLAUSE_DECL (c), &base_ref,
9048 					       &bitpos1, &offset1);
9049 
9050 		  gcc_assert (base == decl);
9051 
9052 		  splay_tree_node n
9053 		    = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
9054 		  bool ptr = (OMP_CLAUSE_MAP_KIND (c)
9055 			      == GOMP_MAP_ALWAYS_POINTER);
9056 		  bool attach_detach = (OMP_CLAUSE_MAP_KIND (c)
9057 					== GOMP_MAP_ATTACH_DETACH);
9058 		  bool attach = OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ATTACH
9059 				|| OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DETACH;
9060 		  bool has_attachments = false;
9061 		  /* For OpenACC, pointers in structs should trigger an
9062 		     attach action.  */
9063 		  if (attach_detach && (region_type & ORT_ACC) != 0)
9064 		    {
9065 		      /* Turn a GOMP_MAP_ATTACH_DETACH clause into a
9066 			 GOMP_MAP_ATTACH or GOMP_MAP_DETACH clause after we
9067 			 have detected a case that needs a GOMP_MAP_STRUCT
9068 			 mapping added.  */
9069 		      gomp_map_kind k
9070 			= (code == OACC_EXIT_DATA) ? GOMP_MAP_DETACH
9071 						   : GOMP_MAP_ATTACH;
9072 		      OMP_CLAUSE_SET_MAP_KIND (c, k);
9073 		      has_attachments = true;
9074 		    }
9075 		  if (n == NULL || (n->value & GOVD_MAP) == 0)
9076 		    {
9077 		      tree l = build_omp_clause (OMP_CLAUSE_LOCATION (c),
9078 						 OMP_CLAUSE_MAP);
9079 		      gomp_map_kind k = attach ? GOMP_MAP_FORCE_PRESENT
9080 					       : GOMP_MAP_STRUCT;
9081 
9082 		      OMP_CLAUSE_SET_MAP_KIND (l, k);
9083 		      if (base_ref)
9084 			OMP_CLAUSE_DECL (l) = unshare_expr (base_ref);
9085 		      else
9086 			OMP_CLAUSE_DECL (l) = decl;
9087 		      OMP_CLAUSE_SIZE (l)
9088 			= (!attach
9089 			   ? size_int (1)
9090 			   : DECL_P (OMP_CLAUSE_DECL (l))
9091 			   ? DECL_SIZE_UNIT (OMP_CLAUSE_DECL (l))
9092 			   : TYPE_SIZE_UNIT (TREE_TYPE (OMP_CLAUSE_DECL (l))));
9093 		      if (struct_map_to_clause == NULL)
9094 			struct_map_to_clause = new hash_map<tree, tree>;
9095 		      struct_map_to_clause->put (decl, l);
9096 		      if (ptr || attach_detach)
9097 			{
9098 			  insert_struct_comp_map (code, c, l, *prev_list_p,
9099 						  NULL);
9100 			  *prev_list_p = l;
9101 			  prev_list_p = NULL;
9102 			}
9103 		      else
9104 			{
9105 			  OMP_CLAUSE_CHAIN (l) = c;
9106 			  *list_p = l;
9107 			  list_p = &OMP_CLAUSE_CHAIN (l);
9108 			}
9109 		      if (base_ref && code == OMP_TARGET)
9110 			{
9111 			  tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
9112 						      OMP_CLAUSE_MAP);
9113 			  enum gomp_map_kind mkind
9114 			    = GOMP_MAP_FIRSTPRIVATE_REFERENCE;
9115 			  OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
9116 			  OMP_CLAUSE_DECL (c2) = decl;
9117 			  OMP_CLAUSE_SIZE (c2) = size_zero_node;
9118 			  OMP_CLAUSE_CHAIN (c2) = OMP_CLAUSE_CHAIN (l);
9119 			  OMP_CLAUSE_CHAIN (l) = c2;
9120 			}
9121 		      flags = GOVD_MAP | GOVD_EXPLICIT;
9122 		      if (GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c))
9123 			  || ptr
9124 			  || attach_detach)
9125 			flags |= GOVD_SEEN;
9126 		      if (has_attachments)
9127 			flags |= GOVD_MAP_HAS_ATTACHMENTS;
9128 		      goto do_add_decl;
9129 		    }
9130 		  else if (struct_map_to_clause)
9131 		    {
9132 		      tree *osc = struct_map_to_clause->get (decl);
9133 		      tree *sc = NULL, *scp = NULL;
9134 		      if (GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c))
9135 			  || ptr
9136 			  || attach_detach)
9137 			n->value |= GOVD_SEEN;
9138 		      sc = &OMP_CLAUSE_CHAIN (*osc);
9139 		      if (*sc != c
9140 			  && (OMP_CLAUSE_MAP_KIND (*sc)
9141 			      == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
9142 			sc = &OMP_CLAUSE_CHAIN (*sc);
9143 		      /* Here "prev_list_p" is the end of the inserted
9144 			 alloc/release nodes after the struct node, OSC.  */
9145 		      for (; *sc != c; sc = &OMP_CLAUSE_CHAIN (*sc))
9146 			if ((ptr || attach_detach) && sc == prev_list_p)
9147 			  break;
9148 			else if (TREE_CODE (OMP_CLAUSE_DECL (*sc))
9149 				 != COMPONENT_REF
9150 				 && (TREE_CODE (OMP_CLAUSE_DECL (*sc))
9151 				     != INDIRECT_REF)
9152 				 && (TREE_CODE (OMP_CLAUSE_DECL (*sc))
9153 				     != ARRAY_REF))
9154 			  break;
9155 			else
9156 			  {
9157 			    tree sc_decl = OMP_CLAUSE_DECL (*sc);
9158 			    poly_offset_int offsetn;
9159 			    poly_int64 bitposn;
9160 			    tree base
9161 			      = extract_base_bit_offset (sc_decl, NULL,
9162 							 &bitposn, &offsetn);
9163 			    if (base != decl)
9164 			      break;
9165 			    if (scp)
9166 			      continue;
9167 			    tree d1 = OMP_CLAUSE_DECL (*sc);
9168 			    tree d2 = OMP_CLAUSE_DECL (c);
9169 			    while (TREE_CODE (d1) == ARRAY_REF)
9170 			      d1 = TREE_OPERAND (d1, 0);
9171 			    while (TREE_CODE (d2) == ARRAY_REF)
9172 			      d2 = TREE_OPERAND (d2, 0);
9173 			    if (TREE_CODE (d1) == INDIRECT_REF)
9174 			      d1 = TREE_OPERAND (d1, 0);
9175 			    if (TREE_CODE (d2) == INDIRECT_REF)
9176 			      d2 = TREE_OPERAND (d2, 0);
9177 			    while (TREE_CODE (d1) == COMPONENT_REF)
9178 			      if (TREE_CODE (d2) == COMPONENT_REF
9179 				  && TREE_OPERAND (d1, 1)
9180 				     == TREE_OPERAND (d2, 1))
9181 				{
9182 				  d1 = TREE_OPERAND (d1, 0);
9183 				  d2 = TREE_OPERAND (d2, 0);
9184 				}
9185 			      else
9186 				break;
9187 			    if (d1 == d2)
9188 			      {
9189 				error_at (OMP_CLAUSE_LOCATION (c),
9190 					  "%qE appears more than once in map "
9191 					  "clauses", OMP_CLAUSE_DECL (c));
9192 				remove = true;
9193 				break;
9194 			      }
9195 			    if (maybe_lt (offset1, offsetn)
9196 				|| (known_eq (offset1, offsetn)
9197 				    && maybe_lt (bitpos1, bitposn)))
9198 			      {
9199 				if (ptr || attach_detach)
9200 				  scp = sc;
9201 				else
9202 				  break;
9203 			      }
9204 			  }
9205 		      if (remove)
9206 			break;
9207 		      if (!attach)
9208 			OMP_CLAUSE_SIZE (*osc)
9209 			  = size_binop (PLUS_EXPR, OMP_CLAUSE_SIZE (*osc),
9210 					size_one_node);
9211 		      if (ptr || attach_detach)
9212 			{
9213 			  tree cl = insert_struct_comp_map (code, c, NULL,
9214 							    *prev_list_p, scp);
9215 			  if (sc == prev_list_p)
9216 			    {
9217 			      *sc = cl;
9218 			      prev_list_p = NULL;
9219 			    }
9220 			  else
9221 			    {
9222 			      *prev_list_p = OMP_CLAUSE_CHAIN (c);
9223 			      list_p = prev_list_p;
9224 			      prev_list_p = NULL;
9225 			      OMP_CLAUSE_CHAIN (c) = *sc;
9226 			      *sc = cl;
9227 			      continue;
9228 			    }
9229 			}
9230 		      else if (*sc != c)
9231 			{
9232 			  *list_p = OMP_CLAUSE_CHAIN (c);
9233 			  OMP_CLAUSE_CHAIN (c) = *sc;
9234 			  *sc = c;
9235 			  continue;
9236 			}
9237 		    }
9238 		}
9239 	      if (!remove
9240 		  && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_POINTER
9241 		  && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ATTACH_DETACH
9242 		  && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_TO_PSET
9243 		  && OMP_CLAUSE_CHAIN (c)
9244 		  && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c)) == OMP_CLAUSE_MAP
9245 		  && ((OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
9246 		       == GOMP_MAP_ALWAYS_POINTER)
9247 		      || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
9248 			  == GOMP_MAP_ATTACH_DETACH)
9249 		      || (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
9250 			  == GOMP_MAP_TO_PSET)))
9251 		prev_list_p = list_p;
9252 
9253 	      break;
9254 	    }
9255 	  flags = GOVD_MAP | GOVD_EXPLICIT;
9256 	  if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TO
9257 	      || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TOFROM)
9258 	    flags |= GOVD_MAP_ALWAYS_TO;
9259 	  goto do_add;
9260 
9261 	case OMP_CLAUSE_DEPEND:
9262 	  if (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
9263 	    {
9264 	      tree deps = OMP_CLAUSE_DECL (c);
9265 	      while (deps && TREE_CODE (deps) == TREE_LIST)
9266 		{
9267 		  if (TREE_CODE (TREE_PURPOSE (deps)) == TRUNC_DIV_EXPR
9268 		      && DECL_P (TREE_OPERAND (TREE_PURPOSE (deps), 1)))
9269 		    gimplify_expr (&TREE_OPERAND (TREE_PURPOSE (deps), 1),
9270 				   pre_p, NULL, is_gimple_val, fb_rvalue);
9271 		  deps = TREE_CHAIN (deps);
9272 		}
9273 	      break;
9274 	    }
9275 	  else if (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE)
9276 	    break;
9277 	  if (handled_depend_iterators == -1)
9278 	    handled_depend_iterators = gimplify_omp_depend (list_p, pre_p);
9279 	  if (handled_depend_iterators)
9280 	    {
9281 	      if (handled_depend_iterators == 2)
9282 		remove = true;
9283 	      break;
9284 	    }
9285 	  if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
9286 	    {
9287 	      gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
9288 			     NULL, is_gimple_val, fb_rvalue);
9289 	      OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
9290 	    }
9291 	  if (error_operand_p (OMP_CLAUSE_DECL (c)))
9292 	    {
9293 	      remove = true;
9294 	      break;
9295 	    }
9296 	  OMP_CLAUSE_DECL (c) = build_fold_addr_expr (OMP_CLAUSE_DECL (c));
9297 	  if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
9298 			     is_gimple_val, fb_rvalue) == GS_ERROR)
9299 	    {
9300 	      remove = true;
9301 	      break;
9302 	    }
9303 	  if (code == OMP_TASK)
9304 	    ctx->has_depend = true;
9305 	  break;
9306 
9307 	case OMP_CLAUSE_TO:
9308 	case OMP_CLAUSE_FROM:
9309 	case OMP_CLAUSE__CACHE_:
9310 	  decl = OMP_CLAUSE_DECL (c);
9311 	  if (error_operand_p (decl))
9312 	    {
9313 	      remove = true;
9314 	      break;
9315 	    }
9316 	  if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
9317 	    OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
9318 				  : TYPE_SIZE_UNIT (TREE_TYPE (decl));
9319 	  if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
9320 			     NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
9321 	    {
9322 	      remove = true;
9323 	      break;
9324 	    }
9325 	  if (!DECL_P (decl))
9326 	    {
9327 	      if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p,
9328 				 NULL, is_gimple_lvalue, fb_lvalue)
9329 		  == GS_ERROR)
9330 		{
9331 		  remove = true;
9332 		  break;
9333 		}
9334 	      break;
9335 	    }
9336 	  goto do_notice;
9337 
9338 	case OMP_CLAUSE_USE_DEVICE_PTR:
9339 	case OMP_CLAUSE_USE_DEVICE_ADDR:
9340 	  flags = GOVD_EXPLICIT;
9341 	  goto do_add;
9342 
9343 	case OMP_CLAUSE_IS_DEVICE_PTR:
9344 	  flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
9345 	  goto do_add;
9346 
9347 	do_add:
9348 	  decl = OMP_CLAUSE_DECL (c);
9349 	do_add_decl:
9350 	  if (error_operand_p (decl))
9351 	    {
9352 	      remove = true;
9353 	      break;
9354 	    }
9355 	  if (DECL_NAME (decl) == NULL_TREE && (flags & GOVD_SHARED) == 0)
9356 	    {
9357 	      tree t = omp_member_access_dummy_var (decl);
9358 	      if (t)
9359 		{
9360 		  tree v = DECL_VALUE_EXPR (decl);
9361 		  DECL_NAME (decl) = DECL_NAME (TREE_OPERAND (v, 1));
9362 		  if (outer_ctx)
9363 		    omp_notice_variable (outer_ctx, t, true);
9364 		}
9365 	    }
9366 	  if (code == OACC_DATA
9367 	      && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
9368 	      && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
9369 	    flags |= GOVD_MAP_0LEN_ARRAY;
9370 	  omp_add_variable (ctx, decl, flags);
9371 	  if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
9372 	       || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_IN_REDUCTION
9373 	       || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_TASK_REDUCTION)
9374 	      && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
9375 	    {
9376 	      omp_add_variable (ctx, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
9377 				GOVD_LOCAL | GOVD_SEEN);
9378 	      if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c)
9379 		  && walk_tree (&OMP_CLAUSE_REDUCTION_INIT (c),
9380 				find_decl_expr,
9381 				OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
9382 				NULL) == NULL_TREE)
9383 		omp_add_variable (ctx,
9384 				  OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
9385 				  GOVD_LOCAL | GOVD_SEEN);
9386 	      gimplify_omp_ctxp = ctx;
9387 	      push_gimplify_context ();
9388 
9389 	      OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
9390 	      OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
9391 
9392 	      gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c),
9393 		  		&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c));
9394 	      pop_gimplify_context
9395 		(gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c)));
9396 	      push_gimplify_context ();
9397 	      gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c),
9398 		  		&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
9399 	      pop_gimplify_context
9400 		(gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c)));
9401 	      OMP_CLAUSE_REDUCTION_INIT (c) = NULL_TREE;
9402 	      OMP_CLAUSE_REDUCTION_MERGE (c) = NULL_TREE;
9403 
9404 	      gimplify_omp_ctxp = outer_ctx;
9405 	    }
9406 	  else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
9407 		   && OMP_CLAUSE_LASTPRIVATE_STMT (c))
9408 	    {
9409 	      gimplify_omp_ctxp = ctx;
9410 	      push_gimplify_context ();
9411 	      if (TREE_CODE (OMP_CLAUSE_LASTPRIVATE_STMT (c)) != BIND_EXPR)
9412 		{
9413 		  tree bind = build3 (BIND_EXPR, void_type_node, NULL,
9414 				      NULL, NULL);
9415 		  TREE_SIDE_EFFECTS (bind) = 1;
9416 		  BIND_EXPR_BODY (bind) = OMP_CLAUSE_LASTPRIVATE_STMT (c);
9417 		  OMP_CLAUSE_LASTPRIVATE_STMT (c) = bind;
9418 		}
9419 	      gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c),
9420 				&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
9421 	      pop_gimplify_context
9422 		(gimple_seq_first_stmt (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c)));
9423 	      OMP_CLAUSE_LASTPRIVATE_STMT (c) = NULL_TREE;
9424 
9425 	      gimplify_omp_ctxp = outer_ctx;
9426 	    }
9427 	  else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
9428 		   && OMP_CLAUSE_LINEAR_STMT (c))
9429 	    {
9430 	      gimplify_omp_ctxp = ctx;
9431 	      push_gimplify_context ();
9432 	      if (TREE_CODE (OMP_CLAUSE_LINEAR_STMT (c)) != BIND_EXPR)
9433 		{
9434 		  tree bind = build3 (BIND_EXPR, void_type_node, NULL,
9435 				      NULL, NULL);
9436 		  TREE_SIDE_EFFECTS (bind) = 1;
9437 		  BIND_EXPR_BODY (bind) = OMP_CLAUSE_LINEAR_STMT (c);
9438 		  OMP_CLAUSE_LINEAR_STMT (c) = bind;
9439 		}
9440 	      gimplify_and_add (OMP_CLAUSE_LINEAR_STMT (c),
9441 				&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
9442 	      pop_gimplify_context
9443 		(gimple_seq_first_stmt (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c)));
9444 	      OMP_CLAUSE_LINEAR_STMT (c) = NULL_TREE;
9445 
9446 	      gimplify_omp_ctxp = outer_ctx;
9447 	    }
9448 	  if (notice_outer)
9449 	    goto do_notice;
9450 	  break;
9451 
9452 	case OMP_CLAUSE_COPYIN:
9453 	case OMP_CLAUSE_COPYPRIVATE:
9454 	  decl = OMP_CLAUSE_DECL (c);
9455 	  if (error_operand_p (decl))
9456 	    {
9457 	      remove = true;
9458 	      break;
9459 	    }
9460 	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_COPYPRIVATE
9461 	      && !remove
9462 	      && !omp_check_private (ctx, decl, true))
9463 	    {
9464 	      remove = true;
9465 	      if (is_global_var (decl))
9466 		{
9467 		  if (DECL_THREAD_LOCAL_P (decl))
9468 		    remove = false;
9469 		  else if (DECL_HAS_VALUE_EXPR_P (decl))
9470 		    {
9471 		      tree value = get_base_address (DECL_VALUE_EXPR (decl));
9472 
9473 		      if (value
9474 			  && DECL_P (value)
9475 			  && DECL_THREAD_LOCAL_P (value))
9476 			remove = false;
9477 		    }
9478 		}
9479 	      if (remove)
9480 		error_at (OMP_CLAUSE_LOCATION (c),
9481 			  "copyprivate variable %qE is not threadprivate"
9482 			  " or private in outer context", DECL_NAME (decl));
9483 	    }
9484 	do_notice:
9485 	  if ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
9486 	       || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE
9487 	       || OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
9488 	      && outer_ctx
9489 	      && ((region_type & ORT_TASKLOOP) == ORT_TASKLOOP
9490 		   || (region_type == ORT_WORKSHARE
9491 		       && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
9492 		       && (OMP_CLAUSE_REDUCTION_INSCAN (c)
9493 			   || code == OMP_LOOP)))
9494 	      && (outer_ctx->region_type == ORT_COMBINED_PARALLEL
9495 		  || (code == OMP_LOOP
9496 		      && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
9497 		      && ((outer_ctx->region_type & ORT_COMBINED_TEAMS)
9498 			  == ORT_COMBINED_TEAMS))))
9499 	    {
9500 	      splay_tree_node on
9501 		= splay_tree_lookup (outer_ctx->variables,
9502 				     (splay_tree_key)decl);
9503 	      if (on == NULL || (on->value & GOVD_DATA_SHARE_CLASS) == 0)
9504 		{
9505 		  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
9506 		      && TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
9507 		      && (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
9508 			  || (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
9509 			      && (TREE_CODE (TREE_TYPE (TREE_TYPE (decl)))
9510 				  == POINTER_TYPE))))
9511 		    omp_firstprivatize_variable (outer_ctx, decl);
9512 		  else
9513 		    {
9514 		      omp_add_variable (outer_ctx, decl,
9515 					GOVD_SEEN | GOVD_SHARED);
9516 		      if (outer_ctx->outer_context)
9517 			omp_notice_variable (outer_ctx->outer_context, decl,
9518 					     true);
9519 		    }
9520 		}
9521 	    }
9522 	  if (outer_ctx)
9523 	    omp_notice_variable (outer_ctx, decl, true);
9524 	  if (check_non_private
9525 	      && region_type == ORT_WORKSHARE
9526 	      && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
9527 		  || decl == OMP_CLAUSE_DECL (c)
9528 		  || (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
9529 		      && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
9530 			  == ADDR_EXPR
9531 			  || (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
9532 			      == POINTER_PLUS_EXPR
9533 			      && (TREE_CODE (TREE_OPERAND (TREE_OPERAND
9534 						(OMP_CLAUSE_DECL (c), 0), 0))
9535 				  == ADDR_EXPR)))))
9536 	      && omp_check_private (ctx, decl, false))
9537 	    {
9538 	      error ("%s variable %qE is private in outer context",
9539 		     check_non_private, DECL_NAME (decl));
9540 	      remove = true;
9541 	    }
9542 	  break;
9543 
9544 	case OMP_CLAUSE_IF:
9545 	  if (OMP_CLAUSE_IF_MODIFIER (c) != ERROR_MARK
9546 	      && OMP_CLAUSE_IF_MODIFIER (c) != code)
9547 	    {
9548 	      const char *p[2];
9549 	      for (int i = 0; i < 2; i++)
9550 		switch (i ? OMP_CLAUSE_IF_MODIFIER (c) : code)
9551 		  {
9552 		  case VOID_CST: p[i] = "cancel"; break;
9553 		  case OMP_PARALLEL: p[i] = "parallel"; break;
9554 		  case OMP_SIMD: p[i] = "simd"; break;
9555 		  case OMP_TASK: p[i] = "task"; break;
9556 		  case OMP_TASKLOOP: p[i] = "taskloop"; break;
9557 		  case OMP_TARGET_DATA: p[i] = "target data"; break;
9558 		  case OMP_TARGET: p[i] = "target"; break;
9559 		  case OMP_TARGET_UPDATE: p[i] = "target update"; break;
9560 		  case OMP_TARGET_ENTER_DATA:
9561 		    p[i] = "target enter data"; break;
9562 		  case OMP_TARGET_EXIT_DATA: p[i] = "target exit data"; break;
9563 		  default: gcc_unreachable ();
9564 		  }
9565 	      error_at (OMP_CLAUSE_LOCATION (c),
9566 			"expected %qs %<if%> clause modifier rather than %qs",
9567 			p[0], p[1]);
9568 	      remove = true;
9569 	    }
9570 	  /* Fall through.  */
9571 
9572 	case OMP_CLAUSE_FINAL:
9573 	  OMP_CLAUSE_OPERAND (c, 0)
9574 	    = gimple_boolify (OMP_CLAUSE_OPERAND (c, 0));
9575 	  /* Fall through.  */
9576 
9577 	case OMP_CLAUSE_SCHEDULE:
9578 	case OMP_CLAUSE_NUM_THREADS:
9579 	case OMP_CLAUSE_NUM_TEAMS:
9580 	case OMP_CLAUSE_THREAD_LIMIT:
9581 	case OMP_CLAUSE_DIST_SCHEDULE:
9582 	case OMP_CLAUSE_DEVICE:
9583 	case OMP_CLAUSE_PRIORITY:
9584 	case OMP_CLAUSE_GRAINSIZE:
9585 	case OMP_CLAUSE_NUM_TASKS:
9586 	case OMP_CLAUSE_HINT:
9587 	case OMP_CLAUSE_ASYNC:
9588 	case OMP_CLAUSE_WAIT:
9589 	case OMP_CLAUSE_NUM_GANGS:
9590 	case OMP_CLAUSE_NUM_WORKERS:
9591 	case OMP_CLAUSE_VECTOR_LENGTH:
9592 	case OMP_CLAUSE_WORKER:
9593 	case OMP_CLAUSE_VECTOR:
9594 	  if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
9595 			     is_gimple_val, fb_rvalue) == GS_ERROR)
9596 	    remove = true;
9597 	  break;
9598 
9599 	case OMP_CLAUSE_GANG:
9600 	  if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
9601 			     is_gimple_val, fb_rvalue) == GS_ERROR)
9602 	    remove = true;
9603 	  if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 1), pre_p, NULL,
9604 			     is_gimple_val, fb_rvalue) == GS_ERROR)
9605 	    remove = true;
9606 	  break;
9607 
9608 	case OMP_CLAUSE_NOWAIT:
9609 	  nowait = 1;
9610 	  break;
9611 
9612 	case OMP_CLAUSE_ORDERED:
9613 	case OMP_CLAUSE_UNTIED:
9614 	case OMP_CLAUSE_COLLAPSE:
9615 	case OMP_CLAUSE_TILE:
9616 	case OMP_CLAUSE_AUTO:
9617 	case OMP_CLAUSE_SEQ:
9618 	case OMP_CLAUSE_INDEPENDENT:
9619 	case OMP_CLAUSE_MERGEABLE:
9620 	case OMP_CLAUSE_PROC_BIND:
9621 	case OMP_CLAUSE_SAFELEN:
9622 	case OMP_CLAUSE_SIMDLEN:
9623 	case OMP_CLAUSE_NOGROUP:
9624 	case OMP_CLAUSE_THREADS:
9625 	case OMP_CLAUSE_SIMD:
9626 	case OMP_CLAUSE_BIND:
9627 	case OMP_CLAUSE_IF_PRESENT:
9628 	case OMP_CLAUSE_FINALIZE:
9629 	  break;
9630 
9631 	case OMP_CLAUSE_ORDER:
9632 	  ctx->order_concurrent = true;
9633 	  break;
9634 
9635 	case OMP_CLAUSE_DEFAULTMAP:
9636 	  enum gimplify_defaultmap_kind gdmkmin, gdmkmax;
9637 	  switch (OMP_CLAUSE_DEFAULTMAP_CATEGORY (c))
9638 	    {
9639 	    case OMP_CLAUSE_DEFAULTMAP_CATEGORY_UNSPECIFIED:
9640 	      gdmkmin = GDMK_SCALAR;
9641 	      gdmkmax = GDMK_POINTER;
9642 	      break;
9643 	    case OMP_CLAUSE_DEFAULTMAP_CATEGORY_SCALAR:
9644 	      gdmkmin = gdmkmax = GDMK_SCALAR;
9645 	      break;
9646 	    case OMP_CLAUSE_DEFAULTMAP_CATEGORY_AGGREGATE:
9647 	      gdmkmin = gdmkmax = GDMK_AGGREGATE;
9648 	      break;
9649 	    case OMP_CLAUSE_DEFAULTMAP_CATEGORY_ALLOCATABLE:
9650 	      gdmkmin = gdmkmax = GDMK_ALLOCATABLE;
9651 	      break;
9652 	    case OMP_CLAUSE_DEFAULTMAP_CATEGORY_POINTER:
9653 	      gdmkmin = gdmkmax = GDMK_POINTER;
9654 	      break;
9655 	    default:
9656 	      gcc_unreachable ();
9657 	    }
9658 	  for (int gdmk = gdmkmin; gdmk <= gdmkmax; gdmk++)
9659 	    switch (OMP_CLAUSE_DEFAULTMAP_BEHAVIOR (c))
9660 	      {
9661 	      case OMP_CLAUSE_DEFAULTMAP_ALLOC:
9662 		ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_ALLOC_ONLY;
9663 		break;
9664 	      case OMP_CLAUSE_DEFAULTMAP_TO:
9665 		ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_TO_ONLY;
9666 		break;
9667 	      case OMP_CLAUSE_DEFAULTMAP_FROM:
9668 		ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_FROM_ONLY;
9669 		break;
9670 	      case OMP_CLAUSE_DEFAULTMAP_TOFROM:
9671 		ctx->defaultmap[gdmk] = GOVD_MAP;
9672 		break;
9673 	      case OMP_CLAUSE_DEFAULTMAP_FIRSTPRIVATE:
9674 		ctx->defaultmap[gdmk] = GOVD_FIRSTPRIVATE;
9675 		break;
9676 	      case OMP_CLAUSE_DEFAULTMAP_NONE:
9677 		ctx->defaultmap[gdmk] = 0;
9678 		break;
9679 	      case OMP_CLAUSE_DEFAULTMAP_DEFAULT:
9680 		switch (gdmk)
9681 		  {
9682 		  case GDMK_SCALAR:
9683 		    ctx->defaultmap[gdmk] = GOVD_FIRSTPRIVATE;
9684 		    break;
9685 		  case GDMK_AGGREGATE:
9686 		  case GDMK_ALLOCATABLE:
9687 		    ctx->defaultmap[gdmk] = GOVD_MAP;
9688 		    break;
9689 		  case GDMK_POINTER:
9690 		    ctx->defaultmap[gdmk] = GOVD_MAP | GOVD_MAP_0LEN_ARRAY;
9691 		    break;
9692 		  default:
9693 		    gcc_unreachable ();
9694 		  }
9695 		break;
9696 	      default:
9697 		gcc_unreachable ();
9698 	      }
9699 	  break;
9700 
9701 	case OMP_CLAUSE_ALIGNED:
9702 	  decl = OMP_CLAUSE_DECL (c);
9703 	  if (error_operand_p (decl))
9704 	    {
9705 	      remove = true;
9706 	      break;
9707 	    }
9708 	  if (gimplify_expr (&OMP_CLAUSE_ALIGNED_ALIGNMENT (c), pre_p, NULL,
9709 			     is_gimple_val, fb_rvalue) == GS_ERROR)
9710 	    {
9711 	      remove = true;
9712 	      break;
9713 	    }
9714 	  if (!is_global_var (decl)
9715 	      && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
9716 	    omp_add_variable (ctx, decl, GOVD_ALIGNED);
9717 	  break;
9718 
9719 	case OMP_CLAUSE_NONTEMPORAL:
9720 	  decl = OMP_CLAUSE_DECL (c);
9721 	  if (error_operand_p (decl))
9722 	    {
9723 	      remove = true;
9724 	      break;
9725 	    }
9726 	  omp_add_variable (ctx, decl, GOVD_NONTEMPORAL);
9727 	  break;
9728 
9729 	case OMP_CLAUSE_DEFAULT:
9730 	  ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
9731 	  break;
9732 
9733 	case OMP_CLAUSE_INCLUSIVE:
9734 	case OMP_CLAUSE_EXCLUSIVE:
9735 	  decl = OMP_CLAUSE_DECL (c);
9736 	  {
9737 	    splay_tree_node n = splay_tree_lookup (outer_ctx->variables,
9738 						   (splay_tree_key) decl);
9739 	    if (n == NULL || (n->value & GOVD_REDUCTION) == 0)
9740 	      {
9741 		error_at (OMP_CLAUSE_LOCATION (c),
9742 			  "%qD specified in %qs clause but not in %<inscan%> "
9743 			  "%<reduction%> clause on the containing construct",
9744 			  decl, omp_clause_code_name[OMP_CLAUSE_CODE (c)]);
9745 		remove = true;
9746 	      }
9747 	    else
9748 	      {
9749 		n->value |= GOVD_REDUCTION_INSCAN;
9750 		if (outer_ctx->region_type == ORT_SIMD
9751 		    && outer_ctx->outer_context
9752 		    && outer_ctx->outer_context->region_type == ORT_WORKSHARE)
9753 		  {
9754 		    n = splay_tree_lookup (outer_ctx->outer_context->variables,
9755 					   (splay_tree_key) decl);
9756 		    if (n && (n->value & GOVD_REDUCTION) != 0)
9757 		      n->value |= GOVD_REDUCTION_INSCAN;
9758 		  }
9759 	      }
9760 	  }
9761 	  break;
9762 
9763 	default:
9764 	  gcc_unreachable ();
9765 	}
9766 
9767       if (code == OACC_DATA
9768 	  && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
9769 	  && (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
9770 	      || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
9771 	remove = true;
9772       if (remove)
9773 	*list_p = OMP_CLAUSE_CHAIN (c);
9774       else
9775 	list_p = &OMP_CLAUSE_CHAIN (c);
9776     }
9777 
9778   ctx->clauses = *orig_list_p;
9779   gimplify_omp_ctxp = ctx;
9780   if (struct_map_to_clause)
9781     delete struct_map_to_clause;
9782   if (struct_deref_set)
9783     delete struct_deref_set;
9784 }
9785 
9786 /* Return true if DECL is a candidate for shared to firstprivate
9787    optimization.  We only consider non-addressable scalars, not
9788    too big, and not references.  */
9789 
9790 static bool
omp_shared_to_firstprivate_optimizable_decl_p(tree decl)9791 omp_shared_to_firstprivate_optimizable_decl_p (tree decl)
9792 {
9793   if (TREE_ADDRESSABLE (decl))
9794     return false;
9795   tree type = TREE_TYPE (decl);
9796   if (!is_gimple_reg_type (type)
9797       || TREE_CODE (type) == REFERENCE_TYPE
9798       || TREE_ADDRESSABLE (type))
9799     return false;
9800   /* Don't optimize too large decls, as each thread/task will have
9801      its own.  */
9802   HOST_WIDE_INT len = int_size_in_bytes (type);
9803   if (len == -1 || len > 4 * POINTER_SIZE / BITS_PER_UNIT)
9804     return false;
9805   if (lang_hooks.decls.omp_privatize_by_reference (decl))
9806     return false;
9807   return true;
9808 }
9809 
9810 /* Helper function of omp_find_stores_op and gimplify_adjust_omp_clauses*.
9811    For omp_shared_to_firstprivate_optimizable_decl_p decl mark it as
9812    GOVD_WRITTEN in outer contexts.  */
9813 
9814 static void
omp_mark_stores(struct gimplify_omp_ctx * ctx,tree decl)9815 omp_mark_stores (struct gimplify_omp_ctx *ctx, tree decl)
9816 {
9817   for (; ctx; ctx = ctx->outer_context)
9818     {
9819       splay_tree_node n = splay_tree_lookup (ctx->variables,
9820 					     (splay_tree_key) decl);
9821       if (n == NULL)
9822 	continue;
9823       else if (n->value & GOVD_SHARED)
9824 	{
9825 	  n->value |= GOVD_WRITTEN;
9826 	  return;
9827 	}
9828       else if (n->value & GOVD_DATA_SHARE_CLASS)
9829 	return;
9830     }
9831 }
9832 
9833 /* Helper callback for walk_gimple_seq to discover possible stores
9834    to omp_shared_to_firstprivate_optimizable_decl_p decls and set
9835    GOVD_WRITTEN if they are GOVD_SHARED in some outer context
9836    for those.  */
9837 
9838 static tree
omp_find_stores_op(tree * tp,int * walk_subtrees,void * data)9839 omp_find_stores_op (tree *tp, int *walk_subtrees, void *data)
9840 {
9841   struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
9842 
9843   *walk_subtrees = 0;
9844   if (!wi->is_lhs)
9845     return NULL_TREE;
9846 
9847   tree op = *tp;
9848   do
9849     {
9850       if (handled_component_p (op))
9851 	op = TREE_OPERAND (op, 0);
9852       else if ((TREE_CODE (op) == MEM_REF || TREE_CODE (op) == TARGET_MEM_REF)
9853 	       && TREE_CODE (TREE_OPERAND (op, 0)) == ADDR_EXPR)
9854 	op = TREE_OPERAND (TREE_OPERAND (op, 0), 0);
9855       else
9856 	break;
9857     }
9858   while (1);
9859   if (!DECL_P (op) || !omp_shared_to_firstprivate_optimizable_decl_p (op))
9860     return NULL_TREE;
9861 
9862   omp_mark_stores (gimplify_omp_ctxp, op);
9863   return NULL_TREE;
9864 }
9865 
/* Statement callback companion of omp_find_stores_op for
   walk_gimple_seq: decides which statements to recurse into.  OpenMP
   constructs whose bodies gimplify_adjust_omp_clauses already handled
   are skipped, except that the pre-body of a GIMPLE_OMP_FOR is still
   walked.  */
9870 
9871 static tree
omp_find_stores_stmt(gimple_stmt_iterator * gsi_p,bool * handled_ops_p,struct walk_stmt_info * wi)9872 omp_find_stores_stmt (gimple_stmt_iterator *gsi_p,
9873 		      bool *handled_ops_p,
9874 		      struct walk_stmt_info *wi)
9875 {
9876   gimple *stmt = gsi_stmt (*gsi_p);
9877   switch (gimple_code (stmt))
9878     {
9879     /* Don't recurse on OpenMP constructs for which
9880        gimplify_adjust_omp_clauses already handled the bodies,
9881        except handle gimple_omp_for_pre_body.  */
9882     case GIMPLE_OMP_FOR:
9883       *handled_ops_p = true;
9884       if (gimple_omp_for_pre_body (stmt))
9885 	walk_gimple_seq (gimple_omp_for_pre_body (stmt),
9886 			 omp_find_stores_stmt, omp_find_stores_op, wi);
9887       break;
9888     case GIMPLE_OMP_PARALLEL:
9889     case GIMPLE_OMP_TASK:
9890     case GIMPLE_OMP_SECTIONS:
9891     case GIMPLE_OMP_SINGLE:
9892     case GIMPLE_OMP_TARGET:
9893     case GIMPLE_OMP_TEAMS:
9894     case GIMPLE_OMP_CRITICAL:
9895       *handled_ops_p = true;
9896       break;
9897     default:
9898       break;
9899     }
9900   return NULL_TREE;
9901 }
9902 
/* Arguments bundled for gimplify_adjust_omp_clauses_1, which receives
   them through a single void * DATA pointer.  */
struct gimplify_adjust_omp_clauses_data
{
  tree *list_p;		/* Head of the clause chain being built; newly
			   created clauses are prepended here.  */
  gimple_seq *pre_p;	/* Sequence receiving statements produced while
			   gimplifying clause operands.  */
};
9908 
9909 /* For all variables that were not actually used within the context,
9910    remove PRIVATE, SHARED, and FIRSTPRIVATE clauses.  */
9911 
/* Callback for each (DECL, FLAGS) pair recorded in the gimplify
   context's variable table (the splay_tree_node N holds the decl as
   key and the GOVD_* flags as value); builds the implicit clause the
   flags call for and prepends it to DATA->list_p.  DATA points to a
   gimplify_adjust_omp_clauses_data.  Always returns 0 so that the
   traversal continues.  */

static int
gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data)
{
  tree *list_p = ((struct gimplify_adjust_omp_clauses_data *) data)->list_p;
  gimple_seq *pre_p
    = ((struct gimplify_adjust_omp_clauses_data *) data)->pre_p;
  tree decl = (tree) n->key;
  unsigned flags = n->value;
  enum omp_clause_code code;
  tree clause;
  bool private_debug;

  /* On a combined parallel, decls recorded as conditional lastprivate
     are instead emitted here as written shared.  */
  if (gimplify_omp_ctxp->region_type == ORT_COMBINED_PARALLEL
      && (flags & GOVD_LASTPRIVATE_CONDITIONAL) != 0)
    flags = GOVD_SHARED | GOVD_SEEN | GOVD_WRITTEN;
  /* No implicit clause is needed for decls with an explicit clause,
     context-local decls, decls never seen in the region, or maps with
     attachments.  */
  if (flags & (GOVD_EXPLICIT | GOVD_LOCAL))
    return 0;
  if ((flags & GOVD_SEEN) == 0)
    return 0;
  if ((flags & GOVD_MAP_HAS_ATTACHMENTS) != 0)
    return 0;
  /* PRIVATE_DEBUG leads to an OMP_CLAUSE_PRIVATE with
     OMP_CLAUSE_PRIVATE_DEBUG set (see below); otherwise ask the
     frontend hook whether the decl should get such debug treatment.  */
  if (flags & GOVD_DEBUG_PRIVATE)
    {
      gcc_assert ((flags & GOVD_DATA_SHARE_CLASS) == GOVD_SHARED);
      private_debug = true;
    }
  else if (flags & GOVD_MAP)
    private_debug = false;
  else
    private_debug
      = lang_hooks.decls.omp_private_debug_clause (decl,
						   !!(flags & GOVD_SHARED));
  /* Translate the GOVD_* flags into the clause code to build.  */
  if (private_debug)
    code = OMP_CLAUSE_PRIVATE;
  else if (flags & GOVD_MAP)
    {
      code = OMP_CLAUSE_MAP;
      if ((gimplify_omp_ctxp->region_type & ORT_ACC) == 0
	  && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl))))
	{
	  error ("%<_Atomic%> %qD in implicit %<map%> clause", decl);
	  return 0;
	}
      /* Constant-pool vars that get mapped are turned into "omp
	 declare target" vars and flagged as offloadable.  */
      if (VAR_P (decl)
	  && DECL_IN_CONSTANT_POOL (decl)
	  && !lookup_attribute ("omp declare target",
				DECL_ATTRIBUTES (decl)))
	{
	  tree id = get_identifier ("omp declare target");
	  DECL_ATTRIBUTES (decl)
	    = tree_cons (id, NULL_TREE, DECL_ATTRIBUTES (decl));
	  varpool_node *node = varpool_node::get (decl);
	  if (node)
	    {
	      node->offloadable = 1;
	      if (ENABLE_OFFLOADING)
		g->have_offload = true;
	    }
	}
    }
  else if (flags & GOVD_SHARED)
    {
      /* For globals, only emit an implicit shared clause when some
	 enclosing context privatizes/reduces/maps the decl; otherwise
	 the global itself is accessed directly and no clause helps.  */
      if (is_global_var (decl))
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
	  while (ctx != NULL)
	    {
	      splay_tree_node on
		= splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	      if (on && (on->value & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
				      | GOVD_PRIVATE | GOVD_REDUCTION
				      | GOVD_LINEAR | GOVD_MAP)) != 0)
		break;
	      ctx = ctx->outer_context;
	    }
	  if (ctx == NULL)
	    return 0;
	}
      code = OMP_CLAUSE_SHARED;
      /* Don't optimize shared into firstprivate for read-only vars
	 on tasks with depend clause, we shouldn't try to copy them
	 until the dependencies are satisfied.  */
      if (gimplify_omp_ctxp->has_depend)
	flags |= GOVD_WRITTEN;
    }
  else if (flags & GOVD_PRIVATE)
    code = OMP_CLAUSE_PRIVATE;
  else if (flags & GOVD_FIRSTPRIVATE)
    {
      code = OMP_CLAUSE_FIRSTPRIVATE;
      if ((gimplify_omp_ctxp->region_type & ORT_TARGET)
	  && (gimplify_omp_ctxp->region_type & ORT_ACC) == 0
	  && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl))))
	{
	  error ("%<_Atomic%> %qD in implicit %<firstprivate%> clause on "
		 "%<target%> construct", decl);
	  return 0;
	}
    }
  else if (flags & GOVD_LASTPRIVATE)
    code = OMP_CLAUSE_LASTPRIVATE;
  else if (flags & (GOVD_ALIGNED | GOVD_NONTEMPORAL))
    /* ALIGNED/NONTEMPORAL never produce an implicit clause here.  */
    return 0;
  else if (flags & GOVD_CONDTEMP)
    {
      code = OMP_CLAUSE__CONDTEMP_;
      gimple_add_tmp_var (decl);
    }
  else
    gcc_unreachable ();

  /* A lastprivate, or a shared that is actually written, counts as a
     store seen by outer contexts (defeats their read-only shared to
     firstprivate optimization).  */
  if (((flags & GOVD_LASTPRIVATE)
       || (code == OMP_CLAUSE_SHARED && (flags & GOVD_WRITTEN)))
      && omp_shared_to_firstprivate_optimizable_decl_p (decl))
    omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);

  /* Build the clause and prepend it to the list.  */
  tree chain = *list_p;
  clause = build_omp_clause (input_location, code);
  OMP_CLAUSE_DECL (clause) = decl;
  OMP_CLAUSE_CHAIN (clause) = chain;
  if (private_debug)
    OMP_CLAUSE_PRIVATE_DEBUG (clause) = 1;
  else if (code == OMP_CLAUSE_PRIVATE && (flags & GOVD_PRIVATE_OUTER_REF))
    OMP_CLAUSE_PRIVATE_OUTER_REF (clause) = 1;
  else if (code == OMP_CLAUSE_SHARED
	   && (flags & GOVD_WRITTEN) == 0
	   && omp_shared_to_firstprivate_optimizable_decl_p (decl))
    OMP_CLAUSE_SHARED_READONLY (clause) = 1;
  else if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_EXPLICIT) == 0)
    OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (clause) = 1;
  else if (code == OMP_CLAUSE_MAP && (flags & GOVD_MAP_0LEN_ARRAY) != 0)
    {
      /* Maybe-zero-length array section: emit a GOMP_MAP_ALLOC of a
	 zero-sized chunk at *decl plus a firstprivate pointer map for
	 the base pointer itself.  */
      tree nc = build_omp_clause (input_location, OMP_CLAUSE_MAP);
      OMP_CLAUSE_DECL (nc) = decl;
      if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE)
	OMP_CLAUSE_DECL (clause)
	  = build_simple_mem_ref_loc (input_location, decl);
      OMP_CLAUSE_DECL (clause)
	= build2 (MEM_REF, char_type_node, OMP_CLAUSE_DECL (clause),
		  build_int_cst (build_pointer_type (char_type_node), 0));
      OMP_CLAUSE_SIZE (clause) = size_zero_node;
      OMP_CLAUSE_SIZE (nc) = size_zero_node;
      OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_ALLOC);
      OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (clause) = 1;
      OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
      OMP_CLAUSE_CHAIN (nc) = chain;
      OMP_CLAUSE_CHAIN (clause) = nc;
      /* Gimplify the address in the outer context, then restore.  */
      struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
      gimplify_omp_ctxp = ctx->outer_context;
      gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (clause), 0),
		     pre_p, NULL, is_gimple_val, fb_rvalue);
      gimplify_omp_ctxp = ctx;
    }
  else if (code == OMP_CLAUSE_MAP)
    {
      int kind;
      /* Not all combinations of these GOVD_MAP flags are actually valid.  */
      switch (flags & (GOVD_MAP_TO_ONLY
		       | GOVD_MAP_FORCE
		       | GOVD_MAP_FORCE_PRESENT
		       | GOVD_MAP_ALLOC_ONLY
		       | GOVD_MAP_FROM_ONLY))
	{
	case 0:
	  kind = GOMP_MAP_TOFROM;
	  break;
	case GOVD_MAP_FORCE:
	  kind = GOMP_MAP_TOFROM | GOMP_MAP_FLAG_FORCE;
	  break;
	case GOVD_MAP_TO_ONLY:
	  kind = GOMP_MAP_TO;
	  break;
	case GOVD_MAP_FROM_ONLY:
	  kind = GOMP_MAP_FROM;
	  break;
	case GOVD_MAP_ALLOC_ONLY:
	  kind = GOMP_MAP_ALLOC;
	  break;
	case GOVD_MAP_TO_ONLY | GOVD_MAP_FORCE:
	  kind = GOMP_MAP_TO | GOMP_MAP_FLAG_FORCE;
	  break;
	case GOVD_MAP_FORCE_PRESENT:
	  kind = GOMP_MAP_FORCE_PRESENT;
	  break;
	default:
	  gcc_unreachable ();
	}
      OMP_CLAUSE_SET_MAP_KIND (clause, kind);
      if (DECL_SIZE (decl)
	  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
	{
	  /* Variable-sized decl: its DECL_VALUE_EXPR is *tmp.  Map the
	     pointed-to storage and add a pointer map for the base.  */
	  tree decl2 = DECL_VALUE_EXPR (decl);
	  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
	  decl2 = TREE_OPERAND (decl2, 0);
	  gcc_assert (DECL_P (decl2));
	  tree mem = build_simple_mem_ref (decl2);
	  OMP_CLAUSE_DECL (clause) = mem;
	  OMP_CLAUSE_SIZE (clause) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
	  if (gimplify_omp_ctxp->outer_context)
	    {
	      struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
	      omp_notice_variable (ctx, decl2, true);
	      omp_notice_variable (ctx, OMP_CLAUSE_SIZE (clause), true);
	    }
	  tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
				      OMP_CLAUSE_MAP);
	  OMP_CLAUSE_DECL (nc) = decl;
	  OMP_CLAUSE_SIZE (nc) = size_zero_node;
	  if (gimplify_omp_ctxp->target_firstprivatize_array_bases)
	    OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
	  else
	    OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
	  OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
	  OMP_CLAUSE_CHAIN (clause) = nc;
	}
      else if (gimplify_omp_ctxp->target_firstprivatize_array_bases
	       && lang_hooks.decls.omp_privatize_by_reference (decl))
	{
	  /* Decl privatized by reference: map what it refers to and
	     add a firstprivate-reference map for the decl itself.  */
	  OMP_CLAUSE_DECL (clause) = build_simple_mem_ref (decl);
	  OMP_CLAUSE_SIZE (clause)
	    = unshare_expr (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))));
	  /* Gimplify the size in the outer context, then restore.  */
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
	  gimplify_omp_ctxp = ctx->outer_context;
	  gimplify_expr (&OMP_CLAUSE_SIZE (clause),
			 pre_p, NULL, is_gimple_val, fb_rvalue);
	  gimplify_omp_ctxp = ctx;
	  tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
				      OMP_CLAUSE_MAP);
	  OMP_CLAUSE_DECL (nc) = decl;
	  OMP_CLAUSE_SIZE (nc) = size_zero_node;
	  OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_REFERENCE);
	  OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
	  OMP_CLAUSE_CHAIN (clause) = nc;
	}
      else
	OMP_CLAUSE_SIZE (clause) = DECL_SIZE_UNIT (decl);
    }
  /* Both FIRSTPRIVATE and LASTPRIVATE flags: add a companion
     lastprivate clause marked as firstprivate.  */
  if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_LASTPRIVATE) != 0)
    {
      tree nc = build_omp_clause (input_location, OMP_CLAUSE_LASTPRIVATE);
      OMP_CLAUSE_DECL (nc) = decl;
      OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (nc) = 1;
      OMP_CLAUSE_CHAIN (nc) = chain;
      OMP_CLAUSE_CHAIN (clause) = nc;
      /* Finish the clause in the outer context, then restore.  */
      struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
      gimplify_omp_ctxp = ctx->outer_context;
      lang_hooks.decls.omp_finish_clause (nc, pre_p);
      gimplify_omp_ctxp = ctx;
    }
  *list_p = clause;
  /* Let the frontend finish the new clause in the outer context and,
     if any map clause we created uses a DECL as its size, make the
     outer contexts notice that decl too.  Restore the context last.  */
  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
  gimplify_omp_ctxp = ctx->outer_context;
  lang_hooks.decls.omp_finish_clause (clause, pre_p);
  if (gimplify_omp_ctxp)
    for (; clause != chain; clause = OMP_CLAUSE_CHAIN (clause))
      if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_MAP
	  && DECL_P (OMP_CLAUSE_SIZE (clause)))
	omp_notice_variable (gimplify_omp_ctxp, OMP_CLAUSE_SIZE (clause),
			     true);
  gimplify_omp_ctxp = ctx;
  return 0;
}
10175 
10176 static void
gimplify_adjust_omp_clauses(gimple_seq * pre_p,gimple_seq body,tree * list_p,enum tree_code code)10177 gimplify_adjust_omp_clauses (gimple_seq *pre_p, gimple_seq body, tree *list_p,
10178 			     enum tree_code code)
10179 {
10180   struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
10181   tree *orig_list_p = list_p;
10182   tree c, decl;
10183   bool has_inscan_reductions = false;
10184 
10185   if (body)
10186     {
10187       struct gimplify_omp_ctx *octx;
10188       for (octx = ctx; octx; octx = octx->outer_context)
10189 	if ((octx->region_type & (ORT_PARALLEL | ORT_TASK | ORT_TEAMS)) != 0)
10190 	  break;
10191       if (octx)
10192 	{
10193 	  struct walk_stmt_info wi;
10194 	  memset (&wi, 0, sizeof (wi));
10195 	  walk_gimple_seq (body, omp_find_stores_stmt,
10196 			   omp_find_stores_op, &wi);
10197 	}
10198     }
10199 
10200   if (ctx->add_safelen1)
10201     {
10202       /* If there are VLAs in the body of simd loop, prevent
10203 	 vectorization.  */
10204       gcc_assert (ctx->region_type == ORT_SIMD);
10205       c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_SAFELEN);
10206       OMP_CLAUSE_SAFELEN_EXPR (c) = integer_one_node;
10207       OMP_CLAUSE_CHAIN (c) = *list_p;
10208       *list_p = c;
10209       list_p = &OMP_CLAUSE_CHAIN (c);
10210     }
10211 
10212   if (ctx->region_type == ORT_WORKSHARE
10213       && ctx->outer_context
10214       && ctx->outer_context->region_type == ORT_COMBINED_PARALLEL)
10215     {
10216       for (c = ctx->outer_context->clauses; c; c = OMP_CLAUSE_CHAIN (c))
10217 	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
10218 	    && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
10219 	  {
10220 	    decl = OMP_CLAUSE_DECL (c);
10221 	    splay_tree_node n
10222 	      = splay_tree_lookup (ctx->outer_context->variables,
10223 				   (splay_tree_key) decl);
10224 	    gcc_checking_assert (!splay_tree_lookup (ctx->variables,
10225 						     (splay_tree_key) decl));
10226 	    omp_add_variable (ctx, decl, n->value);
10227 	    tree c2 = copy_node (c);
10228 	    OMP_CLAUSE_CHAIN (c2) = *list_p;
10229 	    *list_p = c2;
10230 	    if ((n->value & GOVD_FIRSTPRIVATE) == 0)
10231 	      continue;
10232 	    c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
10233 				   OMP_CLAUSE_FIRSTPRIVATE);
10234 	    OMP_CLAUSE_DECL (c2) = decl;
10235 	    OMP_CLAUSE_CHAIN (c2) = *list_p;
10236 	    *list_p = c2;
10237 	  }
10238     }
10239   while ((c = *list_p) != NULL)
10240     {
10241       splay_tree_node n;
10242       bool remove = false;
10243 
10244       switch (OMP_CLAUSE_CODE (c))
10245 	{
10246 	case OMP_CLAUSE_FIRSTPRIVATE:
10247 	  if ((ctx->region_type & ORT_TARGET)
10248 	      && (ctx->region_type & ORT_ACC) == 0
10249 	      && TYPE_ATOMIC (strip_array_types
10250 					(TREE_TYPE (OMP_CLAUSE_DECL (c)))))
10251 	    {
10252 	      error_at (OMP_CLAUSE_LOCATION (c),
10253 			"%<_Atomic%> %qD in %<firstprivate%> clause on "
10254 			"%<target%> construct", OMP_CLAUSE_DECL (c));
10255 	      remove = true;
10256 	      break;
10257 	    }
10258 	  /* FALLTHRU */
10259 	case OMP_CLAUSE_PRIVATE:
10260 	case OMP_CLAUSE_SHARED:
10261 	case OMP_CLAUSE_LINEAR:
10262 	  decl = OMP_CLAUSE_DECL (c);
10263 	  n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
10264 	  remove = !(n->value & GOVD_SEEN);
10265 	  if ((n->value & GOVD_LASTPRIVATE_CONDITIONAL) != 0
10266 	      && code == OMP_PARALLEL
10267 	      && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_FIRSTPRIVATE)
10268 	    remove = true;
10269 	  if (! remove)
10270 	    {
10271 	      bool shared = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED;
10272 	      if ((n->value & GOVD_DEBUG_PRIVATE)
10273 		  || lang_hooks.decls.omp_private_debug_clause (decl, shared))
10274 		{
10275 		  gcc_assert ((n->value & GOVD_DEBUG_PRIVATE) == 0
10276 			      || ((n->value & GOVD_DATA_SHARE_CLASS)
10277 				  == GOVD_SHARED));
10278 		  OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_PRIVATE);
10279 		  OMP_CLAUSE_PRIVATE_DEBUG (c) = 1;
10280 		}
10281               if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
10282 		  && ctx->has_depend
10283 		  && DECL_P (decl))
10284 		n->value |= GOVD_WRITTEN;
10285 	      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
10286 		  && (n->value & GOVD_WRITTEN) == 0
10287 		  && DECL_P (decl)
10288 		  && omp_shared_to_firstprivate_optimizable_decl_p (decl))
10289 		OMP_CLAUSE_SHARED_READONLY (c) = 1;
10290 	      else if (DECL_P (decl)
10291 		       && ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
10292 			    && (n->value & GOVD_WRITTEN) != 0)
10293 			   || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
10294 			       && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
10295 		       && omp_shared_to_firstprivate_optimizable_decl_p (decl))
10296 		omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
10297 	    }
10298 	  break;
10299 
10300 	case OMP_CLAUSE_LASTPRIVATE:
10301 	  /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
10302 	     accurately reflect the presence of a FIRSTPRIVATE clause.  */
10303 	  decl = OMP_CLAUSE_DECL (c);
10304 	  n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
10305 	  OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
10306 	    = (n->value & GOVD_FIRSTPRIVATE) != 0;
10307 	  if (code == OMP_DISTRIBUTE
10308 	      && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
10309 	    {
10310 	      remove = true;
10311 	      error_at (OMP_CLAUSE_LOCATION (c),
10312 			"same variable used in %<firstprivate%> and "
10313 			"%<lastprivate%> clauses on %<distribute%> "
10314 			"construct");
10315 	    }
10316 	  if (!remove
10317 	      && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
10318 	      && DECL_P (decl)
10319 	      && omp_shared_to_firstprivate_optimizable_decl_p (decl))
10320 	    omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
10321 	  if (OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c) && code == OMP_PARALLEL)
10322 	    remove = true;
10323 	  break;
10324 
10325 	case OMP_CLAUSE_ALIGNED:
10326 	  decl = OMP_CLAUSE_DECL (c);
10327 	  if (!is_global_var (decl))
10328 	    {
10329 	      n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
10330 	      remove = n == NULL || !(n->value & GOVD_SEEN);
10331 	      if (!remove && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
10332 		{
10333 		  struct gimplify_omp_ctx *octx;
10334 		  if (n != NULL
10335 		      && (n->value & (GOVD_DATA_SHARE_CLASS
10336 				      & ~GOVD_FIRSTPRIVATE)))
10337 		    remove = true;
10338 		  else
10339 		    for (octx = ctx->outer_context; octx;
10340 			 octx = octx->outer_context)
10341 		      {
10342 			n = splay_tree_lookup (octx->variables,
10343 					       (splay_tree_key) decl);
10344 			if (n == NULL)
10345 			  continue;
10346 			if (n->value & GOVD_LOCAL)
10347 			  break;
10348 			/* We have to avoid assigning a shared variable
10349 			   to itself when trying to add
10350 			   __builtin_assume_aligned.  */
10351 			if (n->value & GOVD_SHARED)
10352 			  {
10353 			    remove = true;
10354 			    break;
10355 			  }
10356 		      }
10357 		}
10358 	    }
10359 	  else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
10360 	    {
10361 	      n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
10362 	      if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
10363 		remove = true;
10364 	    }
10365 	  break;
10366 
10367 	case OMP_CLAUSE_NONTEMPORAL:
10368 	  decl = OMP_CLAUSE_DECL (c);
10369 	  n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
10370 	  remove = n == NULL || !(n->value & GOVD_SEEN);
10371 	  break;
10372 
10373 	case OMP_CLAUSE_MAP:
10374 	  if (code == OMP_TARGET_EXIT_DATA
10375 	      && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_POINTER)
10376 	    {
10377 	      remove = true;
10378 	      break;
10379 	    }
10380 	  decl = OMP_CLAUSE_DECL (c);
10381 	  /* Data clauses associated with reductions must be
10382 	     compatible with present_or_copy.  Warn and adjust the clause
10383 	     if that is not the case.  */
10384 	  if (ctx->region_type == ORT_ACC_PARALLEL
10385 	      || ctx->region_type == ORT_ACC_SERIAL)
10386 	    {
10387 	      tree t = DECL_P (decl) ? decl : TREE_OPERAND (decl, 0);
10388 	      n = NULL;
10389 
10390 	      if (DECL_P (t))
10391 		n = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
10392 
10393 	      if (n && (n->value & GOVD_REDUCTION))
10394 		{
10395 		  enum gomp_map_kind kind = OMP_CLAUSE_MAP_KIND (c);
10396 
10397 		  OMP_CLAUSE_MAP_IN_REDUCTION (c) = 1;
10398 		  if ((kind & GOMP_MAP_TOFROM) != GOMP_MAP_TOFROM
10399 		      && kind != GOMP_MAP_FORCE_PRESENT
10400 		      && kind != GOMP_MAP_POINTER)
10401 		    {
10402 		      warning_at (OMP_CLAUSE_LOCATION (c), 0,
10403 				  "incompatible data clause with reduction "
10404 				  "on %qE; promoting to %<present_or_copy%>",
10405 				  DECL_NAME (t));
10406 		      OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TOFROM);
10407 		    }
10408 		}
10409 	    }
10410 	  if (!DECL_P (decl))
10411 	    {
10412 	      if ((ctx->region_type & ORT_TARGET) != 0
10413 		  && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
10414 		{
10415 		  if (TREE_CODE (decl) == INDIRECT_REF
10416 		      && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
10417 		      && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
10418 			  == REFERENCE_TYPE))
10419 		    decl = TREE_OPERAND (decl, 0);
10420 		  if (TREE_CODE (decl) == COMPONENT_REF)
10421 		    {
10422 		      while (TREE_CODE (decl) == COMPONENT_REF)
10423 			decl = TREE_OPERAND (decl, 0);
10424 		      if (DECL_P (decl))
10425 			{
10426 			  n = splay_tree_lookup (ctx->variables,
10427 						 (splay_tree_key) decl);
10428 			  if (!(n->value & GOVD_SEEN))
10429 			    remove = true;
10430 			}
10431 		    }
10432 		}
10433 	      break;
10434 	    }
10435 	  n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
10436 	  if ((ctx->region_type & ORT_TARGET) != 0
10437 	      && !(n->value & GOVD_SEEN)
10438 	      && GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) == 0
10439 	      && (!is_global_var (decl)
10440 		  || !lookup_attribute ("omp declare target link",
10441 					DECL_ATTRIBUTES (decl))))
10442 	    {
10443 	      remove = true;
10444 	      /* For struct element mapping, if struct is never referenced
10445 		 in target block and none of the mapping has always modifier,
10446 		 remove all the struct element mappings, which immediately
10447 		 follow the GOMP_MAP_STRUCT map clause.  */
10448 	      if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT)
10449 		{
10450 		  HOST_WIDE_INT cnt = tree_to_shwi (OMP_CLAUSE_SIZE (c));
10451 		  while (cnt--)
10452 		    OMP_CLAUSE_CHAIN (c)
10453 		      = OMP_CLAUSE_CHAIN (OMP_CLAUSE_CHAIN (c));
10454 		}
10455 	    }
10456 	  else if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT
10457 		   && (code == OMP_TARGET_EXIT_DATA
10458 		       || code == OACC_EXIT_DATA))
10459 	    remove = true;
10460 	  else if (DECL_SIZE (decl)
10461 		   && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST
10462 		   && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_POINTER
10463 		   && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
10464 		   && (OMP_CLAUSE_MAP_KIND (c)
10465 		       != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
10466 	    {
10467 	      /* For GOMP_MAP_FORCE_DEVICEPTR, we'll never enter here, because
10468 		 for these, TREE_CODE (DECL_SIZE (decl)) will always be
10469 		 INTEGER_CST.  */
10470 	      gcc_assert (OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FORCE_DEVICEPTR);
10471 
10472 	      tree decl2 = DECL_VALUE_EXPR (decl);
10473 	      gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
10474 	      decl2 = TREE_OPERAND (decl2, 0);
10475 	      gcc_assert (DECL_P (decl2));
10476 	      tree mem = build_simple_mem_ref (decl2);
10477 	      OMP_CLAUSE_DECL (c) = mem;
10478 	      OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
10479 	      if (ctx->outer_context)
10480 		{
10481 		  omp_notice_variable (ctx->outer_context, decl2, true);
10482 		  omp_notice_variable (ctx->outer_context,
10483 				       OMP_CLAUSE_SIZE (c), true);
10484 		}
10485 	      if (((ctx->region_type & ORT_TARGET) != 0
10486 		   || !ctx->target_firstprivatize_array_bases)
10487 		  && ((n->value & GOVD_SEEN) == 0
10488 		      || (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE)) == 0))
10489 		{
10490 		  tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (c),
10491 					      OMP_CLAUSE_MAP);
10492 		  OMP_CLAUSE_DECL (nc) = decl;
10493 		  OMP_CLAUSE_SIZE (nc) = size_zero_node;
10494 		  if (ctx->target_firstprivatize_array_bases)
10495 		    OMP_CLAUSE_SET_MAP_KIND (nc,
10496 					     GOMP_MAP_FIRSTPRIVATE_POINTER);
10497 		  else
10498 		    OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
10499 		  OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (c);
10500 		  OMP_CLAUSE_CHAIN (c) = nc;
10501 		  c = nc;
10502 		}
10503 	    }
10504 	  else
10505 	    {
10506 	      if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
10507 		OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
10508 	      gcc_assert ((n->value & GOVD_SEEN) == 0
10509 			  || ((n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
10510 			      == 0));
10511 	    }
10512 	  break;
10513 
10514 	case OMP_CLAUSE_TO:
10515 	case OMP_CLAUSE_FROM:
10516 	case OMP_CLAUSE__CACHE_:
10517 	  decl = OMP_CLAUSE_DECL (c);
10518 	  if (!DECL_P (decl))
10519 	    break;
10520 	  if (DECL_SIZE (decl)
10521 	      && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
10522 	    {
10523 	      tree decl2 = DECL_VALUE_EXPR (decl);
10524 	      gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
10525 	      decl2 = TREE_OPERAND (decl2, 0);
10526 	      gcc_assert (DECL_P (decl2));
10527 	      tree mem = build_simple_mem_ref (decl2);
10528 	      OMP_CLAUSE_DECL (c) = mem;
10529 	      OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
10530 	      if (ctx->outer_context)
10531 		{
10532 		  omp_notice_variable (ctx->outer_context, decl2, true);
10533 		  omp_notice_variable (ctx->outer_context,
10534 				       OMP_CLAUSE_SIZE (c), true);
10535 		}
10536 	    }
10537 	  else if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
10538 	    OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
10539 	  break;
10540 
10541 	case OMP_CLAUSE_REDUCTION:
10542 	  if (OMP_CLAUSE_REDUCTION_INSCAN (c))
10543 	    {
10544 	      decl = OMP_CLAUSE_DECL (c);
10545 	      n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
10546 	      if ((n->value & GOVD_REDUCTION_INSCAN) == 0)
10547 		{
10548 		  remove = true;
10549 		  error_at (OMP_CLAUSE_LOCATION (c),
10550 			    "%qD specified in %<inscan%> %<reduction%> clause "
10551 			    "but not in %<scan%> directive clause", decl);
10552 		  break;
10553 		}
10554 	      has_inscan_reductions = true;
10555 	    }
10556 	  /* FALLTHRU */
10557 	case OMP_CLAUSE_IN_REDUCTION:
10558 	case OMP_CLAUSE_TASK_REDUCTION:
10559 	  decl = OMP_CLAUSE_DECL (c);
10560 	  /* OpenACC reductions need a present_or_copy data clause.
10561 	     Add one if necessary.  Emit error when the reduction is private.  */
10562 	  if (ctx->region_type == ORT_ACC_PARALLEL
10563 	      || ctx->region_type == ORT_ACC_SERIAL)
10564 	    {
10565 	      n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
10566 	      if (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
10567 		{
10568 		  remove = true;
10569 		  error_at (OMP_CLAUSE_LOCATION (c), "invalid private "
10570 			    "reduction on %qE", DECL_NAME (decl));
10571 		}
10572 	      else if ((n->value & GOVD_MAP) == 0)
10573 		{
10574 		  tree next = OMP_CLAUSE_CHAIN (c);
10575 		  tree nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_MAP);
10576 		  OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_TOFROM);
10577 		  OMP_CLAUSE_DECL (nc) = decl;
10578 		  OMP_CLAUSE_CHAIN (c) = nc;
10579 		  lang_hooks.decls.omp_finish_clause (nc, pre_p);
10580 		  while (1)
10581 		    {
10582 		      OMP_CLAUSE_MAP_IN_REDUCTION (nc) = 1;
10583 		      if (OMP_CLAUSE_CHAIN (nc) == NULL)
10584 			break;
10585 		      nc = OMP_CLAUSE_CHAIN (nc);
10586 		    }
10587 		  OMP_CLAUSE_CHAIN (nc) = next;
10588 		  n->value |= GOVD_MAP;
10589 		}
10590 	    }
10591 	  if (DECL_P (decl)
10592 	      && omp_shared_to_firstprivate_optimizable_decl_p (decl))
10593 	    omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
10594 	  break;
10595 	case OMP_CLAUSE_COPYIN:
10596 	case OMP_CLAUSE_COPYPRIVATE:
10597 	case OMP_CLAUSE_IF:
10598 	case OMP_CLAUSE_NUM_THREADS:
10599 	case OMP_CLAUSE_NUM_TEAMS:
10600 	case OMP_CLAUSE_THREAD_LIMIT:
10601 	case OMP_CLAUSE_DIST_SCHEDULE:
10602 	case OMP_CLAUSE_DEVICE:
10603 	case OMP_CLAUSE_SCHEDULE:
10604 	case OMP_CLAUSE_NOWAIT:
10605 	case OMP_CLAUSE_ORDERED:
10606 	case OMP_CLAUSE_DEFAULT:
10607 	case OMP_CLAUSE_UNTIED:
10608 	case OMP_CLAUSE_COLLAPSE:
10609 	case OMP_CLAUSE_FINAL:
10610 	case OMP_CLAUSE_MERGEABLE:
10611 	case OMP_CLAUSE_PROC_BIND:
10612 	case OMP_CLAUSE_SAFELEN:
10613 	case OMP_CLAUSE_SIMDLEN:
10614 	case OMP_CLAUSE_DEPEND:
10615 	case OMP_CLAUSE_PRIORITY:
10616 	case OMP_CLAUSE_GRAINSIZE:
10617 	case OMP_CLAUSE_NUM_TASKS:
10618 	case OMP_CLAUSE_NOGROUP:
10619 	case OMP_CLAUSE_THREADS:
10620 	case OMP_CLAUSE_SIMD:
10621 	case OMP_CLAUSE_HINT:
10622 	case OMP_CLAUSE_DEFAULTMAP:
10623 	case OMP_CLAUSE_ORDER:
10624 	case OMP_CLAUSE_BIND:
10625 	case OMP_CLAUSE_USE_DEVICE_PTR:
10626 	case OMP_CLAUSE_USE_DEVICE_ADDR:
10627 	case OMP_CLAUSE_IS_DEVICE_PTR:
10628 	case OMP_CLAUSE_ASYNC:
10629 	case OMP_CLAUSE_WAIT:
10630 	case OMP_CLAUSE_INDEPENDENT:
10631 	case OMP_CLAUSE_NUM_GANGS:
10632 	case OMP_CLAUSE_NUM_WORKERS:
10633 	case OMP_CLAUSE_VECTOR_LENGTH:
10634 	case OMP_CLAUSE_GANG:
10635 	case OMP_CLAUSE_WORKER:
10636 	case OMP_CLAUSE_VECTOR:
10637 	case OMP_CLAUSE_AUTO:
10638 	case OMP_CLAUSE_SEQ:
10639 	case OMP_CLAUSE_TILE:
10640 	case OMP_CLAUSE_IF_PRESENT:
10641 	case OMP_CLAUSE_FINALIZE:
10642 	case OMP_CLAUSE_INCLUSIVE:
10643 	case OMP_CLAUSE_EXCLUSIVE:
10644 	  break;
10645 
10646 	default:
10647 	  gcc_unreachable ();
10648 	}
10649 
10650       if (remove)
10651 	*list_p = OMP_CLAUSE_CHAIN (c);
10652       else
10653 	list_p = &OMP_CLAUSE_CHAIN (c);
10654     }
10655 
10656   /* Add in any implicit data sharing.  */
10657   struct gimplify_adjust_omp_clauses_data data;
10658   data.list_p = list_p;
10659   data.pre_p = pre_p;
10660   splay_tree_foreach (ctx->variables, gimplify_adjust_omp_clauses_1, &data);
10661 
10662   if (has_inscan_reductions)
10663     for (c = *orig_list_p; c; c = OMP_CLAUSE_CHAIN (c))
10664       if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
10665 	  && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
10666 	{
10667 	  error_at (OMP_CLAUSE_LOCATION (c),
10668 		    "%<inscan%> %<reduction%> clause used together with "
10669 		    "%<linear%> clause for a variable other than loop "
10670 		    "iterator");
10671 	  break;
10672 	}
10673 
10674   gimplify_omp_ctxp = ctx->outer_context;
10675   delete_omp_context (ctx);
10676 }
10677 
/* Return 0 if CONSTRUCTS selectors don't match the OpenMP context,
   -1 if unknown yet (simd is involved, won't be known until vectorization)
   and 1 if they do.  If SCORES is non-NULL, it should point to an array
   of at least 2*NCONSTRUCTS+2 ints, and will be filled with the positions
   of the CONSTRUCTS (position -1 if it will never match) followed by
   number of constructs in the OpenMP context construct trait.  If the
   score depends on whether it will be in a declare simd clone or not,
   the function returns 2 and there will be two sets of the scores, the first
   one for the case that it is not in a declare simd clone, the other
   that it is in a declare simd clone.  */

int
omp_construct_selector_matches (enum tree_code *constructs, int nconstructs,
				int *scores)
{
  /* MATCHED counts how many of CONSTRUCTS have matched so far, walking
     innermost first; CNT counts every construct in the context trait.  */
  int matched = 0, cnt = 0;
  bool simd_seen = false;
  bool target_seen = false;
  /* Index of the tentative declare-simd OMP_SIMD entry in CODES, or -1.  */
  int declare_simd_cnt = -1;
  /* When SCORES is non-NULL, every trait construct is collected here
     (innermost pushed first) for the scoring loop at the end.  */
  auto_vec<enum tree_code, 16> codes;
  /* Walk the gimplification contexts from innermost outwards, picking out
     the constructs that belong to the OpenMP context construct trait.  */
  for (struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp; ctx;)
    {
      if (((ctx->region_type & ORT_PARALLEL) && ctx->code == OMP_PARALLEL)
	  || ((ctx->region_type & (ORT_TARGET | ORT_IMPLICIT_TARGET | ORT_ACC))
	      == ORT_TARGET && ctx->code == OMP_TARGET)
	  || ((ctx->region_type & ORT_TEAMS) && ctx->code == OMP_TEAMS)
	  || (ctx->region_type == ORT_WORKSHARE && ctx->code == OMP_FOR)
	  /* An OMP_SIMD region carrying a bind clause is excluded here;
	     NOTE(review): presumably that is one stemming from a 'loop'
	     construct -- confirm against gimplify_omp_loop.  */
	  || (ctx->region_type == ORT_SIMD
	      && ctx->code == OMP_SIMD
	      && !omp_find_clause (ctx->clauses, OMP_CLAUSE_BIND)))
	{
	  ++cnt;
	  if (scores)
	    codes.safe_push (ctx->code);
	  else if (matched < nconstructs && ctx->code == constructs[matched])
	    {
	      /* simd may only match as the innermost construct; if
		 something already matched below it, there is no match.  */
	      if (ctx->code == OMP_SIMD)
		{
		  if (matched)
		    return 0;
		  simd_seen = true;
		}
	      ++matched;
	    }
	  if (ctx->code == OMP_TARGET)
	    {
	      /* Nothing outside of a target region contributes to the
		 trait; stop walking here.  */
	      if (scores == NULL)
		return matched < nconstructs ? 0 : simd_seen ? -1 : 1;
	      target_seen = true;
	      break;
	    }
	}
      else if (ctx->region_type == ORT_WORKSHARE
	       && ctx->code == OMP_LOOP
	       && ctx->outer_context
	       && ctx->outer_context->region_type == ORT_COMBINED_PARALLEL
	       && ctx->outer_context->outer_context
	       && ctx->outer_context->outer_context->code == OMP_LOOP
	       && ctx->outer_context->outer_context->distribute)
	/* NOTE(review): looks like this skips the intermediate contexts
	   interposed when a 'loop' construct was expanded under a
	   distribute loop -- verify against gimplify_omp_loop.  */
	ctx = ctx->outer_context->outer_context;
      ctx = ctx->outer_context;
    }
  if (!target_seen
      && lookup_attribute ("omp declare simd",
			   DECL_ATTRIBUTES (current_function_decl)))
    {
      /* Declare simd is a maybe case, it is supposed to be added only to the
	 omp-simd-clone.c added clones and not to the base function.  */
      declare_simd_cnt = cnt++;
      if (scores)
	codes.safe_push (OMP_SIMD);
      else if (cnt == 0
	       && constructs[0] == OMP_SIMD)
	{
	  /* NOTE(review): CNT was just post-incremented above, so CNT == 0
	     can never hold here and this branch looks unreachable; the
	     intent appears to be CNT == 1 (declare simd being the only
	     construct) -- verify against upstream history.  */
	  gcc_assert (matched == 0);
	  simd_seen = true;
	  if (++matched == nconstructs)
	    return -1;
	}
    }
  if (tree attr = lookup_attribute ("omp declare variant variant",
				    DECL_ATTRIBUTES (current_function_decl)))
    {
      /* For a function that is itself a "declare variant" variant, also
	 take into account the construct trait selector codes recorded in
	 the attribute (decoded by omp_constructor_traits_to_codes).  */
      enum tree_code variant_constructs[5];
      int variant_nconstructs = 0;
      if (!target_seen)
	variant_nconstructs
	  = omp_constructor_traits_to_codes (TREE_VALUE (attr),
					     variant_constructs);
      for (int i = 0; i < variant_nconstructs; i++)
	{
	  ++cnt;
	  if (scores)
	    codes.safe_push (variant_constructs[i]);
	  else if (matched < nconstructs
		   && variant_constructs[i] == constructs[matched])
	    {
	      if (variant_constructs[i] == OMP_SIMD)
		{
		  if (matched)
		    return 0;
		  simd_seen = true;
		}
	      ++matched;
	    }
	}
    }
  if (!target_seen
      && lookup_attribute ("omp declare target block",
			   DECL_ATTRIBUTES (current_function_decl)))
    {
      /* A "declare target" function counts as enclosed in a target
	 construct.  */
      if (scores)
	codes.safe_push (OMP_TARGET);
      else if (matched < nconstructs && constructs[matched] == OMP_TARGET)
	++matched;
    }
  if (scores)
    {
      /* Fill SCORES with the position of each of CONSTRUCTS within CODES,
	 matching from the outermost end.  When a declare-simd entry is
	 present two passes are made: pass 0 scores the non-clone case
	 (where that entry is absent and is therefore skipped), pass 1 the
	 declare simd clone case.  */
      for (int pass = 0; pass < (declare_simd_cnt == -1 ? 1 : 2); pass++)
	{
	  int j = codes.length () - 1;
	  for (int i = nconstructs - 1; i >= 0; i--)
	    {
	      while (j >= 0
		     && (pass != 0 || declare_simd_cnt != j)
		     && constructs[i] != codes[j])
		--j;
	      /* In the non-clone pass, positions past the absent
		 declare-simd slot shift down by one.  */
	      if (pass == 0 && declare_simd_cnt != -1 && j > declare_simd_cnt)
		*scores++ = j - 1;
	      else
		*scores++ = j;
	    }
	  /* Trailing entry: total number of constructs in the trait.  */
	  *scores++ = ((pass == 0 && declare_simd_cnt != -1)
		       ? codes.length () - 1 : codes.length ());
	}
      return declare_simd_cnt == -1 ? 1 : 2;
    }
  if (matched == nconstructs)
    return simd_seen ? -1 : 1;
  return 0;
}
10819 
10820 /* Gimplify OACC_CACHE.  */
10821 
10822 static void
gimplify_oacc_cache(tree * expr_p,gimple_seq * pre_p)10823 gimplify_oacc_cache (tree *expr_p, gimple_seq *pre_p)
10824 {
10825   tree expr = *expr_p;
10826 
10827   gimplify_scan_omp_clauses (&OACC_CACHE_CLAUSES (expr), pre_p, ORT_ACC,
10828 			     OACC_CACHE);
10829   gimplify_adjust_omp_clauses (pre_p, NULL, &OACC_CACHE_CLAUSES (expr),
10830 			       OACC_CACHE);
10831 
10832   /* TODO: Do something sensible with this information.  */
10833 
10834   *expr_p = NULL_TREE;
10835 }
10836 
10837 /* Helper function of gimplify_oacc_declare.  The helper's purpose is to,
10838    if required, translate 'kind' in CLAUSE into an 'entry' kind and 'exit'
10839    kind.  The entry kind will replace the one in CLAUSE, while the exit
10840    kind will be used in a new omp_clause and returned to the caller.  */
10841 
10842 static tree
gimplify_oacc_declare_1(tree clause)10843 gimplify_oacc_declare_1 (tree clause)
10844 {
10845   HOST_WIDE_INT kind, new_op;
10846   bool ret = false;
10847   tree c = NULL;
10848 
10849   kind = OMP_CLAUSE_MAP_KIND (clause);
10850 
10851   switch (kind)
10852     {
10853       case GOMP_MAP_ALLOC:
10854 	new_op = GOMP_MAP_RELEASE;
10855 	ret = true;
10856 	break;
10857 
10858       case GOMP_MAP_FROM:
10859 	OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_FORCE_ALLOC);
10860 	new_op = GOMP_MAP_FROM;
10861 	ret = true;
10862 	break;
10863 
10864       case GOMP_MAP_TOFROM:
10865 	OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_TO);
10866 	new_op = GOMP_MAP_FROM;
10867 	ret = true;
10868 	break;
10869 
10870       case GOMP_MAP_DEVICE_RESIDENT:
10871       case GOMP_MAP_FORCE_DEVICEPTR:
10872       case GOMP_MAP_FORCE_PRESENT:
10873       case GOMP_MAP_LINK:
10874       case GOMP_MAP_POINTER:
10875       case GOMP_MAP_TO:
10876 	break;
10877 
10878       default:
10879 	gcc_unreachable ();
10880 	break;
10881     }
10882 
10883   if (ret)
10884     {
10885       c = build_omp_clause (OMP_CLAUSE_LOCATION (clause), OMP_CLAUSE_MAP);
10886       OMP_CLAUSE_SET_MAP_KIND (c, new_op);
10887       OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (clause);
10888     }
10889 
10890   return c;
10891 }
10892 
10893 /* Gimplify OACC_DECLARE.  */
10894 
10895 static void
gimplify_oacc_declare(tree * expr_p,gimple_seq * pre_p)10896 gimplify_oacc_declare (tree *expr_p, gimple_seq *pre_p)
10897 {
10898   tree expr = *expr_p;
10899   gomp_target *stmt;
10900   tree clauses, t, decl;
10901 
10902   clauses = OACC_DECLARE_CLAUSES (expr);
10903 
10904   gimplify_scan_omp_clauses (&clauses, pre_p, ORT_TARGET_DATA, OACC_DECLARE);
10905   gimplify_adjust_omp_clauses (pre_p, NULL, &clauses, OACC_DECLARE);
10906 
10907   for (t = clauses; t; t = OMP_CLAUSE_CHAIN (t))
10908     {
10909       decl = OMP_CLAUSE_DECL (t);
10910 
10911       if (TREE_CODE (decl) == MEM_REF)
10912 	decl = TREE_OPERAND (decl, 0);
10913 
10914       if (VAR_P (decl) && !is_oacc_declared (decl))
10915 	{
10916 	  tree attr = get_identifier ("oacc declare target");
10917 	  DECL_ATTRIBUTES (decl) = tree_cons (attr, NULL_TREE,
10918 					      DECL_ATTRIBUTES (decl));
10919 	}
10920 
10921       if (VAR_P (decl)
10922 	  && !is_global_var (decl)
10923 	  && DECL_CONTEXT (decl) == current_function_decl)
10924 	{
10925 	  tree c = gimplify_oacc_declare_1 (t);
10926 	  if (c)
10927 	    {
10928 	      if (oacc_declare_returns == NULL)
10929 		oacc_declare_returns = new hash_map<tree, tree>;
10930 
10931 	      oacc_declare_returns->put (decl, c);
10932 	    }
10933 	}
10934 
10935       if (gimplify_omp_ctxp)
10936 	omp_add_variable (gimplify_omp_ctxp, decl, GOVD_SEEN);
10937     }
10938 
10939   stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
10940 				  clauses);
10941 
10942   gimplify_seq_add_stmt (pre_p, stmt);
10943 
10944   *expr_p = NULL_TREE;
10945 }
10946 
10947 /* Gimplify the contents of an OMP_PARALLEL statement.  This involves
10948    gimplification of the body, as well as scanning the body for used
10949    variables.  We need to do this scan now, because variable-sized
10950    decls will be decomposed during gimplification.  */
10951 
10952 static void
gimplify_omp_parallel(tree * expr_p,gimple_seq * pre_p)10953 gimplify_omp_parallel (tree *expr_p, gimple_seq *pre_p)
10954 {
10955   tree expr = *expr_p;
10956   gimple *g;
10957   gimple_seq body = NULL;
10958 
10959   gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p,
10960 			     OMP_PARALLEL_COMBINED (expr)
10961 			     ? ORT_COMBINED_PARALLEL
10962 			     : ORT_PARALLEL, OMP_PARALLEL);
10963 
10964   push_gimplify_context ();
10965 
10966   g = gimplify_and_return_first (OMP_PARALLEL_BODY (expr), &body);
10967   if (gimple_code (g) == GIMPLE_BIND)
10968     pop_gimplify_context (g);
10969   else
10970     pop_gimplify_context (NULL);
10971 
10972   gimplify_adjust_omp_clauses (pre_p, body, &OMP_PARALLEL_CLAUSES (expr),
10973 			       OMP_PARALLEL);
10974 
10975   g = gimple_build_omp_parallel (body,
10976 				 OMP_PARALLEL_CLAUSES (expr),
10977 				 NULL_TREE, NULL_TREE);
10978   if (OMP_PARALLEL_COMBINED (expr))
10979     gimple_omp_set_subcode (g, GF_OMP_PARALLEL_COMBINED);
10980   gimplify_seq_add_stmt (pre_p, g);
10981   *expr_p = NULL_TREE;
10982 }
10983 
10984 /* Gimplify the contents of an OMP_TASK statement.  This involves
10985    gimplification of the body, as well as scanning the body for used
10986    variables.  We need to do this scan now, because variable-sized
10987    decls will be decomposed during gimplification.  */
10988 
10989 static void
gimplify_omp_task(tree * expr_p,gimple_seq * pre_p)10990 gimplify_omp_task (tree *expr_p, gimple_seq *pre_p)
10991 {
10992   tree expr = *expr_p;
10993   gimple *g;
10994   gimple_seq body = NULL;
10995 
10996   if (OMP_TASK_BODY (expr) == NULL_TREE)
10997     for (tree c = OMP_TASK_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
10998       if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
10999 	  && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_MUTEXINOUTSET)
11000 	{
11001 	  error_at (OMP_CLAUSE_LOCATION (c),
11002 		    "%<mutexinoutset%> kind in %<depend%> clause on a "
11003 		    "%<taskwait%> construct");
11004 	  break;
11005 	}
11006 
11007   gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr), pre_p,
11008 			     omp_find_clause (OMP_TASK_CLAUSES (expr),
11009 					      OMP_CLAUSE_UNTIED)
11010 			     ? ORT_UNTIED_TASK : ORT_TASK, OMP_TASK);
11011 
11012   if (OMP_TASK_BODY (expr))
11013     {
11014       push_gimplify_context ();
11015 
11016       g = gimplify_and_return_first (OMP_TASK_BODY (expr), &body);
11017       if (gimple_code (g) == GIMPLE_BIND)
11018 	pop_gimplify_context (g);
11019       else
11020 	pop_gimplify_context (NULL);
11021     }
11022 
11023   gimplify_adjust_omp_clauses (pre_p, body, &OMP_TASK_CLAUSES (expr),
11024 			       OMP_TASK);
11025 
11026   g = gimple_build_omp_task (body,
11027 			     OMP_TASK_CLAUSES (expr),
11028 			     NULL_TREE, NULL_TREE,
11029 			     NULL_TREE, NULL_TREE, NULL_TREE);
11030   if (OMP_TASK_BODY (expr) == NULL_TREE)
11031     gimple_omp_task_set_taskwait_p (g, true);
11032   gimplify_seq_add_stmt (pre_p, g);
11033   *expr_p = NULL_TREE;
11034 }
11035 
11036 /* Gimplify the gross structure of an OMP_FOR statement.  */
11037 
11038 static enum gimplify_status
gimplify_omp_for(tree * expr_p,gimple_seq * pre_p)11039 gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
11040 {
11041   tree for_stmt, orig_for_stmt, inner_for_stmt = NULL_TREE, decl, var, t;
11042   enum gimplify_status ret = GS_ALL_DONE;
11043   enum gimplify_status tret;
11044   gomp_for *gfor;
11045   gimple_seq for_body, for_pre_body;
11046   int i;
11047   bitmap has_decl_expr = NULL;
11048   enum omp_region_type ort = ORT_WORKSHARE;
11049 
11050   orig_for_stmt = for_stmt = *expr_p;
11051 
11052   bool loop_p = (omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_BIND)
11053 		 != NULL_TREE);
11054   if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
11055     {
11056       tree *data[4] = { NULL, NULL, NULL, NULL };
11057       gcc_assert (TREE_CODE (for_stmt) != OACC_LOOP);
11058       inner_for_stmt = walk_tree (&OMP_FOR_BODY (for_stmt),
11059 				  find_combined_omp_for, data, NULL);
11060       if (inner_for_stmt == NULL_TREE)
11061 	{
11062 	  gcc_assert (seen_error ());
11063 	  *expr_p = NULL_TREE;
11064 	  return GS_ERROR;
11065 	}
11066       if (data[2] && OMP_FOR_PRE_BODY (*data[2]))
11067 	{
11068 	  append_to_statement_list_force (OMP_FOR_PRE_BODY (*data[2]),
11069 					  &OMP_FOR_PRE_BODY (for_stmt));
11070 	  OMP_FOR_PRE_BODY (*data[2]) = NULL_TREE;
11071 	}
11072       if (OMP_FOR_PRE_BODY (inner_for_stmt))
11073 	{
11074 	  append_to_statement_list_force (OMP_FOR_PRE_BODY (inner_for_stmt),
11075 					  &OMP_FOR_PRE_BODY (for_stmt));
11076 	  OMP_FOR_PRE_BODY (inner_for_stmt) = NULL_TREE;
11077 	}
11078 
11079       if (data[0])
11080 	{
11081 	  /* We have some statements or variable declarations in between
11082 	     the composite construct directives.  Move them around the
11083 	     inner_for_stmt.  */
11084 	  data[0] = expr_p;
11085 	  for (i = 0; i < 3; i++)
11086 	    if (data[i])
11087 	      {
11088 		tree t = *data[i];
11089 		if (i < 2 && data[i + 1] == &OMP_BODY (t))
11090 		  data[i + 1] = data[i];
11091 		*data[i] = OMP_BODY (t);
11092 		tree body = build3 (BIND_EXPR, void_type_node, NULL_TREE,
11093 				    NULL_TREE, make_node (BLOCK));
11094 		OMP_BODY (t) = body;
11095 		append_to_statement_list_force (inner_for_stmt,
11096 						&BIND_EXPR_BODY (body));
11097 		*data[3] = t;
11098 		data[3] = tsi_stmt_ptr (tsi_start (BIND_EXPR_BODY (body)));
11099 		gcc_assert (*data[3] == inner_for_stmt);
11100 	      }
11101 	  return GS_OK;
11102 	}
11103 
11104       for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (inner_for_stmt)); i++)
11105 	if (!loop_p
11106 	    && OMP_FOR_ORIG_DECLS (inner_for_stmt)
11107 	    && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
11108 					i)) == TREE_LIST
11109 	    && TREE_PURPOSE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
11110 					   i)))
11111 	  {
11112 	    tree orig = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt), i);
11113 	    /* Class iterators aren't allowed on OMP_SIMD, so the only
11114 	       case we need to solve is distribute parallel for.  They are
11115 	       allowed on the loop construct, but that is already handled
11116 	       in gimplify_omp_loop.  */
11117 	    gcc_assert (TREE_CODE (inner_for_stmt) == OMP_FOR
11118 			&& TREE_CODE (for_stmt) == OMP_DISTRIBUTE
11119 			&& data[1]);
11120 	    tree orig_decl = TREE_PURPOSE (orig);
11121 	    tree last = TREE_VALUE (orig);
11122 	    tree *pc;
11123 	    for (pc = &OMP_FOR_CLAUSES (inner_for_stmt);
11124 		 *pc; pc = &OMP_CLAUSE_CHAIN (*pc))
11125 	      if ((OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_PRIVATE
11126 		   || OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_LASTPRIVATE)
11127 		  && OMP_CLAUSE_DECL (*pc) == orig_decl)
11128 		break;
11129 	    if (*pc == NULL_TREE)
11130 	      {
11131 		tree *spc;
11132 		for (spc = &OMP_PARALLEL_CLAUSES (*data[1]);
11133 		     *spc; spc = &OMP_CLAUSE_CHAIN (*spc))
11134 		  if (OMP_CLAUSE_CODE (*spc) == OMP_CLAUSE_PRIVATE
11135 		      && OMP_CLAUSE_DECL (*spc) == orig_decl)
11136 		    break;
11137 		if (*spc)
11138 		  {
11139 		    tree c = *spc;
11140 		    *spc = OMP_CLAUSE_CHAIN (c);
11141 		    OMP_CLAUSE_CHAIN (c) = NULL_TREE;
11142 		    *pc = c;
11143 		  }
11144 	      }
11145 	    if (*pc == NULL_TREE)
11146 	      ;
11147 	    else if (OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_PRIVATE)
11148 	      {
11149 		/* private clause will appear only on inner_for_stmt.
11150 		   Change it into firstprivate, and add private clause
11151 		   on for_stmt.  */
11152 		tree c = copy_node (*pc);
11153 		OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
11154 		OMP_FOR_CLAUSES (for_stmt) = c;
11155 		OMP_CLAUSE_CODE (*pc) = OMP_CLAUSE_FIRSTPRIVATE;
11156 		lang_hooks.decls.omp_finish_clause (*pc, pre_p);
11157 	      }
11158 	    else
11159 	      {
11160 		/* lastprivate clause will appear on both inner_for_stmt
11161 		   and for_stmt.  Add firstprivate clause to
11162 		   inner_for_stmt.  */
11163 		tree c = build_omp_clause (OMP_CLAUSE_LOCATION (*pc),
11164 					   OMP_CLAUSE_FIRSTPRIVATE);
11165 		OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (*pc);
11166 		OMP_CLAUSE_CHAIN (c) = *pc;
11167 		*pc = c;
11168 		lang_hooks.decls.omp_finish_clause (*pc, pre_p);
11169 	      }
11170 	    tree c = build_omp_clause (UNKNOWN_LOCATION,
11171 				       OMP_CLAUSE_FIRSTPRIVATE);
11172 	    OMP_CLAUSE_DECL (c) = last;
11173 	    OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
11174 	    OMP_PARALLEL_CLAUSES (*data[1]) = c;
11175 	    c = build_omp_clause (UNKNOWN_LOCATION,
11176 				  *pc ? OMP_CLAUSE_SHARED
11177 				      : OMP_CLAUSE_FIRSTPRIVATE);
11178 	    OMP_CLAUSE_DECL (c) = orig_decl;
11179 	    OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
11180 	    OMP_PARALLEL_CLAUSES (*data[1]) = c;
11181 	  }
11182       /* Similarly, take care of C++ range for temporaries, those should
11183 	 be firstprivate on OMP_PARALLEL if any.  */
11184       if (data[1])
11185 	for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (inner_for_stmt)); i++)
11186 	  if (OMP_FOR_ORIG_DECLS (inner_for_stmt)
11187 	      && TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
11188 					  i)) == TREE_LIST
11189 	      && TREE_CHAIN (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt),
11190 					   i)))
11191 	    {
11192 	      tree orig
11193 		= TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (inner_for_stmt), i);
11194 	      tree v = TREE_CHAIN (orig);
11195 	      tree c = build_omp_clause (UNKNOWN_LOCATION,
11196 					 OMP_CLAUSE_FIRSTPRIVATE);
11197 	      /* First add firstprivate clause for the __for_end artificial
11198 		 decl.  */
11199 	      OMP_CLAUSE_DECL (c) = TREE_VEC_ELT (v, 1);
11200 	      if (TREE_CODE (TREE_TYPE (OMP_CLAUSE_DECL (c)))
11201 		  == REFERENCE_TYPE)
11202 		OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c) = 1;
11203 	      OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
11204 	      OMP_PARALLEL_CLAUSES (*data[1]) = c;
11205 	      if (TREE_VEC_ELT (v, 0))
11206 		{
11207 		  /* And now the same for __for_range artificial decl if it
11208 		     exists.  */
11209 		  c = build_omp_clause (UNKNOWN_LOCATION,
11210 					OMP_CLAUSE_FIRSTPRIVATE);
11211 		  OMP_CLAUSE_DECL (c) = TREE_VEC_ELT (v, 0);
11212 		  if (TREE_CODE (TREE_TYPE (OMP_CLAUSE_DECL (c)))
11213 		      == REFERENCE_TYPE)
11214 		    OMP_CLAUSE_FIRSTPRIVATE_NO_REFERENCE (c) = 1;
11215 		  OMP_CLAUSE_CHAIN (c) = OMP_PARALLEL_CLAUSES (*data[1]);
11216 		  OMP_PARALLEL_CLAUSES (*data[1]) = c;
11217 		}
11218 	    }
11219     }
11220 
11221   switch (TREE_CODE (for_stmt))
11222     {
11223     case OMP_FOR:
11224     case OMP_DISTRIBUTE:
11225       break;
11226     case OACC_LOOP:
11227       ort = ORT_ACC;
11228       break;
11229     case OMP_TASKLOOP:
11230       if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_UNTIED))
11231 	ort = ORT_UNTIED_TASKLOOP;
11232       else
11233 	ort = ORT_TASKLOOP;
11234       break;
11235     case OMP_SIMD:
11236       ort = ORT_SIMD;
11237       break;
11238     default:
11239       gcc_unreachable ();
11240     }
11241 
11242   /* Set OMP_CLAUSE_LINEAR_NO_COPYIN flag on explicit linear
11243      clause for the IV.  */
11244   if (ort == ORT_SIMD && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
11245     {
11246       t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), 0);
11247       gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
11248       decl = TREE_OPERAND (t, 0);
11249       for (tree c = OMP_FOR_CLAUSES (for_stmt); c; c = OMP_CLAUSE_CHAIN (c))
11250 	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
11251 	    && OMP_CLAUSE_DECL (c) == decl)
11252 	  {
11253 	    OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
11254 	    break;
11255 	  }
11256     }
11257 
11258   if (TREE_CODE (for_stmt) != OMP_TASKLOOP)
11259     gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt), pre_p, ort,
11260 			       loop_p && TREE_CODE (for_stmt) != OMP_SIMD
11261 			       ? OMP_LOOP : TREE_CODE (for_stmt));
11262 
11263   if (TREE_CODE (for_stmt) == OMP_DISTRIBUTE)
11264     gimplify_omp_ctxp->distribute = true;
11265 
11266   /* Handle OMP_FOR_INIT.  */
11267   for_pre_body = NULL;
11268   if ((ort == ORT_SIMD
11269        || (inner_for_stmt && TREE_CODE (inner_for_stmt) == OMP_SIMD))
11270       && OMP_FOR_PRE_BODY (for_stmt))
11271     {
11272       has_decl_expr = BITMAP_ALLOC (NULL);
11273       if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == DECL_EXPR
11274 	  && TREE_CODE (DECL_EXPR_DECL (OMP_FOR_PRE_BODY (for_stmt)))
11275 	     == VAR_DECL)
11276 	{
11277 	  t = OMP_FOR_PRE_BODY (for_stmt);
11278 	  bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
11279 	}
11280       else if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == STATEMENT_LIST)
11281 	{
11282 	  tree_stmt_iterator si;
11283 	  for (si = tsi_start (OMP_FOR_PRE_BODY (for_stmt)); !tsi_end_p (si);
11284 	       tsi_next (&si))
11285 	    {
11286 	      t = tsi_stmt (si);
11287 	      if (TREE_CODE (t) == DECL_EXPR
11288 		  && TREE_CODE (DECL_EXPR_DECL (t)) == VAR_DECL)
11289 		bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
11290 	    }
11291 	}
11292     }
11293   if (OMP_FOR_PRE_BODY (for_stmt))
11294     {
11295       if (TREE_CODE (for_stmt) != OMP_TASKLOOP || gimplify_omp_ctxp)
11296 	gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
11297       else
11298 	{
11299 	  struct gimplify_omp_ctx ctx;
11300 	  memset (&ctx, 0, sizeof (ctx));
11301 	  ctx.region_type = ORT_NONE;
11302 	  gimplify_omp_ctxp = &ctx;
11303 	  gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
11304 	  gimplify_omp_ctxp = NULL;
11305 	}
11306     }
11307   OMP_FOR_PRE_BODY (for_stmt) = NULL_TREE;
11308 
11309   if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
11310     for_stmt = inner_for_stmt;
11311 
11312   /* For taskloop, need to gimplify the start, end and step before the
11313      taskloop, outside of the taskloop omp context.  */
11314   if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
11315     {
11316       for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
11317 	{
11318 	  t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
11319 	  if (!is_gimple_constant (TREE_OPERAND (t, 1)))
11320 	    {
11321 	      tree type = TREE_TYPE (TREE_OPERAND (t, 0));
11322 	      TREE_OPERAND (t, 1)
11323 		= get_initialized_tmp_var (TREE_OPERAND (t, 1),
11324 					   gimple_seq_empty_p (for_pre_body)
11325 					   ? pre_p : &for_pre_body, NULL,
11326 					   false);
11327 	      /* Reference to pointer conversion is considered useless,
11328 		 but is significant for firstprivate clause.  Force it
11329 		 here.  */
11330 	      if (TREE_CODE (type) == POINTER_TYPE
11331 		  && (TREE_CODE (TREE_TYPE (TREE_OPERAND (t, 1)))
11332 		      == REFERENCE_TYPE))
11333 		{
11334 		  tree v = create_tmp_var (TYPE_MAIN_VARIANT (type));
11335 		  tree m = build2 (INIT_EXPR, TREE_TYPE (v), v,
11336 				   TREE_OPERAND (t, 1));
11337 		  gimplify_and_add (m, gimple_seq_empty_p (for_pre_body)
11338 				       ? pre_p : &for_pre_body);
11339 		  TREE_OPERAND (t, 1) = v;
11340 		}
11341 	      tree c = build_omp_clause (input_location,
11342 					 OMP_CLAUSE_FIRSTPRIVATE);
11343 	      OMP_CLAUSE_DECL (c) = TREE_OPERAND (t, 1);
11344 	      OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
11345 	      OMP_FOR_CLAUSES (orig_for_stmt) = c;
11346 	    }
11347 
11348 	  /* Handle OMP_FOR_COND.  */
11349 	  t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
11350 	  if (!is_gimple_constant (TREE_OPERAND (t, 1)))
11351 	    {
11352 	      tree type = TREE_TYPE (TREE_OPERAND (t, 0));
11353 	      TREE_OPERAND (t, 1)
11354 		= get_initialized_tmp_var (TREE_OPERAND (t, 1),
11355 					   gimple_seq_empty_p (for_pre_body)
11356 					   ? pre_p : &for_pre_body, NULL,
11357 					   false);
11358 	      /* Reference to pointer conversion is considered useless,
11359 		 but is significant for firstprivate clause.  Force it
11360 		 here.  */
11361 	      if (TREE_CODE (type) == POINTER_TYPE
11362 		  && (TREE_CODE (TREE_TYPE (TREE_OPERAND (t, 1)))
11363 		      == REFERENCE_TYPE))
11364 		{
11365 		  tree v = create_tmp_var (TYPE_MAIN_VARIANT (type));
11366 		  tree m = build2 (INIT_EXPR, TREE_TYPE (v), v,
11367 				   TREE_OPERAND (t, 1));
11368 		  gimplify_and_add (m, gimple_seq_empty_p (for_pre_body)
11369 				       ? pre_p : &for_pre_body);
11370 		  TREE_OPERAND (t, 1) = v;
11371 		}
11372 	      tree c = build_omp_clause (input_location,
11373 					 OMP_CLAUSE_FIRSTPRIVATE);
11374 	      OMP_CLAUSE_DECL (c) = TREE_OPERAND (t, 1);
11375 	      OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
11376 	      OMP_FOR_CLAUSES (orig_for_stmt) = c;
11377 	    }
11378 
11379 	  /* Handle OMP_FOR_INCR.  */
11380 	  t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
11381 	  if (TREE_CODE (t) == MODIFY_EXPR)
11382 	    {
11383 	      decl = TREE_OPERAND (t, 0);
11384 	      t = TREE_OPERAND (t, 1);
11385 	      tree *tp = &TREE_OPERAND (t, 1);
11386 	      if (TREE_CODE (t) == PLUS_EXPR && *tp == decl)
11387 		tp = &TREE_OPERAND (t, 0);
11388 
11389 	      if (!is_gimple_constant (*tp))
11390 		{
11391 		  gimple_seq *seq = gimple_seq_empty_p (for_pre_body)
11392 				    ? pre_p : &for_pre_body;
11393 		  *tp = get_initialized_tmp_var (*tp, seq, NULL, false);
11394 		  tree c = build_omp_clause (input_location,
11395 					     OMP_CLAUSE_FIRSTPRIVATE);
11396 		  OMP_CLAUSE_DECL (c) = *tp;
11397 		  OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
11398 		  OMP_FOR_CLAUSES (orig_for_stmt) = c;
11399 		}
11400 	    }
11401 	}
11402 
11403       gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (orig_for_stmt), pre_p, ort,
11404 				 OMP_TASKLOOP);
11405     }
11406 
11407   if (orig_for_stmt != for_stmt)
11408     gimplify_omp_ctxp->combined_loop = true;
11409 
11410   for_body = NULL;
11411   gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
11412 	      == TREE_VEC_LENGTH (OMP_FOR_COND (for_stmt)));
11413   gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
11414 	      == TREE_VEC_LENGTH (OMP_FOR_INCR (for_stmt)));
11415 
11416   tree c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_ORDERED);
11417   bool is_doacross = false;
11418   if (c && OMP_CLAUSE_ORDERED_EXPR (c))
11419     {
11420       is_doacross = true;
11421       gimplify_omp_ctxp->loop_iter_var.create (TREE_VEC_LENGTH
11422 						 (OMP_FOR_INIT (for_stmt))
11423 					       * 2);
11424     }
11425   int collapse = 1, tile = 0;
11426   c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_COLLAPSE);
11427   if (c)
11428     collapse = tree_to_shwi (OMP_CLAUSE_COLLAPSE_EXPR (c));
11429   c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_TILE);
11430   if (c)
11431     tile = list_length (OMP_CLAUSE_TILE_LIST (c));
11432   for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
11433     {
11434       t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
11435       gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
11436       decl = TREE_OPERAND (t, 0);
11437       gcc_assert (DECL_P (decl));
11438       gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl))
11439 		  || POINTER_TYPE_P (TREE_TYPE (decl)));
11440       if (is_doacross)
11441 	{
11442 	  if (TREE_CODE (for_stmt) == OMP_FOR && OMP_FOR_ORIG_DECLS (for_stmt))
11443 	    {
11444 	      tree orig_decl = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
11445 	      if (TREE_CODE (orig_decl) == TREE_LIST)
11446 		{
11447 		  orig_decl = TREE_PURPOSE (orig_decl);
11448 		  if (!orig_decl)
11449 		    orig_decl = decl;
11450 		}
11451 	      gimplify_omp_ctxp->loop_iter_var.quick_push (orig_decl);
11452 	    }
11453 	  else
11454 	    gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
11455 	  gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
11456 	}
11457 
11458       /* Make sure the iteration variable is private.  */
11459       tree c = NULL_TREE;
11460       tree c2 = NULL_TREE;
11461       if (orig_for_stmt != for_stmt)
11462 	{
11463 	  /* Preserve this information until we gimplify the inner simd.  */
11464 	  if (has_decl_expr
11465 	      && bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
11466 	    TREE_PRIVATE (t) = 1;
11467 	}
11468       else if (ort == ORT_SIMD)
11469 	{
11470 	  splay_tree_node n = splay_tree_lookup (gimplify_omp_ctxp->variables,
11471 						 (splay_tree_key) decl);
11472 	  omp_is_private (gimplify_omp_ctxp, decl,
11473 			  1 + (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
11474 			       != 1));
11475 	  if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
11476 	    {
11477 	      omp_notice_variable (gimplify_omp_ctxp, decl, true);
11478 	      if (n->value & GOVD_LASTPRIVATE_CONDITIONAL)
11479 		for (tree c3 = omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
11480 						OMP_CLAUSE_LASTPRIVATE);
11481 		     c3; c3 = omp_find_clause (OMP_CLAUSE_CHAIN (c3),
11482 					       OMP_CLAUSE_LASTPRIVATE))
11483 		  if (OMP_CLAUSE_DECL (c3) == decl)
11484 		    {
11485 		      warning_at (OMP_CLAUSE_LOCATION (c3), 0,
11486 				  "conditional %<lastprivate%> on loop "
11487 				  "iterator %qD ignored", decl);
11488 		      OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c3) = 0;
11489 		      n->value &= ~GOVD_LASTPRIVATE_CONDITIONAL;
11490 		    }
11491 	    }
11492 	  else if (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1 && !loop_p)
11493 	    {
11494 	      c = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
11495 	      OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
11496 	      unsigned int flags = GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN;
11497 	      if ((has_decl_expr
11498 		   && bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
11499 		  || TREE_PRIVATE (t))
11500 		{
11501 		  OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
11502 		  flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
11503 		}
11504 	      struct gimplify_omp_ctx *outer
11505 		= gimplify_omp_ctxp->outer_context;
11506 	      if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
11507 		{
11508 		  if (outer->region_type == ORT_WORKSHARE
11509 		      && outer->combined_loop)
11510 		    {
11511 		      n = splay_tree_lookup (outer->variables,
11512 					     (splay_tree_key)decl);
11513 		      if (n != NULL && (n->value & GOVD_LOCAL) != 0)
11514 			{
11515 			  OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
11516 			  flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
11517 			}
11518 		      else
11519 			{
11520 			  struct gimplify_omp_ctx *octx = outer->outer_context;
11521 			  if (octx
11522 			      && octx->region_type == ORT_COMBINED_PARALLEL
11523 			      && octx->outer_context
11524 			      && (octx->outer_context->region_type
11525 				  == ORT_WORKSHARE)
11526 			      && octx->outer_context->combined_loop)
11527 			    {
11528 			      octx = octx->outer_context;
11529 			      n = splay_tree_lookup (octx->variables,
11530 						     (splay_tree_key)decl);
11531 			      if (n != NULL && (n->value & GOVD_LOCAL) != 0)
11532 				{
11533 				  OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
11534 				  flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
11535 				}
11536 			    }
11537 			}
11538 		    }
11539 		}
11540 
11541 	      OMP_CLAUSE_DECL (c) = decl;
11542 	      OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
11543 	      OMP_FOR_CLAUSES (for_stmt) = c;
11544 	      omp_add_variable (gimplify_omp_ctxp, decl, flags);
11545 	      if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
11546 		{
11547 		  if (outer->region_type == ORT_WORKSHARE
11548 		      && outer->combined_loop)
11549 		    {
11550 		      if (outer->outer_context
11551 			  && (outer->outer_context->region_type
11552 			      == ORT_COMBINED_PARALLEL))
11553 			outer = outer->outer_context;
11554 		      else if (omp_check_private (outer, decl, false))
11555 			outer = NULL;
11556 		    }
11557 		  else if (((outer->region_type & ORT_TASKLOOP)
11558 			    == ORT_TASKLOOP)
11559 			   && outer->combined_loop
11560 			   && !omp_check_private (gimplify_omp_ctxp,
11561 						  decl, false))
11562 		    ;
11563 		  else if (outer->region_type != ORT_COMBINED_PARALLEL)
11564 		    {
11565 		      omp_notice_variable (outer, decl, true);
11566 		      outer = NULL;
11567 		    }
11568 		  if (outer)
11569 		    {
11570 		      n = splay_tree_lookup (outer->variables,
11571 					     (splay_tree_key)decl);
11572 		      if (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
11573 			{
11574 			  omp_add_variable (outer, decl,
11575 					    GOVD_LASTPRIVATE | GOVD_SEEN);
11576 			  if (outer->region_type == ORT_COMBINED_PARALLEL
11577 			      && outer->outer_context
11578 			      && (outer->outer_context->region_type
11579 				  == ORT_WORKSHARE)
11580 			      && outer->outer_context->combined_loop)
11581 			    {
11582 			      outer = outer->outer_context;
11583 			      n = splay_tree_lookup (outer->variables,
11584 						     (splay_tree_key)decl);
11585 			      if (omp_check_private (outer, decl, false))
11586 				outer = NULL;
11587 			      else if (n == NULL
11588 				       || ((n->value & GOVD_DATA_SHARE_CLASS)
11589 					   == 0))
11590 				omp_add_variable (outer, decl,
11591 						  GOVD_LASTPRIVATE
11592 						  | GOVD_SEEN);
11593 			      else
11594 				outer = NULL;
11595 			    }
11596 			  if (outer && outer->outer_context
11597 			      && ((outer->outer_context->region_type
11598 				   & ORT_COMBINED_TEAMS) == ORT_COMBINED_TEAMS
11599 				  || (((outer->region_type & ORT_TASKLOOP)
11600 				       == ORT_TASKLOOP)
11601 				      && (outer->outer_context->region_type
11602 					  == ORT_COMBINED_PARALLEL))))
11603 			    {
11604 			      outer = outer->outer_context;
11605 			      n = splay_tree_lookup (outer->variables,
11606 						     (splay_tree_key)decl);
11607 			      if (n == NULL
11608 				  || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
11609 				omp_add_variable (outer, decl,
11610 						  GOVD_SHARED | GOVD_SEEN);
11611 			      else
11612 				outer = NULL;
11613 			    }
11614 			  if (outer && outer->outer_context)
11615 			    omp_notice_variable (outer->outer_context, decl,
11616 						 true);
11617 			}
11618 		    }
11619 		}
11620 	    }
11621 	  else
11622 	    {
11623 	      bool lastprivate
11624 		= (!has_decl_expr
11625 		   || !bitmap_bit_p (has_decl_expr, DECL_UID (decl)));
11626 	      if (TREE_PRIVATE (t))
11627 		lastprivate = false;
11628 	      if (loop_p && OMP_FOR_ORIG_DECLS (for_stmt))
11629 		{
11630 		  tree elt = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
11631 		  if (TREE_CODE (elt) == TREE_LIST && TREE_PURPOSE (elt))
11632 		    lastprivate = false;
11633 		}
11634 
11635 	      struct gimplify_omp_ctx *outer
11636 		= gimplify_omp_ctxp->outer_context;
11637 	      if (outer && lastprivate)
11638 		{
11639 		  if (outer->region_type == ORT_WORKSHARE
11640 		      && outer->combined_loop)
11641 		    {
11642 		      n = splay_tree_lookup (outer->variables,
11643 					     (splay_tree_key)decl);
11644 		      if (n != NULL && (n->value & GOVD_LOCAL) != 0)
11645 			{
11646 			  lastprivate = false;
11647 			  outer = NULL;
11648 			}
11649 		      else if (outer->outer_context
11650 			       && (outer->outer_context->region_type
11651 				   == ORT_COMBINED_PARALLEL))
11652 			outer = outer->outer_context;
11653 		      else if (omp_check_private (outer, decl, false))
11654 			outer = NULL;
11655 		    }
11656 		  else if (((outer->region_type & ORT_TASKLOOP)
11657 			    == ORT_TASKLOOP)
11658 			   && outer->combined_loop
11659 			   && !omp_check_private (gimplify_omp_ctxp,
11660 						  decl, false))
11661 		    ;
11662 		  else if (outer->region_type != ORT_COMBINED_PARALLEL)
11663 		    {
11664 		      omp_notice_variable (outer, decl, true);
11665 		      outer = NULL;
11666 		    }
11667 		  if (outer)
11668 		    {
11669 		      n = splay_tree_lookup (outer->variables,
11670 					     (splay_tree_key)decl);
11671 		      if (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
11672 			{
11673 			  omp_add_variable (outer, decl,
11674 					    GOVD_LASTPRIVATE | GOVD_SEEN);
11675 			  if (outer->region_type == ORT_COMBINED_PARALLEL
11676 			      && outer->outer_context
11677 			      && (outer->outer_context->region_type
11678 				  == ORT_WORKSHARE)
11679 			      && outer->outer_context->combined_loop)
11680 			    {
11681 			      outer = outer->outer_context;
11682 			      n = splay_tree_lookup (outer->variables,
11683 						     (splay_tree_key)decl);
11684 			      if (omp_check_private (outer, decl, false))
11685 				outer = NULL;
11686 			      else if (n == NULL
11687 				       || ((n->value & GOVD_DATA_SHARE_CLASS)
11688 					   == 0))
11689 				omp_add_variable (outer, decl,
11690 						  GOVD_LASTPRIVATE
11691 						  | GOVD_SEEN);
11692 			      else
11693 				outer = NULL;
11694 			    }
11695 			  if (outer && outer->outer_context
11696 			      && ((outer->outer_context->region_type
11697 				   & ORT_COMBINED_TEAMS) == ORT_COMBINED_TEAMS
11698 				  || (((outer->region_type & ORT_TASKLOOP)
11699 				       == ORT_TASKLOOP)
11700 				      && (outer->outer_context->region_type
11701 					  == ORT_COMBINED_PARALLEL))))
11702 			    {
11703 			      outer = outer->outer_context;
11704 			      n = splay_tree_lookup (outer->variables,
11705 						     (splay_tree_key)decl);
11706 			      if (n == NULL
11707 				  || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
11708 				omp_add_variable (outer, decl,
11709 						  GOVD_SHARED | GOVD_SEEN);
11710 			      else
11711 				outer = NULL;
11712 			    }
11713 			  if (outer && outer->outer_context)
11714 			    omp_notice_variable (outer->outer_context, decl,
11715 						 true);
11716 			}
11717 		    }
11718 		}
11719 
11720 	      c = build_omp_clause (input_location,
11721 				    lastprivate ? OMP_CLAUSE_LASTPRIVATE
11722 						: OMP_CLAUSE_PRIVATE);
11723 	      OMP_CLAUSE_DECL (c) = decl;
11724 	      OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
11725 	      OMP_FOR_CLAUSES (for_stmt) = c;
11726 	      omp_add_variable (gimplify_omp_ctxp, decl,
11727 				(lastprivate ? GOVD_LASTPRIVATE : GOVD_PRIVATE)
11728 				| GOVD_EXPLICIT | GOVD_SEEN);
11729 	      c = NULL_TREE;
11730 	    }
11731 	}
11732       else if (omp_is_private (gimplify_omp_ctxp, decl, 0))
11733 	{
11734 	  omp_notice_variable (gimplify_omp_ctxp, decl, true);
11735 	  splay_tree_node n = splay_tree_lookup (gimplify_omp_ctxp->variables,
11736 						 (splay_tree_key) decl);
11737 	  if (n && (n->value & GOVD_LASTPRIVATE_CONDITIONAL))
11738 	    for (tree c3 = omp_find_clause (OMP_FOR_CLAUSES (for_stmt),
11739 					    OMP_CLAUSE_LASTPRIVATE);
11740 		 c3; c3 = omp_find_clause (OMP_CLAUSE_CHAIN (c3),
11741 					   OMP_CLAUSE_LASTPRIVATE))
11742 	      if (OMP_CLAUSE_DECL (c3) == decl)
11743 		{
11744 		  warning_at (OMP_CLAUSE_LOCATION (c3), 0,
11745 			      "conditional %<lastprivate%> on loop "
11746 			      "iterator %qD ignored", decl);
11747 		  OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c3) = 0;
11748 		  n->value &= ~GOVD_LASTPRIVATE_CONDITIONAL;
11749 		}
11750 	}
11751       else
11752 	omp_add_variable (gimplify_omp_ctxp, decl, GOVD_PRIVATE | GOVD_SEEN);
11753 
11754       /* If DECL is not a gimple register, create a temporary variable to act
11755 	 as an iteration counter.  This is valid, since DECL cannot be
11756 	 modified in the body of the loop.  Similarly for any iteration vars
11757 	 in simd with collapse > 1 where the iterator vars must be
11758 	 lastprivate.  */
11759       if (orig_for_stmt != for_stmt)
11760 	var = decl;
11761       else if (!is_gimple_reg (decl)
11762 	       || (ort == ORT_SIMD
11763 		   && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) > 1))
11764 	{
11765 	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
11766 	  /* Make sure omp_add_variable is not called on it prematurely.
11767 	     We call it ourselves a few lines later.  */
11768 	  gimplify_omp_ctxp = NULL;
11769 	  var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
11770 	  gimplify_omp_ctxp = ctx;
11771 	  TREE_OPERAND (t, 0) = var;
11772 
11773 	  gimplify_seq_add_stmt (&for_body, gimple_build_assign (decl, var));
11774 
11775 	  if (ort == ORT_SIMD
11776 	      && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
11777 	    {
11778 	      c2 = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
11779 	      OMP_CLAUSE_LINEAR_NO_COPYIN (c2) = 1;
11780 	      OMP_CLAUSE_LINEAR_NO_COPYOUT (c2) = 1;
11781 	      OMP_CLAUSE_DECL (c2) = var;
11782 	      OMP_CLAUSE_CHAIN (c2) = OMP_FOR_CLAUSES (for_stmt);
11783 	      OMP_FOR_CLAUSES (for_stmt) = c2;
11784 	      omp_add_variable (gimplify_omp_ctxp, var,
11785 				GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN);
11786 	      if (c == NULL_TREE)
11787 		{
11788 		  c = c2;
11789 		  c2 = NULL_TREE;
11790 		}
11791 	    }
11792 	  else
11793 	    omp_add_variable (gimplify_omp_ctxp, var,
11794 			      GOVD_PRIVATE | GOVD_SEEN);
11795 	}
11796       else
11797 	var = decl;
11798 
11799       gimplify_omp_ctxp->in_for_exprs = true;
11800       tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
11801 			    is_gimple_val, fb_rvalue, false);
11802       gimplify_omp_ctxp->in_for_exprs = false;
11803       ret = MIN (ret, tret);
11804       if (ret == GS_ERROR)
11805 	return ret;
11806 
11807       /* Handle OMP_FOR_COND.  */
11808       t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
11809       gcc_assert (COMPARISON_CLASS_P (t));
11810       gcc_assert (TREE_OPERAND (t, 0) == decl);
11811 
11812       gimplify_omp_ctxp->in_for_exprs = true;
11813       tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
11814 			    is_gimple_val, fb_rvalue, false);
11815       gimplify_omp_ctxp->in_for_exprs = false;
11816       ret = MIN (ret, tret);
11817 
11818       /* Handle OMP_FOR_INCR.  */
11819       t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
11820       switch (TREE_CODE (t))
11821 	{
11822 	case PREINCREMENT_EXPR:
11823 	case POSTINCREMENT_EXPR:
11824 	  {
11825 	    tree decl = TREE_OPERAND (t, 0);
11826 	    /* c_omp_for_incr_canonicalize_ptr() should have been
11827 	       called to massage things appropriately.  */
11828 	    gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
11829 
11830 	    if (orig_for_stmt != for_stmt)
11831 	      break;
11832 	    t = build_int_cst (TREE_TYPE (decl), 1);
11833 	    if (c)
11834 	      OMP_CLAUSE_LINEAR_STEP (c) = t;
11835 	    t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
11836 	    t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
11837 	    TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
11838 	    break;
11839 	  }
11840 
11841 	case PREDECREMENT_EXPR:
11842 	case POSTDECREMENT_EXPR:
11843 	  /* c_omp_for_incr_canonicalize_ptr() should have been
11844 	     called to massage things appropriately.  */
11845 	  gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
11846 	  if (orig_for_stmt != for_stmt)
11847 	    break;
11848 	  t = build_int_cst (TREE_TYPE (decl), -1);
11849 	  if (c)
11850 	    OMP_CLAUSE_LINEAR_STEP (c) = t;
11851 	  t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
11852 	  t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
11853 	  TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
11854 	  break;
11855 
11856 	case MODIFY_EXPR:
11857 	  gcc_assert (TREE_OPERAND (t, 0) == decl);
11858 	  TREE_OPERAND (t, 0) = var;
11859 
11860 	  t = TREE_OPERAND (t, 1);
11861 	  switch (TREE_CODE (t))
11862 	    {
11863 	    case PLUS_EXPR:
11864 	      if (TREE_OPERAND (t, 1) == decl)
11865 		{
11866 		  TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0);
11867 		  TREE_OPERAND (t, 0) = var;
11868 		  break;
11869 		}
11870 
11871 	      /* Fallthru.  */
11872 	    case MINUS_EXPR:
11873 	    case POINTER_PLUS_EXPR:
11874 	      gcc_assert (TREE_OPERAND (t, 0) == decl);
11875 	      TREE_OPERAND (t, 0) = var;
11876 	      break;
11877 	    default:
11878 	      gcc_unreachable ();
11879 	    }
11880 
11881 	  gimplify_omp_ctxp->in_for_exprs = true;
11882 	  tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
11883 				is_gimple_val, fb_rvalue, false);
11884 	  ret = MIN (ret, tret);
11885 	  if (c)
11886 	    {
11887 	      tree step = TREE_OPERAND (t, 1);
11888 	      tree stept = TREE_TYPE (decl);
11889 	      if (POINTER_TYPE_P (stept))
11890 		stept = sizetype;
11891 	      step = fold_convert (stept, step);
11892 	      if (TREE_CODE (t) == MINUS_EXPR)
11893 		step = fold_build1 (NEGATE_EXPR, stept, step);
11894 	      OMP_CLAUSE_LINEAR_STEP (c) = step;
11895 	      if (step != TREE_OPERAND (t, 1))
11896 		{
11897 		  tret = gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c),
11898 					&for_pre_body, NULL,
11899 					is_gimple_val, fb_rvalue, false);
11900 		  ret = MIN (ret, tret);
11901 		}
11902 	    }
11903 	  gimplify_omp_ctxp->in_for_exprs = false;
11904 	  break;
11905 
11906 	default:
11907 	  gcc_unreachable ();
11908 	}
11909 
11910       if (c2)
11911 	{
11912 	  gcc_assert (c);
11913 	  OMP_CLAUSE_LINEAR_STEP (c2) = OMP_CLAUSE_LINEAR_STEP (c);
11914 	}
11915 
11916       if ((var != decl || collapse > 1 || tile) && orig_for_stmt == for_stmt)
11917 	{
11918 	  for (c = OMP_FOR_CLAUSES (for_stmt); c ; c = OMP_CLAUSE_CHAIN (c))
11919 	    if (((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
11920 		  && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) == NULL)
11921 		 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
11922 		     && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)
11923 		     && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) == NULL))
11924 		&& OMP_CLAUSE_DECL (c) == decl)
11925 	      {
11926 		if (is_doacross && (collapse == 1 || i >= collapse))
11927 		  t = var;
11928 		else
11929 		  {
11930 		    t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
11931 		    gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
11932 		    gcc_assert (TREE_OPERAND (t, 0) == var);
11933 		    t = TREE_OPERAND (t, 1);
11934 		    gcc_assert (TREE_CODE (t) == PLUS_EXPR
11935 				|| TREE_CODE (t) == MINUS_EXPR
11936 				|| TREE_CODE (t) == POINTER_PLUS_EXPR);
11937 		    gcc_assert (TREE_OPERAND (t, 0) == var);
11938 		    t = build2 (TREE_CODE (t), TREE_TYPE (decl),
11939 				is_doacross ? var : decl,
11940 				TREE_OPERAND (t, 1));
11941 		  }
11942 		gimple_seq *seq;
11943 		if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
11944 		  seq = &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c);
11945 		else
11946 		  seq = &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c);
11947 		push_gimplify_context ();
11948 		gimplify_assign (decl, t, seq);
11949 		gimple *bind = NULL;
11950 		if (gimplify_ctxp->temps)
11951 		  {
11952 		    bind = gimple_build_bind (NULL_TREE, *seq, NULL_TREE);
11953 		    *seq = NULL;
11954 		    gimplify_seq_add_stmt (seq, bind);
11955 		  }
11956 		pop_gimplify_context (bind);
11957 	      }
11958 	}
11959     }
11960 
11961   BITMAP_FREE (has_decl_expr);
11962 
11963   if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP
11964       || (loop_p && orig_for_stmt == for_stmt))
11965     {
11966       push_gimplify_context ();
11967       if (TREE_CODE (OMP_FOR_BODY (orig_for_stmt)) != BIND_EXPR)
11968 	{
11969 	  OMP_FOR_BODY (orig_for_stmt)
11970 	    = build3 (BIND_EXPR, void_type_node, NULL,
11971 		      OMP_FOR_BODY (orig_for_stmt), NULL);
11972 	  TREE_SIDE_EFFECTS (OMP_FOR_BODY (orig_for_stmt)) = 1;
11973 	}
11974     }
11975 
11976   gimple *g = gimplify_and_return_first (OMP_FOR_BODY (orig_for_stmt),
11977 					 &for_body);
11978 
11979   if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP
11980       || (loop_p && orig_for_stmt == for_stmt))
11981     {
11982       if (gimple_code (g) == GIMPLE_BIND)
11983 	pop_gimplify_context (g);
11984       else
11985 	pop_gimplify_context (NULL);
11986     }
11987 
11988   if (orig_for_stmt != for_stmt)
11989     for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
11990       {
11991 	t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
11992 	decl = TREE_OPERAND (t, 0);
11993 	struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
11994 	if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
11995 	  gimplify_omp_ctxp = ctx->outer_context;
11996 	var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
11997 	gimplify_omp_ctxp = ctx;
11998 	omp_add_variable (gimplify_omp_ctxp, var, GOVD_PRIVATE | GOVD_SEEN);
11999 	TREE_OPERAND (t, 0) = var;
12000 	t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
12001 	TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
12002 	TREE_OPERAND (TREE_OPERAND (t, 1), 0) = var;
12003       }
12004 
12005   gimplify_adjust_omp_clauses (pre_p, for_body,
12006 			       &OMP_FOR_CLAUSES (orig_for_stmt),
12007 			       TREE_CODE (orig_for_stmt));
12008 
12009   int kind;
12010   switch (TREE_CODE (orig_for_stmt))
12011     {
12012     case OMP_FOR: kind = GF_OMP_FOR_KIND_FOR; break;
12013     case OMP_SIMD: kind = GF_OMP_FOR_KIND_SIMD; break;
12014     case OMP_DISTRIBUTE: kind = GF_OMP_FOR_KIND_DISTRIBUTE; break;
12015     case OMP_TASKLOOP: kind = GF_OMP_FOR_KIND_TASKLOOP; break;
12016     case OACC_LOOP: kind = GF_OMP_FOR_KIND_OACC_LOOP; break;
12017     default:
12018       gcc_unreachable ();
12019     }
12020   if (loop_p && kind == GF_OMP_FOR_KIND_SIMD)
12021     {
12022       gimplify_seq_add_seq (pre_p, for_pre_body);
12023       for_pre_body = NULL;
12024     }
12025   gfor = gimple_build_omp_for (for_body, kind, OMP_FOR_CLAUSES (orig_for_stmt),
12026 			       TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)),
12027 			       for_pre_body);
12028   if (orig_for_stmt != for_stmt)
12029     gimple_omp_for_set_combined_p (gfor, true);
12030   if (gimplify_omp_ctxp
12031       && (gimplify_omp_ctxp->combined_loop
12032 	  || (gimplify_omp_ctxp->region_type == ORT_COMBINED_PARALLEL
12033 	      && gimplify_omp_ctxp->outer_context
12034 	      && gimplify_omp_ctxp->outer_context->combined_loop)))
12035     {
12036       gimple_omp_for_set_combined_into_p (gfor, true);
12037       if (gimplify_omp_ctxp->combined_loop)
12038 	gcc_assert (TREE_CODE (orig_for_stmt) == OMP_SIMD);
12039       else
12040 	gcc_assert (TREE_CODE (orig_for_stmt) == OMP_FOR);
12041     }
12042 
12043   for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
12044     {
12045       t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
12046       gimple_omp_for_set_index (gfor, i, TREE_OPERAND (t, 0));
12047       gimple_omp_for_set_initial (gfor, i, TREE_OPERAND (t, 1));
12048       t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
12049       gimple_omp_for_set_cond (gfor, i, TREE_CODE (t));
12050       gimple_omp_for_set_final (gfor, i, TREE_OPERAND (t, 1));
12051       t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
12052       gimple_omp_for_set_incr (gfor, i, TREE_OPERAND (t, 1));
12053     }
12054 
12055   /* OMP_TASKLOOP is gimplified as two GIMPLE_OMP_FOR taskloop
12056      constructs with GIMPLE_OMP_TASK sandwiched in between them.
12057      The outer taskloop stands for computing the number of iterations,
12058      counts for collapsed loops and holding taskloop specific clauses.
12059      The task construct stands for the effect of data sharing on the
12060      explicit task it creates and the inner taskloop stands for expansion
12061      of the static loop inside of the explicit task construct.  */
12062   if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
12063     {
12064       tree *gfor_clauses_ptr = gimple_omp_for_clauses_ptr (gfor);
12065       tree task_clauses = NULL_TREE;
12066       tree c = *gfor_clauses_ptr;
12067       tree *gtask_clauses_ptr = &task_clauses;
12068       tree outer_for_clauses = NULL_TREE;
12069       tree *gforo_clauses_ptr = &outer_for_clauses;
12070       for (; c; c = OMP_CLAUSE_CHAIN (c))
12071 	switch (OMP_CLAUSE_CODE (c))
12072 	  {
12073 	  /* These clauses are allowed on task, move them there.  */
12074 	  case OMP_CLAUSE_SHARED:
12075 	  case OMP_CLAUSE_FIRSTPRIVATE:
12076 	  case OMP_CLAUSE_DEFAULT:
12077 	  case OMP_CLAUSE_IF:
12078 	  case OMP_CLAUSE_UNTIED:
12079 	  case OMP_CLAUSE_FINAL:
12080 	  case OMP_CLAUSE_MERGEABLE:
12081 	  case OMP_CLAUSE_PRIORITY:
12082 	  case OMP_CLAUSE_REDUCTION:
12083 	  case OMP_CLAUSE_IN_REDUCTION:
12084 	    *gtask_clauses_ptr = c;
12085 	    gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
12086 	    break;
12087 	  case OMP_CLAUSE_PRIVATE:
12088 	    if (OMP_CLAUSE_PRIVATE_TASKLOOP_IV (c))
12089 	      {
12090 		/* We want private on outer for and firstprivate
12091 		   on task.  */
12092 		*gtask_clauses_ptr
12093 		  = build_omp_clause (OMP_CLAUSE_LOCATION (c),
12094 				      OMP_CLAUSE_FIRSTPRIVATE);
12095 		OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
12096 		lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL);
12097 		gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
12098 		*gforo_clauses_ptr = c;
12099 		gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
12100 	      }
12101 	    else
12102 	      {
12103 		*gtask_clauses_ptr = c;
12104 		gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
12105 	      }
12106 	    break;
12107 	  /* These clauses go into outer taskloop clauses.  */
12108 	  case OMP_CLAUSE_GRAINSIZE:
12109 	  case OMP_CLAUSE_NUM_TASKS:
12110 	  case OMP_CLAUSE_NOGROUP:
12111 	    *gforo_clauses_ptr = c;
12112 	    gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
12113 	    break;
12114 	  /* Taskloop clause we duplicate on both taskloops.  */
12115 	  case OMP_CLAUSE_COLLAPSE:
12116 	    *gfor_clauses_ptr = c;
12117 	    gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
12118 	    *gforo_clauses_ptr = copy_node (c);
12119 	    gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
12120 	    break;
12121 	  /* For lastprivate, keep the clause on inner taskloop, and add
12122 	     a shared clause on task.  If the same decl is also firstprivate,
12123 	     add also firstprivate clause on the inner taskloop.  */
12124 	  case OMP_CLAUSE_LASTPRIVATE:
12125 	    if (OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c))
12126 	      {
12127 		/* For taskloop C++ lastprivate IVs, we want:
12128 		   1) private on outer taskloop
12129 		   2) firstprivate and shared on task
12130 		   3) lastprivate on inner taskloop  */
12131 		*gtask_clauses_ptr
12132 		  = build_omp_clause (OMP_CLAUSE_LOCATION (c),
12133 				      OMP_CLAUSE_FIRSTPRIVATE);
12134 		OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
12135 		lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL);
12136 		gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
12137 		OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c) = 1;
12138 		*gforo_clauses_ptr = build_omp_clause (OMP_CLAUSE_LOCATION (c),
12139 						       OMP_CLAUSE_PRIVATE);
12140 		OMP_CLAUSE_DECL (*gforo_clauses_ptr) = OMP_CLAUSE_DECL (c);
12141 		OMP_CLAUSE_PRIVATE_TASKLOOP_IV (*gforo_clauses_ptr) = 1;
12142 		TREE_TYPE (*gforo_clauses_ptr) = TREE_TYPE (c);
12143 		gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
12144 	      }
12145 	    *gfor_clauses_ptr = c;
12146 	    gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
12147 	    *gtask_clauses_ptr
12148 	      = build_omp_clause (OMP_CLAUSE_LOCATION (c), OMP_CLAUSE_SHARED);
12149 	    OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
12150 	    if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
12151 	      OMP_CLAUSE_SHARED_FIRSTPRIVATE (*gtask_clauses_ptr) = 1;
12152 	    gtask_clauses_ptr
12153 	      = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
12154 	    break;
12155 	  default:
12156 	    gcc_unreachable ();
12157 	  }
12158       *gfor_clauses_ptr = NULL_TREE;
12159       *gtask_clauses_ptr = NULL_TREE;
12160       *gforo_clauses_ptr = NULL_TREE;
12161       g = gimple_build_bind (NULL_TREE, gfor, NULL_TREE);
12162       g = gimple_build_omp_task (g, task_clauses, NULL_TREE, NULL_TREE,
12163 				 NULL_TREE, NULL_TREE, NULL_TREE);
12164       gimple_omp_task_set_taskloop_p (g, true);
12165       g = gimple_build_bind (NULL_TREE, g, NULL_TREE);
12166       gomp_for *gforo
12167 	= gimple_build_omp_for (g, GF_OMP_FOR_KIND_TASKLOOP, outer_for_clauses,
12168 				gimple_omp_for_collapse (gfor),
12169 				gimple_omp_for_pre_body (gfor));
12170       gimple_omp_for_set_pre_body (gfor, NULL);
12171       gimple_omp_for_set_combined_p (gforo, true);
12172       gimple_omp_for_set_combined_into_p (gfor, true);
12173       for (i = 0; i < (int) gimple_omp_for_collapse (gfor); i++)
12174 	{
12175 	  tree type = TREE_TYPE (gimple_omp_for_index (gfor, i));
12176 	  tree v = create_tmp_var (type);
12177 	  gimple_omp_for_set_index (gforo, i, v);
12178 	  t = unshare_expr (gimple_omp_for_initial (gfor, i));
12179 	  gimple_omp_for_set_initial (gforo, i, t);
12180 	  gimple_omp_for_set_cond (gforo, i,
12181 				   gimple_omp_for_cond (gfor, i));
12182 	  t = unshare_expr (gimple_omp_for_final (gfor, i));
12183 	  gimple_omp_for_set_final (gforo, i, t);
12184 	  t = unshare_expr (gimple_omp_for_incr (gfor, i));
12185 	  gcc_assert (TREE_OPERAND (t, 0) == gimple_omp_for_index (gfor, i));
12186 	  TREE_OPERAND (t, 0) = v;
12187 	  gimple_omp_for_set_incr (gforo, i, t);
12188 	  t = build_omp_clause (input_location, OMP_CLAUSE_PRIVATE);
12189 	  OMP_CLAUSE_DECL (t) = v;
12190 	  OMP_CLAUSE_CHAIN (t) = gimple_omp_for_clauses (gforo);
12191 	  gimple_omp_for_set_clauses (gforo, t);
12192 	}
12193       gimplify_seq_add_stmt (pre_p, gforo);
12194     }
12195   else
12196     gimplify_seq_add_stmt (pre_p, gfor);
12197 
12198   if (TREE_CODE (orig_for_stmt) == OMP_FOR)
12199     {
12200       struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
12201       unsigned lastprivate_conditional = 0;
12202       while (ctx
12203 	     && (ctx->region_type == ORT_TARGET_DATA
12204 		 || ctx->region_type == ORT_TASKGROUP))
12205 	ctx = ctx->outer_context;
12206       if (ctx && (ctx->region_type & ORT_PARALLEL) != 0)
12207 	for (tree c = gimple_omp_for_clauses (gfor);
12208 	     c; c = OMP_CLAUSE_CHAIN (c))
12209 	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
12210 	      && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
12211 	    ++lastprivate_conditional;
12212       if (lastprivate_conditional)
12213 	{
12214 	  struct omp_for_data fd;
12215 	  omp_extract_for_data (gfor, &fd, NULL);
12216 	  tree type = build_array_type_nelts (unsigned_type_for (fd.iter_type),
12217 					      lastprivate_conditional);
12218 	  tree var = create_tmp_var_raw (type);
12219 	  tree c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE__CONDTEMP_);
12220 	  OMP_CLAUSE_DECL (c) = var;
12221 	  OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (gfor);
12222 	  gimple_omp_for_set_clauses (gfor, c);
12223 	  omp_add_variable (ctx, var, GOVD_CONDTEMP | GOVD_SEEN);
12224 	}
12225     }
12226   else if (TREE_CODE (orig_for_stmt) == OMP_SIMD)
12227     {
12228       unsigned lastprivate_conditional = 0;
12229       for (tree c = gimple_omp_for_clauses (gfor); c; c = OMP_CLAUSE_CHAIN (c))
12230 	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
12231 	    && OMP_CLAUSE_LASTPRIVATE_CONDITIONAL (c))
12232 	  ++lastprivate_conditional;
12233       if (lastprivate_conditional)
12234 	{
12235 	  struct omp_for_data fd;
12236 	  omp_extract_for_data (gfor, &fd, NULL);
12237 	  tree type = unsigned_type_for (fd.iter_type);
12238 	  while (lastprivate_conditional--)
12239 	    {
12240 	      tree c = build_omp_clause (UNKNOWN_LOCATION,
12241 					 OMP_CLAUSE__CONDTEMP_);
12242 	      OMP_CLAUSE_DECL (c) = create_tmp_var (type);
12243 	      OMP_CLAUSE_CHAIN (c) = gimple_omp_for_clauses (gfor);
12244 	      gimple_omp_for_set_clauses (gfor, c);
12245 	    }
12246 	}
12247     }
12248 
12249   if (ret != GS_ALL_DONE)
12250     return GS_ERROR;
12251   *expr_p = NULL_TREE;
12252   return GS_ALL_DONE;
12253 }
12254 
12255 /* Helper for gimplify_omp_loop, called through walk_tree.  */
12256 
12257 static tree
note_no_context_vars(tree * tp,int *,void * data)12258 note_no_context_vars (tree *tp, int *, void *data)
12259 {
12260   if (VAR_P (*tp)
12261       && DECL_CONTEXT (*tp) == NULL_TREE
12262       && !is_global_var (*tp))
12263     {
12264       vec<tree> *d = (vec<tree> *) data;
12265       d->safe_push (*tp);
12266       DECL_CONTEXT (*tp) = current_function_decl;
12267     }
12268   return NULL_TREE;
12269 }
12270 
12271 /* Gimplify the gross structure of an OMP_LOOP statement.  */
12272 
12273 static enum gimplify_status
gimplify_omp_loop(tree * expr_p,gimple_seq * pre_p)12274 gimplify_omp_loop (tree *expr_p, gimple_seq *pre_p)
12275 {
12276   tree for_stmt = *expr_p;
12277   tree clauses = OMP_FOR_CLAUSES (for_stmt);
12278   struct gimplify_omp_ctx *octx = gimplify_omp_ctxp;
12279   enum omp_clause_bind_kind kind = OMP_CLAUSE_BIND_THREAD;
12280   int i;
12281 
12282   /* If order is not present, the behavior is as if order(concurrent)
12283      appeared.  */
12284   tree order = omp_find_clause (clauses, OMP_CLAUSE_ORDER);
12285   if (order == NULL_TREE)
12286     {
12287       order = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_ORDER);
12288       OMP_CLAUSE_CHAIN (order) = clauses;
12289       OMP_FOR_CLAUSES (for_stmt) = clauses = order;
12290     }
12291 
12292   tree bind = omp_find_clause (clauses, OMP_CLAUSE_BIND);
12293   if (bind == NULL_TREE)
12294     {
12295       if (!flag_openmp) /* flag_openmp_simd */
12296 	;
12297       else if (octx && (octx->region_type & ORT_TEAMS) != 0)
12298 	kind = OMP_CLAUSE_BIND_TEAMS;
12299       else if (octx && (octx->region_type & ORT_PARALLEL) != 0)
12300 	kind = OMP_CLAUSE_BIND_PARALLEL;
12301       else
12302 	{
12303 	  for (; octx; octx = octx->outer_context)
12304 	    {
12305 	      if ((octx->region_type & ORT_ACC) != 0
12306 		  || octx->region_type == ORT_NONE
12307 		  || octx->region_type == ORT_IMPLICIT_TARGET)
12308 		continue;
12309 	      break;
12310 	    }
12311 	  if (octx == NULL && !in_omp_construct)
12312 	    error_at (EXPR_LOCATION (for_stmt),
12313 		      "%<bind%> clause not specified on a %<loop%> "
12314 		      "construct not nested inside another OpenMP construct");
12315 	}
12316       bind = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_BIND);
12317       OMP_CLAUSE_CHAIN (bind) = clauses;
12318       OMP_CLAUSE_BIND_KIND (bind) = kind;
12319       OMP_FOR_CLAUSES (for_stmt) = bind;
12320     }
12321   else
12322     switch (OMP_CLAUSE_BIND_KIND (bind))
12323       {
12324       case OMP_CLAUSE_BIND_THREAD:
12325 	break;
12326       case OMP_CLAUSE_BIND_PARALLEL:
12327 	if (!flag_openmp) /* flag_openmp_simd */
12328 	  {
12329 	    OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
12330 	    break;
12331 	  }
12332 	for (; octx; octx = octx->outer_context)
12333 	  if (octx->region_type == ORT_SIMD
12334 	      && omp_find_clause (octx->clauses, OMP_CLAUSE_BIND) == NULL_TREE)
12335 	    {
12336 	      error_at (EXPR_LOCATION (for_stmt),
12337 			"%<bind(parallel)%> on a %<loop%> construct nested "
12338 			"inside %<simd%> construct");
12339 	      OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
12340 	      break;
12341 	    }
12342 	kind = OMP_CLAUSE_BIND_PARALLEL;
12343 	break;
12344       case OMP_CLAUSE_BIND_TEAMS:
12345 	if (!flag_openmp) /* flag_openmp_simd */
12346 	  {
12347 	    OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
12348 	    break;
12349 	  }
12350 	if ((octx
12351 	     && octx->region_type != ORT_IMPLICIT_TARGET
12352 	     && octx->region_type != ORT_NONE
12353 	     && (octx->region_type & ORT_TEAMS) == 0)
12354 	    || in_omp_construct)
12355 	  {
12356 	    error_at (EXPR_LOCATION (for_stmt),
12357 		      "%<bind(teams)%> on a %<loop%> region not strictly "
12358 		      "nested inside of a %<teams%> region");
12359 	    OMP_CLAUSE_BIND_KIND (bind) = OMP_CLAUSE_BIND_THREAD;
12360 	    break;
12361 	  }
12362 	kind = OMP_CLAUSE_BIND_TEAMS;
12363 	break;
12364       default:
12365 	gcc_unreachable ();
12366       }
12367 
12368   for (tree *pc = &OMP_FOR_CLAUSES (for_stmt); *pc; )
12369     switch (OMP_CLAUSE_CODE (*pc))
12370       {
12371       case OMP_CLAUSE_REDUCTION:
12372 	if (OMP_CLAUSE_REDUCTION_INSCAN (*pc))
12373 	  {
12374 	    error_at (OMP_CLAUSE_LOCATION (*pc),
12375 		      "%<inscan%> %<reduction%> clause on "
12376 		      "%qs construct", "loop");
12377 	    OMP_CLAUSE_REDUCTION_INSCAN (*pc) = 0;
12378 	  }
12379 	if (OMP_CLAUSE_REDUCTION_TASK (*pc))
12380 	  {
12381 	    error_at (OMP_CLAUSE_LOCATION (*pc),
12382 		      "invalid %<task%> reduction modifier on construct "
12383 		      "other than %<parallel%>, %<for%> or %<sections%>");
12384 	    OMP_CLAUSE_REDUCTION_TASK (*pc) = 0;
12385 	  }
12386 	pc = &OMP_CLAUSE_CHAIN (*pc);
12387 	break;
12388       case OMP_CLAUSE_LASTPRIVATE:
12389 	for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
12390 	  {
12391 	    tree t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
12392 	    gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
12393 	    if (OMP_CLAUSE_DECL (*pc) == TREE_OPERAND (t, 0))
12394 	      break;
12395 	    if (OMP_FOR_ORIG_DECLS (for_stmt)
12396 		&& TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt),
12397 					    i)) == TREE_LIST
12398 		&& TREE_PURPOSE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt),
12399 					       i)))
12400 	      {
12401 		tree orig = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
12402 		if (OMP_CLAUSE_DECL (*pc) == TREE_PURPOSE (orig))
12403 		  break;
12404 	      }
12405 	  }
12406 	if (i == TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)))
12407 	  {
12408 	    error_at (OMP_CLAUSE_LOCATION (*pc),
12409 		      "%<lastprivate%> clause on a %<loop%> construct refers "
12410 		      "to a variable %qD which is not the loop iterator",
12411 		      OMP_CLAUSE_DECL (*pc));
12412 	    *pc = OMP_CLAUSE_CHAIN (*pc);
12413 	    break;
12414 	  }
12415 	pc = &OMP_CLAUSE_CHAIN (*pc);
12416 	break;
12417       default:
12418 	pc = &OMP_CLAUSE_CHAIN (*pc);
12419 	break;
12420     }
12421 
12422   TREE_SET_CODE (for_stmt, OMP_SIMD);
12423 
12424   int last;
12425   switch (kind)
12426     {
12427     case OMP_CLAUSE_BIND_THREAD: last = 0; break;
12428     case OMP_CLAUSE_BIND_PARALLEL: last = 1; break;
12429     case OMP_CLAUSE_BIND_TEAMS: last = 2; break;
12430     }
12431   for (int pass = 1; pass <= last; pass++)
12432     {
12433       if (pass == 2)
12434 	{
12435 	  tree bind = build3 (BIND_EXPR, void_type_node, NULL, NULL,
12436 			      make_node (BLOCK));
12437 	  append_to_statement_list (*expr_p, &BIND_EXPR_BODY (bind));
12438 	  *expr_p = make_node (OMP_PARALLEL);
12439 	  TREE_TYPE (*expr_p) = void_type_node;
12440 	  OMP_PARALLEL_BODY (*expr_p) = bind;
12441 	  OMP_PARALLEL_COMBINED (*expr_p) = 1;
12442 	  SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (for_stmt));
12443 	  tree *pc = &OMP_PARALLEL_CLAUSES (*expr_p);
12444 	  for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
12445 	    if (OMP_FOR_ORIG_DECLS (for_stmt)
12446 		&& (TREE_CODE (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i))
12447 		    == TREE_LIST))
12448 	      {
12449 		tree elt = TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i);
12450 		if (TREE_PURPOSE (elt) && TREE_VALUE (elt))
12451 		  {
12452 		    *pc = build_omp_clause (UNKNOWN_LOCATION,
12453 					    OMP_CLAUSE_FIRSTPRIVATE);
12454 		    OMP_CLAUSE_DECL (*pc) = TREE_VALUE (elt);
12455 		    pc = &OMP_CLAUSE_CHAIN (*pc);
12456 		  }
12457 	      }
12458 	}
12459       tree t = make_node (pass == 2 ? OMP_DISTRIBUTE : OMP_FOR);
12460       tree *pc = &OMP_FOR_CLAUSES (t);
12461       TREE_TYPE (t) = void_type_node;
12462       OMP_FOR_BODY (t) = *expr_p;
12463       SET_EXPR_LOCATION (t, EXPR_LOCATION (for_stmt));
12464       for (tree c = OMP_FOR_CLAUSES (for_stmt); c; c = OMP_CLAUSE_CHAIN (c))
12465 	switch (OMP_CLAUSE_CODE (c))
12466 	  {
12467 	  case OMP_CLAUSE_BIND:
12468 	  case OMP_CLAUSE_ORDER:
12469 	  case OMP_CLAUSE_COLLAPSE:
12470 	    *pc = copy_node (c);
12471 	    pc = &OMP_CLAUSE_CHAIN (*pc);
12472 	    break;
12473 	  case OMP_CLAUSE_PRIVATE:
12474 	  case OMP_CLAUSE_FIRSTPRIVATE:
12475 	    /* Only needed on innermost.  */
12476 	    break;
12477 	  case OMP_CLAUSE_LASTPRIVATE:
12478 	    if (OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c) && pass != last)
12479 	      {
12480 		*pc = build_omp_clause (OMP_CLAUSE_LOCATION (c),
12481 					OMP_CLAUSE_FIRSTPRIVATE);
12482 		OMP_CLAUSE_DECL (*pc) = OMP_CLAUSE_DECL (c);
12483 		lang_hooks.decls.omp_finish_clause (*pc, NULL);
12484 		pc = &OMP_CLAUSE_CHAIN (*pc);
12485 	      }
12486 	    *pc = copy_node (c);
12487 	    OMP_CLAUSE_LASTPRIVATE_STMT (*pc) = NULL_TREE;
12488 	    TREE_TYPE (*pc) = unshare_expr (TREE_TYPE (c));
12489 	    if (OMP_CLAUSE_LASTPRIVATE_LOOP_IV (c))
12490 	      {
12491 		if (pass != last)
12492 		  OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (*pc) = 1;
12493 		else
12494 		  lang_hooks.decls.omp_finish_clause (*pc, NULL);
12495 		OMP_CLAUSE_LASTPRIVATE_LOOP_IV (*pc) = 0;
12496 	      }
12497 	    pc = &OMP_CLAUSE_CHAIN (*pc);
12498 	    break;
12499 	  case OMP_CLAUSE_REDUCTION:
12500 	    *pc = copy_node (c);
12501 	    OMP_CLAUSE_DECL (*pc) = unshare_expr (OMP_CLAUSE_DECL (c));
12502 	    TREE_TYPE (*pc) = unshare_expr (TREE_TYPE (c));
12503 	    if (OMP_CLAUSE_REDUCTION_PLACEHOLDER (*pc))
12504 	      {
12505 		auto_vec<tree> no_context_vars;
12506 		int walk_subtrees = 0;
12507 		note_no_context_vars (&OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
12508 				      &walk_subtrees, &no_context_vars);
12509 		if (tree p = OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c))
12510 		  note_no_context_vars (&p, &walk_subtrees, &no_context_vars);
12511 		walk_tree_without_duplicates (&OMP_CLAUSE_REDUCTION_INIT (c),
12512 					      note_no_context_vars,
12513 					      &no_context_vars);
12514 		walk_tree_without_duplicates (&OMP_CLAUSE_REDUCTION_MERGE (c),
12515 					      note_no_context_vars,
12516 					      &no_context_vars);
12517 
12518 		OMP_CLAUSE_REDUCTION_PLACEHOLDER (*pc)
12519 		  = copy_node (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c));
12520 		if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc))
12521 		  OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc)
12522 		    = copy_node (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c));
12523 
12524 		hash_map<tree, tree> decl_map;
12525 		decl_map.put (OMP_CLAUSE_DECL (c), OMP_CLAUSE_DECL (c));
12526 		decl_map.put (OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
12527 			      OMP_CLAUSE_REDUCTION_PLACEHOLDER (*pc));
12528 		if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc))
12529 		  decl_map.put (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
12530 				OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (*pc));
12531 
12532 		copy_body_data id;
12533 		memset (&id, 0, sizeof (id));
12534 		id.src_fn = current_function_decl;
12535 		id.dst_fn = current_function_decl;
12536 		id.src_cfun = cfun;
12537 		id.decl_map = &decl_map;
12538 		id.copy_decl = copy_decl_no_change;
12539 		id.transform_call_graph_edges = CB_CGE_DUPLICATE;
12540 		id.transform_new_cfg = true;
12541 		id.transform_return_to_modify = false;
12542 		id.transform_lang_insert_block = NULL;
12543 		id.eh_lp_nr = 0;
12544 		walk_tree (&OMP_CLAUSE_REDUCTION_INIT (*pc), copy_tree_body_r,
12545 			   &id, NULL);
12546 		walk_tree (&OMP_CLAUSE_REDUCTION_MERGE (*pc), copy_tree_body_r,
12547 			   &id, NULL);
12548 
12549 		tree d;
12550 		unsigned i;
12551 		FOR_EACH_VEC_ELT (no_context_vars, i, d)
12552 		  {
12553 		    DECL_CONTEXT (d) = NULL_TREE;
12554 		    DECL_CONTEXT (*decl_map.get (d)) = NULL_TREE;
12555 		  }
12556 	      }
12557 	    else
12558 	      {
12559 		OMP_CLAUSE_REDUCTION_INIT (*pc)
12560 		  = unshare_expr (OMP_CLAUSE_REDUCTION_INIT (c));
12561 		OMP_CLAUSE_REDUCTION_MERGE (*pc)
12562 		  = unshare_expr (OMP_CLAUSE_REDUCTION_MERGE (c));
12563 	      }
12564 	    pc = &OMP_CLAUSE_CHAIN (*pc);
12565 	    break;
12566 	  default:
12567 	    gcc_unreachable ();
12568 	  }
12569       *pc = NULL_TREE;
12570       *expr_p = t;
12571     }
12572   return gimplify_omp_for (expr_p, pre_p);
12573 }
12574 
12575 
12576 /* Helper function of optimize_target_teams, find OMP_TEAMS inside
12577    of OMP_TARGET's body.  */
12578 
12579 static tree
find_omp_teams(tree * tp,int * walk_subtrees,void *)12580 find_omp_teams (tree *tp, int *walk_subtrees, void *)
12581 {
12582   *walk_subtrees = 0;
12583   switch (TREE_CODE (*tp))
12584     {
12585     case OMP_TEAMS:
12586       return *tp;
12587     case BIND_EXPR:
12588     case STATEMENT_LIST:
12589       *walk_subtrees = 1;
12590       break;
12591     default:
12592       break;
12593     }
12594   return NULL_TREE;
12595 }
12596 
12597 /* Helper function of optimize_target_teams, determine if the expression
12598    can be computed safely before the target construct on the host.  */
12599 
static tree
computable_teams_clause (tree *tp, int *walk_subtrees, void *)
{
  splay_tree_node n;

  /* Types themselves never make the expression uncomputable.  */
  if (TYPE_P (*tp))
    {
      *walk_subtrees = 0;
      return NULL_TREE;
    }
  switch (TREE_CODE (*tp))
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      *walk_subtrees = 0;
      /* Reject decls whose host value may differ from the device value
	 or whose evaluation could have side effects.  */
      if (error_operand_p (*tp)
	  || !INTEGRAL_TYPE_P (TREE_TYPE (*tp))
	  || DECL_HAS_VALUE_EXPR_P (*tp)
	  || DECL_THREAD_LOCAL_P (*tp)
	  || TREE_SIDE_EFFECTS (*tp)
	  || TREE_THIS_VOLATILE (*tp))
	return *tp;
      /* "omp declare target" globals live on the device; their host
	 copy is not usable here.  */
      if (is_global_var (*tp)
	  && (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (*tp))
	      || lookup_attribute ("omp declare target link",
				   DECL_ATTRIBUTES (*tp))))
	return *tp;
      /* Locals of the current function not seen in any BIND_EXPR are
	 temporaries created during gimplification; reject them.  */
      if (VAR_P (*tp)
	  && !DECL_SEEN_IN_BIND_EXPR_P (*tp)
	  && !is_global_var (*tp)
	  && decl_function_context (*tp) == current_function_decl)
	return *tp;
      /* Consult the target context: the decl is computable on the host
	 only if it is firstprivate (explicitly or by defaultmap) or
	 mapped with always,to / always,tofrom semantics.  */
      n = splay_tree_lookup (gimplify_omp_ctxp->variables,
			     (splay_tree_key) *tp);
      if (n == NULL)
	{
	  if (gimplify_omp_ctxp->defaultmap[GDMK_SCALAR] & GOVD_FIRSTPRIVATE)
	    return NULL_TREE;
	  return *tp;
	}
      else if (n->value & GOVD_LOCAL)
	return *tp;
      else if (n->value & GOVD_FIRSTPRIVATE)
	return NULL_TREE;
      else if ((n->value & (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
	       == (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
	return NULL_TREE;
      return *tp;
    case INTEGER_CST:
      /* Integral constants are trivially computable.  */
      if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
	return *tp;
      return NULL_TREE;
    case TARGET_EXPR:
      /* Accept only a bare TARGET_EXPR slot; anything with an
	 initializer needs evaluation and is rejected.  */
      if (TARGET_EXPR_INITIAL (*tp)
	  || TREE_CODE (TARGET_EXPR_SLOT (*tp)) != VAR_DECL)
	return *tp;
      return computable_teams_clause (&TARGET_EXPR_SLOT (*tp),
				      walk_subtrees, NULL);
    /* Allow some reasonable subset of integral arithmetics.  */
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case RDIV_EXPR:
    case EXACT_DIV_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_AND_EXPR:
    case NEGATE_EXPR:
    case ABS_EXPR:
    case BIT_NOT_EXPR:
    case NON_LVALUE_EXPR:
    CASE_CONVERT:
      if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
	return *tp;
      return NULL_TREE;
    /* And disallow anything else, except for comparisons.  */
    default:
      if (COMPARISON_CLASS_P (*tp))
	return NULL_TREE;
      return *tp;
    }
}
12695 
12696 /* Try to determine if the num_teams and/or thread_limit expressions
12697    can have their values determined already before entering the
12698    target construct.
12699    INTEGER_CSTs trivially are,
12700    integral decls that are firstprivate (explicitly or implicitly)
12701    or explicitly map(always, to:) or map(always, tofrom:) on the target
12702    region too, and expressions involving simple arithmetics on those
12703    too, function calls are not ok, dereferencing something neither etc.
12704    Add NUM_TEAMS and THREAD_LIMIT clauses to the OMP_CLAUSES of
12705    EXPR based on what we find:
12706    0 stands for clause not specified at all, use implementation default
12707    -1 stands for value that can't be determined easily before entering
12708       the target construct.
12709    If teams construct is not present at all, use 1 for num_teams
12710    and 0 for thread_limit (only one team is involved, and the thread
12711    limit is implementation defined.  */
12712 
static void
optimize_target_teams (tree target, gimple_seq *pre_p)
{
  tree body = OMP_BODY (target);
  tree teams = walk_tree (&body, find_omp_teams, NULL, NULL);
  /* 0 == clause not specified, see the function comment above.  */
  tree num_teams = integer_zero_node;
  tree thread_limit = integer_zero_node;
  location_t num_teams_loc = EXPR_LOCATION (target);
  location_t thread_limit_loc = EXPR_LOCATION (target);
  tree c, *p, expr;
  struct gimplify_omp_ctx *target_ctx = gimplify_omp_ctxp;

  if (teams == NULL_TREE)
    /* No teams construct inside the target: a single team.  */
    num_teams = integer_one_node;
  else
    for (c = OMP_TEAMS_CLAUSES (teams); c; c = OMP_CLAUSE_CHAIN (c))
      {
	/* P points at whichever of num_teams/thread_limit this clause
	   determines; skip unrelated clauses.  */
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_NUM_TEAMS)
	  {
	    p = &num_teams;
	    num_teams_loc = OMP_CLAUSE_LOCATION (c);
	  }
	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREAD_LIMIT)
	  {
	    p = &thread_limit;
	    thread_limit_loc = OMP_CLAUSE_LOCATION (c);
	  }
	else
	  continue;
	expr = OMP_CLAUSE_OPERAND (c, 0);
	if (TREE_CODE (expr) == INTEGER_CST)
	  {
	    *p = expr;
	    continue;
	  }
	/* -1 == value not determinable before entering the target.  */
	if (walk_tree (&expr, computable_teams_clause, NULL, NULL))
	  {
	    *p = integer_minus_one_node;
	    continue;
	  }
	*p = expr;
	/* Gimplify the expression in the context enclosing the target,
	   since it is evaluated on the host; restore the target context
	   afterwards.  */
	gimplify_omp_ctxp = gimplify_omp_ctxp->outer_context;
	if (gimplify_expr (p, pre_p, NULL, is_gimple_val, fb_rvalue, false)
	    == GS_ERROR)
	  {
	    gimplify_omp_ctxp = target_ctx;
	    *p = integer_minus_one_node;
	    continue;
	  }
	gimplify_omp_ctxp = target_ctx;
	if (!DECL_P (expr) && TREE_CODE (expr) != TARGET_EXPR)
	  OMP_CLAUSE_OPERAND (c, 0) = *p;
      }
  /* Record the results as clauses on the target construct itself.  */
  c = build_omp_clause (thread_limit_loc, OMP_CLAUSE_THREAD_LIMIT);
  OMP_CLAUSE_THREAD_LIMIT_EXPR (c) = thread_limit;
  OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
  OMP_TARGET_CLAUSES (target) = c;
  c = build_omp_clause (num_teams_loc, OMP_CLAUSE_NUM_TEAMS);
  OMP_CLAUSE_NUM_TEAMS_EXPR (c) = num_teams;
  OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
  OMP_TARGET_CLAUSES (target) = c;
}
12775 
12776 /* Gimplify the gross structure of several OMP constructs.  */
12777 
static void
gimplify_omp_workshare (tree *expr_p, gimple_seq *pre_p)
{
  tree expr = *expr_p;
  gimple *stmt;
  gimple_seq body = NULL;
  enum omp_region_type ort;

  /* Map the tree code to the region type used for clause scanning.  */
  switch (TREE_CODE (expr))
    {
    case OMP_SECTIONS:
    case OMP_SINGLE:
      ort = ORT_WORKSHARE;
      break;
    case OMP_TARGET:
      ort = OMP_TARGET_COMBINED (expr) ? ORT_COMBINED_TARGET : ORT_TARGET;
      break;
    case OACC_KERNELS:
      ort = ORT_ACC_KERNELS;
      break;
    case OACC_PARALLEL:
      ort = ORT_ACC_PARALLEL;
      break;
    case OACC_SERIAL:
      ort = ORT_ACC_SERIAL;
      break;
    case OACC_DATA:
      ort = ORT_ACC_DATA;
      break;
    case OMP_TARGET_DATA:
      ort = ORT_TARGET_DATA;
      break;
    case OMP_TEAMS:
      ort = OMP_TEAMS_COMBINED (expr) ? ORT_COMBINED_TEAMS : ORT_TEAMS;
      /* Teams not nested in an explicit target are host teams.  */
      if (gimplify_omp_ctxp == NULL
	  || gimplify_omp_ctxp->region_type == ORT_IMPLICIT_TARGET)
	ort = (enum omp_region_type) (ort | ORT_HOST_TEAMS);
      break;
    case OACC_HOST_DATA:
      ort = ORT_ACC_HOST_DATA;
      break;
    default:
      gcc_unreachable ();
    }

  /* Entering a non-OpenACC construct resets in_omp_construct for the
     body; restore it below.  */
  bool save_in_omp_construct = in_omp_construct;
  if ((ort & ORT_ACC) == 0)
    in_omp_construct = false;
  gimplify_scan_omp_clauses (&OMP_CLAUSES (expr), pre_p, ort,
			     TREE_CODE (expr));
  if (TREE_CODE (expr) == OMP_TARGET)
    optimize_target_teams (expr, pre_p);
  if ((ort & (ORT_TARGET | ORT_TARGET_DATA)) != 0
      || (ort & ORT_HOST_TEAMS) == ORT_HOST_TEAMS)
    {
      /* Target-like regions and host teams get their body gimplified in
	 a fresh gimplify context.  */
      push_gimplify_context ();
      gimple *g = gimplify_and_return_first (OMP_BODY (expr), &body);
      if (gimple_code (g) == GIMPLE_BIND)
	pop_gimplify_context (g);
      else
	pop_gimplify_context (NULL);
      if ((ort & ORT_TARGET_DATA) != 0)
	{
	  /* Wrap data-region bodies in a try/finally that calls the
	     matching runtime "end data" routine on every exit path.  */
	  enum built_in_function end_ix;
	  switch (TREE_CODE (expr))
	    {
	    case OACC_DATA:
	    case OACC_HOST_DATA:
	      end_ix = BUILT_IN_GOACC_DATA_END;
	      break;
	    case OMP_TARGET_DATA:
	      end_ix = BUILT_IN_GOMP_TARGET_END_DATA;
	      break;
	    default:
	      gcc_unreachable ();
	    }
	  tree fn = builtin_decl_explicit (end_ix);
	  g = gimple_build_call (fn, 0);
	  gimple_seq cleanup = NULL;
	  gimple_seq_add_stmt (&cleanup, g);
	  g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
	  body = NULL;
	  gimple_seq_add_stmt (&body, g);
	}
    }
  else
    gimplify_and_add (OMP_BODY (expr), &body);
  gimplify_adjust_omp_clauses (pre_p, body, &OMP_CLAUSES (expr),
			       TREE_CODE (expr));
  in_omp_construct = save_in_omp_construct;

  /* Build the GIMPLE statement corresponding to the construct.  */
  switch (TREE_CODE (expr))
    {
    case OACC_DATA:
      stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_DATA,
				      OMP_CLAUSES (expr));
      break;
    case OACC_HOST_DATA:
      if (omp_find_clause (OMP_CLAUSES (expr), OMP_CLAUSE_IF_PRESENT))
	{
	  /* if_present applies to all use_device_ptr clauses.  */
	  for (tree c = OMP_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
	    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_USE_DEVICE_PTR)
	      OMP_CLAUSE_USE_DEVICE_PTR_IF_PRESENT (c) = 1;
	}

      stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_HOST_DATA,
				      OMP_CLAUSES (expr));
      break;
    case OACC_KERNELS:
      stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_KERNELS,
				      OMP_CLAUSES (expr));
      break;
    case OACC_PARALLEL:
      stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_PARALLEL,
				      OMP_CLAUSES (expr));
      break;
    case OACC_SERIAL:
      stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_SERIAL,
				      OMP_CLAUSES (expr));
      break;
    case OMP_SECTIONS:
      stmt = gimple_build_omp_sections (body, OMP_CLAUSES (expr));
      break;
    case OMP_SINGLE:
      stmt = gimple_build_omp_single (body, OMP_CLAUSES (expr));
      break;
    case OMP_TARGET:
      stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_REGION,
				      OMP_CLAUSES (expr));
      break;
    case OMP_TARGET_DATA:
      /* Put use_device_{ptr,addr} clauses last, as map clauses are supposed
	 to be evaluated before the use_device_{ptr,addr} clauses if they
	 refer to the same variables.  */
      {
	tree use_device_clauses;
	tree *pc, *uc = &use_device_clauses;
	/* Unlink the use_device clauses into a separate chain, then
	   append that chain after the remaining clauses.  */
	for (pc = &OMP_CLAUSES (expr); *pc; )
	  if (OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_USE_DEVICE_PTR
	      || OMP_CLAUSE_CODE (*pc) == OMP_CLAUSE_USE_DEVICE_ADDR)
	    {
	      *uc = *pc;
	      *pc = OMP_CLAUSE_CHAIN (*pc);
	      uc = &OMP_CLAUSE_CHAIN (*uc);
	    }
	  else
	    pc = &OMP_CLAUSE_CHAIN (*pc);
	*uc = NULL_TREE;
	*pc = use_device_clauses;
	stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_DATA,
					OMP_CLAUSES (expr));
      }
      break;
    case OMP_TEAMS:
      stmt = gimple_build_omp_teams (body, OMP_CLAUSES (expr));
      if ((ort & ORT_HOST_TEAMS) == ORT_HOST_TEAMS)
	gimple_omp_teams_set_host (as_a <gomp_teams *> (stmt), true);
      break;
    default:
      gcc_unreachable ();
    }

  gimplify_seq_add_stmt (pre_p, stmt);
  *expr_p = NULL_TREE;
}
12943 
12944 /* Gimplify the gross structure of OpenACC enter/exit data, update, and OpenMP
12945    target update constructs.  */
12946 
static void
gimplify_omp_target_update (tree *expr_p, gimple_seq *pre_p)
{
  tree expr = *expr_p;
  int kind;
  gomp_target *stmt;
  enum omp_region_type ort = ORT_WORKSHARE;

  /* Select the GIMPLE target kind and, for OpenACC, the ACC region type
     used while scanning clauses.  */
  switch (TREE_CODE (expr))
    {
    case OACC_ENTER_DATA:
    case OACC_EXIT_DATA:
      kind = GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA;
      ort = ORT_ACC;
      break;
    case OACC_UPDATE:
      kind = GF_OMP_TARGET_KIND_OACC_UPDATE;
      ort = ORT_ACC;
      break;
    case OMP_TARGET_UPDATE:
      kind = GF_OMP_TARGET_KIND_UPDATE;
      break;
    case OMP_TARGET_ENTER_DATA:
      kind = GF_OMP_TARGET_KIND_ENTER_DATA;
      break;
    case OMP_TARGET_EXIT_DATA:
      kind = GF_OMP_TARGET_KIND_EXIT_DATA;
      break;
    default:
      gcc_unreachable ();
    }
  gimplify_scan_omp_clauses (&OMP_STANDALONE_CLAUSES (expr), pre_p,
			     ort, TREE_CODE (expr));
  gimplify_adjust_omp_clauses (pre_p, NULL, &OMP_STANDALONE_CLAUSES (expr),
			       TREE_CODE (expr));
  if (TREE_CODE (expr) == OACC_UPDATE
      && omp_find_clause (OMP_STANDALONE_CLAUSES (expr),
			  OMP_CLAUSE_IF_PRESENT))
    {
      /* The runtime uses GOMP_MAP_{TO,FROM} to denote the if_present
	 clause.  */
      for (tree c = OMP_STANDALONE_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP)
	  switch (OMP_CLAUSE_MAP_KIND (c))
	    {
	    case GOMP_MAP_FORCE_TO:
	      OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TO);
	      break;
	    case GOMP_MAP_FORCE_FROM:
	      OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FROM);
	      break;
	    default:
	      break;
	    }
    }
  else if (TREE_CODE (expr) == OACC_EXIT_DATA
	   && omp_find_clause (OMP_STANDALONE_CLAUSES (expr),
			       OMP_CLAUSE_FINALIZE))
    {
      /* Use GOMP_MAP_DELETE/GOMP_MAP_FORCE_FROM to denote "finalize"
	 semantics.  */
      /* have_clause tracks whether a rewritten clause precedes, so the
	 assertion below can verify POINTER/TO_PSET follow one.  */
      bool have_clause = false;
      for (tree c = OMP_STANDALONE_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP)
	  switch (OMP_CLAUSE_MAP_KIND (c))
	    {
	    case GOMP_MAP_FROM:
	      OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FORCE_FROM);
	      have_clause = true;
	      break;
	    case GOMP_MAP_RELEASE:
	      OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_DELETE);
	      have_clause = true;
	      break;
	    case GOMP_MAP_POINTER:
	    case GOMP_MAP_TO_PSET:
	      /* TODO PR92929: we may see these here, but they'll always follow
		 one of the clauses above, and will be handled by libgomp as
		 one group, so no handling required here.  */
	      gcc_assert (have_clause);
	      break;
	    case GOMP_MAP_DETACH:
	      OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_FORCE_DETACH);
	      have_clause = false;
	      break;
	    case GOMP_MAP_STRUCT:
	      have_clause = false;
	      break;
	    default:
	      gcc_unreachable ();
	    }
    }
  /* These constructs have no body; build a bodyless target stmt.  */
  stmt = gimple_build_omp_target (NULL, kind, OMP_STANDALONE_CLAUSES (expr));

  gimplify_seq_add_stmt (pre_p, stmt);
  *expr_p = NULL_TREE;
}
13044 
13045 /* A subroutine of gimplify_omp_atomic.  The front end is supposed to have
13046    stabilized the lhs of the atomic operation as *ADDR.  Return true if
13047    EXPR is this stabilized form.  */
13048 
13049 static bool
goa_lhs_expr_p(tree expr,tree addr)13050 goa_lhs_expr_p (tree expr, tree addr)
13051 {
13052   /* Also include casts to other type variants.  The C front end is fond
13053      of adding these for e.g. volatile variables.  This is like
13054      STRIP_TYPE_NOPS but includes the main variant lookup.  */
13055   STRIP_USELESS_TYPE_CONVERSION (expr);
13056 
13057   if (TREE_CODE (expr) == INDIRECT_REF)
13058     {
13059       expr = TREE_OPERAND (expr, 0);
13060       while (expr != addr
13061 	     && (CONVERT_EXPR_P (expr)
13062 		 || TREE_CODE (expr) == NON_LVALUE_EXPR)
13063 	     && TREE_CODE (expr) == TREE_CODE (addr)
13064 	     && types_compatible_p (TREE_TYPE (expr), TREE_TYPE (addr)))
13065 	{
13066 	  expr = TREE_OPERAND (expr, 0);
13067 	  addr = TREE_OPERAND (addr, 0);
13068 	}
13069       if (expr == addr)
13070 	return true;
13071       return (TREE_CODE (addr) == ADDR_EXPR
13072 	      && TREE_CODE (expr) == ADDR_EXPR
13073 	      && TREE_OPERAND (addr, 0) == TREE_OPERAND (expr, 0));
13074     }
13075   if (TREE_CODE (addr) == ADDR_EXPR && expr == TREE_OPERAND (addr, 0))
13076     return true;
13077   return false;
13078 }
13079 
13080 /* Walk *EXPR_P and replace appearances of *LHS_ADDR with LHS_VAR.  If an
13081    expression does not involve the lhs, evaluate it into a temporary.
13082    Return 1 if the lhs appeared as a subexpression, 0 if it did not,
13083    or -1 if an error was encountered.  */
13084 
/* Walk *EXPR_P and replace appearances of *LHS_ADDR with LHS_VAR.  If an
   expression does not involve the lhs, evaluate it into a temporary.
   Return 1 if the lhs appeared as a subexpression, 0 if it did not,
   or -1 if an error was encountered.  */

static int
goa_stabilize_expr (tree *expr_p, gimple_seq *pre_p, tree lhs_addr,
		    tree lhs_var)
{
  tree expr = *expr_p;
  int saw_lhs;

  /* If this subexpression is exactly the stabilized lhs (*LHS_ADDR),
     substitute the temporary LHS_VAR that holds the atomically loaded
     value.  */
  if (goa_lhs_expr_p (expr, lhs_addr))
    {
      *expr_p = lhs_var;
      return 1;
    }
  /* A value that is already a GIMPLE value cannot contain the lhs and
     needs no further stabilization.  */
  if (is_gimple_val (expr))
    return 0;

  /* Otherwise recurse into the operands, OR-ing together whether the
     lhs was found anywhere below.  Note -1 (error) from a child also
     forces the result negative via the bitwise OR.  */
  saw_lhs = 0;
  switch (TREE_CODE_CLASS (TREE_CODE (expr)))
    {
    case tcc_binary:
    case tcc_comparison:
      saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p, lhs_addr,
				     lhs_var);
      /* FALLTHRU */
    case tcc_unary:
      saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p, lhs_addr,
				     lhs_var);
      break;
    case tcc_expression:
      switch (TREE_CODE (expr))
	{
	case TRUTH_ANDIF_EXPR:
	case TRUTH_ORIF_EXPR:
	case TRUTH_AND_EXPR:
	case TRUTH_OR_EXPR:
	case TRUTH_XOR_EXPR:
	case BIT_INSERT_EXPR:
	  saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
					 lhs_addr, lhs_var);
	  /* FALLTHRU */
	case TRUTH_NOT_EXPR:
	  saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
					 lhs_addr, lhs_var);
	  break;
	case COMPOUND_EXPR:
	  /* Break out any preevaluations from cp_build_modify_expr.  */
	  for (; TREE_CODE (expr) == COMPOUND_EXPR;
	       expr = TREE_OPERAND (expr, 1))
	    gimplify_stmt (&TREE_OPERAND (expr, 0), pre_p);
	  *expr_p = expr;
	  /* Restart on the stripped expression; its result (including
	     any lhs occurrence) is what matters.  */
	  return goa_stabilize_expr (expr_p, pre_p, lhs_addr, lhs_var);
	default:
	  break;
	}
      break;
    case tcc_reference:
      /* Only BIT_FIELD_REF is looked through; other references are
	 left to the generic gimplification below.  */
      if (TREE_CODE (expr) == BIT_FIELD_REF)
	saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
				       lhs_addr, lhs_var);
      break;
    default:
      break;
    }

  /* Subtrees that do not mention the lhs are evaluated up front into a
     temporary, so the atomic region only computes with LHS_VAR.  */
  if (saw_lhs == 0)
    {
      enum gimplify_status gs;
      gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_val, fb_rvalue);
      if (gs != GS_ALL_DONE)
	saw_lhs = -1;
    }

  return saw_lhs;
}
13158 
13159 /* Gimplify an OMP_ATOMIC statement.  */
13160 
/* Gimplify an OMP_ATOMIC statement.  Emits a GIMPLE_OMP_ATOMIC_LOAD of
   the stabilized address into a fresh temporary, then a
   GIMPLE_OMP_ATOMIC_STORE of the (stabilized) rhs, and sets *EXPR_P to
   the captured value for the OMP_ATOMIC_READ / *_CAPTURE_* variants.  */

static enum gimplify_status
gimplify_omp_atomic (tree *expr_p, gimple_seq *pre_p)
{
  /* Operand 0 is always the stabilized address of the lhs; operand 1 is
     the rhs, absent for an atomic read.  */
  tree addr = TREE_OPERAND (*expr_p, 0);
  tree rhs = TREE_CODE (*expr_p) == OMP_ATOMIC_READ
	     ? NULL : TREE_OPERAND (*expr_p, 1);
  tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
  tree tmp_load;
  gomp_atomic_load *loadstmt;
  gomp_atomic_store *storestmt;

  /* TMP_LOAD will receive the atomically loaded old value; replace all
     occurrences of the lhs inside RHS with it.  */
  tmp_load = create_tmp_reg (type);
  if (rhs && goa_stabilize_expr (&rhs, pre_p, addr, tmp_load) < 0)
    return GS_ERROR;

  if (gimplify_expr (&addr, pre_p, NULL, is_gimple_val, fb_rvalue)
      != GS_ALL_DONE)
    return GS_ERROR;

  loadstmt = gimple_build_omp_atomic_load (tmp_load, addr,
					   OMP_ATOMIC_MEMORY_ORDER (*expr_p));
  gimplify_seq_add_stmt (pre_p, loadstmt);
  if (rhs)
    {
      /* BIT_INSERT_EXPR is not valid for non-integral bitfield
	 representatives.  Use BIT_FIELD_REF on the lhs instead.  */
      if (TREE_CODE (rhs) == BIT_INSERT_EXPR
	  && !INTEGRAL_TYPE_P (TREE_TYPE (tmp_load)))
	{
	  tree bitpos = TREE_OPERAND (rhs, 2);
	  tree op1 = TREE_OPERAND (rhs, 1);
	  tree bitsize;
	  tree tmp_store = tmp_load;
	  /* For capture-old the original loaded value must survive, so
	     do the bitfield store into a copy.  */
	  if (TREE_CODE (*expr_p) == OMP_ATOMIC_CAPTURE_OLD)
	    tmp_store = get_initialized_tmp_var (tmp_load, pre_p);
	  if (INTEGRAL_TYPE_P (TREE_TYPE (op1)))
	    bitsize = bitsize_int (TYPE_PRECISION (TREE_TYPE (op1)));
	  else
	    bitsize = TYPE_SIZE (TREE_TYPE (op1));
	  gcc_assert (TREE_OPERAND (rhs, 0) == tmp_load);
	  /* Emit BIT_FIELD_REF (tmp_store, bitsize, bitpos) = op1
	     instead of the BIT_INSERT_EXPR.  */
	  tree t = build2_loc (EXPR_LOCATION (rhs),
			       MODIFY_EXPR, void_type_node,
			       build3_loc (EXPR_LOCATION (rhs), BIT_FIELD_REF,
					   TREE_TYPE (op1), tmp_store, bitsize,
					   bitpos), op1);
	  gimplify_and_add (t, pre_p);
	  rhs = tmp_store;
	}
      if (gimplify_expr (&rhs, pre_p, NULL, is_gimple_val, fb_rvalue)
	  != GS_ALL_DONE)
	return GS_ERROR;
    }

  /* An atomic read stores back the value it just loaded.  */
  if (TREE_CODE (*expr_p) == OMP_ATOMIC_READ)
    rhs = tmp_load;
  storestmt
    = gimple_build_omp_atomic_store (rhs, OMP_ATOMIC_MEMORY_ORDER (*expr_p));
  gimplify_seq_add_stmt (pre_p, storestmt);
  /* Decide which value, if any, the construct produces, and mark the
     corresponding statement as needing it.  */
  switch (TREE_CODE (*expr_p))
    {
    case OMP_ATOMIC_READ:
    case OMP_ATOMIC_CAPTURE_OLD:
      *expr_p = tmp_load;
      gimple_omp_atomic_set_need_value (loadstmt);
      break;
    case OMP_ATOMIC_CAPTURE_NEW:
      *expr_p = rhs;
      gimple_omp_atomic_set_need_value (storestmt);
      break;
    default:
      *expr_p = NULL;
      break;
    }

  return GS_ALL_DONE;
}
13237 
13238 /* Gimplify a TRANSACTION_EXPR.  This involves gimplification of the
13239    body, and adding some EH bits.  */
13240 
/* Gimplify a TRANSACTION_EXPR.  This involves gimplification of the
   body, and adding some EH bits.  Builds a GTRANSACTION statement whose
   subcode reflects the outer/relaxed flags of the original EXPR.  */

static enum gimplify_status
gimplify_transaction (tree *expr_p, gimple_seq *pre_p)
{
  tree expr = *expr_p, temp, tbody = TRANSACTION_EXPR_BODY (expr);
  gimple *body_stmt;
  gtransaction *trans_stmt;
  gimple_seq body = NULL;
  int subcode = 0;

  /* Wrap the transaction body in a BIND_EXPR so we have a context
     where to put decls for OMP.  */
  if (TREE_CODE (tbody) != BIND_EXPR)
    {
      tree bind = build3 (BIND_EXPR, void_type_node, NULL, tbody, NULL);
      TREE_SIDE_EFFECTS (bind) = 1;
      SET_EXPR_LOCATION (bind, EXPR_LOCATION (tbody));
      TRANSACTION_EXPR_BODY (expr) = bind;
    }

  /* Gimplify the body inside its own context; voidify_wrapper_expr
     gives us a temporary if the transaction produces a value.  */
  push_gimplify_context ();
  temp = voidify_wrapper_expr (*expr_p, NULL);

  body_stmt = gimplify_and_return_first (TRANSACTION_EXPR_BODY (expr), &body);
  pop_gimplify_context (body_stmt);

  trans_stmt = gimple_build_transaction (body);
  if (TRANSACTION_EXPR_OUTER (expr))
    subcode = GTMA_IS_OUTER;
  else if (TRANSACTION_EXPR_RELAXED (expr))
    subcode = GTMA_IS_RELAXED;
  gimple_transaction_set_subcode (trans_stmt, subcode);

  gimplify_seq_add_stmt (pre_p, trans_stmt);

  /* If the wrapper produced a value temporary, it still needs
     gimplification; otherwise we are done.  */
  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  *expr_p = NULL_TREE;
  return GS_ALL_DONE;
}
13284 
13285 /* Gimplify an OMP_ORDERED construct.  EXPR is the tree version.  BODY
13286    is the OMP_BODY of the original EXPR (which has already been
13287    gimplified so it's not present in the EXPR).
13288 
13289    Return the gimplified GIMPLE_OMP_ORDERED tuple.  */
13290 
/* Gimplify an OMP_ORDERED construct.  EXPR is the tree version.  BODY
   is the OMP_BODY of the original EXPR (which has already been
   gimplified so it's not present in the EXPR).

   Validates depend(sink:...)/depend(source) clauses against the
   enclosing loop's iteration variables recorded in gimplify_omp_ctxp,
   emitting diagnostics for mismatches.

   Return the gimplified GIMPLE_OMP_ORDERED tuple, or a GIMPLE_NOP if
   any diagnostic was issued.  */

static gimple *
gimplify_omp_ordered (tree expr, gimple_seq body)
{
  tree c, decls;
  int failures = 0;
  unsigned int i;
  tree source_c = NULL_TREE;
  tree sink_c = NULL_TREE;

  if (gimplify_omp_ctxp)
    {
      for (c = OMP_ORDERED_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
	/* An empty loop_iter_var vector means we are not nested in a
	   loop with an ordered(n) clause, so sink/source are invalid.  */
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
	    && gimplify_omp_ctxp->loop_iter_var.is_empty ()
	    && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK
		|| OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE))
	  {
	    error_at (OMP_CLAUSE_LOCATION (c),
		      "%<ordered%> construct with %<depend%> clause must be "
		      "closely nested inside a loop with %<ordered%> clause "
		      "with a parameter");
	    failures++;
	  }
	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
		 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
	  {
	    bool fail = false;
	    /* loop_iter_var stores pairs: element 2*i is the original
	       iteration variable, 2*i+1 its gimplified replacement.
	       Check each sink operand against its loop variable and
	       substitute the replacement on match.  */
	    for (decls = OMP_CLAUSE_DECL (c), i = 0;
		 decls && TREE_CODE (decls) == TREE_LIST;
		 decls = TREE_CHAIN (decls), ++i)
	      if (i >= gimplify_omp_ctxp->loop_iter_var.length () / 2)
		continue;
	      else if (TREE_VALUE (decls)
		       != gimplify_omp_ctxp->loop_iter_var[2 * i])
		{
		  error_at (OMP_CLAUSE_LOCATION (c),
			    "variable %qE is not an iteration "
			    "of outermost loop %d, expected %qE",
			    TREE_VALUE (decls), i + 1,
			    gimplify_omp_ctxp->loop_iter_var[2 * i]);
		  fail = true;
		  failures++;
		}
	      else
		TREE_VALUE (decls)
		  = gimplify_omp_ctxp->loop_iter_var[2 * i + 1];
	    /* Only complain about a count mismatch if the variables
	       themselves matched; otherwise the errors above suffice.  */
	    if (!fail && i != gimplify_omp_ctxp->loop_iter_var.length () / 2)
	      {
		error_at (OMP_CLAUSE_LOCATION (c),
			  "number of variables in %<depend%> clause with "
			  "%<sink%> modifier does not match number of "
			  "iteration variables");
		failures++;
	      }
	    sink_c = c;
	  }
	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
		 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE)
	  {
	    /* At most one depend(source) clause is allowed.  */
	    if (source_c)
	      {
		error_at (OMP_CLAUSE_LOCATION (c),
			  "more than one %<depend%> clause with %<source%> "
			  "modifier on an %<ordered%> construct");
		failures++;
	      }
	    else
	      source_c = c;
	  }
    }
  /* Mixing source and sink modifiers on one construct is invalid.  */
  if (source_c && sink_c)
    {
      error_at (OMP_CLAUSE_LOCATION (source_c),
		"%<depend%> clause with %<source%> modifier specified "
		"together with %<depend%> clauses with %<sink%> modifier "
		"on the same construct");
      failures++;
    }

  if (failures)
    return gimple_build_nop ();
  return gimple_build_omp_ordered (body, OMP_ORDERED_CLAUSES (expr));
}
13374 
13375 /* Convert the GENERIC expression tree *EXPR_P to GIMPLE.  If the
13376    expression produces a value to be used as an operand inside a GIMPLE
13377    statement, the value will be stored back in *EXPR_P.  This value will
13378    be a tree of class tcc_declaration, tcc_constant, tcc_reference or
13379    an SSA_NAME.  The corresponding sequence of GIMPLE statements is
13380    emitted in PRE_P and POST_P.
13381 
13382    Additionally, this process may overwrite parts of the input
13383    expression during gimplification.  Ideally, it should be
13384    possible to do non-destructive gimplification.
13385 
13386    EXPR_P points to the GENERIC expression to convert to GIMPLE.  If
13387       the expression needs to evaluate to a value to be used as
13388       an operand in a GIMPLE statement, this value will be stored in
13389       *EXPR_P on exit.  This happens when the caller specifies one
13390       of fb_lvalue or fb_rvalue fallback flags.
13391 
13392    PRE_P will contain the sequence of GIMPLE statements corresponding
13393        to the evaluation of EXPR and all the side-effects that must
13394        be executed before the main expression.  On exit, the last
13395        statement of PRE_P is the core statement being gimplified.  For
13396        instance, when gimplifying 'if (++a)' the last statement in
13397        PRE_P will be 'if (t.1)' where t.1 is the result of
13398        pre-incrementing 'a'.
13399 
13400    POST_P will contain the sequence of GIMPLE statements corresponding
13401        to the evaluation of all the side-effects that must be executed
13402        after the main expression.  If this is NULL, the post
13403        side-effects are stored at the end of PRE_P.
13404 
13405        The reason why the output is split in two is to handle post
13406        side-effects explicitly.  In some cases, an expression may have
13407        inner and outer post side-effects which need to be emitted in
13408        an order different from the one given by the recursive
13409        traversal.  For instance, for the expression (*p--)++ the post
13410        side-effects of '--' must actually occur *after* the post
13411        side-effects of '++'.  However, gimplification will first visit
13412        the inner expression, so if a separate POST sequence was not
13413        used, the resulting sequence would be:
13414 
13415        	    1	t.1 = *p
13416        	    2	p = p - 1
13417        	    3	t.2 = t.1 + 1
13418        	    4	*p = t.2
13419 
13420        However, the post-decrement operation in line #2 must not be
13421        evaluated until after the store to *p at line #4, so the
13422        correct sequence should be:
13423 
13424        	    1	t.1 = *p
13425        	    2	t.2 = t.1 + 1
13426        	    3	*p = t.2
13427        	    4	p = p - 1
13428 
13429        So, by specifying a separate post queue, it is possible
13430        to emit the post side-effects in the correct order.
13431        If POST_P is NULL, an internal queue will be used.  Before
13432        returning to the caller, the sequence POST_P is appended to
13433        the main output sequence PRE_P.
13434 
13435    GIMPLE_TEST_F points to a function that takes a tree T and
13436        returns nonzero if T is in the GIMPLE form requested by the
13437        caller.  The GIMPLE predicates are in gimple.c.
13438 
13439    FALLBACK tells the function what sort of a temporary we want if
13440        gimplification cannot produce an expression that complies with
13441        GIMPLE_TEST_F.
13442 
13443        fb_none means that no temporary should be generated
13444        fb_rvalue means that an rvalue is OK to generate
13445        fb_lvalue means that an lvalue is OK to generate
13446        fb_either means that either is OK, but an lvalue is preferable.
13447        fb_mayfail means that gimplification may fail (in which case
13448        GS_ERROR will be returned)
13449 
13450    The return value is either GS_ERROR or GS_ALL_DONE, since this
13451    function iterates until EXPR is completely gimplified or an error
13452    occurs.  */
13453 
13454 enum gimplify_status
gimplify_expr(tree * expr_p,gimple_seq * pre_p,gimple_seq * post_p,bool (* gimple_test_f)(tree),fallback_t fallback)13455 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
13456 	       bool (*gimple_test_f) (tree), fallback_t fallback)
13457 {
13458   tree tmp;
13459   gimple_seq internal_pre = NULL;
13460   gimple_seq internal_post = NULL;
13461   tree save_expr;
13462   bool is_statement;
13463   location_t saved_location;
13464   enum gimplify_status ret;
13465   gimple_stmt_iterator pre_last_gsi, post_last_gsi;
13466   tree label;
13467 
13468   save_expr = *expr_p;
13469   if (save_expr == NULL_TREE)
13470     return GS_ALL_DONE;
13471 
13472   /* If we are gimplifying a top-level statement, PRE_P must be valid.  */
13473   is_statement = gimple_test_f == is_gimple_stmt;
13474   if (is_statement)
13475     gcc_assert (pre_p);
13476 
13477   /* Consistency checks.  */
13478   if (gimple_test_f == is_gimple_reg)
13479     gcc_assert (fallback & (fb_rvalue | fb_lvalue));
13480   else if (gimple_test_f == is_gimple_val
13481            || gimple_test_f == is_gimple_call_addr
13482            || gimple_test_f == is_gimple_condexpr
13483 	   || gimple_test_f == is_gimple_condexpr_for_cond
13484            || gimple_test_f == is_gimple_mem_rhs
13485            || gimple_test_f == is_gimple_mem_rhs_or_call
13486            || gimple_test_f == is_gimple_reg_rhs
13487            || gimple_test_f == is_gimple_reg_rhs_or_call
13488            || gimple_test_f == is_gimple_asm_val
13489 	   || gimple_test_f == is_gimple_mem_ref_addr)
13490     gcc_assert (fallback & fb_rvalue);
13491   else if (gimple_test_f == is_gimple_min_lval
13492 	   || gimple_test_f == is_gimple_lvalue)
13493     gcc_assert (fallback & fb_lvalue);
13494   else if (gimple_test_f == is_gimple_addressable)
13495     gcc_assert (fallback & fb_either);
13496   else if (gimple_test_f == is_gimple_stmt)
13497     gcc_assert (fallback == fb_none);
13498   else
13499     {
13500       /* We should have recognized the GIMPLE_TEST_F predicate to
13501 	 know what kind of fallback to use in case a temporary is
13502 	 needed to hold the value or address of *EXPR_P.  */
13503       gcc_unreachable ();
13504     }
13505 
13506   /* We used to check the predicate here and return immediately if it
13507      succeeds.  This is wrong; the design is for gimplification to be
13508      idempotent, and for the predicates to only test for valid forms, not
13509      whether they are fully simplified.  */
13510   if (pre_p == NULL)
13511     pre_p = &internal_pre;
13512 
13513   if (post_p == NULL)
13514     post_p = &internal_post;
13515 
13516   /* Remember the last statements added to PRE_P and POST_P.  Every
13517      new statement added by the gimplification helpers needs to be
13518      annotated with location information.  To centralize the
13519      responsibility, we remember the last statement that had been
13520      added to both queues before gimplifying *EXPR_P.  If
13521      gimplification produces new statements in PRE_P and POST_P, those
13522      statements will be annotated with the same location information
13523      as *EXPR_P.  */
13524   pre_last_gsi = gsi_last (*pre_p);
13525   post_last_gsi = gsi_last (*post_p);
13526 
13527   saved_location = input_location;
13528   if (save_expr != error_mark_node
13529       && EXPR_HAS_LOCATION (*expr_p))
13530     input_location = EXPR_LOCATION (*expr_p);
13531 
13532   /* Loop over the specific gimplifiers until the toplevel node
13533      remains the same.  */
13534   do
13535     {
13536       /* Strip away as many useless type conversions as possible
13537 	 at the toplevel.  */
13538       STRIP_USELESS_TYPE_CONVERSION (*expr_p);
13539 
13540       /* Remember the expr.  */
13541       save_expr = *expr_p;
13542 
13543       /* Die, die, die, my darling.  */
13544       if (error_operand_p (save_expr))
13545 	{
13546 	  ret = GS_ERROR;
13547 	  break;
13548 	}
13549 
13550       /* Do any language-specific gimplification.  */
13551       ret = ((enum gimplify_status)
13552 	     lang_hooks.gimplify_expr (expr_p, pre_p, post_p));
13553       if (ret == GS_OK)
13554 	{
13555 	  if (*expr_p == NULL_TREE)
13556 	    break;
13557 	  if (*expr_p != save_expr)
13558 	    continue;
13559 	}
13560       else if (ret != GS_UNHANDLED)
13561 	break;
13562 
13563       /* Make sure that all the cases set 'ret' appropriately.  */
13564       ret = GS_UNHANDLED;
13565       switch (TREE_CODE (*expr_p))
13566 	{
13567 	  /* First deal with the special cases.  */
13568 
13569 	case POSTINCREMENT_EXPR:
13570 	case POSTDECREMENT_EXPR:
13571 	case PREINCREMENT_EXPR:
13572 	case PREDECREMENT_EXPR:
13573 	  ret = gimplify_self_mod_expr (expr_p, pre_p, post_p,
13574 					fallback != fb_none,
13575 					TREE_TYPE (*expr_p));
13576 	  break;
13577 
13578 	case VIEW_CONVERT_EXPR:
13579 	  if ((fallback & fb_rvalue)
13580 	      && is_gimple_reg_type (TREE_TYPE (*expr_p))
13581 	      && is_gimple_reg_type (TREE_TYPE (TREE_OPERAND (*expr_p, 0))))
13582 	    {
13583 	      ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
13584 				   post_p, is_gimple_val, fb_rvalue);
13585 	      recalculate_side_effects (*expr_p);
13586 	      break;
13587 	    }
13588 	  /* Fallthru.  */
13589 
13590 	case ARRAY_REF:
13591 	case ARRAY_RANGE_REF:
13592 	case REALPART_EXPR:
13593 	case IMAGPART_EXPR:
13594 	case COMPONENT_REF:
13595 	  ret = gimplify_compound_lval (expr_p, pre_p, post_p,
13596 					fallback ? fallback : fb_rvalue);
13597 	  break;
13598 
13599 	case COND_EXPR:
13600 	  ret = gimplify_cond_expr (expr_p, pre_p, fallback);
13601 
13602 	  /* C99 code may assign to an array in a structure value of a
13603 	     conditional expression, and this has undefined behavior
13604 	     only on execution, so create a temporary if an lvalue is
13605 	     required.  */
13606 	  if (fallback == fb_lvalue)
13607 	    {
13608 	      *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
13609 	      mark_addressable (*expr_p);
13610 	      ret = GS_OK;
13611 	    }
13612 	  break;
13613 
13614 	case CALL_EXPR:
13615 	  ret = gimplify_call_expr (expr_p, pre_p, fallback != fb_none);
13616 
13617 	  /* C99 code may assign to an array in a structure returned
13618 	     from a function, and this has undefined behavior only on
13619 	     execution, so create a temporary if an lvalue is
13620 	     required.  */
13621 	  if (fallback == fb_lvalue)
13622 	    {
13623 	      *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
13624 	      mark_addressable (*expr_p);
13625 	      ret = GS_OK;
13626 	    }
13627 	  break;
13628 
13629 	case TREE_LIST:
13630 	  gcc_unreachable ();
13631 
13632 	case COMPOUND_EXPR:
13633 	  ret = gimplify_compound_expr (expr_p, pre_p, fallback != fb_none);
13634 	  break;
13635 
13636 	case COMPOUND_LITERAL_EXPR:
13637 	  ret = gimplify_compound_literal_expr (expr_p, pre_p,
13638 						gimple_test_f, fallback);
13639 	  break;
13640 
13641 	case MODIFY_EXPR:
13642 	case INIT_EXPR:
13643 	  ret = gimplify_modify_expr (expr_p, pre_p, post_p,
13644 				      fallback != fb_none);
13645 	  break;
13646 
13647 	case TRUTH_ANDIF_EXPR:
13648 	case TRUTH_ORIF_EXPR:
13649 	  {
13650 	    /* Preserve the original type of the expression and the
13651 	       source location of the outer expression.  */
13652 	    tree org_type = TREE_TYPE (*expr_p);
13653 	    *expr_p = gimple_boolify (*expr_p);
13654 	    *expr_p = build3_loc (input_location, COND_EXPR,
13655 				  org_type, *expr_p,
13656 				  fold_convert_loc
13657 				    (input_location,
13658 				     org_type, boolean_true_node),
13659 				  fold_convert_loc
13660 				    (input_location,
13661 				     org_type, boolean_false_node));
13662 	    ret = GS_OK;
13663 	    break;
13664 	  }
13665 
13666 	case TRUTH_NOT_EXPR:
13667 	  {
13668 	    tree type = TREE_TYPE (*expr_p);
13669 	    /* The parsers are careful to generate TRUTH_NOT_EXPR
13670 	       only with operands that are always zero or one.
13671 	       We do not fold here but handle the only interesting case
13672 	       manually, as fold may re-introduce the TRUTH_NOT_EXPR.  */
13673 	    *expr_p = gimple_boolify (*expr_p);
13674 	    if (TYPE_PRECISION (TREE_TYPE (*expr_p)) == 1)
13675 	      *expr_p = build1_loc (input_location, BIT_NOT_EXPR,
13676 				    TREE_TYPE (*expr_p),
13677 				    TREE_OPERAND (*expr_p, 0));
13678 	    else
13679 	      *expr_p = build2_loc (input_location, BIT_XOR_EXPR,
13680 				    TREE_TYPE (*expr_p),
13681 				    TREE_OPERAND (*expr_p, 0),
13682 				    build_int_cst (TREE_TYPE (*expr_p), 1));
13683 	    if (!useless_type_conversion_p (type, TREE_TYPE (*expr_p)))
13684 	      *expr_p = fold_convert_loc (input_location, type, *expr_p);
13685 	    ret = GS_OK;
13686 	    break;
13687 	  }
13688 
13689 	case ADDR_EXPR:
13690 	  ret = gimplify_addr_expr (expr_p, pre_p, post_p);
13691 	  break;
13692 
13693 	case ANNOTATE_EXPR:
13694 	  {
13695 	    tree cond = TREE_OPERAND (*expr_p, 0);
13696 	    tree kind = TREE_OPERAND (*expr_p, 1);
13697 	    tree data = TREE_OPERAND (*expr_p, 2);
13698 	    tree type = TREE_TYPE (cond);
13699 	    if (!INTEGRAL_TYPE_P (type))
13700 	      {
13701 		*expr_p = cond;
13702 		ret = GS_OK;
13703 		break;
13704 	      }
13705 	    tree tmp = create_tmp_var (type);
13706 	    gimplify_arg (&cond, pre_p, EXPR_LOCATION (*expr_p));
13707 	    gcall *call
13708 	      = gimple_build_call_internal (IFN_ANNOTATE, 3, cond, kind, data);
13709 	    gimple_call_set_lhs (call, tmp);
13710 	    gimplify_seq_add_stmt (pre_p, call);
13711 	    *expr_p = tmp;
13712 	    ret = GS_ALL_DONE;
13713 	    break;
13714 	  }
13715 
13716 	case VA_ARG_EXPR:
13717 	  ret = gimplify_va_arg_expr (expr_p, pre_p, post_p);
13718 	  break;
13719 
13720 	CASE_CONVERT:
13721 	  if (IS_EMPTY_STMT (*expr_p))
13722 	    {
13723 	      ret = GS_ALL_DONE;
13724 	      break;
13725 	    }
13726 
13727 	  if (VOID_TYPE_P (TREE_TYPE (*expr_p))
13728 	      || fallback == fb_none)
13729 	    {
13730 	      /* Just strip a conversion to void (or in void context) and
13731 		 try again.  */
13732 	      *expr_p = TREE_OPERAND (*expr_p, 0);
13733 	      ret = GS_OK;
13734 	      break;
13735 	    }
13736 
13737 	  ret = gimplify_conversion (expr_p);
13738 	  if (ret == GS_ERROR)
13739 	    break;
13740 	  if (*expr_p != save_expr)
13741 	    break;
13742 	  /* FALLTHRU */
13743 
13744 	case FIX_TRUNC_EXPR:
13745 	  /* unary_expr: ... | '(' cast ')' val | ...  */
13746 	  ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
13747 			       is_gimple_val, fb_rvalue);
13748 	  recalculate_side_effects (*expr_p);
13749 	  break;
13750 
13751 	case INDIRECT_REF:
13752 	  {
13753 	    bool volatilep = TREE_THIS_VOLATILE (*expr_p);
13754 	    bool notrap = TREE_THIS_NOTRAP (*expr_p);
13755 	    tree saved_ptr_type = TREE_TYPE (TREE_OPERAND (*expr_p, 0));
13756 
13757 	    *expr_p = fold_indirect_ref_loc (input_location, *expr_p);
13758 	    if (*expr_p != save_expr)
13759 	      {
13760 		ret = GS_OK;
13761 		break;
13762 	      }
13763 
13764 	    ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
13765 				 is_gimple_reg, fb_rvalue);
13766 	    if (ret == GS_ERROR)
13767 	      break;
13768 
13769 	    recalculate_side_effects (*expr_p);
13770 	    *expr_p = fold_build2_loc (input_location, MEM_REF,
13771 				       TREE_TYPE (*expr_p),
13772 				       TREE_OPERAND (*expr_p, 0),
13773 				       build_int_cst (saved_ptr_type, 0));
13774 	    TREE_THIS_VOLATILE (*expr_p) = volatilep;
13775 	    TREE_THIS_NOTRAP (*expr_p) = notrap;
13776 	    ret = GS_OK;
13777 	    break;
13778 	  }
13779 
13780 	/* We arrive here through the various re-gimplifcation paths.  */
13781 	case MEM_REF:
13782 	  /* First try re-folding the whole thing.  */
13783 	  tmp = fold_binary (MEM_REF, TREE_TYPE (*expr_p),
13784 			     TREE_OPERAND (*expr_p, 0),
13785 			     TREE_OPERAND (*expr_p, 1));
13786 	  if (tmp)
13787 	    {
13788 	      REF_REVERSE_STORAGE_ORDER (tmp)
13789 	        = REF_REVERSE_STORAGE_ORDER (*expr_p);
13790 	      *expr_p = tmp;
13791 	      recalculate_side_effects (*expr_p);
13792 	      ret = GS_OK;
13793 	      break;
13794 	    }
13795 	  /* Avoid re-gimplifying the address operand if it is already
13796 	     in suitable form.  Re-gimplifying would mark the address
13797 	     operand addressable.  Always gimplify when not in SSA form
13798 	     as we still may have to gimplify decls with value-exprs.  */
13799 	  if (!gimplify_ctxp || !gimple_in_ssa_p (cfun)
13800 	      || !is_gimple_mem_ref_addr (TREE_OPERAND (*expr_p, 0)))
13801 	    {
13802 	      ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
13803 				   is_gimple_mem_ref_addr, fb_rvalue);
13804 	      if (ret == GS_ERROR)
13805 		break;
13806 	    }
13807 	  recalculate_side_effects (*expr_p);
13808 	  ret = GS_ALL_DONE;
13809 	  break;
13810 
13811 	/* Constants need not be gimplified.  */
13812 	case INTEGER_CST:
13813 	case REAL_CST:
13814 	case FIXED_CST:
13815 	case STRING_CST:
13816 	case COMPLEX_CST:
13817 	case VECTOR_CST:
13818 	  /* Drop the overflow flag on constants, we do not want
13819 	     that in the GIMPLE IL.  */
13820 	  if (TREE_OVERFLOW_P (*expr_p))
13821 	    *expr_p = drop_tree_overflow (*expr_p);
13822 	  ret = GS_ALL_DONE;
13823 	  break;
13824 
13825 	case CONST_DECL:
13826 	  /* If we require an lvalue, such as for ADDR_EXPR, retain the
13827 	     CONST_DECL node.  Otherwise the decl is replaceable by its
13828 	     value.  */
13829 	  /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either.  */
13830 	  if (fallback & fb_lvalue)
13831 	    ret = GS_ALL_DONE;
13832 	  else
13833 	    {
13834 	      *expr_p = DECL_INITIAL (*expr_p);
13835 	      ret = GS_OK;
13836 	    }
13837 	  break;
13838 
13839 	case DECL_EXPR:
13840 	  ret = gimplify_decl_expr (expr_p, pre_p);
13841 	  break;
13842 
13843 	case BIND_EXPR:
13844 	  ret = gimplify_bind_expr (expr_p, pre_p);
13845 	  break;
13846 
13847 	case LOOP_EXPR:
13848 	  ret = gimplify_loop_expr (expr_p, pre_p);
13849 	  break;
13850 
13851 	case SWITCH_EXPR:
13852 	  ret = gimplify_switch_expr (expr_p, pre_p);
13853 	  break;
13854 
13855 	case EXIT_EXPR:
13856 	  ret = gimplify_exit_expr (expr_p);
13857 	  break;
13858 
13859 	case GOTO_EXPR:
13860 	  /* If the target is not LABEL, then it is a computed jump
13861 	     and the target needs to be gimplified.  */
13862 	  if (TREE_CODE (GOTO_DESTINATION (*expr_p)) != LABEL_DECL)
13863 	    {
13864 	      ret = gimplify_expr (&GOTO_DESTINATION (*expr_p), pre_p,
13865 				   NULL, is_gimple_val, fb_rvalue);
13866 	      if (ret == GS_ERROR)
13867 		break;
13868 	    }
13869 	  gimplify_seq_add_stmt (pre_p,
13870 			  gimple_build_goto (GOTO_DESTINATION (*expr_p)));
13871 	  ret = GS_ALL_DONE;
13872 	  break;
13873 
13874 	case PREDICT_EXPR:
13875 	  gimplify_seq_add_stmt (pre_p,
13876 			gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p),
13877 					      PREDICT_EXPR_OUTCOME (*expr_p)));
13878 	  ret = GS_ALL_DONE;
13879 	  break;
13880 
13881 	case LABEL_EXPR:
13882 	  ret = gimplify_label_expr (expr_p, pre_p);
13883 	  label = LABEL_EXPR_LABEL (*expr_p);
13884 	  gcc_assert (decl_function_context (label) == current_function_decl);
13885 
13886 	  /* If the label is used in a goto statement, or address of the label
13887 	     is taken, we need to unpoison all variables that were seen so far.
13888 	     Doing so would prevent us from reporting a false positives.  */
13889 	  if (asan_poisoned_variables
13890 	      && asan_used_labels != NULL
13891 	      && asan_used_labels->contains (label))
13892 	    asan_poison_variables (asan_poisoned_variables, false, pre_p);
13893 	  break;
13894 
13895 	case CASE_LABEL_EXPR:
13896 	  ret = gimplify_case_label_expr (expr_p, pre_p);
13897 
13898 	  if (gimplify_ctxp->live_switch_vars)
13899 	    asan_poison_variables (gimplify_ctxp->live_switch_vars, false,
13900 				   pre_p);
13901 	  break;
13902 
13903 	case RETURN_EXPR:
13904 	  ret = gimplify_return_expr (*expr_p, pre_p);
13905 	  break;
13906 
13907 	case CONSTRUCTOR:
13908 	  /* Don't reduce this in place; let gimplify_init_constructor work its
13909 	     magic.  Buf if we're just elaborating this for side effects, just
13910 	     gimplify any element that has side-effects.  */
13911 	  if (fallback == fb_none)
13912 	    {
13913 	      unsigned HOST_WIDE_INT ix;
13914 	      tree val;
13915 	      tree temp = NULL_TREE;
13916 	      FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (*expr_p), ix, val)
13917 		if (TREE_SIDE_EFFECTS (val))
13918 		  append_to_statement_list (val, &temp);
13919 
13920 	      *expr_p = temp;
13921 	      ret = temp ? GS_OK : GS_ALL_DONE;
13922 	    }
13923 	  /* C99 code may assign to an array in a constructed
13924 	     structure or union, and this has undefined behavior only
13925 	     on execution, so create a temporary if an lvalue is
13926 	     required.  */
13927 	  else if (fallback == fb_lvalue)
13928 	    {
13929 	      *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
13930 	      mark_addressable (*expr_p);
13931 	      ret = GS_OK;
13932 	    }
13933 	  else
13934 	    ret = GS_ALL_DONE;
13935 	  break;
13936 
13937 	  /* The following are special cases that are not handled by the
13938 	     original GIMPLE grammar.  */
13939 
13940 	  /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
13941 	     eliminated.  */
13942 	case SAVE_EXPR:
13943 	  ret = gimplify_save_expr (expr_p, pre_p, post_p);
13944 	  break;
13945 
13946 	case BIT_FIELD_REF:
13947 	  ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
13948 			       post_p, is_gimple_lvalue, fb_either);
13949 	  recalculate_side_effects (*expr_p);
13950 	  break;
13951 
13952 	case TARGET_MEM_REF:
13953 	  {
13954 	    enum gimplify_status r0 = GS_ALL_DONE, r1 = GS_ALL_DONE;
13955 
13956 	    if (TMR_BASE (*expr_p))
13957 	      r0 = gimplify_expr (&TMR_BASE (*expr_p), pre_p,
13958 				  post_p, is_gimple_mem_ref_addr, fb_either);
13959 	    if (TMR_INDEX (*expr_p))
13960 	      r1 = gimplify_expr (&TMR_INDEX (*expr_p), pre_p,
13961 				  post_p, is_gimple_val, fb_rvalue);
13962 	    if (TMR_INDEX2 (*expr_p))
13963 	      r1 = gimplify_expr (&TMR_INDEX2 (*expr_p), pre_p,
13964 				  post_p, is_gimple_val, fb_rvalue);
13965 	    /* TMR_STEP and TMR_OFFSET are always integer constants.  */
13966 	    ret = MIN (r0, r1);
13967 	  }
13968 	  break;
13969 
13970 	case NON_LVALUE_EXPR:
13971 	  /* This should have been stripped above.  */
13972 	  gcc_unreachable ();
13973 
13974 	case ASM_EXPR:
13975 	  ret = gimplify_asm_expr (expr_p, pre_p, post_p);
13976 	  break;
13977 
13978 	case TRY_FINALLY_EXPR:
13979 	case TRY_CATCH_EXPR:
13980 	  {
13981 	    gimple_seq eval, cleanup;
13982 	    gtry *try_;
13983 
13984 	    /* Calls to destructors are generated automatically in FINALLY/CATCH
13985 	       block. They should have location as UNKNOWN_LOCATION. However,
13986 	       gimplify_call_expr will reset these call stmts to input_location
13987 	       if it finds stmt's location is unknown. To prevent resetting for
13988 	       destructors, we set the input_location to unknown.
13989 	       Note that this only affects the destructor calls in FINALLY/CATCH
13990 	       block, and will automatically reset to its original value by the
13991 	       end of gimplify_expr.  */
13992 	    input_location = UNKNOWN_LOCATION;
13993 	    eval = cleanup = NULL;
13994 	    gimplify_and_add (TREE_OPERAND (*expr_p, 0), &eval);
13995 	    if (TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
13996 		&& TREE_CODE (TREE_OPERAND (*expr_p, 1)) == EH_ELSE_EXPR)
13997 	      {
13998 		gimple_seq n = NULL, e = NULL;
13999 		gimplify_and_add (TREE_OPERAND (TREE_OPERAND (*expr_p, 1),
14000 						0), &n);
14001 		gimplify_and_add (TREE_OPERAND (TREE_OPERAND (*expr_p, 1),
14002 						1), &e);
14003 		if (!gimple_seq_empty_p (n) && !gimple_seq_empty_p (e))
14004 		  {
14005 		    geh_else *stmt = gimple_build_eh_else (n, e);
14006 		    gimple_seq_add_stmt (&cleanup, stmt);
14007 		  }
14008 	      }
14009 	    else
14010 	      gimplify_and_add (TREE_OPERAND (*expr_p, 1), &cleanup);
14011 	    /* Don't create bogus GIMPLE_TRY with empty cleanup.  */
14012 	    if (gimple_seq_empty_p (cleanup))
14013 	      {
14014 		gimple_seq_add_seq (pre_p, eval);
14015 		ret = GS_ALL_DONE;
14016 		break;
14017 	      }
14018 	    try_ = gimple_build_try (eval, cleanup,
14019 				     TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
14020 				     ? GIMPLE_TRY_FINALLY
14021 				     : GIMPLE_TRY_CATCH);
14022 	    if (EXPR_HAS_LOCATION (save_expr))
14023 	      gimple_set_location (try_, EXPR_LOCATION (save_expr));
14024 	    else if (LOCATION_LOCUS (saved_location) != UNKNOWN_LOCATION)
14025 	      gimple_set_location (try_, saved_location);
14026 	    if (TREE_CODE (*expr_p) == TRY_CATCH_EXPR)
14027 	      gimple_try_set_catch_is_cleanup (try_,
14028 					       TRY_CATCH_IS_CLEANUP (*expr_p));
14029 	    gimplify_seq_add_stmt (pre_p, try_);
14030 	    ret = GS_ALL_DONE;
14031 	    break;
14032 	  }
14033 
14034 	case CLEANUP_POINT_EXPR:
14035 	  ret = gimplify_cleanup_point_expr (expr_p, pre_p);
14036 	  break;
14037 
14038 	case TARGET_EXPR:
14039 	  ret = gimplify_target_expr (expr_p, pre_p, post_p);
14040 	  break;
14041 
14042 	case CATCH_EXPR:
14043 	  {
14044 	    gimple *c;
14045 	    gimple_seq handler = NULL;
14046 	    gimplify_and_add (CATCH_BODY (*expr_p), &handler);
14047 	    c = gimple_build_catch (CATCH_TYPES (*expr_p), handler);
14048 	    gimplify_seq_add_stmt (pre_p, c);
14049 	    ret = GS_ALL_DONE;
14050 	    break;
14051 	  }
14052 
14053 	case EH_FILTER_EXPR:
14054 	  {
14055 	    gimple *ehf;
14056 	    gimple_seq failure = NULL;
14057 
14058 	    gimplify_and_add (EH_FILTER_FAILURE (*expr_p), &failure);
14059 	    ehf = gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p), failure);
14060 	    gimple_set_no_warning (ehf, TREE_NO_WARNING (*expr_p));
14061 	    gimplify_seq_add_stmt (pre_p, ehf);
14062 	    ret = GS_ALL_DONE;
14063 	    break;
14064 	  }
14065 
14066 	case OBJ_TYPE_REF:
14067 	  {
14068 	    enum gimplify_status r0, r1;
14069 	    r0 = gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p), pre_p,
14070 				post_p, is_gimple_val, fb_rvalue);
14071 	    r1 = gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p), pre_p,
14072 				post_p, is_gimple_val, fb_rvalue);
14073 	    TREE_SIDE_EFFECTS (*expr_p) = 0;
14074 	    ret = MIN (r0, r1);
14075 	  }
14076 	  break;
14077 
14078 	case LABEL_DECL:
14079 	  /* We get here when taking the address of a label.  We mark
14080 	     the label as "forced"; meaning it can never be removed and
14081 	     it is a potential target for any computed goto.  */
14082 	  FORCED_LABEL (*expr_p) = 1;
14083 	  ret = GS_ALL_DONE;
14084 	  break;
14085 
14086 	case STATEMENT_LIST:
14087 	  ret = gimplify_statement_list (expr_p, pre_p);
14088 	  break;
14089 
14090 	case WITH_SIZE_EXPR:
14091 	  {
14092 	    gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
14093 			   post_p == &internal_post ? NULL : post_p,
14094 			   gimple_test_f, fallback);
14095 	    gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
14096 			   is_gimple_val, fb_rvalue);
14097 	    ret = GS_ALL_DONE;
14098 	  }
14099 	  break;
14100 
14101 	case VAR_DECL:
14102 	case PARM_DECL:
14103 	  ret = gimplify_var_or_parm_decl (expr_p);
14104 	  break;
14105 
14106 	case RESULT_DECL:
14107 	  /* When within an OMP context, notice uses of variables.  */
14108 	  if (gimplify_omp_ctxp)
14109 	    omp_notice_variable (gimplify_omp_ctxp, *expr_p, true);
14110 	  ret = GS_ALL_DONE;
14111 	  break;
14112 
14113 	case DEBUG_EXPR_DECL:
14114 	  gcc_unreachable ();
14115 
14116 	case DEBUG_BEGIN_STMT:
14117 	  gimplify_seq_add_stmt (pre_p,
14118 				 gimple_build_debug_begin_stmt
14119 				 (TREE_BLOCK (*expr_p),
14120 				  EXPR_LOCATION (*expr_p)));
14121 	  ret = GS_ALL_DONE;
14122 	  *expr_p = NULL;
14123 	  break;
14124 
14125 	case SSA_NAME:
14126 	  /* Allow callbacks into the gimplifier during optimization.  */
14127 	  ret = GS_ALL_DONE;
14128 	  break;
14129 
14130 	case OMP_PARALLEL:
14131 	  gimplify_omp_parallel (expr_p, pre_p);
14132 	  ret = GS_ALL_DONE;
14133 	  break;
14134 
14135 	case OMP_TASK:
14136 	  gimplify_omp_task (expr_p, pre_p);
14137 	  ret = GS_ALL_DONE;
14138 	  break;
14139 
14140 	case OMP_FOR:
14141 	case OMP_SIMD:
14142 	case OMP_DISTRIBUTE:
14143 	case OMP_TASKLOOP:
14144 	case OACC_LOOP:
14145 	  ret = gimplify_omp_for (expr_p, pre_p);
14146 	  break;
14147 
14148 	case OMP_LOOP:
14149 	  ret = gimplify_omp_loop (expr_p, pre_p);
14150 	  break;
14151 
14152 	case OACC_CACHE:
14153 	  gimplify_oacc_cache (expr_p, pre_p);
14154 	  ret = GS_ALL_DONE;
14155 	  break;
14156 
14157 	case OACC_DECLARE:
14158 	  gimplify_oacc_declare (expr_p, pre_p);
14159 	  ret = GS_ALL_DONE;
14160 	  break;
14161 
14162 	case OACC_HOST_DATA:
14163 	case OACC_DATA:
14164 	case OACC_KERNELS:
14165 	case OACC_PARALLEL:
14166 	case OACC_SERIAL:
14167 	case OMP_SECTIONS:
14168 	case OMP_SINGLE:
14169 	case OMP_TARGET:
14170 	case OMP_TARGET_DATA:
14171 	case OMP_TEAMS:
14172 	  gimplify_omp_workshare (expr_p, pre_p);
14173 	  ret = GS_ALL_DONE;
14174 	  break;
14175 
14176 	case OACC_ENTER_DATA:
14177 	case OACC_EXIT_DATA:
14178 	case OACC_UPDATE:
14179 	case OMP_TARGET_UPDATE:
14180 	case OMP_TARGET_ENTER_DATA:
14181 	case OMP_TARGET_EXIT_DATA:
14182 	  gimplify_omp_target_update (expr_p, pre_p);
14183 	  ret = GS_ALL_DONE;
14184 	  break;
14185 
14186 	case OMP_SECTION:
14187 	case OMP_MASTER:
14188 	case OMP_ORDERED:
14189 	case OMP_CRITICAL:
14190 	case OMP_SCAN:
14191 	  {
14192 	    gimple_seq body = NULL;
14193 	    gimple *g;
14194 	    bool saved_in_omp_construct = in_omp_construct;
14195 
14196 	    in_omp_construct = true;
14197 	    gimplify_and_add (OMP_BODY (*expr_p), &body);
14198 	    in_omp_construct = saved_in_omp_construct;
14199 	    switch (TREE_CODE (*expr_p))
14200 	      {
14201 	      case OMP_SECTION:
14202 	        g = gimple_build_omp_section (body);
14203 	        break;
14204 	      case OMP_MASTER:
14205 	        g = gimple_build_omp_master (body);
14206 		break;
14207 	      case OMP_ORDERED:
14208 		g = gimplify_omp_ordered (*expr_p, body);
14209 		break;
14210 	      case OMP_CRITICAL:
14211 		gimplify_scan_omp_clauses (&OMP_CRITICAL_CLAUSES (*expr_p),
14212 					   pre_p, ORT_WORKSHARE, OMP_CRITICAL);
14213 		gimplify_adjust_omp_clauses (pre_p, body,
14214 					     &OMP_CRITICAL_CLAUSES (*expr_p),
14215 					     OMP_CRITICAL);
14216 		g = gimple_build_omp_critical (body,
14217 		    			       OMP_CRITICAL_NAME (*expr_p),
14218 		    			       OMP_CRITICAL_CLAUSES (*expr_p));
14219 		break;
14220 	      case OMP_SCAN:
14221 		gimplify_scan_omp_clauses (&OMP_SCAN_CLAUSES (*expr_p),
14222 					   pre_p, ORT_WORKSHARE, OMP_SCAN);
14223 		gimplify_adjust_omp_clauses (pre_p, body,
14224 					     &OMP_SCAN_CLAUSES (*expr_p),
14225 					     OMP_SCAN);
14226 		g = gimple_build_omp_scan (body, OMP_SCAN_CLAUSES (*expr_p));
14227 		break;
14228 	      default:
14229 		gcc_unreachable ();
14230 	      }
14231 	    gimplify_seq_add_stmt (pre_p, g);
14232 	    ret = GS_ALL_DONE;
14233 	    break;
14234 	  }
14235 
14236 	case OMP_TASKGROUP:
14237 	  {
14238 	    gimple_seq body = NULL;
14239 
14240 	    tree *pclauses = &OMP_TASKGROUP_CLAUSES (*expr_p);
14241 	    bool saved_in_omp_construct = in_omp_construct;
14242 	    gimplify_scan_omp_clauses (pclauses, pre_p, ORT_TASKGROUP,
14243 				       OMP_TASKGROUP);
14244 	    gimplify_adjust_omp_clauses (pre_p, NULL, pclauses, OMP_TASKGROUP);
14245 
14246 	    in_omp_construct = true;
14247 	    gimplify_and_add (OMP_BODY (*expr_p), &body);
14248 	    in_omp_construct = saved_in_omp_construct;
14249 	    gimple_seq cleanup = NULL;
14250 	    tree fn = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_END);
14251 	    gimple *g = gimple_build_call (fn, 0);
14252 	    gimple_seq_add_stmt (&cleanup, g);
14253 	    g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
14254 	    body = NULL;
14255 	    gimple_seq_add_stmt (&body, g);
14256 	    g = gimple_build_omp_taskgroup (body, *pclauses);
14257 	    gimplify_seq_add_stmt (pre_p, g);
14258 	    ret = GS_ALL_DONE;
14259 	    break;
14260 	  }
14261 
14262 	case OMP_ATOMIC:
14263 	case OMP_ATOMIC_READ:
14264 	case OMP_ATOMIC_CAPTURE_OLD:
14265 	case OMP_ATOMIC_CAPTURE_NEW:
14266 	  ret = gimplify_omp_atomic (expr_p, pre_p);
14267 	  break;
14268 
14269 	case TRANSACTION_EXPR:
14270 	  ret = gimplify_transaction (expr_p, pre_p);
14271 	  break;
14272 
14273 	case TRUTH_AND_EXPR:
14274 	case TRUTH_OR_EXPR:
14275 	case TRUTH_XOR_EXPR:
14276 	  {
14277 	    tree orig_type = TREE_TYPE (*expr_p);
14278 	    tree new_type, xop0, xop1;
14279 	    *expr_p = gimple_boolify (*expr_p);
14280 	    new_type = TREE_TYPE (*expr_p);
14281 	    if (!useless_type_conversion_p (orig_type, new_type))
14282 	      {
14283 		*expr_p = fold_convert_loc (input_location, orig_type, *expr_p);
14284 		ret = GS_OK;
14285 		break;
14286 	      }
14287 
14288 	  /* Boolified binary truth expressions are semantically equivalent
14289 	     to bitwise binary expressions.  Canonicalize them to the
14290 	     bitwise variant.  */
14291 	    switch (TREE_CODE (*expr_p))
14292 	      {
14293 	      case TRUTH_AND_EXPR:
14294 		TREE_SET_CODE (*expr_p, BIT_AND_EXPR);
14295 		break;
14296 	      case TRUTH_OR_EXPR:
14297 		TREE_SET_CODE (*expr_p, BIT_IOR_EXPR);
14298 		break;
14299 	      case TRUTH_XOR_EXPR:
14300 		TREE_SET_CODE (*expr_p, BIT_XOR_EXPR);
14301 		break;
14302 	      default:
14303 		break;
14304 	      }
14305 	    /* Now make sure that operands have compatible type to
14306 	       expression's new_type.  */
14307 	    xop0 = TREE_OPERAND (*expr_p, 0);
14308 	    xop1 = TREE_OPERAND (*expr_p, 1);
14309 	    if (!useless_type_conversion_p (new_type, TREE_TYPE (xop0)))
14310 	      TREE_OPERAND (*expr_p, 0) = fold_convert_loc (input_location,
14311 							    new_type,
14312 	      						    xop0);
14313 	    if (!useless_type_conversion_p (new_type, TREE_TYPE (xop1)))
14314 	      TREE_OPERAND (*expr_p, 1) = fold_convert_loc (input_location,
14315 							    new_type,
14316 	      						    xop1);
14317 	    /* Continue classified as tcc_binary.  */
14318 	    goto expr_2;
14319 	  }
14320 
14321 	case VEC_COND_EXPR:
14322 	  {
14323 	    enum gimplify_status r0, r1, r2;
14324 
14325 	    r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
14326 				post_p, is_gimple_condexpr, fb_rvalue);
14327 	    r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
14328 				post_p, is_gimple_val, fb_rvalue);
14329 	    r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
14330 				post_p, is_gimple_val, fb_rvalue);
14331 
14332 	    ret = MIN (MIN (r0, r1), r2);
14333 	    recalculate_side_effects (*expr_p);
14334 	  }
14335 	  break;
14336 
14337 	case VEC_PERM_EXPR:
14338 	  /* Classified as tcc_expression.  */
14339 	  goto expr_3;
14340 
14341 	case BIT_INSERT_EXPR:
14342 	  /* Argument 3 is a constant.  */
14343 	  goto expr_2;
14344 
14345 	case POINTER_PLUS_EXPR:
14346 	  {
14347 	    enum gimplify_status r0, r1;
14348 	    r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
14349 				post_p, is_gimple_val, fb_rvalue);
14350 	    r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
14351 				post_p, is_gimple_val, fb_rvalue);
14352 	    recalculate_side_effects (*expr_p);
14353 	    ret = MIN (r0, r1);
14354 	    break;
14355 	  }
14356 
14357 	default:
14358 	  switch (TREE_CODE_CLASS (TREE_CODE (*expr_p)))
14359 	    {
14360 	    case tcc_comparison:
14361 	      /* Handle comparison of objects of non scalar mode aggregates
14362 	     	 with a call to memcmp.  It would be nice to only have to do
14363 	     	 this for variable-sized objects, but then we'd have to allow
14364 	     	 the same nest of reference nodes we allow for MODIFY_EXPR and
14365 	     	 that's too complex.
14366 
14367 		 Compare scalar mode aggregates as scalar mode values.  Using
14368 		 memcmp for them would be very inefficient at best, and is
14369 		 plain wrong if bitfields are involved.  */
14370 		{
14371 		  tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1));
14372 
14373 		  /* Vector comparisons need no boolification.  */
14374 		  if (TREE_CODE (type) == VECTOR_TYPE)
14375 		    goto expr_2;
14376 		  else if (!AGGREGATE_TYPE_P (type))
14377 		    {
14378 		      tree org_type = TREE_TYPE (*expr_p);
14379 		      *expr_p = gimple_boolify (*expr_p);
14380 		      if (!useless_type_conversion_p (org_type,
14381 						      TREE_TYPE (*expr_p)))
14382 			{
14383 			  *expr_p = fold_convert_loc (input_location,
14384 						      org_type, *expr_p);
14385 			  ret = GS_OK;
14386 			}
14387 		      else
14388 			goto expr_2;
14389 		    }
14390 		  else if (TYPE_MODE (type) != BLKmode)
14391 		    ret = gimplify_scalar_mode_aggregate_compare (expr_p);
14392 		  else
14393 		    ret = gimplify_variable_sized_compare (expr_p);
14394 
14395 		  break;
14396 		}
14397 
14398 	    /* If *EXPR_P does not need to be special-cased, handle it
14399 	       according to its class.  */
14400 	    case tcc_unary:
14401 	      ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
14402 				   post_p, is_gimple_val, fb_rvalue);
14403 	      break;
14404 
14405 	    case tcc_binary:
14406 	    expr_2:
14407 	      {
14408 		enum gimplify_status r0, r1;
14409 
14410 		r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
14411 		                    post_p, is_gimple_val, fb_rvalue);
14412 		r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
14413 				    post_p, is_gimple_val, fb_rvalue);
14414 
14415 		ret = MIN (r0, r1);
14416 		break;
14417 	      }
14418 
14419 	    expr_3:
14420 	      {
14421 		enum gimplify_status r0, r1, r2;
14422 
14423 		r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
14424 		                    post_p, is_gimple_val, fb_rvalue);
14425 		r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
14426 				    post_p, is_gimple_val, fb_rvalue);
14427 		r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
14428 				    post_p, is_gimple_val, fb_rvalue);
14429 
14430 		ret = MIN (MIN (r0, r1), r2);
14431 		break;
14432 	      }
14433 
14434 	    case tcc_declaration:
14435 	    case tcc_constant:
14436 	      ret = GS_ALL_DONE;
14437 	      goto dont_recalculate;
14438 
14439 	    default:
14440 	      gcc_unreachable ();
14441 	    }
14442 
14443 	  recalculate_side_effects (*expr_p);
14444 
14445 	dont_recalculate:
14446 	  break;
14447 	}
14448 
14449       gcc_assert (*expr_p || ret != GS_OK);
14450     }
14451   while (ret == GS_OK);
14452 
14453   /* If we encountered an error_mark somewhere nested inside, either
14454      stub out the statement or propagate the error back out.  */
14455   if (ret == GS_ERROR)
14456     {
14457       if (is_statement)
14458 	*expr_p = NULL;
14459       goto out;
14460     }
14461 
14462   /* This was only valid as a return value from the langhook, which
14463      we handled.  Make sure it doesn't escape from any other context.  */
14464   gcc_assert (ret != GS_UNHANDLED);
14465 
14466   if (fallback == fb_none && *expr_p && !is_gimple_stmt (*expr_p))
14467     {
14468       /* We aren't looking for a value, and we don't have a valid
14469 	 statement.  If it doesn't have side-effects, throw it away.
14470 	 We can also get here with code such as "*&&L;", where L is
14471 	 a LABEL_DECL that is marked as FORCED_LABEL.  */
14472       if (TREE_CODE (*expr_p) == LABEL_DECL
14473 	  || !TREE_SIDE_EFFECTS (*expr_p))
14474 	*expr_p = NULL;
14475       else if (!TREE_THIS_VOLATILE (*expr_p))
14476 	{
14477 	  /* This is probably a _REF that contains something nested that
14478 	     has side effects.  Recurse through the operands to find it.  */
14479 	  enum tree_code code = TREE_CODE (*expr_p);
14480 
14481 	  switch (code)
14482 	    {
14483 	    case COMPONENT_REF:
14484 	    case REALPART_EXPR:
14485 	    case IMAGPART_EXPR:
14486 	    case VIEW_CONVERT_EXPR:
14487 	      gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
14488 			     gimple_test_f, fallback);
14489 	      break;
14490 
14491 	    case ARRAY_REF:
14492 	    case ARRAY_RANGE_REF:
14493 	      gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
14494 			     gimple_test_f, fallback);
14495 	      gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
14496 			     gimple_test_f, fallback);
14497 	      break;
14498 
14499 	    default:
14500 	       /* Anything else with side-effects must be converted to
14501 		  a valid statement before we get here.  */
14502 	      gcc_unreachable ();
14503 	    }
14504 
14505 	  *expr_p = NULL;
14506 	}
14507       else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p))
14508 	       && TYPE_MODE (TREE_TYPE (*expr_p)) != BLKmode)
14509 	{
14510 	  /* Historically, the compiler has treated a bare reference
14511 	     to a non-BLKmode volatile lvalue as forcing a load.  */
14512 	  tree type = TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p));
14513 
14514 	  /* Normally, we do not want to create a temporary for a
14515 	     TREE_ADDRESSABLE type because such a type should not be
14516 	     copied by bitwise-assignment.  However, we make an
14517 	     exception here, as all we are doing here is ensuring that
14518 	     we read the bytes that make up the type.  We use
14519 	     create_tmp_var_raw because create_tmp_var will abort when
14520 	     given a TREE_ADDRESSABLE type.  */
14521 	  tree tmp = create_tmp_var_raw (type, "vol");
14522 	  gimple_add_tmp_var (tmp);
14523 	  gimplify_assign (tmp, *expr_p, pre_p);
14524 	  *expr_p = NULL;
14525 	}
14526       else
14527 	/* We can't do anything useful with a volatile reference to
14528 	   an incomplete type, so just throw it away.  Likewise for
14529 	   a BLKmode type, since any implicit inner load should
14530 	   already have been turned into an explicit one by the
14531 	   gimplification process.  */
14532 	*expr_p = NULL;
14533     }
14534 
14535   /* If we are gimplifying at the statement level, we're done.  Tack
14536      everything together and return.  */
14537   if (fallback == fb_none || is_statement)
14538     {
14539       /* Since *EXPR_P has been converted into a GIMPLE tuple, clear
14540          it out for GC to reclaim it.  */
14541       *expr_p = NULL_TREE;
14542 
14543       if (!gimple_seq_empty_p (internal_pre)
14544 	  || !gimple_seq_empty_p (internal_post))
14545 	{
14546 	  gimplify_seq_add_seq (&internal_pre, internal_post);
14547 	  gimplify_seq_add_seq (pre_p, internal_pre);
14548 	}
14549 
14550       /* The result of gimplifying *EXPR_P is going to be the last few
14551 	 statements in *PRE_P and *POST_P.  Add location information
14552 	 to all the statements that were added by the gimplification
14553 	 helpers.  */
14554       if (!gimple_seq_empty_p (*pre_p))
14555 	annotate_all_with_location_after (*pre_p, pre_last_gsi, input_location);
14556 
14557       if (!gimple_seq_empty_p (*post_p))
14558 	annotate_all_with_location_after (*post_p, post_last_gsi,
14559 					  input_location);
14560 
14561       goto out;
14562     }
14563 
14564 #ifdef ENABLE_GIMPLE_CHECKING
14565   if (*expr_p)
14566     {
14567       enum tree_code code = TREE_CODE (*expr_p);
14568       /* These expressions should already be in gimple IR form.  */
14569       gcc_assert (code != MODIFY_EXPR
14570 		  && code != ASM_EXPR
14571 		  && code != BIND_EXPR
14572 		  && code != CATCH_EXPR
14573 		  && (code != COND_EXPR || gimplify_ctxp->allow_rhs_cond_expr)
14574 		  && code != EH_FILTER_EXPR
14575 		  && code != GOTO_EXPR
14576 		  && code != LABEL_EXPR
14577 		  && code != LOOP_EXPR
14578 		  && code != SWITCH_EXPR
14579 		  && code != TRY_FINALLY_EXPR
14580 		  && code != EH_ELSE_EXPR
14581 		  && code != OACC_PARALLEL
14582 		  && code != OACC_KERNELS
14583 		  && code != OACC_SERIAL
14584 		  && code != OACC_DATA
14585 		  && code != OACC_HOST_DATA
14586 		  && code != OACC_DECLARE
14587 		  && code != OACC_UPDATE
14588 		  && code != OACC_ENTER_DATA
14589 		  && code != OACC_EXIT_DATA
14590 		  && code != OACC_CACHE
14591 		  && code != OMP_CRITICAL
14592 		  && code != OMP_FOR
14593 		  && code != OACC_LOOP
14594 		  && code != OMP_MASTER
14595 		  && code != OMP_TASKGROUP
14596 		  && code != OMP_ORDERED
14597 		  && code != OMP_PARALLEL
14598 		  && code != OMP_SCAN
14599 		  && code != OMP_SECTIONS
14600 		  && code != OMP_SECTION
14601 		  && code != OMP_SINGLE);
14602     }
14603 #endif
14604 
14605   /* Otherwise we're gimplifying a subexpression, so the resulting
14606      value is interesting.  If it's a valid operand that matches
14607      GIMPLE_TEST_F, we're done. Unless we are handling some
14608      post-effects internally; if that's the case, we need to copy into
14609      a temporary before adding the post-effects to POST_P.  */
14610   if (gimple_seq_empty_p (internal_post) && (*gimple_test_f) (*expr_p))
14611     goto out;
14612 
14613   /* Otherwise, we need to create a new temporary for the gimplified
14614      expression.  */
14615 
14616   /* We can't return an lvalue if we have an internal postqueue.  The
14617      object the lvalue refers to would (probably) be modified by the
14618      postqueue; we need to copy the value out first, which means an
14619      rvalue.  */
14620   if ((fallback & fb_lvalue)
14621       && gimple_seq_empty_p (internal_post)
14622       && is_gimple_addressable (*expr_p))
14623     {
14624       /* An lvalue will do.  Take the address of the expression, store it
14625 	 in a temporary, and replace the expression with an INDIRECT_REF of
14626 	 that temporary.  */
14627       tree ref_alias_type = reference_alias_ptr_type (*expr_p);
14628       unsigned int ref_align = get_object_alignment (*expr_p);
14629       tree ref_type = TREE_TYPE (*expr_p);
14630       tmp = build_fold_addr_expr_loc (input_location, *expr_p);
14631       gimplify_expr (&tmp, pre_p, post_p, is_gimple_reg, fb_rvalue);
14632       if (TYPE_ALIGN (ref_type) != ref_align)
14633 	ref_type = build_aligned_type (ref_type, ref_align);
14634       *expr_p = build2 (MEM_REF, ref_type,
14635 			tmp, build_zero_cst (ref_alias_type));
14636     }
14637   else if ((fallback & fb_rvalue) && is_gimple_reg_rhs_or_call (*expr_p))
14638     {
14639       /* An rvalue will do.  Assign the gimplified expression into a
14640 	 new temporary TMP and replace the original expression with
14641 	 TMP.  First, make sure that the expression has a type so that
14642 	 it can be assigned into a temporary.  */
14643       gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p)));
14644       *expr_p = get_formal_tmp_var (*expr_p, pre_p);
14645     }
14646   else
14647     {
14648 #ifdef ENABLE_GIMPLE_CHECKING
14649       if (!(fallback & fb_mayfail))
14650 	{
14651 	  fprintf (stderr, "gimplification failed:\n");
14652 	  print_generic_expr (stderr, *expr_p);
14653 	  debug_tree (*expr_p);
14654 	  internal_error ("gimplification failed");
14655 	}
14656 #endif
14657       gcc_assert (fallback & fb_mayfail);
14658 
14659       /* If this is an asm statement, and the user asked for the
14660 	 impossible, don't die.  Fail and let gimplify_asm_expr
14661 	 issue an error.  */
14662       ret = GS_ERROR;
14663       goto out;
14664     }
14665 
14666   /* Make sure the temporary matches our predicate.  */
14667   gcc_assert ((*gimple_test_f) (*expr_p));
14668 
14669   if (!gimple_seq_empty_p (internal_post))
14670     {
14671       annotate_all_with_location (internal_post, input_location);
14672       gimplify_seq_add_seq (pre_p, internal_post);
14673     }
14674 
14675  out:
14676   input_location = saved_location;
14677   return ret;
14678 }
14679 
14680 /* Like gimplify_expr but make sure the gimplified result is not itself
14681    a SSA name (but a decl if it were).  Temporaries required by
14682    evaluating *EXPR_P may be still SSA names.  */
14683 
14684 static enum gimplify_status
gimplify_expr(tree * expr_p,gimple_seq * pre_p,gimple_seq * post_p,bool (* gimple_test_f)(tree),fallback_t fallback,bool allow_ssa)14685 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
14686 	       bool (*gimple_test_f) (tree), fallback_t fallback,
14687 	       bool allow_ssa)
14688 {
14689   bool was_ssa_name_p = TREE_CODE (*expr_p) == SSA_NAME;
14690   enum gimplify_status ret = gimplify_expr (expr_p, pre_p, post_p,
14691 					    gimple_test_f, fallback);
14692   if (! allow_ssa
14693       && TREE_CODE (*expr_p) == SSA_NAME)
14694     {
14695       tree name = *expr_p;
14696       if (was_ssa_name_p)
14697 	*expr_p = get_initialized_tmp_var (*expr_p, pre_p, NULL, false);
14698       else
14699 	{
14700 	  /* Avoid the extra copy if possible.  */
14701 	  *expr_p = create_tmp_reg (TREE_TYPE (name));
14702 	  if (!gimple_nop_p (SSA_NAME_DEF_STMT (name)))
14703 	    gimple_set_lhs (SSA_NAME_DEF_STMT (name), *expr_p);
14704 	  release_ssa_name (name);
14705 	}
14706     }
14707   return ret;
14708 }
14709 
14710 /* Look through TYPE for variable-sized objects and gimplify each such
14711    size that we find.  Add to LIST_P any statements generated.  */
14712 
14713 void
gimplify_type_sizes(tree type,gimple_seq * list_p)14714 gimplify_type_sizes (tree type, gimple_seq *list_p)
14715 {
14716   tree field, t;
14717 
14718   if (type == NULL || type == error_mark_node)
14719     return;
14720 
14721   /* We first do the main variant, then copy into any other variants.  */
14722   type = TYPE_MAIN_VARIANT (type);
14723 
14724   /* Avoid infinite recursion.  */
14725   if (TYPE_SIZES_GIMPLIFIED (type))
14726     return;
14727 
14728   TYPE_SIZES_GIMPLIFIED (type) = 1;
14729 
14730   switch (TREE_CODE (type))
14731     {
14732     case INTEGER_TYPE:
14733     case ENUMERAL_TYPE:
14734     case BOOLEAN_TYPE:
14735     case REAL_TYPE:
14736     case FIXED_POINT_TYPE:
14737       gimplify_one_sizepos (&TYPE_MIN_VALUE (type), list_p);
14738       gimplify_one_sizepos (&TYPE_MAX_VALUE (type), list_p);
14739 
14740       for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
14741 	{
14742 	  TYPE_MIN_VALUE (t) = TYPE_MIN_VALUE (type);
14743 	  TYPE_MAX_VALUE (t) = TYPE_MAX_VALUE (type);
14744 	}
14745       break;
14746 
14747     case ARRAY_TYPE:
14748       /* These types may not have declarations, so handle them here.  */
14749       gimplify_type_sizes (TREE_TYPE (type), list_p);
14750       gimplify_type_sizes (TYPE_DOMAIN (type), list_p);
14751       /* Ensure VLA bounds aren't removed, for -O0 they should be variables
14752 	 with assigned stack slots, for -O1+ -g they should be tracked
14753 	 by VTA.  */
14754       if (!(TYPE_NAME (type)
14755 	    && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
14756 	    && DECL_IGNORED_P (TYPE_NAME (type)))
14757 	  && TYPE_DOMAIN (type)
14758 	  && INTEGRAL_TYPE_P (TYPE_DOMAIN (type)))
14759 	{
14760 	  t = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
14761 	  if (t && VAR_P (t) && DECL_ARTIFICIAL (t))
14762 	    DECL_IGNORED_P (t) = 0;
14763 	  t = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
14764 	  if (t && VAR_P (t) && DECL_ARTIFICIAL (t))
14765 	    DECL_IGNORED_P (t) = 0;
14766 	}
14767       break;
14768 
14769     case RECORD_TYPE:
14770     case UNION_TYPE:
14771     case QUAL_UNION_TYPE:
14772       for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
14773 	if (TREE_CODE (field) == FIELD_DECL)
14774 	  {
14775 	    gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), list_p);
14776 	    gimplify_one_sizepos (&DECL_SIZE (field), list_p);
14777 	    gimplify_one_sizepos (&DECL_SIZE_UNIT (field), list_p);
14778 	    gimplify_type_sizes (TREE_TYPE (field), list_p);
14779 	  }
14780       break;
14781 
14782     case POINTER_TYPE:
14783     case REFERENCE_TYPE:
14784 	/* We used to recurse on the pointed-to type here, which turned out to
14785 	   be incorrect because its definition might refer to variables not
14786 	   yet initialized at this point if a forward declaration is involved.
14787 
14788 	   It was actually useful for anonymous pointed-to types to ensure
14789 	   that the sizes evaluation dominates every possible later use of the
14790 	   values.  Restricting to such types here would be safe since there
14791 	   is no possible forward declaration around, but would introduce an
14792 	   undesirable middle-end semantic to anonymity.  We then defer to
14793 	   front-ends the responsibility of ensuring that the sizes are
14794 	   evaluated both early and late enough, e.g. by attaching artificial
14795 	   type declarations to the tree.  */
14796       break;
14797 
14798     default:
14799       break;
14800     }
14801 
14802   gimplify_one_sizepos (&TYPE_SIZE (type), list_p);
14803   gimplify_one_sizepos (&TYPE_SIZE_UNIT (type), list_p);
14804 
14805   for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
14806     {
14807       TYPE_SIZE (t) = TYPE_SIZE (type);
14808       TYPE_SIZE_UNIT (t) = TYPE_SIZE_UNIT (type);
14809       TYPE_SIZES_GIMPLIFIED (t) = 1;
14810     }
14811 }
14812 
/* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
   a size or position, has had all of its SAVE_EXPRs evaluated.
   We add any required statements to *STMT_P.  */

void
gimplify_one_sizepos (tree *expr_p, gimple_seq *stmt_p)
{
  tree expr = *expr_p;

  /* We don't do anything if the value isn't there, is constant, or contains
     A PLACEHOLDER_EXPR.  We also don't want to do anything if it's already
     a VAR_DECL.  If it's a VAR_DECL from another function, the gimplifier
     will want to replace it with a new variable, but that will cause problems
     if this type is from outside the function.  It's OK to have that here.  */
  if (expr == NULL_TREE
      || is_gimple_constant (expr)
      || TREE_CODE (expr) == VAR_DECL
      || CONTAINS_PLACEHOLDER_P (expr))
    return;

  /* Work on an unshared copy so gimplification cannot clobber trees that
     other types or declarations still point to.  */
  *expr_p = unshare_expr (expr);

  /* SSA names in decl/type fields are a bad idea - they'll get reclaimed
     if the def vanishes.  */
  gimplify_expr (expr_p, stmt_p, NULL, is_gimple_val, fb_rvalue, false);

  /* If expr wasn't already is_gimple_sizepos or is_gimple_constant from the
     FE, ensure that it is a VAR_DECL, otherwise we might handle some decls
     as gimplify_vla_decl even when they would have all sizes INTEGER_CSTs.  */
  if (is_gimple_constant (*expr_p))
    *expr_p = get_initialized_tmp_var (*expr_p, stmt_p, NULL, false);
}
14845 
/* Gimplify the body of statements of FNDECL and return a GIMPLE_BIND node
   containing the sequence of corresponding GIMPLE statements.  If DO_PARMS
   is true, also gimplify the parameters.  */

gbind *
gimplify_body (tree fndecl, bool do_parms)
{
  location_t saved_location = input_location;
  gimple_seq parm_stmts, parm_cleanup = NULL, seq;
  gimple *outer_stmt;
  gbind *outer_bind;

  timevar_push (TV_TREE_GIMPLIFY);

  init_tree_ssa (cfun);

  /* Initialize for optimize_insn_for_s{ize,peed}_p possibly called during
     gimplification.  */
  default_rtl_profile ();

  /* There must be no gimplification context active yet; this function
     creates and later destroys the outermost one.  */
  gcc_assert (gimplify_ctxp == NULL);
  push_gimplify_context (true);

  if (flag_openacc || flag_openmp)
    {
      gcc_assert (gimplify_omp_ctxp == NULL);
      /* For "omp declare target" functions, open an implicit target
	 context so OMP constructs in the body are gimplified as if
	 nested in a target region.  */
      if (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (fndecl)))
	gimplify_omp_ctxp = new_omp_context (ORT_IMPLICIT_TARGET);
    }

  /* Unshare most shared trees in the body and in that of any nested functions.
     It would seem we don't have to do this for nested functions because
     they are supposed to be output and then the outer function gimplified
     first, but the g++ front end doesn't always do it that way.  */
  unshare_body (fndecl);
  unvisit_body (fndecl);

  /* Make sure input_location isn't set to something weird.  */
  input_location = DECL_SOURCE_LOCATION (fndecl);

  /* Resolve callee-copies.  This has to be done before processing
     the body so that DECL_VALUE_EXPR gets processed correctly.  */
  parm_stmts = do_parms ? gimplify_parameters (&parm_cleanup) : NULL;

  /* Gimplify the function's body.  */
  seq = NULL;
  gimplify_stmt (&DECL_SAVED_TREE (fndecl), &seq);
  outer_stmt = gimple_seq_first_nondebug_stmt (seq);
  /* An empty body (no non-debug statements) becomes a single GIMPLE_NOP
     so there is always an outer statement to wrap below.  */
  if (!outer_stmt)
    {
      outer_stmt = gimple_build_nop ();
      gimplify_seq_add_stmt (&seq, outer_stmt);
    }

  /* The body must contain exactly one statement, a GIMPLE_BIND.  If this is
     not the case, wrap everything in a GIMPLE_BIND to make it so.  */
  if (gimple_code (outer_stmt) == GIMPLE_BIND
      && (gimple_seq_first_nondebug_stmt (seq)
	  == gimple_seq_last_nondebug_stmt (seq)))
    {
      outer_bind = as_a <gbind *> (outer_stmt);
      if (gimple_seq_first_stmt (seq) != outer_stmt
	  || gimple_seq_last_stmt (seq) != outer_stmt)
	{
	  /* If there are debug stmts before or after outer_stmt, move them
	     inside of outer_bind body.  */
	  gimple_stmt_iterator gsi = gsi_for_stmt (outer_stmt, &seq);
	  gimple_seq second_seq = NULL;
	  if (gimple_seq_first_stmt (seq) != outer_stmt
	      && gimple_seq_last_stmt (seq) != outer_stmt)
	    {
	      /* Debug stmts on both sides: split the sequence after the
		 bind, then detach the bind itself.  */
	      second_seq = gsi_split_seq_after (gsi);
	      gsi_remove (&gsi, false);
	    }
	  else if (gimple_seq_first_stmt (seq) != outer_stmt)
	    /* Debug stmts only before the bind.  */
	    gsi_remove (&gsi, false);
	  else
	    {
	      /* Debug stmts only after the bind.  */
	      gsi_remove (&gsi, false);
	      second_seq = seq;
	      seq = NULL;
	    }
	  /* Reassemble: leading debug stmts + old bind body + trailing
	     debug stmts all become the new bind body.  */
	  gimple_seq_add_seq_without_update (&seq,
					     gimple_bind_body (outer_bind));
	  gimple_seq_add_seq_without_update (&seq, second_seq);
	  gimple_bind_set_body (outer_bind, seq);
	}
    }
  else
    outer_bind = gimple_build_bind (NULL_TREE, seq, NULL);

  DECL_SAVED_TREE (fndecl) = NULL_TREE;

  /* If we had callee-copies statements, insert them at the beginning
     of the function and clear DECL_VALUE_EXPR_P on the parameters.  */
  if (!gimple_seq_empty_p (parm_stmts))
    {
      tree parm;

      gimplify_seq_add_seq (&parm_stmts, gimple_bind_body (outer_bind));
      if (parm_cleanup)
	{
	  /* Guard the parameter setup with a try/finally running the
	     cleanups gimplify_parameters collected.  */
	  gtry *g = gimple_build_try (parm_stmts, parm_cleanup,
				      GIMPLE_TRY_FINALLY);
	  parm_stmts = NULL;
	  gimple_seq_add_stmt (&parm_stmts, g);
	}
      gimple_bind_set_body (outer_bind, parm_stmts);

      for (parm = DECL_ARGUMENTS (current_function_decl);
	   parm; parm = DECL_CHAIN (parm))
	if (DECL_HAS_VALUE_EXPR_P (parm))
	  {
	    DECL_HAS_VALUE_EXPR_P (parm) = 0;
	    DECL_IGNORED_P (parm) = 0;
	  }
    }

  /* Tear down the implicit target context opened above, if any.  */
  if ((flag_openacc || flag_openmp || flag_openmp_simd)
      && gimplify_omp_ctxp)
    {
      delete_omp_context (gimplify_omp_ctxp);
      gimplify_omp_ctxp = NULL;
    }

  pop_gimplify_context (outer_bind);
  gcc_assert (gimplify_ctxp == NULL);

  if (flag_checking && !seen_error ())
    verify_gimple_in_seq (gimple_bind_body (outer_bind));

  timevar_pop (TV_TREE_GIMPLIFY);
  input_location = saved_location;

  return outer_bind;
}
14982 
14983 typedef char *char_p; /* For DEF_VEC_P.  */
14984 
14985 /* Return whether we should exclude FNDECL from instrumentation.  */
14986 
14987 static bool
flag_instrument_functions_exclude_p(tree fndecl)14988 flag_instrument_functions_exclude_p (tree fndecl)
14989 {
14990   vec<char_p> *v;
14991 
14992   v = (vec<char_p> *) flag_instrument_functions_exclude_functions;
14993   if (v && v->length () > 0)
14994     {
14995       const char *name;
14996       int i;
14997       char *s;
14998 
14999       name = lang_hooks.decl_printable_name (fndecl, 1);
15000       FOR_EACH_VEC_ELT (*v, i, s)
15001 	if (strstr (name, s) != NULL)
15002 	  return true;
15003     }
15004 
15005   v = (vec<char_p> *) flag_instrument_functions_exclude_files;
15006   if (v && v->length () > 0)
15007     {
15008       const char *name;
15009       int i;
15010       char *s;
15011 
15012       name = DECL_SOURCE_FILE (fndecl);
15013       FOR_EACH_VEC_ELT (*v, i, s)
15014 	if (strstr (name, s) != NULL)
15015 	  return true;
15016     }
15017 
15018   return false;
15019 }
15020 
/* Entry point to the gimplification pass.  FNDECL is the FUNCTION_DECL
   node for the function we want to gimplify.

   Return the sequence of GIMPLE statements corresponding to the body
   of FNDECL.  */

void
gimplify_function_tree (tree fndecl)
{
  tree parm, ret;
  gimple_seq seq;
  gbind *bind;

  /* The function must not have been gimplified already.  */
  gcc_assert (!gimple_body (fndecl));

  if (DECL_STRUCT_FUNCTION (fndecl))
    push_cfun (DECL_STRUCT_FUNCTION (fndecl));
  else
    push_struct_function (fndecl);

  /* Tentatively set PROP_gimple_lva here, and reset it in gimplify_va_arg_expr
     if necessary.  */
  cfun->curr_properties |= PROP_gimple_lva;

  for (parm = DECL_ARGUMENTS (fndecl); parm ; parm = DECL_CHAIN (parm))
    {
      /* Preliminarily mark non-addressed complex variables as eligible
         for promotion to gimple registers.  We'll transform their uses
         as we find them.  */
      if ((TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
	   || TREE_CODE (TREE_TYPE (parm)) == VECTOR_TYPE)
          && !TREE_THIS_VOLATILE (parm)
          && !needs_to_live_in_memory (parm))
        DECL_GIMPLE_REG_P (parm) = 1;
    }

  /* Likewise for the return value.  */
  ret = DECL_RESULT (fndecl);
  if ((TREE_CODE (TREE_TYPE (ret)) == COMPLEX_TYPE
       || TREE_CODE (TREE_TYPE (ret)) == VECTOR_TYPE)
      && !needs_to_live_in_memory (ret))
    DECL_GIMPLE_REG_P (ret) = 1;

  /* Scratch set used by use-after-scope ASan instrumentation while the
     body is gimplified; discarded right afterwards.  */
  if (asan_sanitize_use_after_scope () && sanitize_flags_p (SANITIZE_ADDRESS))
    asan_poisoned_variables = new hash_set<tree> ();
  bind = gimplify_body (fndecl, true);
  if (asan_poisoned_variables)
    {
      delete asan_poisoned_variables;
      asan_poisoned_variables = NULL;
    }

  /* The tree body of the function is no longer needed, replace it
     with the new GIMPLE body.  */
  seq = NULL;
  gimple_seq_add_stmt (&seq, bind);
  gimple_set_body (fndecl, seq);

  /* If we're instrumenting function entry/exit, then prepend the call to
     the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
     catch the exit hook.  */
  /* ??? Add some way to ignore exceptions for this TFE.  */
  if (flag_instrument_function_entry_exit
      && !DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl)
      /* Do not instrument extern inline functions.  */
      && !(DECL_DECLARED_INLINE_P (fndecl)
	   && DECL_EXTERNAL (fndecl)
	   && DECL_DISREGARD_INLINE_LIMITS (fndecl))
      && !flag_instrument_functions_exclude_p (fndecl))
    {
      tree x;
      gbind *new_bind;
      gimple *tf;
      gimple_seq cleanup = NULL, body = NULL;
      tree tmp_var, this_fn_addr;
      gcall *call;

      /* The instrumentation hooks aren't going to call the instrumented
	 function and the address they receive is expected to be matchable
	 against symbol addresses.  Make sure we don't create a trampoline,
	 in case the current function is nested.  */
      this_fn_addr = build_fold_addr_expr (current_function_decl);
      TREE_NO_TRAMPOLINE (this_fn_addr) = 1;

      /* cleanup: __builtin_return_address (0) followed by
	 __cyg_profile_func_exit (this_fn, return_addr).  */
      x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
      call = gimple_build_call (x, 1, integer_zero_node);
      tmp_var = create_tmp_var (ptr_type_node, "return_addr");
      gimple_call_set_lhs (call, tmp_var);
      gimplify_seq_add_stmt (&cleanup, call);
      x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_EXIT);
      call = gimple_build_call (x, 2, this_fn_addr, tmp_var);
      gimplify_seq_add_stmt (&cleanup, call);
      tf = gimple_build_try (seq, cleanup, GIMPLE_TRY_FINALLY);

      /* body: the matching __cyg_profile_func_enter call, then the
	 original body wrapped in the try/finally above.  */
      x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
      call = gimple_build_call (x, 1, integer_zero_node);
      tmp_var = create_tmp_var (ptr_type_node, "return_addr");
      gimple_call_set_lhs (call, tmp_var);
      gimplify_seq_add_stmt (&body, call);
      x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_ENTER);
      call = gimple_build_call (x, 2, this_fn_addr, tmp_var);
      gimplify_seq_add_stmt (&body, call);
      gimplify_seq_add_stmt (&body, tf);
      new_bind = gimple_build_bind (NULL, body, NULL);

      /* Replace the current function body with the body
         wrapped in the try/finally TF.  */
      seq = NULL;
      gimple_seq_add_stmt (&seq, new_bind);
      gimple_set_body (fndecl, seq);
      bind = new_bind;
    }

  /* For -fsanitize=thread, wrap the body so IFN_TSAN_FUNC_EXIT runs on
     every exit path via a try/finally.  */
  if (sanitize_flags_p (SANITIZE_THREAD))
    {
      gcall *call = gimple_build_call_internal (IFN_TSAN_FUNC_EXIT, 0);
      gimple *tf = gimple_build_try (seq, call, GIMPLE_TRY_FINALLY);
      gbind *new_bind = gimple_build_bind (NULL, tf, NULL);
      /* Replace the current function body with the body
	 wrapped in the try/finally TF.  */
      seq = NULL;
      gimple_seq_add_stmt (&seq, new_bind);
      gimple_set_body (fndecl, seq);
    }

  DECL_SAVED_TREE (fndecl) = NULL_TREE;
  cfun->curr_properties |= PROP_gimple_any;

  pop_cfun ();

  dump_function (TDI_gimple, fndecl);
}
15152 
15153 /* Return a dummy expression of type TYPE in order to keep going after an
15154    error.  */
15155 
15156 static tree
dummy_object(tree type)15157 dummy_object (tree type)
15158 {
15159   tree t = build_int_cst (build_pointer_type (type), 0);
15160   return build2 (MEM_REF, type, t, t);
15161 }
15162 
/* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
   builtin function, but a very special sort of operator.  */

enum gimplify_status
gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p,
		      gimple_seq *post_p ATTRIBUTE_UNUSED)
{
  tree promoted_type, have_va_type;
  tree valist = TREE_OPERAND (*expr_p, 0);
  tree type = TREE_TYPE (*expr_p);
  tree t, tag, aptag;
  location_t loc = EXPR_LOCATION (*expr_p);

  /* Verify that valist is of the proper type.  */
  have_va_type = TREE_TYPE (valist);
  if (have_va_type == error_mark_node)
    return GS_ERROR;
  have_va_type = targetm.canonical_va_list_type (have_va_type);
  if (have_va_type == NULL_TREE
      && POINTER_TYPE_P (TREE_TYPE (valist)))
    /* Handle 'Case 1: Not an array type' from c-common.c/build_va_arg.  */
    have_va_type
      = targetm.canonical_va_list_type (TREE_TYPE (TREE_TYPE (valist)));
  gcc_assert (have_va_type != NULL_TREE);

  /* Generate a diagnostic for requesting data of a type that cannot
     be passed through `...' due to type promotion at the call site.  */
  if ((promoted_type = lang_hooks.types.type_promotes_to (type))
	   != type)
    {
      static bool gave_help;
      bool warned;
      /* Use the expansion point to handle cases such as passing bool (defined
	 in a system header) through `...'.  */
      location_t xloc
	= expansion_point_location_if_in_system_header (loc);

      /* Unfortunately, this is merely undefined, rather than a constraint
	 violation, so we cannot make this an error.  If this call is never
	 executed, the program is still strictly conforming.  */
      auto_diagnostic_group d;
      warned = warning_at (xloc, 0,
			   "%qT is promoted to %qT when passed through %<...%>",
			   type, promoted_type);
      /* The extra hint is only printed once per compilation.  */
      if (!gave_help && warned)
	{
	  gave_help = true;
	  inform (xloc, "(so you should pass %qT not %qT to %<va_arg%>)",
		  promoted_type, type);
	}

      /* We can, however, treat "undefined" any way we please.
	 Call abort to encourage the user to fix the program.  */
      if (warned)
	inform (xloc, "if this code is reached, the program will abort");
      /* Before the abort, allow the evaluation of the va_list
	 expression to exit or longjmp.  */
      gimplify_and_add (valist, pre_p);
      t = build_call_expr_loc (loc,
			       builtin_decl_implicit (BUILT_IN_TRAP), 0);
      gimplify_and_add (t, pre_p);

      /* This is dead code, but go ahead and finish so that the
	 mode of the result comes out right.  */
      *expr_p = dummy_object (type);
      return GS_ALL_DONE;
    }

  /* Lower to the IFN_VA_ARG internal function.  The zero pointer
     constants TAG and APTAG encode, via their types, the requested
     type and the va_list type for the later expansion.  */
  tag = build_int_cst (build_pointer_type (type), 0);
  aptag = build_int_cst (TREE_TYPE (valist), 0);

  *expr_p = build_call_expr_internal_loc (loc, IFN_VA_ARG, type, 3,
					  valist, tag, aptag);

  /* Clear the tentatively set PROP_gimple_lva, to indicate that IFN_VA_ARG
     needs to be expanded.  */
  cfun->curr_properties &= ~PROP_gimple_lva;

  return GS_OK;
}
15243 
15244 /* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.
15245 
15246    DST/SRC are the destination and source respectively.  You can pass
15247    ungimplified trees in DST or SRC, in which case they will be
15248    converted to a gimple operand if necessary.
15249 
15250    This function returns the newly created GIMPLE_ASSIGN tuple.  */
15251 
15252 gimple *
gimplify_assign(tree dst,tree src,gimple_seq * seq_p)15253 gimplify_assign (tree dst, tree src, gimple_seq *seq_p)
15254 {
15255   tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
15256   gimplify_and_add (t, seq_p);
15257   ggc_free (t);
15258   return gimple_seq_last_stmt (*seq_p);
15259 }
15260 
15261 inline hashval_t
hash(const elt_t * p)15262 gimplify_hasher::hash (const elt_t *p)
15263 {
15264   tree t = p->val;
15265   return iterative_hash_expr (t, 0);
15266 }
15267 
15268 inline bool
equal(const elt_t * p1,const elt_t * p2)15269 gimplify_hasher::equal (const elt_t *p1, const elt_t *p2)
15270 {
15271   tree t1 = p1->val;
15272   tree t2 = p2->val;
15273   enum tree_code code = TREE_CODE (t1);
15274 
15275   if (TREE_CODE (t2) != code
15276       || TREE_TYPE (t1) != TREE_TYPE (t2))
15277     return false;
15278 
15279   if (!operand_equal_p (t1, t2, 0))
15280     return false;
15281 
15282   /* Only allow them to compare equal if they also hash equal; otherwise
15283      results are nondeterminate, and we fail bootstrap comparison.  */
15284   gcc_checking_assert (hash (p1) == hash (p2));
15285 
15286   return true;
15287 }
15288