xref: /netbsd/external/gpl3/gcc/dist/gcc/gimplify.c (revision 5ef59e75)
1 /* Tree lowering pass.  This pass converts the GENERIC functions-as-trees
2    tree representation into the GIMPLE form.
3    Copyright (C) 2002-2015 Free Software Foundation, Inc.
4    Major work done by Sebastian Pop <s.pop@laposte.net>,
5    Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.
6 
7 This file is part of GCC.
8 
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13 
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
17 for more details.
18 
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3.  If not see
21 <http://www.gnu.org/licenses/>.  */
22 
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "hash-set.h"
27 #include "machmode.h"
28 #include "vec.h"
29 #include "double-int.h"
30 #include "input.h"
31 #include "alias.h"
32 #include "symtab.h"
33 #include "options.h"
34 #include "wide-int.h"
35 #include "inchash.h"
36 #include "tree.h"
37 #include "fold-const.h"
38 #include "hashtab.h"
39 #include "tm.h"
40 #include "hard-reg-set.h"
41 #include "function.h"
42 #include "rtl.h"
43 #include "flags.h"
44 #include "statistics.h"
45 #include "real.h"
46 #include "fixed-value.h"
47 #include "insn-config.h"
48 #include "expmed.h"
49 #include "dojump.h"
50 #include "explow.h"
51 #include "calls.h"
52 #include "emit-rtl.h"
53 #include "varasm.h"
54 #include "stmt.h"
55 #include "expr.h"
56 #include "predict.h"
57 #include "basic-block.h"
58 #include "tree-ssa-alias.h"
59 #include "internal-fn.h"
60 #include "gimple-fold.h"
61 #include "tree-eh.h"
62 #include "gimple-expr.h"
63 #include "is-a.h"
64 #include "gimple.h"
65 #include "gimplify.h"
66 #include "gimple-iterator.h"
67 #include "stringpool.h"
68 #include "stor-layout.h"
69 #include "print-tree.h"
70 #include "tree-iterator.h"
71 #include "tree-inline.h"
72 #include "tree-pretty-print.h"
73 #include "langhooks.h"
74 #include "bitmap.h"
75 #include "gimple-ssa.h"
76 #include "hash-map.h"
77 #include "plugin-api.h"
78 #include "ipa-ref.h"
79 #include "cgraph.h"
80 #include "tree-cfg.h"
81 #include "tree-ssanames.h"
82 #include "tree-ssa.h"
83 #include "diagnostic-core.h"
84 #include "target.h"
85 #include "splay-tree.h"
86 #include "omp-low.h"
87 #include "gimple-low.h"
88 #include "cilk.h"
89 #include "gomp-constants.h"
90 
91 #include "langhooks-def.h"	/* FIXME: for lhd_set_decl_assembler_name */
92 #include "tree-pass.h"		/* FIXME: only for PROP_gimple_any */
93 #include "builtins.h"
94 
/* Per-variable data-sharing flags stored in the VARIABLES splay tree of a
   gimplify_omp_ctx.  The low-order values form the data-sharing class
   (see GOVD_DATA_SHARE_CLASS); the remaining bits are modifier flags.  */

enum gimplify_omp_var_data
{
  GOVD_SEEN = 1,
  GOVD_EXPLICIT = 2,
  GOVD_SHARED = 4,
  GOVD_PRIVATE = 8,
  GOVD_FIRSTPRIVATE = 16,
  GOVD_LASTPRIVATE = 32,
  GOVD_REDUCTION = 64,
  GOVD_LOCAL = 128,
  GOVD_MAP = 256,
  GOVD_DEBUG_PRIVATE = 512,
  GOVD_PRIVATE_OUTER_REF = 1024,
  GOVD_LINEAR = 2048,
  GOVD_ALIGNED = 4096,

  /* Flag for GOVD_MAP: don't copy back.  */
  GOVD_MAP_TO_ONLY = 8192,

  /* Flag for GOVD_LINEAR or GOVD_LASTPRIVATE: no outer reference.  */
  GOVD_LINEAR_LASTPRIVATE_NO_OUTER = 16384,

  /* Mask selecting the data-sharing class bits out of a flag word.  */
  GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE
			   | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LINEAR
			   | GOVD_LOCAL)
};
121 
122 
/* Kinds of OMP regions tracked during gimplification.  The values are
   laid out so that related kinds can be tested with bit masks (e.g.
   "(region_type & ORT_TASK)" matches both ORT_TASK and ORT_UNTIED_TASK,
   and the COMBINED variants are the base value with bit 0 set).  */

enum omp_region_type
{
  ORT_WORKSHARE = 0,
  ORT_SIMD = 1,
  ORT_PARALLEL = 2,
  ORT_COMBINED_PARALLEL = 3,
  ORT_TASK = 4,
  ORT_UNTIED_TASK = 5,
  ORT_TEAMS = 8,
  ORT_COMBINED_TEAMS = 9,
  /* Data region.  */
  ORT_TARGET_DATA = 16,
  /* Data region with offloading.  */
  ORT_TARGET = 32
};
138 
/* Gimplify hashtable helper: hash/compare traits for the formal-temporary
   table (see gimplify_ctx::temp_htab and lookup_tmp_var).  Entries are
   freed with free () on removal via typed_free_remove.  */

struct gimplify_hasher : typed_free_remove <elt_t>
{
  typedef elt_t value_type;
  typedef elt_t compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
};
148 
/* State for one level of gimplification; levels form a stack linked
   through prev_context (see push_gimplify_context).  */

struct gimplify_ctx
{
  /* Enclosing gimplification context, if any.  */
  struct gimplify_ctx *prev_context;

  /* Stack of GIMPLE_BIND statements currently open
     (see gimple_push_bind_expr).  */
  vec<gbind *> bind_expr_stack;
  /* Chain of temporaries created in this context
     (see gimple_add_tmp_var).  */
  tree temps;
  /* Cleanups seen while inside a conditional context; flushed to the
     prequeue when the outermost COND_EXPR is left.  */
  gimple_seq conditional_cleanups;
  tree exit_label;
  tree return_temp;

  vec<tree> case_labels;
  /* The formal temporary table.  Should this be persistent?  */
  hash_table<gimplify_hasher> *temp_htab;

  /* Depth of nested COND_EXPRs (see gimple_push_condition).  */
  int conditions;
  bool save_stack;
  /* True when gimplifying directly into SSA form.  */
  bool into_ssa;
  bool allow_rhs_cond_expr;
  bool in_cleanup_point_expr;
};
169 
/* State for one OMP region being gimplified; regions nest through
   outer_context (see new_omp_context).  */

struct gimplify_omp_ctx
{
  /* Enclosing OMP region, if any.  */
  struct gimplify_omp_ctx *outer_context;
  /* Map from DECL (keyed by DECL_UID) to GOVD_* data-sharing flags.  */
  splay_tree variables;
  /* Types already privatized in this region.  */
  hash_set<tree> *privatized_types;
  location_t location;
  enum omp_clause_default_kind default_kind;
  enum omp_region_type region_type;
  bool combined_loop;
  bool distribute;
};
181 
/* The innermost gimplification context and innermost OMP region; each
   forms a stack through its prev/outer pointer.  */
static struct gimplify_ctx *gimplify_ctxp;
static struct gimplify_omp_ctx *gimplify_omp_ctxp;

/* Forward declaration.  */
static enum gimplify_status gimplify_compound_expr (tree *, gimple_seq *, bool);
187 
/* Shorter alias name for the above function for use in gimplify.c
   only.  Appends GS to *SEQ_P without scanning its operands (def/use
   vectors do not exist yet during gimplification).  */

static inline void
gimplify_seq_add_stmt (gimple_seq *seq_p, gimple gs)
{
  gimple_seq_add_stmt_without_update (seq_p, gs);
}
196 
197 /* Append sequence SRC to the end of sequence *DST_P.  If *DST_P is
198    NULL, a new sequence is allocated.   This function is
199    similar to gimple_seq_add_seq, but does not scan the operands.
200    During gimplification, we need to manipulate statement sequences
201    before the def/use vectors have been constructed.  */
202 
203 static void
204 gimplify_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
205 {
206   gimple_stmt_iterator si;
207 
208   if (src == NULL)
209     return;
210 
211   si = gsi_last (*dst_p);
212   gsi_insert_seq_after_without_update (&si, src, GSI_NEW_STMT);
213 }
214 
215 
/* Pointer to a list of allocated gimplify_ctx structs to be used for pushing
   and popping gimplify contexts.  Freed structs are kept on this free list
   (linked through prev_context) and reused by ctx_alloc.  */

static struct gimplify_ctx *ctx_pool = NULL;
220 
221 /* Return a gimplify context struct from the pool.  */
222 
223 static inline struct gimplify_ctx *
224 ctx_alloc (void)
225 {
226   struct gimplify_ctx * c = ctx_pool;
227 
228   if (c)
229     ctx_pool = c->prev_context;
230   else
231     c = XNEW (struct gimplify_ctx);
232 
233   memset (c, '\0', sizeof (*c));
234   return c;
235 }
236 
/* Put gimplify context C back into the pool.  The struct is not freed;
   it is pushed onto the ctx_pool free list for reuse by ctx_alloc.  */

static inline void
ctx_free (struct gimplify_ctx *c)
{
  c->prev_context = ctx_pool;
  ctx_pool = c;
}
245 
246 /* Free allocated ctx stack memory.  */
247 
248 void
249 free_gimplify_stack (void)
250 {
251   struct gimplify_ctx *c;
252 
253   while ((c = ctx_pool))
254     {
255       ctx_pool = c->prev_context;
256       free (c);
257     }
258 }
259 
260 
261 /* Set up a context for the gimplifier.  */
262 
263 void
264 push_gimplify_context (bool in_ssa, bool rhs_cond_ok)
265 {
266   struct gimplify_ctx *c = ctx_alloc ();
267 
268   c->prev_context = gimplify_ctxp;
269   gimplify_ctxp = c;
270   gimplify_ctxp->into_ssa = in_ssa;
271   gimplify_ctxp->allow_rhs_cond_expr = rhs_cond_ok;
272 }
273 
274 /* Tear down a context for the gimplifier.  If BODY is non-null, then
275    put the temporaries into the outer BIND_EXPR.  Otherwise, put them
276    in the local_decls.
277 
278    BODY is not a sequence, but the first tuple in a sequence.  */
279 
280 void
281 pop_gimplify_context (gimple body)
282 {
283   struct gimplify_ctx *c = gimplify_ctxp;
284 
285   gcc_assert (c
286               && (!c->bind_expr_stack.exists ()
287 		  || c->bind_expr_stack.is_empty ()));
288   c->bind_expr_stack.release ();
289   gimplify_ctxp = c->prev_context;
290 
291   if (body)
292     declare_vars (c->temps, body, false);
293   else
294     record_vars (c->temps);
295 
296   delete c->temp_htab;
297   c->temp_htab = NULL;
298   ctx_free (c);
299 }
300 
/* Push a GIMPLE_BIND tuple onto the stack of bindings.  The reserve
   call pre-grows the vector to reduce reallocations.  */

static void
gimple_push_bind_expr (gbind *bind_stmt)
{
  gimplify_ctxp->bind_expr_stack.reserve (8);
  gimplify_ctxp->bind_expr_stack.safe_push (bind_stmt);
}
309 
/* Pop the innermost element off the stack of bindings.  */

static void
gimple_pop_bind_expr (void)
{
  gimplify_ctxp->bind_expr_stack.pop ();
}
317 
/* Return the innermost (most recently pushed) element of the stack of
   bindings.  */

gbind *
gimple_current_bind_expr (void)
{
  return gimplify_ctxp->bind_expr_stack.last ();
}
325 
/* Return the stack of bindings created during gimplification
   (returned by value; the vec shares storage with the context).  */

vec<gbind *>
gimple_bind_expr_stack (void)
{
  return gimplify_ctxp->bind_expr_stack;
}
333 
/* Return true iff there is a COND_EXPR between us and the innermost
   CLEANUP_POINT_EXPR.  This info is used by gimple_push_cleanup.  */

static bool
gimple_conditional_context (void)
{
  return gimplify_ctxp->conditions > 0;
}
342 
/* Note that we've entered a COND_EXPR by bumping the nesting counter.  */

static void
gimple_push_condition (void)
{
#ifdef ENABLE_GIMPLE_CHECKING
  /* On entry to the outermost conditional, no cleanups may be pending.  */
  if (gimplify_ctxp->conditions == 0)
    gcc_assert (gimple_seq_empty_p (gimplify_ctxp->conditional_cleanups));
#endif
  ++(gimplify_ctxp->conditions);
}
354 
355 /* Note that we've left a COND_EXPR.  If we're back at unconditional scope
356    now, add any conditional cleanups we've seen to the prequeue.  */
357 
358 static void
359 gimple_pop_condition (gimple_seq *pre_p)
360 {
361   int conds = --(gimplify_ctxp->conditions);
362 
363   gcc_assert (conds >= 0);
364   if (conds == 0)
365     {
366       gimplify_seq_add_seq (pre_p, gimplify_ctxp->conditional_cleanups);
367       gimplify_ctxp->conditional_cleanups = NULL;
368     }
369 }
370 
371 /* A stable comparison routine for use with splay trees and DECLs.  */
372 
373 static int
374 splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
375 {
376   tree a = (tree) xa;
377   tree b = (tree) xb;
378 
379   return DECL_UID (a) - DECL_UID (b);
380 }
381 
382 /* Create a new omp construct that deals with variable remapping.  */
383 
384 static struct gimplify_omp_ctx *
385 new_omp_context (enum omp_region_type region_type)
386 {
387   struct gimplify_omp_ctx *c;
388 
389   c = XCNEW (struct gimplify_omp_ctx);
390   c->outer_context = gimplify_omp_ctxp;
391   c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
392   c->privatized_types = new hash_set<tree>;
393   c->location = input_location;
394   c->region_type = region_type;
395   if ((region_type & ORT_TASK) == 0)
396     c->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
397   else
398     c->default_kind = OMP_CLAUSE_DEFAULT_UNSPECIFIED;
399 
400   return c;
401 }
402 
/* Destroy an omp construct that deals with variable remapping,
   releasing the variables tree and the privatized-types set.  */

static void
delete_omp_context (struct gimplify_omp_ctx *c)
{
  splay_tree_delete (c->variables);
  delete c->privatized_types;
  XDELETE (c);
}
412 
/* Forward declarations for the OMP variable bookkeeping helpers used
   below (defined later in this file).  */
static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int);
static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool);
415 
/* Both gimplify the statement T and append it to *SEQ_P.  This function
   behaves exactly as gimplify_stmt, but you don't have to pass T as a
   reference.  */

void
gimplify_and_add (tree t, gimple_seq *seq_p)
{
  gimplify_stmt (&t, seq_p);
}
425 
426 /* Gimplify statement T into sequence *SEQ_P, and return the first
427    tuple in the sequence of generated tuples for this statement.
428    Return NULL if gimplifying T produced no tuples.  */
429 
430 static gimple
431 gimplify_and_return_first (tree t, gimple_seq *seq_p)
432 {
433   gimple_stmt_iterator last = gsi_last (*seq_p);
434 
435   gimplify_and_add (t, seq_p);
436 
437   if (!gsi_end_p (last))
438     {
439       gsi_next (&last);
440       return gsi_stmt (last);
441     }
442   else
443     return gimple_seq_first_stmt (*seq_p);
444 }
445 
446 /* Returns true iff T is a valid RHS for an assignment to an un-renamed
447    LHS, or for a call argument.  */
448 
449 static bool
450 is_gimple_mem_rhs (tree t)
451 {
452   /* If we're dealing with a renamable type, either source or dest must be
453      a renamed variable.  */
454   if (is_gimple_reg_type (TREE_TYPE (t)))
455     return is_gimple_val (t);
456   else
457     return is_gimple_val (t) || is_gimple_lvalue (t);
458 }
459 
460 /* Return true if T is a CALL_EXPR or an expression that can be
461    assigned to a temporary.  Note that this predicate should only be
462    used during gimplification.  See the rationale for this in
463    gimplify_modify_expr.  */
464 
465 static bool
466 is_gimple_reg_rhs_or_call (tree t)
467 {
468   return (get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS
469 	  || TREE_CODE (t) == CALL_EXPR);
470 }
471 
472 /* Return true if T is a valid memory RHS or a CALL_EXPR.  Note that
473    this predicate should only be used during gimplification.  See the
474    rationale for this in gimplify_modify_expr.  */
475 
476 static bool
477 is_gimple_mem_rhs_or_call (tree t)
478 {
479   /* If we're dealing with a renamable type, either source or dest must be
480      a renamed variable.  */
481   if (is_gimple_reg_type (TREE_TYPE (t)))
482     return is_gimple_val (t);
483   else
484     return (is_gimple_val (t) || is_gimple_lvalue (t)
485 	    || TREE_CODE (t) == CALL_EXPR);
486 }
487 
488 /* Create a temporary with a name derived from VAL.  Subroutine of
489    lookup_tmp_var; nobody else should call this function.  */
490 
491 static inline tree
492 create_tmp_from_val (tree val)
493 {
494   /* Drop all qualifiers and address-space information from the value type.  */
495   tree type = TYPE_MAIN_VARIANT (TREE_TYPE (val));
496   tree var = create_tmp_var (type, get_name (val));
497   if (TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
498       || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE)
499     DECL_GIMPLE_REG_P (var) = 1;
500   return var;
501 }
502 
503 /* Create a temporary to hold the value of VAL.  If IS_FORMAL, try to reuse
504    an existing expression temporary.  */
505 
506 static tree
507 lookup_tmp_var (tree val, bool is_formal)
508 {
509   tree ret;
510 
511   /* If not optimizing, never really reuse a temporary.  local-alloc
512      won't allocate any variable that is used in more than one basic
513      block, which means it will go into memory, causing much extra
514      work in reload and final and poorer code generation, outweighing
515      the extra memory allocation here.  */
516   if (!optimize || !is_formal || TREE_SIDE_EFFECTS (val))
517     ret = create_tmp_from_val (val);
518   else
519     {
520       elt_t elt, *elt_p;
521       elt_t **slot;
522 
523       elt.val = val;
524       if (!gimplify_ctxp->temp_htab)
525         gimplify_ctxp->temp_htab = new hash_table<gimplify_hasher> (1000);
526       slot = gimplify_ctxp->temp_htab->find_slot (&elt, INSERT);
527       if (*slot == NULL)
528 	{
529 	  elt_p = XNEW (elt_t);
530 	  elt_p->val = val;
531 	  elt_p->temp = ret = create_tmp_from_val (val);
532 	  *slot = elt_p;
533 	}
534       else
535 	{
536 	  elt_p = *slot;
537           ret = elt_p->temp;
538 	}
539     }
540 
541   return ret;
542 }
543 
/* Helper for get_formal_tmp_var and get_initialized_tmp_var.  Gimplify
   VAL, create a temporary T for it (an SSA name when gimplifying into
   SSA and the type allows), emit the initialization "T = VAL" into
   *PRE_P, and return T.  */

static tree
internal_get_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
                      bool is_formal)
{
  tree t, mod;

  /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we
     can create an INIT_EXPR and convert it into a GIMPLE_CALL below.  */
  gimplify_expr (&val, pre_p, post_p, is_gimple_reg_rhs_or_call,
		 fb_rvalue);

  if (gimplify_ctxp->into_ssa
      && is_gimple_reg_type (TREE_TYPE (val)))
    t = make_ssa_name (TYPE_MAIN_VARIANT (TREE_TYPE (val)));
  else
    t = lookup_tmp_var (val, is_formal);

  /* VAL is unshared because lookup_tmp_var may have recorded it for
     later reuse.  */
  mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val));

  SET_EXPR_LOCATION (mod, EXPR_LOC_OR_LOC (val, input_location));

  /* gimplify_modify_expr might want to reduce this further.  */
  gimplify_and_add (mod, pre_p);
  ggc_free (mod);

  return t;
}
573 
/* Return a formal temporary variable initialized with VAL.  PRE_P is as
   in gimplify_expr.  Only use this function if:

   1) The value of the unfactored expression represented by VAL will not
      change between the initialization and use of the temporary, and
   2) The temporary will not be otherwise modified.

   For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
   and #2 means it is inappropriate for && temps.

   For other cases, use get_initialized_tmp_var instead.  */

tree
get_formal_tmp_var (tree val, gimple_seq *pre_p)
{
  return internal_get_tmp_var (val, pre_p, NULL, true);
}
591 
/* Return a temporary variable initialized with VAL.  PRE_P and POST_P
   are as in gimplify_expr.  Unlike get_formal_tmp_var, the temporary
   is never reused.  */

tree
get_initialized_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p)
{
  return internal_get_tmp_var (val, pre_p, post_p, false);
}
600 
/* Declare all the variables in VARS in SCOPE.  If DEBUG_INFO is true,
   generate debug info for them; otherwise don't.  VARS is a DECL_CHAIN
   list built in reverse order (newest first); GS must be a GIMPLE_BIND.  */

void
declare_vars (tree vars, gimple gs, bool debug_info)
{
  tree last = vars;
  if (last)
    {
      tree temps, block;

      gbind *scope = as_a <gbind *> (gs);

      /* Reverse VARS in place: TEMPS is the new head (oldest decl) and
	 LAST is now the tail of the reversed chain.  */
      temps = nreverse (last);

      block = gimple_bind_block (scope);
      gcc_assert (!block || TREE_CODE (block) == BLOCK);
      if (!block || !debug_info)
	{
	  /* Prepend the reversed chain onto the scope's existing vars.  */
	  DECL_CHAIN (last) = gimple_bind_vars (scope);
	  gimple_bind_set_vars (scope, temps);
	}
      else
	{
	  /* We need to attach the nodes both to the BIND_EXPR and to its
	     associated BLOCK for debugging purposes.  The key point here
	     is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
	     is a subchain of the BIND_EXPR_VARS of the BIND_EXPR.  */
	  if (BLOCK_VARS (block))
	    BLOCK_VARS (block) = chainon (BLOCK_VARS (block), temps);
	  else
	    {
	      gimple_bind_set_vars (scope,
	      			    chainon (gimple_bind_vars (scope), temps));
	      BLOCK_VARS (block) = temps;
	    }
	}
    }
}
640 
641 /* For VAR a VAR_DECL of variable size, try to find a constant upper bound
642    for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly.  Abort if
643    no such upper bound can be obtained.  */
644 
645 static void
646 force_constant_size (tree var)
647 {
648   /* The only attempt we make is by querying the maximum size of objects
649      of the variable's type.  */
650 
651   HOST_WIDE_INT max_size;
652 
653   gcc_assert (TREE_CODE (var) == VAR_DECL);
654 
655   max_size = max_int_size_in_bytes (TREE_TYPE (var));
656 
657   gcc_assert (max_size >= 0);
658 
659   DECL_SIZE_UNIT (var)
660     = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var)), max_size);
661   DECL_SIZE (var)
662     = build_int_cst (TREE_TYPE (DECL_SIZE (var)), max_size * BITS_PER_UNIT);
663 }
664 
665 /* Push the temporary variable TMP into the current binding.  */
666 
667 void
668 gimple_add_tmp_var_fn (struct function *fn, tree tmp)
669 {
670   gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));
671 
672   /* Later processing assumes that the object size is constant, which might
673      not be true at this point.  Force the use of a constant upper bound in
674      this case.  */
675   if (!tree_fits_uhwi_p (DECL_SIZE_UNIT (tmp)))
676     force_constant_size (tmp);
677 
678   DECL_CONTEXT (tmp) = fn->decl;
679   DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;
680 
681   record_vars_into (tmp, fn->decl);
682 }
683 
/* Push the temporary variable TMP into the current binding.  Depending
   on context, TMP ends up on the gimplifier's temps chain, in the
   function's local_decls, or declared in the outermost GIMPLE_BIND of
   a nested function body.  */

void
gimple_add_tmp_var (tree tmp)
{
  gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));

  /* Later processing assumes that the object size is constant, which might
     not be true at this point.  Force the use of a constant upper bound in
     this case.  */
  if (!tree_fits_uhwi_p (DECL_SIZE_UNIT (tmp)))
    force_constant_size (tmp);

  DECL_CONTEXT (tmp) = current_function_decl;
  DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;

  if (gimplify_ctxp)
    {
      /* Chain TMP onto the active gimplification context's temps.  */
      DECL_CHAIN (tmp) = gimplify_ctxp->temps;
      gimplify_ctxp->temps = tmp;

      /* Mark temporaries local within the nearest enclosing parallel.  */
      if (gimplify_omp_ctxp)
	{
	  /* Skip past workshare/simd regions to the enclosing construct
	     that owns data-sharing for this temporary.  */
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
	  while (ctx
		 && (ctx->region_type == ORT_WORKSHARE
		     || ctx->region_type == ORT_SIMD))
	    ctx = ctx->outer_context;
	  if (ctx)
	    omp_add_variable (ctx, tmp, GOVD_LOCAL | GOVD_SEEN);
	}
    }
  else if (cfun)
    record_vars (tmp);
  else
    {
      gimple_seq body_seq;

      /* This case is for nested functions.  We need to expose the locals
	 they create.  */
      body_seq = gimple_body (current_function_decl);
      declare_vars (tmp, gimple_seq_first_stmt (body_seq), false);
    }
}
729 
730 
731 
732 /* This page contains routines to unshare tree nodes, i.e. to duplicate tree
733    nodes that are referenced more than once in GENERIC functions.  This is
734    necessary because gimplification (translation into GIMPLE) is performed
735    by modifying tree nodes in-place, so gimplication of a shared node in a
736    first context could generate an invalid GIMPLE form in a second context.
737 
738    This is achieved with a simple mark/copy/unmark algorithm that walks the
739    GENERIC representation top-down, marks nodes with TREE_VISITED the first
740    time it encounters them, duplicates them if they already have TREE_VISITED
741    set, and finally removes the TREE_VISITED marks it has set.
742 
743    The algorithm works only at the function level, i.e. it generates a GENERIC
744    representation of a function with no nodes shared within the function when
745    passed a GENERIC function (except for nodes that are allowed to be shared).
746 
747    At the global level, it is also necessary to unshare tree nodes that are
748    referenced in more than one function, for the same aforementioned reason.
749    This requires some cooperation from the front-end.  There are 2 strategies:
750 
751      1. Manual unsharing.  The front-end needs to call unshare_expr on every
752         expression that might end up being shared across functions.
753 
754      2. Deep unsharing.  This is an extension of regular unsharing.  Instead
755         of calling unshare_expr on expressions that might be shared across
756         functions, the front-end pre-marks them with TREE_VISITED.  This will
757         ensure that they are unshared on the first reference within functions
758         when the regular unsharing algorithm runs.  The counterpart is that
759         this algorithm must look deeper than for manual unsharing, which is
760         specified by LANG_HOOKS_DEEP_UNSHARING.
761 
762   If there are only few specific cases of node sharing across functions, it is
763   probably easier for a front-end to unshare the expressions manually.  On the
764   contrary, if the expressions generated at the global level are as widespread
765   as expressions generated within functions, deep unsharing is very likely the
766   way to go.  */
767 
/* Similar to copy_tree_r but do not copy SAVE_EXPR or TARGET_EXPR nodes.
   These nodes model computations that must be done once.  If we were to
   unshare something like SAVE_EXPR(i++), the gimplification process would
   create wrong code.  However, if DATA is non-null, it must hold a pointer
   set that is used to unshare the subtrees of these nodes.  Intended as a
   walk_tree callback.  */

static tree
mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;
  enum tree_code code = TREE_CODE (t);

  /* Do not copy SAVE_EXPR, TARGET_EXPR or BIND_EXPR nodes themselves, but
     copy their subtrees if we can make sure to do it only once.  The
     pointer set in DATA records which such nodes have been seen; add
     returns false on first insertion, so subtrees are walked exactly
     once per node.  */
  if (code == SAVE_EXPR || code == TARGET_EXPR || code == BIND_EXPR)
    {
      if (data && !((hash_set<tree> *)data)->add (t))
	;
      else
	*walk_subtrees = 0;
    }

  /* Stop at types, decls, constants like copy_tree_r.  */
  else if (TREE_CODE_CLASS (code) == tcc_type
	   || TREE_CODE_CLASS (code) == tcc_declaration
	   || TREE_CODE_CLASS (code) == tcc_constant
	   /* We can't do anything sensible with a BLOCK used as an
	      expression, but we also can't just die when we see it
	      because of non-expression uses.  So we avert our eyes
	      and cross our fingers.  Silly Java.  */
	   || code == BLOCK)
    *walk_subtrees = 0;

  /* Cope with the statement expression extension.  */
  else if (code == STATEMENT_LIST)
    ;

  /* Leave the bulk of the work to copy_tree_r itself.  */
  else
    copy_tree_r (tp, walk_subtrees, NULL);

  return NULL_TREE;
}
811 
/* Callback for walk_tree to unshare most of the shared trees rooted at *TP.
   If *TP has been visited already (TREE_VISITED set), then *TP is deeply
   copied by calling mostly_copy_tree_r.  DATA is passed to
   mostly_copy_tree_r unmodified.  The marks left here are removed later
   by unmark_visited.  */

static tree
copy_if_shared_r (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;
  enum tree_code code = TREE_CODE (t);

  /* Skip types, decls, and constants.  But we do want to look at their
     types and the bounds of types.  Mark them as visited so we properly
     unmark their subtrees on the unmark pass.  If we've already seen them,
     don't look down further.  */
  if (TREE_CODE_CLASS (code) == tcc_type
      || TREE_CODE_CLASS (code) == tcc_declaration
      || TREE_CODE_CLASS (code) == tcc_constant)
    {
      if (TREE_VISITED (t))
	*walk_subtrees = 0;
      else
	TREE_VISITED (t) = 1;
    }

  /* If this node has been visited already, unshare it and don't look
     any deeper.  */
  else if (TREE_VISITED (t))
    {
      walk_tree (tp, mostly_copy_tree_r, data, NULL);
      *walk_subtrees = 0;
    }

  /* Otherwise, mark the node as visited and keep looking.  */
  else
    TREE_VISITED (t) = 1;

  return NULL_TREE;
}
850 
/* Unshare most of the shared trees rooted at *TP.  DATA is passed to the
   copy_if_shared_r callback unmodified (it is the deep-unsharing pointer
   set, or NULL).  */

static inline void
copy_if_shared (tree *tp, void *data)
{
  walk_tree (tp, copy_if_shared_r, data, NULL);
}
859 
860 /* Unshare all the trees in the body of FNDECL, as well as in the bodies of
861    any nested functions.  */
862 
863 static void
864 unshare_body (tree fndecl)
865 {
866   struct cgraph_node *cgn = cgraph_node::get (fndecl);
867   /* If the language requires deep unsharing, we need a pointer set to make
868      sure we don't repeatedly unshare subtrees of unshareable nodes.  */
869   hash_set<tree> *visited
870     = lang_hooks.deep_unsharing ? new hash_set<tree> : NULL;
871 
872   copy_if_shared (&DECL_SAVED_TREE (fndecl), visited);
873   copy_if_shared (&DECL_SIZE (DECL_RESULT (fndecl)), visited);
874   copy_if_shared (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)), visited);
875 
876   delete visited;
877 
878   if (cgn)
879     for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
880       unshare_body (cgn->decl);
881 }
882 
883 /* Callback for walk_tree to unmark the visited trees rooted at *TP.
884    Subtrees are walked until the first unvisited node is encountered.  */
885 
886 static tree
887 unmark_visited_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
888 {
889   tree t = *tp;
890 
891   /* If this node has been visited, unmark it and keep looking.  */
892   if (TREE_VISITED (t))
893     TREE_VISITED (t) = 0;
894 
895   /* Otherwise, don't look any deeper.  */
896   else
897     *walk_subtrees = 0;
898 
899   return NULL_TREE;
900 }
901 
/* Unmark (clear TREE_VISITED on) the visited trees rooted at *TP.  */

static inline void
unmark_visited (tree *tp)
{
  walk_tree (tp, unmark_visited_r, NULL, NULL);
}
909 
910 /* Likewise, but mark all trees as not visited.  */
911 
912 static void
913 unvisit_body (tree fndecl)
914 {
915   struct cgraph_node *cgn = cgraph_node::get (fndecl);
916 
917   unmark_visited (&DECL_SAVED_TREE (fndecl));
918   unmark_visited (&DECL_SIZE (DECL_RESULT (fndecl)));
919   unmark_visited (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)));
920 
921   if (cgn)
922     for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
923       unvisit_body (cgn->decl);
924 }
925 
/* Unconditionally make an unshared copy of EXPR.  This is used when using
   stored expressions which span multiple functions, such as BINFO_VTABLE,
   as the normal unsharing process can't tell that they're shared.  */

tree
unshare_expr (tree expr)
{
  walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
  return expr;
}
936 
937 /* Worker for unshare_expr_without_location.  */
938 
939 static tree
940 prune_expr_location (tree *tp, int *walk_subtrees, void *)
941 {
942   if (EXPR_P (*tp))
943     SET_EXPR_LOCATION (*tp, UNKNOWN_LOCATION);
944   else
945     *walk_subtrees = 0;
946   return NULL_TREE;
947 }
948 
/* Similar to unshare_expr but also prune all expression locations
   from EXPR.  */

tree
unshare_expr_without_location (tree expr)
{
  walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
  if (EXPR_P (expr))
    walk_tree (&expr, prune_expr_location, NULL, NULL);
  return expr;
}
960 
/* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
   contain statements and have a value.  Assign its value to a temporary
   and give it void_type_node.  Return the temporary, or NULL_TREE if
   WRAPPER was already void.

   If TEMP is non-null it must be an INIT_EXPR/MODIFY_EXPR whose RHS we
   push down onto the wrapper's final value expression; otherwise a fresh
   "retval" temporary is created.  */

tree
voidify_wrapper_expr (tree wrapper, tree temp)
{
  tree type = TREE_TYPE (wrapper);
  if (type && !VOID_TYPE_P (type))
    {
      tree *p;

      /* Set p to point to the body of the wrapper.  Loop until we find
	 something that isn't a wrapper, voiding every container type on
	 the way down.  */
      for (p = &wrapper; p && *p; )
	{
	  switch (TREE_CODE (*p))
	    {
	    case BIND_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      /* For a BIND_EXPR, the body is operand 1.  */
	      p = &BIND_EXPR_BODY (*p);
	      break;

	    case CLEANUP_POINT_EXPR:
	    case TRY_FINALLY_EXPR:
	    case TRY_CATCH_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      p = &TREE_OPERAND (*p, 0);
	      break;

	    case STATEMENT_LIST:
	      {
		/* Descend into the last statement of the list; an empty
		   list terminates the walk with p == NULL.  */
		tree_stmt_iterator i = tsi_last (*p);
		TREE_SIDE_EFFECTS (*p) = 1;
		TREE_TYPE (*p) = void_type_node;
		p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i);
	      }
	      break;

	    case COMPOUND_EXPR:
	      /* Advance to the last statement.  Set all container types to
		 void.  */
	      for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1))
		{
		  TREE_SIDE_EFFECTS (*p) = 1;
		  TREE_TYPE (*p) = void_type_node;
		}
	      break;

	    case TRANSACTION_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      p = &TRANSACTION_EXPR_BODY (*p);
	      break;

	    default:
	      /* Assume that any tree upon which voidify_wrapper_expr is
		 directly called is a wrapper, and that its body is op0.  */
	      if (p == &wrapper)
		{
		  TREE_SIDE_EFFECTS (*p) = 1;
		  TREE_TYPE (*p) = void_type_node;
		  p = &TREE_OPERAND (*p, 0);
		  break;
		}
	      goto out;
	    }
	}

    out:
      if (p == NULL || IS_EMPTY_STMT (*p))
	temp = NULL_TREE;
      else if (temp)
	{
	  /* The wrapper is on the RHS of an assignment that we're pushing
	     down.  */
	  gcc_assert (TREE_CODE (temp) == INIT_EXPR
		      || TREE_CODE (temp) == MODIFY_EXPR);
	  TREE_OPERAND (temp, 1) = *p;
	  *p = temp;
	}
      else
	{
	  /* No assignment supplied: capture the value in a new temporary.  */
	  temp = create_tmp_var (type, "retval");
	  *p = build2 (INIT_EXPR, type, temp, *p);
	}

      return temp;
    }

  return NULL_TREE;
}
1057 
1058 /* Prepare calls to builtins to SAVE and RESTORE the stack as well as
1059    a temporary through which they communicate.  */
1060 
1061 static void
1062 build_stack_save_restore (gcall **save, gcall **restore)
1063 {
1064   tree tmp_var;
1065 
1066   *save = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_SAVE), 0);
1067   tmp_var = create_tmp_var (ptr_type_node, "saved_stack");
1068   gimple_call_set_lhs (*save, tmp_var);
1069 
1070   *restore
1071     = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_RESTORE),
1072 			 1, tmp_var);
1073 }
1074 
1075 /* Gimplify a BIND_EXPR.  Just voidify and recurse.  */
1076 
static enum gimplify_status
gimplify_bind_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree bind_expr = *expr_p;
  bool old_save_stack = gimplify_ctxp->save_stack;
  tree t;
  gbind *bind_stmt;
  gimple_seq body, cleanup;
  gcall *stack_save;
  location_t start_locus = 0, end_locus = 0;

  /* If the BIND_EXPR has a non-void value, capture it in a temporary
     so the GIMPLE_BIND itself can be void.  */
  tree temp = voidify_wrapper_expr (bind_expr, NULL);

  /* Mark variables seen in this bind expr.  */
  for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
    {
      if (TREE_CODE (t) == VAR_DECL)
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;

	  /* Mark variable as local.  */
	  if (ctx && !DECL_EXTERNAL (t)
	      && (! DECL_SEEN_IN_BIND_EXPR_P (t)
		  || splay_tree_lookup (ctx->variables,
					(splay_tree_key) t) == NULL))
	    {
	      if (ctx->region_type == ORT_SIMD
		  && TREE_ADDRESSABLE (t)
		  && !TREE_STATIC (t))
		omp_add_variable (ctx, t, GOVD_PRIVATE | GOVD_SEEN);
	      else
		omp_add_variable (ctx, t, GOVD_LOCAL | GOVD_SEEN);
	    }

	  DECL_SEEN_IN_BIND_EXPR_P (t) = 1;

	  if (DECL_HARD_REGISTER (t) && !is_global_var (t) && cfun)
	    cfun->has_local_explicit_reg_vars = true;
	}

      /* Preliminarily mark non-addressed complex variables as eligible
	 for promotion to gimple registers.  We'll transform their uses
	 as we find them.  */
      if ((TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
	   || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
	  && !TREE_THIS_VOLATILE (t)
	  && (TREE_CODE (t) == VAR_DECL && !DECL_HARD_REGISTER (t))
	  && !needs_to_live_in_memory (t))
	DECL_GIMPLE_REG_P (t) = 1;
    }

  bind_stmt = gimple_build_bind (BIND_EXPR_VARS (bind_expr), NULL,
                                   BIND_EXPR_BLOCK (bind_expr));
  gimple_push_bind_expr (bind_stmt);

  /* Reset so we can detect whether the body itself needs a stack
     save/restore pair (e.g. a VLA declaration sets this flag).  */
  gimplify_ctxp->save_stack = false;

  /* Gimplify the body into the GIMPLE_BIND tuple's body.  */
  body = NULL;
  gimplify_stmt (&BIND_EXPR_BODY (bind_expr), &body);
  gimple_bind_set_body (bind_stmt, body);

  /* Source location wise, the cleanup code (stack_restore and clobbers)
     belongs to the end of the block, so propagate what we have.  The
     stack_save operation belongs to the beginning of block, which we can
     infer from the bind_expr directly if the block has no explicit
     assignment.  */
  if (BIND_EXPR_BLOCK (bind_expr))
    {
      end_locus = BLOCK_SOURCE_END_LOCATION (BIND_EXPR_BLOCK (bind_expr));
      start_locus = BLOCK_SOURCE_LOCATION (BIND_EXPR_BLOCK (bind_expr));
    }
  if (start_locus == 0)
    start_locus = EXPR_LOCATION (bind_expr);

  cleanup = NULL;
  stack_save = NULL;
  if (gimplify_ctxp->save_stack)
    {
      gcall *stack_restore;

      /* Save stack on entry and restore it on exit.  Add a try_finally
	 block to achieve this.  */
      build_stack_save_restore (&stack_save, &stack_restore);

      gimple_set_location (stack_save, start_locus);
      gimple_set_location (stack_restore, end_locus);

      gimplify_seq_add_stmt (&cleanup, stack_restore);
    }

  /* Add clobbers for all variables that go out of scope.  */
  for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
    {
      if (TREE_CODE (t) == VAR_DECL
	  && !is_global_var (t)
	  && DECL_CONTEXT (t) == current_function_decl
	  && !DECL_HARD_REGISTER (t)
	  && !TREE_THIS_VOLATILE (t)
	  && !DECL_HAS_VALUE_EXPR_P (t)
	  /* Only care for variables that have to be in memory.  Others
	     will be rewritten into SSA names, hence moved to the top-level.  */
	  && !is_gimple_reg (t)
	  && flag_stack_reuse != SR_NONE)
	{
	  /* A volatile empty CONSTRUCTOR on the LHS-assign marks the
	     variable dead for stack-slot reuse purposes.  */
	  tree clobber = build_constructor (TREE_TYPE (t), NULL);
	  gimple clobber_stmt;
	  TREE_THIS_VOLATILE (clobber) = 1;
	  clobber_stmt = gimple_build_assign (t, clobber);
	  gimple_set_location (clobber_stmt, end_locus);
	  gimplify_seq_add_stmt (&cleanup, clobber_stmt);
	}
    }

  if (cleanup)
    {
      gtry *gs;
      gimple_seq new_body;

      /* Wrap the body in a GIMPLE_TRY_FINALLY running the cleanup
	 (stack restore and/or clobbers) on every exit path.  */
      new_body = NULL;
      gs = gimple_build_try (gimple_bind_body (bind_stmt), cleanup,
	  		     GIMPLE_TRY_FINALLY);

      if (stack_save)
	gimplify_seq_add_stmt (&new_body, stack_save);
      gimplify_seq_add_stmt (&new_body, gs);
      gimple_bind_set_body (bind_stmt, new_body);
    }

  gimplify_ctxp->save_stack = old_save_stack;
  gimple_pop_bind_expr ();

  gimplify_seq_add_stmt (pre_p, bind_stmt);

  /* If voidify_wrapper_expr created a value temporary, that temporary
     replaces the whole BIND_EXPR as the expression's value.  */
  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  *expr_p = NULL_TREE;
  return GS_ALL_DONE;
}
1220 
1221 /* Gimplify a RETURN_EXPR.  If the expression to be returned is not a
1222    GIMPLE value, it is assigned to a new temporary and the statement is
1223    re-written to return the temporary.
1224 
1225    PRE_P points to the sequence where side effects that must happen before
1226    STMT should be stored.  */
1227 
1228 static enum gimplify_status
1229 gimplify_return_expr (tree stmt, gimple_seq *pre_p)
1230 {
1231   greturn *ret;
1232   tree ret_expr = TREE_OPERAND (stmt, 0);
1233   tree result_decl, result;
1234 
1235   if (ret_expr == error_mark_node)
1236     return GS_ERROR;
1237 
1238   /* Implicit _Cilk_sync must be inserted right before any return statement
1239      if there is a _Cilk_spawn in the function.  If the user has provided a
1240      _Cilk_sync, the optimizer should remove this duplicate one.  */
1241   if (fn_contains_cilk_spawn_p (cfun))
1242     {
1243       tree impl_sync = build0 (CILK_SYNC_STMT, void_type_node);
1244       gimplify_and_add (impl_sync, pre_p);
1245     }
1246 
1247   if (!ret_expr
1248       || TREE_CODE (ret_expr) == RESULT_DECL
1249       || ret_expr == error_mark_node)
1250     {
1251       greturn *ret = gimple_build_return (ret_expr);
1252       gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
1253       gimplify_seq_add_stmt (pre_p, ret);
1254       return GS_ALL_DONE;
1255     }
1256 
1257   if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
1258     result_decl = NULL_TREE;
1259   else
1260     {
1261       result_decl = TREE_OPERAND (ret_expr, 0);
1262 
1263       /* See through a return by reference.  */
1264       if (TREE_CODE (result_decl) == INDIRECT_REF)
1265 	result_decl = TREE_OPERAND (result_decl, 0);
1266 
1267       gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR
1268 		   || TREE_CODE (ret_expr) == INIT_EXPR)
1269 		  && TREE_CODE (result_decl) == RESULT_DECL);
1270     }
1271 
1272   /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
1273      Recall that aggregate_value_p is FALSE for any aggregate type that is
1274      returned in registers.  If we're returning values in registers, then
1275      we don't want to extend the lifetime of the RESULT_DECL, particularly
1276      across another call.  In addition, for those aggregates for which
1277      hard_function_value generates a PARALLEL, we'll die during normal
1278      expansion of structure assignments; there's special code in expand_return
1279      to handle this case that does not exist in expand_expr.  */
1280   if (!result_decl)
1281     result = NULL_TREE;
1282   else if (aggregate_value_p (result_decl, TREE_TYPE (current_function_decl)))
1283     {
1284       if (TREE_CODE (DECL_SIZE (result_decl)) != INTEGER_CST)
1285 	{
1286 	  if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (result_decl)))
1287 	    gimplify_type_sizes (TREE_TYPE (result_decl), pre_p);
1288 	  /* Note that we don't use gimplify_vla_decl because the RESULT_DECL
1289 	     should be effectively allocated by the caller, i.e. all calls to
1290 	     this function must be subject to the Return Slot Optimization.  */
1291 	  gimplify_one_sizepos (&DECL_SIZE (result_decl), pre_p);
1292 	  gimplify_one_sizepos (&DECL_SIZE_UNIT (result_decl), pre_p);
1293 	}
1294       result = result_decl;
1295     }
1296   else if (gimplify_ctxp->return_temp)
1297     result = gimplify_ctxp->return_temp;
1298   else
1299     {
1300       result = create_tmp_reg (TREE_TYPE (result_decl));
1301 
1302       /* ??? With complex control flow (usually involving abnormal edges),
1303 	 we can wind up warning about an uninitialized value for this.  Due
1304 	 to how this variable is constructed and initialized, this is never
1305 	 true.  Give up and never warn.  */
1306       TREE_NO_WARNING (result) = 1;
1307 
1308       gimplify_ctxp->return_temp = result;
1309     }
1310 
1311   /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
1312      Then gimplify the whole thing.  */
1313   if (result != result_decl)
1314     TREE_OPERAND (ret_expr, 0) = result;
1315 
1316   gimplify_and_add (TREE_OPERAND (stmt, 0), pre_p);
1317 
1318   ret = gimple_build_return (result);
1319   gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
1320   gimplify_seq_add_stmt (pre_p, ret);
1321 
1322   return GS_ALL_DONE;
1323 }
1324 
1325 /* Gimplify a variable-length array DECL.  */
1326 
static void
gimplify_vla_decl (tree decl, gimple_seq *seq_p)
{
  /* This is a variable-sized decl.  Simplify its size and mark it
     for deferred expansion.  */
  tree t, addr, ptr_type;

  gimplify_one_sizepos (&DECL_SIZE (decl), seq_p);
  gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), seq_p);

  /* Don't mess with a DECL_VALUE_EXPR set by the front-end.  */
  if (DECL_HAS_VALUE_EXPR_P (decl))
    return;

  /* All occurrences of this decl in final gimplified code will be
     replaced by indirection.  Setting DECL_VALUE_EXPR does two
     things: First, it lets the rest of the gimplifier know what
     replacement to use.  Second, it lets the debug info know
     where to find the value.  */
  ptr_type = build_pointer_type (TREE_TYPE (decl));
  addr = create_tmp_var (ptr_type, get_name (decl));
  DECL_IGNORED_P (addr) = 0;
  t = build_fold_indirect_ref (addr);
  /* The dereference of ADDR cannot trap once the alloca below has run.  */
  TREE_THIS_NOTRAP (t) = 1;
  SET_DECL_VALUE_EXPR (decl, t);
  DECL_HAS_VALUE_EXPR_P (decl) = 1;

  /* Emit ADDR = __builtin_alloca_with_align (size, align) to allocate
     the storage dynamically.  */
  t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
  t = build_call_expr (t, 2, DECL_SIZE_UNIT (decl),
		       size_int (DECL_ALIGN (decl)));
  /* The call has been built for a variable-sized object.  */
  CALL_ALLOCA_FOR_VAR_P (t) = 1;
  t = fold_convert (ptr_type, t);
  t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);

  gimplify_and_add (t, seq_p);

  /* Indicate that we need to restore the stack level when the
     enclosing BIND_EXPR is exited.  */
  gimplify_ctxp->save_stack = true;
}
1368 
1369 /* A helper function to be called via walk_tree.  Mark all labels under *TP
1370    as being forced.  To be called for DECL_INITIAL of static variables.  */
1371 
1372 static tree
1373 force_labels_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1374 {
1375   if (TYPE_P (*tp))
1376     *walk_subtrees = 0;
1377   if (TREE_CODE (*tp) == LABEL_DECL)
1378     FORCED_LABEL (*tp) = 1;
1379 
1380   return NULL_TREE;
1381 }
1382 
1383 /* Gimplify a DECL_EXPR node *STMT_P by making any necessary allocation
1384    and initialization explicit.  */
1385 
static enum gimplify_status
gimplify_decl_expr (tree *stmt_p, gimple_seq *seq_p)
{
  tree stmt = *stmt_p;
  tree decl = DECL_EXPR_DECL (stmt);

  /* The DECL_EXPR itself is consumed here; everything it implies is
     emitted into SEQ_P.  */
  *stmt_p = NULL_TREE;

  if (TREE_TYPE (decl) == error_mark_node)
    return GS_ERROR;

  if ((TREE_CODE (decl) == TYPE_DECL
       || TREE_CODE (decl) == VAR_DECL)
      && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl)))
    gimplify_type_sizes (TREE_TYPE (decl), seq_p);

  /* ??? DECL_ORIGINAL_TYPE is streamed for LTO so it needs to be gimplified
     in case its size expressions contain problematic nodes like CALL_EXPR.  */
  if (TREE_CODE (decl) == TYPE_DECL
      && DECL_ORIGINAL_TYPE (decl)
      && !TYPE_SIZES_GIMPLIFIED (DECL_ORIGINAL_TYPE (decl)))
    gimplify_type_sizes (DECL_ORIGINAL_TYPE (decl), seq_p);

  if (TREE_CODE (decl) == VAR_DECL && !DECL_EXTERNAL (decl))
    {
      tree init = DECL_INITIAL (decl);

      /* Variable-sized decls, and large automatic variables under generic
	 stack checking, get the deferred VLA allocation treatment.  */
      if (TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
	  || (!TREE_STATIC (decl)
	      && flag_stack_check == GENERIC_STACK_CHECK
	      && compare_tree_int (DECL_SIZE_UNIT (decl),
				   STACK_CHECK_MAX_VAR_SIZE) > 0))
	gimplify_vla_decl (decl, seq_p);

      /* Some front ends do not explicitly declare all anonymous
	 artificial variables.  We compensate here by declaring the
	 variables, though it would be better if the front ends would
	 explicitly declare them.  */
      if (!DECL_SEEN_IN_BIND_EXPR_P (decl)
	  && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
	gimple_add_tmp_var (decl);

      if (init && init != error_mark_node)
	{
	  if (!TREE_STATIC (decl))
	    {
	      /* Turn the initializer into an explicit INIT_EXPR statement
		 and gimplify it; the original node is no longer needed.  */
	      DECL_INITIAL (decl) = NULL_TREE;
	      init = build2 (INIT_EXPR, void_type_node, decl, init);
	      gimplify_and_add (init, seq_p);
	      ggc_free (init);
	    }
	  else
	    /* We must still examine initializers for static variables
	       as they may contain a label address.  */
	    walk_tree (&init, force_labels_r, NULL, NULL);
	}
    }

  return GS_ALL_DONE;
}
1446 
1447 /* Gimplify a LOOP_EXPR.  Normally this just involves gimplifying the body
1448    and replacing the LOOP_EXPR with goto, but if the loop contains an
1449    EXIT_EXPR, we need to append a label for it to jump to.  */
1450 
1451 static enum gimplify_status
1452 gimplify_loop_expr (tree *expr_p, gimple_seq *pre_p)
1453 {
1454   tree saved_label = gimplify_ctxp->exit_label;
1455   tree start_label = create_artificial_label (UNKNOWN_LOCATION);
1456 
1457   gimplify_seq_add_stmt (pre_p, gimple_build_label (start_label));
1458 
1459   gimplify_ctxp->exit_label = NULL_TREE;
1460 
1461   gimplify_and_add (LOOP_EXPR_BODY (*expr_p), pre_p);
1462 
1463   gimplify_seq_add_stmt (pre_p, gimple_build_goto (start_label));
1464 
1465   if (gimplify_ctxp->exit_label)
1466     gimplify_seq_add_stmt (pre_p,
1467 			   gimple_build_label (gimplify_ctxp->exit_label));
1468 
1469   gimplify_ctxp->exit_label = saved_label;
1470 
1471   *expr_p = NULL;
1472   return GS_ALL_DONE;
1473 }
1474 
1475 /* Gimplify a statement list onto a sequence.  These may be created either
1476    by an enlightened front-end, or by shortcut_cond_expr.  */
1477 
1478 static enum gimplify_status
1479 gimplify_statement_list (tree *expr_p, gimple_seq *pre_p)
1480 {
1481   tree temp = voidify_wrapper_expr (*expr_p, NULL);
1482 
1483   tree_stmt_iterator i = tsi_start (*expr_p);
1484 
1485   while (!tsi_end_p (i))
1486     {
1487       gimplify_stmt (tsi_stmt_ptr (i), pre_p);
1488       tsi_delink (&i);
1489     }
1490 
1491   if (temp)
1492     {
1493       *expr_p = temp;
1494       return GS_OK;
1495     }
1496 
1497   return GS_ALL_DONE;
1498 }
1499 
1500 
1501 /* Gimplify a SWITCH_EXPR, and collect the vector of labels it can
1502    branch to.  */
1503 
static enum gimplify_status
gimplify_switch_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree switch_expr = *expr_p;
  gimple_seq switch_body_seq = NULL;
  enum gimplify_status ret;
  tree index_type = TREE_TYPE (switch_expr);
  if (index_type == NULL_TREE)
    index_type = TREE_TYPE (SWITCH_COND (switch_expr));

  /* Reduce the switch condition to a gimple value first.  */
  ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL, is_gimple_val,
                       fb_rvalue);
  if (ret == GS_ERROR || ret == GS_UNHANDLED)
    return ret;

  if (SWITCH_BODY (switch_expr))
    {
      vec<tree> labels;
      vec<tree> saved_labels;
      tree default_case = NULL_TREE;
      gswitch *switch_stmt;

      /* If someone can be bothered to fill in the labels, they can
	 be bothered to null out the body too.  */
      gcc_assert (!SWITCH_LABELS (switch_expr));

      /* Save old labels, get new ones from body, then restore the old
         labels.  Save all the things from the switch body to append after.  */
      saved_labels = gimplify_ctxp->case_labels;
      gimplify_ctxp->case_labels.create (8);

      /* Gimplifying the body collects its CASE_LABEL_EXPRs into
	 gimplify_ctxp->case_labels (see gimplify_case_label_expr).  */
      gimplify_stmt (&SWITCH_BODY (switch_expr), &switch_body_seq);
      labels = gimplify_ctxp->case_labels;
      gimplify_ctxp->case_labels = saved_labels;

      preprocess_case_label_vec_for_gimple (labels, index_type,
					    &default_case);

      /* If the body supplied no default, synthesize an empty one at the
	 end of the body so the GIMPLE_SWITCH always has a default edge.  */
      if (!default_case)
	{
	  glabel *new_default;

	  default_case
	    = build_case_label (NULL_TREE, NULL_TREE,
				create_artificial_label (UNKNOWN_LOCATION));
	  new_default = gimple_build_label (CASE_LABEL (default_case));
	  gimplify_seq_add_stmt (&switch_body_seq, new_default);
	}

      switch_stmt = gimple_build_switch (SWITCH_COND (switch_expr),
					   default_case, labels);
      gimplify_seq_add_stmt (pre_p, switch_stmt);
      gimplify_seq_add_seq (pre_p, switch_body_seq);
      labels.release ();
    }
  else
    gcc_assert (SWITCH_LABELS (switch_expr));

  return GS_ALL_DONE;
}
1564 
1565 /* Gimplify the CASE_LABEL_EXPR pointed to by EXPR_P.  */
1566 
1567 static enum gimplify_status
1568 gimplify_case_label_expr (tree *expr_p, gimple_seq *pre_p)
1569 {
1570   struct gimplify_ctx *ctxp;
1571   glabel *label_stmt;
1572 
1573   /* Invalid programs can play Duff's Device type games with, for example,
1574      #pragma omp parallel.  At least in the C front end, we don't
1575      detect such invalid branches until after gimplification, in the
1576      diagnose_omp_blocks pass.  */
1577   for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context)
1578     if (ctxp->case_labels.exists ())
1579       break;
1580 
1581   label_stmt = gimple_build_label (CASE_LABEL (*expr_p));
1582   ctxp->case_labels.safe_push (*expr_p);
1583   gimplify_seq_add_stmt (pre_p, label_stmt);
1584 
1585   return GS_ALL_DONE;
1586 }
1587 
1588 /* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
1589    if necessary.  */
1590 
1591 tree
1592 build_and_jump (tree *label_p)
1593 {
1594   if (label_p == NULL)
1595     /* If there's nowhere to jump, just fall through.  */
1596     return NULL_TREE;
1597 
1598   if (*label_p == NULL_TREE)
1599     {
1600       tree label = create_artificial_label (UNKNOWN_LOCATION);
1601       *label_p = label;
1602     }
1603 
1604   return build1 (GOTO_EXPR, void_type_node, *label_p);
1605 }
1606 
1607 /* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
1608    This also involves building a label to jump to and communicating it to
1609    gimplify_loop_expr through gimplify_ctxp->exit_label.  */
1610 
1611 static enum gimplify_status
1612 gimplify_exit_expr (tree *expr_p)
1613 {
1614   tree cond = TREE_OPERAND (*expr_p, 0);
1615   tree expr;
1616 
1617   expr = build_and_jump (&gimplify_ctxp->exit_label);
1618   expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE);
1619   *expr_p = expr;
1620 
1621   return GS_OK;
1622 }
1623 
1624 /* *EXPR_P is a COMPONENT_REF being used as an rvalue.  If its type is
1625    different from its canonical type, wrap the whole thing inside a
1626    NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
1627    type.
1628 
1629    The canonical type of a COMPONENT_REF is the type of the field being
1630    referenced--unless the field is a bit-field which can be read directly
1631    in a smaller mode, in which case the canonical type is the
1632    sign-appropriate type corresponding to that mode.  */
1633 
static void
canonicalize_component_ref (tree *expr_p)
{
  tree expr = *expr_p;
  tree type;

  gcc_assert (TREE_CODE (expr) == COMPONENT_REF);

  /* For integral references get_unwidened yields the narrowest type the
     field can be read in (the bit-field case); otherwise the canonical
     type is simply the declared type of the field (operand 1).  */
  if (INTEGRAL_TYPE_P (TREE_TYPE (expr)))
    type = TREE_TYPE (get_unwidened (expr, NULL_TREE));
  else
    type = TREE_TYPE (TREE_OPERAND (expr, 1));

  /* One could argue that all the stuff below is not necessary for
     the non-bitfield case and declare it a FE error if type
     adjustment would be needed.  */
  if (TREE_TYPE (expr) != type)
    {
#ifdef ENABLE_TYPES_CHECKING
      tree old_type = TREE_TYPE (expr);
#endif
      int type_quals;

      /* We need to preserve qualifiers and propagate them from
	 operand 0.  */
      type_quals = TYPE_QUALS (type)
	| TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr, 0)));
      if (TYPE_QUALS (type) != type_quals)
	type = build_qualified_type (TYPE_MAIN_VARIANT (type), type_quals);

      /* Set the type of the COMPONENT_REF to the underlying type.  */
      TREE_TYPE (expr) = type;

#ifdef ENABLE_TYPES_CHECKING
      /* It is now a FE error, if the conversion from the canonical
	 type to the original expression type is not useless.  */
      gcc_assert (useless_type_conversion_p (old_type, type));
#endif
    }
}
1674 
1675 /* If a NOP conversion is changing a pointer to array of foo to a pointer
1676    to foo, embed that change in the ADDR_EXPR by converting
1677       T array[U];
1678       (T *)&array
1679    ==>
1680       &array[L]
1681    where L is the lower bound.  For simplicity, only do this for constant
1682    lower bound.
1683    The constraint is that the type of &array[L] is trivially convertible
1684    to T *.  */
1685 
static void
canonicalize_addr_expr (tree *expr_p)
{
  tree expr = *expr_p;
  tree addr_expr = TREE_OPERAND (expr, 0);
  tree datype, ddatype, pddatype;

  /* We simplify only conversions from an ADDR_EXPR to a pointer type.  */
  if (!POINTER_TYPE_P (TREE_TYPE (expr))
      || TREE_CODE (addr_expr) != ADDR_EXPR)
    return;

  /* The addr_expr type should be a pointer to an array.  */
  datype = TREE_TYPE (TREE_TYPE (addr_expr));
  if (TREE_CODE (datype) != ARRAY_TYPE)
    return;

  /* The pointer to element type shall be trivially convertible to
     the expression pointer type.  */
  ddatype = TREE_TYPE (datype);
  pddatype = build_pointer_type (ddatype);
  if (!useless_type_conversion_p (TYPE_MAIN_VARIANT (TREE_TYPE (expr)),
				  pddatype))
    return;

  /* The lower bound and element sizes must be constant.  */
  if (!TYPE_SIZE_UNIT (ddatype)
      || TREE_CODE (TYPE_SIZE_UNIT (ddatype)) != INTEGER_CST
      || !TYPE_DOMAIN (datype) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype))
      || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype))) != INTEGER_CST)
    return;

  /* All checks succeeded.  Build a new node to merge the cast:
     (T *)&array becomes &array[L] where L is the constant lower bound.  */
  *expr_p = build4 (ARRAY_REF, ddatype, TREE_OPERAND (addr_expr, 0),
		    TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
		    NULL_TREE, NULL_TREE);
  *expr_p = build1 (ADDR_EXPR, pddatype, *expr_p);

  /* We can have stripped a required restrict qualifier above.  */
  if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
    *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
}
1728 
1729 /* *EXPR_P is a NOP_EXPR or CONVERT_EXPR.  Remove it and/or other conversions
1730    underneath as appropriate.  */
1731 
static enum gimplify_status
gimplify_conversion (tree *expr_p)
{
  location_t loc = EXPR_LOCATION (*expr_p);
  gcc_assert (CONVERT_EXPR_P (*expr_p));

  /* Then strip away all but the outermost conversion.  */
  STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p, 0));

  /* And remove the outermost conversion if it's useless.  */
  if (tree_ssa_useless_type_conversion (*expr_p))
    *expr_p = TREE_OPERAND (*expr_p, 0);

  /* If we still have a conversion at the toplevel,
     then canonicalize some constructs.  */
  if (CONVERT_EXPR_P (*expr_p))
    {
      tree sub = TREE_OPERAND (*expr_p, 0);

      /* If a NOP conversion is changing the type of a COMPONENT_REF
	 expression, then canonicalize its type now in order to expose more
	 redundant conversions.  */
      if (TREE_CODE (sub) == COMPONENT_REF)
	canonicalize_component_ref (&TREE_OPERAND (*expr_p, 0));

      /* If a NOP conversion is changing a pointer to array of foo
	 to a pointer to foo, embed that change in the ADDR_EXPR.  */
      else if (TREE_CODE (sub) == ADDR_EXPR)
	canonicalize_addr_expr (expr_p);
    }

  /* If we have a conversion to a non-register type force the
     use of a VIEW_CONVERT_EXPR instead.  */
  if (CONVERT_EXPR_P (*expr_p) && !is_gimple_reg_type (TREE_TYPE (*expr_p)))
    *expr_p = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (*expr_p),
			       TREE_OPERAND (*expr_p, 0));

  /* Canonicalize CONVERT_EXPR to NOP_EXPR so later passes need only
     handle one conversion code.  */
  if (TREE_CODE (*expr_p) == CONVERT_EXPR)
    TREE_SET_CODE (*expr_p, NOP_EXPR);

  return GS_OK;
}
1775 
1776 /* Nonlocal VLAs seen in the current function.  */
1777 static hash_set<tree> *nonlocal_vlas;
1778 
1779 /* The VAR_DECLs created for nonlocal VLAs for debug info purposes.  */
1780 static tree nonlocal_vla_vars;
1781 
1782 /* Gimplify a VAR_DECL or PARM_DECL.  Return GS_OK if we expanded a
1783    DECL_VALUE_EXPR, and it's worth re-examining things.  */
1784 
static enum gimplify_status
gimplify_var_or_parm_decl (tree *expr_p)
{
  tree decl = *expr_p;

  /* ??? If this is a local variable, and it has not been seen in any
     outer BIND_EXPR, then it's probably the result of a duplicate
     declaration, for which we've already issued an error.  It would
     be really nice if the front end wouldn't leak these at all.
     Currently the only known culprit is C++ destructors, as seen
     in g++.old-deja/g++.jason/binding.C.  */
  if (TREE_CODE (decl) == VAR_DECL
      && !DECL_SEEN_IN_BIND_EXPR_P (decl)
      && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
      && decl_function_context (decl) == current_function_decl)
    {
      gcc_assert (seen_error ());
      return GS_ERROR;
    }

  /* When within an OMP context, notice uses of variables.  */
  if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true))
    return GS_ALL_DONE;

  /* If the decl is an alias for another expression, substitute it now.  */
  if (DECL_HAS_VALUE_EXPR_P (decl))
    {
      tree value_expr = DECL_VALUE_EXPR (decl);

      /* For referenced nonlocal VLAs add a decl for debugging purposes
	 to the current function.  */
      if (TREE_CODE (decl) == VAR_DECL
	  && TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
	  && nonlocal_vlas != NULL
	  && TREE_CODE (value_expr) == INDIRECT_REF
	  && TREE_CODE (TREE_OPERAND (value_expr, 0)) == VAR_DECL
	  && decl_function_context (decl) != current_function_decl)
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
	  /* Skip workshare/simd contexts to find the relevant enclosing
	     region, if any.  */
	  while (ctx
		 && (ctx->region_type == ORT_WORKSHARE
		     || ctx->region_type == ORT_SIMD))
	    ctx = ctx->outer_context;
	  /* Only add the debug copy once per decl (add returns true if
	     DECL was already in the set).  */
	  if (!ctx && !nonlocal_vlas->add (decl))
	    {
	      tree copy = copy_node (decl);

	      lang_hooks.dup_lang_specific_decl (copy);
	      SET_DECL_RTL (copy, 0);
	      TREE_USED (copy) = 1;
	      /* Chain the copy onto the list later emitted as debug-only
		 variables of the current function.  */
	      DECL_CHAIN (copy) = nonlocal_vla_vars;
	      nonlocal_vla_vars = copy;
	      SET_DECL_VALUE_EXPR (copy, unshare_expr (value_expr));
	      DECL_HAS_VALUE_EXPR_P (copy) = 1;
	    }
	}

      *expr_p = unshare_expr (value_expr);
      return GS_OK;
    }

  return GS_ALL_DONE;
}
1848 
1849 /* Recalculate the value of the TREE_SIDE_EFFECTS flag for T.  */
1850 
static void
recalculate_side_effects (tree t)
{
  enum tree_code code = TREE_CODE (t);
  int len = TREE_OPERAND_LENGTH (t);
  int i;

  switch (TREE_CODE_CLASS (code))
    {
    case tcc_expression:
      switch (code)
	{
	case INIT_EXPR:
	case MODIFY_EXPR:
	case VA_ARG_EXPR:
	case PREDECREMENT_EXPR:
	case PREINCREMENT_EXPR:
	case POSTDECREMENT_EXPR:
	case POSTINCREMENT_EXPR:
	  /* All of these have side-effects, no matter what their
	     operands are.  */
	  return;

	default:
	  break;
	}
      /* Fall through.  */

    case tcc_comparison:  /* a comparison expression */
    case tcc_unary:       /* a unary arithmetic expression */
    case tcc_binary:      /* a binary arithmetic expression */
    case tcc_reference:   /* a reference */
    case tcc_vl_exp:        /* a function call */
      /* Start from T's own volatility, then OR in the flag of each
	 operand that has side effects.  */
      TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t);
      for (i = 0; i < len; ++i)
	{
	  tree op = TREE_OPERAND (t, i);
	  if (op && TREE_SIDE_EFFECTS (op))
	    TREE_SIDE_EFFECTS (t) = 1;
	}
      break;

    case tcc_constant:
      /* No side-effects.  */
      return;

    default:
      gcc_unreachable ();
   }
}
1901 
1902 /* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
1903    node *EXPR_P.
1904 
1905       compound_lval
1906 	      : min_lval '[' val ']'
1907 	      | min_lval '.' ID
1908 	      | compound_lval '[' val ']'
1909 	      | compound_lval '.' ID
1910 
1911    This is not part of the original SIMPLE definition, which separates
1912    array and member references, but it seems reasonable to handle them
1913    together.  Also, this way we don't run into problems with union
1914    aliasing; gcc requires that for accesses through a union to alias, the
1915    union reference must be explicit, which was not always the case when we
1916    were splitting up array and member refs.
1917 
1918    PRE_P points to the sequence where side effects that must happen before
1919      *EXPR_P should be stored.
1920 
1921    POST_P points to the sequence where side effects that must happen after
1922      *EXPR_P should be stored.  */
1923 
static enum gimplify_status
gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
			fallback_t fallback)
{
  tree *p;
  enum gimplify_status ret = GS_ALL_DONE, tret;
  int i;
  location_t loc = EXPR_LOCATION (*expr_p);
  tree expr = *expr_p;

  /* Create a stack of the subexpressions so later we can walk them in
     order from inner to outer.  */
  auto_vec<tree, 10> expr_stack;

  /* We can handle anything that get_inner_reference can deal with.  */
  for (p = expr_p; ; p = &TREE_OPERAND (*p, 0))
    {
    restart:
      /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs.  */
      if (TREE_CODE (*p) == INDIRECT_REF)
	*p = fold_indirect_ref_loc (loc, *p);

      if (handled_component_p (*p))
	;
      /* Expand DECL_VALUE_EXPR now.  In some cases that may expose
	 additional COMPONENT_REFs.  */
      else if ((TREE_CODE (*p) == VAR_DECL || TREE_CODE (*p) == PARM_DECL)
	       && gimplify_var_or_parm_decl (p) == GS_OK)
	goto restart;
      else
	break;

      expr_stack.safe_push (*p);
    }

  /* *EXPR_P itself must have been a handled component, so the stack
     cannot be empty here.  */
  gcc_assert (expr_stack.length ());

  /* Now EXPR_STACK is a stack of pointers to all the refs we've
     walked through and P points to the innermost expression.

     Java requires that we elaborated nodes in source order.  That
     means we must gimplify the inner expression followed by each of
     the indices, in order.  But we can't gimplify the inner
     expression until we deal with any variable bounds, sizes, or
     positions in order to deal with PLACEHOLDER_EXPRs.

     So we do this in three steps.  First we deal with the annotations
     for any variables in the components, then we gimplify the base,
     then we gimplify any indices, from left to right.  */

  /* Step 1: walk outer-to-inner, filling in and gimplifying the
     variable bounds/sizes (ARRAY_REF operands 2 and 3) and field
     offsets (COMPONENT_REF operand 2).  */
  for (i = expr_stack.length () - 1; i >= 0; i--)
    {
      tree t = expr_stack[i];

      if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
	{
	  /* Gimplify the low bound and element type size and put them into
	     the ARRAY_REF.  If these values are set, they have already been
	     gimplified.  */
	  if (TREE_OPERAND (t, 2) == NULL_TREE)
	    {
	      tree low = unshare_expr (array_ref_low_bound (t));
	      if (!is_gimple_min_invariant (low))
		{
		  TREE_OPERAND (t, 2) = low;
		  tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
					post_p, is_gimple_reg,
					fb_rvalue);
		  ret = MIN (ret, tret);
		}
	    }
	  else
	    {
	      tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
				    is_gimple_reg, fb_rvalue);
	      ret = MIN (ret, tret);
	    }

	  if (TREE_OPERAND (t, 3) == NULL_TREE)
	    {
	      tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0)));
	      tree elmt_size = unshare_expr (array_ref_element_size (t));
	      tree factor = size_int (TYPE_ALIGN_UNIT (elmt_type));

	      /* Divide the element size by the alignment of the element
		 type (above).  */
	      elmt_size
		= size_binop_loc (loc, EXACT_DIV_EXPR, elmt_size, factor);

	      if (!is_gimple_min_invariant (elmt_size))
		{
		  TREE_OPERAND (t, 3) = elmt_size;
		  tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p,
					post_p, is_gimple_reg,
					fb_rvalue);
		  ret = MIN (ret, tret);
		}
	    }
	  else
	    {
	      tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p, post_p,
				    is_gimple_reg, fb_rvalue);
	      ret = MIN (ret, tret);
	    }
	}
      else if (TREE_CODE (t) == COMPONENT_REF)
	{
	  /* Set the field offset into T and gimplify it.  */
	  if (TREE_OPERAND (t, 2) == NULL_TREE)
	    {
	      tree offset = unshare_expr (component_ref_field_offset (t));
	      tree field = TREE_OPERAND (t, 1);
	      tree factor
		= size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT);

	      /* Divide the offset by its alignment.  */
	      offset = size_binop_loc (loc, EXACT_DIV_EXPR, offset, factor);

	      if (!is_gimple_min_invariant (offset))
		{
		  TREE_OPERAND (t, 2) = offset;
		  tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
					post_p, is_gimple_reg,
					fb_rvalue);
		  ret = MIN (ret, tret);
		}
	    }
	  else
	    {
	      tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
				    is_gimple_reg, fb_rvalue);
	      ret = MIN (ret, tret);
	    }
	}
    }

  /* Step 2 is to gimplify the base expression.  Make sure lvalue is set
     so as to match the min_lval predicate.  Failure to do so may result
     in the creation of large aggregate temporaries.  */
  tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval,
			fallback | fb_lvalue);
  ret = MIN (ret, tret);

  /* And finally, the indices and operands of ARRAY_REF.  During this
     loop we also remove any useless conversions.  */
  for (; expr_stack.length () > 0; )
    {
      tree t = expr_stack.pop ();

      if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
	{
	  /* Gimplify the dimension.  */
	  if (!is_gimple_min_invariant (TREE_OPERAND (t, 1)))
	    {
	      tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
				    is_gimple_val, fb_rvalue);
	      ret = MIN (ret, tret);
	    }
	}

      STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t, 0));

      /* The innermost expression P may have originally had
	 TREE_SIDE_EFFECTS set which would have caused all the outer
	 expressions in *EXPR_P leading to P to also have had
	 TREE_SIDE_EFFECTS set.  */
      recalculate_side_effects (t);
    }

  /* If the outermost expression is a COMPONENT_REF, canonicalize its type.  */
  if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF)
    {
      canonicalize_component_ref (expr_p);
    }

  expr_stack.release ();

  /* Returning GS_ALL_DONE implies we did not change the expression.  */
  gcc_assert (*expr_p == expr || ret != GS_ALL_DONE);

  return ret;
}
2104 
2105 /*  Gimplify the self modifying expression pointed to by EXPR_P
2106     (++, --, +=, -=).
2107 
2108     PRE_P points to the list where side effects that must happen before
2109 	*EXPR_P should be stored.
2110 
2111     POST_P points to the list where side effects that must happen after
2112 	*EXPR_P should be stored.
2113 
2114     WANT_VALUE is nonzero iff we want to use the value of this expression
2115 	in another expression.
2116 
2117     ARITH_TYPE is the type the computation should be performed in.  */
2118 
enum gimplify_status
gimplify_self_mod_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
			bool want_value, tree arith_type)
{
  enum tree_code code;
  tree lhs, lvalue, rhs, t1;
  gimple_seq post = NULL, *orig_post_p = post_p;
  bool postfix;
  enum tree_code arith_code;
  enum gimplify_status ret;
  location_t loc = EXPR_LOCATION (*expr_p);

  code = TREE_CODE (*expr_p);

  gcc_assert (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR
	      || code == PREINCREMENT_EXPR || code == PREDECREMENT_EXPR);

  /* Prefix or postfix?  */
  if (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR)
    /* Faster to treat as prefix if result is not used.  */
    postfix = want_value;
  else
    postfix = false;

  /* For postfix, make sure the inner expression's post side effects
     are executed after side effects from this expression.  */
  if (postfix)
    post_p = &post;

  /* Add or subtract?  */
  if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
    arith_code = PLUS_EXPR;
  else
    arith_code = MINUS_EXPR;

  /* Gimplify the LHS into a GIMPLE lvalue.  */
  lvalue = TREE_OPERAND (*expr_p, 0);
  ret = gimplify_expr (&lvalue, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
  if (ret == GS_ERROR)
    return ret;

  /* Extract the operands to the arithmetic operation.  */
  lhs = lvalue;
  rhs = TREE_OPERAND (*expr_p, 1);

  /* For postfix operator, we evaluate the LHS to an rvalue and then use
     that as the result value and in the postqueue operation.  */
  if (postfix)
    {
      ret = gimplify_expr (&lhs, pre_p, post_p, is_gimple_val, fb_rvalue);
      if (ret == GS_ERROR)
	return ret;

      /* Capture the pre-modification value in a fresh temporary; it
	 becomes the result of the whole expression.  */
      lhs = get_initialized_tmp_var (lhs, pre_p, NULL);
    }

  /* For POINTERs increment, use POINTER_PLUS_EXPR.  */
  if (POINTER_TYPE_P (TREE_TYPE (lhs)))
    {
      rhs = convert_to_ptrofftype_loc (loc, rhs);
      if (arith_code == MINUS_EXPR)
	rhs = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (rhs), rhs);
      t1 = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (*expr_p), lhs, rhs);
    }
  else
    /* Do the arithmetic in ARITH_TYPE, then convert back to the
       expression's own type.  */
    t1 = fold_convert (TREE_TYPE (*expr_p),
		       fold_build2 (arith_code, arith_type,
				    fold_convert (arith_type, lhs),
				    fold_convert (arith_type, rhs)));

  if (postfix)
    {
      /* Queue the store, then append the inner expression's post side
	 effects, and return the saved original value as the result.  */
      gimplify_assign (lvalue, t1, pre_p);
      gimplify_seq_add_seq (orig_post_p, post);
      *expr_p = lhs;
      return GS_ALL_DONE;
    }
  else
    {
      /* Prefix form: the whole expression becomes a plain assignment.  */
      *expr_p = build2 (MODIFY_EXPR, TREE_TYPE (lvalue), lvalue, t1);
      return GS_OK;
    }
}
2202 
2203 /* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR.  */
2204 
2205 static void
2206 maybe_with_size_expr (tree *expr_p)
2207 {
2208   tree expr = *expr_p;
2209   tree type = TREE_TYPE (expr);
2210   tree size;
2211 
2212   /* If we've already wrapped this or the type is error_mark_node, we can't do
2213      anything.  */
2214   if (TREE_CODE (expr) == WITH_SIZE_EXPR
2215       || type == error_mark_node)
2216     return;
2217 
2218   /* If the size isn't known or is a constant, we have nothing to do.  */
2219   size = TYPE_SIZE_UNIT (type);
2220   if (!size || TREE_CODE (size) == INTEGER_CST)
2221     return;
2222 
2223   /* Otherwise, make a WITH_SIZE_EXPR.  */
2224   size = unshare_expr (size);
2225   size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, expr);
2226   *expr_p = build2 (WITH_SIZE_EXPR, type, expr, size);
2227 }
2228 
2229 /* Helper for gimplify_call_expr.  Gimplify a single argument *ARG_P
2230    Store any side-effects in PRE_P.  CALL_LOCATION is the location of
2231    the CALL_EXPR.  */
2232 
2233 enum gimplify_status
2234 gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location)
2235 {
2236   bool (*test) (tree);
2237   fallback_t fb;
2238 
2239   /* In general, we allow lvalues for function arguments to avoid
2240      extra overhead of copying large aggregates out of even larger
2241      aggregates into temporaries only to copy the temporaries to
2242      the argument list.  Make optimizers happy by pulling out to
2243      temporaries those types that fit in registers.  */
2244   if (is_gimple_reg_type (TREE_TYPE (*arg_p)))
2245     test = is_gimple_val, fb = fb_rvalue;
2246   else
2247     {
2248       test = is_gimple_lvalue, fb = fb_either;
2249       /* Also strip a TARGET_EXPR that would force an extra copy.  */
2250       if (TREE_CODE (*arg_p) == TARGET_EXPR)
2251 	{
2252 	  tree init = TARGET_EXPR_INITIAL (*arg_p);
2253 	  if (init
2254 	      && !VOID_TYPE_P (TREE_TYPE (init)))
2255 	    *arg_p = init;
2256 	}
2257     }
2258 
2259   /* If this is a variable sized type, we must remember the size.  */
2260   maybe_with_size_expr (arg_p);
2261 
2262   /* FIXME diagnostics: This will mess up gcc.dg/Warray-bounds.c.  */
2263   /* Make sure arguments have the same location as the function call
2264      itself.  */
2265   protected_set_expr_location (*arg_p, call_location);
2266 
2267   /* There is a sequence point before a function call.  Side effects in
2268      the argument list must occur before the actual call. So, when
2269      gimplifying arguments, force gimplify_expr to use an internal
2270      post queue which is then appended to the end of PRE_P.  */
2271   return gimplify_expr (arg_p, pre_p, NULL, test, fb);
2272 }
2273 
2274 /* Don't fold inside offloading or taskreg regions: it can break code by
2275    adding decl references that weren't in the source.  We'll do it during
2276    omplower pass instead.  */
2277 
2278 static bool
2279 maybe_fold_stmt (gimple_stmt_iterator *gsi)
2280 {
2281   struct gimplify_omp_ctx *ctx;
2282   for (ctx = gimplify_omp_ctxp; ctx; ctx = ctx->outer_context)
2283     if (ctx->region_type == ORT_TARGET
2284 	|| (ctx->region_type & (ORT_PARALLEL | ORT_TASK)) != 0)
2285       return false;
2286   return fold_stmt (gsi);
2287 }
2288 
2289 /* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P.
2290    WANT_VALUE is true if the result of the call is desired.  */
2291 
static enum gimplify_status
gimplify_call_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
{
  tree fndecl, parms, p, fnptrtype;
  enum gimplify_status ret;
  int i, nargs;
  gcall *call;
  bool builtin_va_start_p = false;
  location_t loc = EXPR_LOCATION (*expr_p);

  gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR);

  /* For reliable diagnostics during inlining, it is necessary that
     every call_expr be annotated with file and line.  */
  if (! EXPR_HAS_LOCATION (*expr_p))
    SET_EXPR_LOCATION (*expr_p, input_location);

  /* Gimplify internal functions created in the FEs.  Internal calls
     have no CALL_EXPR_FN.  */
  if (CALL_EXPR_FN (*expr_p) == NULL_TREE)
    {
      /* The call is only emitted directly here when its value is
	 unused; otherwise it is left for the caller to place into an
	 assignment (compare the tail of this function).  */
      if (want_value)
	return GS_ALL_DONE;

      nargs = call_expr_nargs (*expr_p);
      enum internal_fn ifn = CALL_EXPR_IFN (*expr_p);
      auto_vec<tree> vargs (nargs);

      for (i = 0; i < nargs; i++)
	{
	  gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
			EXPR_LOCATION (*expr_p));
	  vargs.quick_push (CALL_EXPR_ARG (*expr_p, i));
	}
      gimple call = gimple_build_call_internal_vec (ifn, vargs);
      gimplify_seq_add_stmt (pre_p, call);
      return GS_ALL_DONE;
    }

  /* This may be a call to a builtin function.

     Builtin function calls may be transformed into different
     (and more efficient) builtin function calls under certain
     circumstances.  Unfortunately, gimplification can muck things
     up enough that the builtin expanders are not aware that certain
     transformations are still valid.

     So we attempt transformation/gimplification of the call before
     we gimplify the CALL_EXPR.  At this time we do not manage to
     transform all calls in the same manner as the expanders do, but
     we do transform most of them.  */
  fndecl = get_callee_fndecl (*expr_p);
  if (fndecl
      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (fndecl))
      {
      case BUILT_IN_VA_START:
        {
	  /* Remember this so the second argument is kept as a plain
	     PARM_DECL below.  */
	  builtin_va_start_p = TRUE;
	  if (call_expr_nargs (*expr_p) < 2)
	    {
	      error ("too few arguments to function %<va_start%>");
	      *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
	      return GS_OK;
	    }

	  if (fold_builtin_next_arg (*expr_p, true))
	    {
	      *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
	      return GS_OK;
	    }
	  break;
	}
      case BUILT_IN_LINE:
	{
	  /* Replace __builtin_LINE () with the call's line number.  */
	  *expr_p = build_int_cst (TREE_TYPE (*expr_p),
				   LOCATION_LINE (EXPR_LOCATION (*expr_p)));
	  return GS_OK;
	}
      case BUILT_IN_FILE:
	{
	  /* Replace __builtin_FILE () with the call's file name.  */
	  const char *locfile = LOCATION_FILE (EXPR_LOCATION (*expr_p));
	  *expr_p = build_string_literal (strlen (locfile) + 1, locfile);
	  return GS_OK;
	}
      case BUILT_IN_FUNCTION:
	{
	  /* Replace __builtin_FUNCTION () with the enclosing function's
	     name.  */
	  const char *function;
	  function = IDENTIFIER_POINTER (DECL_NAME (current_function_decl));
	  *expr_p = build_string_literal (strlen (function) + 1, function);
	  return GS_OK;
	}
      default:
        ;
      }
  if (fndecl && DECL_BUILT_IN (fndecl))
    {
      tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
      if (new_tree && new_tree != *expr_p)
	{
	  /* There was a transformation of this call which computes the
	     same value, but in a more efficient way.  Return and try
	     again.  */
	  *expr_p = new_tree;
	  return GS_OK;
	}
    }

  /* Remember the original function pointer type.  */
  fnptrtype = TREE_TYPE (CALL_EXPR_FN (*expr_p));

  /* There is a sequence point before the call, so any side effects in
     the calling expression must occur before the actual call.  Force
     gimplify_expr to use an internal post queue.  */
  ret = gimplify_expr (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
		       is_gimple_call_addr, fb_rvalue);

  nargs = call_expr_nargs (*expr_p);

  /* Get argument types for verification.  */
  fndecl = get_callee_fndecl (*expr_p);
  parms = NULL_TREE;
  if (fndecl)
    parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
  else
    parms = TYPE_ARG_TYPES (TREE_TYPE (fnptrtype));

  if (fndecl && DECL_ARGUMENTS (fndecl))
    p = DECL_ARGUMENTS (fndecl);
  else if (parms)
    p = parms;
  else
    p = NULL_TREE;
  /* Advance P in parallel with the arguments; P ends up NULL when the
     call has more arguments than named parameters.  */
  for (i = 0; i < nargs && p; i++, p = TREE_CHAIN (p))
    ;

  /* If the last argument is __builtin_va_arg_pack () and it is not
     passed as a named argument, decrease the number of CALL_EXPR
     arguments and set instead the CALL_EXPR_VA_ARG_PACK flag.  */
  if (!p
      && i < nargs
      && TREE_CODE (CALL_EXPR_ARG (*expr_p, nargs - 1)) == CALL_EXPR)
    {
      tree last_arg = CALL_EXPR_ARG (*expr_p, nargs - 1);
      tree last_arg_fndecl = get_callee_fndecl (last_arg);

      if (last_arg_fndecl
	  && TREE_CODE (last_arg_fndecl) == FUNCTION_DECL
	  && DECL_BUILT_IN_CLASS (last_arg_fndecl) == BUILT_IN_NORMAL
	  && DECL_FUNCTION_CODE (last_arg_fndecl) == BUILT_IN_VA_ARG_PACK)
	{
	  tree call = *expr_p;

	  /* Rebuild the call without the trailing va_arg_pack
	     argument.  */
	  --nargs;
	  *expr_p = build_call_array_loc (loc, TREE_TYPE (call),
					  CALL_EXPR_FN (call),
					  nargs, CALL_EXPR_ARGP (call));

	  /* Copy all CALL_EXPR flags, location and block, except
	     CALL_EXPR_VA_ARG_PACK flag.  */
	  CALL_EXPR_STATIC_CHAIN (*expr_p) = CALL_EXPR_STATIC_CHAIN (call);
	  CALL_EXPR_TAILCALL (*expr_p) = CALL_EXPR_TAILCALL (call);
	  CALL_EXPR_RETURN_SLOT_OPT (*expr_p)
	    = CALL_EXPR_RETURN_SLOT_OPT (call);
	  CALL_FROM_THUNK_P (*expr_p) = CALL_FROM_THUNK_P (call);
	  SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (call));

	  /* Set CALL_EXPR_VA_ARG_PACK.  */
	  CALL_EXPR_VA_ARG_PACK (*expr_p) = 1;
	}
    }

  /* Gimplify the function arguments, iterating in the target's
     argument-push order (reversed iff PUSH_ARGS_REVERSED).  */
  if (nargs > 0)
    {
      for (i = (PUSH_ARGS_REVERSED ? nargs - 1 : 0);
           PUSH_ARGS_REVERSED ? i >= 0 : i < nargs;
           PUSH_ARGS_REVERSED ? i-- : i++)
        {
          enum gimplify_status t;

          /* Avoid gimplifying the second argument to va_start, which needs to
             be the plain PARM_DECL.  */
          if ((i != 1) || !builtin_va_start_p)
            {
              t = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
				EXPR_LOCATION (*expr_p));

              if (t == GS_ERROR)
                ret = GS_ERROR;
            }
        }
    }

  /* Gimplify the static chain.  */
  if (CALL_EXPR_STATIC_CHAIN (*expr_p))
    {
      /* A static chain is pointless for a function known not to use
	 one; drop it.  */
      if (fndecl && !DECL_STATIC_CHAIN (fndecl))
	CALL_EXPR_STATIC_CHAIN (*expr_p) = NULL;
      else
	{
	  enum gimplify_status t;
	  t = gimplify_arg (&CALL_EXPR_STATIC_CHAIN (*expr_p), pre_p,
			    EXPR_LOCATION (*expr_p));
	  if (t == GS_ERROR)
	    ret = GS_ERROR;
	}
    }

  /* Verify the function result.  */
  if (want_value && fndecl
      && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fnptrtype))))
    {
      error_at (loc, "using result of function returning %<void%>");
      ret = GS_ERROR;
    }

  /* Try this again in case gimplification exposed something.  */
  if (ret != GS_ERROR)
    {
      tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);

      if (new_tree && new_tree != *expr_p)
	{
	  /* There was a transformation of this call which computes the
	     same value, but in a more efficient way.  Return and try
	     again.  */
	  *expr_p = new_tree;
	  return GS_OK;
	}
    }
  else
    {
      *expr_p = error_mark_node;
      return GS_ERROR;
    }

  /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
     decl.  This allows us to eliminate redundant or useless
     calls to "const" functions.  */
  if (TREE_CODE (*expr_p) == CALL_EXPR)
    {
      int flags = call_expr_flags (*expr_p);
      if (flags & (ECF_CONST | ECF_PURE)
	  /* An infinite loop is considered a side effect.  */
	  && !(flags & (ECF_LOOPING_CONST_OR_PURE)))
	TREE_SIDE_EFFECTS (*expr_p) = 0;
    }

  /* If the value is not needed by the caller, emit a new GIMPLE_CALL
     and clear *EXPR_P.  Otherwise, leave *EXPR_P in its gimplified
     form and delegate the creation of a GIMPLE_CALL to
     gimplify_modify_expr.  This is always possible because when
     WANT_VALUE is true, the caller wants the result of this call into
     a temporary, which means that we will emit an INIT_EXPR in
     internal_get_tmp_var which will then be handled by
     gimplify_modify_expr.  */
  if (!want_value)
    {
      /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we
	 have to do is replicate it as a GIMPLE_CALL tuple.  */
      gimple_stmt_iterator gsi;
      call = gimple_build_call_from_tree (*expr_p);
      gimple_call_set_fntype (call, TREE_TYPE (fnptrtype));
      notice_special_calls (call);
      gimplify_seq_add_stmt (pre_p, call);
      gsi = gsi_last (*pre_p);
      maybe_fold_stmt (&gsi);
      *expr_p = NULL_TREE;
    }
  else
    /* Remember the original function type.  */
    CALL_EXPR_FN (*expr_p) = build1 (NOP_EXPR, fnptrtype,
				     CALL_EXPR_FN (*expr_p));

  return ret;
}
2568 
2569 /* Handle shortcut semantics in the predicate operand of a COND_EXPR by
2570    rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.
2571 
2572    TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
2573    condition is true or false, respectively.  If null, we should generate
2574    our own to skip over the evaluation of this specific expression.
2575 
2576    LOCUS is the source location of the COND_EXPR.
2577 
2578    This function is the tree equivalent of do_jump.
2579 
2580    shortcut_cond_r should only be called by shortcut_cond_expr.  */
2581 
static tree
shortcut_cond_r (tree pred, tree *true_label_p, tree *false_label_p,
		 location_t locus)
{
  tree local_label = NULL_TREE;
  tree t, expr = NULL;

  /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
     retain the shortcut semantics.  Just insert the gotos here;
     shortcut_cond_expr will append the real blocks later.  */
  if (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
    {
      location_t new_locus;

      /* Turn if (a && b) into

	 if (a); else goto no;
	 if (b) goto yes; else goto no;
	 (no:) */

      /* With no caller-supplied false label, create one of our own so
	 both sub-conditions can jump past the whole test.  */
      if (false_label_p == NULL)
	false_label_p = &local_label;

      /* Keep the original source location on the first 'if'.  */
      t = shortcut_cond_r (TREE_OPERAND (pred, 0), NULL, false_label_p, locus);
      append_to_statement_list (t, &expr);

      /* Set the source location of the && on the second 'if'.  */
      new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
      t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
			   new_locus);
      append_to_statement_list (t, &expr);
    }
  else if (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
    {
      location_t new_locus;

      /* Turn if (a || b) into

	 if (a) goto yes;
	 if (b) goto yes; else goto no;
	 (yes:) */

      /* With no caller-supplied true label, create one of our own so
	 both sub-conditions can jump past the whole test.  */
      if (true_label_p == NULL)
	true_label_p = &local_label;

      /* Keep the original source location on the first 'if'.  */
      t = shortcut_cond_r (TREE_OPERAND (pred, 0), true_label_p, NULL, locus);
      append_to_statement_list (t, &expr);

      /* Set the source location of the || on the second 'if'.  */
      new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
      t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
			   new_locus);
      append_to_statement_list (t, &expr);
    }
  else if (TREE_CODE (pred) == COND_EXPR
	   && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 1)))
	   && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 2))))
    {
      location_t new_locus;

      /* As long as we're messing with gotos, turn if (a ? b : c) into
	 if (a)
	   if (b) goto yes; else goto no;
	 else
	   if (c) goto yes; else goto no;

	 Don't do this if one of the arms has void type, which can happen
	 in C++ when the arm is throw.  */

      /* Keep the original source location on the first 'if'.  Set the source
	 location of the ? on the second 'if'.  */
      new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
      expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (pred, 0),
		     shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
				      false_label_p, locus),
		     shortcut_cond_r (TREE_OPERAND (pred, 2), true_label_p,
				      false_label_p, new_locus));
    }
  else
    {
      /* Base case: a simple predicate becomes one conditional jumping
	 to the requested (or freshly created) labels.  */
      expr = build3 (COND_EXPR, void_type_node, pred,
		     build_and_jump (true_label_p),
		     build_and_jump (false_label_p));
      SET_EXPR_LOCATION (expr, locus);
    }

  /* If we created a local label above, define it here so the emitted
     gotos have a target that falls through to the caller's code.  */
  if (local_label)
    {
      t = build1 (LABEL_EXPR, void_type_node, local_label);
      append_to_statement_list (t, &expr);
    }

  return expr;
}
2678 
2679 /* Given a conditional expression EXPR with short-circuit boolean
2680    predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the
2681    predicate apart into the equivalent sequence of conditionals.  */
2682 
2683 static tree
2684 shortcut_cond_expr (tree expr)
2685 {
2686   tree pred = TREE_OPERAND (expr, 0);
2687   tree then_ = TREE_OPERAND (expr, 1);
2688   tree else_ = TREE_OPERAND (expr, 2);
2689   tree true_label, false_label, end_label, t;
2690   tree *true_label_p;
2691   tree *false_label_p;
2692   bool emit_end, emit_false, jump_over_else;
2693   bool then_se = then_ && TREE_SIDE_EFFECTS (then_);
2694   bool else_se = else_ && TREE_SIDE_EFFECTS (else_);
2695 
2696   /* First do simple transformations.  */
2697   if (!else_se)
2698     {
2699       /* If there is no 'else', turn
2700 	   if (a && b) then c
2701 	 into
2702 	   if (a) if (b) then c.  */
2703       while (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
2704 	{
2705 	  /* Keep the original source location on the first 'if'.  */
2706 	  location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
2707 	  TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
2708 	  /* Set the source location of the && on the second 'if'.  */
2709 	  if (EXPR_HAS_LOCATION (pred))
2710 	    SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
2711 	  then_ = shortcut_cond_expr (expr);
2712 	  then_se = then_ && TREE_SIDE_EFFECTS (then_);
2713 	  pred = TREE_OPERAND (pred, 0);
2714 	  expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE);
2715 	  SET_EXPR_LOCATION (expr, locus);
2716 	}
2717     }
2718 
2719   if (!then_se)
2720     {
2721       /* If there is no 'then', turn
2722 	   if (a || b); else d
2723 	 into
2724 	   if (a); else if (b); else d.  */
2725       while (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
2726 	{
2727 	  /* Keep the original source location on the first 'if'.  */
2728 	  location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
2729 	  TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
2730 	  /* Set the source location of the || on the second 'if'.  */
2731 	  if (EXPR_HAS_LOCATION (pred))
2732 	    SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
2733 	  else_ = shortcut_cond_expr (expr);
2734 	  else_se = else_ && TREE_SIDE_EFFECTS (else_);
2735 	  pred = TREE_OPERAND (pred, 0);
2736 	  expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_);
2737 	  SET_EXPR_LOCATION (expr, locus);
2738 	}
2739     }
2740 
2741   /* If we're done, great.  */
2742   if (TREE_CODE (pred) != TRUTH_ANDIF_EXPR
2743       && TREE_CODE (pred) != TRUTH_ORIF_EXPR)
2744     return expr;
2745 
2746   /* Otherwise we need to mess with gotos.  Change
2747        if (a) c; else d;
2748      to
2749        if (a); else goto no;
2750        c; goto end;
2751        no: d; end:
2752      and recursively gimplify the condition.  */
2753 
2754   true_label = false_label = end_label = NULL_TREE;
2755 
2756   /* If our arms just jump somewhere, hijack those labels so we don't
2757      generate jumps to jumps.  */
2758 
2759   if (then_
2760       && TREE_CODE (then_) == GOTO_EXPR
2761       && TREE_CODE (GOTO_DESTINATION (then_)) == LABEL_DECL)
2762     {
2763       true_label = GOTO_DESTINATION (then_);
2764       then_ = NULL;
2765       then_se = false;
2766     }
2767 
2768   if (else_
2769       && TREE_CODE (else_) == GOTO_EXPR
2770       && TREE_CODE (GOTO_DESTINATION (else_)) == LABEL_DECL)
2771     {
2772       false_label = GOTO_DESTINATION (else_);
2773       else_ = NULL;
2774       else_se = false;
2775     }
2776 
2777   /* If we aren't hijacking a label for the 'then' branch, it falls through.  */
2778   if (true_label)
2779     true_label_p = &true_label;
2780   else
2781     true_label_p = NULL;
2782 
2783   /* The 'else' branch also needs a label if it contains interesting code.  */
2784   if (false_label || else_se)
2785     false_label_p = &false_label;
2786   else
2787     false_label_p = NULL;
2788 
2789   /* If there was nothing else in our arms, just forward the label(s).  */
2790   if (!then_se && !else_se)
2791     return shortcut_cond_r (pred, true_label_p, false_label_p,
2792 			    EXPR_LOC_OR_LOC (expr, input_location));
2793 
2794   /* If our last subexpression already has a terminal label, reuse it.  */
2795   if (else_se)
2796     t = expr_last (else_);
2797   else if (then_se)
2798     t = expr_last (then_);
2799   else
2800     t = NULL;
2801   if (t && TREE_CODE (t) == LABEL_EXPR)
2802     end_label = LABEL_EXPR_LABEL (t);
2803 
2804   /* If we don't care about jumping to the 'else' branch, jump to the end
2805      if the condition is false.  */
2806   if (!false_label_p)
2807     false_label_p = &end_label;
2808 
2809   /* We only want to emit these labels if we aren't hijacking them.  */
2810   emit_end = (end_label == NULL_TREE);
2811   emit_false = (false_label == NULL_TREE);
2812 
2813   /* We only emit the jump over the else clause if we have to--if the
2814      then clause may fall through.  Otherwise we can wind up with a
2815      useless jump and a useless label at the end of gimplified code,
2816      which will cause us to think that this conditional as a whole
2817      falls through even if it doesn't.  If we then inline a function
2818      which ends with such a condition, that can cause us to issue an
2819      inappropriate warning about control reaching the end of a
2820      non-void function.  */
2821   jump_over_else = block_may_fallthru (then_);
2822 
2823   pred = shortcut_cond_r (pred, true_label_p, false_label_p,
2824 			  EXPR_LOC_OR_LOC (expr, input_location));
2825 
2826   expr = NULL;
2827   append_to_statement_list (pred, &expr);
2828 
2829   append_to_statement_list (then_, &expr);
2830   if (else_se)
2831     {
2832       if (jump_over_else)
2833 	{
2834 	  tree last = expr_last (expr);
2835 	  t = build_and_jump (&end_label);
2836 	  if (EXPR_HAS_LOCATION (last))
2837 	    SET_EXPR_LOCATION (t, EXPR_LOCATION (last));
2838 	  append_to_statement_list (t, &expr);
2839 	}
2840       if (emit_false)
2841 	{
2842 	  t = build1 (LABEL_EXPR, void_type_node, false_label);
2843 	  append_to_statement_list (t, &expr);
2844 	}
2845       append_to_statement_list (else_, &expr);
2846     }
2847   if (emit_end && end_label)
2848     {
2849       t = build1 (LABEL_EXPR, void_type_node, end_label);
2850       append_to_statement_list (t, &expr);
2851     }
2852 
2853   return expr;
2854 }
2855 
/* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE.
   Returns EXPR itself with its type (and recursively the types of
   truth-expression operands) adjusted, or a conversion of EXPR to
   boolean_type_node.  Only the tree is modified in place; no new
   statements are emitted.  */

tree
gimple_boolify (tree expr)
{
  tree type = TREE_TYPE (expr);
  location_t loc = EXPR_LOCATION (expr);

  /* Special-case the idiom __builtin_expect (x, y) != 0, which is how
     front ends represent a boolean use of __builtin_expect.  */
  if (TREE_CODE (expr) == NE_EXPR
      && TREE_CODE (TREE_OPERAND (expr, 0)) == CALL_EXPR
      && integer_zerop (TREE_OPERAND (expr, 1)))
    {
      tree call = TREE_OPERAND (expr, 0);
      tree fn = get_callee_fndecl (call);

      /* For __builtin_expect ((long) (x), y) recurse into x as well
	 if x is truth_value_p.  */
      if (fn
	  && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL
	  && DECL_FUNCTION_CODE (fn) == BUILT_IN_EXPECT
	  && call_expr_nargs (call) == 2)
	{
	  tree arg = CALL_EXPR_ARG (call, 0);
	  if (arg)
	    {
	      /* Strip a conversion the front end wrapped around the
		 truth value to match __builtin_expect's long argument.  */
	      if (TREE_CODE (arg) == NOP_EXPR
		  && TREE_TYPE (arg) == TREE_TYPE (call))
		arg = TREE_OPERAND (arg, 0);
	      if (truth_value_p (TREE_CODE (arg)))
		{
		  arg = gimple_boolify (arg);
		  CALL_EXPR_ARG (call, 0)
		    = fold_convert_loc (loc, TREE_TYPE (call), arg);
		}
	    }
	}
    }

  switch (TREE_CODE (expr))
    {
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
      /* Also boolify the arguments of truth exprs.  */
      TREE_OPERAND (expr, 1) = gimple_boolify (TREE_OPERAND (expr, 1));
      /* FALLTHRU */

    case TRUTH_NOT_EXPR:
      TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));

      /* These expressions always produce boolean results.  */
      if (TREE_CODE (type) != BOOLEAN_TYPE)
	TREE_TYPE (expr) = boolean_type_node;
      return expr;

    case ANNOTATE_EXPR:
      /* Loop annotations wrap a condition; boolify the wrapped
	 condition and retype the wrapper to match.  */
      switch ((enum annot_expr_kind) TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)))
	{
	case annot_expr_ivdep_kind:
	case annot_expr_no_vector_kind:
	case annot_expr_vector_kind:
	  TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
	  if (TREE_CODE (type) != BOOLEAN_TYPE)
	    TREE_TYPE (expr) = boolean_type_node;
	  return expr;
	default:
	  gcc_unreachable ();
	}

    default:
      if (COMPARISON_CLASS_P (expr))
	{
	  /* These expressions always produce boolean results.  */
	  if (TREE_CODE (type) != BOOLEAN_TYPE)
	    TREE_TYPE (expr) = boolean_type_node;
	  return expr;
	}
      /* Other expressions that get here must have boolean values, but
	 might need to be converted to the appropriate mode.  */
      if (TREE_CODE (type) == BOOLEAN_TYPE)
	return expr;
      return fold_convert_loc (loc, boolean_type_node, expr);
    }
}
2942 
/* Given a conditional expression *EXPR_P without side effects, gimplify
   its operands.  New statements are inserted to PRE_P.

   The caller (gimplify_cond_expr) guarantees that neither arm has side
   effects or can trap, so both arms may be evaluated unconditionally
   and the COND_EXPR kept as a value-producing expression.  */

static enum gimplify_status
gimplify_pure_cond_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree expr = *expr_p, cond;
  enum gimplify_status ret, tret;
  enum tree_code code;

  cond = gimple_boolify (COND_EXPR_COND (expr));

  /* We need to handle && and || specially, as their gimplification
     creates pure cond_expr, thus leading to an infinite cycle otherwise.
     Demoting them to the non-short-circuit forms is safe here because
     the operands are known side-effect- and trap-free.  */
  code = TREE_CODE (cond);
  if (code == TRUTH_ANDIF_EXPR)
    TREE_SET_CODE (cond, TRUTH_AND_EXPR);
  else if (code == TRUTH_ORIF_EXPR)
    TREE_SET_CODE (cond, TRUTH_OR_EXPR);
  ret = gimplify_expr (&cond, pre_p, NULL, is_gimple_condexpr, fb_rvalue);
  COND_EXPR_COND (*expr_p) = cond;

  /* MIN over gimplify_status values propagates the most severe result
     (GS_ERROR is the smallest enumerator).  */
  tret = gimplify_expr (&COND_EXPR_THEN (expr), pre_p, NULL,
				   is_gimple_val, fb_rvalue);
  ret = MIN (ret, tret);
  tret = gimplify_expr (&COND_EXPR_ELSE (expr), pre_p, NULL,
				   is_gimple_val, fb_rvalue);

  return MIN (ret, tret);
}
2973 
2974 /* Return true if evaluating EXPR could trap.
2975    EXPR is GENERIC, while tree_could_trap_p can be called
2976    only on GIMPLE.  */
2977 
2978 static bool
2979 generic_expr_could_trap_p (tree expr)
2980 {
2981   unsigned i, n;
2982 
2983   if (!expr || is_gimple_val (expr))
2984     return false;
2985 
2986   if (!EXPR_P (expr) || tree_could_trap_p (expr))
2987     return true;
2988 
2989   n = TREE_OPERAND_LENGTH (expr);
2990   for (i = 0; i < n; i++)
2991     if (generic_expr_could_trap_p (TREE_OPERAND (expr, i)))
2992       return true;
2993 
2994   return false;
2995 }
2996 
/*  Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
    into

    if (p)			if (p)
      t1 = a;			  a;
    else		or	else
      t1 = b;			  b;
    t1;
3237 
3238 /* Prepare the node pointed to by EXPR_P, an is_gimple_addressable expression,
3239    to be marked addressable.
3240 
3241    We cannot rely on such an expression being directly markable if a temporary
3242    has been created by the gimplification.  In this case, we create another
3243    temporary and initialize it with a copy, which will become a store after we
3244    mark it addressable.  This can happen if the front-end passed us something
3245    that it could not mark addressable yet, like a Fortran pass-by-reference
3246    parameter (int) floatvar.  */
3247 
3248 static void
3249 prepare_gimple_addressable (tree *expr_p, gimple_seq *seq_p)
3250 {
3251   while (handled_component_p (*expr_p))
3252     expr_p = &TREE_OPERAND (*expr_p, 0);
3253   if (is_gimple_reg (*expr_p))
3254     {
3255       tree var = get_initialized_tmp_var (*expr_p, seq_p, NULL);
3256       DECL_GIMPLE_REG_P (var) = 0;
3257       *expr_p = var;
3258     }
3259 }
3260 
3261 /* A subroutine of gimplify_modify_expr.  Replace a MODIFY_EXPR with
3262    a call to __builtin_memcpy.  */
3263 
3264 static enum gimplify_status
3265 gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value,
3266     				gimple_seq *seq_p)
3267 {
3268   tree t, to, to_ptr, from, from_ptr;
3269   gcall *gs;
3270   location_t loc = EXPR_LOCATION (*expr_p);
3271 
3272   to = TREE_OPERAND (*expr_p, 0);
3273   from = TREE_OPERAND (*expr_p, 1);
3274 
3275   /* Mark the RHS addressable.  Beware that it may not be possible to do so
3276      directly if a temporary has been created by the gimplification.  */
3277   prepare_gimple_addressable (&from, seq_p);
3278 
3279   mark_addressable (from);
3280   from_ptr = build_fold_addr_expr_loc (loc, from);
3281   gimplify_arg (&from_ptr, seq_p, loc);
3282 
3283   mark_addressable (to);
3284   to_ptr = build_fold_addr_expr_loc (loc, to);
3285   gimplify_arg (&to_ptr, seq_p, loc);
3286 
3287   t = builtin_decl_implicit (BUILT_IN_MEMCPY);
3288 
3289   gs = gimple_build_call (t, 3, to_ptr, from_ptr, size);
3290 
3291   if (want_value)
3292     {
3293       /* tmp = memcpy() */
3294       t = create_tmp_var (TREE_TYPE (to_ptr));
3295       gimple_call_set_lhs (gs, t);
3296       gimplify_seq_add_stmt (seq_p, gs);
3297 
3298       *expr_p = build_simple_mem_ref (t);
3299       return GS_ALL_DONE;
3300     }
3301 
3302   gimplify_seq_add_stmt (seq_p, gs);
3303   *expr_p = NULL;
3304   return GS_ALL_DONE;
3305 }
3306 
3307 /* A subroutine of gimplify_modify_expr.  Replace a MODIFY_EXPR with
3308    a call to __builtin_memset.  In this case we know that the RHS is
3309    a CONSTRUCTOR with an empty element list.  */
3310 
3311 static enum gimplify_status
3312 gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value,
3313     				gimple_seq *seq_p)
3314 {
3315   tree t, from, to, to_ptr;
3316   gcall *gs;
3317   location_t loc = EXPR_LOCATION (*expr_p);
3318 
3319   /* Assert our assumptions, to abort instead of producing wrong code
3320      silently if they are not met.  Beware that the RHS CONSTRUCTOR might
3321      not be immediately exposed.  */
3322   from = TREE_OPERAND (*expr_p, 1);
3323   if (TREE_CODE (from) == WITH_SIZE_EXPR)
3324     from = TREE_OPERAND (from, 0);
3325 
3326   gcc_assert (TREE_CODE (from) == CONSTRUCTOR
3327 	      && vec_safe_is_empty (CONSTRUCTOR_ELTS (from)));
3328 
3329   /* Now proceed.  */
3330   to = TREE_OPERAND (*expr_p, 0);
3331 
3332   to_ptr = build_fold_addr_expr_loc (loc, to);
3333   gimplify_arg (&to_ptr, seq_p, loc);
3334   t = builtin_decl_implicit (BUILT_IN_MEMSET);
3335 
3336   gs = gimple_build_call (t, 3, to_ptr, integer_zero_node, size);
3337 
3338   if (want_value)
3339     {
3340       /* tmp = memset() */
3341       t = create_tmp_var (TREE_TYPE (to_ptr));
3342       gimple_call_set_lhs (gs, t);
3343       gimplify_seq_add_stmt (seq_p, gs);
3344 
3345       *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t);
3346       return GS_ALL_DONE;
3347     }
3348 
3349   gimplify_seq_add_stmt (seq_p, gs);
3350   *expr_p = NULL;
3351   return GS_ALL_DONE;
3352 }
3353 
/* A subroutine of gimplify_init_ctor_preeval.  Called via walk_tree,
   determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
   assignment.  Return non-null if we detect a potential overlap.  */

/* Per-walk data passed (as XDATA) to gimplify_init_ctor_preeval_1.  */
struct gimplify_init_ctor_preeval_data
{
  /* The base decl of the lhs object.  May be NULL, in which case we
     have to assume the lhs is indirect.  */
  tree lhs_base_decl;

  /* The alias set of the lhs object.  */
  alias_set_type lhs_alias_set;
};
3367 
/* walk_tree callback: return *TP if it may overlap the lhs described
   by XDATA (a gimplify_init_ctor_preeval_data), NULL otherwise.  */

static tree
gimplify_init_ctor_preeval_1 (tree *tp, int *walk_subtrees, void *xdata)
{
  struct gimplify_init_ctor_preeval_data *data
    = (struct gimplify_init_ctor_preeval_data *) xdata;
  tree t = *tp;

  /* If we find the base object, obviously we have overlap.  */
  if (data->lhs_base_decl == t)
    return t;

  /* If the constructor component is indirect, determine if we have a
     potential overlap with the lhs.  The only bits of information we
     have to go on at this point are addressability and alias sets.  */
  if ((INDIRECT_REF_P (t)
       || TREE_CODE (t) == MEM_REF)
      && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
      && alias_sets_conflict_p (data->lhs_alias_set, get_alias_set (t)))
    return t;

  /* If the constructor component is a call, determine if it can hide a
     potential overlap with the lhs through an INDIRECT_REF like above.
     ??? Ugh - this is completely broken.  In fact this whole analysis
     doesn't look conservative.  */
  if (TREE_CODE (t) == CALL_EXPR)
    {
      tree type, fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t)));

      /* Check every pointer-typed parameter of the callee for a
	 possible aliasing conflict with the lhs.  */
      for (type = TYPE_ARG_TYPES (fntype); type; type = TREE_CHAIN (type))
	if (POINTER_TYPE_P (TREE_VALUE (type))
	    && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
	    && alias_sets_conflict_p (data->lhs_alias_set,
				      get_alias_set
				        (TREE_TYPE (TREE_VALUE (type)))))
	  return t;
    }

  /* Types and decls have no subtrees worth walking.  */
  if (IS_TYPE_OR_DECL_P (t))
    *walk_subtrees = 0;
  return NULL;
}
3409 
/* A subroutine of gimplify_init_constructor.  Pre-evaluate EXPR,
   force values that overlap with the lhs (as described by *DATA)
   into temporaries.  New statements go to PRE_P/POST_P.  On
   gimplification failure *EXPR_P is set to NULL.  */

static void
gimplify_init_ctor_preeval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
			    struct gimplify_init_ctor_preeval_data *data)
{
  enum gimplify_status one;

  /* If the value is constant, then there's nothing to pre-evaluate.  */
  if (TREE_CONSTANT (*expr_p))
    {
      /* Ensure it does not have side effects, it might contain a reference to
	 the object we're initializing.  */
      gcc_assert (!TREE_SIDE_EFFECTS (*expr_p));
      return;
    }

  /* If the type has non-trivial constructors, we can't pre-evaluate.  */
  if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p)))
    return;

  /* Recurse for nested constructors.  */
  if (TREE_CODE (*expr_p) == CONSTRUCTOR)
    {
      unsigned HOST_WIDE_INT ix;
      constructor_elt *ce;
      vec<constructor_elt, va_gc> *v = CONSTRUCTOR_ELTS (*expr_p);

      FOR_EACH_VEC_SAFE_ELT (v, ix, ce)
	gimplify_init_ctor_preeval (&ce->value, pre_p, post_p, data);

      return;
    }

  /* If this is a variable sized type, we must remember the size.  */
  maybe_with_size_expr (expr_p);

  /* Gimplify the constructor element to something appropriate for the rhs
     of a MODIFY_EXPR.  Given that we know the LHS is an aggregate, we know
     the gimplifier will consider this a store to memory.  Doing this
     gimplification now means that we won't have to deal with complicated
     language-specific trees, nor trees like SAVE_EXPR that can induce
     exponential search behavior.  */
  one = gimplify_expr (expr_p, pre_p, post_p, is_gimple_mem_rhs, fb_rvalue);
  if (one == GS_ERROR)
    {
      /* Null out the element so gimplify_init_ctor_eval skips it.  */
      *expr_p = NULL;
      return;
    }

  /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
     with the lhs, since "a = { .x=a }" doesn't make sense.  This will
     always be true for all scalars, since is_gimple_mem_rhs insists on a
     temporary variable for them.  */
  if (DECL_P (*expr_p))
    return;

  /* If this is of variable size, we have no choice but to assume it doesn't
     overlap since we can't make a temporary for it.  */
  if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p))) != INTEGER_CST)
    return;

  /* Otherwise, we must search for overlap ...  */
  if (!walk_tree (expr_p, gimplify_init_ctor_preeval_1, data, NULL))
    return;

  /* ... and if found, force the value into a temporary.  */
  *expr_p = get_formal_tmp_var (*expr_p, pre_p);
}
3481 
3482 /* A subroutine of gimplify_init_ctor_eval.  Create a loop for
3483    a RANGE_EXPR in a CONSTRUCTOR for an array.
3484 
3485       var = lower;
3486     loop_entry:
3487       object[var] = value;
3488       if (var == upper)
3489 	goto loop_exit;
3490       var = var + 1;
3491       goto loop_entry;
3492     loop_exit:
3493 
3494    We increment var _after_ the loop exit check because we might otherwise
3495    fail if upper == TYPE_MAX_VALUE (type for upper).
3496 
3497    Note that we never have to deal with SAVE_EXPRs here, because this has
3498    already been taken care of for us, in gimplify_init_ctor_preeval().  */
3499 
3500 static void gimplify_init_ctor_eval (tree, vec<constructor_elt, va_gc> *,
3501 				     gimple_seq *, bool);
3502 
/* Emit to PRE_P a loop assigning VALUE to OBJECT[LOWER..UPPER]; see the
   comment above the forward declaration for the loop shape and why the
   increment follows the exit test.  ARRAY_ELT_TYPE is the element type;
   CLEARED is passed through to gimplify_init_ctor_eval.  */

static void
gimplify_init_ctor_eval_range (tree object, tree lower, tree upper,
			       tree value, tree array_elt_type,
			       gimple_seq *pre_p, bool cleared)
{
  tree loop_entry_label, loop_exit_label, fall_thru_label;
  tree var, var_type, cref, tmp;

  loop_entry_label = create_artificial_label (UNKNOWN_LOCATION);
  loop_exit_label = create_artificial_label (UNKNOWN_LOCATION);
  fall_thru_label = create_artificial_label (UNKNOWN_LOCATION);

  /* Create and initialize the index variable.  */
  var_type = TREE_TYPE (upper);
  var = create_tmp_var (var_type);
  gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, lower));

  /* Add the loop entry label.  */
  gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_entry_label));

  /* Build the reference.  */
  cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
		 var, NULL_TREE, NULL_TREE);

  /* If we are a constructor, just call gimplify_init_ctor_eval to do
     the store.  Otherwise just assign value to the reference.  */

  if (TREE_CODE (value) == CONSTRUCTOR)
    /* NB we might have to call ourself recursively through
       gimplify_init_ctor_eval if the value is a constructor.  */
    gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
			     pre_p, cleared);
  else
    gimplify_seq_add_stmt (pre_p, gimple_build_assign (cref, value));

  /* We exit the loop when the index var is equal to the upper bound.  */
  gimplify_seq_add_stmt (pre_p,
			 gimple_build_cond (EQ_EXPR, var, upper,
					    loop_exit_label, fall_thru_label));

  gimplify_seq_add_stmt (pre_p, gimple_build_label (fall_thru_label));

  /* Otherwise, increment the index var...  */
  tmp = build2 (PLUS_EXPR, var_type, var,
		fold_convert (var_type, integer_one_node));
  gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, tmp));

  /* ...and jump back to the loop entry.  */
  gimplify_seq_add_stmt (pre_p, gimple_build_goto (loop_entry_label));

  /* Add the loop exit label.  */
  gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_exit_label));
}
3556 
3557 /* Return true if FDECL is accessing a field that is zero sized.  */
3558 
3559 static bool
3560 zero_sized_field_decl (const_tree fdecl)
3561 {
3562   if (TREE_CODE (fdecl) == FIELD_DECL && DECL_SIZE (fdecl)
3563       && integer_zerop (DECL_SIZE (fdecl)))
3564     return true;
3565   return false;
3566 }
3567 
3568 /* Return true if TYPE is zero sized.  */
3569 
3570 static bool
3571 zero_sized_type (const_tree type)
3572 {
3573   if (AGGREGATE_TYPE_P (type) && TYPE_SIZE (type)
3574       && integer_zerop (TYPE_SIZE (type)))
3575     return true;
3576   return false;
3577 }
3578 
/* A subroutine of gimplify_init_constructor.  Generate individual
   MODIFY_EXPRs for a CONSTRUCTOR.  OBJECT is the LHS against which the
   assignments should happen.  ELTS is the CONSTRUCTOR_ELTS of the
   CONSTRUCTOR.  CLEARED is true if the entire LHS object has been
   zeroed first.  Statements are appended to PRE_P.  */

static void
gimplify_init_ctor_eval (tree object, vec<constructor_elt, va_gc> *elts,
			 gimple_seq *pre_p, bool cleared)
{
  tree array_elt_type = NULL;
  unsigned HOST_WIDE_INT ix;
  tree purpose, value;

  /* For arrays, all elements share one type; record it up front.  */
  if (TREE_CODE (TREE_TYPE (object)) == ARRAY_TYPE)
    array_elt_type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object)));

  FOR_EACH_CONSTRUCTOR_ELT (elts, ix, purpose, value)
    {
      tree cref;

      /* NULL values are created above for gimplification errors.  */
      if (value == NULL)
	continue;

      /* If the object was pre-zeroed, storing another zero is redundant.  */
      if (cleared && initializer_zerop (value))
	continue;

      /* ??? Here's to hoping the front end fills in all of the indices,
	 so we don't have to figure out what's missing ourselves.  */
      gcc_assert (purpose);

      /* Skip zero-sized fields, unless value has side-effects.  This can
	 happen with calls to functions returning a zero-sized type, which
	 we shouldn't discard.  As a number of downstream passes don't
	 expect sets of zero-sized fields, we rely on the gimplification of
	 the MODIFY_EXPR we make below to drop the assignment statement.  */
      if (! TREE_SIDE_EFFECTS (value) && zero_sized_field_decl (purpose))
	continue;

      /* If we have a RANGE_EXPR, we have to build a loop to assign the
	 whole range.  */
      if (TREE_CODE (purpose) == RANGE_EXPR)
	{
	  tree lower = TREE_OPERAND (purpose, 0);
	  tree upper = TREE_OPERAND (purpose, 1);

	  /* If the lower bound is equal to upper, just treat it as if
	     upper was the index.  */
	  if (simple_cst_equal (lower, upper))
	    purpose = upper;
	  else
	    {
	      gimplify_init_ctor_eval_range (object, lower, upper, value,
					     array_elt_type, pre_p, cleared);
	      continue;
	    }
	}

      /* Build the reference to this element: an ARRAY_REF for arrays,
	 a COMPONENT_REF for record fields.  */
      if (array_elt_type)
	{
	  /* Do not use bitsizetype for ARRAY_REF indices.  */
	  if (TYPE_DOMAIN (TREE_TYPE (object)))
	    purpose
	      = fold_convert (TREE_TYPE (TYPE_DOMAIN (TREE_TYPE (object))),
			      purpose);
	  cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
			 purpose, NULL_TREE, NULL_TREE);
	}
      else
	{
	  gcc_assert (TREE_CODE (purpose) == FIELD_DECL);
	  cref = build3 (COMPONENT_REF, TREE_TYPE (purpose),
			 unshare_expr (object), purpose, NULL_TREE);
	}

      /* Recurse into nested constructors, except vector constructors,
	 which are valid GIMPLE rhs values as a whole.  */
      if (TREE_CODE (value) == CONSTRUCTOR
	  && TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE)
	gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
				 pre_p, cleared);
      else
	{
	  tree init = build2 (INIT_EXPR, TREE_TYPE (cref), cref, value);
	  gimplify_and_add (init, pre_p);
	  ggc_free (init);
	}
    }
}
3667 
3668 /* Return the appropriate RHS predicate for this LHS.  */
3669 
3670 gimple_predicate
3671 rhs_predicate_for (tree lhs)
3672 {
3673   if (is_gimple_reg (lhs))
3674     return is_gimple_reg_rhs_or_call;
3675   else
3676     return is_gimple_mem_rhs_or_call;
3677 }
3678 
3679 /* Gimplify a C99 compound literal expression.  This just means adding
3680    the DECL_EXPR before the current statement and using its anonymous
3681    decl instead.  */
3682 
static enum gimplify_status
gimplify_compound_literal_expr (tree *expr_p, gimple_seq *pre_p,
				bool (*gimple_test_f) (tree),
				fallback_t fallback)
{
  tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (*expr_p);
  tree decl = DECL_EXPR_DECL (decl_s);
  tree init = DECL_INITIAL (decl);
  /* Mark the decl as addressable if the compound literal
     expression is addressable now, otherwise it is marked too late
     after we gimplify the initialization expression.  */
  if (TREE_ADDRESSABLE (*expr_p))
    TREE_ADDRESSABLE (decl) = 1;
  /* Otherwise, if we don't need an lvalue and have a literal directly
     substitute it.  Check if it matches the gimple predicate, as
     otherwise we'd generate a new temporary, and we can as well just
     use the decl we already have.  */
  else if (!TREE_ADDRESSABLE (decl)
	   && init
	   && (fallback & fb_lvalue) == 0
	   && gimple_test_f (init))
    {
      *expr_p = init;
      return GS_OK;
    }

  /* Preliminarily mark non-addressed complex variables as eligible
     for promotion to gimple registers.  We'll transform their uses
     as we find them.  */
  if ((TREE_CODE (TREE_TYPE (decl)) == COMPLEX_TYPE
       || TREE_CODE (TREE_TYPE (decl)) == VECTOR_TYPE)
      && !TREE_THIS_VOLATILE (decl)
      && !needs_to_live_in_memory (decl))
    DECL_GIMPLE_REG_P (decl) = 1;

  /* If the decl is not addressable, then it is being used in some
     expression or on the right hand side of a statement, and it can
     be put into a readonly data section.  */
  if (!TREE_ADDRESSABLE (decl) && (fallback & fb_lvalue) == 0)
    TREE_READONLY (decl) = 1;

  /* This decl isn't mentioned in the enclosing block, so add it to the
     list of temps.  FIXME it seems a bit of a kludge to say that
     anonymous artificial vars aren't pushed, but everything else is.  */
  if (DECL_NAME (decl) == NULL_TREE && !DECL_SEEN_IN_BIND_EXPR_P (decl))
    gimple_add_tmp_var (decl);

  /* Emit the DECL_EXPR (and hence the initialization) before the current
     statement, then use the anonymous decl in place of the literal.  */
  gimplify_and_add (decl_s, pre_p);
  *expr_p = decl;
  return GS_OK;
}
3734 
3735 /* Optimize embedded COMPOUND_LITERAL_EXPRs within a CONSTRUCTOR,
3736    return a new CONSTRUCTOR if something changed.  */
3737 
3738 static tree
3739 optimize_compound_literals_in_ctor (tree orig_ctor)
3740 {
3741   tree ctor = orig_ctor;
3742   vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (ctor);
3743   unsigned int idx, num = vec_safe_length (elts);
3744 
3745   for (idx = 0; idx < num; idx++)
3746     {
3747       tree value = (*elts)[idx].value;
3748       tree newval = value;
3749       if (TREE_CODE (value) == CONSTRUCTOR)
3750 	newval = optimize_compound_literals_in_ctor (value);
3751       else if (TREE_CODE (value) == COMPOUND_LITERAL_EXPR)
3752 	{
3753 	  tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (value);
3754 	  tree decl = DECL_EXPR_DECL (decl_s);
3755 	  tree init = DECL_INITIAL (decl);
3756 
3757 	  if (!TREE_ADDRESSABLE (value)
3758 	      && !TREE_ADDRESSABLE (decl)
3759 	      && init
3760 	      && TREE_CODE (init) == CONSTRUCTOR)
3761 	    newval = optimize_compound_literals_in_ctor (init);
3762 	}
3763       if (newval == value)
3764 	continue;
3765 
3766       if (ctor == orig_ctor)
3767 	{
3768 	  ctor = copy_node (orig_ctor);
3769 	  CONSTRUCTOR_ELTS (ctor) = vec_safe_copy (elts);
3770 	  elts = CONSTRUCTOR_ELTS (ctor);
3771 	}
3772       (*elts)[idx].value = newval;
3773     }
3774   return ctor;
3775 }
3776 
3777 /* A subroutine of gimplify_modify_expr.  Break out elements of a
3778    CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.
3779 
3780    Note that we still need to clear any elements that don't have explicit
3781    initializers, so if not all elements are initialized we keep the
3782    original MODIFY_EXPR, we just remove all of the constructor elements.
3783 
3784    If NOTIFY_TEMP_CREATION is true, do not gimplify, just return
3785    GS_ERROR if we would have to create a temporary when gimplifying
3786    this constructor.  Otherwise, return GS_OK.
3787 
3788    If NOTIFY_TEMP_CREATION is false, just do the gimplification.  */
3789 
static enum gimplify_status
gimplify_init_constructor (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
			   bool want_value, bool notify_temp_creation)
{
  tree object, ctor, type;
  enum gimplify_status ret;
  vec<constructor_elt, va_gc> *elts;

  gcc_assert (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == CONSTRUCTOR);

  if (!notify_temp_creation)
    {
      ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
			   is_gimple_lvalue, fb_lvalue);
      if (ret == GS_ERROR)
	return ret;
    }

  /* Substitute compound literal initializers in the element list before
     inspecting the constructor.  */
  object = TREE_OPERAND (*expr_p, 0);
  ctor = TREE_OPERAND (*expr_p, 1) =
    optimize_compound_literals_in_ctor (TREE_OPERAND (*expr_p, 1));
  type = TREE_TYPE (ctor);
  elts = CONSTRUCTOR_ELTS (ctor);
  ret = GS_ALL_DONE;

  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
    case ARRAY_TYPE:
      {
	struct gimplify_init_ctor_preeval_data preeval_data;
	HOST_WIDE_INT num_ctor_elements, num_nonzero_elements;
	bool cleared, complete_p, valid_const_initializer;

	/* Aggregate types must lower constructors to initialization of
	   individual elements.  The exception is that a CONSTRUCTOR node
	   with no elements indicates zero-initialization of the whole.  */
	if (vec_safe_is_empty (elts))
	  {
	    if (notify_temp_creation)
	      return GS_OK;
	    break;
	  }

	/* Fetch information about the constructor to direct later processing.
	   We might want to make static versions of it in various cases, and
	   can only do so if it known to be a valid constant initializer.  */
	valid_const_initializer
	  = categorize_ctor_elements (ctor, &num_nonzero_elements,
				      &num_ctor_elements, &complete_p);

	/* If a const aggregate variable is being initialized, then it
	   should never be a lose to promote the variable to be static.  */
	if (valid_const_initializer
	    && num_nonzero_elements > 1
	    && TREE_READONLY (object)
	    && TREE_CODE (object) == VAR_DECL
	    && (flag_merge_constants >= 2 || !TREE_ADDRESSABLE (object)))
	  {
	    if (notify_temp_creation)
	      return GS_ERROR;
	    DECL_INITIAL (object) = ctor;
	    TREE_STATIC (object) = 1;
	    if (!DECL_NAME (object))
	      DECL_NAME (object) = create_tmp_var_name ("C");
	    walk_tree (&DECL_INITIAL (object), force_labels_r, NULL, NULL);

	    /* ??? C++ doesn't automatically append a .<number> to the
	       assembler name, and even when it does, it looks at FE private
	       data structures to figure out what that number should be,
	       which are not set for this variable.  I suppose this is
	       important for local statics for inline functions, which aren't
	       "local" in the object file sense.  So in order to get a unique
	       TU-local symbol, we must invoke the lhd version now.  */
	    lhd_set_decl_assembler_name (object);

	    *expr_p = NULL_TREE;
	    break;
	  }

	/* If there are "lots" of initialized elements, even discounting
	   those that are not address constants (and thus *must* be
	   computed at runtime), then partition the constructor into
	   constant and non-constant parts.  Block copy the constant
	   parts in, then generate code for the non-constant parts.  */
	/* TODO.  There's code in cp/typeck.c to do this.  */

	if (int_size_in_bytes (TREE_TYPE (ctor)) < 0)
	  /* store_constructor will ignore the clearing of variable-sized
	     objects.  Initializers for such objects must explicitly set
	     every field that needs to be set.  */
	  cleared = false;
	else if (!complete_p && !CONSTRUCTOR_NO_CLEARING (ctor))
	  /* If the constructor isn't complete, clear the whole object
	     beforehand, unless CONSTRUCTOR_NO_CLEARING is set on it.

	     ??? This ought not to be needed.  For any element not present
	     in the initializer, we should simply set them to zero.  Except
	     we'd need to *find* the elements that are not present, and that
	     requires trickery to avoid quadratic compile-time behavior in
	     large cases or excessive memory use in small cases.  */
	  cleared = true;
	else if (num_ctor_elements - num_nonzero_elements
		 > CLEAR_RATIO (optimize_function_for_speed_p (cfun))
		 && num_nonzero_elements < num_ctor_elements / 4)
	  /* If there are "lots" of zeros, it's more efficient to clear
	     the memory and then set the nonzero elements.  */
	  cleared = true;
	else
	  cleared = false;

	/* If there are "lots" of initialized elements, and all of them
	   are valid address constants, then the entire initializer can
	   be dropped to memory, and then memcpy'd out.  Don't do this
	   for sparse arrays, though, as it's more efficient to follow
	   the standard CONSTRUCTOR behavior of memset followed by
	   individual element initialization.  Also don't do this for small
	   all-zero initializers (which aren't big enough to merit
	   clearing), and don't try to make bitwise copies of
	   TREE_ADDRESSABLE types.

	   We cannot apply such transformation when compiling chkp static
	   initializer because creation of initializer image in the memory
	   will require static initialization of bounds for it.  It should
	   result in another gimplification of similar initializer and we
	   may fall into infinite loop.  */
	if (valid_const_initializer
	    && !(cleared || num_nonzero_elements == 0)
	    && !TREE_ADDRESSABLE (type)
	    && (!current_function_decl
		|| !lookup_attribute ("chkp ctor",
				      DECL_ATTRIBUTES (current_function_decl))))
	  {
	    HOST_WIDE_INT size = int_size_in_bytes (type);
	    unsigned int align;

	    /* ??? We can still get unbounded array types, at least
	       from the C++ front end.  This seems wrong, but attempt
	       to work around it for now.  */
	    if (size < 0)
	      {
		size = int_size_in_bytes (TREE_TYPE (object));
		if (size >= 0)
		  TREE_TYPE (ctor) = type = TREE_TYPE (object);
	      }

	    /* Find the maximum alignment we can assume for the object.  */
	    /* ??? Make use of DECL_OFFSET_ALIGN.  */
	    if (DECL_P (object))
	      align = DECL_ALIGN (object);
	    else
	      align = TYPE_ALIGN (type);

	    /* Do a block move either if the size is so small as to make
	       each individual move a sub-unit move on average, or if it
	       is so large as to make individual moves inefficient.  */
	    if (size > 0
		&& num_nonzero_elements > 1
		&& (size < num_nonzero_elements
		    || !can_move_by_pieces (size, align)))
	      {
		if (notify_temp_creation)
		  return GS_ERROR;

		walk_tree (&ctor, force_labels_r, NULL, NULL);
		ctor = tree_output_constant_def (ctor);
		if (!useless_type_conversion_p (type, TREE_TYPE (ctor)))
		  ctor = build1 (VIEW_CONVERT_EXPR, type, ctor);
		TREE_OPERAND (*expr_p, 1) = ctor;

		/* This is no longer an assignment of a CONSTRUCTOR, but
		   we still may have processing to do on the LHS.  So
		   pretend we didn't do anything here to let that happen.  */
		return GS_UNHANDLED;
	      }
	  }

	/* If the target is volatile, we have non-zero elements and more than
	   one field to assign, initialize the target from a temporary.  */
	if (TREE_THIS_VOLATILE (object)
	    && !TREE_ADDRESSABLE (type)
	    && num_nonzero_elements > 0
	    && vec_safe_length (elts) > 1)
	  {
	    tree temp = create_tmp_var (TYPE_MAIN_VARIANT (type));
	    TREE_OPERAND (*expr_p, 0) = temp;
	    *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
			      *expr_p,
			      build2 (MODIFY_EXPR, void_type_node,
				      object, temp));
	    return GS_OK;
	  }

	/* A caller merely probing for temporary creation has its answer
	   at this point.  */
	if (notify_temp_creation)
	  return GS_OK;

	/* If there are nonzero elements and if needed, pre-evaluate to capture
	   elements overlapping with the lhs into temporaries.  We must do this
	   before clearing to fetch the values before they are zeroed-out.  */
	if (num_nonzero_elements > 0 && TREE_CODE (*expr_p) != INIT_EXPR)
	  {
	    preeval_data.lhs_base_decl = get_base_address (object);
	    if (!DECL_P (preeval_data.lhs_base_decl))
	      preeval_data.lhs_base_decl = NULL;
	    preeval_data.lhs_alias_set = get_alias_set (object);

	    gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p, 1),
					pre_p, post_p, &preeval_data);
	  }

	if (cleared)
	  {
	    /* Zap the CONSTRUCTOR element list, which simplifies this case.
	       Note that we still have to gimplify, in order to handle the
	       case of variable sized types.  Avoid shared tree structures.  */
	    CONSTRUCTOR_ELTS (ctor) = NULL;
	    TREE_SIDE_EFFECTS (ctor) = 0;
	    object = unshare_expr (object);
	    gimplify_stmt (expr_p, pre_p);
	  }

	/* If we have not block cleared the object, or if there are nonzero
	   elements in the constructor, add assignments to the individual
	   scalar fields of the object.  */
	if (!cleared || num_nonzero_elements > 0)
	  gimplify_init_ctor_eval (object, elts, pre_p, cleared);

	*expr_p = NULL_TREE;
      }
      break;

    case COMPLEX_TYPE:
      {
	tree r, i;

	if (notify_temp_creation)
	  return GS_OK;

	/* Extract the real and imaginary parts out of the ctor.  */
	gcc_assert (elts->length () == 2);
	r = (*elts)[0].value;
	i = (*elts)[1].value;
	/* Missing parts (NULL values are created for gimplification
	   errors) are replaced by zero.  */
	if (r == NULL || i == NULL)
	  {
	    tree zero = build_zero_cst (TREE_TYPE (type));
	    if (r == NULL)
	      r = zero;
	    if (i == NULL)
	      i = zero;
	  }

	/* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
	   represent creation of a complex value.  */
	if (TREE_CONSTANT (r) && TREE_CONSTANT (i))
	  {
	    ctor = build_complex (type, r, i);
	    TREE_OPERAND (*expr_p, 1) = ctor;
	  }
	else
	  {
	    ctor = build2 (COMPLEX_EXPR, type, r, i);
	    TREE_OPERAND (*expr_p, 1) = ctor;
	    ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1),
				 pre_p,
				 post_p,
				 rhs_predicate_for (TREE_OPERAND (*expr_p, 0)),
				 fb_rvalue);
	  }
      }
      break;

    case VECTOR_TYPE:
      {
	unsigned HOST_WIDE_INT ix;
	constructor_elt *ce;

	if (notify_temp_creation)
	  return GS_OK;

	/* Go ahead and simplify constant constructors to VECTOR_CST.  */
	if (TREE_CONSTANT (ctor))
	  {
	    bool constant_p = true;
	    tree value;

	    /* Even when ctor is constant, it might contain non-*_CST
	       elements, such as addresses or trapping values like
	       1.0/0.0 - 1.0/0.0.  Such expressions don't belong
	       in VECTOR_CST nodes.  */
	    FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
	      if (!CONSTANT_CLASS_P (value))
		{
		  constant_p = false;
		  break;
		}

	    if (constant_p)
	      {
		TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts);
		break;
	      }

	    TREE_CONSTANT (ctor) = 0;
	  }

	/* Vector types use CONSTRUCTOR all the way through gimple
	  compilation as a general initializer.  */
	FOR_EACH_VEC_SAFE_ELT (elts, ix, ce)
	  {
	    enum gimplify_status tret;
	    tret = gimplify_expr (&ce->value, pre_p, post_p, is_gimple_val,
				  fb_rvalue);
	    if (tret == GS_ERROR)
	      ret = GS_ERROR;
	  }
	/* If the lhs is not a gimple register, evaluate the constructor
	   into a formal temporary first.  */
	if (!is_gimple_reg (TREE_OPERAND (*expr_p, 0)))
	  TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (ctor, pre_p);
      }
      break;

    default:
      /* So how did we get a CONSTRUCTOR for a scalar type?  */
      gcc_unreachable ();
    }

  if (ret == GS_ERROR)
    return GS_ERROR;
  else if (want_value)
    {
      *expr_p = object;
      return GS_OK;
    }
  else
    {
      /* If we have gimplified both sides of the initializer but have
	 not emitted an assignment, do so now.  */
      if (*expr_p)
	{
	  tree lhs = TREE_OPERAND (*expr_p, 0);
	  tree rhs = TREE_OPERAND (*expr_p, 1);
	  gassign *init = gimple_build_assign (lhs, rhs);
	  gimplify_seq_add_stmt (pre_p, init);
	  *expr_p = NULL;
	}

      return GS_ALL_DONE;
    }
}
4140 
4141 /* Given a pointer value OP0, return a simplified version of an
4142    indirection through OP0, or NULL_TREE if no simplification is
4143    possible.  This may only be applied to a rhs of an expression.
4144    Note that the resulting type may be different from the type pointed
4145    to in the sense that it is still compatible from the langhooks
4146    point of view. */
4147 
static tree
gimple_fold_indirect_ref_rhs (tree t)
{
  /* Thin wrapper: all the folding work is done by gimple_fold_indirect_ref,
     which returns NULL_TREE when no simplification applies.  */
  return gimple_fold_indirect_ref (t);
}
4153 
4154 /* Subroutine of gimplify_modify_expr to do simplifications of
4155    MODIFY_EXPRs based on the code of the RHS.  We loop for as long as
4156    something changes.  */
4157 
static enum gimplify_status
gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p,
			  gimple_seq *pre_p, gimple_seq *post_p,
			  bool want_value)
{
  enum gimplify_status ret = GS_UNHANDLED;
  bool changed;

  /* Iterate to a fixpoint: each transformation may expose another.  */
  do
    {
      changed = false;
      switch (TREE_CODE (*from_p))
	{
	case VAR_DECL:
	  /* If we're assigning from a read-only variable initialized with
	     a constructor, do the direct assignment from the constructor,
	     but only if neither source nor target are volatile since this
	     latter assignment might end up being done on a per-field basis.  */
	  if (DECL_INITIAL (*from_p)
	      && TREE_READONLY (*from_p)
	      && !TREE_THIS_VOLATILE (*from_p)
	      && !TREE_THIS_VOLATILE (*to_p)
	      && TREE_CODE (DECL_INITIAL (*from_p)) == CONSTRUCTOR)
	    {
	      tree old_from = *from_p;
	      enum gimplify_status subret;

	      /* Move the constructor into the RHS.  */
	      *from_p = unshare_expr (DECL_INITIAL (*from_p));

	      /* Let's see if gimplify_init_constructor will need to put
		 it in memory.  */
	      subret = gimplify_init_constructor (expr_p, NULL, NULL,
						  false, true);
	      if (subret == GS_ERROR)
		{
		  /* If so, revert the change.  */
		  *from_p = old_from;
		}
	      else
		{
		  ret = GS_OK;
		  changed = true;
		}
	    }
	  break;
	case INDIRECT_REF:
	  {
	    /* If we have code like

	     *(const A*)(A*)&x

	     where the type of "x" is a (possibly cv-qualified variant
	     of "A"), treat the entire expression as identical to "x".
	     This kind of code arises in C++ when an object is bound
	     to a const reference, and if "x" is a TARGET_EXPR we want
	     to take advantage of the optimization below.  */
	    bool volatile_p = TREE_THIS_VOLATILE (*from_p);
	    tree t = gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0));
	    if (t)
	      {
		/* Preserve the volatility of the original reference on the
		   folded result; re-wrap bare decls in a MEM_REF so the
		   flag has somewhere to live.  */
		if (TREE_THIS_VOLATILE (t) != volatile_p)
		  {
		    if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
		      t = build_simple_mem_ref_loc (EXPR_LOCATION (*from_p),
						    build_fold_addr_expr (t));
		    if (REFERENCE_CLASS_P (t))
		      TREE_THIS_VOLATILE (t) = volatile_p;
		  }
		*from_p = t;
		ret = GS_OK;
		changed = true;
	      }
	    break;
	  }

	case TARGET_EXPR:
	  {
	    /* If we are initializing something from a TARGET_EXPR, strip the
	       TARGET_EXPR and initialize it directly, if possible.  This can't
	       be done if the initializer is void, since that implies that the
	       temporary is set in some non-trivial way.

	       ??? What about code that pulls out the temp and uses it
	       elsewhere? I think that such code never uses the TARGET_EXPR as
	       an initializer.  If I'm wrong, we'll die because the temp won't
	       have any RTL.  In that case, I guess we'll need to replace
	       references somehow.  */
	    tree init = TARGET_EXPR_INITIAL (*from_p);

	    if (init
		&& !VOID_TYPE_P (TREE_TYPE (init)))
	      {
		*from_p = init;
		ret = GS_OK;
		changed = true;
	      }
	  }
	  break;

	case COMPOUND_EXPR:
	  /* Remove any COMPOUND_EXPR in the RHS so the following cases will be
	     caught.  */
	  gimplify_compound_expr (from_p, pre_p, true);
	  ret = GS_OK;
	  changed = true;
	  break;

	case CONSTRUCTOR:
	  /* If we already made some changes, let the front end have a
	     crack at this before we break it down.  */
	  if (ret != GS_UNHANDLED)
	    break;
	  /* If we're initializing from a CONSTRUCTOR, break this into
	     individual MODIFY_EXPRs.  */
	  return gimplify_init_constructor (expr_p, pre_p, post_p, want_value,
					    false);

	case COND_EXPR:
	  /* If we're assigning to a non-register type, push the assignment
	     down into the branches.  This is mandatory for ADDRESSABLE types,
	     since we cannot generate temporaries for such, but it saves a
	     copy in other cases as well.  */
	  if (!is_gimple_reg_type (TREE_TYPE (*from_p)))
	    {
	      /* This code should mirror the code in gimplify_cond_expr. */
	      enum tree_code code = TREE_CODE (*expr_p);
	      tree cond = *from_p;
	      tree result = *to_p;

	      ret = gimplify_expr (&result, pre_p, post_p,
				   is_gimple_lvalue, fb_lvalue);
	      if (ret != GS_ERROR)
		ret = GS_OK;

	      if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node)
		TREE_OPERAND (cond, 1)
		  = build2 (code, void_type_node, result,
			    TREE_OPERAND (cond, 1));
	      if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
		TREE_OPERAND (cond, 2)
		  = build2 (code, void_type_node, unshare_expr (result),
			    TREE_OPERAND (cond, 2));

	      TREE_TYPE (cond) = void_type_node;
	      recalculate_side_effects (cond);

	      if (want_value)
		{
		  gimplify_and_add (cond, pre_p);
		  *expr_p = unshare_expr (result);
		}
	      else
		*expr_p = cond;
	      return ret;
	    }
	  break;

	case CALL_EXPR:
	  /* For calls that return in memory, give *to_p as the CALL_EXPR's
	     return slot so that we don't generate a temporary.  */
	  if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p)
	      && aggregate_value_p (*from_p, *from_p))
	    {
	      bool use_target;

	      if (!(rhs_predicate_for (*to_p))(*from_p))
		/* If we need a temporary, *to_p isn't accurate.  */
		use_target = false;
	      /* It's OK to use the return slot directly unless it's an NRV. */
	      else if (TREE_CODE (*to_p) == RESULT_DECL
		       && DECL_NAME (*to_p) == NULL_TREE
		       && needs_to_live_in_memory (*to_p))
		use_target = true;
	      else if (is_gimple_reg_type (TREE_TYPE (*to_p))
		       || (DECL_P (*to_p) && DECL_REGISTER (*to_p)))
		/* Don't force regs into memory.  */
		use_target = false;
	      else if (TREE_CODE (*expr_p) == INIT_EXPR)
		/* It's OK to use the target directly if it's being
		   initialized. */
		use_target = true;
	      else if (variably_modified_type_p (TREE_TYPE (*to_p), NULL_TREE))
		/* Always use the target and thus RSO for variable-sized types.
		   GIMPLE cannot deal with a variable-sized assignment
		   embedded in a call statement.  */
		use_target = true;
	      else if (TREE_CODE (*to_p) != SSA_NAME
		      && (!is_gimple_variable (*to_p)
			  || needs_to_live_in_memory (*to_p)))
		/* Don't use the original target if it's already addressable;
		   if its address escapes, and the called function uses the
		   NRV optimization, a conforming program could see *to_p
		   change before the called function returns; see c++/19317.
		   When optimizing, the return_slot pass marks more functions
		   as safe after we have escape info.  */
		use_target = false;
	      else
		use_target = true;

	      if (use_target)
		{
		  CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1;
		  mark_addressable (*to_p);
		}
	    }
	  break;

	case WITH_SIZE_EXPR:
	  /* Likewise for calls that return an aggregate of non-constant size,
	     since we would not be able to generate a temporary at all.  */
	  if (TREE_CODE (TREE_OPERAND (*from_p, 0)) == CALL_EXPR)
	    {
	      *from_p = TREE_OPERAND (*from_p, 0);
	      /* We don't change ret in this case because the
		 WITH_SIZE_EXPR might have been added in
		 gimplify_modify_expr, so returning GS_OK would lead to an
		 infinite loop.  */
	      changed = true;
	    }
	  break;

	  /* If we're initializing from a container, push the initialization
	     inside it.  */
	case CLEANUP_POINT_EXPR:
	case BIND_EXPR:
	case STATEMENT_LIST:
	  {
	    tree wrap = *from_p;
	    tree t;

	    ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_min_lval,
				 fb_lvalue);
	    if (ret != GS_ERROR)
	      ret = GS_OK;

	    t = voidify_wrapper_expr (wrap, *expr_p);
	    gcc_assert (t == *expr_p);

	    if (want_value)
	      {
		gimplify_and_add (wrap, pre_p);
		*expr_p = unshare_expr (*to_p);
	      }
	    else
	      *expr_p = wrap;
	    return GS_OK;
	  }

	case COMPOUND_LITERAL_EXPR:
	  {
	    tree complit = TREE_OPERAND (*expr_p, 1);
	    tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (complit);
	    tree decl = DECL_EXPR_DECL (decl_s);
	    tree init = DECL_INITIAL (decl);

	    /* struct T x = (struct T) { 0, 1, 2 } can be optimized
	       into struct T x = { 0, 1, 2 } if the address of the
	       compound literal has never been taken.  */
	    if (!TREE_ADDRESSABLE (complit)
		&& !TREE_ADDRESSABLE (decl)
		&& init)
	      {
		*expr_p = copy_node (*expr_p);
		TREE_OPERAND (*expr_p, 1) = init;
		return GS_OK;
	      }
	  }
	  /* FALLTHRU -- falls into the default case, which does nothing.  */

	default:
	  break;
	}
    }
  while (changed);

  return ret;
}
4435 
4436 
4437 /* Return true if T looks like a valid GIMPLE statement.  */
4438 
4439 static bool
4440 is_gimple_stmt (tree t)
4441 {
4442   const enum tree_code code = TREE_CODE (t);
4443 
4444   switch (code)
4445     {
4446     case NOP_EXPR:
4447       /* The only valid NOP_EXPR is the empty statement.  */
4448       return IS_EMPTY_STMT (t);
4449 
4450     case BIND_EXPR:
4451     case COND_EXPR:
4452       /* These are only valid if they're void.  */
4453       return TREE_TYPE (t) == NULL || VOID_TYPE_P (TREE_TYPE (t));
4454 
4455     case SWITCH_EXPR:
4456     case GOTO_EXPR:
4457     case RETURN_EXPR:
4458     case LABEL_EXPR:
4459     case CASE_LABEL_EXPR:
4460     case TRY_CATCH_EXPR:
4461     case TRY_FINALLY_EXPR:
4462     case EH_FILTER_EXPR:
4463     case CATCH_EXPR:
4464     case ASM_EXPR:
4465     case STATEMENT_LIST:
4466     case OACC_PARALLEL:
4467     case OACC_KERNELS:
4468     case OACC_DATA:
4469     case OACC_HOST_DATA:
4470     case OACC_DECLARE:
4471     case OACC_UPDATE:
4472     case OACC_ENTER_DATA:
4473     case OACC_EXIT_DATA:
4474     case OACC_CACHE:
4475     case OMP_PARALLEL:
4476     case OMP_FOR:
4477     case OMP_SIMD:
4478     case CILK_SIMD:
4479     case OMP_DISTRIBUTE:
4480     case OACC_LOOP:
4481     case OMP_SECTIONS:
4482     case OMP_SECTION:
4483     case OMP_SINGLE:
4484     case OMP_MASTER:
4485     case OMP_TASKGROUP:
4486     case OMP_ORDERED:
4487     case OMP_CRITICAL:
4488     case OMP_TASK:
4489       /* These are always void.  */
4490       return true;
4491 
4492     case CALL_EXPR:
4493     case MODIFY_EXPR:
4494     case PREDICT_EXPR:
4495       /* These are valid regardless of their type.  */
4496       return true;
4497 
4498     default:
4499       return false;
4500     }
4501 }
4502 
4503 
4504 /* Promote partial stores to COMPLEX variables to total stores.  *EXPR_P is
4505    a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a variable with
4506    DECL_GIMPLE_REG_P set.
4507 
4508    IMPORTANT NOTE: This promotion is performed by introducing a load of the
4509    other, unmodified part of the complex object just before the total store.
4510    As a consequence, if the object is still uninitialized, an undefined value
4511    will be loaded into a register, which may result in a spurious exception
4512    if the register is floating-point and the value happens to be a signaling
4513    NaN for example.  Then the fully-fledged complex operations lowering pass
4514    followed by a DCE pass are necessary in order to fix things up.  */
4515 
4516 static enum gimplify_status
4517 gimplify_modify_expr_complex_part (tree *expr_p, gimple_seq *pre_p,
4518                                    bool want_value)
4519 {
4520   enum tree_code code, ocode;
4521   tree lhs, rhs, new_rhs, other, realpart, imagpart;
4522 
4523   lhs = TREE_OPERAND (*expr_p, 0);
4524   rhs = TREE_OPERAND (*expr_p, 1);
4525   code = TREE_CODE (lhs);
4526   lhs = TREE_OPERAND (lhs, 0);
4527 
4528   ocode = code == REALPART_EXPR ? IMAGPART_EXPR : REALPART_EXPR;
4529   other = build1 (ocode, TREE_TYPE (rhs), lhs);
4530   TREE_NO_WARNING (other) = 1;
4531   other = get_formal_tmp_var (other, pre_p);
4532 
4533   realpart = code == REALPART_EXPR ? rhs : other;
4534   imagpart = code == REALPART_EXPR ? other : rhs;
4535 
4536   if (TREE_CONSTANT (realpart) && TREE_CONSTANT (imagpart))
4537     new_rhs = build_complex (TREE_TYPE (lhs), realpart, imagpart);
4538   else
4539     new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart);
4540 
4541   gimplify_seq_add_stmt (pre_p, gimple_build_assign (lhs, new_rhs));
4542   *expr_p = (want_value) ? rhs : NULL_TREE;
4543 
4544   return GS_ALL_DONE;
4545 }
4546 
/* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.

      modify_expr
	      : varname '=' rhs
	      | '*' ID '=' rhs

    PRE_P points to the list where side effects that must happen before
	*EXPR_P should be stored.

    POST_P points to the list where side effects that must happen after
	*EXPR_P should be stored.

    WANT_VALUE is nonzero iff we want to use the value of this expression
	in another expression.  */

static enum gimplify_status
gimplify_modify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
		      bool want_value)
{
  tree *from_p = &TREE_OPERAND (*expr_p, 1);
  tree *to_p = &TREE_OPERAND (*expr_p, 0);
  enum gimplify_status ret = GS_UNHANDLED;
  gimple assign;
  location_t loc = EXPR_LOCATION (*expr_p);
  gimple_stmt_iterator gsi;

  gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR
	      || TREE_CODE (*expr_p) == INIT_EXPR);

  /* Trying to simplify a clobber using normal logic doesn't work,
     so handle it here.  */
  if (TREE_CLOBBER_P (*from_p))
    {
      ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
      if (ret == GS_ERROR)
	return ret;
      gcc_assert (!want_value
		  && (TREE_CODE (*to_p) == VAR_DECL
		      || TREE_CODE (*to_p) == MEM_REF));
      gimplify_seq_add_stmt (pre_p, gimple_build_assign (*to_p, *from_p));
      *expr_p = NULL;
      return GS_ALL_DONE;
    }

  /* Insert pointer conversions required by the middle-end that are not
     required by the frontend.  This fixes middle-end type checking for
     example gcc.dg/redecl-6.c.  */
  if (POINTER_TYPE_P (TREE_TYPE (*to_p)))
    {
      STRIP_USELESS_TYPE_CONVERSION (*from_p);
      if (!useless_type_conversion_p (TREE_TYPE (*to_p), TREE_TYPE (*from_p)))
	*from_p = fold_convert_loc (loc, TREE_TYPE (*to_p), *from_p);
    }

  /* See if any simplifications can be done based on what the RHS is.  */
  ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
				  want_value);
  if (ret != GS_UNHANDLED)
    return ret;

  /* For zero sized types only gimplify the left hand side and right hand
     side as statements and throw away the assignment.  Do this after
     gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable
     types properly.  */
  if (zero_sized_type (TREE_TYPE (*from_p)) && !want_value)
    {
      gimplify_stmt (from_p, pre_p);
      gimplify_stmt (to_p, pre_p);
      *expr_p = NULL_TREE;
      return GS_ALL_DONE;
    }

  /* If the value being copied is of variable width, compute the length
     of the copy into a WITH_SIZE_EXPR.   Note that we need to do this
     before gimplifying any of the operands so that we can resolve any
     PLACEHOLDER_EXPRs in the size.  Also note that the RTL expander uses
     the size of the expression to be copied, not of the destination, so
     that is what we must do here.  */
  maybe_with_size_expr (from_p);

  /* Gimplify the destination to an lvalue first...  */
  ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
  if (ret == GS_ERROR)
    return ret;

  /* As a special case, we have to temporarily allow for assignments
     with a CALL_EXPR on the RHS.  Since in GIMPLE a function call is
     a toplevel statement, when gimplifying the GENERIC expression
     MODIFY_EXPR <a, CALL_EXPR <foo>>, we cannot create the tuple
     GIMPLE_ASSIGN <a, GIMPLE_CALL <foo>>.

     Instead, we need to create the tuple GIMPLE_CALL <a, foo>.  To
     prevent gimplify_expr from trying to create a new temporary for
     foo's LHS, we tell it that it should only gimplify until it
     reaches the CALL_EXPR.  On return from gimplify_expr, the newly
     created GIMPLE_CALL <foo> will be the last statement in *PRE_P
     and all we need to do here is set 'a' to be its LHS.  */
  ret = gimplify_expr (from_p, pre_p, post_p, rhs_predicate_for (*to_p),
		       fb_rvalue);
  if (ret == GS_ERROR)
    return ret;

  /* Now see if the above changed *from_p to something we handle specially.  */
  ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
				  want_value);
  if (ret != GS_UNHANDLED)
    return ret;

  /* If we've got a variable sized assignment between two lvalues (i.e. does
     not involve a call), then we can make things a bit more straightforward
     by converting the assignment to memcpy or memset.  */
  if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
    {
      tree from = TREE_OPERAND (*from_p, 0);
      tree size = TREE_OPERAND (*from_p, 1);

      if (TREE_CODE (from) == CONSTRUCTOR)
	return gimplify_modify_expr_to_memset (expr_p, size, want_value, pre_p);

      if (is_gimple_addressable (from))
	{
	  *from_p = from;
	  return gimplify_modify_expr_to_memcpy (expr_p, size, want_value,
	      					 pre_p);
	}
    }

  /* Transform partial stores to non-addressable complex variables into
     total stores.  This allows us to use real instead of virtual operands
     for these variables, which improves optimization.  */
  if ((TREE_CODE (*to_p) == REALPART_EXPR
       || TREE_CODE (*to_p) == IMAGPART_EXPR)
      && is_gimple_reg (TREE_OPERAND (*to_p, 0)))
    return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value);

  /* Try to alleviate the effects of the gimplification creating artificial
     temporaries (see for example is_gimple_reg_rhs) on the debug info.
     If the RHS is an ignored (compiler-generated) variable and the LHS is
     a user-visible decl, give the temporary the user variable's name and a
     debug expression pointing at it.  */
  if (!gimplify_ctxp->into_ssa
      && TREE_CODE (*from_p) == VAR_DECL
      && DECL_IGNORED_P (*from_p)
      && DECL_P (*to_p)
      && !DECL_IGNORED_P (*to_p))
    {
      if (!DECL_NAME (*from_p) && DECL_NAME (*to_p))
	DECL_NAME (*from_p)
	  = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p)));
      DECL_HAS_DEBUG_EXPR_P (*from_p) = 1;
      SET_DECL_DEBUG_EXPR (*from_p, *to_p);
   }

  /* If the result is used and the LHS is volatile, evaluate the RHS into a
     temporary first so that the value returned at the bottom of this
     function does not re-read the volatile location.  */
  if (want_value && TREE_THIS_VOLATILE (*to_p))
    *from_p = get_initialized_tmp_var (*from_p, pre_p, post_p);

  if (TREE_CODE (*from_p) == CALL_EXPR)
    {
      /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL
	 instead of a GIMPLE_ASSIGN.  */
      gcall *call_stmt;
      if (CALL_EXPR_FN (*from_p) == NULL_TREE)
	{
	  /* Gimplify internal functions created in the FEs.  */
	  int nargs = call_expr_nargs (*from_p), i;
	  enum internal_fn ifn = CALL_EXPR_IFN (*from_p);
	  auto_vec<tree> vargs (nargs);

	  for (i = 0; i < nargs; i++)
	    {
	      gimplify_arg (&CALL_EXPR_ARG (*from_p, i), pre_p,
			    EXPR_LOCATION (*from_p));
	      vargs.quick_push (CALL_EXPR_ARG (*from_p, i));
	    }
	  call_stmt = gimple_build_call_internal_vec (ifn, vargs);
	  gimple_set_location (call_stmt, EXPR_LOCATION (*expr_p));
	}
      else
	{
	  tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*from_p));
	  CALL_EXPR_FN (*from_p) = TREE_OPERAND (CALL_EXPR_FN (*from_p), 0);
	  STRIP_USELESS_TYPE_CONVERSION (CALL_EXPR_FN (*from_p));
	  tree fndecl = get_callee_fndecl (*from_p);
	  /* Lower calls to __builtin_expect to the internal function
	     IFN_BUILTIN_EXPECT.  */
	  if (fndecl
	      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
	      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
	      && call_expr_nargs (*from_p) == 3)
	    call_stmt = gimple_build_call_internal (IFN_BUILTIN_EXPECT, 3,
						    CALL_EXPR_ARG (*from_p, 0),
						    CALL_EXPR_ARG (*from_p, 1),
						    CALL_EXPR_ARG (*from_p, 2));
	  else
	    {
	      call_stmt = gimple_build_call_from_tree (*from_p);
	      gimple_call_set_fntype (call_stmt, TREE_TYPE (fnptrtype));
	    }
	}
      notice_special_calls (call_stmt);
      /* Do not attach a LHS to a call that cannot return.  */
      if (!gimple_call_noreturn_p (call_stmt))
	gimple_call_set_lhs (call_stmt, *to_p);
      assign = call_stmt;
    }
  else
    {
      assign = gimple_build_assign (*to_p, *from_p);
      gimple_set_location (assign, EXPR_LOCATION (*expr_p));
    }

  if (gimplify_ctxp->into_ssa && is_gimple_reg (*to_p))
    {
      /* We should have got an SSA name from the start.  */
      gcc_assert (TREE_CODE (*to_p) == SSA_NAME);
    }

  /* Emit the statement and try to fold it in place.  */
  gimplify_seq_add_stmt (pre_p, assign);
  gsi = gsi_last (*pre_p);
  maybe_fold_stmt (&gsi);

  if (want_value)
    {
      /* For a volatile LHS, return the temporary holding the RHS instead of
	 re-reading the destination (see above).  */
      *expr_p = TREE_THIS_VOLATILE (*to_p) ? *from_p : unshare_expr (*to_p);
      return GS_OK;
    }
  else
    *expr_p = NULL;

  return GS_ALL_DONE;
}
4771 
4772 /* Gimplify a comparison between two variable-sized objects.  Do this
4773    with a call to BUILT_IN_MEMCMP.  */
4774 
4775 static enum gimplify_status
4776 gimplify_variable_sized_compare (tree *expr_p)
4777 {
4778   location_t loc = EXPR_LOCATION (*expr_p);
4779   tree op0 = TREE_OPERAND (*expr_p, 0);
4780   tree op1 = TREE_OPERAND (*expr_p, 1);
4781   tree t, arg, dest, src, expr;
4782 
4783   arg = TYPE_SIZE_UNIT (TREE_TYPE (op0));
4784   arg = unshare_expr (arg);
4785   arg = SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg, op0);
4786   src = build_fold_addr_expr_loc (loc, op1);
4787   dest = build_fold_addr_expr_loc (loc, op0);
4788   t = builtin_decl_implicit (BUILT_IN_MEMCMP);
4789   t = build_call_expr_loc (loc, t, 3, dest, src, arg);
4790 
4791   expr
4792     = build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node);
4793   SET_EXPR_LOCATION (expr, loc);
4794   *expr_p = expr;
4795 
4796   return GS_OK;
4797 }
4798 
4799 /* Gimplify a comparison between two aggregate objects of integral scalar
4800    mode as a comparison between the bitwise equivalent scalar values.  */
4801 
4802 static enum gimplify_status
4803 gimplify_scalar_mode_aggregate_compare (tree *expr_p)
4804 {
4805   location_t loc = EXPR_LOCATION (*expr_p);
4806   tree op0 = TREE_OPERAND (*expr_p, 0);
4807   tree op1 = TREE_OPERAND (*expr_p, 1);
4808 
4809   tree type = TREE_TYPE (op0);
4810   tree scalar_type = lang_hooks.types.type_for_mode (TYPE_MODE (type), 1);
4811 
4812   op0 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op0);
4813   op1 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op1);
4814 
4815   *expr_p
4816     = fold_build2_loc (loc, TREE_CODE (*expr_p), TREE_TYPE (*expr_p), op0, op1);
4817 
4818   return GS_OK;
4819 }
4820 
4821 /* Gimplify an expression sequence.  This function gimplifies each
4822    expression and rewrites the original expression with the last
4823    expression of the sequence in GIMPLE form.
4824 
4825    PRE_P points to the list where the side effects for all the
4826        expressions in the sequence will be emitted.
4827 
4828    WANT_VALUE is true when the result of the last COMPOUND_EXPR is used.  */
4829 
4830 static enum gimplify_status
4831 gimplify_compound_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
4832 {
4833   tree t = *expr_p;
4834 
4835   do
4836     {
4837       tree *sub_p = &TREE_OPERAND (t, 0);
4838 
4839       if (TREE_CODE (*sub_p) == COMPOUND_EXPR)
4840 	gimplify_compound_expr (sub_p, pre_p, false);
4841       else
4842 	gimplify_stmt (sub_p, pre_p);
4843 
4844       t = TREE_OPERAND (t, 1);
4845     }
4846   while (TREE_CODE (t) == COMPOUND_EXPR);
4847 
4848   *expr_p = t;
4849   if (want_value)
4850     return GS_OK;
4851   else
4852     {
4853       gimplify_stmt (expr_p, pre_p);
4854       return GS_ALL_DONE;
4855     }
4856 }
4857 
4858 /* Gimplify a SAVE_EXPR node.  EXPR_P points to the expression to
4859    gimplify.  After gimplification, EXPR_P will point to a new temporary
4860    that holds the original value of the SAVE_EXPR node.
4861 
4862    PRE_P points to the list where side effects that must happen before
4863    *EXPR_P should be stored.  */
4864 
4865 static enum gimplify_status
4866 gimplify_save_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4867 {
4868   enum gimplify_status ret = GS_ALL_DONE;
4869   tree val;
4870 
4871   gcc_assert (TREE_CODE (*expr_p) == SAVE_EXPR);
4872   val = TREE_OPERAND (*expr_p, 0);
4873 
4874   /* If the SAVE_EXPR has not been resolved, then evaluate it once.  */
4875   if (!SAVE_EXPR_RESOLVED_P (*expr_p))
4876     {
4877       /* The operand may be a void-valued expression such as SAVE_EXPRs
4878 	 generated by the Java frontend for class initialization.  It is
4879 	 being executed only for its side-effects.  */
4880       if (TREE_TYPE (val) == void_type_node)
4881 	{
4882 	  ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
4883 			       is_gimple_stmt, fb_none);
4884 	  val = NULL;
4885 	}
4886       else
4887 	val = get_initialized_tmp_var (val, pre_p, post_p);
4888 
4889       TREE_OPERAND (*expr_p, 0) = val;
4890       SAVE_EXPR_RESOLVED_P (*expr_p) = 1;
4891     }
4892 
4893   *expr_p = val;
4894 
4895   return ret;
4896 }
4897 
/* Rewrite the ADDR_EXPR node pointed to by EXPR_P

      unary_expr
	      : ...
	      | '&' varname
	      ...

    PRE_P points to the list where side effects that must happen before
	*EXPR_P should be stored.

    POST_P points to the list where side effects that must happen after
	*EXPR_P should be stored.

    Returns GS_OK on success and GS_ERROR if gimplifying the operand
    fails.  */

static enum gimplify_status
gimplify_addr_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  tree expr = *expr_p;
  tree op0 = TREE_OPERAND (expr, 0);
  enum gimplify_status ret;
  location_t loc = EXPR_LOCATION (*expr_p);

  switch (TREE_CODE (op0))
    {
    case INDIRECT_REF:
    do_indirect_ref:
      /* Check if we are dealing with an expression of the form '&*ptr'.
	 While the front end folds away '&*ptr' into 'ptr', these
	 expressions may be generated internally by the compiler (e.g.,
	 builtins like __builtin_va_end).  */
      /* Caution: the silent array decomposition semantics we allow for
	 ADDR_EXPR means we can't always discard the pair.  */
      /* Gimplification of the ADDR_EXPR operand may drop
	 cv-qualification conversions, so make sure we add them if
	 needed.  */
      {
	tree op00 = TREE_OPERAND (op0, 0);
	tree t_expr = TREE_TYPE (expr);
	tree t_op00 = TREE_TYPE (op00);

        if (!useless_type_conversion_p (t_expr, t_op00))
	  op00 = fold_convert_loc (loc, TREE_TYPE (expr), op00);
        *expr_p = op00;
        ret = GS_OK;
      }
      break;

    case VIEW_CONVERT_EXPR:
      /* Take the address of our operand and then convert it to the type of
	 this ADDR_EXPR.

	 ??? The interactions of VIEW_CONVERT_EXPR and aliasing is not at
	 all clear.  The impact of this transformation is even less clear.  */

      /* If the operand is a useless conversion, look through it.  Doing so
	 guarantees that the ADDR_EXPR and its operand will remain of the
	 same type.  */
      if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0, 0)))
	op0 = TREE_OPERAND (op0, 0);

      *expr_p = fold_convert_loc (loc, TREE_TYPE (expr),
				  build_fold_addr_expr_loc (loc,
							TREE_OPERAND (op0, 0)));
      ret = GS_OK;
      break;

    default:
      /* If we see a call to a declared builtin or see its address
	 being taken (we can unify those cases here) then we can mark
	 the builtin for implicit generation by GCC.  */
      if (TREE_CODE (op0) == FUNCTION_DECL
	  && DECL_BUILT_IN_CLASS (op0) == BUILT_IN_NORMAL
	  && builtin_decl_declared_p (DECL_FUNCTION_CODE (op0)))
	set_builtin_decl_implicit_p (DECL_FUNCTION_CODE (op0), true);

      /* We use fb_either here because the C frontend sometimes takes
	 the address of a call that returns a struct; see
	 gcc.dg/c99-array-lval-1.c.  The gimplifier will correctly make
	 the implied temporary explicit.  */

      /* Make the operand addressable.  */
      ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, post_p,
			   is_gimple_addressable, fb_either);
      if (ret == GS_ERROR)
	break;

      /* Then mark it.  Beware that it may not be possible to do so directly
	 if a temporary has been created by the gimplification.  */
      prepare_gimple_addressable (&TREE_OPERAND (expr, 0), pre_p);

      /* Re-fetch the operand: the gimplification above may have replaced
	 it.  */
      op0 = TREE_OPERAND (expr, 0);

      /* For various reasons, the gimplification of the expression
	 may have made a new INDIRECT_REF.  */
      if (TREE_CODE (op0) == INDIRECT_REF)
	goto do_indirect_ref;

      mark_addressable (TREE_OPERAND (expr, 0));

      /* The FEs may end up building ADDR_EXPRs early on a decl with
	 an incomplete type.  Re-build ADDR_EXPRs in canonical form
	 here.  */
      if (!types_compatible_p (TREE_TYPE (op0), TREE_TYPE (TREE_TYPE (expr))))
	*expr_p = build_fold_addr_expr (op0);

      /* Make sure TREE_CONSTANT and TREE_SIDE_EFFECTS are set properly.  */
      recompute_tree_invariant_for_addr_expr (*expr_p);

      /* If we re-built the ADDR_EXPR add a conversion to the original type
         if required.  */
      if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
	*expr_p = fold_convert (TREE_TYPE (expr), *expr_p);

      break;
    }

  return ret;
}
5015 
/* Gimplify the operands of an ASM_EXPR.  Input operands should be a gimple
   value; output operands should be a gimple lvalue.  The gimplified
   GIMPLE_ASM is appended to PRE_P unless an error was diagnosed.
   Returns GS_ALL_DONE on success, GS_ERROR otherwise.  */

static enum gimplify_status
gimplify_asm_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  tree expr;
  int noutputs;
  const char **oconstraints;
  int i;
  tree link;
  const char *constraint;
  bool allows_mem, allows_reg, is_inout;
  enum gimplify_status ret, tret;
  gasm *stmt;
  vec<tree, va_gc> *inputs;
  vec<tree, va_gc> *outputs;
  vec<tree, va_gc> *clobbers;
  vec<tree, va_gc> *labels;
  tree link_next;

  expr = *expr_p;
  noutputs = list_length (ASM_OUTPUTS (expr));
  /* Remember each output's constraint string; they are consulted when
     parsing the input constraints below.  */
  oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));

  inputs = NULL;
  outputs = NULL;
  clobbers = NULL;
  labels = NULL;

  ret = GS_ALL_DONE;
  link_next = NULL_TREE;
  /* First pass: gimplify each output operand to an lvalue, and split
     in/out ("+") operands into a separate output plus a matching input.  */
  for (i = 0, link = ASM_OUTPUTS (expr); link; ++i, link = link_next)
    {
      bool ok;
      size_t constraint_len;

      link_next = TREE_CHAIN (link);

      oconstraints[i]
	= constraint
	= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      constraint_len = strlen (constraint);
      if (constraint_len == 0)
        continue;

      ok = parse_output_constraint (&constraint, i, 0, 0,
				    &allows_mem, &allows_reg, &is_inout);
      if (!ok)
	{
	  ret = GS_ERROR;
	  is_inout = false;
	}

      if (!allows_reg && allows_mem)
	mark_addressable (TREE_VALUE (link));

      tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
			    is_inout ? is_gimple_min_lval : is_gimple_lvalue,
			    fb_lvalue | fb_mayfail);
      if (tret == GS_ERROR)
	{
	  error ("invalid lvalue in asm output %d", i);
	  ret = tret;
	}

      /* Collect the operand, detaching it from the original chain.  */
      vec_safe_push (outputs, link);
      TREE_CHAIN (link) = NULL_TREE;

      if (is_inout)
	{
	  /* An input/output operand.  To give the optimizers more
	     flexibility, split it into separate input and output
 	     operands.  */
	  tree input;
	  char buf[10];

	  /* Turn the in/out constraint into an output constraint.  */
	  char *p = xstrdup (constraint);
	  p[0] = '=';
	  TREE_VALUE (TREE_PURPOSE (link)) = build_string (constraint_len, p);

	  /* And add a matching input constraint.  */
	  if (allows_reg)
	    {
	      sprintf (buf, "%d", i);

	      /* If there are multiple alternatives in the constraint,
		 handle each of them individually.  Those that allow register
		 will be replaced with operand number, the others will stay
		 unchanged.  */
	      if (strchr (p, ',') != NULL)
		{
		  size_t len = 0, buflen = strlen (buf);
		  char *beg, *end, *str, *dst;

		  /* First pass over the alternatives: compute an upper
		     bound on the rewritten constraint's length.  */
		  for (beg = p + 1;;)
		    {
		      end = strchr (beg, ',');
		      if (end == NULL)
			end = strchr (beg, '\0');
		      if ((size_t) (end - beg) < buflen)
			len += buflen + 1;
		      else
			len += end - beg + 1;
		      if (*end)
			beg = end + 1;
		      else
			break;
		    }

		  /* Second pass: build the new constraint string,
		     substituting the operand number for alternatives that
		     allow a register.  */
		  str = (char *) alloca (len);
		  for (beg = p + 1, dst = str;;)
		    {
		      const char *tem;
		      bool mem_p, reg_p, inout_p;

		      end = strchr (beg, ',');
		      if (end)
			*end = '\0';
		      /* Temporarily prefix the alternative with '=' so it
			 parses as an output constraint.  */
		      beg[-1] = '=';
		      tem = beg - 1;
		      parse_output_constraint (&tem, i, 0, 0,
					       &mem_p, &reg_p, &inout_p);
		      if (dst != str)
			*dst++ = ',';
		      if (reg_p)
			{
			  memcpy (dst, buf, buflen);
			  dst += buflen;
			}
		      else
			{
			  if (end)
			    len = end - beg;
			  else
			    len = strlen (beg);
			  memcpy (dst, beg, len);
			  dst += len;
			}
		      if (end)
			beg = end + 1;
		      else
			break;
		    }
		  *dst = '\0';
		  input = build_string (dst - str, str);
		}
	      else
		input = build_string (strlen (buf), buf);
	    }
	  else
	    /* No register allowed: reuse the constraint minus the '+'.  */
	    input = build_string (constraint_len - 1, constraint + 1);

	  free (p);

	  /* Append the synthesized input so the loop below gimplifies it.  */
	  input = build_tree_list (build_tree_list (NULL_TREE, input),
				   unshare_expr (TREE_VALUE (link)));
	  ASM_INPUTS (expr) = chainon (ASM_INPUTS (expr), input);
	}
    }

  /* Second pass: gimplify the input operands.  Note that I keeps counting
     up from the outputs so diagnostics number operands consecutively.  */
  link_next = NULL_TREE;
  for (link = ASM_INPUTS (expr); link; ++i, link = link_next)
    {
      link_next = TREE_CHAIN (link);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      parse_input_constraint (&constraint, 0, 0, noutputs, 0,
			      oconstraints, &allows_mem, &allows_reg);

      /* If we can't make copies, we can only accept memory.  */
      if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (link))))
	{
	  if (allows_mem)
	    allows_reg = 0;
	  else
	    {
	      error ("impossible constraint in %<asm%>");
	      error ("non-memory input %d must stay in memory", i);
	      return GS_ERROR;
	    }
	}

      /* If the operand is a memory input, it should be an lvalue.  */
      if (!allows_reg && allows_mem)
	{
	  tree inputv = TREE_VALUE (link);
	  STRIP_NOPS (inputv);
	  /* Pre/post increment/decrement expressions cannot serve as
	     directly addressable memory operands.  */
	  if (TREE_CODE (inputv) == PREDECREMENT_EXPR
	      || TREE_CODE (inputv) == PREINCREMENT_EXPR
	      || TREE_CODE (inputv) == POSTDECREMENT_EXPR
	      || TREE_CODE (inputv) == POSTINCREMENT_EXPR)
	    TREE_VALUE (link) = error_mark_node;
	  tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
				is_gimple_lvalue, fb_lvalue | fb_mayfail);
	  mark_addressable (TREE_VALUE (link));
	  if (tret == GS_ERROR)
	    {
	      if (EXPR_HAS_LOCATION (TREE_VALUE (link)))
	        input_location = EXPR_LOCATION (TREE_VALUE (link));
	      error ("memory input %d is not directly addressable", i);
	      ret = tret;
	    }
	}
      else
	{
	  tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
				is_gimple_asm_val, fb_rvalue);
	  if (tret == GS_ERROR)
	    ret = tret;
	}

      TREE_CHAIN (link) = NULL_TREE;
      vec_safe_push (inputs, link);
    }

  /* Clobbers need no gimplification; just collect them.  */
  link_next = NULL_TREE;
  for (link = ASM_CLOBBERS (expr); link; ++i, link = link_next)
    {
      link_next = TREE_CHAIN (link);
      TREE_CHAIN (link) = NULL_TREE;
      vec_safe_push (clobbers, link);
    }

  /* Likewise for the goto labels of an asm goto.  */
  link_next = NULL_TREE;
  for (link = ASM_LABELS (expr); link; ++i, link = link_next)
    {
      link_next = TREE_CHAIN (link);
      TREE_CHAIN (link) = NULL_TREE;
      vec_safe_push (labels, link);
    }

  /* Do not add ASMs with errors to the gimple IL stream.  */
  if (ret != GS_ERROR)
    {
      stmt = gimple_build_asm_vec (TREE_STRING_POINTER (ASM_STRING (expr)),
				   inputs, outputs, clobbers, labels);

      gimple_asm_set_volatile (stmt, ASM_VOLATILE_P (expr));
      gimple_asm_set_input (stmt, ASM_INPUT_P (expr));

      gimplify_seq_add_stmt (pre_p, stmt);
    }

  return ret;
}
5262 
/* Gimplify a CLEANUP_POINT_EXPR.  Currently this works by adding
   GIMPLE_WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
   gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
   return to this function.

   FIXME should we complexify the prequeue handling instead?  Or use flags
   for all the cleanups and let the optimizer tighten them up?  The current
   code seems pretty fragile; it will break on a cleanup within any
   non-conditional nesting.  But any such nesting would be broken, anyway;
   we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
   and continues out of it.  We can do that at the RTL level, though, so
   having an optimizer to tighten up try/finally regions would be a Good
   Thing.  */

static enum gimplify_status
gimplify_cleanup_point_expr (tree *expr_p, gimple_seq *pre_p)
{
  gimple_stmt_iterator iter;
  gimple_seq body_sequence = NULL;

  /* If the wrapped expression computes a value, TEMP is the temporary
     voidify_wrapper_expr introduced to carry it; otherwise NULL.  */
  tree temp = voidify_wrapper_expr (*expr_p, NULL);

  /* We only care about the number of conditions between the innermost
     CLEANUP_POINT_EXPR and the cleanup.  So save and reset the count and
     any cleanups collected outside the CLEANUP_POINT_EXPR.  */
  int old_conds = gimplify_ctxp->conditions;
  gimple_seq old_cleanups = gimplify_ctxp->conditional_cleanups;
  bool old_in_cleanup_point_expr = gimplify_ctxp->in_cleanup_point_expr;
  gimplify_ctxp->conditions = 0;
  gimplify_ctxp->conditional_cleanups = NULL;
  gimplify_ctxp->in_cleanup_point_expr = true;

  /* Gimplify the body; any cleanups it contains show up as
     GIMPLE_WITH_CLEANUP_EXPR markers in BODY_SEQUENCE.  */
  gimplify_stmt (&TREE_OPERAND (*expr_p, 0), &body_sequence);

  gimplify_ctxp->conditions = old_conds;
  gimplify_ctxp->conditional_cleanups = old_cleanups;
  gimplify_ctxp->in_cleanup_point_expr = old_in_cleanup_point_expr;

  /* Rewrite each GIMPLE_WITH_CLEANUP_EXPR marker into a GIMPLE_TRY
     protecting the statements that follow it.  */
  for (iter = gsi_start (body_sequence); !gsi_end_p (iter); )
    {
      gimple wce = gsi_stmt (iter);

      if (gimple_code (wce) == GIMPLE_WITH_CLEANUP_EXPR)
	{
	  if (gsi_one_before_end_p (iter))
	    {
	      /* The marker is the last statement, so there is nothing to
		 protect: emit the cleanup inline (unless it runs only on
		 the EH path, in which case it can be dropped).  */
              /* Note that gsi_insert_seq_before and gsi_remove do not
                 scan operands, unlike some other sequence mutators.  */
	      if (!gimple_wce_cleanup_eh_only (wce))
		gsi_insert_seq_before_without_update (&iter,
						      gimple_wce_cleanup (wce),
						      GSI_SAME_STMT);
	      gsi_remove (&iter, true);
	      break;
	    }
	  else
	    {
	      gtry *gtry;
	      gimple_seq seq;
	      enum gimple_try_flags kind;

	      if (gimple_wce_cleanup_eh_only (wce))
		kind = GIMPLE_TRY_CATCH;
	      else
		kind = GIMPLE_TRY_FINALLY;
	      seq = gsi_split_seq_after (iter);

	      gtry = gimple_build_try (seq, gimple_wce_cleanup (wce), kind);
              /* Do not use gsi_replace here, as it may scan operands.
                 We want to do a simple structural modification only.  */
	      gsi_set_stmt (&iter, gtry);
	      /* Continue scanning inside the protected sequence, so nested
		 markers are rewritten too.  */
	      iter = gsi_start (gtry->eval);
	    }
	}
      else
	gsi_next (&iter);
    }

  gimplify_seq_add_seq (pre_p, body_sequence);
  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }
  else
    {
      *expr_p = NULL;
      return GS_ALL_DONE;
    }
}
5353 
5354 /* Insert a cleanup marker for gimplify_cleanup_point_expr.  CLEANUP
5355    is the cleanup action required.  EH_ONLY is true if the cleanup should
5356    only be executed if an exception is thrown, not on normal exit.  */
5357 
static void
gimple_push_cleanup (tree var, tree cleanup, bool eh_only, gimple_seq *pre_p)
{
  gimple wce;
  gimple_seq cleanup_stmts = NULL;

  /* Errors can result in improperly nested cleanups.  Which results in
     confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR.  */
  if (seen_error ())
    return;

  if (gimple_conditional_context ())
    {
      /* If we're in a conditional context, this is more complex.  We only
	 want to run the cleanup if we actually ran the initialization that
	 necessitates it, but we want to run it after the end of the
	 conditional context.  So we wrap the try/finally around the
	 condition and use a flag to determine whether or not to actually
	 run the destructor.  Thus

	   test ? f(A()) : 0

	 becomes (approximately)

	   flag = 0;
	   try {
	     if (test) { A::A(temp); flag = 1; val = f(temp); }
	     else { val = 0; }
	   } finally {
	     if (flag) A::~A(temp);
	   }
	   val
      */
      tree flag = create_tmp_var (boolean_type_node, "cleanup");
      gassign *ffalse = gimple_build_assign (flag, boolean_false_node);
      gassign *ftrue = gimple_build_assign (flag, boolean_true_node);

      /* Guard the cleanup with FLAG so it only runs when the
	 initialization actually executed.  */
      cleanup = build3 (COND_EXPR, void_type_node, flag, cleanup, NULL);
      gimplify_stmt (&cleanup, &cleanup_stmts);
      wce = gimple_build_wce (cleanup_stmts);

      /* "flag = 0" and the guarded cleanup go onto the enclosing
	 conditional's cleanup sequence; "flag = 1" is emitted inline at
	 the point of initialization.  */
      gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, ffalse);
      gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
      gimplify_seq_add_stmt (pre_p, ftrue);

      /* Because of this manipulation, and the EH edges that jump
	 threading cannot redirect, the temporary (VAR) will appear
	 to be used uninitialized.  Don't warn.  */
      TREE_NO_WARNING (var) = 1;
    }
  else
    {
      /* Unconditional context: emit a plain WITH_CLEANUP_EXPR, marked
	 EH-only when the cleanup should run solely on exception exit.  */
      gimplify_stmt (&cleanup, &cleanup_stmts);
      wce = gimple_build_wce (cleanup_stmts);
      gimple_wce_set_cleanup_eh_only (wce, eh_only);
      gimplify_seq_add_stmt (pre_p, wce);
    }
}
5416 
5417 /* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR.  */
5418 
static enum gimplify_status
gimplify_target_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  tree targ = *expr_p;
  tree temp = TARGET_EXPR_SLOT (targ);
  tree init = TARGET_EXPR_INITIAL (targ);
  enum gimplify_status ret;

  /* A non-NULL initializer means this TARGET_EXPR has not been expanded
     yet; expand it now and then clear TARGET_EXPR_INITIAL so it cannot
     be expanded twice.  */
  if (init)
    {
      tree cleanup = NULL_TREE;

      /* TARGET_EXPR temps aren't part of the enclosing block, so add it
	 to the temps list.  Handle also variable length TARGET_EXPRs.  */
      if (TREE_CODE (DECL_SIZE (temp)) != INTEGER_CST)
	{
	  if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (temp)))
	    gimplify_type_sizes (TREE_TYPE (temp), pre_p);
	  gimplify_vla_decl (temp, pre_p);
	}
      else
	gimple_add_tmp_var (temp);

      /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
	 expression is supposed to initialize the slot.  */
      if (VOID_TYPE_P (TREE_TYPE (init)))
	ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
      else
	{
	  /* Otherwise wrap the initializer in "temp = init" and gimplify
	     that; the INIT_EXPR node itself is returned to the GC
	     immediately since gimplification has consumed it.  */
	  tree init_expr = build2 (INIT_EXPR, void_type_node, temp, init);
	  init = init_expr;
	  ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
	  init = NULL;
	  ggc_free (init_expr);
	}
      if (ret == GS_ERROR)
	{
	  /* PR c++/28266 Make sure this is expanded only once. */
	  TARGET_EXPR_INITIAL (targ) = NULL_TREE;
	  return GS_ERROR;
	}
      /* Anything left of INIT (only in the void-type path) is emitted
	 before the use of the slot.  */
      if (init)
	gimplify_and_add (init, pre_p);

      /* If needed, push the cleanup for the temp.  */
      if (TARGET_EXPR_CLEANUP (targ))
	{
	  if (CLEANUP_EH_ONLY (targ))
	    gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ),
				 CLEANUP_EH_ONLY (targ), pre_p);
	  else
	    cleanup = TARGET_EXPR_CLEANUP (targ);
	}

      /* Add a clobber for the temporary going out of scope, like
	 gimplify_bind_expr.  */
      if (gimplify_ctxp->in_cleanup_point_expr
	  && needs_to_live_in_memory (temp)
	  && flag_stack_reuse == SR_ALL)
	{
	  /* An empty volatile CONSTRUCTOR assigned to the slot marks the
	     end of its lifetime, enabling stack-slot reuse.  */
	  tree clobber = build_constructor (TREE_TYPE (temp),
					    NULL);
	  TREE_THIS_VOLATILE (clobber) = true;
	  clobber = build2 (MODIFY_EXPR, TREE_TYPE (temp), temp, clobber);
	  if (cleanup)
	    cleanup = build2 (COMPOUND_EXPR, void_type_node, cleanup,
			      clobber);
	  else
	    cleanup = clobber;
	}

      if (cleanup)
	gimple_push_cleanup (temp, cleanup, false, pre_p);

      /* Only expand this once.  */
      TREE_OPERAND (targ, 3) = init;
      TARGET_EXPR_INITIAL (targ) = NULL_TREE;
    }
  else
    /* We should have expanded this before.  */
    gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp));

  *expr_p = temp;
  return GS_OK;
}
5504 
5505 /* Gimplification of expression trees.  */
5506 
5507 /* Gimplify an expression which appears at statement context.  The
5508    corresponding GIMPLE statements are added to *SEQ_P.  If *SEQ_P is
5509    NULL, a new sequence is allocated.
5510 
5511    Return true if we actually added a statement to the queue.  */
5512 
5513 bool
5514 gimplify_stmt (tree *stmt_p, gimple_seq *seq_p)
5515 {
5516   gimple_seq_node last;
5517 
5518   last = gimple_seq_last (*seq_p);
5519   gimplify_expr (stmt_p, seq_p, NULL, is_gimple_stmt, fb_none);
5520   return last != gimple_seq_last (*seq_p);
5521 }
5522 
5523 /* Add FIRSTPRIVATE entries for DECL in the OpenMP the surrounding parallels
5524    to CTX.  If entries already exist, force them to be some flavor of private.
5525    If there is no enclosing parallel, do nothing.  */
5526 
5527 void
5528 omp_firstprivatize_variable (struct gimplify_omp_ctx *ctx, tree decl)
5529 {
5530   splay_tree_node n;
5531 
5532   if (decl == NULL || !DECL_P (decl))
5533     return;
5534 
5535   do
5536     {
5537       n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5538       if (n != NULL)
5539 	{
5540 	  if (n->value & GOVD_SHARED)
5541 	    n->value = GOVD_FIRSTPRIVATE | (n->value & GOVD_SEEN);
5542 	  else if (n->value & GOVD_MAP)
5543 	    n->value |= GOVD_MAP_TO_ONLY;
5544 	  else
5545 	    return;
5546 	}
5547       else if (ctx->region_type == ORT_TARGET)
5548 	omp_add_variable (ctx, decl, GOVD_MAP | GOVD_MAP_TO_ONLY);
5549       else if (ctx->region_type != ORT_WORKSHARE
5550 	       && ctx->region_type != ORT_SIMD
5551 	       && ctx->region_type != ORT_TARGET_DATA)
5552 	omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
5553 
5554       ctx = ctx->outer_context;
5555     }
5556   while (ctx);
5557 }
5558 
5559 /* Similarly for each of the type sizes of TYPE.  */
5560 
5561 static void
5562 omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
5563 {
5564   if (type == NULL || type == error_mark_node)
5565     return;
5566   type = TYPE_MAIN_VARIANT (type);
5567 
5568   if (ctx->privatized_types->add (type))
5569     return;
5570 
5571   switch (TREE_CODE (type))
5572     {
5573     case INTEGER_TYPE:
5574     case ENUMERAL_TYPE:
5575     case BOOLEAN_TYPE:
5576     case REAL_TYPE:
5577     case FIXED_POINT_TYPE:
5578       omp_firstprivatize_variable (ctx, TYPE_MIN_VALUE (type));
5579       omp_firstprivatize_variable (ctx, TYPE_MAX_VALUE (type));
5580       break;
5581 
5582     case ARRAY_TYPE:
5583       omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
5584       omp_firstprivatize_type_sizes (ctx, TYPE_DOMAIN (type));
5585       break;
5586 
5587     case RECORD_TYPE:
5588     case UNION_TYPE:
5589     case QUAL_UNION_TYPE:
5590       {
5591 	tree field;
5592 	for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
5593 	  if (TREE_CODE (field) == FIELD_DECL)
5594 	    {
5595 	      omp_firstprivatize_variable (ctx, DECL_FIELD_OFFSET (field));
5596 	      omp_firstprivatize_type_sizes (ctx, TREE_TYPE (field));
5597 	    }
5598       }
5599       break;
5600 
5601     case POINTER_TYPE:
5602     case REFERENCE_TYPE:
5603       omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
5604       break;
5605 
5606     default:
5607       break;
5608     }
5609 
5610   omp_firstprivatize_variable (ctx, TYPE_SIZE (type));
5611   omp_firstprivatize_variable (ctx, TYPE_SIZE_UNIT (type));
5612   lang_hooks.types.omp_firstprivatize_type_sizes (ctx, type);
5613 }
5614 
5615 /* Add an entry for DECL in the OMP context CTX with FLAGS.  */
5616 
static void
omp_add_variable (struct gimplify_omp_ctx *ctx, tree decl, unsigned int flags)
{
  splay_tree_node n;
  unsigned int nflags;
  tree t;

  if (error_operand_p (decl))
    return;

  /* Never elide decls whose type has TREE_ADDRESSABLE set.  This means
     there are constructors involved somewhere.  */
  if (TREE_ADDRESSABLE (TREE_TYPE (decl))
      || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl)))
    flags |= GOVD_SEEN;

  n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
  if (n != NULL && n->value != GOVD_ALIGNED)
    {
      /* We shouldn't be re-adding the decl with the same data
	 sharing class.  */
      gcc_assert ((n->value & GOVD_DATA_SHARE_CLASS & flags) == 0);
      /* The only combination of data sharing classes we should see is
	 FIRSTPRIVATE and LASTPRIVATE.  */
      nflags = n->value | flags;
      gcc_assert ((nflags & GOVD_DATA_SHARE_CLASS)
		  == (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE)
		  || (flags & GOVD_DATA_SHARE_CLASS) == 0);
      /* Merge the new flags into the existing entry and stop; the
	 variable-size handling below only runs for fresh entries.  */
      n->value = nflags;
      return;
    }

  /* When adding a variable-sized variable, we have to handle all sorts
     of additional bits of data: the pointer replacement variable, and
     the parameters of the type.  */
  if (DECL_SIZE (decl) && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
    {
      /* Add the pointer replacement variable as PRIVATE if the variable
	 replacement is private, else FIRSTPRIVATE since we'll need the
	 address of the original variable either for SHARED, or for the
	 copy into or out of the context.  */
      if (!(flags & GOVD_LOCAL))
	{
	  if (flags & GOVD_MAP)
	    nflags = GOVD_MAP | GOVD_MAP_TO_ONLY | GOVD_EXPLICIT;
	  else if (flags & GOVD_PRIVATE)
	    nflags = GOVD_PRIVATE;
	  else
	    nflags = GOVD_FIRSTPRIVATE;
	  nflags |= flags & GOVD_SEEN;
	  /* A VLA's DECL_VALUE_EXPR is *ptr; recover the replacement
	     pointer decl from it.  */
	  t = DECL_VALUE_EXPR (decl);
	  gcc_assert (TREE_CODE (t) == INDIRECT_REF);
	  t = TREE_OPERAND (t, 0);
	  gcc_assert (DECL_P (t));
	  omp_add_variable (ctx, t, nflags);
	}

      /* Add all of the variable and type parameters (which should have
	 been gimplified to a formal temporary) as FIRSTPRIVATE.  */
      omp_firstprivatize_variable (ctx, DECL_SIZE_UNIT (decl));
      omp_firstprivatize_variable (ctx, DECL_SIZE (decl));
      omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));

      /* The variable-sized variable itself is never SHARED, only some form
	 of PRIVATE.  The sharing would take place via the pointer variable
	 which we remapped above.  */
      if (flags & GOVD_SHARED)
	flags = GOVD_PRIVATE | GOVD_DEBUG_PRIVATE
		| (flags & (GOVD_SEEN | GOVD_EXPLICIT));

      /* We're going to make use of the TYPE_SIZE_UNIT at least in the
	 alloca statement we generate for the variable, so make sure it
	 is available.  This isn't automatically needed for the SHARED
	 case, since we won't be allocating local storage then.
	 For local variables TYPE_SIZE_UNIT might not be gimplified yet,
	 in this case omp_notice_variable will be called later
	 on when it is gimplified.  */
      else if (! (flags & (GOVD_LOCAL | GOVD_MAP))
	       && DECL_P (TYPE_SIZE_UNIT (TREE_TYPE (decl))))
	omp_notice_variable (ctx, TYPE_SIZE_UNIT (TREE_TYPE (decl)), true);
    }
  else if ((flags & (GOVD_MAP | GOVD_LOCAL)) == 0
	   && lang_hooks.decls.omp_privatize_by_reference (decl))
    {
      omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));

      /* Similar to the direct variable sized case above, we'll need the
	 size of references being privatized.  */
      if ((flags & GOVD_SHARED) == 0)
	{
	  t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
	  if (TREE_CODE (t) != INTEGER_CST)
	    omp_notice_variable (ctx, t, true);
	}
    }

  /* Record the (possibly adjusted) flags, either by augmenting an
     existing GOVD_ALIGNED entry or creating a fresh one.  */
  if (n != NULL)
    n->value |= flags;
  else
    splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags);
}
5718 
5719 /* Notice a threadprivate variable DECL used in OMP context CTX.
5720    This just prints out diagnostics about threadprivate variable uses
5721    in untied tasks.  If DECL2 is non-NULL, prevent this warning
5722    on that variable.  */
5723 
5724 static bool
5725 omp_notice_threadprivate_variable (struct gimplify_omp_ctx *ctx, tree decl,
5726 				   tree decl2)
5727 {
5728   splay_tree_node n;
5729   struct gimplify_omp_ctx *octx;
5730 
5731   for (octx = ctx; octx; octx = octx->outer_context)
5732     if (octx->region_type == ORT_TARGET)
5733       {
5734 	n = splay_tree_lookup (octx->variables, (splay_tree_key)decl);
5735 	if (n == NULL)
5736 	  {
5737 	    error ("threadprivate variable %qE used in target region",
5738 		   DECL_NAME (decl));
5739 	    error_at (octx->location, "enclosing target region");
5740 	    splay_tree_insert (octx->variables, (splay_tree_key)decl, 0);
5741 	  }
5742 	if (decl2)
5743 	  splay_tree_insert (octx->variables, (splay_tree_key)decl2, 0);
5744       }
5745 
5746   if (ctx->region_type != ORT_UNTIED_TASK)
5747     return false;
5748   n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5749   if (n == NULL)
5750     {
5751       error ("threadprivate variable %qE used in untied task",
5752 	     DECL_NAME (decl));
5753       error_at (ctx->location, "enclosing task");
5754       splay_tree_insert (ctx->variables, (splay_tree_key)decl, 0);
5755     }
5756   if (decl2)
5757     splay_tree_insert (ctx->variables, (splay_tree_key)decl2, 0);
5758   return false;
5759 }
5760 
5761 /* Record the fact that DECL was used within the OMP context CTX.
5762    IN_CODE is true when real code uses DECL, and false when we should
5763    merely emit default(none) errors.  Return true if DECL is going to
5764    be remapped and thus DECL shouldn't be gimplified into its
5765    DECL_VALUE_EXPR (if any).  */
5766 
static bool
omp_notice_variable (struct gimplify_omp_ctx *ctx, tree decl, bool in_code)
{
  splay_tree_node n;
  unsigned flags = in_code ? GOVD_SEEN : 0;
  bool ret = false, shared;

  if (error_operand_p (decl))
    return false;

  /* Threadprivate variables are predetermined.  */
  if (is_global_var (decl))
    {
      if (DECL_THREAD_LOCAL_P (decl))
	return omp_notice_threadprivate_variable (ctx, decl, NULL_TREE);

      if (DECL_HAS_VALUE_EXPR_P (decl))
	{
	  tree value = get_base_address (DECL_VALUE_EXPR (decl));

	  if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value))
	    return omp_notice_threadprivate_variable (ctx, decl, value);
	}
    }

  n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
  if (ctx->region_type == ORT_TARGET)
    {
      /* In a target region an unseen variable becomes map(tofrom:)
	 by default, provided its type is mappable.  */
      ret = lang_hooks.decls.omp_disregard_value_expr (decl, true);
      if (n == NULL)
	{
	  if (!lang_hooks.types.omp_mappable_type (TREE_TYPE (decl)))
	    {
	      error ("%qD referenced in target region does not have "
		     "a mappable type", decl);
	      omp_add_variable (ctx, decl, GOVD_MAP | GOVD_EXPLICIT | flags);
	    }
	  else
	    omp_add_variable (ctx, decl, GOVD_MAP | flags);
	}
      else
	{
	  /* If nothing changed, there's nothing left to do.  */
	  if ((n->value & flags) == flags)
	    return ret;
	  n->value |= flags;
	}
      goto do_outer;
    }

  if (n == NULL)
    {
      /* DECL has no entry in this context yet; determine its implicit
	 data-sharing attribute from the default clause (or the
	 language's predetermined sharing) and record it.  */
      enum omp_clause_default_kind default_kind, kind;
      struct gimplify_omp_ctx *octx;

      if (ctx->region_type == ORT_WORKSHARE
	  || ctx->region_type == ORT_SIMD
	  || ctx->region_type == ORT_TARGET_DATA)
	goto do_outer;

      /* ??? Some compiler-generated variables (like SAVE_EXPRs) could be
	 remapped firstprivate instead of shared.  To some extent this is
	 addressed in omp_firstprivatize_type_sizes, but not effectively.  */
      default_kind = ctx->default_kind;
      kind = lang_hooks.decls.omp_predetermined_sharing (decl);
      if (kind != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
	default_kind = kind;

      switch (default_kind)
	{
	case OMP_CLAUSE_DEFAULT_NONE:
	  /* default(none): every referenced variable must have been
	     listed explicitly; diagnose per enclosing construct kind.  */
	  if ((ctx->region_type & ORT_PARALLEL) != 0)
	    {
	      error ("%qE not specified in enclosing parallel",
		     DECL_NAME (lang_hooks.decls.omp_report_decl (decl)));
	      error_at (ctx->location, "enclosing parallel");
	    }
	  else if ((ctx->region_type & ORT_TASK) != 0)
	    {
	      error ("%qE not specified in enclosing task",
		     DECL_NAME (lang_hooks.decls.omp_report_decl (decl)));
	      error_at (ctx->location, "enclosing task");
	    }
	  else if (ctx->region_type & ORT_TEAMS)
	    {
	      error ("%qE not specified in enclosing teams construct",
		     DECL_NAME (lang_hooks.decls.omp_report_decl (decl)));
	      error_at (ctx->location, "enclosing teams construct");
	    }
	  else
	    gcc_unreachable ();
	  /* FALLTHRU */
	case OMP_CLAUSE_DEFAULT_SHARED:
	  flags |= GOVD_SHARED;
	  break;
	case OMP_CLAUSE_DEFAULT_PRIVATE:
	  flags |= GOVD_PRIVATE;
	  break;
	case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE:
	  flags |= GOVD_FIRSTPRIVATE;
	  break;
	case OMP_CLAUSE_DEFAULT_UNSPECIFIED:
	  /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED.  */
	  gcc_assert ((ctx->region_type & ORT_TASK) != 0);
	  if (ctx->outer_context)
	    omp_notice_variable (ctx->outer_context, decl, in_code);
	  /* Scan outward for a context that determines the choice:
	     a non-shared binding forces FIRSTPRIVATE.  */
	  for (octx = ctx->outer_context; octx; octx = octx->outer_context)
	    {
	      splay_tree_node n2;

	      if ((octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)) != 0)
		continue;
	      n2 = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
	      if (n2 && (n2->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED)
		{
		  flags |= GOVD_FIRSTPRIVATE;
		  break;
		}
	      if ((octx->region_type & (ORT_PARALLEL | ORT_TEAMS)) != 0)
		break;
	    }
	  if (flags & GOVD_FIRSTPRIVATE)
	    break;
	  /* No binding found anywhere: function-local variables become
	     firstprivate, everything else shared.  */
	  if (octx == NULL
	      && (TREE_CODE (decl) == PARM_DECL
		  || (!is_global_var (decl)
		      && DECL_CONTEXT (decl) == current_function_decl)))
	    {
	      flags |= GOVD_FIRSTPRIVATE;
	      break;
	    }
	  flags |= GOVD_SHARED;
	  break;
	default:
	  gcc_unreachable ();
	}

      if ((flags & GOVD_PRIVATE)
	  && lang_hooks.decls.omp_private_outer_ref (decl))
	flags |= GOVD_PRIVATE_OUTER_REF;

      omp_add_variable (ctx, decl, flags);

      shared = (flags & GOVD_SHARED) != 0;
      ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
      goto do_outer;
    }

  /* Seeing a variable-sized variable for the first time marks its
     pointer replacement variable (from DECL_VALUE_EXPR) as seen too.  */
  if ((n->value & (GOVD_SEEN | GOVD_LOCAL)) == 0
      && (flags & (GOVD_SEEN | GOVD_LOCAL)) == GOVD_SEEN
      && DECL_SIZE (decl)
      && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
    {
      splay_tree_node n2;
      tree t = DECL_VALUE_EXPR (decl);
      gcc_assert (TREE_CODE (t) == INDIRECT_REF);
      t = TREE_OPERAND (t, 0);
      gcc_assert (DECL_P (t));
      n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
      n2->value |= GOVD_SEEN;
    }

  shared = ((flags | n->value) & GOVD_SHARED) != 0;
  ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);

  /* If nothing changed, there's nothing left to do.  */
  if ((n->value & flags) == flags)
    return ret;
  flags |= n->value;
  n->value = flags;

 do_outer:
  /* If the variable is private in the current context, then we don't
     need to propagate anything to an outer context.  */
  if ((flags & GOVD_PRIVATE) && !(flags & GOVD_PRIVATE_OUTER_REF))
    return ret;
  if ((flags & (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
      == (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
    return ret;
  if ((flags & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
		| GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
      == (GOVD_LASTPRIVATE | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
    return ret;
  if (ctx->outer_context
      && omp_notice_variable (ctx->outer_context, decl, in_code))
    return true;
  return ret;
}
5955 
5956 /* Verify that DECL is private within CTX.  If there's specific information
5957    to the contrary in the innermost scope, generate an error.  */
5958 
5959 static bool
5960 omp_is_private (struct gimplify_omp_ctx *ctx, tree decl, int simd)
5961 {
5962   splay_tree_node n;
5963 
5964   n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5965   if (n != NULL)
5966     {
5967       if (n->value & GOVD_SHARED)
5968 	{
5969 	  if (ctx == gimplify_omp_ctxp)
5970 	    {
5971 	      if (simd)
5972 		error ("iteration variable %qE is predetermined linear",
5973 		       DECL_NAME (decl));
5974 	      else
5975 		error ("iteration variable %qE should be private",
5976 		       DECL_NAME (decl));
5977 	      n->value = GOVD_PRIVATE;
5978 	      return true;
5979 	    }
5980 	  else
5981 	    return false;
5982 	}
5983       else if ((n->value & GOVD_EXPLICIT) != 0
5984 	       && (ctx == gimplify_omp_ctxp
5985 		   || (ctx->region_type == ORT_COMBINED_PARALLEL
5986 		       && gimplify_omp_ctxp->outer_context == ctx)))
5987 	{
5988 	  if ((n->value & GOVD_FIRSTPRIVATE) != 0)
5989 	    error ("iteration variable %qE should not be firstprivate",
5990 		   DECL_NAME (decl));
5991 	  else if ((n->value & GOVD_REDUCTION) != 0)
5992 	    error ("iteration variable %qE should not be reduction",
5993 		   DECL_NAME (decl));
5994 	  else if (simd == 1 && (n->value & GOVD_LASTPRIVATE) != 0)
5995 	    error ("iteration variable %qE should not be lastprivate",
5996 		   DECL_NAME (decl));
5997 	  else if (simd && (n->value & GOVD_PRIVATE) != 0)
5998 	    error ("iteration variable %qE should not be private",
5999 		   DECL_NAME (decl));
6000 	  else if (simd == 2 && (n->value & GOVD_LINEAR) != 0)
6001 	    error ("iteration variable %qE is predetermined linear",
6002 		   DECL_NAME (decl));
6003 	}
6004       return (ctx == gimplify_omp_ctxp
6005 	      || (ctx->region_type == ORT_COMBINED_PARALLEL
6006 		  && gimplify_omp_ctxp->outer_context == ctx));
6007     }
6008 
6009   if (ctx->region_type != ORT_WORKSHARE
6010       && ctx->region_type != ORT_SIMD)
6011     return false;
6012   else if (ctx->outer_context)
6013     return omp_is_private (ctx->outer_context, decl, simd);
6014   return false;
6015 }
6016 
6017 /* Return true if DECL is private within a parallel region
6018    that binds to the current construct's context or in parallel
6019    region's REDUCTION clause.  */
6020 
static bool
omp_check_private (struct gimplify_omp_ctx *ctx, tree decl, bool copyprivate)
{
  splay_tree_node n;

  /* Walk outward from CTX looking for a binding of DECL.  */
  do
    {
      ctx = ctx->outer_context;
      if (ctx == NULL)
	return !(is_global_var (decl)
		 /* References might be private, but might be shared too,
		    when checking for copyprivate, assume they might be
		    private, otherwise assume they might be shared.  */
		 || (!copyprivate
		     && lang_hooks.decls.omp_privatize_by_reference (decl)));

      /* NOTE(review): in a do-while, "continue" jumps to the loop
	 condition below; since a target context's region_type is
	 neither ORT_WORKSHARE nor ORT_SIMD, this ends the walk instead
	 of skipping past the target context — confirm intended.  */
      if ((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0)
	continue;

      n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
      if (n != NULL)
	return (n->value & GOVD_SHARED) == 0;
    }
  while (ctx->region_type == ORT_WORKSHARE
	 || ctx->region_type == ORT_SIMD);
  return false;
}
6048 
6049 /* Return true if the CTX is combined with distribute and thus
6050    lastprivate can't be supported.  */
6051 
6052 static bool
6053 omp_no_lastprivate (struct gimplify_omp_ctx *ctx)
6054 {
6055   do
6056     {
6057       if (ctx->outer_context == NULL)
6058 	return false;
6059       ctx = ctx->outer_context;
6060       switch (ctx->region_type)
6061 	{
6062 	case ORT_WORKSHARE:
6063 	  if (!ctx->combined_loop)
6064 	    return false;
6065 	  if (ctx->distribute)
6066 	    return true;
6067 	  break;
6068 	case ORT_COMBINED_PARALLEL:
6069 	  break;
6070 	case ORT_COMBINED_TEAMS:
6071 	  return true;
6072 	default:
6073 	  return false;
6074 	}
6075     }
6076   while (1);
6077 }
6078 
6079 /* Scan the OMP clauses in *LIST_P, installing mappings into a new
6080    and previous omp contexts.  */
6081 
6082 static void
6083 gimplify_scan_omp_clauses (tree *list_p, gimple_seq *pre_p,
6084 			   enum omp_region_type region_type)
6085 {
6086   struct gimplify_omp_ctx *ctx, *outer_ctx;
6087   tree c;
6088 
6089   ctx = new_omp_context (region_type);
6090   outer_ctx = ctx->outer_context;
6091 
6092   while ((c = *list_p) != NULL)
6093     {
6094       bool remove = false;
6095       bool notice_outer = true;
6096       const char *check_non_private = NULL;
6097       unsigned int flags;
6098       tree decl;
6099 
6100       switch (OMP_CLAUSE_CODE (c))
6101 	{
6102 	case OMP_CLAUSE_PRIVATE:
6103 	  flags = GOVD_PRIVATE | GOVD_EXPLICIT;
6104 	  if (lang_hooks.decls.omp_private_outer_ref (OMP_CLAUSE_DECL (c)))
6105 	    {
6106 	      flags |= GOVD_PRIVATE_OUTER_REF;
6107 	      OMP_CLAUSE_PRIVATE_OUTER_REF (c) = 1;
6108 	    }
6109 	  else
6110 	    notice_outer = false;
6111 	  goto do_add;
6112 	case OMP_CLAUSE_SHARED:
6113 	  flags = GOVD_SHARED | GOVD_EXPLICIT;
6114 	  goto do_add;
6115 	case OMP_CLAUSE_FIRSTPRIVATE:
6116 	  flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
6117 	  check_non_private = "firstprivate";
6118 	  goto do_add;
6119 	case OMP_CLAUSE_LASTPRIVATE:
6120 	  flags = GOVD_LASTPRIVATE | GOVD_SEEN | GOVD_EXPLICIT;
6121 	  check_non_private = "lastprivate";
6122 	  decl = OMP_CLAUSE_DECL (c);
6123 	  if (omp_no_lastprivate (ctx))
6124 	    {
6125 	      notice_outer = false;
6126 	      flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
6127 	    }
6128 	  else if (error_operand_p (decl))
6129 	    goto do_add;
6130 	  else if (outer_ctx
6131 		   && outer_ctx->region_type == ORT_COMBINED_PARALLEL
6132 		   && splay_tree_lookup (outer_ctx->variables,
6133 					 (splay_tree_key) decl) == NULL)
6134 	    omp_add_variable (outer_ctx, decl, GOVD_SHARED | GOVD_SEEN);
6135 	  else if (outer_ctx
6136 		   && outer_ctx->region_type == ORT_WORKSHARE
6137 		   && outer_ctx->combined_loop
6138 		   && splay_tree_lookup (outer_ctx->variables,
6139 					 (splay_tree_key) decl) == NULL
6140 		   && !omp_check_private (outer_ctx, decl, false))
6141 	    {
6142 	      omp_add_variable (outer_ctx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
6143 	      if (outer_ctx->outer_context
6144 		  && (outer_ctx->outer_context->region_type
6145 		      == ORT_COMBINED_PARALLEL)
6146 		  && splay_tree_lookup (outer_ctx->outer_context->variables,
6147 					(splay_tree_key) decl) == NULL)
6148 		omp_add_variable (outer_ctx->outer_context, decl,
6149 				  GOVD_SHARED | GOVD_SEEN);
6150 	    }
6151 	  goto do_add;
6152 	case OMP_CLAUSE_REDUCTION:
6153 	  flags = GOVD_REDUCTION | GOVD_SEEN | GOVD_EXPLICIT;
6154 	  check_non_private = "reduction";
6155 	  goto do_add;
6156 	case OMP_CLAUSE_LINEAR:
6157 	  if (gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c), pre_p, NULL,
6158 			     is_gimple_val, fb_rvalue) == GS_ERROR)
6159 	    {
6160 	      remove = true;
6161 	      break;
6162 	    }
6163 	  else
6164 	    {
6165 	      /* For combined #pragma omp parallel for simd, need to put
6166 		 lastprivate and perhaps firstprivate too on the
6167 		 parallel.  Similarly for #pragma omp for simd.  */
6168 	      struct gimplify_omp_ctx *octx = outer_ctx;
6169 	      decl = NULL_TREE;
6170 	      if (omp_no_lastprivate (ctx))
6171 		OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
6172 	      do
6173 		{
6174 		  if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
6175 		      && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
6176 		    break;
6177 		  decl = OMP_CLAUSE_DECL (c);
6178 		  if (error_operand_p (decl))
6179 		    {
6180 		      decl = NULL_TREE;
6181 		      break;
6182 		    }
6183 		  if (octx
6184 		      && octx->region_type == ORT_WORKSHARE
6185 		      && octx->combined_loop)
6186 		    {
6187 		      if (octx->outer_context
6188 			  && (octx->outer_context->region_type
6189 			      == ORT_COMBINED_PARALLEL
6190 			      || (octx->outer_context->region_type
6191 				  == ORT_COMBINED_TEAMS)))
6192 			octx = octx->outer_context;
6193 		      else if (omp_check_private (octx, decl, false))
6194 			break;
6195 		    }
6196 		  else
6197 		    break;
6198 		  if (splay_tree_lookup (octx->variables,
6199 					 (splay_tree_key) decl) != NULL)
6200 		    {
6201 		      octx = NULL;
6202 		      break;
6203 		    }
6204 		  flags = GOVD_SEEN;
6205 		  if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
6206 		    flags |= GOVD_FIRSTPRIVATE;
6207 		  if (!OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
6208 		    flags |= GOVD_LASTPRIVATE;
6209 		  omp_add_variable (octx, decl, flags);
6210 		  if (octx->outer_context == NULL)
6211 		    break;
6212 		  octx = octx->outer_context;
6213 		}
6214 	      while (1);
6215 	      if (octx
6216 		  && decl
6217 		  && (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
6218 		      || !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
6219 		omp_notice_variable (octx, decl, true);
6220 	    }
6221 	  flags = GOVD_LINEAR | GOVD_EXPLICIT;
6222 	  if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
6223 	      && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
6224 	    {
6225 	      notice_outer = false;
6226 	      flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
6227 	    }
6228 	  goto do_add;
6229 
6230 	case OMP_CLAUSE_MAP:
6231 	  decl = OMP_CLAUSE_DECL (c);
6232 	  if (error_operand_p (decl))
6233 	    {
6234 	      remove = true;
6235 	      break;
6236 	    }
6237 	  if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
6238 	    OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
6239 				  : TYPE_SIZE_UNIT (TREE_TYPE (decl));
6240 	  if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
6241 			     NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
6242 	    {
6243 	      remove = true;
6244 	      break;
6245 	    }
6246 	  if (!DECL_P (decl))
6247 	    {
6248 	      if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p,
6249 				 NULL, is_gimple_lvalue, fb_lvalue)
6250 		  == GS_ERROR)
6251 		{
6252 		  remove = true;
6253 		  break;
6254 		}
6255 	      break;
6256 	    }
6257 	  flags = GOVD_MAP | GOVD_EXPLICIT;
6258 	  goto do_add;
6259 
6260 	case OMP_CLAUSE_DEPEND:
6261 	  if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
6262 	    {
6263 	      gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
6264 			     NULL, is_gimple_val, fb_rvalue);
6265 	      OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
6266 	    }
6267 	  if (error_operand_p (OMP_CLAUSE_DECL (c)))
6268 	    {
6269 	      remove = true;
6270 	      break;
6271 	    }
6272 	  OMP_CLAUSE_DECL (c) = build_fold_addr_expr (OMP_CLAUSE_DECL (c));
6273 	  if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
6274 			     is_gimple_val, fb_rvalue) == GS_ERROR)
6275 	    {
6276 	      remove = true;
6277 	      break;
6278 	    }
6279 	  break;
6280 
6281 	case OMP_CLAUSE_TO:
6282 	case OMP_CLAUSE_FROM:
6283 	case OMP_CLAUSE__CACHE_:
6284 	  decl = OMP_CLAUSE_DECL (c);
6285 	  if (error_operand_p (decl))
6286 	    {
6287 	      remove = true;
6288 	      break;
6289 	    }
6290 	  if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
6291 	    OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
6292 				  : TYPE_SIZE_UNIT (TREE_TYPE (decl));
6293 	  if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
6294 			     NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
6295 	    {
6296 	      remove = true;
6297 	      break;
6298 	    }
6299 	  if (!DECL_P (decl))
6300 	    {
6301 	      if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p,
6302 				 NULL, is_gimple_lvalue, fb_lvalue)
6303 		  == GS_ERROR)
6304 		{
6305 		  remove = true;
6306 		  break;
6307 		}
6308 	      break;
6309 	    }
6310 	  goto do_notice;
6311 
6312 	do_add:
6313 	  decl = OMP_CLAUSE_DECL (c);
6314 	  if (error_operand_p (decl))
6315 	    {
6316 	      remove = true;
6317 	      break;
6318 	    }
6319 	  omp_add_variable (ctx, decl, flags);
6320 	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6321 	      && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
6322 	    {
6323 	      omp_add_variable (ctx, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
6324 				GOVD_LOCAL | GOVD_SEEN);
6325 	      gimplify_omp_ctxp = ctx;
6326 	      push_gimplify_context ();
6327 
6328 	      OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
6329 	      OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
6330 
6331 	      gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c),
6332 		  		&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c));
6333 	      pop_gimplify_context
6334 		(gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c)));
6335 	      push_gimplify_context ();
6336 	      gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c),
6337 		  		&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
6338 	      pop_gimplify_context
6339 		(gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c)));
6340 	      OMP_CLAUSE_REDUCTION_INIT (c) = NULL_TREE;
6341 	      OMP_CLAUSE_REDUCTION_MERGE (c) = NULL_TREE;
6342 
6343 	      gimplify_omp_ctxp = outer_ctx;
6344 	    }
6345 	  else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6346 		   && OMP_CLAUSE_LASTPRIVATE_STMT (c))
6347 	    {
6348 	      gimplify_omp_ctxp = ctx;
6349 	      push_gimplify_context ();
6350 	      if (TREE_CODE (OMP_CLAUSE_LASTPRIVATE_STMT (c)) != BIND_EXPR)
6351 		{
6352 		  tree bind = build3 (BIND_EXPR, void_type_node, NULL,
6353 				      NULL, NULL);
6354 		  TREE_SIDE_EFFECTS (bind) = 1;
6355 		  BIND_EXPR_BODY (bind) = OMP_CLAUSE_LASTPRIVATE_STMT (c);
6356 		  OMP_CLAUSE_LASTPRIVATE_STMT (c) = bind;
6357 		}
6358 	      gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c),
6359 				&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
6360 	      pop_gimplify_context
6361 		(gimple_seq_first_stmt (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c)));
6362 	      OMP_CLAUSE_LASTPRIVATE_STMT (c) = NULL_TREE;
6363 
6364 	      gimplify_omp_ctxp = outer_ctx;
6365 	    }
6366 	  else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
6367 		   && OMP_CLAUSE_LINEAR_STMT (c))
6368 	    {
6369 	      gimplify_omp_ctxp = ctx;
6370 	      push_gimplify_context ();
6371 	      if (TREE_CODE (OMP_CLAUSE_LINEAR_STMT (c)) != BIND_EXPR)
6372 		{
6373 		  tree bind = build3 (BIND_EXPR, void_type_node, NULL,
6374 				      NULL, NULL);
6375 		  TREE_SIDE_EFFECTS (bind) = 1;
6376 		  BIND_EXPR_BODY (bind) = OMP_CLAUSE_LINEAR_STMT (c);
6377 		  OMP_CLAUSE_LINEAR_STMT (c) = bind;
6378 		}
6379 	      gimplify_and_add (OMP_CLAUSE_LINEAR_STMT (c),
6380 				&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
6381 	      pop_gimplify_context
6382 		(gimple_seq_first_stmt (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c)));
6383 	      OMP_CLAUSE_LINEAR_STMT (c) = NULL_TREE;
6384 
6385 	      gimplify_omp_ctxp = outer_ctx;
6386 	    }
6387 	  if (notice_outer)
6388 	    goto do_notice;
6389 	  break;
6390 
6391 	case OMP_CLAUSE_COPYIN:
6392 	case OMP_CLAUSE_COPYPRIVATE:
6393 	  decl = OMP_CLAUSE_DECL (c);
6394 	  if (error_operand_p (decl))
6395 	    {
6396 	      remove = true;
6397 	      break;
6398 	    }
6399 	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_COPYPRIVATE
6400 	      && !remove
6401 	      && !omp_check_private (ctx, decl, true))
6402 	    {
6403 	      remove = true;
6404 	      if (is_global_var (decl))
6405 		{
6406 		  if (DECL_THREAD_LOCAL_P (decl))
6407 		    remove = false;
6408 		  else if (DECL_HAS_VALUE_EXPR_P (decl))
6409 		    {
6410 		      tree value = get_base_address (DECL_VALUE_EXPR (decl));
6411 
6412 		      if (value
6413 			  && DECL_P (value)
6414 			  && DECL_THREAD_LOCAL_P (value))
6415 			remove = false;
6416 		    }
6417 		}
6418 	      if (remove)
6419 		error_at (OMP_CLAUSE_LOCATION (c),
6420 			  "copyprivate variable %qE is not threadprivate"
6421 			  " or private in outer context", DECL_NAME (decl));
6422 	    }
6423 	do_notice:
6424 	  if (outer_ctx)
6425 	    omp_notice_variable (outer_ctx, decl, true);
6426 	  if (check_non_private
6427 	      && region_type == ORT_WORKSHARE
6428 	      && omp_check_private (ctx, decl, false))
6429 	    {
6430 	      error ("%s variable %qE is private in outer context",
6431 		     check_non_private, DECL_NAME (decl));
6432 	      remove = true;
6433 	    }
6434 	  break;
6435 
6436 	case OMP_CLAUSE_FINAL:
6437 	case OMP_CLAUSE_IF:
6438 	  OMP_CLAUSE_OPERAND (c, 0)
6439 	    = gimple_boolify (OMP_CLAUSE_OPERAND (c, 0));
6440 	  /* Fall through.  */
6441 
6442 	case OMP_CLAUSE_SCHEDULE:
6443 	case OMP_CLAUSE_NUM_THREADS:
6444 	case OMP_CLAUSE_NUM_TEAMS:
6445 	case OMP_CLAUSE_THREAD_LIMIT:
6446 	case OMP_CLAUSE_DIST_SCHEDULE:
6447 	case OMP_CLAUSE_DEVICE:
6448 	case OMP_CLAUSE__CILK_FOR_COUNT_:
6449 	case OMP_CLAUSE_ASYNC:
6450 	case OMP_CLAUSE_WAIT:
6451 	case OMP_CLAUSE_NUM_GANGS:
6452 	case OMP_CLAUSE_NUM_WORKERS:
6453 	case OMP_CLAUSE_VECTOR_LENGTH:
6454 	case OMP_CLAUSE_GANG:
6455 	case OMP_CLAUSE_WORKER:
6456 	case OMP_CLAUSE_VECTOR:
6457 	  if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
6458 			     is_gimple_val, fb_rvalue) == GS_ERROR)
6459 	    remove = true;
6460 	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_GANG
6461 	      && gimplify_expr (&OMP_CLAUSE_OPERAND (c, 1), pre_p, NULL,
6462 				is_gimple_val, fb_rvalue) == GS_ERROR)
6463 	    remove = true;
6464 	  break;
6465 
6466 	case OMP_CLAUSE_DEVICE_RESIDENT:
6467 	case OMP_CLAUSE_USE_DEVICE:
6468 	case OMP_CLAUSE_INDEPENDENT:
6469 	  remove = true;
6470 	  break;
6471 
6472 	case OMP_CLAUSE_NOWAIT:
6473 	case OMP_CLAUSE_ORDERED:
6474 	case OMP_CLAUSE_UNTIED:
6475 	case OMP_CLAUSE_COLLAPSE:
6476 	case OMP_CLAUSE_AUTO:
6477 	case OMP_CLAUSE_SEQ:
6478 	case OMP_CLAUSE_MERGEABLE:
6479 	case OMP_CLAUSE_PROC_BIND:
6480 	case OMP_CLAUSE_SAFELEN:
6481 	  break;
6482 
6483 	case OMP_CLAUSE_ALIGNED:
6484 	  decl = OMP_CLAUSE_DECL (c);
6485 	  if (error_operand_p (decl))
6486 	    {
6487 	      remove = true;
6488 	      break;
6489 	    }
6490 	  if (gimplify_expr (&OMP_CLAUSE_ALIGNED_ALIGNMENT (c), pre_p, NULL,
6491 			     is_gimple_val, fb_rvalue) == GS_ERROR)
6492 	    {
6493 	      remove = true;
6494 	      break;
6495 	    }
6496 	  if (!is_global_var (decl)
6497 	      && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
6498 	    omp_add_variable (ctx, decl, GOVD_ALIGNED);
6499 	  break;
6500 
6501 	case OMP_CLAUSE_DEFAULT:
6502 	  ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
6503 	  break;
6504 
6505 	default:
6506 	  gcc_unreachable ();
6507 	}
6508 
6509       if (remove)
6510 	*list_p = OMP_CLAUSE_CHAIN (c);
6511       else
6512 	list_p = &OMP_CLAUSE_CHAIN (c);
6513     }
6514 
6515   gimplify_omp_ctxp = ctx;
6516 }
6517 
/* Bundle of state threaded through splay_tree_foreach into
   gimplify_adjust_omp_clauses_1, which only accepts a single
   opaque data pointer.  */
struct gimplify_adjust_omp_clauses_data
{
  tree *list_p;		/* Tail of the clause chain where implicit
			   data-sharing clauses are appended.  */
  gimple_seq *pre_p;	/* Sequence for statements emitted while
			   finishing a clause (lang hook).  */
};
6523 
6524 /* For all variables that were not actually used within the context,
6525    remove PRIVATE, SHARED, and FIRSTPRIVATE clauses.  */
6526 
static int
gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data)
{
  tree *list_p = ((struct gimplify_adjust_omp_clauses_data *) data)->list_p;
  gimple_seq *pre_p
    = ((struct gimplify_adjust_omp_clauses_data *) data)->pre_p;
  /* The splay tree maps DECL -> GOVD_* flag bits.  */
  tree decl = (tree) n->key;
  unsigned flags = n->value;
  enum omp_clause_code code;
  tree clause;
  bool private_debug;

  /* Variables with an explicit clause already have one; context-local
     variables need none.  */
  if (flags & (GOVD_EXPLICIT | GOVD_LOCAL))
    return 0;
  /* Variables never referenced in the region need no clause either.  */
  if ((flags & GOVD_SEEN) == 0)
    return 0;
  if (flags & GOVD_DEBUG_PRIVATE)
    {
      gcc_assert ((flags & GOVD_DATA_SHARE_CLASS) == GOVD_PRIVATE);
      private_debug = true;
    }
  else if (flags & GOVD_MAP)
    private_debug = false;
  else
    /* Let the front end decide whether this should become a
       debug-only private clause.  */
    private_debug
      = lang_hooks.decls.omp_private_debug_clause (decl,
						   !!(flags & GOVD_SHARED));
  /* Translate the recorded GOVD_* class into a clause code.  The order
     of these tests establishes the priority among flag bits.  */
  if (private_debug)
    code = OMP_CLAUSE_PRIVATE;
  else if (flags & GOVD_MAP)
    code = OMP_CLAUSE_MAP;
  else if (flags & GOVD_SHARED)
    {
      if (is_global_var (decl))
	{
	  /* A global is only worth an explicit 'shared' clause if some
	     enclosing context privatizes (or maps) it; otherwise the
	     default sharing already does the right thing.  */
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
	  while (ctx != NULL)
	    {
	      splay_tree_node on
		= splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	      if (on && (on->value & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
				      | GOVD_PRIVATE | GOVD_REDUCTION
				      | GOVD_LINEAR | GOVD_MAP)) != 0)
		break;
	      ctx = ctx->outer_context;
	    }
	  if (ctx == NULL)
	    return 0;
	}
      code = OMP_CLAUSE_SHARED;
    }
  else if (flags & GOVD_PRIVATE)
    code = OMP_CLAUSE_PRIVATE;
  else if (flags & GOVD_FIRSTPRIVATE)
    code = OMP_CLAUSE_FIRSTPRIVATE;
  else if (flags & GOVD_LASTPRIVATE)
    code = OMP_CLAUSE_LASTPRIVATE;
  else if (flags & GOVD_ALIGNED)
    return 0;
  else
    gcc_unreachable ();

  /* Build the implicit clause and push it on the front of the list.  */
  clause = build_omp_clause (input_location, code);
  OMP_CLAUSE_DECL (clause) = decl;
  OMP_CLAUSE_CHAIN (clause) = *list_p;
  if (private_debug)
    OMP_CLAUSE_PRIVATE_DEBUG (clause) = 1;
  else if (code == OMP_CLAUSE_PRIVATE && (flags & GOVD_PRIVATE_OUTER_REF))
    OMP_CLAUSE_PRIVATE_OUTER_REF (clause) = 1;
  else if (code == OMP_CLAUSE_MAP)
    {
      OMP_CLAUSE_SET_MAP_KIND (clause,
			       flags & GOVD_MAP_TO_ONLY
			       ? GOMP_MAP_TO
			       : GOMP_MAP_TOFROM);
      if (DECL_SIZE (decl)
	  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
	{
	  /* Variable-sized decl: its DECL_VALUE_EXPR is *ptr; map the
	     pointed-to storage and add a companion GOMP_MAP_POINTER
	     clause for the pointer itself.  */
	  tree decl2 = DECL_VALUE_EXPR (decl);
	  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
	  decl2 = TREE_OPERAND (decl2, 0);
	  gcc_assert (DECL_P (decl2));
	  tree mem = build_simple_mem_ref (decl2);
	  OMP_CLAUSE_DECL (clause) = mem;
	  OMP_CLAUSE_SIZE (clause) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
	  if (gimplify_omp_ctxp->outer_context)
	    {
	      struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
	      omp_notice_variable (ctx, decl2, true);
	      omp_notice_variable (ctx, OMP_CLAUSE_SIZE (clause), true);
	    }
	  tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
				      OMP_CLAUSE_MAP);
	  OMP_CLAUSE_DECL (nc) = decl;
	  OMP_CLAUSE_SIZE (nc) = size_zero_node;
	  OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
	  OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
	  OMP_CLAUSE_CHAIN (clause) = nc;
	}
      else
	OMP_CLAUSE_SIZE (clause) = DECL_SIZE_UNIT (decl);
    }
  if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_LASTPRIVATE) != 0)
    {
      /* A variable that is both firstprivate and lastprivate needs a
	 second clause; mark it so omp-low knows the pair belongs
	 together.  */
      tree nc = build_omp_clause (input_location, OMP_CLAUSE_LASTPRIVATE);
      OMP_CLAUSE_DECL (nc) = decl;
      OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (nc) = 1;
      OMP_CLAUSE_CHAIN (nc) = *list_p;
      OMP_CLAUSE_CHAIN (clause) = nc;
      /* Run the lang hook in the enclosing context, then restore.  */
      struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
      gimplify_omp_ctxp = ctx->outer_context;
      lang_hooks.decls.omp_finish_clause (nc, pre_p);
      gimplify_omp_ctxp = ctx;
    }
  *list_p = clause;
  /* Likewise finish the main clause with the outer context current.  */
  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
  gimplify_omp_ctxp = ctx->outer_context;
  lang_hooks.decls.omp_finish_clause (clause, pre_p);
  gimplify_omp_ctxp = ctx;
  return 0;
}
6648 
/* Post-process the clause chain *LIST_P after the construct body has
   been gimplified: drop clauses for variables never actually used,
   rewrite clauses for variable-sized decls, then append implicit
   data-sharing clauses via gimplify_adjust_omp_clauses_1 and pop the
   current gimplify OMP context.  PRE_P receives any statements emitted
   while finishing clauses.  */
static void
gimplify_adjust_omp_clauses (gimple_seq *pre_p, tree *list_p)
{
  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
  tree c, decl;

  while ((c = *list_p) != NULL)
    {
      splay_tree_node n;
      bool remove = false;

      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_SHARED:
	case OMP_CLAUSE_FIRSTPRIVATE:
	case OMP_CLAUSE_LINEAR:
	  /* Drop the clause if the variable was never seen in the
	     region; otherwise let the front end downgrade it to a
	     debug-only private clause.  */
	  decl = OMP_CLAUSE_DECL (c);
	  n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	  remove = !(n->value & GOVD_SEEN);
	  if (! remove)
	    {
	      bool shared = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED;
	      if ((n->value & GOVD_DEBUG_PRIVATE)
		  || lang_hooks.decls.omp_private_debug_clause (decl, shared))
		{
		  gcc_assert ((n->value & GOVD_DEBUG_PRIVATE) == 0
			      || ((n->value & GOVD_DATA_SHARE_CLASS)
				  == GOVD_PRIVATE));
		  OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_PRIVATE);
		  OMP_CLAUSE_PRIVATE_DEBUG (c) = 1;
		}
	    }
	  break;

	case OMP_CLAUSE_LASTPRIVATE:
	  /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
	     accurately reflect the presence of a FIRSTPRIVATE clause.  */
	  decl = OMP_CLAUSE_DECL (c);
	  n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	  OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
	    = (n->value & GOVD_FIRSTPRIVATE) != 0;
	  if (omp_no_lastprivate (ctx))
	    {
	      /* Lastprivate is not usable here; drop the combined
		 clause, or demote a plain one to private.  */
	      if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
		remove = true;
	      else
		OMP_CLAUSE_CODE (c) = OMP_CLAUSE_PRIVATE;
	    }
	  break;

	case OMP_CLAUSE_ALIGNED:
	  decl = OMP_CLAUSE_DECL (c);
	  if (!is_global_var (decl))
	    {
	      n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	      remove = n == NULL || !(n->value & GOVD_SEEN);
	      if (!remove && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
		{
		  struct gimplify_omp_ctx *octx;
		  if (n != NULL
		      && (n->value & (GOVD_DATA_SHARE_CLASS
				      & ~GOVD_FIRSTPRIVATE)))
		    remove = true;
		  else
		    for (octx = ctx->outer_context; octx;
			 octx = octx->outer_context)
		      {
			n = splay_tree_lookup (octx->variables,
					       (splay_tree_key) decl);
			if (n == NULL)
			  continue;
			if (n->value & GOVD_LOCAL)
			  break;
			/* We have to avoid assigning a shared variable
			   to itself when trying to add
			   __builtin_assume_aligned.  */
			if (n->value & GOVD_SHARED)
			  {
			    remove = true;
			    break;
			  }
		      }
		}
	    }
	  else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	    {
	      n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	      if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
		remove = true;
	    }
	  break;

	case OMP_CLAUSE_MAP:
	  decl = OMP_CLAUSE_DECL (c);
	  if (!DECL_P (decl))
	    break;
	  n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	  if (ctx->region_type == ORT_TARGET && !(n->value & GOVD_SEEN))
	    remove = true;
	  else if (DECL_SIZE (decl)
		   && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST
		   && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_POINTER)
	    {
	      /* For GOMP_MAP_FORCE_DEVICEPTR, we'll never enter here, because
		 for these, TREE_CODE (DECL_SIZE (decl)) will always be
		 INTEGER_CST.  */
	      gcc_assert (OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FORCE_DEVICEPTR);

	      /* Variable-sized decl: remap the clause to the pointed-to
		 storage and append a GOMP_MAP_POINTER companion clause
		 for the pointer, continuing the walk after it.  */
	      tree decl2 = DECL_VALUE_EXPR (decl);
	      gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
	      decl2 = TREE_OPERAND (decl2, 0);
	      gcc_assert (DECL_P (decl2));
	      tree mem = build_simple_mem_ref (decl2);
	      OMP_CLAUSE_DECL (c) = mem;
	      OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
	      if (ctx->outer_context)
		{
		  omp_notice_variable (ctx->outer_context, decl2, true);
		  omp_notice_variable (ctx->outer_context,
				       OMP_CLAUSE_SIZE (c), true);
		}
	      tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (c),
					  OMP_CLAUSE_MAP);
	      OMP_CLAUSE_DECL (nc) = decl;
	      OMP_CLAUSE_SIZE (nc) = size_zero_node;
	      OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
	      OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (c);
	      OMP_CLAUSE_CHAIN (c) = nc;
	      /* Skip over the freshly inserted clause.  */
	      c = nc;
	    }
	  else if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
	    OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
	  break;

	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	case OMP_CLAUSE__CACHE_:
	  decl = OMP_CLAUSE_DECL (c);
	  if (!DECL_P (decl))
	    break;
	  if (DECL_SIZE (decl)
	      && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
	    {
	      /* Same variable-sized-decl rewrite as for MAP, but no
		 companion pointer clause is needed here.  */
	      tree decl2 = DECL_VALUE_EXPR (decl);
	      gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
	      decl2 = TREE_OPERAND (decl2, 0);
	      gcc_assert (DECL_P (decl2));
	      tree mem = build_simple_mem_ref (decl2);
	      OMP_CLAUSE_DECL (c) = mem;
	      OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
	      if (ctx->outer_context)
		{
		  omp_notice_variable (ctx->outer_context, decl2, true);
		  omp_notice_variable (ctx->outer_context,
				       OMP_CLAUSE_SIZE (c), true);
		}
	    }
	  else if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
	    OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
	  break;

	/* All remaining clause kinds need no adjustment.  */
	case OMP_CLAUSE_REDUCTION:
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_DIST_SCHEDULE:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE__CILK_FOR_COUNT_:
	case OMP_CLAUSE_ASYNC:
	case OMP_CLAUSE_WAIT:
	case OMP_CLAUSE_DEVICE_RESIDENT:
	case OMP_CLAUSE_USE_DEVICE:
	case OMP_CLAUSE_INDEPENDENT:
	case OMP_CLAUSE_NUM_GANGS:
	case OMP_CLAUSE_NUM_WORKERS:
	case OMP_CLAUSE_VECTOR_LENGTH:
	case OMP_CLAUSE_GANG:
	case OMP_CLAUSE_WORKER:
	case OMP_CLAUSE_VECTOR:
	case OMP_CLAUSE_AUTO:
	case OMP_CLAUSE_SEQ:
	  break;

	default:
	  gcc_unreachable ();
	}

      /* Unlink removed clauses; otherwise step to the next one.  */
      if (remove)
	*list_p = OMP_CLAUSE_CHAIN (c);
      else
	list_p = &OMP_CLAUSE_CHAIN (c);
    }

  /* Add in any implicit data sharing.  */
  struct gimplify_adjust_omp_clauses_data data;
  data.list_p = list_p;
  data.pre_p = pre_p;
  splay_tree_foreach (ctx->variables, gimplify_adjust_omp_clauses_1, &data);

  gimplify_omp_ctxp = ctx->outer_context;
  delete_omp_context (ctx);
}
6866 
6867 /* Gimplify OACC_CACHE.  */
6868 
6869 static void
6870 gimplify_oacc_cache (tree *expr_p, gimple_seq *pre_p)
6871 {
6872   tree expr = *expr_p;
6873 
6874   gimplify_scan_omp_clauses (&OACC_CACHE_CLAUSES (expr), pre_p, ORT_WORKSHARE);
6875   gimplify_adjust_omp_clauses (pre_p, &OACC_CACHE_CLAUSES (expr));
6876 
6877   /* TODO: Do something sensible with this information.  */
6878 
6879   *expr_p = NULL_TREE;
6880 }
6881 
6882 /* Gimplify the contents of an OMP_PARALLEL statement.  This involves
6883    gimplification of the body, as well as scanning the body for used
6884    variables.  We need to do this scan now, because variable-sized
6885    decls will be decomposed during gimplification.  */
6886 
6887 static void
6888 gimplify_omp_parallel (tree *expr_p, gimple_seq *pre_p)
6889 {
6890   tree expr = *expr_p;
6891   gimple g;
6892   gimple_seq body = NULL;
6893 
6894   gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p,
6895 			     OMP_PARALLEL_COMBINED (expr)
6896 			     ? ORT_COMBINED_PARALLEL
6897 			     : ORT_PARALLEL);
6898 
6899   push_gimplify_context ();
6900 
6901   g = gimplify_and_return_first (OMP_PARALLEL_BODY (expr), &body);
6902   if (gimple_code (g) == GIMPLE_BIND)
6903     pop_gimplify_context (g);
6904   else
6905     pop_gimplify_context (NULL);
6906 
6907   gimplify_adjust_omp_clauses (pre_p, &OMP_PARALLEL_CLAUSES (expr));
6908 
6909   g = gimple_build_omp_parallel (body,
6910 				 OMP_PARALLEL_CLAUSES (expr),
6911 				 NULL_TREE, NULL_TREE);
6912   if (OMP_PARALLEL_COMBINED (expr))
6913     gimple_omp_set_subcode (g, GF_OMP_PARALLEL_COMBINED);
6914   gimplify_seq_add_stmt (pre_p, g);
6915   *expr_p = NULL_TREE;
6916 }
6917 
6918 /* Gimplify the contents of an OMP_TASK statement.  This involves
6919    gimplification of the body, as well as scanning the body for used
6920    variables.  We need to do this scan now, because variable-sized
6921    decls will be decomposed during gimplification.  */
6922 
6923 static void
6924 gimplify_omp_task (tree *expr_p, gimple_seq *pre_p)
6925 {
6926   tree expr = *expr_p;
6927   gimple g;
6928   gimple_seq body = NULL;
6929 
6930   gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr), pre_p,
6931 			     find_omp_clause (OMP_TASK_CLAUSES (expr),
6932 					      OMP_CLAUSE_UNTIED)
6933 			     ? ORT_UNTIED_TASK : ORT_TASK);
6934 
6935   push_gimplify_context ();
6936 
6937   g = gimplify_and_return_first (OMP_TASK_BODY (expr), &body);
6938   if (gimple_code (g) == GIMPLE_BIND)
6939     pop_gimplify_context (g);
6940   else
6941     pop_gimplify_context (NULL);
6942 
6943   gimplify_adjust_omp_clauses (pre_p, &OMP_TASK_CLAUSES (expr));
6944 
6945   g = gimple_build_omp_task (body,
6946 			     OMP_TASK_CLAUSES (expr),
6947 			     NULL_TREE, NULL_TREE,
6948 			     NULL_TREE, NULL_TREE, NULL_TREE);
6949   gimplify_seq_add_stmt (pre_p, g);
6950   *expr_p = NULL_TREE;
6951 }
6952 
6953 /* Helper function of gimplify_omp_for, find OMP_FOR resp. OMP_SIMD
6954    with non-NULL OMP_FOR_INIT.  */
6955 
6956 static tree
6957 find_combined_omp_for (tree *tp, int *walk_subtrees, void *)
6958 {
6959   *walk_subtrees = 0;
6960   switch (TREE_CODE (*tp))
6961     {
6962     case OMP_FOR:
6963       *walk_subtrees = 1;
6964       /* FALLTHRU */
6965     case OMP_SIMD:
6966       if (OMP_FOR_INIT (*tp) != NULL_TREE)
6967 	return *tp;
6968       break;
6969     case BIND_EXPR:
6970     case STATEMENT_LIST:
6971     case OMP_PARALLEL:
6972       *walk_subtrees = 1;
6973       break;
6974     default:
6975       break;
6976     }
6977   return NULL_TREE;
6978 }
6979 
6980 /* Gimplify the gross structure of an OMP_FOR statement.  */
6981 
6982 static enum gimplify_status
6983 gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
6984 {
6985   tree for_stmt, orig_for_stmt, inner_for_stmt = NULL_TREE, decl, var, t;
6986   enum gimplify_status ret = GS_ALL_DONE;
6987   enum gimplify_status tret;
6988   gomp_for *gfor;
6989   gimple_seq for_body, for_pre_body;
6990   int i;
6991   bool simd;
6992   bitmap has_decl_expr = NULL;
6993 
6994   orig_for_stmt = for_stmt = *expr_p;
6995 
6996   switch (TREE_CODE (for_stmt))
6997     {
6998     case OMP_FOR:
6999     case CILK_FOR:
7000     case OMP_DISTRIBUTE:
7001     case OACC_LOOP:
7002       simd = false;
7003       break;
7004     case OMP_SIMD:
7005     case CILK_SIMD:
7006       simd = true;
7007       break;
7008     default:
7009       gcc_unreachable ();
7010     }
7011 
7012   /* Set OMP_CLAUSE_LINEAR_NO_COPYIN flag on explicit linear
7013      clause for the IV.  */
7014   if (simd && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
7015     {
7016       t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), 0);
7017       gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
7018       decl = TREE_OPERAND (t, 0);
7019       for (tree c = OMP_FOR_CLAUSES (for_stmt); c; c = OMP_CLAUSE_CHAIN (c))
7020 	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
7021 	    && OMP_CLAUSE_DECL (c) == decl)
7022 	  {
7023 	    OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
7024 	    break;
7025 	  }
7026     }
7027 
7028   if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
7029     {
7030       gcc_assert (TREE_CODE (for_stmt) != OACC_LOOP);
7031       inner_for_stmt = walk_tree (&OMP_FOR_BODY (for_stmt),
7032 				  find_combined_omp_for, NULL, NULL);
7033       if (inner_for_stmt == NULL_TREE)
7034 	{
7035 	  gcc_assert (seen_error ());
7036 	  *expr_p = NULL_TREE;
7037 	  return GS_ERROR;
7038 	}
7039     }
7040 
7041   gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt), pre_p,
7042 			     simd ? ORT_SIMD : ORT_WORKSHARE);
7043   if (TREE_CODE (for_stmt) == OMP_DISTRIBUTE)
7044     gimplify_omp_ctxp->distribute = true;
7045 
7046   /* Handle OMP_FOR_INIT.  */
7047   for_pre_body = NULL;
7048   if (simd && OMP_FOR_PRE_BODY (for_stmt))
7049     {
7050       has_decl_expr = BITMAP_ALLOC (NULL);
7051       if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == DECL_EXPR
7052 	  && TREE_CODE (DECL_EXPR_DECL (OMP_FOR_PRE_BODY (for_stmt)))
7053 	     == VAR_DECL)
7054 	{
7055 	  t = OMP_FOR_PRE_BODY (for_stmt);
7056 	  bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
7057 	}
7058       else if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == STATEMENT_LIST)
7059 	{
7060 	  tree_stmt_iterator si;
7061 	  for (si = tsi_start (OMP_FOR_PRE_BODY (for_stmt)); !tsi_end_p (si);
7062 	       tsi_next (&si))
7063 	    {
7064 	      t = tsi_stmt (si);
7065 	      if (TREE_CODE (t) == DECL_EXPR
7066 		  && TREE_CODE (DECL_EXPR_DECL (t)) == VAR_DECL)
7067 		bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
7068 	    }
7069 	}
7070     }
7071   gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
7072   OMP_FOR_PRE_BODY (for_stmt) = NULL_TREE;
7073 
7074   if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
7075     {
7076       for_stmt = inner_for_stmt;
7077       gimplify_omp_ctxp->combined_loop = true;
7078     }
7079 
7080   for_body = NULL;
7081   gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
7082 	      == TREE_VEC_LENGTH (OMP_FOR_COND (for_stmt)));
7083   gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
7084 	      == TREE_VEC_LENGTH (OMP_FOR_INCR (for_stmt)));
7085   for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
7086     {
7087       t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
7088       gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
7089       decl = TREE_OPERAND (t, 0);
7090       gcc_assert (DECL_P (decl));
7091       gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl))
7092 		  || POINTER_TYPE_P (TREE_TYPE (decl)));
7093 
7094       /* Make sure the iteration variable is private.  */
7095       tree c = NULL_TREE;
7096       tree c2 = NULL_TREE;
7097       if (orig_for_stmt != for_stmt)
7098 	/* Do this only on innermost construct for combined ones.  */;
7099       else if (simd)
7100 	{
7101 	  splay_tree_node n = splay_tree_lookup (gimplify_omp_ctxp->variables,
7102 						 (splay_tree_key)decl);
7103 	  omp_is_private (gimplify_omp_ctxp, decl,
7104 			  1 + (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
7105 			       != 1));
7106 	  if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
7107 	    omp_notice_variable (gimplify_omp_ctxp, decl, true);
7108 	  else if (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
7109 	    {
7110 	      c = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
7111 	      OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
7112 	      unsigned int flags = GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN;
7113 	      if ((has_decl_expr
7114 		   && bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
7115 		  || omp_no_lastprivate (gimplify_omp_ctxp))
7116 		{
7117 		  OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
7118 		  flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
7119 		}
7120 	      struct gimplify_omp_ctx *outer
7121 		= gimplify_omp_ctxp->outer_context;
7122 	      if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
7123 		{
7124 		  if (outer->region_type == ORT_WORKSHARE
7125 		      && outer->combined_loop)
7126 		    {
7127 		      n = splay_tree_lookup (outer->variables,
7128 					     (splay_tree_key)decl);
7129 		      if (n != NULL && (n->value & GOVD_LOCAL) != 0)
7130 			{
7131 			  OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
7132 			  flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
7133 			}
7134 		    }
7135 		}
7136 
7137 	      OMP_CLAUSE_DECL (c) = decl;
7138 	      OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
7139 	      OMP_FOR_CLAUSES (for_stmt) = c;
7140 	      omp_add_variable (gimplify_omp_ctxp, decl, flags);
7141 	      if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
7142 		{
7143 		  if (outer->region_type == ORT_WORKSHARE
7144 		      && outer->combined_loop)
7145 		    {
7146 		      if (outer->outer_context
7147 			  && (outer->outer_context->region_type
7148 			      == ORT_COMBINED_PARALLEL))
7149 			outer = outer->outer_context;
7150 		      else if (omp_check_private (outer, decl, false))
7151 			outer = NULL;
7152 		    }
7153 		  else if (outer->region_type != ORT_COMBINED_PARALLEL)
7154 		    outer = NULL;
7155 		  if (outer)
7156 		    {
7157 		      n = splay_tree_lookup (outer->variables,
7158 					     (splay_tree_key)decl);
7159 		      if (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
7160 			{
7161 			  omp_add_variable (outer, decl,
7162 					    GOVD_LASTPRIVATE | GOVD_SEEN);
7163 			  if (outer->outer_context)
7164 			    omp_notice_variable (outer->outer_context, decl,
7165 						 true);
7166 			}
7167 		    }
7168 		}
7169 	    }
7170 	  else
7171 	    {
7172 	      bool lastprivate
7173 		= (!has_decl_expr
7174 		   || !bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
7175 		  && !omp_no_lastprivate (gimplify_omp_ctxp);
7176 	      struct gimplify_omp_ctx *outer
7177 		= gimplify_omp_ctxp->outer_context;
7178 	      if (outer && lastprivate)
7179 		{
7180 		  if (outer->region_type == ORT_WORKSHARE
7181 		      && outer->combined_loop)
7182 		    {
7183 		      n = splay_tree_lookup (outer->variables,
7184 					     (splay_tree_key)decl);
7185 		      if (n != NULL && (n->value & GOVD_LOCAL) != 0)
7186 			{
7187 			  lastprivate = false;
7188 			  outer = NULL;
7189 			}
7190 		      else if (outer->outer_context
7191 			       && (outer->outer_context->region_type
7192 				   == ORT_COMBINED_PARALLEL))
7193 			outer = outer->outer_context;
7194 		      else if (omp_check_private (outer, decl, false))
7195 			outer = NULL;
7196 		    }
7197 		  else if (outer->region_type != ORT_COMBINED_PARALLEL)
7198 		    outer = NULL;
7199 		  if (outer)
7200 		    {
7201 		      n = splay_tree_lookup (outer->variables,
7202 					     (splay_tree_key)decl);
7203 		      if (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
7204 			{
7205 			  omp_add_variable (outer, decl,
7206 					    GOVD_LASTPRIVATE | GOVD_SEEN);
7207 			  if (outer->outer_context)
7208 			    omp_notice_variable (outer->outer_context, decl,
7209 						 true);
7210 			}
7211 		    }
7212 		}
7213 
7214 	      c = build_omp_clause (input_location,
7215 				    lastprivate ? OMP_CLAUSE_LASTPRIVATE
7216 						: OMP_CLAUSE_PRIVATE);
7217 	      OMP_CLAUSE_DECL (c) = decl;
7218 	      OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
7219 	      OMP_FOR_CLAUSES (for_stmt) = c;
7220 	      omp_add_variable (gimplify_omp_ctxp, decl,
7221 				(lastprivate ? GOVD_LASTPRIVATE : GOVD_PRIVATE)
7222 				| GOVD_EXPLICIT | GOVD_SEEN);
7223 	      c = NULL_TREE;
7224 	    }
7225 	}
7226       else if (omp_is_private (gimplify_omp_ctxp, decl, 0))
7227 	omp_notice_variable (gimplify_omp_ctxp, decl, true);
7228       else
7229 	omp_add_variable (gimplify_omp_ctxp, decl, GOVD_PRIVATE | GOVD_SEEN);
7230 
7231       /* If DECL is not a gimple register, create a temporary variable to act
7232 	 as an iteration counter.  This is valid, since DECL cannot be
7233 	 modified in the body of the loop.  Similarly for any iteration vars
7234 	 in simd with collapse > 1 where the iterator vars must be
7235 	 lastprivate.  */
7236       if (orig_for_stmt != for_stmt)
7237 	var = decl;
7238       else if (!is_gimple_reg (decl)
7239 	       || (simd && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) > 1))
7240 	{
7241 	  var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
7242 	  TREE_OPERAND (t, 0) = var;
7243 
7244 	  gimplify_seq_add_stmt (&for_body, gimple_build_assign (decl, var));
7245 
7246 	  if (simd && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
7247 	    {
7248 	      c2 = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
7249 	      OMP_CLAUSE_LINEAR_NO_COPYIN (c2) = 1;
7250 	      OMP_CLAUSE_LINEAR_NO_COPYOUT (c2) = 1;
7251 	      OMP_CLAUSE_DECL (c2) = var;
7252 	      OMP_CLAUSE_CHAIN (c2) = OMP_FOR_CLAUSES (for_stmt);
7253 	      OMP_FOR_CLAUSES (for_stmt) = c2;
7254 	      omp_add_variable (gimplify_omp_ctxp, var,
7255 				GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN);
7256 	      if (c == NULL_TREE)
7257 		{
7258 		  c = c2;
7259 		  c2 = NULL_TREE;
7260 		}
7261 	    }
7262 	  else
7263 	    omp_add_variable (gimplify_omp_ctxp, var,
7264 			      GOVD_PRIVATE | GOVD_SEEN);
7265 	}
7266       else
7267 	var = decl;
7268 
7269       tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
7270 			    is_gimple_val, fb_rvalue);
7271       ret = MIN (ret, tret);
7272       if (ret == GS_ERROR)
7273 	return ret;
7274 
7275       /* Handle OMP_FOR_COND.  */
7276       t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
7277       gcc_assert (COMPARISON_CLASS_P (t));
7278       gcc_assert (TREE_OPERAND (t, 0) == decl);
7279 
7280       tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
7281 			    is_gimple_val, fb_rvalue);
7282       ret = MIN (ret, tret);
7283 
7284       /* Handle OMP_FOR_INCR.  */
7285       t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
7286       switch (TREE_CODE (t))
7287 	{
7288 	case PREINCREMENT_EXPR:
7289 	case POSTINCREMENT_EXPR:
7290 	  {
7291 	    tree decl = TREE_OPERAND (t, 0);
7292 	    /* c_omp_for_incr_canonicalize_ptr() should have been
7293 	       called to massage things appropriately.  */
7294 	    gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
7295 
7296 	    if (orig_for_stmt != for_stmt)
7297 	      break;
7298 	    t = build_int_cst (TREE_TYPE (decl), 1);
7299 	    if (c)
7300 	      OMP_CLAUSE_LINEAR_STEP (c) = t;
7301 	    t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
7302 	    t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
7303 	    TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
7304 	    break;
7305 	  }
7306 
7307 	case PREDECREMENT_EXPR:
7308 	case POSTDECREMENT_EXPR:
7309 	  /* c_omp_for_incr_canonicalize_ptr() should have been
7310 	     called to massage things appropriately.  */
7311 	  gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
7312 	  if (orig_for_stmt != for_stmt)
7313 	    break;
7314 	  t = build_int_cst (TREE_TYPE (decl), -1);
7315 	  if (c)
7316 	    OMP_CLAUSE_LINEAR_STEP (c) = t;
7317 	  t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
7318 	  t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
7319 	  TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
7320 	  break;
7321 
7322 	case MODIFY_EXPR:
7323 	  gcc_assert (TREE_OPERAND (t, 0) == decl);
7324 	  TREE_OPERAND (t, 0) = var;
7325 
7326 	  t = TREE_OPERAND (t, 1);
7327 	  switch (TREE_CODE (t))
7328 	    {
7329 	    case PLUS_EXPR:
7330 	      if (TREE_OPERAND (t, 1) == decl)
7331 		{
7332 		  TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0);
7333 		  TREE_OPERAND (t, 0) = var;
7334 		  break;
7335 		}
7336 
7337 	      /* Fallthru.  */
7338 	    case MINUS_EXPR:
7339 	    case POINTER_PLUS_EXPR:
7340 	      gcc_assert (TREE_OPERAND (t, 0) == decl);
7341 	      TREE_OPERAND (t, 0) = var;
7342 	      break;
7343 	    default:
7344 	      gcc_unreachable ();
7345 	    }
7346 
7347 	  tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
7348 				is_gimple_val, fb_rvalue);
7349 	  ret = MIN (ret, tret);
7350 	  if (c)
7351 	    {
7352 	      tree step = TREE_OPERAND (t, 1);
7353 	      tree stept = TREE_TYPE (decl);
7354 	      if (POINTER_TYPE_P (stept))
7355 		stept = sizetype;
7356 	      step = fold_convert (stept, step);
7357 	      if (TREE_CODE (t) == MINUS_EXPR)
7358 		step = fold_build1 (NEGATE_EXPR, stept, step);
7359 	      OMP_CLAUSE_LINEAR_STEP (c) = step;
7360 	      if (step != TREE_OPERAND (t, 1))
7361 		{
7362 		  tret = gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c),
7363 					&for_pre_body, NULL,
7364 					is_gimple_val, fb_rvalue);
7365 		  ret = MIN (ret, tret);
7366 		}
7367 	    }
7368 	  break;
7369 
7370 	default:
7371 	  gcc_unreachable ();
7372 	}
7373 
7374       if (c2)
7375 	{
7376 	  gcc_assert (c);
7377 	  OMP_CLAUSE_LINEAR_STEP (c2) = OMP_CLAUSE_LINEAR_STEP (c);
7378 	}
7379 
7380       if ((var != decl || TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) > 1)
7381 	  && orig_for_stmt == for_stmt)
7382 	{
7383 	  for (c = OMP_FOR_CLAUSES (for_stmt); c ; c = OMP_CLAUSE_CHAIN (c))
7384 	    if (((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
7385 		  && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) == NULL)
7386 		 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
7387 		     && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)
7388 		     && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) == NULL))
7389 		&& OMP_CLAUSE_DECL (c) == decl)
7390 	      {
7391 		t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
7392 		gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
7393 		gcc_assert (TREE_OPERAND (t, 0) == var);
7394 		t = TREE_OPERAND (t, 1);
7395 		gcc_assert (TREE_CODE (t) == PLUS_EXPR
7396 			    || TREE_CODE (t) == MINUS_EXPR
7397 			    || TREE_CODE (t) == POINTER_PLUS_EXPR);
7398 		gcc_assert (TREE_OPERAND (t, 0) == var);
7399 		t = build2 (TREE_CODE (t), TREE_TYPE (decl), decl,
7400 			    TREE_OPERAND (t, 1));
7401 		gimple_seq *seq;
7402 		if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
7403 		  seq = &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c);
7404 		else
7405 		  seq = &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c);
7406 		gimplify_assign (decl, t, seq);
7407 	    }
7408 	}
7409     }
7410 
7411   BITMAP_FREE (has_decl_expr);
7412 
7413   gimplify_and_add (OMP_FOR_BODY (orig_for_stmt), &for_body);
7414 
7415   if (orig_for_stmt != for_stmt)
7416     for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
7417       {
7418 	t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
7419 	decl = TREE_OPERAND (t, 0);
7420 	var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
7421 	omp_add_variable (gimplify_omp_ctxp, var, GOVD_PRIVATE | GOVD_SEEN);
7422 	TREE_OPERAND (t, 0) = var;
7423 	t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
7424 	TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
7425 	TREE_OPERAND (TREE_OPERAND (t, 1), 0) = var;
7426       }
7427 
7428   gimplify_adjust_omp_clauses (pre_p, &OMP_FOR_CLAUSES (orig_for_stmt));
7429 
7430   int kind;
7431   switch (TREE_CODE (orig_for_stmt))
7432     {
7433     case OMP_FOR: kind = GF_OMP_FOR_KIND_FOR; break;
7434     case OMP_SIMD: kind = GF_OMP_FOR_KIND_SIMD; break;
7435     case CILK_SIMD: kind = GF_OMP_FOR_KIND_CILKSIMD; break;
7436     case CILK_FOR: kind = GF_OMP_FOR_KIND_CILKFOR; break;
7437     case OMP_DISTRIBUTE: kind = GF_OMP_FOR_KIND_DISTRIBUTE; break;
7438     case OACC_LOOP: kind = GF_OMP_FOR_KIND_OACC_LOOP; break;
7439     default:
7440       gcc_unreachable ();
7441     }
7442   gfor = gimple_build_omp_for (for_body, kind, OMP_FOR_CLAUSES (orig_for_stmt),
7443 			       TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)),
7444 			       for_pre_body);
7445   if (orig_for_stmt != for_stmt)
7446     gimple_omp_for_set_combined_p (gfor, true);
7447   if (gimplify_omp_ctxp
7448       && (gimplify_omp_ctxp->combined_loop
7449 	  || (gimplify_omp_ctxp->region_type == ORT_COMBINED_PARALLEL
7450 	      && gimplify_omp_ctxp->outer_context
7451 	      && gimplify_omp_ctxp->outer_context->combined_loop)))
7452     {
7453       gimple_omp_for_set_combined_into_p (gfor, true);
7454       if (gimplify_omp_ctxp->combined_loop)
7455 	gcc_assert (TREE_CODE (orig_for_stmt) == OMP_SIMD);
7456       else
7457 	gcc_assert (TREE_CODE (orig_for_stmt) == OMP_FOR);
7458     }
7459 
7460   for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
7461     {
7462       t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
7463       gimple_omp_for_set_index (gfor, i, TREE_OPERAND (t, 0));
7464       gimple_omp_for_set_initial (gfor, i, TREE_OPERAND (t, 1));
7465       t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
7466       gimple_omp_for_set_cond (gfor, i, TREE_CODE (t));
7467       gimple_omp_for_set_final (gfor, i, TREE_OPERAND (t, 1));
7468       t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
7469       gimple_omp_for_set_incr (gfor, i, TREE_OPERAND (t, 1));
7470     }
7471 
7472   gimplify_seq_add_stmt (pre_p, gfor);
7473   if (ret != GS_ALL_DONE)
7474     return GS_ERROR;
7475   *expr_p = NULL_TREE;
7476   return GS_ALL_DONE;
7477 }
7478 
7479 /* Gimplify the gross structure of several OMP constructs.  */
7480 
static void
gimplify_omp_workshare (tree *expr_p, gimple_seq *pre_p)
{
  tree expr = *expr_p;
  gimple stmt;
  gimple_seq body = NULL;
  enum omp_region_type ort;

  /* Pick the region type under which this construct's clauses are
     scanned; it controls the data-sharing rules applied below.  */
  switch (TREE_CODE (expr))
    {
    case OMP_SECTIONS:
    case OMP_SINGLE:
      ort = ORT_WORKSHARE;
      break;
    case OACC_KERNELS:
    case OACC_PARALLEL:
    case OMP_TARGET:
      ort = ORT_TARGET;
      break;
    case OACC_DATA:
    case OMP_TARGET_DATA:
      ort = ORT_TARGET_DATA;
      break;
    case OMP_TEAMS:
      ort = OMP_TEAMS_COMBINED (expr) ? ORT_COMBINED_TEAMS : ORT_TEAMS;
      break;
    default:
      gcc_unreachable ();
    }
  gimplify_scan_omp_clauses (&OMP_CLAUSES (expr), pre_p, ort);
  if (ort == ORT_TARGET || ort == ORT_TARGET_DATA)
    {
      /* Target regions get their own gimplification context so that
	 temporaries created while gimplifying the body are declared
	 inside the region.  If the body produced a GIMPLE_BIND, pop
	 the context into it; otherwise pop without a bind.  */
      push_gimplify_context ();
      gimple g = gimplify_and_return_first (OMP_BODY (expr), &body);
      if (gimple_code (g) == GIMPLE_BIND)
	pop_gimplify_context (g);
      else
	pop_gimplify_context (NULL);
      if (ort == ORT_TARGET_DATA)
	{
	  /* Data regions must run their "end data" runtime call on
	     every exit path, so wrap the body in a TRY_FINALLY whose
	     cleanup is that call.  */
	  enum built_in_function end_ix;
	  switch (TREE_CODE (expr))
	    {
	    case OACC_DATA:
	      end_ix = BUILT_IN_GOACC_DATA_END;
	      break;
	    case OMP_TARGET_DATA:
	      end_ix = BUILT_IN_GOMP_TARGET_END_DATA;
	      break;
	    default:
	      gcc_unreachable ();
	    }
	  tree fn = builtin_decl_explicit (end_ix);
	  g = gimple_build_call (fn, 0);
	  gimple_seq cleanup = NULL;
	  gimple_seq_add_stmt (&cleanup, g);
	  g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
	  body = NULL;
	  gimple_seq_add_stmt (&body, g);
	}
    }
  else
    gimplify_and_add (OMP_BODY (expr), &body);
  gimplify_adjust_omp_clauses (pre_p, &OMP_CLAUSES (expr));

  /* Build the GIMPLE statement matching the source-level construct,
     attaching the (now adjusted) clause chain.  */
  switch (TREE_CODE (expr))
    {
    case OACC_DATA:
      stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_DATA,
				      OMP_CLAUSES (expr));
      break;
    case OACC_KERNELS:
      stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_KERNELS,
				      OMP_CLAUSES (expr));
      break;
    case OACC_PARALLEL:
      stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_PARALLEL,
				      OMP_CLAUSES (expr));
      break;
    case OMP_SECTIONS:
      stmt = gimple_build_omp_sections (body, OMP_CLAUSES (expr));
      break;
    case OMP_SINGLE:
      stmt = gimple_build_omp_single (body, OMP_CLAUSES (expr));
      break;
    case OMP_TARGET:
      stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_REGION,
				      OMP_CLAUSES (expr));
      break;
    case OMP_TARGET_DATA:
      stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_DATA,
				      OMP_CLAUSES (expr));
      break;
    case OMP_TEAMS:
      stmt = gimple_build_omp_teams (body, OMP_CLAUSES (expr));
      break;
    default:
      gcc_unreachable ();
    }

  gimplify_seq_add_stmt (pre_p, stmt);
  *expr_p = NULL_TREE;
}
7584 
7585 /* Gimplify the gross structure of OpenACC enter/exit data, update, and OpenMP
7586    target update constructs.  */
7587 
7588 static void
7589 gimplify_omp_target_update (tree *expr_p, gimple_seq *pre_p)
7590 {
7591   tree expr = *expr_p, clauses;
7592   int kind;
7593   gomp_target *stmt;
7594 
7595   switch (TREE_CODE (expr))
7596     {
7597     case OACC_ENTER_DATA:
7598       clauses = OACC_ENTER_DATA_CLAUSES (expr);
7599       kind = GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA;
7600       break;
7601     case OACC_EXIT_DATA:
7602       clauses = OACC_EXIT_DATA_CLAUSES (expr);
7603       kind = GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA;
7604       break;
7605     case OACC_UPDATE:
7606       clauses = OACC_UPDATE_CLAUSES (expr);
7607       kind = GF_OMP_TARGET_KIND_OACC_UPDATE;
7608       break;
7609     case OMP_TARGET_UPDATE:
7610       clauses = OMP_TARGET_UPDATE_CLAUSES (expr);
7611       kind = GF_OMP_TARGET_KIND_UPDATE;
7612       break;
7613     default:
7614       gcc_unreachable ();
7615     }
7616   gimplify_scan_omp_clauses (&clauses, pre_p, ORT_WORKSHARE);
7617   gimplify_adjust_omp_clauses (pre_p, &clauses);
7618   stmt = gimple_build_omp_target (NULL, kind, clauses);
7619 
7620   gimplify_seq_add_stmt (pre_p, stmt);
7621   *expr_p = NULL_TREE;
7622 }
7623 
7624 /* A subroutine of gimplify_omp_atomic.  The front end is supposed to have
7625    stabilized the lhs of the atomic operation as *ADDR.  Return true if
7626    EXPR is this stabilized form.  */
7627 
7628 static bool
7629 goa_lhs_expr_p (tree expr, tree addr)
7630 {
7631   /* Also include casts to other type variants.  The C front end is fond
7632      of adding these for e.g. volatile variables.  This is like
7633      STRIP_TYPE_NOPS but includes the main variant lookup.  */
7634   STRIP_USELESS_TYPE_CONVERSION (expr);
7635 
7636   if (TREE_CODE (expr) == INDIRECT_REF)
7637     {
7638       expr = TREE_OPERAND (expr, 0);
7639       while (expr != addr
7640 	     && (CONVERT_EXPR_P (expr)
7641 		 || TREE_CODE (expr) == NON_LVALUE_EXPR)
7642 	     && TREE_CODE (expr) == TREE_CODE (addr)
7643 	     && types_compatible_p (TREE_TYPE (expr), TREE_TYPE (addr)))
7644 	{
7645 	  expr = TREE_OPERAND (expr, 0);
7646 	  addr = TREE_OPERAND (addr, 0);
7647 	}
7648       if (expr == addr)
7649 	return true;
7650       return (TREE_CODE (addr) == ADDR_EXPR
7651 	      && TREE_CODE (expr) == ADDR_EXPR
7652 	      && TREE_OPERAND (addr, 0) == TREE_OPERAND (expr, 0));
7653     }
7654   if (TREE_CODE (addr) == ADDR_EXPR && expr == TREE_OPERAND (addr, 0))
7655     return true;
7656   return false;
7657 }
7658 
7659 /* Walk *EXPR_P and replace appearances of *LHS_ADDR with LHS_VAR.  If an
7660    expression does not involve the lhs, evaluate it into a temporary.
7661    Return 1 if the lhs appeared as a subexpression, 0 if it did not,
7662    or -1 if an error was encountered.  */
7663 
static int
goa_stabilize_expr (tree *expr_p, gimple_seq *pre_p, tree lhs_addr,
		    tree lhs_var)
{
  tree expr = *expr_p;
  int saw_lhs;

  /* If the whole expression is the atomic lhs, replace it with the
     temporary holding the atomically-loaded value.  */
  if (goa_lhs_expr_p (expr, lhs_addr))
    {
      *expr_p = lhs_var;
      return 1;
    }
  /* Values that are already gimple need no stabilization.  */
  if (is_gimple_val (expr))
    return 0;

  /* Recurse into subexpressions looking for the lhs.  */
  saw_lhs = 0;
  switch (TREE_CODE_CLASS (TREE_CODE (expr)))
    {
    case tcc_binary:
    case tcc_comparison:
      saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p, lhs_addr,
				     lhs_var);
      /* Fallthru: binary codes also process operand 0 below.  */
    case tcc_unary:
      saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p, lhs_addr,
				     lhs_var);
      break;
    case tcc_expression:
      switch (TREE_CODE (expr))
	{
	case TRUTH_ANDIF_EXPR:
	case TRUTH_ORIF_EXPR:
	case TRUTH_AND_EXPR:
	case TRUTH_OR_EXPR:
	case TRUTH_XOR_EXPR:
	  saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
					 lhs_addr, lhs_var);
	  /* Fallthru: two-operand truth codes also process operand 0.  */
	case TRUTH_NOT_EXPR:
	  saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
					 lhs_addr, lhs_var);
	  break;
	case COMPOUND_EXPR:
	  /* Break out any preevaluations from cp_build_modify_expr.  */
	  for (; TREE_CODE (expr) == COMPOUND_EXPR;
	       expr = TREE_OPERAND (expr, 1))
	    gimplify_stmt (&TREE_OPERAND (expr, 0), pre_p);
	  *expr_p = expr;
	  return goa_stabilize_expr (expr_p, pre_p, lhs_addr, lhs_var);
	default:
	  break;
	}
      break;
    default:
      break;
    }

  /* The lhs did not appear anywhere: evaluate the expression into a
     temporary so it is computed outside the atomic region.  */
  if (saw_lhs == 0)
    {
      enum gimplify_status gs;
      gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_val, fb_rvalue);
      if (gs != GS_ALL_DONE)
	saw_lhs = -1;
    }

  return saw_lhs;
}
7729 
7730 /* Gimplify an OMP_ATOMIC statement.  */
7731 
static enum gimplify_status
gimplify_omp_atomic (tree *expr_p, gimple_seq *pre_p)
{
  tree addr = TREE_OPERAND (*expr_p, 0);
  /* An atomic read carries no rhs; all other forms compute one.  */
  tree rhs = TREE_CODE (*expr_p) == OMP_ATOMIC_READ
	     ? NULL : TREE_OPERAND (*expr_p, 1);
  tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
  tree tmp_load;
  gomp_atomic_load *loadstmt;
  gomp_atomic_store *storestmt;

  /* TMP_LOAD receives the value atomically loaded from *ADDR;
     occurrences of the lhs inside RHS are rewritten to use it.  */
  tmp_load = create_tmp_reg (type);
  if (rhs && goa_stabilize_expr (&rhs, pre_p, addr, tmp_load) < 0)
    return GS_ERROR;

  if (gimplify_expr (&addr, pre_p, NULL, is_gimple_val, fb_rvalue)
      != GS_ALL_DONE)
    return GS_ERROR;

  /* Emit the atomic load first, then gimplify RHS, which may now
     reference TMP_LOAD.  */
  loadstmt = gimple_build_omp_atomic_load (tmp_load, addr);
  gimplify_seq_add_stmt (pre_p, loadstmt);
  if (rhs && gimplify_expr (&rhs, pre_p, NULL, is_gimple_val, fb_rvalue)
      != GS_ALL_DONE)
    return GS_ERROR;

  /* A plain read stores back the loaded value unchanged.  */
  if (TREE_CODE (*expr_p) == OMP_ATOMIC_READ)
    rhs = tmp_load;
  storestmt = gimple_build_omp_atomic_store (rhs);
  gimplify_seq_add_stmt (pre_p, storestmt);
  if (OMP_ATOMIC_SEQ_CST (*expr_p))
    {
      gimple_omp_atomic_set_seq_cst (loadstmt);
      gimple_omp_atomic_set_seq_cst (storestmt);
    }
  /* Capture forms yield a value to the surrounding expression: the
     old value comes from the load, the new one from the store.  */
  switch (TREE_CODE (*expr_p))
    {
    case OMP_ATOMIC_READ:
    case OMP_ATOMIC_CAPTURE_OLD:
      *expr_p = tmp_load;
      gimple_omp_atomic_set_need_value (loadstmt);
      break;
    case OMP_ATOMIC_CAPTURE_NEW:
      *expr_p = rhs;
      gimple_omp_atomic_set_need_value (storestmt);
      break;
    default:
      *expr_p = NULL;
      break;
    }

  return GS_ALL_DONE;
}
7784 
7785 /* Gimplify a TRANSACTION_EXPR.  This involves gimplification of the
7786    body, and adding some EH bits.  */
7787 
7788 static enum gimplify_status
7789 gimplify_transaction (tree *expr_p, gimple_seq *pre_p)
7790 {
7791   tree expr = *expr_p, temp, tbody = TRANSACTION_EXPR_BODY (expr);
7792   gimple body_stmt;
7793   gtransaction *trans_stmt;
7794   gimple_seq body = NULL;
7795   int subcode = 0;
7796 
7797   /* Wrap the transaction body in a BIND_EXPR so we have a context
7798      where to put decls for OMP.  */
7799   if (TREE_CODE (tbody) != BIND_EXPR)
7800     {
7801       tree bind = build3 (BIND_EXPR, void_type_node, NULL, tbody, NULL);
7802       TREE_SIDE_EFFECTS (bind) = 1;
7803       SET_EXPR_LOCATION (bind, EXPR_LOCATION (tbody));
7804       TRANSACTION_EXPR_BODY (expr) = bind;
7805     }
7806 
7807   push_gimplify_context ();
7808   temp = voidify_wrapper_expr (*expr_p, NULL);
7809 
7810   body_stmt = gimplify_and_return_first (TRANSACTION_EXPR_BODY (expr), &body);
7811   pop_gimplify_context (body_stmt);
7812 
7813   trans_stmt = gimple_build_transaction (body, NULL);
7814   if (TRANSACTION_EXPR_OUTER (expr))
7815     subcode = GTMA_IS_OUTER;
7816   else if (TRANSACTION_EXPR_RELAXED (expr))
7817     subcode = GTMA_IS_RELAXED;
7818   gimple_transaction_set_subcode (trans_stmt, subcode);
7819 
7820   gimplify_seq_add_stmt (pre_p, trans_stmt);
7821 
7822   if (temp)
7823     {
7824       *expr_p = temp;
7825       return GS_OK;
7826     }
7827 
7828   *expr_p = NULL_TREE;
7829   return GS_ALL_DONE;
7830 }
7831 
7832 /* Convert the GENERIC expression tree *EXPR_P to GIMPLE.  If the
7833    expression produces a value to be used as an operand inside a GIMPLE
7834    statement, the value will be stored back in *EXPR_P.  This value will
7835    be a tree of class tcc_declaration, tcc_constant, tcc_reference or
7836    an SSA_NAME.  The corresponding sequence of GIMPLE statements is
7837    emitted in PRE_P and POST_P.
7838 
7839    Additionally, this process may overwrite parts of the input
7840    expression during gimplification.  Ideally, it should be
7841    possible to do non-destructive gimplification.
7842 
7843    EXPR_P points to the GENERIC expression to convert to GIMPLE.  If
7844       the expression needs to evaluate to a value to be used as
7845       an operand in a GIMPLE statement, this value will be stored in
7846       *EXPR_P on exit.  This happens when the caller specifies one
7847       of fb_lvalue or fb_rvalue fallback flags.
7848 
7849    PRE_P will contain the sequence of GIMPLE statements corresponding
7850        to the evaluation of EXPR and all the side-effects that must
7851        be executed before the main expression.  On exit, the last
7852        statement of PRE_P is the core statement being gimplified.  For
7853        instance, when gimplifying 'if (++a)' the last statement in
7854        PRE_P will be 'if (t.1)' where t.1 is the result of
7855        pre-incrementing 'a'.
7856 
7857    POST_P will contain the sequence of GIMPLE statements corresponding
7858        to the evaluation of all the side-effects that must be executed
7859        after the main expression.  If this is NULL, the post
7860        side-effects are stored at the end of PRE_P.
7861 
7862        The reason why the output is split in two is to handle post
7863        side-effects explicitly.  In some cases, an expression may have
7864        inner and outer post side-effects which need to be emitted in
7865        an order different from the one given by the recursive
7866        traversal.  For instance, for the expression (*p--)++ the post
7867        side-effects of '--' must actually occur *after* the post
7868        side-effects of '++'.  However, gimplification will first visit
7869        the inner expression, so if a separate POST sequence was not
7870        used, the resulting sequence would be:
7871 
7872        	    1	t.1 = *p
7873        	    2	p = p - 1
7874        	    3	t.2 = t.1 + 1
7875        	    4	*p = t.2
7876 
7877        However, the post-decrement operation in line #2 must not be
7878        evaluated until after the store to *p at line #4, so the
7879        correct sequence should be:
7880 
7881        	    1	t.1 = *p
7882        	    2	t.2 = t.1 + 1
7883        	    3	*p = t.2
7884        	    4	p = p - 1
7885 
7886        So, by specifying a separate post queue, it is possible
7887        to emit the post side-effects in the correct order.
7888        If POST_P is NULL, an internal queue will be used.  Before
7889        returning to the caller, the sequence POST_P is appended to
7890        the main output sequence PRE_P.
7891 
7892    GIMPLE_TEST_F points to a function that takes a tree T and
7893        returns nonzero if T is in the GIMPLE form requested by the
7894        caller.  The GIMPLE predicates are in gimple.c.
7895 
7896    FALLBACK tells the function what sort of a temporary we want if
7897        gimplification cannot produce an expression that complies with
7898        GIMPLE_TEST_F.
7899 
7900        fb_none means that no temporary should be generated
7901        fb_rvalue means that an rvalue is OK to generate
7902        fb_lvalue means that an lvalue is OK to generate
7903        fb_either means that either is OK, but an lvalue is preferable.
7904        fb_mayfail means that gimplification may fail (in which case
7905        GS_ERROR will be returned)
7906 
7907    The return value is either GS_ERROR or GS_ALL_DONE, since this
7908    function iterates until EXPR is completely gimplified or an error
7909    occurs.  */
7910 
7911 enum gimplify_status
7912 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
7913 	       bool (*gimple_test_f) (tree), fallback_t fallback)
7914 {
7915   tree tmp;
7916   gimple_seq internal_pre = NULL;
7917   gimple_seq internal_post = NULL;
7918   tree save_expr;
7919   bool is_statement;
7920   location_t saved_location;
7921   enum gimplify_status ret;
7922   gimple_stmt_iterator pre_last_gsi, post_last_gsi;
7923 
7924   save_expr = *expr_p;
7925   if (save_expr == NULL_TREE)
7926     return GS_ALL_DONE;
7927 
7928   /* If we are gimplifying a top-level statement, PRE_P must be valid.  */
7929   is_statement = gimple_test_f == is_gimple_stmt;
7930   if (is_statement)
7931     gcc_assert (pre_p);
7932 
7933   /* Consistency checks.  */
7934   if (gimple_test_f == is_gimple_reg)
7935     gcc_assert (fallback & (fb_rvalue | fb_lvalue));
7936   else if (gimple_test_f == is_gimple_val
7937            || gimple_test_f == is_gimple_call_addr
7938            || gimple_test_f == is_gimple_condexpr
7939            || gimple_test_f == is_gimple_mem_rhs
7940            || gimple_test_f == is_gimple_mem_rhs_or_call
7941            || gimple_test_f == is_gimple_reg_rhs
7942            || gimple_test_f == is_gimple_reg_rhs_or_call
7943            || gimple_test_f == is_gimple_asm_val
7944 	   || gimple_test_f == is_gimple_mem_ref_addr)
7945     gcc_assert (fallback & fb_rvalue);
7946   else if (gimple_test_f == is_gimple_min_lval
7947 	   || gimple_test_f == is_gimple_lvalue)
7948     gcc_assert (fallback & fb_lvalue);
7949   else if (gimple_test_f == is_gimple_addressable)
7950     gcc_assert (fallback & fb_either);
7951   else if (gimple_test_f == is_gimple_stmt)
7952     gcc_assert (fallback == fb_none);
7953   else
7954     {
7955       /* We should have recognized the GIMPLE_TEST_F predicate to
7956 	 know what kind of fallback to use in case a temporary is
7957 	 needed to hold the value or address of *EXPR_P.  */
7958       gcc_unreachable ();
7959     }
7960 
7961   /* We used to check the predicate here and return immediately if it
7962      succeeds.  This is wrong; the design is for gimplification to be
7963      idempotent, and for the predicates to only test for valid forms, not
7964      whether they are fully simplified.  */
7965   if (pre_p == NULL)
7966     pre_p = &internal_pre;
7967 
7968   if (post_p == NULL)
7969     post_p = &internal_post;
7970 
7971   /* Remember the last statements added to PRE_P and POST_P.  Every
7972      new statement added by the gimplification helpers needs to be
7973      annotated with location information.  To centralize the
7974      responsibility, we remember the last statement that had been
7975      added to both queues before gimplifying *EXPR_P.  If
7976      gimplification produces new statements in PRE_P and POST_P, those
7977      statements will be annotated with the same location information
7978      as *EXPR_P.  */
7979   pre_last_gsi = gsi_last (*pre_p);
7980   post_last_gsi = gsi_last (*post_p);
7981 
7982   saved_location = input_location;
7983   if (save_expr != error_mark_node
7984       && EXPR_HAS_LOCATION (*expr_p))
7985     input_location = EXPR_LOCATION (*expr_p);
7986 
7987   /* Loop over the specific gimplifiers until the toplevel node
7988      remains the same.  */
7989   do
7990     {
7991       /* Strip away as many useless type conversions as possible
7992 	 at the toplevel.  */
7993       STRIP_USELESS_TYPE_CONVERSION (*expr_p);
7994 
7995       /* Remember the expr.  */
7996       save_expr = *expr_p;
7997 
7998       /* Die, die, die, my darling.  */
7999       if (save_expr == error_mark_node
8000 	  || (TREE_TYPE (save_expr)
8001 	      && TREE_TYPE (save_expr) == error_mark_node))
8002 	{
8003 	  ret = GS_ERROR;
8004 	  break;
8005 	}
8006 
8007       /* Do any language-specific gimplification.  */
8008       ret = ((enum gimplify_status)
8009 	     lang_hooks.gimplify_expr (expr_p, pre_p, post_p));
8010       if (ret == GS_OK)
8011 	{
8012 	  if (*expr_p == NULL_TREE)
8013 	    break;
8014 	  if (*expr_p != save_expr)
8015 	    continue;
8016 	}
8017       else if (ret != GS_UNHANDLED)
8018 	break;
8019 
8020       /* Make sure that all the cases set 'ret' appropriately.  */
8021       ret = GS_UNHANDLED;
8022       switch (TREE_CODE (*expr_p))
8023 	{
8024 	  /* First deal with the special cases.  */
8025 
8026 	case POSTINCREMENT_EXPR:
8027 	case POSTDECREMENT_EXPR:
8028 	case PREINCREMENT_EXPR:
8029 	case PREDECREMENT_EXPR:
8030 	  ret = gimplify_self_mod_expr (expr_p, pre_p, post_p,
8031 					fallback != fb_none,
8032 					TREE_TYPE (*expr_p));
8033 	  break;
8034 
8035 	case VIEW_CONVERT_EXPR:
8036 	  if (is_gimple_reg_type (TREE_TYPE (*expr_p))
8037 	      && is_gimple_reg_type (TREE_TYPE (TREE_OPERAND (*expr_p, 0))))
8038 	    {
8039 	      ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
8040 				   post_p, is_gimple_val, fb_rvalue);
8041 	      recalculate_side_effects (*expr_p);
8042 	      break;
8043 	    }
8044 	  /* Fallthru.  */
8045 
8046 	case ARRAY_REF:
8047 	case ARRAY_RANGE_REF:
8048 	case REALPART_EXPR:
8049 	case IMAGPART_EXPR:
8050 	case COMPONENT_REF:
8051 	  ret = gimplify_compound_lval (expr_p, pre_p, post_p,
8052 					fallback ? fallback : fb_rvalue);
8053 	  break;
8054 
8055 	case COND_EXPR:
8056 	  ret = gimplify_cond_expr (expr_p, pre_p, fallback);
8057 
8058 	  /* C99 code may assign to an array in a structure value of a
8059 	     conditional expression, and this has undefined behavior
8060 	     only on execution, so create a temporary if an lvalue is
8061 	     required.  */
8062 	  if (fallback == fb_lvalue)
8063 	    {
8064 	      *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
8065 	      mark_addressable (*expr_p);
8066 	      ret = GS_OK;
8067 	    }
8068 	  break;
8069 
8070 	case CALL_EXPR:
8071 	  ret = gimplify_call_expr (expr_p, pre_p, fallback != fb_none);
8072 
8073 	  /* C99 code may assign to an array in a structure returned
8074 	     from a function, and this has undefined behavior only on
8075 	     execution, so create a temporary if an lvalue is
8076 	     required.  */
8077 	  if (fallback == fb_lvalue)
8078 	    {
8079 	      *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
8080 	      mark_addressable (*expr_p);
8081 	      ret = GS_OK;
8082 	    }
8083 	  break;
8084 
8085 	case TREE_LIST:
8086 	  gcc_unreachable ();
8087 
8088 	case COMPOUND_EXPR:
8089 	  ret = gimplify_compound_expr (expr_p, pre_p, fallback != fb_none);
8090 	  break;
8091 
8092 	case COMPOUND_LITERAL_EXPR:
8093 	  ret = gimplify_compound_literal_expr (expr_p, pre_p,
8094 						gimple_test_f, fallback);
8095 	  break;
8096 
8097 	case MODIFY_EXPR:
8098 	case INIT_EXPR:
8099 	  ret = gimplify_modify_expr (expr_p, pre_p, post_p,
8100 				      fallback != fb_none);
8101 	  break;
8102 
8103 	case TRUTH_ANDIF_EXPR:
8104 	case TRUTH_ORIF_EXPR:
8105 	  {
8106 	    /* Preserve the original type of the expression and the
8107 	       source location of the outer expression.  */
8108 	    tree org_type = TREE_TYPE (*expr_p);
8109 	    *expr_p = gimple_boolify (*expr_p);
8110 	    *expr_p = build3_loc (input_location, COND_EXPR,
8111 				  org_type, *expr_p,
8112 				  fold_convert_loc
8113 				    (input_location,
8114 				     org_type, boolean_true_node),
8115 				  fold_convert_loc
8116 				    (input_location,
8117 				     org_type, boolean_false_node));
8118 	    ret = GS_OK;
8119 	    break;
8120 	  }
8121 
8122 	case TRUTH_NOT_EXPR:
8123 	  {
8124 	    tree type = TREE_TYPE (*expr_p);
8125 	    /* The parsers are careful to generate TRUTH_NOT_EXPR
8126 	       only with operands that are always zero or one.
8127 	       We do not fold here but handle the only interesting case
8128 	       manually, as fold may re-introduce the TRUTH_NOT_EXPR.  */
8129 	    *expr_p = gimple_boolify (*expr_p);
8130 	    if (TYPE_PRECISION (TREE_TYPE (*expr_p)) == 1)
8131 	      *expr_p = build1_loc (input_location, BIT_NOT_EXPR,
8132 				    TREE_TYPE (*expr_p),
8133 				    TREE_OPERAND (*expr_p, 0));
8134 	    else
8135 	      *expr_p = build2_loc (input_location, BIT_XOR_EXPR,
8136 				    TREE_TYPE (*expr_p),
8137 				    TREE_OPERAND (*expr_p, 0),
8138 				    build_int_cst (TREE_TYPE (*expr_p), 1));
8139 	    if (!useless_type_conversion_p (type, TREE_TYPE (*expr_p)))
8140 	      *expr_p = fold_convert_loc (input_location, type, *expr_p);
8141 	    ret = GS_OK;
8142 	    break;
8143 	  }
8144 
8145 	case ADDR_EXPR:
8146 	  ret = gimplify_addr_expr (expr_p, pre_p, post_p);
8147 	  break;
8148 
8149 	case ANNOTATE_EXPR:
8150 	  {
8151 	    tree cond = TREE_OPERAND (*expr_p, 0);
8152 	    tree kind = TREE_OPERAND (*expr_p, 1);
8153 	    tree type = TREE_TYPE (cond);
8154 	    if (!INTEGRAL_TYPE_P (type))
8155 	      {
8156 		*expr_p = cond;
8157 		ret = GS_OK;
8158 		break;
8159 	      }
8160 	    tree tmp = create_tmp_var (type);
8161 	    gimplify_arg (&cond, pre_p, EXPR_LOCATION (*expr_p));
8162 	    gcall *call
8163 	      = gimple_build_call_internal (IFN_ANNOTATE, 2, cond, kind);
8164 	    gimple_call_set_lhs (call, tmp);
8165 	    gimplify_seq_add_stmt (pre_p, call);
8166 	    *expr_p = tmp;
8167 	    ret = GS_ALL_DONE;
8168 	    break;
8169 	  }
8170 
8171 	case VA_ARG_EXPR:
8172 	  ret = gimplify_va_arg_expr (expr_p, pre_p, post_p);
8173 	  break;
8174 
8175 	CASE_CONVERT:
8176 	  if (IS_EMPTY_STMT (*expr_p))
8177 	    {
8178 	      ret = GS_ALL_DONE;
8179 	      break;
8180 	    }
8181 
8182 	  if (VOID_TYPE_P (TREE_TYPE (*expr_p))
8183 	      || fallback == fb_none)
8184 	    {
8185 	      /* Just strip a conversion to void (or in void context) and
8186 		 try again.  */
8187 	      *expr_p = TREE_OPERAND (*expr_p, 0);
8188 	      ret = GS_OK;
8189 	      break;
8190 	    }
8191 
8192 	  ret = gimplify_conversion (expr_p);
8193 	  if (ret == GS_ERROR)
8194 	    break;
8195 	  if (*expr_p != save_expr)
8196 	    break;
8197 	  /* FALLTHRU */
8198 
8199 	case FIX_TRUNC_EXPR:
8200 	  /* unary_expr: ... | '(' cast ')' val | ...  */
8201 	  ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
8202 			       is_gimple_val, fb_rvalue);
8203 	  recalculate_side_effects (*expr_p);
8204 	  break;
8205 
8206 	case INDIRECT_REF:
8207 	  {
8208 	    bool volatilep = TREE_THIS_VOLATILE (*expr_p);
8209 	    bool notrap = TREE_THIS_NOTRAP (*expr_p);
8210 	    tree saved_ptr_type = TREE_TYPE (TREE_OPERAND (*expr_p, 0));
8211 
8212 	    *expr_p = fold_indirect_ref_loc (input_location, *expr_p);
8213 	    if (*expr_p != save_expr)
8214 	      {
8215 		ret = GS_OK;
8216 		break;
8217 	      }
8218 
8219 	    ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
8220 				 is_gimple_reg, fb_rvalue);
8221 	    if (ret == GS_ERROR)
8222 	      break;
8223 
8224 	    recalculate_side_effects (*expr_p);
8225 	    *expr_p = fold_build2_loc (input_location, MEM_REF,
8226 				       TREE_TYPE (*expr_p),
8227 				       TREE_OPERAND (*expr_p, 0),
8228 				       build_int_cst (saved_ptr_type, 0));
8229 	    TREE_THIS_VOLATILE (*expr_p) = volatilep;
8230 	    TREE_THIS_NOTRAP (*expr_p) = notrap;
8231 	    ret = GS_OK;
8232 	    break;
8233 	  }
8234 
8235 	/* We arrive here through the various re-gimplification paths.  */
8236 	case MEM_REF:
8237 	  /* First try re-folding the whole thing.  */
8238 	  tmp = fold_binary (MEM_REF, TREE_TYPE (*expr_p),
8239 			     TREE_OPERAND (*expr_p, 0),
8240 			     TREE_OPERAND (*expr_p, 1));
8241 	  if (tmp)
8242 	    {
8243 	      *expr_p = tmp;
8244 	      recalculate_side_effects (*expr_p);
8245 	      ret = GS_OK;
8246 	      break;
8247 	    }
8248 	  /* Avoid re-gimplifying the address operand if it is already
8249 	     in suitable form.  Re-gimplifying would mark the address
8250 	     operand addressable.  Always gimplify when not in SSA form
8251 	     as we still may have to gimplify decls with value-exprs.  */
8252 	  if (!gimplify_ctxp || !gimplify_ctxp->into_ssa
8253 	      || !is_gimple_mem_ref_addr (TREE_OPERAND (*expr_p, 0)))
8254 	    {
8255 	      ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
8256 				   is_gimple_mem_ref_addr, fb_rvalue);
8257 	      if (ret == GS_ERROR)
8258 		break;
8259 	    }
8260 	  recalculate_side_effects (*expr_p);
8261 	  ret = GS_ALL_DONE;
8262 	  break;
8263 
8264 	/* Constants need not be gimplified.  */
8265 	case INTEGER_CST:
8266 	case REAL_CST:
8267 	case FIXED_CST:
8268 	case STRING_CST:
8269 	case COMPLEX_CST:
8270 	case VECTOR_CST:
8271 	  /* Drop the overflow flag on constants, we do not want
8272 	     that in the GIMPLE IL.  */
8273 	  if (TREE_OVERFLOW_P (*expr_p))
8274 	    *expr_p = drop_tree_overflow (*expr_p);
8275 	  ret = GS_ALL_DONE;
8276 	  break;
8277 
8278 	case CONST_DECL:
8279 	  /* If we require an lvalue, such as for ADDR_EXPR, retain the
8280 	     CONST_DECL node.  Otherwise the decl is replaceable by its
8281 	     value.  */
8282 	  /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either.  */
8283 	  if (fallback & fb_lvalue)
8284 	    ret = GS_ALL_DONE;
8285 	  else
8286 	    {
8287 	      *expr_p = DECL_INITIAL (*expr_p);
8288 	      ret = GS_OK;
8289 	    }
8290 	  break;
8291 
8292 	case DECL_EXPR:
8293 	  ret = gimplify_decl_expr (expr_p, pre_p);
8294 	  break;
8295 
8296 	case BIND_EXPR:
8297 	  ret = gimplify_bind_expr (expr_p, pre_p);
8298 	  break;
8299 
8300 	case LOOP_EXPR:
8301 	  ret = gimplify_loop_expr (expr_p, pre_p);
8302 	  break;
8303 
8304 	case SWITCH_EXPR:
8305 	  ret = gimplify_switch_expr (expr_p, pre_p);
8306 	  break;
8307 
8308 	case EXIT_EXPR:
8309 	  ret = gimplify_exit_expr (expr_p);
8310 	  break;
8311 
8312 	case GOTO_EXPR:
8313 	  /* If the target is not LABEL, then it is a computed jump
8314 	     and the target needs to be gimplified.  */
8315 	  if (TREE_CODE (GOTO_DESTINATION (*expr_p)) != LABEL_DECL)
8316 	    {
8317 	      ret = gimplify_expr (&GOTO_DESTINATION (*expr_p), pre_p,
8318 				   NULL, is_gimple_val, fb_rvalue);
8319 	      if (ret == GS_ERROR)
8320 		break;
8321 	    }
8322 	  gimplify_seq_add_stmt (pre_p,
8323 			  gimple_build_goto (GOTO_DESTINATION (*expr_p)));
8324 	  ret = GS_ALL_DONE;
8325 	  break;
8326 
8327 	case PREDICT_EXPR:
8328 	  gimplify_seq_add_stmt (pre_p,
8329 			gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p),
8330 					      PREDICT_EXPR_OUTCOME (*expr_p)));
8331 	  ret = GS_ALL_DONE;
8332 	  break;
8333 
8334 	case LABEL_EXPR:
8335 	  ret = GS_ALL_DONE;
8336 	  gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p))
8337 		      == current_function_decl);
8338 	  gimplify_seq_add_stmt (pre_p,
8339 			  gimple_build_label (LABEL_EXPR_LABEL (*expr_p)));
8340 	  break;
8341 
8342 	case CASE_LABEL_EXPR:
8343 	  ret = gimplify_case_label_expr (expr_p, pre_p);
8344 	  break;
8345 
8346 	case RETURN_EXPR:
8347 	  ret = gimplify_return_expr (*expr_p, pre_p);
8348 	  break;
8349 
8350 	case CONSTRUCTOR:
8351 	  /* Don't reduce this in place; let gimplify_init_constructor work its
8352 	     magic.  But if we're just elaborating this for side effects, just
8353 	     gimplify any element that has side-effects.  */
8354 	  if (fallback == fb_none)
8355 	    {
8356 	      unsigned HOST_WIDE_INT ix;
8357 	      tree val;
8358 	      tree temp = NULL_TREE;
8359 	      FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (*expr_p), ix, val)
8360 		if (TREE_SIDE_EFFECTS (val))
8361 		  append_to_statement_list (val, &temp);
8362 
8363 	      *expr_p = temp;
8364 	      ret = temp ? GS_OK : GS_ALL_DONE;
8365 	    }
8366 	  /* C99 code may assign to an array in a constructed
8367 	     structure or union, and this has undefined behavior only
8368 	     on execution, so create a temporary if an lvalue is
8369 	     required.  */
8370 	  else if (fallback == fb_lvalue)
8371 	    {
8372 	      *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
8373 	      mark_addressable (*expr_p);
8374 	      ret = GS_OK;
8375 	    }
8376 	  else
8377 	    ret = GS_ALL_DONE;
8378 	  break;
8379 
8380 	  /* The following are special cases that are not handled by the
8381 	     original GIMPLE grammar.  */
8382 
8383 	  /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
8384 	     eliminated.  */
8385 	case SAVE_EXPR:
8386 	  ret = gimplify_save_expr (expr_p, pre_p, post_p);
8387 	  break;
8388 
8389 	case BIT_FIELD_REF:
8390 	  ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
8391 			       post_p, is_gimple_lvalue, fb_either);
8392 	  recalculate_side_effects (*expr_p);
8393 	  break;
8394 
8395 	case TARGET_MEM_REF:
8396 	  {
8397 	    enum gimplify_status r0 = GS_ALL_DONE, r1 = GS_ALL_DONE;
8398 
8399 	    if (TMR_BASE (*expr_p))
8400 	      r0 = gimplify_expr (&TMR_BASE (*expr_p), pre_p,
8401 				  post_p, is_gimple_mem_ref_addr, fb_either);
8402 	    if (TMR_INDEX (*expr_p))
8403 	      r1 = gimplify_expr (&TMR_INDEX (*expr_p), pre_p,
8404 				  post_p, is_gimple_val, fb_rvalue);
8405 	    if (TMR_INDEX2 (*expr_p))
8406 	      r1 = gimplify_expr (&TMR_INDEX2 (*expr_p), pre_p,
8407 				  post_p, is_gimple_val, fb_rvalue);
8408 	    /* TMR_STEP and TMR_OFFSET are always integer constants.  */
8409 	    ret = MIN (r0, r1);
8410 	  }
8411 	  break;
8412 
8413 	case NON_LVALUE_EXPR:
8414 	  /* This should have been stripped above.  */
8415 	  gcc_unreachable ();
8416 
8417 	case ASM_EXPR:
8418 	  ret = gimplify_asm_expr (expr_p, pre_p, post_p);
8419 	  break;
8420 
8421 	case TRY_FINALLY_EXPR:
8422 	case TRY_CATCH_EXPR:
8423 	  {
8424 	    gimple_seq eval, cleanup;
8425 	    gtry *try_;
8426 
8427 	    /* Calls to destructors are generated automatically in FINALLY/CATCH
8428 	       block. They should have location as UNKNOWN_LOCATION. However,
8429 	       gimplify_call_expr will reset these call stmts to input_location
8430 	       if it finds stmt's location is unknown. To prevent resetting for
8431 	       destructors, we set the input_location to unknown.
8432 	       Note that this only affects the destructor calls in FINALLY/CATCH
8433 	       block, and will automatically reset to its original value by the
8434 	       end of gimplify_expr.  */
8435 	    input_location = UNKNOWN_LOCATION;
8436 	    eval = cleanup = NULL;
8437 	    gimplify_and_add (TREE_OPERAND (*expr_p, 0), &eval);
8438 	    gimplify_and_add (TREE_OPERAND (*expr_p, 1), &cleanup);
8439 	    /* Don't create bogus GIMPLE_TRY with empty cleanup.  */
8440 	    if (gimple_seq_empty_p (cleanup))
8441 	      {
8442 		gimple_seq_add_seq (pre_p, eval);
8443 		ret = GS_ALL_DONE;
8444 		break;
8445 	      }
8446 	    try_ = gimple_build_try (eval, cleanup,
8447 				     TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
8448 				     ? GIMPLE_TRY_FINALLY
8449 				     : GIMPLE_TRY_CATCH);
8450 	    if (EXPR_HAS_LOCATION (save_expr))
8451 	      gimple_set_location (try_, EXPR_LOCATION (save_expr));
8452 	    else if (LOCATION_LOCUS (saved_location) != UNKNOWN_LOCATION)
8453 	      gimple_set_location (try_, saved_location);
8454 	    if (TREE_CODE (*expr_p) == TRY_CATCH_EXPR)
8455 	      gimple_try_set_catch_is_cleanup (try_,
8456 					       TRY_CATCH_IS_CLEANUP (*expr_p));
8457 	    gimplify_seq_add_stmt (pre_p, try_);
8458 	    ret = GS_ALL_DONE;
8459 	    break;
8460 	  }
8461 
8462 	case CLEANUP_POINT_EXPR:
8463 	  ret = gimplify_cleanup_point_expr (expr_p, pre_p);
8464 	  break;
8465 
8466 	case TARGET_EXPR:
8467 	  ret = gimplify_target_expr (expr_p, pre_p, post_p);
8468 	  break;
8469 
8470 	case CATCH_EXPR:
8471 	  {
8472 	    gimple c;
8473 	    gimple_seq handler = NULL;
8474 	    gimplify_and_add (CATCH_BODY (*expr_p), &handler);
8475 	    c = gimple_build_catch (CATCH_TYPES (*expr_p), handler);
8476 	    gimplify_seq_add_stmt (pre_p, c);
8477 	    ret = GS_ALL_DONE;
8478 	    break;
8479 	  }
8480 
8481 	case EH_FILTER_EXPR:
8482 	  {
8483 	    gimple ehf;
8484 	    gimple_seq failure = NULL;
8485 
8486 	    gimplify_and_add (EH_FILTER_FAILURE (*expr_p), &failure);
8487 	    ehf = gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p), failure);
8488 	    gimple_set_no_warning (ehf, TREE_NO_WARNING (*expr_p));
8489 	    gimplify_seq_add_stmt (pre_p, ehf);
8490 	    ret = GS_ALL_DONE;
8491 	    break;
8492 	  }
8493 
8494 	case OBJ_TYPE_REF:
8495 	  {
8496 	    enum gimplify_status r0, r1;
8497 	    r0 = gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p), pre_p,
8498 				post_p, is_gimple_val, fb_rvalue);
8499 	    r1 = gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p), pre_p,
8500 				post_p, is_gimple_val, fb_rvalue);
8501 	    TREE_SIDE_EFFECTS (*expr_p) = 0;
8502 	    ret = MIN (r0, r1);
8503 	  }
8504 	  break;
8505 
8506 	case LABEL_DECL:
8507 	  /* We get here when taking the address of a label.  We mark
8508 	     the label as "forced"; meaning it can never be removed and
8509 	     it is a potential target for any computed goto.  */
8510 	  FORCED_LABEL (*expr_p) = 1;
8511 	  ret = GS_ALL_DONE;
8512 	  break;
8513 
8514 	case STATEMENT_LIST:
8515 	  ret = gimplify_statement_list (expr_p, pre_p);
8516 	  break;
8517 
8518 	case WITH_SIZE_EXPR:
8519 	  {
8520 	    gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
8521 			   post_p == &internal_post ? NULL : post_p,
8522 			   gimple_test_f, fallback);
8523 	    gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
8524 			   is_gimple_val, fb_rvalue);
8525 	    ret = GS_ALL_DONE;
8526 	  }
8527 	  break;
8528 
8529 	case VAR_DECL:
8530 	case PARM_DECL:
8531 	  ret = gimplify_var_or_parm_decl (expr_p);
8532 	  break;
8533 
8534 	case RESULT_DECL:
8535 	  /* When within an OMP context, notice uses of variables.  */
8536 	  if (gimplify_omp_ctxp)
8537 	    omp_notice_variable (gimplify_omp_ctxp, *expr_p, true);
8538 	  ret = GS_ALL_DONE;
8539 	  break;
8540 
8541 	case SSA_NAME:
8542 	  /* Allow callbacks into the gimplifier during optimization.  */
8543 	  ret = GS_ALL_DONE;
8544 	  break;
8545 
8546 	case OMP_PARALLEL:
8547 	  gimplify_omp_parallel (expr_p, pre_p);
8548 	  ret = GS_ALL_DONE;
8549 	  break;
8550 
8551 	case OMP_TASK:
8552 	  gimplify_omp_task (expr_p, pre_p);
8553 	  ret = GS_ALL_DONE;
8554 	  break;
8555 
8556 	case OMP_FOR:
8557 	case OMP_SIMD:
8558 	case CILK_SIMD:
8559 	case CILK_FOR:
8560 	case OMP_DISTRIBUTE:
8561 	case OACC_LOOP:
8562 	  ret = gimplify_omp_for (expr_p, pre_p);
8563 	  break;
8564 
8565 	case OACC_CACHE:
8566 	  gimplify_oacc_cache (expr_p, pre_p);
8567 	  ret = GS_ALL_DONE;
8568 	  break;
8569 
8570 	case OACC_HOST_DATA:
8571 	case OACC_DECLARE:
8572 	  sorry ("directive not yet implemented");
8573 	  ret = GS_ALL_DONE;
8574 	  break;
8575 
8576 	case OACC_KERNELS:
8577 	  if (OACC_KERNELS_COMBINED (*expr_p))
8578 	    sorry ("directive not yet implemented");
8579 	  else
8580 	    gimplify_omp_workshare (expr_p, pre_p);
8581 	  ret = GS_ALL_DONE;
8582 	  break;
8583 
8584 	case OACC_PARALLEL:
8585 	  if (OACC_PARALLEL_COMBINED (*expr_p))
8586 	    sorry ("directive not yet implemented");
8587 	  else
8588 	    gimplify_omp_workshare (expr_p, pre_p);
8589 	  ret = GS_ALL_DONE;
8590 	  break;
8591 
8592 	case OACC_DATA:
8593 	case OMP_SECTIONS:
8594 	case OMP_SINGLE:
8595 	case OMP_TARGET:
8596 	case OMP_TARGET_DATA:
8597 	case OMP_TEAMS:
8598 	  gimplify_omp_workshare (expr_p, pre_p);
8599 	  ret = GS_ALL_DONE;
8600 	  break;
8601 
8602 	case OACC_ENTER_DATA:
8603 	case OACC_EXIT_DATA:
8604 	case OACC_UPDATE:
8605 	case OMP_TARGET_UPDATE:
8606 	  gimplify_omp_target_update (expr_p, pre_p);
8607 	  ret = GS_ALL_DONE;
8608 	  break;
8609 
8610 	case OMP_SECTION:
8611 	case OMP_MASTER:
8612 	case OMP_TASKGROUP:
8613 	case OMP_ORDERED:
8614 	case OMP_CRITICAL:
8615 	  {
8616 	    gimple_seq body = NULL;
8617 	    gimple g;
8618 
8619 	    gimplify_and_add (OMP_BODY (*expr_p), &body);
8620 	    switch (TREE_CODE (*expr_p))
8621 	      {
8622 	      case OMP_SECTION:
8623 	        g = gimple_build_omp_section (body);
8624 	        break;
8625 	      case OMP_MASTER:
8626 	        g = gimple_build_omp_master (body);
8627 		break;
8628 	      case OMP_TASKGROUP:
8629 		{
8630 		  gimple_seq cleanup = NULL;
8631 		  tree fn
8632 		    = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_END);
8633 		  g = gimple_build_call (fn, 0);
8634 		  gimple_seq_add_stmt (&cleanup, g);
8635 		  g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
8636 		  body = NULL;
8637 		  gimple_seq_add_stmt (&body, g);
8638 		  g = gimple_build_omp_taskgroup (body);
8639 		}
8640 		break;
8641 	      case OMP_ORDERED:
8642 		g = gimple_build_omp_ordered (body);
8643 		break;
8644 	      case OMP_CRITICAL:
8645 		g = gimple_build_omp_critical (body,
8646 		    			       OMP_CRITICAL_NAME (*expr_p));
8647 		break;
8648 	      default:
8649 		gcc_unreachable ();
8650 	      }
8651 	    gimplify_seq_add_stmt (pre_p, g);
8652 	    ret = GS_ALL_DONE;
8653 	    break;
8654 	  }
8655 
8656 	case OMP_ATOMIC:
8657 	case OMP_ATOMIC_READ:
8658 	case OMP_ATOMIC_CAPTURE_OLD:
8659 	case OMP_ATOMIC_CAPTURE_NEW:
8660 	  ret = gimplify_omp_atomic (expr_p, pre_p);
8661 	  break;
8662 
8663 	case TRANSACTION_EXPR:
8664 	  ret = gimplify_transaction (expr_p, pre_p);
8665 	  break;
8666 
8667 	case TRUTH_AND_EXPR:
8668 	case TRUTH_OR_EXPR:
8669 	case TRUTH_XOR_EXPR:
8670 	  {
8671 	    tree orig_type = TREE_TYPE (*expr_p);
8672 	    tree new_type, xop0, xop1;
8673 	    *expr_p = gimple_boolify (*expr_p);
8674 	    new_type = TREE_TYPE (*expr_p);
8675 	    if (!useless_type_conversion_p (orig_type, new_type))
8676 	      {
8677 		*expr_p = fold_convert_loc (input_location, orig_type, *expr_p);
8678 		ret = GS_OK;
8679 		break;
8680 	      }
8681 
8682 	  /* Boolified binary truth expressions are semantically equivalent
8683 	     to bitwise binary expressions.  Canonicalize them to the
8684 	     bitwise variant.  */
8685 	    switch (TREE_CODE (*expr_p))
8686 	      {
8687 	      case TRUTH_AND_EXPR:
8688 		TREE_SET_CODE (*expr_p, BIT_AND_EXPR);
8689 		break;
8690 	      case TRUTH_OR_EXPR:
8691 		TREE_SET_CODE (*expr_p, BIT_IOR_EXPR);
8692 		break;
8693 	      case TRUTH_XOR_EXPR:
8694 		TREE_SET_CODE (*expr_p, BIT_XOR_EXPR);
8695 		break;
8696 	      default:
8697 		break;
8698 	      }
8699 	    /* Now make sure that operands have compatible type to
8700 	       expression's new_type.  */
8701 	    xop0 = TREE_OPERAND (*expr_p, 0);
8702 	    xop1 = TREE_OPERAND (*expr_p, 1);
8703 	    if (!useless_type_conversion_p (new_type, TREE_TYPE (xop0)))
8704 	      TREE_OPERAND (*expr_p, 0) = fold_convert_loc (input_location,
8705 							    new_type,
8706 	      						    xop0);
8707 	    if (!useless_type_conversion_p (new_type, TREE_TYPE (xop1)))
8708 	      TREE_OPERAND (*expr_p, 1) = fold_convert_loc (input_location,
8709 							    new_type,
8710 	      						    xop1);
8711 	    /* Continue classified as tcc_binary.  */
8712 	    goto expr_2;
8713 	  }
8714 
8715 	case FMA_EXPR:
8716 	case VEC_COND_EXPR:
8717 	case VEC_PERM_EXPR:
8718 	  /* Classified as tcc_expression.  */
8719 	  goto expr_3;
8720 
8721 	case POINTER_PLUS_EXPR:
8722 	  {
8723 	    enum gimplify_status r0, r1;
8724 	    r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
8725 				post_p, is_gimple_val, fb_rvalue);
8726 	    r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
8727 				post_p, is_gimple_val, fb_rvalue);
8728 	    recalculate_side_effects (*expr_p);
8729 	    ret = MIN (r0, r1);
8730 	    break;
8731 	  }
8732 
8733 	case CILK_SYNC_STMT:
8734 	  {
8735 	    if (!fn_contains_cilk_spawn_p (cfun))
8736 	      {
8737 		error_at (EXPR_LOCATION (*expr_p),
8738 			  "expected %<_Cilk_spawn%> before %<_Cilk_sync%>");
8739 		ret = GS_ERROR;
8740 	      }
8741 	    else
8742 	      {
8743 		gimplify_cilk_sync (expr_p, pre_p);
8744 		ret = GS_ALL_DONE;
8745 	      }
8746 	    break;
8747 	  }
8748 
8749 	default:
8750 	  switch (TREE_CODE_CLASS (TREE_CODE (*expr_p)))
8751 	    {
8752 	    case tcc_comparison:
8753 	      /* Handle comparison of objects of non scalar mode aggregates
8754 	     	 with a call to memcmp.  It would be nice to only have to do
8755 	     	 this for variable-sized objects, but then we'd have to allow
8756 	     	 the same nest of reference nodes we allow for MODIFY_EXPR and
8757 	     	 that's too complex.
8758 
8759 		 Compare scalar mode aggregates as scalar mode values.  Using
8760 		 memcmp for them would be very inefficient at best, and is
8761 		 plain wrong if bitfields are involved.  */
8762 		{
8763 		  tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1));
8764 
8765 		  /* Vector comparisons need no boolification.  */
8766 		  if (TREE_CODE (type) == VECTOR_TYPE)
8767 		    goto expr_2;
8768 		  else if (!AGGREGATE_TYPE_P (type))
8769 		    {
8770 		      tree org_type = TREE_TYPE (*expr_p);
8771 		      *expr_p = gimple_boolify (*expr_p);
8772 		      if (!useless_type_conversion_p (org_type,
8773 						      TREE_TYPE (*expr_p)))
8774 			{
8775 			  *expr_p = fold_convert_loc (input_location,
8776 						      org_type, *expr_p);
8777 			  ret = GS_OK;
8778 			}
8779 		      else
8780 			goto expr_2;
8781 		    }
8782 		  else if (TYPE_MODE (type) != BLKmode)
8783 		    ret = gimplify_scalar_mode_aggregate_compare (expr_p);
8784 		  else
8785 		    ret = gimplify_variable_sized_compare (expr_p);
8786 
8787 		  break;
8788 		}
8789 
8790 	    /* If *EXPR_P does not need to be special-cased, handle it
8791 	       according to its class.  */
8792 	    case tcc_unary:
8793 	      ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
8794 				   post_p, is_gimple_val, fb_rvalue);
8795 	      break;
8796 
8797 	    case tcc_binary:
8798 	    expr_2:
8799 	      {
8800 		enum gimplify_status r0, r1;
8801 
8802 		r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
8803 		                    post_p, is_gimple_val, fb_rvalue);
8804 		r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
8805 				    post_p, is_gimple_val, fb_rvalue);
8806 
8807 		ret = MIN (r0, r1);
8808 		break;
8809 	      }
8810 
8811 	    expr_3:
8812 	      {
8813 		enum gimplify_status r0, r1, r2;
8814 
8815 		r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
8816 		                    post_p, is_gimple_val, fb_rvalue);
8817 		r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
8818 				    post_p, is_gimple_val, fb_rvalue);
8819 		r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
8820 				    post_p, is_gimple_val, fb_rvalue);
8821 
8822 		ret = MIN (MIN (r0, r1), r2);
8823 		break;
8824 	      }
8825 
8826 	    case tcc_declaration:
8827 	    case tcc_constant:
8828 	      ret = GS_ALL_DONE;
8829 	      goto dont_recalculate;
8830 
8831 	    default:
8832 	      gcc_unreachable ();
8833 	    }
8834 
8835 	  recalculate_side_effects (*expr_p);
8836 
8837 	dont_recalculate:
8838 	  break;
8839 	}
8840 
8841       gcc_assert (*expr_p || ret != GS_OK);
8842     }
8843   while (ret == GS_OK);
8844 
8845   /* If we encountered an error_mark somewhere nested inside, either
8846      stub out the statement or propagate the error back out.  */
8847   if (ret == GS_ERROR)
8848     {
8849       if (is_statement)
8850 	*expr_p = NULL;
8851       goto out;
8852     }
8853 
8854   /* This was only valid as a return value from the langhook, which
8855      we handled.  Make sure it doesn't escape from any other context.  */
8856   gcc_assert (ret != GS_UNHANDLED);
8857 
8858   if (fallback == fb_none && *expr_p && !is_gimple_stmt (*expr_p))
8859     {
8860       /* We aren't looking for a value, and we don't have a valid
8861 	 statement.  If it doesn't have side-effects, throw it away.  */
8862       if (!TREE_SIDE_EFFECTS (*expr_p))
8863 	*expr_p = NULL;
8864       else if (!TREE_THIS_VOLATILE (*expr_p))
8865 	{
8866 	  /* This is probably a _REF that contains something nested that
8867 	     has side effects.  Recurse through the operands to find it.  */
8868 	  enum tree_code code = TREE_CODE (*expr_p);
8869 
8870 	  switch (code)
8871 	    {
8872 	    case COMPONENT_REF:
8873 	    case REALPART_EXPR:
8874 	    case IMAGPART_EXPR:
8875 	    case VIEW_CONVERT_EXPR:
8876 	      gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
8877 			     gimple_test_f, fallback);
8878 	      break;
8879 
8880 	    case ARRAY_REF:
8881 	    case ARRAY_RANGE_REF:
8882 	      gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
8883 			     gimple_test_f, fallback);
8884 	      gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
8885 			     gimple_test_f, fallback);
8886 	      break;
8887 
8888 	    default:
8889 	       /* Anything else with side-effects must be converted to
8890 		  a valid statement before we get here.  */
8891 	      gcc_unreachable ();
8892 	    }
8893 
8894 	  *expr_p = NULL;
8895 	}
8896       else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p))
8897 	       && TYPE_MODE (TREE_TYPE (*expr_p)) != BLKmode)
8898 	{
8899 	  /* Historically, the compiler has treated a bare reference
8900 	     to a non-BLKmode volatile lvalue as forcing a load.  */
8901 	  tree type = TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p));
8902 
8903 	  /* Normally, we do not want to create a temporary for a
8904 	     TREE_ADDRESSABLE type because such a type should not be
8905 	     copied by bitwise-assignment.  However, we make an
8906 	     exception here, as all we are doing here is ensuring that
8907 	     we read the bytes that make up the type.  We use
8908 	     create_tmp_var_raw because create_tmp_var will abort when
8909 	     given a TREE_ADDRESSABLE type.  */
8910 	  tree tmp = create_tmp_var_raw (type, "vol");
8911 	  gimple_add_tmp_var (tmp);
8912 	  gimplify_assign (tmp, *expr_p, pre_p);
8913 	  *expr_p = NULL;
8914 	}
8915       else
8916 	/* We can't do anything useful with a volatile reference to
8917 	   an incomplete type, so just throw it away.  Likewise for
8918 	   a BLKmode type, since any implicit inner load should
8919 	   already have been turned into an explicit one by the
8920 	   gimplification process.  */
8921 	*expr_p = NULL;
8922     }
8923 
8924   /* If we are gimplifying at the statement level, we're done.  Tack
8925      everything together and return.  */
8926   if (fallback == fb_none || is_statement)
8927     {
8928       /* Since *EXPR_P has been converted into a GIMPLE tuple, clear
8929          it out for GC to reclaim it.  */
8930       *expr_p = NULL_TREE;
8931 
8932       if (!gimple_seq_empty_p (internal_pre)
8933 	  || !gimple_seq_empty_p (internal_post))
8934 	{
8935 	  gimplify_seq_add_seq (&internal_pre, internal_post);
8936 	  gimplify_seq_add_seq (pre_p, internal_pre);
8937 	}
8938 
8939       /* The result of gimplifying *EXPR_P is going to be the last few
8940 	 statements in *PRE_P and *POST_P.  Add location information
8941 	 to all the statements that were added by the gimplification
8942 	 helpers.  */
8943       if (!gimple_seq_empty_p (*pre_p))
8944 	annotate_all_with_location_after (*pre_p, pre_last_gsi, input_location);
8945 
8946       if (!gimple_seq_empty_p (*post_p))
8947 	annotate_all_with_location_after (*post_p, post_last_gsi,
8948 					  input_location);
8949 
8950       goto out;
8951     }
8952 
8953 #ifdef ENABLE_GIMPLE_CHECKING
8954   if (*expr_p)
8955     {
8956       enum tree_code code = TREE_CODE (*expr_p);
8957       /* These expressions should already be in gimple IR form.  */
8958       gcc_assert (code != MODIFY_EXPR
8959 		  && code != ASM_EXPR
8960 		  && code != BIND_EXPR
8961 		  && code != CATCH_EXPR
8962 		  && (code != COND_EXPR || gimplify_ctxp->allow_rhs_cond_expr)
8963 		  && code != EH_FILTER_EXPR
8964 		  && code != GOTO_EXPR
8965 		  && code != LABEL_EXPR
8966 		  && code != LOOP_EXPR
8967 		  && code != SWITCH_EXPR
8968 		  && code != TRY_FINALLY_EXPR
8969 		  && code != OACC_PARALLEL
8970 		  && code != OACC_KERNELS
8971 		  && code != OACC_DATA
8972 		  && code != OACC_HOST_DATA
8973 		  && code != OACC_DECLARE
8974 		  && code != OACC_UPDATE
8975 		  && code != OACC_ENTER_DATA
8976 		  && code != OACC_EXIT_DATA
8977 		  && code != OACC_CACHE
8978 		  && code != OMP_CRITICAL
8979 		  && code != OMP_FOR
8980 		  && code != OACC_LOOP
8981 		  && code != OMP_MASTER
8982 		  && code != OMP_TASKGROUP
8983 		  && code != OMP_ORDERED
8984 		  && code != OMP_PARALLEL
8985 		  && code != OMP_SECTIONS
8986 		  && code != OMP_SECTION
8987 		  && code != OMP_SINGLE);
8988     }
8989 #endif
8990 
8991   /* Otherwise we're gimplifying a subexpression, so the resulting
8992      value is interesting.  If it's a valid operand that matches
8993      GIMPLE_TEST_F, we're done. Unless we are handling some
8994      post-effects internally; if that's the case, we need to copy into
8995      a temporary before adding the post-effects to POST_P.  */
8996   if (gimple_seq_empty_p (internal_post) && (*gimple_test_f) (*expr_p))
8997     goto out;
8998 
8999   /* Otherwise, we need to create a new temporary for the gimplified
9000      expression.  */
9001 
9002   /* We can't return an lvalue if we have an internal postqueue.  The
9003      object the lvalue refers to would (probably) be modified by the
9004      postqueue; we need to copy the value out first, which means an
9005      rvalue.  */
9006   if ((fallback & fb_lvalue)
9007       && gimple_seq_empty_p (internal_post)
9008       && is_gimple_addressable (*expr_p))
9009     {
9010       /* An lvalue will do.  Take the address of the expression, store it
9011 	 in a temporary, and replace the expression with an INDIRECT_REF of
9012 	 that temporary.  */
9013       tmp = build_fold_addr_expr_loc (input_location, *expr_p);
9014       gimplify_expr (&tmp, pre_p, post_p, is_gimple_reg, fb_rvalue);
9015       *expr_p = build_simple_mem_ref (tmp);
9016     }
9017   else if ((fallback & fb_rvalue) && is_gimple_reg_rhs_or_call (*expr_p))
9018     {
9019       /* An rvalue will do.  Assign the gimplified expression into a
9020 	 new temporary TMP and replace the original expression with
9021 	 TMP.  First, make sure that the expression has a type so that
9022 	 it can be assigned into a temporary.  */
9023       gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p)));
9024       *expr_p = get_formal_tmp_var (*expr_p, pre_p);
9025     }
9026   else
9027     {
9028 #ifdef ENABLE_GIMPLE_CHECKING
9029       if (!(fallback & fb_mayfail))
9030 	{
9031 	  fprintf (stderr, "gimplification failed:\n");
9032 	  print_generic_expr (stderr, *expr_p, 0);
9033 	  debug_tree (*expr_p);
9034 	  internal_error ("gimplification failed");
9035 	}
9036 #endif
9037       gcc_assert (fallback & fb_mayfail);
9038 
9039       /* If this is an asm statement, and the user asked for the
9040 	 impossible, don't die.  Fail and let gimplify_asm_expr
9041 	 issue an error.  */
9042       ret = GS_ERROR;
9043       goto out;
9044     }
9045 
9046   /* Make sure the temporary matches our predicate.  */
9047   gcc_assert ((*gimple_test_f) (*expr_p));
9048 
9049   if (!gimple_seq_empty_p (internal_post))
9050     {
9051       annotate_all_with_location (internal_post, input_location);
9052       gimplify_seq_add_seq (pre_p, internal_post);
9053     }
9054 
9055  out:
9056   input_location = saved_location;
9057   return ret;
9058 }
9059 
/* Look through TYPE for variable-sized objects and gimplify each such
   size that we find.  Add to LIST_P any statements generated.  */

void
gimplify_type_sizes (tree type, gimple_seq *list_p)
{
  tree field, t;

  if (type == NULL || type == error_mark_node)
    return;

  /* We first do the main variant, then copy into any other variants.  */
  type = TYPE_MAIN_VARIANT (type);

  /* Avoid infinite recursion.  */
  if (TYPE_SIZES_GIMPLIFIED (type))
    return;

  TYPE_SIZES_GIMPLIFIED (type) = 1;

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
    case REAL_TYPE:
    case FIXED_POINT_TYPE:
      gimplify_one_sizepos (&TYPE_MIN_VALUE (type), list_p);
      gimplify_one_sizepos (&TYPE_MAX_VALUE (type), list_p);

      /* Propagate the gimplified bounds to every variant so they all
	 share the same trees.  */
      for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
	{
	  TYPE_MIN_VALUE (t) = TYPE_MIN_VALUE (type);
	  TYPE_MAX_VALUE (t) = TYPE_MAX_VALUE (type);
	}
      break;

    case ARRAY_TYPE:
      /* These types may not have declarations, so handle them here.  */
      gimplify_type_sizes (TREE_TYPE (type), list_p);
      gimplify_type_sizes (TYPE_DOMAIN (type), list_p);
      /* Ensure VLA bounds aren't removed, for -O0 they should be variables
	 with assigned stack slots, for -O1+ -g they should be tracked
	 by VTA.  */
      if (!(TYPE_NAME (type)
	    && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
	    && DECL_IGNORED_P (TYPE_NAME (type)))
	  && TYPE_DOMAIN (type)
	  && INTEGRAL_TYPE_P (TYPE_DOMAIN (type)))
	{
	  /* Clear DECL_IGNORED_P on artificial bound variables so the
	     debugger can still see them.  */
	  t = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
	  if (t && TREE_CODE (t) == VAR_DECL && DECL_ARTIFICIAL (t))
	    DECL_IGNORED_P (t) = 0;
	  t = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
	  if (t && TREE_CODE (t) == VAR_DECL && DECL_ARTIFICIAL (t))
	    DECL_IGNORED_P (t) = 0;
	}
      break;

    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      /* Gimplify the position and size of every field, and recurse into
	 the field types themselves.  */
      for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
	if (TREE_CODE (field) == FIELD_DECL)
	  {
	    gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), list_p);
	    gimplify_one_sizepos (&DECL_SIZE (field), list_p);
	    gimplify_one_sizepos (&DECL_SIZE_UNIT (field), list_p);
	    gimplify_type_sizes (TREE_TYPE (field), list_p);
	  }
      break;

    case POINTER_TYPE:
    case REFERENCE_TYPE:
	/* We used to recurse on the pointed-to type here, which turned out to
	   be incorrect because its definition might refer to variables not
	   yet initialized at this point if a forward declaration is involved.

	   It was actually useful for anonymous pointed-to types to ensure
	   that the sizes evaluation dominates every possible later use of the
	   values.  Restricting to such types here would be safe since there
	   is no possible forward declaration around, but would introduce an
	   undesirable middle-end semantic to anonymity.  We then defer to
	   front-ends the responsibility of ensuring that the sizes are
	   evaluated both early and late enough, e.g. by attaching artificial
	   type declarations to the tree.  */
      break;

    default:
      break;
    }

  /* Gimplify the size of the type itself, then share the (now
     gimplified) size trees with every variant.  */
  gimplify_one_sizepos (&TYPE_SIZE (type), list_p);
  gimplify_one_sizepos (&TYPE_SIZE_UNIT (type), list_p);

  for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
    {
      TYPE_SIZE (t) = TYPE_SIZE (type);
      TYPE_SIZE_UNIT (t) = TYPE_SIZE_UNIT (type);
      TYPE_SIZES_GIMPLIFIED (t) = 1;
    }
}
9162 
9163 /* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
9164    a size or position, has had all of its SAVE_EXPRs evaluated.
9165    We add any required statements to *STMT_P.  */
9166 
9167 void
9168 gimplify_one_sizepos (tree *expr_p, gimple_seq *stmt_p)
9169 {
9170   tree expr = *expr_p;
9171 
9172   /* We don't do anything if the value isn't there, is constant, or contains
9173      A PLACEHOLDER_EXPR.  We also don't want to do anything if it's already
9174      a VAR_DECL.  If it's a VAR_DECL from another function, the gimplifier
9175      will want to replace it with a new variable, but that will cause problems
9176      if this type is from outside the function.  It's OK to have that here.  */
9177   if (is_gimple_sizepos (expr))
9178     return;
9179 
9180   *expr_p = unshare_expr (expr);
9181 
9182   gimplify_expr (expr_p, stmt_p, NULL, is_gimple_val, fb_rvalue);
9183 }
9184 
/* Gimplify the body of statements of FNDECL and return a GIMPLE_BIND node
   containing the sequence of corresponding GIMPLE statements.  If DO_PARMS
   is true, also gimplify the parameters.  */

gbind *
gimplify_body (tree fndecl, bool do_parms)
{
  location_t saved_location = input_location;
  gimple_seq parm_stmts, seq;
  gimple outer_stmt;
  gbind *outer_bind;
  struct cgraph_node *cgn;

  timevar_push (TV_TREE_GIMPLIFY);

  /* Initialize for optimize_insn_for_s{ize,peed}_p possibly called during
     gimplification.  */
  default_rtl_profile ();

  gcc_assert (gimplify_ctxp == NULL);
  push_gimplify_context ();

  /* Functions marked "omp declare target" are gimplified inside an
     ORT_TARGET OpenMP context.  */
  if (flag_openacc || flag_openmp)
    {
      gcc_assert (gimplify_omp_ctxp == NULL);
      if (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (fndecl)))
	gimplify_omp_ctxp = new_omp_context (ORT_TARGET);
    }

  /* Unshare most shared trees in the body and in that of any nested functions.
     It would seem we don't have to do this for nested functions because
     they are supposed to be output and then the outer function gimplified
     first, but the g++ front end doesn't always do it that way.  */
  unshare_body (fndecl);
  unvisit_body (fndecl);

  /* For nested functions, set up the set used to record references to
     variable-sized objects of the enclosing function(s); it is consumed
     below once the body has been gimplified.  */
  cgn = cgraph_node::get (fndecl);
  if (cgn && cgn->origin)
    nonlocal_vlas = new hash_set<tree>;

  /* Make sure input_location isn't set to something weird.  */
  input_location = DECL_SOURCE_LOCATION (fndecl);

  /* Resolve callee-copies.  This has to be done before processing
     the body so that DECL_VALUE_EXPR gets processed correctly.  */
  parm_stmts = do_parms ? gimplify_parameters () : NULL;

  /* Gimplify the function's body.  */
  seq = NULL;
  gimplify_stmt (&DECL_SAVED_TREE (fndecl), &seq);
  outer_stmt = gimple_seq_first_stmt (seq);
  if (!outer_stmt)
    {
      /* An empty body still needs one statement to hang the bind on.  */
      outer_stmt = gimple_build_nop ();
      gimplify_seq_add_stmt (&seq, outer_stmt);
    }

  /* The body must contain exactly one statement, a GIMPLE_BIND.  If this is
     not the case, wrap everything in a GIMPLE_BIND to make it so.  */
  if (gimple_code (outer_stmt) == GIMPLE_BIND
      && gimple_seq_first (seq) == gimple_seq_last (seq))
    outer_bind = as_a <gbind *> (outer_stmt);
  else
    outer_bind = gimple_build_bind (NULL_TREE, seq, NULL);

  /* The GENERIC body has been fully converted; drop it.  */
  DECL_SAVED_TREE (fndecl) = NULL_TREE;

  /* If we had callee-copies statements, insert them at the beginning
     of the function and clear DECL_VALUE_EXPR_P on the parameters.  */
  if (!gimple_seq_empty_p (parm_stmts))
    {
      tree parm;

      gimplify_seq_add_seq (&parm_stmts, gimple_bind_body (outer_bind));
      gimple_bind_set_body (outer_bind, parm_stmts);

      for (parm = DECL_ARGUMENTS (current_function_decl);
	   parm; parm = DECL_CHAIN (parm))
	if (DECL_HAS_VALUE_EXPR_P (parm))
	  {
	    DECL_HAS_VALUE_EXPR_P (parm) = 0;
	    DECL_IGNORED_P (parm) = 0;
	  }
    }

  /* Declare any variables collected for nonlocal VLAs and discard the
     tracking set.  */
  if (nonlocal_vlas)
    {
      if (nonlocal_vla_vars)
	{
	  /* tree-nested.c may later on call declare_vars (..., true);
	     which relies on BLOCK_VARS chain to be the tail of the
	     gimple_bind_vars chain.  Ensure we don't violate that
	     assumption.  */
	  if (gimple_bind_block (outer_bind)
	      == DECL_INITIAL (current_function_decl))
	    declare_vars (nonlocal_vla_vars, outer_bind, true);
	  else
	    BLOCK_VARS (DECL_INITIAL (current_function_decl))
	      = chainon (BLOCK_VARS (DECL_INITIAL (current_function_decl)),
			 nonlocal_vla_vars);
	  nonlocal_vla_vars = NULL_TREE;
	}
      delete nonlocal_vlas;
      nonlocal_vlas = NULL;
    }

  /* Tear down any OpenACC/OpenMP context opened above or during
     gimplification of the body.  */
  if ((flag_openacc || flag_openmp || flag_openmp_simd)
      && gimplify_omp_ctxp)
    {
      delete_omp_context (gimplify_omp_ctxp);
      gimplify_omp_ctxp = NULL;
    }

  pop_gimplify_context (outer_bind);
  gcc_assert (gimplify_ctxp == NULL);

#ifdef ENABLE_CHECKING
  /* Verify the produced GIMPLE, unless errors were already reported.  */
  if (!seen_error ())
    verify_gimple_in_seq (gimple_bind_body (outer_bind));
#endif

  timevar_pop (TV_TREE_GIMPLIFY);
  input_location = saved_location;

  return outer_bind;
}
9311 
9312 typedef char *char_p; /* For DEF_VEC_P.  */
9313 
9314 /* Return whether we should exclude FNDECL from instrumentation.  */
9315 
9316 static bool
9317 flag_instrument_functions_exclude_p (tree fndecl)
9318 {
9319   vec<char_p> *v;
9320 
9321   v = (vec<char_p> *) flag_instrument_functions_exclude_functions;
9322   if (v && v->length () > 0)
9323     {
9324       const char *name;
9325       int i;
9326       char *s;
9327 
9328       name = lang_hooks.decl_printable_name (fndecl, 0);
9329       FOR_EACH_VEC_ELT (*v, i, s)
9330 	if (strstr (name, s) != NULL)
9331 	  return true;
9332     }
9333 
9334   v = (vec<char_p> *) flag_instrument_functions_exclude_files;
9335   if (v && v->length () > 0)
9336     {
9337       const char *name;
9338       int i;
9339       char *s;
9340 
9341       name = DECL_SOURCE_FILE (fndecl);
9342       FOR_EACH_VEC_ELT (*v, i, s)
9343 	if (strstr (name, s) != NULL)
9344 	  return true;
9345     }
9346 
9347   return false;
9348 }
9349 
/* Entry point to the gimplification pass.  FNDECL is the FUNCTION_DECL
   node for the function we want to gimplify.

   Return the sequence of GIMPLE statements corresponding to the body
   of FNDECL.  */

void
gimplify_function_tree (tree fndecl)
{
  tree parm, ret;
  gimple_seq seq;
  gbind *bind;

  gcc_assert (!gimple_body (fndecl));

  if (DECL_STRUCT_FUNCTION (fndecl))
    push_cfun (DECL_STRUCT_FUNCTION (fndecl));
  else
    push_struct_function (fndecl);

  for (parm = DECL_ARGUMENTS (fndecl); parm ; parm = DECL_CHAIN (parm))
    {
      /* Preliminarily mark non-addressed complex variables as eligible
         for promotion to gimple registers.  We'll transform their uses
         as we find them.  */
      if ((TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
	   || TREE_CODE (TREE_TYPE (parm)) == VECTOR_TYPE)
          && !TREE_THIS_VOLATILE (parm)
          && !needs_to_live_in_memory (parm))
        DECL_GIMPLE_REG_P (parm) = 1;
    }

  /* Likewise for a complex- or vector-valued result.  */
  ret = DECL_RESULT (fndecl);
  if ((TREE_CODE (TREE_TYPE (ret)) == COMPLEX_TYPE
       || TREE_CODE (TREE_TYPE (ret)) == VECTOR_TYPE)
      && !needs_to_live_in_memory (ret))
    DECL_GIMPLE_REG_P (ret) = 1;

  bind = gimplify_body (fndecl, true);

  /* The tree body of the function is no longer needed, replace it
     with the new GIMPLE body.  */
  seq = NULL;
  gimple_seq_add_stmt (&seq, bind);
  gimple_set_body (fndecl, seq);

  /* If we're instrumenting function entry/exit, then prepend the call to
     the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
     catch the exit hook.  */
  /* ??? Add some way to ignore exceptions for this TFE.  */
  if (flag_instrument_function_entry_exit
      && !DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl)
      && !flag_instrument_functions_exclude_p (fndecl))
    {
      tree x;
      gbind *new_bind;
      gimple tf;
      gimple_seq cleanup = NULL, body = NULL;
      tree tmp_var;
      gcall *call;

      /* Build the exit-hook cleanup sequence:
	   tmp = __builtin_return_address (0);
	   profile_func_exit (&current_function, tmp);  */
      x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
      call = gimple_build_call (x, 1, integer_zero_node);
      tmp_var = create_tmp_var (ptr_type_node, "return_addr");
      gimple_call_set_lhs (call, tmp_var);
      gimplify_seq_add_stmt (&cleanup, call);
      x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_EXIT);
      call = gimple_build_call (x, 2,
				build_fold_addr_expr (current_function_decl),
				tmp_var);
      gimplify_seq_add_stmt (&cleanup, call);
      /* Wrap the existing body so the cleanup runs on every exit.  */
      tf = gimple_build_try (seq, cleanup, GIMPLE_TRY_FINALLY);

      /* Build the entry-hook sequence, which runs before the body.  */
      x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
      call = gimple_build_call (x, 1, integer_zero_node);
      tmp_var = create_tmp_var (ptr_type_node, "return_addr");
      gimple_call_set_lhs (call, tmp_var);
      gimplify_seq_add_stmt (&body, call);
      x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_ENTER);
      call = gimple_build_call (x, 2,
				build_fold_addr_expr (current_function_decl),
				tmp_var);
      gimplify_seq_add_stmt (&body, call);
      gimplify_seq_add_stmt (&body, tf);
      new_bind = gimple_build_bind (NULL, body, gimple_bind_block (bind));
      /* Clear the block for BIND, since it is no longer directly inside
         the function, but within a try block.  */
      gimple_bind_set_block (bind, NULL);

      /* Replace the current function body with the body
         wrapped in the try/finally TF.  */
      seq = NULL;
      gimple_seq_add_stmt (&seq, new_bind);
      gimple_set_body (fndecl, seq);
      bind = new_bind;
    }

  /* Under -fsanitize=thread, wrap the body in a try/finally so that
     TSAN_FUNC_EXIT is reached on every exit from the function.  */
  if ((flag_sanitize & SANITIZE_THREAD) != 0
      && !lookup_attribute ("no_sanitize_thread", DECL_ATTRIBUTES (fndecl)))
    {
      gcall *call = gimple_build_call_internal (IFN_TSAN_FUNC_EXIT, 0);
      gimple tf = gimple_build_try (seq, call, GIMPLE_TRY_FINALLY);
      gbind *new_bind = gimple_build_bind (NULL, tf, gimple_bind_block (bind));
      /* Clear the block for BIND, since it is no longer directly inside
	 the function, but within a try block.  */
      gimple_bind_set_block (bind, NULL);
      /* Replace the current function body with the body
	 wrapped in the try/finally TF.  */
      seq = NULL;
      gimple_seq_add_stmt (&seq, new_bind);
      gimple_set_body (fndecl, seq);
    }

  DECL_SAVED_TREE (fndecl) = NULL_TREE;
  cfun->curr_properties = PROP_gimple_any;

  pop_cfun ();
}
9468 
9469 /* Return a dummy expression of type TYPE in order to keep going after an
9470    error.  */
9471 
9472 static tree
9473 dummy_object (tree type)
9474 {
9475   tree t = build_int_cst (build_pointer_type (type), 0);
9476   return build2 (MEM_REF, type, t, t);
9477 }
9478 
/* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
   builtin function, but a very special sort of operator.

   *EXPR_P is the VA_ARG_EXPR; statements needed before and after it are
   appended to *PRE_P and *POST_P respectively.  */

enum gimplify_status
gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  tree promoted_type, have_va_type;
  tree valist = TREE_OPERAND (*expr_p, 0);
  tree type = TREE_TYPE (*expr_p);
  tree t;
  location_t loc = EXPR_LOCATION (*expr_p);

  /* Verify that valist is of the proper type.  */
  have_va_type = TREE_TYPE (valist);
  if (have_va_type == error_mark_node)
    return GS_ERROR;
  have_va_type = targetm.canonical_va_list_type (have_va_type);

  if (have_va_type == NULL_TREE)
    {
      error_at (loc, "first argument to %<va_arg%> not of type %<va_list%>");
      return GS_ERROR;
    }

  /* Generate a diagnostic for requesting data of a type that cannot
     be passed through `...' due to type promotion at the call site.  */
  if ((promoted_type = lang_hooks.types.type_promotes_to (type))
	   != type)
    {
      /* GAVE_HELP makes the "pass the promoted type" hint appear only
	 once per compilation.  */
      static bool gave_help;
      bool warned;

      /* Unfortunately, this is merely undefined, rather than a constraint
	 violation, so we cannot make this an error.  If this call is never
	 executed, the program is still strictly conforming.  */
      warned = warning_at (loc, 0,
	  		   "%qT is promoted to %qT when passed through %<...%>",
			   type, promoted_type);
      if (!gave_help && warned)
	{
	  gave_help = true;
	  inform (loc, "(so you should pass %qT not %qT to %<va_arg%>)",
		  promoted_type, type);
	}

      /* We can, however, treat "undefined" any way we please.
	 Call abort to encourage the user to fix the program.  */
      if (warned)
	inform (loc, "if this code is reached, the program will abort");
      /* Before the abort, allow the evaluation of the va_list
	 expression to exit or longjmp.  */
      gimplify_and_add (valist, pre_p);
      t = build_call_expr_loc (loc,
			       builtin_decl_implicit (BUILT_IN_TRAP), 0);
      gimplify_and_add (t, pre_p);

      /* This is dead code, but go ahead and finish so that the
	 mode of the result comes out right.  */
      *expr_p = dummy_object (type);
      return GS_ALL_DONE;
    }
  else
    {
      /* Make it easier for the backends by protecting the valist argument
	 from multiple evaluations.  */
      if (TREE_CODE (have_va_type) == ARRAY_TYPE)
	{
	  /* For this case, the backends will be expecting a pointer to
	     TREE_TYPE (abi), but it's possible we've
	     actually been given an array (an actual TARGET_FN_ABI_VA_LIST).
	     So fix it.  */
	  if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
	    {
	      tree p1 = build_pointer_type (TREE_TYPE (have_va_type));
	      valist = fold_convert_loc (loc, p1,
					 build_fold_addr_expr_loc (loc, valist));
	    }

	  gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue);
	}
      else
	gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue);

      if (!targetm.gimplify_va_arg_expr)
	/* FIXME: Once most targets are converted we should merely
	   assert this is non-null.  */
	return GS_ALL_DONE;

      /* Let the target expand the va_arg access itself.  */
      *expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p);
      return GS_OK;
    }
}
9571 
9572 /* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.
9573 
9574    DST/SRC are the destination and source respectively.  You can pass
9575    ungimplified trees in DST or SRC, in which case they will be
9576    converted to a gimple operand if necessary.
9577 
9578    This function returns the newly created GIMPLE_ASSIGN tuple.  */
9579 
9580 gimple
9581 gimplify_assign (tree dst, tree src, gimple_seq *seq_p)
9582 {
9583   tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
9584   gimplify_and_add (t, seq_p);
9585   ggc_free (t);
9586   return gimple_seq_last_stmt (*seq_p);
9587 }
9588 
9589 inline hashval_t
9590 gimplify_hasher::hash (const value_type *p)
9591 {
9592   tree t = p->val;
9593   return iterative_hash_expr (t, 0);
9594 }
9595 
9596 inline bool
9597 gimplify_hasher::equal (const value_type *p1, const compare_type *p2)
9598 {
9599   tree t1 = p1->val;
9600   tree t2 = p2->val;
9601   enum tree_code code = TREE_CODE (t1);
9602 
9603   if (TREE_CODE (t2) != code
9604       || TREE_TYPE (t1) != TREE_TYPE (t2))
9605     return false;
9606 
9607   if (!operand_equal_p (t1, t2, 0))
9608     return false;
9609 
9610 #ifdef ENABLE_CHECKING
9611   /* Only allow them to compare equal if they also hash equal; otherwise
9612      results are nondeterminate, and we fail bootstrap comparison.  */
9613   gcc_assert (hash (p1) == hash (p2));
9614 #endif
9615 
9616   return true;
9617 }
9618