1 /* Tree lowering pass.  This pass converts the GENERIC functions-as-trees
2    tree representation into the GIMPLE form.
3    Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011,
4    2012 Free Software Foundation, Inc.
5    Major work done by Sebastian Pop <s.pop@laposte.net>,
6    Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.
7 
8 This file is part of GCC.
9 
10 GCC is free software; you can redistribute it and/or modify it under
11 the terms of the GNU General Public License as published by the Free
12 Software Foundation; either version 3, or (at your option) any later
13 version.
14 
15 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
16 WARRANTY; without even the implied warranty of MERCHANTABILITY or
17 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
18 for more details.
19 
20 You should have received a copy of the GNU General Public License
21 along with GCC; see the file COPYING3.  If not see
22 <http://www.gnu.org/licenses/>.  */
23 
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "tm.h"
28 #include "tree.h"
29 #include "gimple.h"
30 #include "tree-iterator.h"
31 #include "tree-inline.h"
32 #include "tree-pretty-print.h"
33 #include "langhooks.h"
34 #include "tree-flow.h"
35 #include "cgraph.h"
36 #include "timevar.h"
37 #include "hashtab.h"
38 #include "flags.h"
39 #include "function.h"
40 #include "output.h"
41 #include "ggc.h"
42 #include "diagnostic-core.h"
43 #include "target.h"
44 #include "pointer-set.h"
45 #include "splay-tree.h"
46 #include "vec.h"
47 #include "gimple.h"
48 #include "tree-pass.h"
49 
50 #include "langhooks-def.h"	/* FIXME: for lhd_set_decl_assembler_name.  */
51 #include "expr.h"		/* FIXME: for can_move_by_pieces
52 				   and STACK_CHECK_MAX_VAR_SIZE.  */
53 
54 enum gimplify_omp_var_data
55 {
56   GOVD_SEEN = 1,
57   GOVD_EXPLICIT = 2,
58   GOVD_SHARED = 4,
59   GOVD_PRIVATE = 8,
60   GOVD_FIRSTPRIVATE = 16,
61   GOVD_LASTPRIVATE = 32,
62   GOVD_REDUCTION = 64,
63   GOVD_LOCAL = 128,
64   GOVD_DEBUG_PRIVATE = 256,
65   GOVD_PRIVATE_OUTER_REF = 512,
66   GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE
67 			   | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LOCAL)
68 };
69 
70 
71 enum omp_region_type
72 {
73   ORT_WORKSHARE = 0,
74   ORT_PARALLEL = 2,
75   ORT_COMBINED_PARALLEL = 3,
76   ORT_TASK = 4,
77   ORT_UNTIED_TASK = 5
78 };
79 
80 struct gimplify_omp_ctx
81 {
82   struct gimplify_omp_ctx *outer_context;
83   splay_tree variables;
84   struct pointer_set_t *privatized_types;
85   location_t location;
86   enum omp_clause_default_kind default_kind;
87   enum omp_region_type region_type;
88 };
89 
90 static struct gimplify_ctx *gimplify_ctxp;
91 static struct gimplify_omp_ctx *gimplify_omp_ctxp;
92 
93 
94 /* Formal (expression) temporary table handling: multiple occurrences of
95    the same scalar expression are evaluated into the same temporary.  */
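/* For instance, when optimizing, repeated occurrences of the same scalar
   expression such as "a->len" are mapped to one temporary (say D.1234)
   rather than to a fresh variable per occurrence; see lookup_tmp_var below.
   D.1234 is an illustrative name only.  */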
96 
97 typedef struct gimple_temp_hash_elt
98 {
99   tree val;   /* Key */
100   tree temp;  /* Value */
101 } elt_t;
102 
103 /* Forward declaration.  */
104 static enum gimplify_status gimplify_compound_expr (tree *, gimple_seq *, bool);
105 
106 /* Mark X addressable.  Unlike the langhook we expect X to be in gimple
107    form and we don't do any syntax checking.  */
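/* An illustrative example: for a component reference such as a.b.c[i], the
   loop below walks down to the base object "a" and sets TREE_ADDRESSABLE on
   it, provided that base is a VAR_DECL, PARM_DECL or RESULT_DECL.  */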
108 
109 void
110 mark_addressable (tree x)
111 {
112   while (handled_component_p (x))
113     x = TREE_OPERAND (x, 0);
114   if (TREE_CODE (x) == MEM_REF
115       && TREE_CODE (TREE_OPERAND (x, 0)) == ADDR_EXPR)
116     x = TREE_OPERAND (TREE_OPERAND (x, 0), 0);
117   if (TREE_CODE (x) != VAR_DECL
118       && TREE_CODE (x) != PARM_DECL
119       && TREE_CODE (x) != RESULT_DECL)
120     return;
121   TREE_ADDRESSABLE (x) = 1;
122 }
123 
124 /* Return a hash value for a formal temporary table entry.  */
125 
126 static hashval_t
127 gimple_tree_hash (const void *p)
128 {
129   tree t = ((const elt_t *) p)->val;
130   return iterative_hash_expr (t, 0);
131 }
132 
133 /* Compare two formal temporary table entries.  */
134 
135 static int
136 gimple_tree_eq (const void *p1, const void *p2)
137 {
138   tree t1 = ((const elt_t *) p1)->val;
139   tree t2 = ((const elt_t *) p2)->val;
140   enum tree_code code = TREE_CODE (t1);
141 
142   if (TREE_CODE (t2) != code
143       || TREE_TYPE (t1) != TREE_TYPE (t2))
144     return 0;
145 
146   if (!operand_equal_p (t1, t2, 0))
147     return 0;
148 
149 #ifdef ENABLE_CHECKING
150   /* Only allow them to compare equal if they also hash equal; otherwise
151      results are nondeterministic, and we fail bootstrap comparison.  */
152   gcc_assert (gimple_tree_hash (p1) == gimple_tree_hash (p2));
153 #endif
154 
155   return 1;
156 }
157 
158 /* Link gimple statement GS to the end of the sequence *SEQ_P.  If
159    *SEQ_P is NULL, a new sequence is allocated.  This function is
160    similar to gimple_seq_add_stmt, but does not scan the operands.
161    During gimplification, we need to manipulate statement sequences
162    before the def/use vectors have been constructed.  */
163 
164 void
165 gimple_seq_add_stmt_without_update (gimple_seq *seq_p, gimple gs)
166 {
167   gimple_stmt_iterator si;
168 
169   if (gs == NULL)
170     return;
171 
172   if (*seq_p == NULL)
173     *seq_p = gimple_seq_alloc ();
174 
175   si = gsi_last (*seq_p);
176 
177   gsi_insert_after_without_update (&si, gs, GSI_NEW_STMT);
178 }
179 
180 /* Shorter alias name for the above function for use in gimplify.c
181    only.  */
182 
183 static inline void
184 gimplify_seq_add_stmt (gimple_seq *seq_p, gimple gs)
185 {
186   gimple_seq_add_stmt_without_update (seq_p, gs);
187 }
188 
189 /* Append sequence SRC to the end of sequence *DST_P.  If *DST_P is
190    NULL, a new sequence is allocated.   This function is
191    similar to gimple_seq_add_seq, but does not scan the operands.
192    During gimplification, we need to manipulate statement sequences
193    before the def/use vectors have been constructed.  */
194 
195 static void
196 gimplify_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
197 {
198   gimple_stmt_iterator si;
199 
200   if (src == NULL)
201     return;
202 
203   if (*dst_p == NULL)
204     *dst_p = gimple_seq_alloc ();
205 
206   si = gsi_last (*dst_p);
207   gsi_insert_seq_after_without_update (&si, src, GSI_NEW_STMT);
208 }
209 
210 /* Set up a context for the gimplifier.  */
211 
212 void
213 push_gimplify_context (struct gimplify_ctx *c)
214 {
215   memset (c, '\0', sizeof (*c));
216   c->prev_context = gimplify_ctxp;
217   gimplify_ctxp = c;
218 }
219 
220 /* Tear down a context for the gimplifier.  If BODY is non-null, then
221    put the temporaries into the outer BIND_EXPR.  Otherwise, put them
222    in the local_decls.
223 
224    BODY is not a sequence, but the first tuple in a sequence.  */
225 
226 void
227 pop_gimplify_context (gimple body)
228 {
229   struct gimplify_ctx *c = gimplify_ctxp;
230 
231   gcc_assert (c && (c->bind_expr_stack == NULL
232 		    || VEC_empty (gimple, c->bind_expr_stack)));
233   VEC_free (gimple, heap, c->bind_expr_stack);
234   gimplify_ctxp = c->prev_context;
235 
236   if (body)
237     declare_vars (c->temps, body, false);
238   else
239     record_vars (c->temps);
240 
241   if (c->temp_htab)
242     htab_delete (c->temp_htab);
243 }
244 
245 /* Push a GIMPLE_BIND tuple onto the stack of bindings.  */
246 
247 static void
248 gimple_push_bind_expr (gimple gimple_bind)
249 {
250   if (gimplify_ctxp->bind_expr_stack == NULL)
251     gimplify_ctxp->bind_expr_stack = VEC_alloc (gimple, heap, 8);
252   VEC_safe_push (gimple, heap, gimplify_ctxp->bind_expr_stack, gimple_bind);
253 }
254 
255 /* Pop the most recently pushed element off the stack of bindings.  */
256 
257 static void
258 gimple_pop_bind_expr (void)
259 {
260   VEC_pop (gimple, gimplify_ctxp->bind_expr_stack);
261 }
262 
263 /* Return the most recently pushed element of the stack of bindings.  */
264 
265 gimple
266 gimple_current_bind_expr (void)
267 {
268   return VEC_last (gimple, gimplify_ctxp->bind_expr_stack);
269 }
270 
271 /* Return the stack of bindings created during gimplification.  */
272 
273 VEC(gimple, heap) *
274 gimple_bind_expr_stack (void)
275 {
276   return gimplify_ctxp->bind_expr_stack;
277 }
278 
279 /* Return true iff there is a COND_EXPR between us and the innermost
280    CLEANUP_POINT_EXPR.  This info is used by gimple_push_cleanup.  */
281 
282 static bool
283 gimple_conditional_context (void)
284 {
285   return gimplify_ctxp->conditions > 0;
286 }
287 
288 /* Note that we've entered a COND_EXPR.  */
289 
290 static void
291 gimple_push_condition (void)
292 {
293 #ifdef ENABLE_GIMPLE_CHECKING
294   if (gimplify_ctxp->conditions == 0)
295     gcc_assert (gimple_seq_empty_p (gimplify_ctxp->conditional_cleanups));
296 #endif
297   ++(gimplify_ctxp->conditions);
298 }
299 
300 /* Note that we've left a COND_EXPR.  If we're back at unconditional scope
301    now, add any conditional cleanups we've seen to the prequeue.  */
302 
303 static void
304 gimple_pop_condition (gimple_seq *pre_p)
305 {
306   int conds = --(gimplify_ctxp->conditions);
307 
308   gcc_assert (conds >= 0);
309   if (conds == 0)
310     {
311       gimplify_seq_add_seq (pre_p, gimplify_ctxp->conditional_cleanups);
312       gimplify_ctxp->conditional_cleanups = NULL;
313     }
314 }
315 
316 /* A stable comparison routine for use with splay trees and DECLs.  */
317 
318 static int
319 splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
320 {
321   tree a = (tree) xa;
322   tree b = (tree) xb;
323 
324   return DECL_UID (a) - DECL_UID (b);
325 }
326 
327 /* Create a new omp construct that deals with variable remapping.  */
328 
329 static struct gimplify_omp_ctx *
330 new_omp_context (enum omp_region_type region_type)
331 {
332   struct gimplify_omp_ctx *c;
333 
334   c = XCNEW (struct gimplify_omp_ctx);
335   c->outer_context = gimplify_omp_ctxp;
336   c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
337   c->privatized_types = pointer_set_create ();
338   c->location = input_location;
339   c->region_type = region_type;
340   if ((region_type & ORT_TASK) == 0)
341     c->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
342   else
343     c->default_kind = OMP_CLAUSE_DEFAULT_UNSPECIFIED;
344 
345   return c;
346 }
347 
348 /* Destroy an omp construct that deals with variable remapping.  */
349 
350 static void
351 delete_omp_context (struct gimplify_omp_ctx *c)
352 {
353   splay_tree_delete (c->variables);
354   pointer_set_destroy (c->privatized_types);
355   XDELETE (c);
356 }
357 
358 static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int);
359 static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool);
360 
361 /* Both gimplify the statement T and append it to *SEQ_P.  This function
362    behaves exactly as gimplify_stmt, but you don't have to pass T as a
363    reference.  */
364 
365 void
366 gimplify_and_add (tree t, gimple_seq *seq_p)
367 {
368   gimplify_stmt (&t, seq_p);
369 }
370 
371 /* Gimplify statement T into sequence *SEQ_P, and return the first
372    tuple in the sequence of generated tuples for this statement.
373    Return NULL if gimplifying T produced no tuples.  */
374 
375 static gimple
376 gimplify_and_return_first (tree t, gimple_seq *seq_p)
377 {
378   gimple_stmt_iterator last = gsi_last (*seq_p);
379 
380   gimplify_and_add (t, seq_p);
381 
382   if (!gsi_end_p (last))
383     {
384       gsi_next (&last);
385       return gsi_stmt (last);
386     }
387   else
388     return gimple_seq_first_stmt (*seq_p);
389 }
390 
391 /* Strip off a legitimate source ending from the input string NAME of
392    length LEN.  Rather than having to know the names used by all of
393    our front ends, we strip off an ending of a period followed by
394    up to six characters.  (Java uses ".class".)  */
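/* An illustrative example: a prefix of "Outer.class" or "foo.c" is cut back
   to "Outer" or "foo" before being used to build a temporary name.  */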
395 
396 static inline void
397 remove_suffix (char *name, int len)
398 {
399   int i;
400 
401   for (i = 2;  i < 8 && len > i;  i++)
402     {
403       if (name[len - i] == '.')
404 	{
405 	  name[len - i] = '\0';
406 	  break;
407 	}
408     }
409 }
410 
411 /* Create a new temporary name with PREFIX.  Return an identifier.  */
412 
413 static GTY(()) unsigned int tmp_var_id_num;
414 
415 tree
416 create_tmp_var_name (const char *prefix)
417 {
418   char *tmp_name;
419 
420   if (prefix)
421     {
422       char *preftmp = ASTRDUP (prefix);
423 
424       remove_suffix (preftmp, strlen (preftmp));
425       clean_symbol_name (preftmp);
426 
427       prefix = preftmp;
428     }
429 
430   ASM_FORMAT_PRIVATE_NAME (tmp_name, prefix ? prefix : "T", tmp_var_id_num++);
431   return get_identifier (tmp_name);
432 }
433 
434 /* Create a new temporary variable declaration of type TYPE.
435    Do NOT push it into the current binding.  */
436 
437 tree
438 create_tmp_var_raw (tree type, const char *prefix)
439 {
440   tree tmp_var;
441 
442   tmp_var = build_decl (input_location,
443 			VAR_DECL, prefix ? create_tmp_var_name (prefix) : NULL,
444 			type);
445 
446   /* The variable was declared by the compiler.  */
447   DECL_ARTIFICIAL (tmp_var) = 1;
448   /* And we don't want debug info for it.  */
449   DECL_IGNORED_P (tmp_var) = 1;
450 
451   /* Make the variable writable.  */
452   TREE_READONLY (tmp_var) = 0;
453 
454   DECL_EXTERNAL (tmp_var) = 0;
455   TREE_STATIC (tmp_var) = 0;
456   TREE_USED (tmp_var) = 1;
457 
458   return tmp_var;
459 }
460 
461 /* Create a new temporary variable declaration of type TYPE.  DO push the
462    variable into the current binding.  Further, assume that this is called
463    only from gimplification or optimization, at which point the creation of
464    certain types is a bug.  */
465 
466 tree
467 create_tmp_var (tree type, const char *prefix)
468 {
469   tree tmp_var;
470 
471   /* We don't allow types that are addressable (meaning we can't make copies),
472      or incomplete.  We also used to reject all variable-sized objects here,
473      but now support those for which a constant upper bound can be obtained.
474      The processing for variable sizes is performed in gimple_add_tmp_var, the
475      point at which it really matters and which may be reached via paths not
476      going through this function, e.g. after direct calls to create_tmp_var_raw.  */
477   gcc_assert (!TREE_ADDRESSABLE (type) && COMPLETE_TYPE_P (type));
478 
479   tmp_var = create_tmp_var_raw (type, prefix);
480   gimple_add_tmp_var (tmp_var);
481   return tmp_var;
482 }
483 
484 /* Create a new temporary variable declaration of type TYPE by calling
485    create_tmp_var and if TYPE is a vector or a complex number, mark the new
486    temporary as a gimple register.  */
487 
488 tree
489 create_tmp_reg (tree type, const char *prefix)
490 {
491   tree tmp;
492 
493   tmp = create_tmp_var (type, prefix);
494   if (TREE_CODE (type) == COMPLEX_TYPE
495       || TREE_CODE (type) == VECTOR_TYPE)
496     DECL_GIMPLE_REG_P (tmp) = 1;
497 
498   return tmp;
499 }
500 
501 /* Create a temporary with a name derived from VAL.  Subroutine of
502    lookup_tmp_var; nobody else should call this function.  */
503 
504 static inline tree
505 create_tmp_from_val (tree val)
506 {
507   /* Drop all qualifiers and address-space information from the value type.  */
508   return create_tmp_var (TYPE_MAIN_VARIANT (TREE_TYPE (val)), get_name (val));
509 }
510 
511 /* Create a temporary to hold the value of VAL.  If IS_FORMAL, try to reuse
512    an existing expression temporary.  */
513 
514 static tree
515 lookup_tmp_var (tree val, bool is_formal)
516 {
517   tree ret;
518 
519   /* If not optimizing, never really reuse a temporary.  local-alloc
520      won't allocate any variable that is used in more than one basic
521      block, which means it will go into memory, causing much extra
522      work in reload and final and poorer code generation, outweighing
523      the extra memory allocation here.  */
524   if (!optimize || !is_formal || TREE_SIDE_EFFECTS (val))
525     ret = create_tmp_from_val (val);
526   else
527     {
528       elt_t elt, *elt_p;
529       void **slot;
530 
531       elt.val = val;
532       if (gimplify_ctxp->temp_htab == NULL)
533         gimplify_ctxp->temp_htab
534 	  = htab_create (1000, gimple_tree_hash, gimple_tree_eq, free);
535       slot = htab_find_slot (gimplify_ctxp->temp_htab, (void *)&elt, INSERT);
536       if (*slot == NULL)
537 	{
538 	  elt_p = XNEW (elt_t);
539 	  elt_p->val = val;
540 	  elt_p->temp = ret = create_tmp_from_val (val);
541 	  *slot = (void *) elt_p;
542 	}
543       else
544 	{
545 	  elt_p = (elt_t *) *slot;
546           ret = elt_p->temp;
547 	}
548     }
549 
550   return ret;
551 }
552 
553 /* Return true if T is a CALL_EXPR or an expression that can be
554    assigned to a temporary.  Note that this predicate should only be
555    used during gimplification.  See the rationale for this in
556    gimplify_modify_expr.  */
557 
558 static bool
559 is_gimple_reg_rhs_or_call (tree t)
560 {
561   return (get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS
562 	  || TREE_CODE (t) == CALL_EXPR);
563 }
564 
565 /* Return true if T is a valid memory RHS or a CALL_EXPR.  Note that
566    this predicate should only be used during gimplification.  See the
567    rationale for this in gimplify_modify_expr.  */
568 
569 static bool
570 is_gimple_mem_rhs_or_call (tree t)
571 {
572   /* If we're dealing with a renamable type, either source or dest must be
573      a renamed variable.  */
574   if (is_gimple_reg_type (TREE_TYPE (t)))
575     return is_gimple_val (t);
576   else
577     return (is_gimple_val (t) || is_gimple_lvalue (t)
578 	    || TREE_CODE (t) == CALL_EXPR);
579 }
580 
581 /* Helper for get_formal_tmp_var and get_initialized_tmp_var.  */
582 
583 static tree
584 internal_get_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
585                       bool is_formal)
586 {
587   tree t, mod;
588 
589   /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we
590      can create an INIT_EXPR and convert it into a GIMPLE_CALL below.  */
591   gimplify_expr (&val, pre_p, post_p, is_gimple_reg_rhs_or_call,
592 		 fb_rvalue);
593 
594   t = lookup_tmp_var (val, is_formal);
595 
596   if (is_formal
597       && (TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
598 	  || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE))
599     DECL_GIMPLE_REG_P (t) = 1;
600 
601   mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val));
602 
603   SET_EXPR_LOCATION (mod, EXPR_LOC_OR_HERE (val));
604 
605   /* gimplify_modify_expr might want to reduce this further.  */
606   gimplify_and_add (mod, pre_p);
607   ggc_free (mod);
608 
609   /* If we're gimplifying into ssa, gimplify_modify_expr will have
610      given our temporary an SSA name.  Find and return it.  */
611   if (gimplify_ctxp->into_ssa)
612     {
613       gimple last = gimple_seq_last_stmt (*pre_p);
614       t = gimple_get_lhs (last);
615     }
616 
617   return t;
618 }
619 
620 /* Return a formal temporary variable initialized with VAL.  PRE_P is as
621    in gimplify_expr.  Only use this function if:
622 
623    1) The value of the unfactored expression represented by VAL will not
624       change between the initialization and use of the temporary, and
625    2) The temporary will not be otherwise modified.
626 
627    For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
628    and #2 means it is inappropriate for && temps.
629 
630    For other cases, use get_initialized_tmp_var instead.  */
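/* An illustrative example of restriction #2: the temporary that carries the
   result of "a && b" is assigned on two different control-flow paths, so a
   formal temporary is inappropriate there and get_initialized_tmp_var must
   be used instead.  */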
631 
632 tree
633 get_formal_tmp_var (tree val, gimple_seq *pre_p)
634 {
635   return internal_get_tmp_var (val, pre_p, NULL, true);
636 }
637 
638 /* Return a temporary variable initialized with VAL.  PRE_P and POST_P
639    are as in gimplify_expr.  */
640 
641 tree
642 get_initialized_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p)
643 {
644   return internal_get_tmp_var (val, pre_p, post_p, false);
645 }
646 
647 /* Declare all the variables in VARS in SCOPE.  If DEBUG_INFO is true,
648    generate debug info for them; otherwise don't.  */
649 
650 void
651 declare_vars (tree vars, gimple scope, bool debug_info)
652 {
653   tree last = vars;
654   if (last)
655     {
656       tree temps, block;
657 
658       gcc_assert (gimple_code (scope) == GIMPLE_BIND);
659 
660       temps = nreverse (last);
661 
662       block = gimple_bind_block (scope);
663       gcc_assert (!block || TREE_CODE (block) == BLOCK);
664       if (!block || !debug_info)
665 	{
666 	  DECL_CHAIN (last) = gimple_bind_vars (scope);
667 	  gimple_bind_set_vars (scope, temps);
668 	}
669       else
670 	{
671 	  /* We need to attach the nodes both to the BIND_EXPR and to its
672 	     associated BLOCK for debugging purposes.  The key point here
673 	     is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
674 	     is a subchain of the BIND_EXPR_VARS of the BIND_EXPR.  */
675 	  if (BLOCK_VARS (block))
676 	    BLOCK_VARS (block) = chainon (BLOCK_VARS (block), temps);
677 	  else
678 	    {
679 	      gimple_bind_set_vars (scope,
680 	      			    chainon (gimple_bind_vars (scope), temps));
681 	      BLOCK_VARS (block) = temps;
682 	    }
683 	}
684     }
685 }
686 
687 /* For VAR a VAR_DECL of variable size, try to find a constant upper bound
688    for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly.  Abort if
689    no such upper bound can be obtained.  */
690 
691 static void
692 force_constant_size (tree var)
693 {
694   /* The only attempt we make is by querying the maximum size of objects
695      of the variable's type.  */
696 
697   HOST_WIDE_INT max_size;
698 
699   gcc_assert (TREE_CODE (var) == VAR_DECL);
700 
701   max_size = max_int_size_in_bytes (TREE_TYPE (var));
702 
703   gcc_assert (max_size >= 0);
704 
705   DECL_SIZE_UNIT (var)
706     = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var)), max_size);
707   DECL_SIZE (var)
708     = build_int_cst (TREE_TYPE (DECL_SIZE (var)), max_size * BITS_PER_UNIT);
709 }
710 
711 /* Push the temporary variable TMP into the current binding.  */
712 
713 void
714 gimple_add_tmp_var (tree tmp)
715 {
716   gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));
717 
718   /* Later processing assumes that the object size is constant, which might
719      not be true at this point.  Force the use of a constant upper bound in
720      this case.  */
721   if (!host_integerp (DECL_SIZE_UNIT (tmp), 1))
722     force_constant_size (tmp);
723 
724   DECL_CONTEXT (tmp) = current_function_decl;
725   DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;
726 
727   if (gimplify_ctxp)
728     {
729       DECL_CHAIN (tmp) = gimplify_ctxp->temps;
730       gimplify_ctxp->temps = tmp;
731 
732       /* Mark temporaries local within the nearest enclosing parallel.  */
733       if (gimplify_omp_ctxp)
734 	{
735 	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
736 	  while (ctx && ctx->region_type == ORT_WORKSHARE)
737 	    ctx = ctx->outer_context;
738 	  if (ctx)
739 	    omp_add_variable (ctx, tmp, GOVD_LOCAL | GOVD_SEEN);
740 	}
741     }
742   else if (cfun)
743     record_vars (tmp);
744   else
745     {
746       gimple_seq body_seq;
747 
748       /* This case is for nested functions.  We need to expose the locals
749 	 they create.  */
750       body_seq = gimple_body (current_function_decl);
751       declare_vars (tmp, gimple_seq_first_stmt (body_seq), false);
752     }
753 }
754 
755 /* Determine whether to assign a location to the statement GS.  */
756 
757 static bool
758 should_carry_location_p (gimple gs)
759 {
760   /* Don't emit a line note for a label.  We particularly don't want to
761      emit one for the break label, since it doesn't actually correspond
762      to the beginning of the loop/switch.  */
763   if (gimple_code (gs) == GIMPLE_LABEL)
764     return false;
765 
766   return true;
767 }
768 
769 /* Return true if a location should not be emitted for this statement
770    by annotate_one_with_location.  */
771 
772 static inline bool
773 gimple_do_not_emit_location_p (gimple g)
774 {
775   return gimple_plf (g, GF_PLF_1);
776 }
777 
778 /* Mark statement G so a location will not be emitted by
779    annotate_one_with_location.  */
780 
781 static inline void
782 gimple_set_do_not_emit_location (gimple g)
783 {
784   /* The PLF flags are initialized to 0 when a new tuple is created,
785      so there is no need to initialize them anywhere else.  */
786   gimple_set_plf (g, GF_PLF_1, true);
787 }
788 
789 /* Set the location for gimple statement GS to LOCATION.  */
790 
791 static void
792 annotate_one_with_location (gimple gs, location_t location)
793 {
794   if (!gimple_has_location (gs)
795       && !gimple_do_not_emit_location_p (gs)
796       && should_carry_location_p (gs))
797     gimple_set_location (gs, location);
798 }
799 
800 /* Set LOCATION for all the statements after iterator GSI in sequence
801    SEQ.  If GSI is pointing to the end of the sequence, start with the
802    first statement in SEQ.  */
803 
804 static void
805 annotate_all_with_location_after (gimple_seq seq, gimple_stmt_iterator gsi,
806 				  location_t location)
807 {
808   if (gsi_end_p (gsi))
809     gsi = gsi_start (seq);
810   else
811     gsi_next (&gsi);
812 
813   for (; !gsi_end_p (gsi); gsi_next (&gsi))
814     annotate_one_with_location (gsi_stmt (gsi), location);
815 }
816 
817 /* Set the location for all the statements in a sequence STMT_P to LOCATION.  */
818 
819 void
820 annotate_all_with_location (gimple_seq stmt_p, location_t location)
821 {
822   gimple_stmt_iterator i;
823 
824   if (gimple_seq_empty_p (stmt_p))
825     return;
826 
827   for (i = gsi_start (stmt_p); !gsi_end_p (i); gsi_next (&i))
828     {
829       gimple gs = gsi_stmt (i);
830       annotate_one_with_location (gs, location);
831     }
832 }
833 
834 /* This page contains routines to unshare tree nodes, i.e. to duplicate tree
835    nodes that are referenced more than once in GENERIC functions.  This is
836    necessary because gimplification (translation into GIMPLE) is performed
837    by modifying tree nodes in-place, so gimplification of a shared node in a
838    first context could generate an invalid GIMPLE form in a second context.
839 
840    This is achieved with a simple mark/copy/unmark algorithm that walks the
841    GENERIC representation top-down, marks nodes with TREE_VISITED the first
842    time it encounters them, duplicates them if they already have TREE_VISITED
843    set, and finally removes the TREE_VISITED marks it has set.
844 
845    The algorithm works only at the function level, i.e. it generates a GENERIC
846    representation of a function with no nodes shared within the function when
847    passed a GENERIC function (except for nodes that are allowed to be shared).
848 
849    At the global level, it is also necessary to unshare tree nodes that are
850    referenced in more than one function, for the same aforementioned reason.
851    This requires some cooperation from the front-end.  There are 2 strategies:
852 
853      1. Manual unsharing.  The front-end needs to call unshare_expr on every
854         expression that might end up being shared across functions.
855 
856      2. Deep unsharing.  This is an extension of regular unsharing.  Instead
857         of calling unshare_expr on expressions that might be shared across
858         functions, the front-end pre-marks them with TREE_VISITED.  This will
859         ensure that they are unshared on the first reference within functions
860         when the regular unsharing algorithm runs.  The counterpart is that
861         this algorithm must look deeper than for manual unsharing, which is
862         specified by LANG_HOOKS_DEEP_UNSHARING.
863 
864   If there are only a few specific cases of node sharing across functions, it
865   is probably easier for a front-end to unshare the expressions manually.
866   Conversely, if the expressions generated at the global level are as
867   widespread as expressions generated within functions, deep unsharing is very
868   likely the way to go.  */
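/* An illustrative example: if a front-end builds a single PLUS_EXPR node and
   references it from two different statements, the second reference found by
   the walk below is replaced with a copy, so that each statement can then be
   gimplified in place independently.  SAVE_EXPRs and TARGET_EXPRs, which
   model once-only computations, are deliberately left shared.  */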
869 
870 /* Similar to copy_tree_r but do not copy SAVE_EXPR or TARGET_EXPR nodes.
871    These nodes model computations that must be done once.  If we were to
872    unshare something like SAVE_EXPR(i++), the gimplification process would
873    create wrong code.  However, if DATA is non-null, it must hold a pointer
874    set that is used to unshare the subtrees of these nodes.  */
875 
876 static tree
877 mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data)
878 {
879   tree t = *tp;
880   enum tree_code code = TREE_CODE (t);
881 
882   /* Do not copy SAVE_EXPR, TARGET_EXPR or BIND_EXPR nodes themselves, but
883      copy their subtrees if we can make sure to do it only once.  */
884   if (code == SAVE_EXPR || code == TARGET_EXPR || code == BIND_EXPR)
885     {
886       if (data && !pointer_set_insert ((struct pointer_set_t *)data, t))
887 	;
888       else
889 	*walk_subtrees = 0;
890     }
891 
892   /* Stop at types, decls, constants like copy_tree_r.  */
893   else if (TREE_CODE_CLASS (code) == tcc_type
894 	   || TREE_CODE_CLASS (code) == tcc_declaration
895 	   || TREE_CODE_CLASS (code) == tcc_constant
896 	   /* We can't do anything sensible with a BLOCK used as an
897 	      expression, but we also can't just die when we see it
898 	      because of non-expression uses.  So we avert our eyes
899 	      and cross our fingers.  Silly Java.  */
900 	   || code == BLOCK)
901     *walk_subtrees = 0;
902 
903   /* Cope with the statement expression extension.  */
904   else if (code == STATEMENT_LIST)
905     ;
906 
907   /* Leave the bulk of the work to copy_tree_r itself.  */
908   else
909     copy_tree_r (tp, walk_subtrees, NULL);
910 
911   return NULL_TREE;
912 }
913 
914 /* Callback for walk_tree to unshare most of the shared trees rooted at *TP.
915    If *TP has been visited already, then *TP is deeply copied by calling
916    mostly_copy_tree_r.  DATA is passed to mostly_copy_tree_r unmodified.  */
917 
918 static tree
919 copy_if_shared_r (tree *tp, int *walk_subtrees, void *data)
920 {
921   tree t = *tp;
922   enum tree_code code = TREE_CODE (t);
923 
924   /* Skip types, decls, and constants.  But we do want to look at their
925      types and the bounds of types.  Mark them as visited so we properly
926      unmark their subtrees on the unmark pass.  If we've already seen them,
927      don't look down further.  */
928   if (TREE_CODE_CLASS (code) == tcc_type
929       || TREE_CODE_CLASS (code) == tcc_declaration
930       || TREE_CODE_CLASS (code) == tcc_constant)
931     {
932       if (TREE_VISITED (t))
933 	*walk_subtrees = 0;
934       else
935 	TREE_VISITED (t) = 1;
936     }
937 
938   /* If this node has been visited already, unshare it and don't look
939      any deeper.  */
940   else if (TREE_VISITED (t))
941     {
942       walk_tree (tp, mostly_copy_tree_r, data, NULL);
943       *walk_subtrees = 0;
944     }
945 
946   /* Otherwise, mark the node as visited and keep looking.  */
947   else
948     TREE_VISITED (t) = 1;
949 
950   return NULL_TREE;
951 }
952 
953 /* Unshare most of the shared trees rooted at *TP.  DATA is passed to the
954    copy_if_shared_r callback unmodified.  */
955 
956 static inline void
957 copy_if_shared (tree *tp, void *data)
958 {
959   walk_tree (tp, copy_if_shared_r, data, NULL);
960 }
961 
962 /* Unshare all the trees in the body of FNDECL, as well as in the bodies of
963    any nested functions.  */
964 
965 static void
966 unshare_body (tree fndecl)
967 {
968   struct cgraph_node *cgn = cgraph_get_node (fndecl);
969   /* If the language requires deep unsharing, we need a pointer set to make
970      sure we don't repeatedly unshare subtrees of unshareable nodes.  */
971   struct pointer_set_t *visited
972     = lang_hooks.deep_unsharing ? pointer_set_create () : NULL;
973 
974   copy_if_shared (&DECL_SAVED_TREE (fndecl), visited);
975   copy_if_shared (&DECL_SIZE (DECL_RESULT (fndecl)), visited);
976   copy_if_shared (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)), visited);
977 
978   if (visited)
979     pointer_set_destroy (visited);
980 
981   if (cgn)
982     for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
983       unshare_body (cgn->decl);
984 }
985 
986 /* Callback for walk_tree to unmark the visited trees rooted at *TP.
987    Subtrees are walked until the first unvisited node is encountered.  */
988 
989 static tree
990 unmark_visited_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
991 {
992   tree t = *tp;
993 
994   /* If this node has been visited, unmark it and keep looking.  */
995   if (TREE_VISITED (t))
996     TREE_VISITED (t) = 0;
997 
998   /* Otherwise, don't look any deeper.  */
999   else
1000     *walk_subtrees = 0;
1001 
1002   return NULL_TREE;
1003 }
1004 
1005 /* Unmark the visited trees rooted at *TP.  */
1006 
1007 static inline void
1008 unmark_visited (tree *tp)
1009 {
1010   walk_tree (tp, unmark_visited_r, NULL, NULL);
1011 }
1012 
1013 /* Likewise, but mark all trees as not visited.  */
1014 
1015 static void
1016 unvisit_body (tree fndecl)
1017 {
1018   struct cgraph_node *cgn = cgraph_get_node (fndecl);
1019 
1020   unmark_visited (&DECL_SAVED_TREE (fndecl));
1021   unmark_visited (&DECL_SIZE (DECL_RESULT (fndecl)));
1022   unmark_visited (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)));
1023 
1024   if (cgn)
1025     for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
1026       unvisit_body (cgn->decl);
1027 }
1028 
1029 /* Unconditionally make an unshared copy of EXPR.  This is used for
1030    stored expressions which span multiple functions, such as BINFO_VTABLE,
1031    as the normal unsharing process can't tell that they're shared.  */
1032 
1033 tree
1034 unshare_expr (tree expr)
1035 {
1036   walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
1037   return expr;
1038 }
1039 
1040 /* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
1041    contain statements and have a value.  Assign its value to a temporary
1042    and give it void_type_node.  Return the temporary, or NULL_TREE if
1043    WRAPPER was already void.  */
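/* An illustrative example: for a BIND_EXPR of type int whose body ends in
   the value 42, every container on the way down is given void_type_node and
   42 becomes the right-hand side of an INIT_EXPR to a fresh "retval"
   temporary (or of the assignment passed in as TEMP); that temporary is what
   gets returned.  */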
1044 
1045 tree
1046 voidify_wrapper_expr (tree wrapper, tree temp)
1047 {
1048   tree type = TREE_TYPE (wrapper);
1049   if (type && !VOID_TYPE_P (type))
1050     {
1051       tree *p;
1052 
1053       /* Set p to point to the body of the wrapper.  Loop until we find
1054 	 something that isn't a wrapper.  */
1055       for (p = &wrapper; p && *p; )
1056 	{
1057 	  switch (TREE_CODE (*p))
1058 	    {
1059 	    case BIND_EXPR:
1060 	      TREE_SIDE_EFFECTS (*p) = 1;
1061 	      TREE_TYPE (*p) = void_type_node;
1062 	      /* For a BIND_EXPR, the body is operand 1.  */
1063 	      p = &BIND_EXPR_BODY (*p);
1064 	      break;
1065 
1066 	    case CLEANUP_POINT_EXPR:
1067 	    case TRY_FINALLY_EXPR:
1068 	    case TRY_CATCH_EXPR:
1069 	      TREE_SIDE_EFFECTS (*p) = 1;
1070 	      TREE_TYPE (*p) = void_type_node;
1071 	      p = &TREE_OPERAND (*p, 0);
1072 	      break;
1073 
1074 	    case STATEMENT_LIST:
1075 	      {
1076 		tree_stmt_iterator i = tsi_last (*p);
1077 		TREE_SIDE_EFFECTS (*p) = 1;
1078 		TREE_TYPE (*p) = void_type_node;
1079 		p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i);
1080 	      }
1081 	      break;
1082 
1083 	    case COMPOUND_EXPR:
1084 	      /* Advance to the last statement.  Set all container types to
1085 		 void.  */
1086 	      for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1))
1087 		{
1088 		  TREE_SIDE_EFFECTS (*p) = 1;
1089 		  TREE_TYPE (*p) = void_type_node;
1090 		}
1091 	      break;
1092 
1093 	    case TRANSACTION_EXPR:
1094 	      TREE_SIDE_EFFECTS (*p) = 1;
1095 	      TREE_TYPE (*p) = void_type_node;
1096 	      p = &TRANSACTION_EXPR_BODY (*p);
1097 	      break;
1098 
1099 	    default:
1100 	      /* Assume that any tree upon which voidify_wrapper_expr is
1101 		 directly called is a wrapper, and that its body is op0.  */
1102 	      if (p == &wrapper)
1103 		{
1104 		  TREE_SIDE_EFFECTS (*p) = 1;
1105 		  TREE_TYPE (*p) = void_type_node;
1106 		  p = &TREE_OPERAND (*p, 0);
1107 		  break;
1108 		}
1109 	      goto out;
1110 	    }
1111 	}
1112 
1113     out:
1114       if (p == NULL || IS_EMPTY_STMT (*p))
1115 	temp = NULL_TREE;
1116       else if (temp)
1117 	{
1118 	  /* The wrapper is on the RHS of an assignment that we're pushing
1119 	     down.  */
1120 	  gcc_assert (TREE_CODE (temp) == INIT_EXPR
1121 		      || TREE_CODE (temp) == MODIFY_EXPR);
1122 	  TREE_OPERAND (temp, 1) = *p;
1123 	  *p = temp;
1124 	}
1125       else
1126 	{
1127 	  temp = create_tmp_var (type, "retval");
1128 	  *p = build2 (INIT_EXPR, type, temp, *p);
1129 	}
1130 
1131       return temp;
1132     }
1133 
1134   return NULL_TREE;
1135 }
1136 
1137 /* Prepare calls to builtins to SAVE and RESTORE the stack as well as
1138    a temporary through which they communicate.  */
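/* An illustrative sketch of the pair built below (saved_stack.N stands for
   the temporary created here):

     saved_stack.N = __builtin_stack_save ();
     ...
     __builtin_stack_restore (saved_stack.N);

   The caller (see gimplify_bind_expr) places the body between the two.  */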
1139 
1140 static void
1141 build_stack_save_restore (gimple *save, gimple *restore)
1142 {
1143   tree tmp_var;
1144 
1145   *save = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_SAVE), 0);
1146   tmp_var = create_tmp_var (ptr_type_node, "saved_stack");
1147   gimple_call_set_lhs (*save, tmp_var);
1148 
1149   *restore
1150     = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_RESTORE),
1151 			 1, tmp_var);
1152 }
1153 
1154 /* Gimplify a BIND_EXPR.  Just voidify and recurse.  */
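/* An illustrative sketch of the result for a bind that needs stack saving:

     saved_stack.N = __builtin_stack_save ();
     try
       {
         <gimplified body>
       }
     finally
       {
         __builtin_stack_restore (saved_stack.N);
         <clobbers for out-of-scope variables that live in memory>
       }

   all of which ends up as the body of the GIMPLE_BIND built below.  */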
1155 
1156 static enum gimplify_status
1157 gimplify_bind_expr (tree *expr_p, gimple_seq *pre_p)
1158 {
1159   tree bind_expr = *expr_p;
1160   bool old_save_stack = gimplify_ctxp->save_stack;
1161   tree t;
1162   gimple gimple_bind;
1163   gimple_seq body, cleanup;
1164   gimple stack_save;
1165 
1166   tree temp = voidify_wrapper_expr (bind_expr, NULL);
1167 
1168   /* Mark variables seen in this bind expr.  */
1169   for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
1170     {
1171       if (TREE_CODE (t) == VAR_DECL)
1172 	{
1173 	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
1174 
1175 	  /* Mark variable as local.  */
1176 	  if (ctx && !DECL_EXTERNAL (t)
1177 	      && (! DECL_SEEN_IN_BIND_EXPR_P (t)
1178 		  || splay_tree_lookup (ctx->variables,
1179 					(splay_tree_key) t) == NULL))
1180 	    omp_add_variable (gimplify_omp_ctxp, t, GOVD_LOCAL | GOVD_SEEN);
1181 
1182 	  DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
1183 
1184 	  if (DECL_HARD_REGISTER (t) && !is_global_var (t) && cfun)
1185 	    cfun->has_local_explicit_reg_vars = true;
1186 	}
1187 
1188       /* Preliminarily mark non-addressed complex variables as eligible
1189 	 for promotion to gimple registers.  We'll transform their uses
1190 	 as we find them.  */
1191       if ((TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
1192 	   || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
1193 	  && !TREE_THIS_VOLATILE (t)
1194 	  && (TREE_CODE (t) == VAR_DECL && !DECL_HARD_REGISTER (t))
1195 	  && !needs_to_live_in_memory (t))
1196 	DECL_GIMPLE_REG_P (t) = 1;
1197     }
1198 
1199   gimple_bind = gimple_build_bind (BIND_EXPR_VARS (bind_expr), NULL,
1200                                    BIND_EXPR_BLOCK (bind_expr));
1201   gimple_push_bind_expr (gimple_bind);
1202 
1203   gimplify_ctxp->save_stack = false;
1204 
1205   /* Gimplify the body into the GIMPLE_BIND tuple's body.  */
1206   body = NULL;
1207   gimplify_stmt (&BIND_EXPR_BODY (bind_expr), &body);
1208   gimple_bind_set_body (gimple_bind, body);
1209 
1210   cleanup = NULL;
1211   stack_save = NULL;
1212   if (gimplify_ctxp->save_stack)
1213     {
1214       gimple stack_restore;
1215 
1216       /* Save stack on entry and restore it on exit.  Add a try_finally
1217 	 block to achieve this.  Note that mudflap depends on the
1218 	 format of the emitted code: see mx_register_decls().  */
1219       build_stack_save_restore (&stack_save, &stack_restore);
1220 
1221       gimplify_seq_add_stmt (&cleanup, stack_restore);
1222     }
1223 
1224   /* Add clobbers for all variables that go out of scope.  */
1225   for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
1226     {
1227       if (TREE_CODE (t) == VAR_DECL
1228 	  && !is_global_var (t)
1229 	  && DECL_CONTEXT (t) == current_function_decl
1230 	  && !DECL_HARD_REGISTER (t)
1231 	  && !TREE_THIS_VOLATILE (t)
1232 	  && !DECL_HAS_VALUE_EXPR_P (t)
1233 	  /* Only care for variables that have to be in memory.  Others
1234 	     will be rewritten into SSA names, hence moved to the top-level.  */
1235 	  && !is_gimple_reg (t))
1236 	{
1237 	  tree clobber = build_constructor (TREE_TYPE (t), NULL);
1238 	  TREE_THIS_VOLATILE (clobber) = 1;
1239 	  gimplify_seq_add_stmt (&cleanup, gimple_build_assign (t, clobber));
1240 	}
1241     }
1242 
1243   if (cleanup)
1244     {
1245       gimple gs;
1246       gimple_seq new_body;
1247 
1248       new_body = NULL;
1249       gs = gimple_build_try (gimple_bind_body (gimple_bind), cleanup,
1250 	  		     GIMPLE_TRY_FINALLY);
1251 
1252       if (stack_save)
1253 	gimplify_seq_add_stmt (&new_body, stack_save);
1254       gimplify_seq_add_stmt (&new_body, gs);
1255       gimple_bind_set_body (gimple_bind, new_body);
1256     }
1257 
1258   gimplify_ctxp->save_stack = old_save_stack;
1259   gimple_pop_bind_expr ();
1260 
1261   gimplify_seq_add_stmt (pre_p, gimple_bind);
1262 
1263   if (temp)
1264     {
1265       *expr_p = temp;
1266       return GS_OK;
1267     }
1268 
1269   *expr_p = NULL_TREE;
1270   return GS_ALL_DONE;
1271 }
1272 
1273 /* Gimplify a RETURN_EXPR.  If the expression to be returned is not a
1274    GIMPLE value, it is assigned to a new temporary and the statement is
1275    re-written to return the temporary.
1276 
1277    PRE_P points to the sequence where side effects that must happen before
1278    STMT should be stored.  */
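/* An illustrative example: "return x + y;" for a scalar return type arrives
   as a RETURN_EXPR whose operand assigns x + y to the RESULT_DECL; the code
   below redirects that assignment to a temporary register, gimplifies it
   into PRE_P and emits a GIMPLE_RETURN of the temporary.  */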
1279 
1280 static enum gimplify_status
1281 gimplify_return_expr (tree stmt, gimple_seq *pre_p)
1282 {
1283   gimple ret;
1284   tree ret_expr = TREE_OPERAND (stmt, 0);
1285   tree result_decl, result;
1286 
1287   if (ret_expr == error_mark_node)
1288     return GS_ERROR;
1289 
1290   if (!ret_expr
1291       || TREE_CODE (ret_expr) == RESULT_DECL
1292       || ret_expr == error_mark_node)
1293     {
1294       gimple ret = gimple_build_return (ret_expr);
1295       gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
1296       gimplify_seq_add_stmt (pre_p, ret);
1297       return GS_ALL_DONE;
1298     }
1299 
1300   if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
1301     result_decl = NULL_TREE;
1302   else
1303     {
1304       result_decl = TREE_OPERAND (ret_expr, 0);
1305 
1306       /* See through a return by reference.  */
1307       if (TREE_CODE (result_decl) == INDIRECT_REF)
1308 	result_decl = TREE_OPERAND (result_decl, 0);
1309 
1310       gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR
1311 		   || TREE_CODE (ret_expr) == INIT_EXPR)
1312 		  && TREE_CODE (result_decl) == RESULT_DECL);
1313     }
1314 
1315   /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
1316      Recall that aggregate_value_p is FALSE for any aggregate type that is
1317      returned in registers.  If we're returning values in registers, then
1318      we don't want to extend the lifetime of the RESULT_DECL, particularly
1319      across another call.  In addition, for those aggregates for which
1320      hard_function_value generates a PARALLEL, we'll die during normal
1321      expansion of structure assignments; there's special code in expand_return
1322      to handle this case that does not exist in expand_expr.  */
1323   if (!result_decl)
1324     result = NULL_TREE;
1325   else if (aggregate_value_p (result_decl, TREE_TYPE (current_function_decl)))
1326     {
1327       if (TREE_CODE (DECL_SIZE (result_decl)) != INTEGER_CST)
1328 	{
1329 	  if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (result_decl)))
1330 	    gimplify_type_sizes (TREE_TYPE (result_decl), pre_p);
1331 	  /* Note that we don't use gimplify_vla_decl because the RESULT_DECL
1332 	     should be effectively allocated by the caller, i.e. all calls to
1333 	     this function must be subject to the Return Slot Optimization.  */
1334 	  gimplify_one_sizepos (&DECL_SIZE (result_decl), pre_p);
1335 	  gimplify_one_sizepos (&DECL_SIZE_UNIT (result_decl), pre_p);
1336 	}
1337       result = result_decl;
1338     }
1339   else if (gimplify_ctxp->return_temp)
1340     result = gimplify_ctxp->return_temp;
1341   else
1342     {
1343       result = create_tmp_reg (TREE_TYPE (result_decl), NULL);
1344 
1345       /* ??? With complex control flow (usually involving abnormal edges),
1346 	 we can wind up warning about an uninitialized value for this.  Due
1347 	 to how this variable is constructed and initialized, this is never
1348 	 true.  Give up and never warn.  */
1349       TREE_NO_WARNING (result) = 1;
1350 
1351       gimplify_ctxp->return_temp = result;
1352     }
1353 
1354   /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
1355      Then gimplify the whole thing.  */
1356   if (result != result_decl)
1357     TREE_OPERAND (ret_expr, 0) = result;
1358 
1359   gimplify_and_add (TREE_OPERAND (stmt, 0), pre_p);
1360 
1361   ret = gimple_build_return (result);
1362   gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
1363   gimplify_seq_add_stmt (pre_p, ret);
1364 
1365   return GS_ALL_DONE;
1366 }
1367 
1368 /* Gimplify a variable-length array DECL.  */
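/* An illustrative sketch: for "char buf[n];" a pointer temporary is set to
   __builtin_alloca_with_align (size, align), where size is DECL_SIZE_UNIT
   and align is DECL_ALIGN of the decl, and the indirection of that temporary
   becomes buf's DECL_VALUE_EXPR, so later uses of buf are rewritten as the
   indirection.  */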
1369 
1370 static void
1371 gimplify_vla_decl (tree decl, gimple_seq *seq_p)
1372 {
1373   /* This is a variable-sized decl.  Simplify its size and mark it
1374      for deferred expansion.  Note that mudflap depends on the format
1375      of the emitted code: see mx_register_decls().  */
1376   tree t, addr, ptr_type;
1377 
1378   gimplify_one_sizepos (&DECL_SIZE (decl), seq_p);
1379   gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), seq_p);
1380 
1381   /* All occurrences of this decl in final gimplified code will be
1382      replaced by indirection.  Setting DECL_VALUE_EXPR does two
1383      things: First, it lets the rest of the gimplifier know what
1384      replacement to use.  Second, it lets the debug info know
1385      where to find the value.  */
1386   ptr_type = build_pointer_type (TREE_TYPE (decl));
1387   addr = create_tmp_var (ptr_type, get_name (decl));
1388   DECL_IGNORED_P (addr) = 0;
1389   t = build_fold_indirect_ref (addr);
1390   TREE_THIS_NOTRAP (t) = 1;
1391   SET_DECL_VALUE_EXPR (decl, t);
1392   DECL_HAS_VALUE_EXPR_P (decl) = 1;
1393 
1394   t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
1395   t = build_call_expr (t, 2, DECL_SIZE_UNIT (decl),
1396 		       size_int (DECL_ALIGN (decl)));
1397   /* The call has been built for a variable-sized object.  */
1398   CALL_ALLOCA_FOR_VAR_P (t) = 1;
1399   t = fold_convert (ptr_type, t);
1400   t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
1401 
1402   gimplify_and_add (t, seq_p);
1403 
1404   /* Indicate that we need to restore the stack level when the
1405      enclosing BIND_EXPR is exited.  */
1406   gimplify_ctxp->save_stack = true;
1407 }
1408 
1409 /* Gimplify a DECL_EXPR node *STMT_P by making any necessary allocation
1410    and initialization explicit.  */
1411 
1412 static enum gimplify_status
1413 gimplify_decl_expr (tree *stmt_p, gimple_seq *seq_p)
1414 {
1415   tree stmt = *stmt_p;
1416   tree decl = DECL_EXPR_DECL (stmt);
1417 
1418   *stmt_p = NULL_TREE;
1419 
1420   if (TREE_TYPE (decl) == error_mark_node)
1421     return GS_ERROR;
1422 
1423   if ((TREE_CODE (decl) == TYPE_DECL
1424        || TREE_CODE (decl) == VAR_DECL)
1425       && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl)))
1426     gimplify_type_sizes (TREE_TYPE (decl), seq_p);
1427 
1428   /* ??? DECL_ORIGINAL_TYPE is streamed for LTO so it needs to be gimplified
1429      in case its size expressions contain problematic nodes like CALL_EXPR.  */
1430   if (TREE_CODE (decl) == TYPE_DECL
1431       && DECL_ORIGINAL_TYPE (decl)
1432       && !TYPE_SIZES_GIMPLIFIED (DECL_ORIGINAL_TYPE (decl)))
1433     gimplify_type_sizes (DECL_ORIGINAL_TYPE (decl), seq_p);
1434 
1435   if (TREE_CODE (decl) == VAR_DECL && !DECL_EXTERNAL (decl))
1436     {
1437       tree init = DECL_INITIAL (decl);
1438 
1439       if (TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
1440 	  || (!TREE_STATIC (decl)
1441 	      && flag_stack_check == GENERIC_STACK_CHECK
1442 	      && compare_tree_int (DECL_SIZE_UNIT (decl),
1443 				   STACK_CHECK_MAX_VAR_SIZE) > 0))
1444 	gimplify_vla_decl (decl, seq_p);
1445 
1446       /* Some front ends do not explicitly declare all anonymous
1447 	 artificial variables.  We compensate here by declaring the
1448 	 variables, though it would be better if the front ends would
1449 	 explicitly declare them.  */
1450       if (!DECL_SEEN_IN_BIND_EXPR_P (decl)
1451 	  && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
1452 	gimple_add_tmp_var (decl);
1453 
1454       if (init && init != error_mark_node)
1455 	{
1456 	  if (!TREE_STATIC (decl))
1457 	    {
1458 	      DECL_INITIAL (decl) = NULL_TREE;
1459 	      init = build2 (INIT_EXPR, void_type_node, decl, init);
1460 	      gimplify_and_add (init, seq_p);
1461 	      ggc_free (init);
1462 	    }
1463 	  else
1464 	    /* We must still examine initializers for static variables
1465 	       as they may contain a label address.  */
1466 	    walk_tree (&init, force_labels_r, NULL, NULL);
1467 	}
1468     }
1469 
1470   return GS_ALL_DONE;
1471 }
1472 
1473 /* Gimplify a LOOP_EXPR.  Normally this just involves gimplifying the body
1474    and replacing the LOOP_EXPR with goto, but if the loop contains an
1475    EXIT_EXPR, we need to append a label for it to jump to.  */
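/* Schematically (an illustration, not code from this file):

     LOOP_EXPR <body>

   becomes

     start_label:
       body
       goto start_label;
     exit_label:   <- emitted only if the body contained an EXIT_EXPR  */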
1476 
1477 static enum gimplify_status
1478 gimplify_loop_expr (tree *expr_p, gimple_seq *pre_p)
1479 {
1480   tree saved_label = gimplify_ctxp->exit_label;
1481   tree start_label = create_artificial_label (UNKNOWN_LOCATION);
1482 
1483   gimplify_seq_add_stmt (pre_p, gimple_build_label (start_label));
1484 
1485   gimplify_ctxp->exit_label = NULL_TREE;
1486 
1487   gimplify_and_add (LOOP_EXPR_BODY (*expr_p), pre_p);
1488 
1489   gimplify_seq_add_stmt (pre_p, gimple_build_goto (start_label));
1490 
1491   if (gimplify_ctxp->exit_label)
1492     gimplify_seq_add_stmt (pre_p,
1493 			   gimple_build_label (gimplify_ctxp->exit_label));
1494 
1495   gimplify_ctxp->exit_label = saved_label;
1496 
1497   *expr_p = NULL;
1498   return GS_ALL_DONE;
1499 }
1500 
1501 /* Gimplify a statement list onto a sequence.  These may be created either
1502    by an enlightened front-end, or by shortcut_cond_expr.  */
1503 
1504 static enum gimplify_status
1505 gimplify_statement_list (tree *expr_p, gimple_seq *pre_p)
1506 {
1507   tree temp = voidify_wrapper_expr (*expr_p, NULL);
1508 
1509   tree_stmt_iterator i = tsi_start (*expr_p);
1510 
1511   while (!tsi_end_p (i))
1512     {
1513       gimplify_stmt (tsi_stmt_ptr (i), pre_p);
1514       tsi_delink (&i);
1515     }
1516 
1517   if (temp)
1518     {
1519       *expr_p = temp;
1520       return GS_OK;
1521     }
1522 
1523   return GS_ALL_DONE;
1524 }
1525 
1526 /* Compare two case labels.  Because the front end should already have
1527    made sure that case ranges do not overlap, it is enough to only compare
1528    the CASE_LOW values of each case label.  */
1529 
1530 static int
1531 compare_case_labels (const void *p1, const void *p2)
1532 {
1533   const_tree const case1 = *(const_tree const*)p1;
1534   const_tree const case2 = *(const_tree const*)p2;
1535 
1536   /* The 'default' case label always goes first.  */
1537   if (!CASE_LOW (case1))
1538     return -1;
1539   else if (!CASE_LOW (case2))
1540     return 1;
1541   else
1542     return tree_int_cst_compare (CASE_LOW (case1), CASE_LOW (case2));
1543 }
1544 
1545 /* Sort the case labels in LABEL_VEC in place in ascending order.  */
1546 
1547 void
1548 sort_case_labels (VEC(tree,heap)* label_vec)
1549 {
1550   VEC_qsort (tree, label_vec, compare_case_labels);
1551 }
1552 
1553 /* Gimplify a SWITCH_EXPR, and collect the vector of labels it can
1554    branch to.  */
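/* An illustrative sketch: the switch condition is gimplified to a value, the
   body is gimplified while its CASE_LABEL_EXPRs accumulate in
   gimplify_ctxp->case_labels, empty case ranges are dropped, a default label
   is synthesized if the body provides none, and the result is emitted as a
   GIMPLE_SWITCH followed by the gimplified body.  */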
1555 
1556 static enum gimplify_status
1557 gimplify_switch_expr (tree *expr_p, gimple_seq *pre_p)
1558 {
1559   tree switch_expr = *expr_p;
1560   gimple_seq switch_body_seq = NULL;
1561   enum gimplify_status ret;
1562 
1563   ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL, is_gimple_val,
1564                        fb_rvalue);
1565   if (ret == GS_ERROR || ret == GS_UNHANDLED)
1566     return ret;
1567 
1568   if (SWITCH_BODY (switch_expr))
1569     {
1570       VEC (tree,heap) *labels;
1571       VEC (tree,heap) *saved_labels;
1572       tree default_case = NULL_TREE;
1573       size_t i, len;
1574       gimple gimple_switch;
1575 
1576       /* If someone can be bothered to fill in the labels, they can
1577 	 be bothered to null out the body too.  */
1578       gcc_assert (!SWITCH_LABELS (switch_expr));
1579 
1580       /* Save old labels, get new ones from the body, then restore the old
1581          labels.  Save all the things from the switch body to append afterwards.  */
1582       saved_labels = gimplify_ctxp->case_labels;
1583       gimplify_ctxp->case_labels = VEC_alloc (tree, heap, 8);
1584 
1585       gimplify_stmt (&SWITCH_BODY (switch_expr), &switch_body_seq);
1586       labels = gimplify_ctxp->case_labels;
1587       gimplify_ctxp->case_labels = saved_labels;
1588 
1589       i = 0;
1590       while (i < VEC_length (tree, labels))
1591 	{
1592 	  tree elt = VEC_index (tree, labels, i);
1593 	  tree low = CASE_LOW (elt);
1594 	  bool remove_element = FALSE;
1595 
1596 	  if (low)
1597 	    {
1598 	      /* Discard empty ranges.  */
1599 	      tree high = CASE_HIGH (elt);
1600 	      if (high && tree_int_cst_lt (high, low))
1601 	        remove_element = TRUE;
1602 	    }
1603 	  else
1604 	    {
1605 	      /* The default case must be the last label in the list.  */
1606 	      gcc_assert (!default_case);
1607 	      default_case = elt;
1608 	      remove_element = TRUE;
1609 	    }
1610 
1611 	  if (remove_element)
1612 	    VEC_ordered_remove (tree, labels, i);
1613 	  else
1614 	    i++;
1615 	}
1616       len = i;
1617 
1618       if (!VEC_empty (tree, labels))
1619 	sort_case_labels (labels);
1620 
1621       if (!default_case)
1622 	{
1623 	  tree type = TREE_TYPE (switch_expr);
1624 
1625 	  /* If the switch has no default label, add one, so that we jump
1626 	     around the switch body.  If the labels already cover the whole
1627 	     range of type, add the default label pointing to one of the
1628 	     existing labels.  */
1629 	  if (type == void_type_node)
1630 	    type = TREE_TYPE (SWITCH_COND (switch_expr));
1631 	  if (len
1632 	      && INTEGRAL_TYPE_P (type)
1633 	      && TYPE_MIN_VALUE (type)
1634 	      && TYPE_MAX_VALUE (type)
1635 	      && tree_int_cst_equal (CASE_LOW (VEC_index (tree, labels, 0)),
1636 				     TYPE_MIN_VALUE (type)))
1637 	    {
1638 	      tree low, high = CASE_HIGH (VEC_index (tree, labels, len - 1));
1639 	      if (!high)
1640 		high = CASE_LOW (VEC_index (tree, labels, len - 1));
1641 	      if (tree_int_cst_equal (high, TYPE_MAX_VALUE (type)))
1642 		{
1643 		  for (i = 1; i < len; i++)
1644 		    {
1645 		      high = CASE_LOW (VEC_index (tree, labels, i));
1646 		      low = CASE_HIGH (VEC_index (tree, labels, i - 1));
1647 		      if (!low)
1648 			low = CASE_LOW (VEC_index (tree, labels, i - 1));
1649 		      if ((TREE_INT_CST_LOW (low) + 1
1650 			   != TREE_INT_CST_LOW (high))
1651 			  || (TREE_INT_CST_HIGH (low)
1652 			      + (TREE_INT_CST_LOW (high) == 0)
1653 			      != TREE_INT_CST_HIGH (high)))
1654 			break;
1655 		    }
1656 		  if (i == len)
1657 		    {
1658 		      tree label = CASE_LABEL (VEC_index (tree, labels, 0));
1659 		      default_case = build_case_label (NULL_TREE, NULL_TREE,
1660 						       label);
1661 		    }
1662 		}
1663 	    }
1664 
1665 	  if (!default_case)
1666 	    {
1667 	      gimple new_default;
1668 
1669 	      default_case
1670 		= build_case_label (NULL_TREE, NULL_TREE,
1671 				    create_artificial_label (UNKNOWN_LOCATION));
1672 	      new_default = gimple_build_label (CASE_LABEL (default_case));
1673 	      gimplify_seq_add_stmt (&switch_body_seq, new_default);
1674 	    }
1675 	}
1676 
1677       gimple_switch = gimple_build_switch_vec (SWITCH_COND (switch_expr),
1678                                                default_case, labels);
1679       gimplify_seq_add_stmt (pre_p, gimple_switch);
1680       gimplify_seq_add_seq (pre_p, switch_body_seq);
1681       VEC_free(tree, heap, labels);
1682     }
1683   else
1684     gcc_assert (SWITCH_LABELS (switch_expr));
1685 
1686   return GS_ALL_DONE;
1687 }
1688 
1689 /* Gimplify the CASE_LABEL_EXPR pointed to by EXPR_P.  */
1690 
1691 static enum gimplify_status
1692 gimplify_case_label_expr (tree *expr_p, gimple_seq *pre_p)
1693 {
1694   struct gimplify_ctx *ctxp;
1695   gimple gimple_label;
1696 
1697   /* Invalid OpenMP programs can play Duff's Device type games with
1698      #pragma omp parallel.  At least in the C front end, we don't
1699      detect such invalid branches until after gimplification.  */
1700   for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context)
1701     if (ctxp->case_labels)
1702       break;
1703 
1704   gimple_label = gimple_build_label (CASE_LABEL (*expr_p));
1705   VEC_safe_push (tree, heap, ctxp->case_labels, *expr_p);
1706   gimplify_seq_add_stmt (pre_p, gimple_label);
1707 
1708   return GS_ALL_DONE;
1709 }
1710 
1711 /* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
1712    if necessary.  */
1713 
1714 tree
1715 build_and_jump (tree *label_p)
1716 {
1717   if (label_p == NULL)
1718     /* If there's nowhere to jump, just fall through.  */
1719     return NULL_TREE;
1720 
1721   if (*label_p == NULL_TREE)
1722     {
1723       tree label = create_artificial_label (UNKNOWN_LOCATION);
1724       *label_p = label;
1725     }
1726 
1727   return build1 (GOTO_EXPR, void_type_node, *label_p);
1728 }
1729 
1730 /* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
1731    This also involves building a label to jump to and communicating it to
1732    gimplify_loop_expr through gimplify_ctxp->exit_label.  */
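
/* A rough illustration (hypothetical GENERIC, names invented): inside a
   LOOP_EXPR the statement

     EXIT_EXPR <cond>;

   is rewritten here into

     if (cond) goto <exit_label>;

   with <exit_label> recorded in gimplify_ctxp->exit_label so that
   gimplify_loop_expr can emit it after the loop.  */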
1733 
1734 static enum gimplify_status
1735 gimplify_exit_expr (tree *expr_p)
1736 {
1737   tree cond = TREE_OPERAND (*expr_p, 0);
1738   tree expr;
1739 
1740   expr = build_and_jump (&gimplify_ctxp->exit_label);
1741   expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE);
1742   *expr_p = expr;
1743 
1744   return GS_OK;
1745 }
1746 
1747 /* A helper function to be called via walk_tree.  Mark all labels under *TP
1748    as being forced.  To be called for DECL_INITIAL of static variables.  */
1749 
1750 tree
1751 force_labels_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1752 {
1753   if (TYPE_P (*tp))
1754     *walk_subtrees = 0;
1755   if (TREE_CODE (*tp) == LABEL_DECL)
1756     FORCED_LABEL (*tp) = 1;
1757 
1758   return NULL_TREE;
1759 }
1760 
1761 /* *EXPR_P is a COMPONENT_REF being used as an rvalue.  If its type is
1762    different from its canonical type, wrap the whole thing inside a
1763    NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
1764    type.
1765 
1766    The canonical type of a COMPONENT_REF is the type of the field being
1767    referenced--unless the field is a bit-field which can be read directly
1768    in a smaller mode, in which case the canonical type is the
1769    sign-appropriate type corresponding to that mode.  */
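
/* A hedged example (field layout invented): given

     struct S { int f : 3; } s;

   an rvalue read of s.f may be performed in a mode narrower than int,
   so the type of the COMPONENT_REF is forced to the sign-appropriate
   narrow integral type that get_unwidened reports, with the qualifiers
   of the object "s" propagated onto it.  */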
1770 
1771 static void
1772 canonicalize_component_ref (tree *expr_p)
1773 {
1774   tree expr = *expr_p;
1775   tree type;
1776 
1777   gcc_assert (TREE_CODE (expr) == COMPONENT_REF);
1778 
1779   if (INTEGRAL_TYPE_P (TREE_TYPE (expr)))
1780     type = TREE_TYPE (get_unwidened (expr, NULL_TREE));
1781   else
1782     type = TREE_TYPE (TREE_OPERAND (expr, 1));
1783 
1784   /* One could argue that all the stuff below is not necessary for
1785      the non-bitfield case and declare it a FE error if type
1786      adjustment would be needed.  */
1787   if (TREE_TYPE (expr) != type)
1788     {
1789 #ifdef ENABLE_TYPES_CHECKING
1790       tree old_type = TREE_TYPE (expr);
1791 #endif
1792       int type_quals;
1793 
1794       /* We need to preserve qualifiers and propagate them from
1795 	 operand 0.  */
1796       type_quals = TYPE_QUALS (type)
1797 	| TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr, 0)));
1798       if (TYPE_QUALS (type) != type_quals)
1799 	type = build_qualified_type (TYPE_MAIN_VARIANT (type), type_quals);
1800 
1801       /* Set the type of the COMPONENT_REF to the underlying type.  */
1802       TREE_TYPE (expr) = type;
1803 
1804 #ifdef ENABLE_TYPES_CHECKING
1805       /* It is now a FE error, if the conversion from the canonical
1806 	 type to the original expression type is not useless.  */
1807       gcc_assert (useless_type_conversion_p (old_type, type));
1808 #endif
1809     }
1810 }
1811 
1812 /* If a NOP conversion is changing a pointer to array of foo to a pointer
1813    to foo, embed that change in the ADDR_EXPR by converting
1814       T array[U];
1815       (T *)&array
1816    ==>
1817       &array[L]
1818    where L is the lower bound.  For simplicity, only do this for constant
1819    lower bound.
1820    The constraint is that the type of &array[L] is trivially convertible
1821    to T *.  */
1822 
1823 static void
1824 canonicalize_addr_expr (tree *expr_p)
1825 {
1826   tree expr = *expr_p;
1827   tree addr_expr = TREE_OPERAND (expr, 0);
1828   tree datype, ddatype, pddatype;
1829 
1830   /* We simplify only conversions from an ADDR_EXPR to a pointer type.  */
1831   if (!POINTER_TYPE_P (TREE_TYPE (expr))
1832       || TREE_CODE (addr_expr) != ADDR_EXPR)
1833     return;
1834 
1835   /* The addr_expr type should be a pointer to an array.  */
1836   datype = TREE_TYPE (TREE_TYPE (addr_expr));
1837   if (TREE_CODE (datype) != ARRAY_TYPE)
1838     return;
1839 
1840   /* The pointer to element type shall be trivially convertible to
1841      the expression pointer type.  */
1842   ddatype = TREE_TYPE (datype);
1843   pddatype = build_pointer_type (ddatype);
1844   if (!useless_type_conversion_p (TYPE_MAIN_VARIANT (TREE_TYPE (expr)),
1845 				  pddatype))
1846     return;
1847 
1848   /* The lower bound and element sizes must be constant.  */
1849   if (!TYPE_SIZE_UNIT (ddatype)
1850       || TREE_CODE (TYPE_SIZE_UNIT (ddatype)) != INTEGER_CST
1851       || !TYPE_DOMAIN (datype) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype))
1852       || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype))) != INTEGER_CST)
1853     return;
1854 
1855   /* All checks succeeded.  Build a new node to merge the cast.  */
1856   *expr_p = build4 (ARRAY_REF, ddatype, TREE_OPERAND (addr_expr, 0),
1857 		    TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
1858 		    NULL_TREE, NULL_TREE);
1859   *expr_p = build1 (ADDR_EXPR, pddatype, *expr_p);
1860 
1861   /* We can have stripped a required restrict qualifier above.  */
1862   if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
1863     *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
1864 }
1865 
1866 /* *EXPR_P is a NOP_EXPR or CONVERT_EXPR.  Remove it and/or other conversions
1867    underneath as appropriate.  */
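
/* E.g. (illustrative): sign- and precision-preserving NOPs below the
   outermost conversion are stripped first; the outermost conversion is
   then dropped as well if it is useless, and a conversion to a
   non-register (aggregate) type is rewritten as a VIEW_CONVERT_EXPR.  */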
1868 
1869 static enum gimplify_status
1870 gimplify_conversion (tree *expr_p)
1871 {
1872   location_t loc = EXPR_LOCATION (*expr_p);
1873   gcc_assert (CONVERT_EXPR_P (*expr_p));
1874 
1875   /* Then strip away all but the outermost conversion.  */
1876   STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p, 0));
1877 
1878   /* And remove the outermost conversion if it's useless.  */
1879   if (tree_ssa_useless_type_conversion (*expr_p))
1880     *expr_p = TREE_OPERAND (*expr_p, 0);
1881 
1882   /* If we still have a conversion at the toplevel,
1883      then canonicalize some constructs.  */
1884   if (CONVERT_EXPR_P (*expr_p))
1885     {
1886       tree sub = TREE_OPERAND (*expr_p, 0);
1887 
1888       /* If a NOP conversion is changing the type of a COMPONENT_REF
1889 	 expression, then canonicalize its type now in order to expose more
1890 	 redundant conversions.  */
1891       if (TREE_CODE (sub) == COMPONENT_REF)
1892 	canonicalize_component_ref (&TREE_OPERAND (*expr_p, 0));
1893 
1894       /* If a NOP conversion is changing a pointer to array of foo
1895 	 to a pointer to foo, embed that change in the ADDR_EXPR.  */
1896       else if (TREE_CODE (sub) == ADDR_EXPR)
1897 	canonicalize_addr_expr (expr_p);
1898     }
1899 
1900   /* If we have a conversion to a non-register type force the
1901      use of a VIEW_CONVERT_EXPR instead.  */
1902   if (CONVERT_EXPR_P (*expr_p) && !is_gimple_reg_type (TREE_TYPE (*expr_p)))
1903     *expr_p = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (*expr_p),
1904 			       TREE_OPERAND (*expr_p, 0));
1905 
1906   return GS_OK;
1907 }
1908 
1909 /* Nonlocal VLAs seen in the current function.  */
1910 static struct pointer_set_t *nonlocal_vlas;
1911 
1912 /* Gimplify a VAR_DECL or PARM_DECL.  Return GS_OK if we expanded a
1913    DECL_VALUE_EXPR, and it's worth re-examining things.  */
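
/* A hedged example (names invented): for a reference to a C99 VLA

     int a[n];

   the front end typically gives "a" a DECL_VALUE_EXPR of the form *a.0,
   where a.0 points at the allocated storage; this function substitutes
   an unshared copy of that indirection for the decl, and for nonlocal
   VLAs also adds a copy of the decl to the current function purely for
   debugging.  */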
1914 
1915 static enum gimplify_status
1916 gimplify_var_or_parm_decl (tree *expr_p)
1917 {
1918   tree decl = *expr_p;
1919 
1920   /* ??? If this is a local variable, and it has not been seen in any
1921      outer BIND_EXPR, then it's probably the result of a duplicate
1922      declaration, for which we've already issued an error.  It would
1923      be really nice if the front end wouldn't leak these at all.
1924      Currently the only known culprit is C++ destructors, as seen
1925      in g++.old-deja/g++.jason/binding.C.  */
1926   if (TREE_CODE (decl) == VAR_DECL
1927       && !DECL_SEEN_IN_BIND_EXPR_P (decl)
1928       && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
1929       && decl_function_context (decl) == current_function_decl)
1930     {
1931       gcc_assert (seen_error ());
1932       return GS_ERROR;
1933     }
1934 
1935   /* When within an OpenMP context, notice uses of variables.  */
1936   if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true))
1937     return GS_ALL_DONE;
1938 
1939   /* If the decl is an alias for another expression, substitute it now.  */
1940   if (DECL_HAS_VALUE_EXPR_P (decl))
1941     {
1942       tree value_expr = DECL_VALUE_EXPR (decl);
1943 
1944       /* For referenced nonlocal VLAs add a decl for debugging purposes
1945 	 to the current function.  */
1946       if (TREE_CODE (decl) == VAR_DECL
1947 	  && TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
1948 	  && nonlocal_vlas != NULL
1949 	  && TREE_CODE (value_expr) == INDIRECT_REF
1950 	  && TREE_CODE (TREE_OPERAND (value_expr, 0)) == VAR_DECL
1951 	  && decl_function_context (decl) != current_function_decl)
1952 	{
1953 	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
1954 	  while (ctx && ctx->region_type == ORT_WORKSHARE)
1955 	    ctx = ctx->outer_context;
1956 	  if (!ctx && !pointer_set_insert (nonlocal_vlas, decl))
1957 	    {
1958 	      tree copy = copy_node (decl), block;
1959 
1960 	      lang_hooks.dup_lang_specific_decl (copy);
1961 	      SET_DECL_RTL (copy, 0);
1962 	      TREE_USED (copy) = 1;
1963 	      block = DECL_INITIAL (current_function_decl);
1964 	      DECL_CHAIN (copy) = BLOCK_VARS (block);
1965 	      BLOCK_VARS (block) = copy;
1966 	      SET_DECL_VALUE_EXPR (copy, unshare_expr (value_expr));
1967 	      DECL_HAS_VALUE_EXPR_P (copy) = 1;
1968 	    }
1969 	}
1970 
1971       *expr_p = unshare_expr (value_expr);
1972       return GS_OK;
1973     }
1974 
1975   return GS_ALL_DONE;
1976 }
1977 
1978 /* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
1979    node *EXPR_P.
1980 
1981       compound_lval
1982 	      : min_lval '[' val ']'
1983 	      | min_lval '.' ID
1984 	      | compound_lval '[' val ']'
1985 	      | compound_lval '.' ID
1986 
1987    This is not part of the original SIMPLE definition, which separates
1988    array and member references, but it seems reasonable to handle them
1989    together.  Also, this way we don't run into problems with union
1990    aliasing; gcc requires that for accesses through a union to alias, the
1991    union reference must be explicit, which was not always the case when we
1992    were splitting up array and member refs.
1993 
1994    PRE_P points to the sequence where side effects that must happen before
1995      *EXPR_P should be stored.
1996 
1997    POST_P points to the sequence where side effects that must happen after
1998      *EXPR_P should be stored.  */
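
/* Sketch (temporaries invented): for an rvalue use of a[i + 1].f the
   walk records the ARRAY_REF and COMPONENT_REF, gimplifies any variable
   bounds, sizes and offsets first, then the base "a", and finally the
   index, roughly

     D.1 = i + 1;
     ... a[D.1].f ...

   so that everything is still evaluated in source order.  */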
1999 
2000 static enum gimplify_status
2001 gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
2002 			fallback_t fallback)
2003 {
2004   tree *p;
2005   VEC(tree,heap) *stack;
2006   enum gimplify_status ret = GS_ALL_DONE, tret;
2007   int i;
2008   location_t loc = EXPR_LOCATION (*expr_p);
2009   tree expr = *expr_p;
2010 
2011   /* Create a stack of the subexpressions so later we can walk them in
2012      order from inner to outer.  */
2013   stack = VEC_alloc (tree, heap, 10);
2014 
2015   /* We can handle anything that get_inner_reference can deal with.  */
2016   for (p = expr_p; ; p = &TREE_OPERAND (*p, 0))
2017     {
2018     restart:
2019       /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs.  */
2020       if (TREE_CODE (*p) == INDIRECT_REF)
2021 	*p = fold_indirect_ref_loc (loc, *p);
2022 
2023       if (handled_component_p (*p))
2024 	;
2025       /* Expand DECL_VALUE_EXPR now.  In some cases that may expose
2026 	 additional COMPONENT_REFs.  */
2027       else if ((TREE_CODE (*p) == VAR_DECL || TREE_CODE (*p) == PARM_DECL)
2028 	       && gimplify_var_or_parm_decl (p) == GS_OK)
2029 	goto restart;
2030       else
2031 	break;
2032 
2033       VEC_safe_push (tree, heap, stack, *p);
2034     }
2035 
2036   gcc_assert (VEC_length (tree, stack));
2037 
2038   /* Now STACK is a stack of pointers to all the refs we've walked through
2039      and P points to the innermost expression.
2040 
2041      Java requires that we elaborate nodes in source order.  That
2042      means we must gimplify the inner expression followed by each of
2043      the indices, in order.  But we can't gimplify the inner
2044      expression until we deal with any variable bounds, sizes, or
2045      positions in order to deal with PLACEHOLDER_EXPRs.
2046 
2047      So we do this in three steps.  First we deal with the annotations
2048      for any variables in the components, then we gimplify the base,
2049      then we gimplify any indices, from left to right.  */
2050   for (i = VEC_length (tree, stack) - 1; i >= 0; i--)
2051     {
2052       tree t = VEC_index (tree, stack, i);
2053 
2054       if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
2055 	{
2056 	  /* Gimplify the low bound and element type size and put them into
2057 	     the ARRAY_REF.  If these values are set, they have already been
2058 	     gimplified.  */
2059 	  if (TREE_OPERAND (t, 2) == NULL_TREE)
2060 	    {
2061 	      tree low = unshare_expr (array_ref_low_bound (t));
2062 	      if (!is_gimple_min_invariant (low))
2063 		{
2064 		  TREE_OPERAND (t, 2) = low;
2065 		  tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
2066 					post_p, is_gimple_reg,
2067 					fb_rvalue);
2068 		  ret = MIN (ret, tret);
2069 		}
2070 	    }
2071 	  else
2072 	    {
2073 	      tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
2074 				    is_gimple_reg, fb_rvalue);
2075 	      ret = MIN (ret, tret);
2076 	    }
2077 
2078 	  if (TREE_OPERAND (t, 3) == NULL_TREE)
2079 	    {
2080 	      tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0)));
2081 	      tree elmt_size = unshare_expr (array_ref_element_size (t));
2082 	      tree factor = size_int (TYPE_ALIGN_UNIT (elmt_type));
2083 
2084 	      /* Divide the element size by the alignment of the element
2085 		 type (above).  */
2086 	      elmt_size
2087 		= size_binop_loc (loc, EXACT_DIV_EXPR, elmt_size, factor);
2088 
2089 	      if (!is_gimple_min_invariant (elmt_size))
2090 		{
2091 		  TREE_OPERAND (t, 3) = elmt_size;
2092 		  tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p,
2093 					post_p, is_gimple_reg,
2094 					fb_rvalue);
2095 		  ret = MIN (ret, tret);
2096 		}
2097 	    }
2098 	  else
2099 	    {
2100 	      tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p, post_p,
2101 				    is_gimple_reg, fb_rvalue);
2102 	      ret = MIN (ret, tret);
2103 	    }
2104 	}
2105       else if (TREE_CODE (t) == COMPONENT_REF)
2106 	{
2107 	  /* Set the field offset into T and gimplify it.  */
2108 	  if (TREE_OPERAND (t, 2) == NULL_TREE)
2109 	    {
2110 	      tree offset = unshare_expr (component_ref_field_offset (t));
2111 	      tree field = TREE_OPERAND (t, 1);
2112 	      tree factor
2113 		= size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT);
2114 
2115 	      /* Divide the offset by its alignment.  */
2116 	      offset = size_binop_loc (loc, EXACT_DIV_EXPR, offset, factor);
2117 
2118 	      if (!is_gimple_min_invariant (offset))
2119 		{
2120 		  TREE_OPERAND (t, 2) = offset;
2121 		  tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
2122 					post_p, is_gimple_reg,
2123 					fb_rvalue);
2124 		  ret = MIN (ret, tret);
2125 		}
2126 	    }
2127 	  else
2128 	    {
2129 	      tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
2130 				    is_gimple_reg, fb_rvalue);
2131 	      ret = MIN (ret, tret);
2132 	    }
2133 	}
2134     }
2135 
2136   /* Step 2 is to gimplify the base expression.  Make sure lvalue is set
2137      so as to match the min_lval predicate.  Failure to do so may result
2138      in the creation of large aggregate temporaries.  */
2139   tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval,
2140 			fallback | fb_lvalue);
2141   ret = MIN (ret, tret);
2142 
2143   /* And finally, the indices and operands to BIT_FIELD_REF.  During this
2144      loop we also remove any useless conversions.  */
2145   for (; VEC_length (tree, stack) > 0; )
2146     {
2147       tree t = VEC_pop (tree, stack);
2148 
2149       if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
2150 	{
2151 	  /* Gimplify the dimension.  */
2152 	  if (!is_gimple_min_invariant (TREE_OPERAND (t, 1)))
2153 	    {
2154 	      tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
2155 				    is_gimple_val, fb_rvalue);
2156 	      ret = MIN (ret, tret);
2157 	    }
2158 	}
2159       else if (TREE_CODE (t) == BIT_FIELD_REF)
2160 	{
2161 	  tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
2162 				is_gimple_val, fb_rvalue);
2163 	  ret = MIN (ret, tret);
2164 	  tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
2165 				is_gimple_val, fb_rvalue);
2166 	  ret = MIN (ret, tret);
2167 	}
2168 
2169       STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t, 0));
2170 
2171       /* The innermost expression P may have originally had
2172 	 TREE_SIDE_EFFECTS set which would have caused all the outer
2173 	 expressions in *EXPR_P leading to P to also have had
2174 	 TREE_SIDE_EFFECTS set.  */
2175       recalculate_side_effects (t);
2176     }
2177 
2178   /* If the outermost expression is a COMPONENT_REF, canonicalize its type.  */
2179   if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF)
2180     {
2181       canonicalize_component_ref (expr_p);
2182     }
2183 
2184   VEC_free (tree, heap, stack);
2185 
2186   gcc_assert (*expr_p == expr || ret != GS_ALL_DONE);
2187 
2188   return ret;
2189 }
2190 
2191 /*  Gimplify the self modifying expression pointed to by EXPR_P
2192     (++, --, +=, -=).
2193 
2194     PRE_P points to the list where side effects that must happen before
2195 	*EXPR_P should be stored.
2196 
2197     POST_P points to the list where side effects that must happen after
2198 	*EXPR_P should be stored.
2199 
2200     WANT_VALUE is nonzero iff we want to use the value of this expression
2201 	in another expression.  */
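
/* Sketch of the two shapes produced (temporaries invented):

     b = a++;   =>   a.0 = a;  b = a.0;  a = a.0 + 1;
     b = ++a;   =>   a = a + 1;  b = a;

   For the postfix form the store back into "a" is queued on the post
   queue, so it happens only after the old value has been used.  */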
2202 
2203 static enum gimplify_status
2204 gimplify_self_mod_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
2205 			bool want_value)
2206 {
2207   enum tree_code code;
2208   tree lhs, lvalue, rhs, t1;
2209   gimple_seq post = NULL, *orig_post_p = post_p;
2210   bool postfix;
2211   enum tree_code arith_code;
2212   enum gimplify_status ret;
2213   location_t loc = EXPR_LOCATION (*expr_p);
2214 
2215   code = TREE_CODE (*expr_p);
2216 
2217   gcc_assert (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR
2218 	      || code == PREINCREMENT_EXPR || code == PREDECREMENT_EXPR);
2219 
2220   /* Prefix or postfix?  */
2221   if (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR)
2222     /* Faster to treat as prefix if result is not used.  */
2223     postfix = want_value;
2224   else
2225     postfix = false;
2226 
2227   /* For postfix, make sure the inner expression's post side effects
2228      are executed after side effects from this expression.  */
2229   if (postfix)
2230     post_p = &post;
2231 
2232   /* Add or subtract?  */
2233   if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
2234     arith_code = PLUS_EXPR;
2235   else
2236     arith_code = MINUS_EXPR;
2237 
2238   /* Gimplify the LHS into a GIMPLE lvalue.  */
2239   lvalue = TREE_OPERAND (*expr_p, 0);
2240   ret = gimplify_expr (&lvalue, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
2241   if (ret == GS_ERROR)
2242     return ret;
2243 
2244   /* Extract the operands to the arithmetic operation.  */
2245   lhs = lvalue;
2246   rhs = TREE_OPERAND (*expr_p, 1);
2247 
2248   /* For a postfix operator, we evaluate the LHS to an rvalue and then use
2249      that as the result value and in the postqueue operation.  We also
2250      make sure to make lvalue a minimal lval, see
2251      gcc.c-torture/execute/20040313-1.c for an example where this matters.  */
2252   if (postfix)
2253     {
2254       if (!is_gimple_min_lval (lvalue))
2255 	{
2256 	  mark_addressable (lvalue);
2257 	  lvalue = build_fold_addr_expr_loc (input_location, lvalue);
2258 	  gimplify_expr (&lvalue, pre_p, post_p, is_gimple_val, fb_rvalue);
2259 	  lvalue = build_fold_indirect_ref_loc (input_location, lvalue);
2260 	}
2261       ret = gimplify_expr (&lhs, pre_p, post_p, is_gimple_val, fb_rvalue);
2262       if (ret == GS_ERROR)
2263 	return ret;
2264     }
2265 
2266   /* For pointer increments and decrements, use POINTER_PLUS_EXPR.  */
2267   if (POINTER_TYPE_P (TREE_TYPE (lhs)))
2268     {
2269       rhs = convert_to_ptrofftype_loc (loc, rhs);
2270       if (arith_code == MINUS_EXPR)
2271 	rhs = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (rhs), rhs);
2272       arith_code = POINTER_PLUS_EXPR;
2273     }
2274 
2275   t1 = build2 (arith_code, TREE_TYPE (*expr_p), lhs, rhs);
2276 
2277   if (postfix)
2278     {
2279       gimplify_assign (lvalue, t1, orig_post_p);
2280       gimplify_seq_add_seq (orig_post_p, post);
2281       *expr_p = lhs;
2282       return GS_ALL_DONE;
2283     }
2284   else
2285     {
2286       *expr_p = build2 (MODIFY_EXPR, TREE_TYPE (lvalue), lvalue, t1);
2287       return GS_OK;
2288     }
2289 }
2290 
2291 /* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR.  */
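
/* E.g. (sketch): for an object X whose type is a C99 VLA covering N
   bytes, a use of X that later needs the size (say as a call argument)
   becomes WITH_SIZE_EXPR <X, N>, where N is the non-constant
   TYPE_SIZE_UNIT with any PLACEHOLDER_EXPRs substituted.  */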
2292 
2293 static void
2294 maybe_with_size_expr (tree *expr_p)
2295 {
2296   tree expr = *expr_p;
2297   tree type = TREE_TYPE (expr);
2298   tree size;
2299 
2300   /* If we've already wrapped this or the type is error_mark_node, we can't do
2301      anything.  */
2302   if (TREE_CODE (expr) == WITH_SIZE_EXPR
2303       || type == error_mark_node)
2304     return;
2305 
2306   /* If the size isn't known or is a constant, we have nothing to do.  */
2307   size = TYPE_SIZE_UNIT (type);
2308   if (!size || TREE_CODE (size) == INTEGER_CST)
2309     return;
2310 
2311   /* Otherwise, make a WITH_SIZE_EXPR.  */
2312   size = unshare_expr (size);
2313   size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, expr);
2314   *expr_p = build2 (WITH_SIZE_EXPR, type, expr, size);
2315 }
2316 
2317 /* Helper for gimplify_call_expr.  Gimplify a single argument *ARG_P.
2318    Store any side-effects in PRE_P.  CALL_LOCATION is the location of
2319    the CALL_EXPR.  */
2320 
2321 static enum gimplify_status
2322 gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location)
2323 {
2324   bool (*test) (tree);
2325   fallback_t fb;
2326 
2327   /* In general, we allow lvalues for function arguments to avoid
2328      extra overhead of copying large aggregates out of even larger
2329      aggregates into temporaries only to copy the temporaries to
2330      the argument list.  Make optimizers happy by pulling out to
2331      temporaries those types that fit in registers.  */
2332   if (is_gimple_reg_type (TREE_TYPE (*arg_p)))
2333     test = is_gimple_val, fb = fb_rvalue;
2334   else
2335     {
2336       test = is_gimple_lvalue, fb = fb_either;
2337       /* Also strip a TARGET_EXPR that would force an extra copy.  */
2338       if (TREE_CODE (*arg_p) == TARGET_EXPR)
2339 	{
2340 	  tree init = TARGET_EXPR_INITIAL (*arg_p);
2341 	  if (init
2342 	      && !VOID_TYPE_P (TREE_TYPE (init)))
2343 	    *arg_p = init;
2344 	}
2345     }
2346 
2347   /* If this is a variable sized type, we must remember the size.  */
2348   maybe_with_size_expr (arg_p);
2349 
2350   /* FIXME diagnostics: This will mess up gcc.dg/Warray-bounds.c.  */
2351   /* Make sure arguments have the same location as the function call
2352      itself.  */
2353   protected_set_expr_location (*arg_p, call_location);
2354 
2355   /* There is a sequence point before a function call.  Side effects in
2356      the argument list must occur before the actual call. So, when
2357      gimplifying arguments, force gimplify_expr to use an internal
2358      post queue which is then appended to the end of PRE_P.  */
2359   return gimplify_expr (arg_p, pre_p, NULL, test, fb);
2360 }
2361 
2362 /* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P.
2363    WANT_VALUE is true if the result of the call is desired.  */
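
/* Sketch of the two outcomes (temporaries invented):

     foo (x + 1);       =>   D.1 = x + 1;  foo (D.1);   [a GIMPLE_CALL]
     y = foo (x + 1);   =>   D.1 = x + 1;  the CALL_EXPR itself stays in
                             place and gimplify_modify_expr later builds
                             the GIMPLE_CALL with lhs "y".  */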
2364 
2365 static enum gimplify_status
2366 gimplify_call_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
2367 {
2368   tree fndecl, parms, p, fnptrtype;
2369   enum gimplify_status ret;
2370   int i, nargs;
2371   gimple call;
2372   bool builtin_va_start_p = FALSE;
2373   location_t loc = EXPR_LOCATION (*expr_p);
2374 
2375   gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR);
2376 
2377   /* For reliable diagnostics during inlining, it is necessary that
2378      every call_expr be annotated with file and line.  */
2379   if (! EXPR_HAS_LOCATION (*expr_p))
2380     SET_EXPR_LOCATION (*expr_p, input_location);
2381 
2382   /* This may be a call to a builtin function.
2383 
2384      Builtin function calls may be transformed into different
2385      (and more efficient) builtin function calls under certain
2386      circumstances.  Unfortunately, gimplification can muck things
2387      up enough that the builtin expanders are not aware that certain
2388      transformations are still valid.
2389 
2390      So we attempt transformation/gimplification of the call before
2391      we gimplify the CALL_EXPR.  At this time we do not manage to
2392      transform all calls in the same manner as the expanders do, but
2393      we do transform most of them.  */
2394   fndecl = get_callee_fndecl (*expr_p);
2395   if (fndecl && DECL_BUILT_IN (fndecl))
2396     {
2397       tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
2398 
2399       if (new_tree && new_tree != *expr_p)
2400 	{
2401 	  /* There was a transformation of this call which computes the
2402 	     same value, but in a more efficient way.  Return and try
2403 	     again.  */
2404 	  *expr_p = new_tree;
2405 	  return GS_OK;
2406 	}
2407 
2408       if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
2409 	  && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_VA_START)
2410         {
2411 	  builtin_va_start_p = TRUE;
2412 	  if (call_expr_nargs (*expr_p) < 2)
2413 	    {
2414 	      error ("too few arguments to function %<va_start%>");
2415 	      *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
2416 	      return GS_OK;
2417 	    }
2418 
2419 	  if (fold_builtin_next_arg (*expr_p, true))
2420 	    {
2421 	      *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
2422 	      return GS_OK;
2423 	    }
2424 	}
2425     }
2426 
2427   /* Remember the original function pointer type.  */
2428   fnptrtype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
2429 
2430   /* There is a sequence point before the call, so any side effects in
2431      the calling expression must occur before the actual call.  Force
2432      gimplify_expr to use an internal post queue.  */
2433   ret = gimplify_expr (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
2434 		       is_gimple_call_addr, fb_rvalue);
2435 
2436   nargs = call_expr_nargs (*expr_p);
2437 
2438   /* Get argument types for verification.  */
2439   fndecl = get_callee_fndecl (*expr_p);
2440   parms = NULL_TREE;
2441   if (fndecl)
2442     parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
2443   else if (POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_FN (*expr_p))))
2444     parms = TYPE_ARG_TYPES (TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (*expr_p))));
2445 
2446   if (fndecl && DECL_ARGUMENTS (fndecl))
2447     p = DECL_ARGUMENTS (fndecl);
2448   else if (parms)
2449     p = parms;
2450   else
2451     p = NULL_TREE;
2452   for (i = 0; i < nargs && p; i++, p = TREE_CHAIN (p))
2453     ;
2454 
2455   /* If the last argument is __builtin_va_arg_pack () and it is not
2456      passed as a named argument, decrease the number of CALL_EXPR
2457      arguments and set instead the CALL_EXPR_VA_ARG_PACK flag.  */
2458   if (!p
2459       && i < nargs
2460       && TREE_CODE (CALL_EXPR_ARG (*expr_p, nargs - 1)) == CALL_EXPR)
2461     {
2462       tree last_arg = CALL_EXPR_ARG (*expr_p, nargs - 1);
2463       tree last_arg_fndecl = get_callee_fndecl (last_arg);
2464 
2465       if (last_arg_fndecl
2466 	  && TREE_CODE (last_arg_fndecl) == FUNCTION_DECL
2467 	  && DECL_BUILT_IN_CLASS (last_arg_fndecl) == BUILT_IN_NORMAL
2468 	  && DECL_FUNCTION_CODE (last_arg_fndecl) == BUILT_IN_VA_ARG_PACK)
2469 	{
2470 	  tree call = *expr_p;
2471 
2472 	  --nargs;
2473 	  *expr_p = build_call_array_loc (loc, TREE_TYPE (call),
2474 					  CALL_EXPR_FN (call),
2475 					  nargs, CALL_EXPR_ARGP (call));
2476 
2477 	  /* Copy all CALL_EXPR flags, location and block, except
2478 	     CALL_EXPR_VA_ARG_PACK flag.  */
2479 	  CALL_EXPR_STATIC_CHAIN (*expr_p) = CALL_EXPR_STATIC_CHAIN (call);
2480 	  CALL_EXPR_TAILCALL (*expr_p) = CALL_EXPR_TAILCALL (call);
2481 	  CALL_EXPR_RETURN_SLOT_OPT (*expr_p)
2482 	    = CALL_EXPR_RETURN_SLOT_OPT (call);
2483 	  CALL_FROM_THUNK_P (*expr_p) = CALL_FROM_THUNK_P (call);
2484 	  SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (call));
2485 	  TREE_BLOCK (*expr_p) = TREE_BLOCK (call);
2486 
2487 	  /* Set CALL_EXPR_VA_ARG_PACK.  */
2488 	  CALL_EXPR_VA_ARG_PACK (*expr_p) = 1;
2489 	}
2490     }
2491 
2492   /* Finally, gimplify the function arguments.  */
2493   if (nargs > 0)
2494     {
2495       for (i = (PUSH_ARGS_REVERSED ? nargs - 1 : 0);
2496            PUSH_ARGS_REVERSED ? i >= 0 : i < nargs;
2497            PUSH_ARGS_REVERSED ? i-- : i++)
2498         {
2499           enum gimplify_status t;
2500 
2501           /* Avoid gimplifying the second argument to va_start, which needs to
2502              be the plain PARM_DECL.  */
2503           if ((i != 1) || !builtin_va_start_p)
2504             {
2505               t = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
2506 				EXPR_LOCATION (*expr_p));
2507 
2508               if (t == GS_ERROR)
2509                 ret = GS_ERROR;
2510             }
2511         }
2512     }
2513 
2514   /* Verify the function result.  */
2515   if (want_value && fndecl
2516       && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fnptrtype))))
2517     {
2518       error_at (loc, "using result of function returning %<void%>");
2519       ret = GS_ERROR;
2520     }
2521 
2522   /* Try this again in case gimplification exposed something.  */
2523   if (ret != GS_ERROR)
2524     {
2525       tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
2526 
2527       if (new_tree && new_tree != *expr_p)
2528 	{
2529 	  /* There was a transformation of this call which computes the
2530 	     same value, but in a more efficient way.  Return and try
2531 	     again.  */
2532 	  *expr_p = new_tree;
2533 	  return GS_OK;
2534 	}
2535     }
2536   else
2537     {
2538       *expr_p = error_mark_node;
2539       return GS_ERROR;
2540     }
2541 
2542   /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on the
2543      call.  This allows us to eliminate redundant or useless
2544      calls to "const" functions.  */
2545   if (TREE_CODE (*expr_p) == CALL_EXPR)
2546     {
2547       int flags = call_expr_flags (*expr_p);
2548       if (flags & (ECF_CONST | ECF_PURE)
2549 	  /* An infinite loop is considered a side effect.  */
2550 	  && !(flags & (ECF_LOOPING_CONST_OR_PURE)))
2551 	TREE_SIDE_EFFECTS (*expr_p) = 0;
2552     }
2553 
2554   /* If the value is not needed by the caller, emit a new GIMPLE_CALL
2555      and clear *EXPR_P.  Otherwise, leave *EXPR_P in its gimplified
2556      form and delegate the creation of a GIMPLE_CALL to
2557      gimplify_modify_expr.  This is always possible because when
2558      WANT_VALUE is true, the caller wants the result of this call into
2559      a temporary, which means that we will emit an INIT_EXPR in
2560      internal_get_tmp_var which will then be handled by
2561      gimplify_modify_expr.  */
2562   if (!want_value)
2563     {
2564       /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we
2565 	 have to do is replicate it as a GIMPLE_CALL tuple.  */
2566       gimple_stmt_iterator gsi;
2567       call = gimple_build_call_from_tree (*expr_p);
2568       gimple_call_set_fntype (call, TREE_TYPE (fnptrtype));
2569       gimplify_seq_add_stmt (pre_p, call);
2570       gsi = gsi_last (*pre_p);
2571       fold_stmt (&gsi);
2572       *expr_p = NULL_TREE;
2573     }
2574   else
2575     /* Remember the original function type.  */
2576     CALL_EXPR_FN (*expr_p) = build1 (NOP_EXPR, fnptrtype,
2577 				     CALL_EXPR_FN (*expr_p));
2578 
2579   return ret;
2580 }
2581 
2582 /* Handle shortcut semantics in the predicate operand of a COND_EXPR by
2583    rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.
2584 
2585    TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
2586    condition is true or false, respectively.  If null, we should generate
2587    our own to skip over the evaluation of this specific expression.
2588 
2589    LOCUS is the source location of the COND_EXPR.
2590 
2591    This function is the tree equivalent of do_jump.
2592 
2593    shortcut_cond_r should only be called by shortcut_cond_expr.  */
2594 
2595 static tree
2596 shortcut_cond_r (tree pred, tree *true_label_p, tree *false_label_p,
2597 		 location_t locus)
2598 {
2599   tree local_label = NULL_TREE;
2600   tree t, expr = NULL;
2601 
2602   /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
2603      retain the shortcut semantics.  Just insert the gotos here;
2604      shortcut_cond_expr will append the real blocks later.  */
2605   if (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
2606     {
2607       location_t new_locus;
2608 
2609       /* Turn if (a && b) into
2610 
2611 	 if (a); else goto no;
2612 	 if (b) goto yes; else goto no;
2613 	 (no:) */
2614 
2615       if (false_label_p == NULL)
2616 	false_label_p = &local_label;
2617 
2618       /* Keep the original source location on the first 'if'.  */
2619       t = shortcut_cond_r (TREE_OPERAND (pred, 0), NULL, false_label_p, locus);
2620       append_to_statement_list (t, &expr);
2621 
2622       /* Set the source location of the && on the second 'if'.  */
2623       new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
2624       t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
2625 			   new_locus);
2626       append_to_statement_list (t, &expr);
2627     }
2628   else if (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
2629     {
2630       location_t new_locus;
2631 
2632       /* Turn if (a || b) into
2633 
2634 	 if (a) goto yes;
2635 	 if (b) goto yes; else goto no;
2636 	 (yes:) */
2637 
2638       if (true_label_p == NULL)
2639 	true_label_p = &local_label;
2640 
2641       /* Keep the original source location on the first 'if'.  */
2642       t = shortcut_cond_r (TREE_OPERAND (pred, 0), true_label_p, NULL, locus);
2643       append_to_statement_list (t, &expr);
2644 
2645       /* Set the source location of the || on the second 'if'.  */
2646       new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
2647       t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
2648 			   new_locus);
2649       append_to_statement_list (t, &expr);
2650     }
2651   else if (TREE_CODE (pred) == COND_EXPR
2652 	   && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 1)))
2653 	   && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 2))))
2654     {
2655       location_t new_locus;
2656 
2657       /* As long as we're messing with gotos, turn if (a ? b : c) into
2658 	 if (a)
2659 	   if (b) goto yes; else goto no;
2660 	 else
2661 	   if (c) goto yes; else goto no;
2662 
2663 	 Don't do this if one of the arms has void type, which can happen
2664 	 in C++ when the arm is throw.  */
2665 
2666       /* Keep the original source location on the first 'if'.  Set the source
2667 	 location of the ? on the second 'if'.  */
2668       new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
2669       expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (pred, 0),
2670 		     shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
2671 				      false_label_p, locus),
2672 		     shortcut_cond_r (TREE_OPERAND (pred, 2), true_label_p,
2673 				      false_label_p, new_locus));
2674     }
2675   else
2676     {
2677       expr = build3 (COND_EXPR, void_type_node, pred,
2678 		     build_and_jump (true_label_p),
2679 		     build_and_jump (false_label_p));
2680       SET_EXPR_LOCATION (expr, locus);
2681     }
2682 
2683   if (local_label)
2684     {
2685       t = build1 (LABEL_EXPR, void_type_node, local_label);
2686       append_to_statement_list (t, &expr);
2687     }
2688 
2689   return expr;
2690 }
2691 
2692 /* Given a conditional expression EXPR with short-circuit boolean
2693    predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the
2694    predicate apart into the equivalent sequence of conditionals.  */
2695 
2696 static tree
2697 shortcut_cond_expr (tree expr)
2698 {
2699   tree pred = TREE_OPERAND (expr, 0);
2700   tree then_ = TREE_OPERAND (expr, 1);
2701   tree else_ = TREE_OPERAND (expr, 2);
2702   tree true_label, false_label, end_label, t;
2703   tree *true_label_p;
2704   tree *false_label_p;
2705   bool emit_end, emit_false, jump_over_else;
2706   bool then_se = then_ && TREE_SIDE_EFFECTS (then_);
2707   bool else_se = else_ && TREE_SIDE_EFFECTS (else_);
2708 
2709   /* First do simple transformations.  */
2710   if (!else_se)
2711     {
2712       /* If there is no 'else', turn
2713 	   if (a && b) then c
2714 	 into
2715 	   if (a) if (b) then c.  */
2716       while (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
2717 	{
2718 	  /* Keep the original source location on the first 'if'.  */
2719 	  location_t locus = EXPR_LOC_OR_HERE (expr);
2720 	  TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
2721 	  /* Set the source location of the && on the second 'if'.  */
2722 	  if (EXPR_HAS_LOCATION (pred))
2723 	    SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
2724 	  then_ = shortcut_cond_expr (expr);
2725 	  then_se = then_ && TREE_SIDE_EFFECTS (then_);
2726 	  pred = TREE_OPERAND (pred, 0);
2727 	  expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE);
2728 	  SET_EXPR_LOCATION (expr, locus);
2729 	}
2730     }
2731 
2732   if (!then_se)
2733     {
2734       /* If there is no 'then', turn
2735 	   if (a || b); else d
2736 	 into
2737 	   if (a); else if (b); else d.  */
2738       while (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
2739 	{
2740 	  /* Keep the original source location on the first 'if'.  */
2741 	  location_t locus = EXPR_LOC_OR_HERE (expr);
2742 	  TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
2743 	  /* Set the source location of the || on the second 'if'.  */
2744 	  if (EXPR_HAS_LOCATION (pred))
2745 	    SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
2746 	  else_ = shortcut_cond_expr (expr);
2747 	  else_se = else_ && TREE_SIDE_EFFECTS (else_);
2748 	  pred = TREE_OPERAND (pred, 0);
2749 	  expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_);
2750 	  SET_EXPR_LOCATION (expr, locus);
2751 	}
2752     }
2753 
2754   /* If we're done, great.  */
2755   if (TREE_CODE (pred) != TRUTH_ANDIF_EXPR
2756       && TREE_CODE (pred) != TRUTH_ORIF_EXPR)
2757     return expr;
2758 
2759   /* Otherwise we need to mess with gotos.  Change
2760        if (a) c; else d;
2761      to
2762        if (a); else goto no;
2763        c; goto end;
2764        no: d; end:
2765      and recursively gimplify the condition.  */
2766 
2767   true_label = false_label = end_label = NULL_TREE;
2768 
2769   /* If our arms just jump somewhere, hijack those labels so we don't
2770      generate jumps to jumps.  */
2771 
2772   if (then_
2773       && TREE_CODE (then_) == GOTO_EXPR
2774       && TREE_CODE (GOTO_DESTINATION (then_)) == LABEL_DECL)
2775     {
2776       true_label = GOTO_DESTINATION (then_);
2777       then_ = NULL;
2778       then_se = false;
2779     }
2780 
2781   if (else_
2782       && TREE_CODE (else_) == GOTO_EXPR
2783       && TREE_CODE (GOTO_DESTINATION (else_)) == LABEL_DECL)
2784     {
2785       false_label = GOTO_DESTINATION (else_);
2786       else_ = NULL;
2787       else_se = false;
2788     }
2789 
2790   /* If we aren't hijacking a label for the 'then' branch, it falls through.  */
2791   if (true_label)
2792     true_label_p = &true_label;
2793   else
2794     true_label_p = NULL;
2795 
2796   /* The 'else' branch also needs a label if it contains interesting code.  */
2797   if (false_label || else_se)
2798     false_label_p = &false_label;
2799   else
2800     false_label_p = NULL;
2801 
2802   /* If there was nothing else in our arms, just forward the label(s).  */
2803   if (!then_se && !else_se)
2804     return shortcut_cond_r (pred, true_label_p, false_label_p,
2805 			    EXPR_LOC_OR_HERE (expr));
2806 
2807   /* If our last subexpression already has a terminal label, reuse it.  */
2808   if (else_se)
2809     t = expr_last (else_);
2810   else if (then_se)
2811     t = expr_last (then_);
2812   else
2813     t = NULL;
2814   if (t && TREE_CODE (t) == LABEL_EXPR)
2815     end_label = LABEL_EXPR_LABEL (t);
2816 
2817   /* If we don't care about jumping to the 'else' branch, jump to the end
2818      if the condition is false.  */
2819   if (!false_label_p)
2820     false_label_p = &end_label;
2821 
2822   /* We only want to emit these labels if we aren't hijacking them.  */
2823   emit_end = (end_label == NULL_TREE);
2824   emit_false = (false_label == NULL_TREE);
2825 
2826   /* We only emit the jump over the else clause if we have to--if the
2827      then clause may fall through.  Otherwise we can wind up with a
2828      useless jump and a useless label at the end of gimplified code,
2829      which will cause us to think that this conditional as a whole
2830      falls through even if it doesn't.  If we then inline a function
2831      which ends with such a condition, that can cause us to issue an
2832      inappropriate warning about control reaching the end of a
2833      non-void function.  */
2834   jump_over_else = block_may_fallthru (then_);
2835 
2836   pred = shortcut_cond_r (pred, true_label_p, false_label_p,
2837 			  EXPR_LOC_OR_HERE (expr));
2838 
2839   expr = NULL;
2840   append_to_statement_list (pred, &expr);
2841 
2842   append_to_statement_list (then_, &expr);
2843   if (else_se)
2844     {
2845       if (jump_over_else)
2846 	{
2847 	  tree last = expr_last (expr);
2848 	  t = build_and_jump (&end_label);
2849 	  if (EXPR_HAS_LOCATION (last))
2850 	    SET_EXPR_LOCATION (t, EXPR_LOCATION (last));
2851 	  append_to_statement_list (t, &expr);
2852 	}
2853       if (emit_false)
2854 	{
2855 	  t = build1 (LABEL_EXPR, void_type_node, false_label);
2856 	  append_to_statement_list (t, &expr);
2857 	}
2858       append_to_statement_list (else_, &expr);
2859     }
2860   if (emit_end && end_label)
2861     {
2862       t = build1 (LABEL_EXPR, void_type_node, end_label);
2863       append_to_statement_list (t, &expr);
2864     }
2865 
2866   return expr;
2867 }
2868 
2869 /* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE.  */
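
/* For instance (illustrative): a comparison such as a < b is simply
   retyped to boolean_type_node, whereas a condition like x & 1, which
   keeps its integer type in GENERIC, is converted, roughly into
   (bool) (x & 1).  */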
2870 
2871 tree
2872 gimple_boolify (tree expr)
2873 {
2874   tree type = TREE_TYPE (expr);
2875   location_t loc = EXPR_LOCATION (expr);
2876 
2877   if (TREE_CODE (expr) == NE_EXPR
2878       && TREE_CODE (TREE_OPERAND (expr, 0)) == CALL_EXPR
2879       && integer_zerop (TREE_OPERAND (expr, 1)))
2880     {
2881       tree call = TREE_OPERAND (expr, 0);
2882       tree fn = get_callee_fndecl (call);
2883 
2884       /* For __builtin_expect ((long) (x), y) recurse into x as well
2885 	 if x is truth_value_p.  */
2886       if (fn
2887 	  && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL
2888 	  && DECL_FUNCTION_CODE (fn) == BUILT_IN_EXPECT
2889 	  && call_expr_nargs (call) == 2)
2890 	{
2891 	  tree arg = CALL_EXPR_ARG (call, 0);
2892 	  if (arg)
2893 	    {
2894 	      if (TREE_CODE (arg) == NOP_EXPR
2895 		  && TREE_TYPE (arg) == TREE_TYPE (call))
2896 		arg = TREE_OPERAND (arg, 0);
2897 	      if (truth_value_p (TREE_CODE (arg)))
2898 		{
2899 		  arg = gimple_boolify (arg);
2900 		  CALL_EXPR_ARG (call, 0)
2901 		    = fold_convert_loc (loc, TREE_TYPE (call), arg);
2902 		}
2903 	    }
2904 	}
2905     }
2906 
2907   switch (TREE_CODE (expr))
2908     {
2909     case TRUTH_AND_EXPR:
2910     case TRUTH_OR_EXPR:
2911     case TRUTH_XOR_EXPR:
2912     case TRUTH_ANDIF_EXPR:
2913     case TRUTH_ORIF_EXPR:
2914       /* Also boolify the arguments of truth exprs.  */
2915       TREE_OPERAND (expr, 1) = gimple_boolify (TREE_OPERAND (expr, 1));
2916       /* FALLTHRU */
2917 
2918     case TRUTH_NOT_EXPR:
2919       TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
2920 
2921       /* These expressions always produce boolean results.  */
2922       if (TREE_CODE (type) != BOOLEAN_TYPE)
2923 	TREE_TYPE (expr) = boolean_type_node;
2924       return expr;
2925 
2926     default:
2927       if (COMPARISON_CLASS_P (expr))
2928 	{
2929 	  /* These expressions always produce boolean results.  */
2930 	  if (TREE_CODE (type) != BOOLEAN_TYPE)
2931 	    TREE_TYPE (expr) = boolean_type_node;
2932 	  return expr;
2933 	}
2934       /* Other expressions that get here must have boolean values, but
2935 	 might need to be converted to the appropriate mode.  */
2936       if (TREE_CODE (type) == BOOLEAN_TYPE)
2937 	return expr;
2938       return fold_convert_loc (loc, boolean_type_node, expr);
2939     }
2940 }
2941 
2942 /* Given a conditional expression *EXPR_P without side effects, gimplify
2943    its operands.  New statements are inserted to PRE_P.  */
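
/* E.g. (sketch): with gimplify_ctxp->allow_rhs_cond_expr set and both
   arms free of side effects and traps,

     x = p ? a + 1 : b;

   can stay a conditional on the right-hand side, roughly

     D.1 = a + 1;
     x = p ? D.1 : b;

   rather than being lowered into explicit branches.  */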
2944 
2945 static enum gimplify_status
2946 gimplify_pure_cond_expr (tree *expr_p, gimple_seq *pre_p)
2947 {
2948   tree expr = *expr_p, cond;
2949   enum gimplify_status ret, tret;
2950   enum tree_code code;
2951 
2952   cond = gimple_boolify (COND_EXPR_COND (expr));
2953 
2954   /* We need to handle && and || specially, as their gimplification
2955      creates pure COND_EXPRs, which would otherwise lead to an infinite cycle.  */
2956   code = TREE_CODE (cond);
2957   if (code == TRUTH_ANDIF_EXPR)
2958     TREE_SET_CODE (cond, TRUTH_AND_EXPR);
2959   else if (code == TRUTH_ORIF_EXPR)
2960     TREE_SET_CODE (cond, TRUTH_OR_EXPR);
2961   ret = gimplify_expr (&cond, pre_p, NULL, is_gimple_condexpr, fb_rvalue);
2962   COND_EXPR_COND (*expr_p) = cond;
2963 
2964   tret = gimplify_expr (&COND_EXPR_THEN (expr), pre_p, NULL,
2965 				   is_gimple_val, fb_rvalue);
2966   ret = MIN (ret, tret);
2967   tret = gimplify_expr (&COND_EXPR_ELSE (expr), pre_p, NULL,
2968 				   is_gimple_val, fb_rvalue);
2969 
2970   return MIN (ret, tret);
2971 }
2972 
2973 /* Return true if evaluating EXPR could trap.
2974    EXPR is GENERIC, while tree_could_trap_p can be called
2975    only on GIMPLE.  */
2976 
2977 static bool
2978 generic_expr_could_trap_p (tree expr)
2979 {
2980   unsigned i, n;
2981 
2982   if (!expr || is_gimple_val (expr))
2983     return false;
2984 
2985   if (!EXPR_P (expr) || tree_could_trap_p (expr))
2986     return true;
2987 
2988   n = TREE_OPERAND_LENGTH (expr);
2989   for (i = 0; i < n; i++)
2990     if (generic_expr_could_trap_p (TREE_OPERAND (expr, i)))
2991       return true;
2992 
2993   return false;
2994 }
2995 
2996 /*  Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
2997     into
2998 
2999     if (p)			if (p)
3000       t1 = a;			  a;
3001     else		or	else
3002       t1 = b;			  b;
3003     t1;
3004 
3005     The second form is used when *EXPR_P is of type void.
3006 
3007     PRE_P points to the list where side effects that must happen before
3008       *EXPR_P should be stored.  */
3009 
3010 static enum gimplify_status
3011 gimplify_cond_expr (tree *expr_p, gimple_seq *pre_p, fallback_t fallback)
3012 {
3013   tree expr = *expr_p;
3014   tree type = TREE_TYPE (expr);
3015   location_t loc = EXPR_LOCATION (expr);
3016   tree tmp, arm1, arm2;
3017   enum gimplify_status ret;
3018   tree label_true, label_false, label_cont;
3019   bool have_then_clause_p, have_else_clause_p;
3020   gimple gimple_cond;
3021   enum tree_code pred_code;
3022   gimple_seq seq = NULL;
3023 
3024   /* If this COND_EXPR has a value, copy the values into a temporary within
3025      the arms.  */
3026   if (!VOID_TYPE_P (type))
3027     {
3028       tree then_ = TREE_OPERAND (expr, 1), else_ = TREE_OPERAND (expr, 2);
3029       tree result;
3030 
3031       /* If either an rvalue is ok or we do not require an lvalue, create the
3032 	 temporary.  But we cannot do that if the type is addressable.  */
3033       if (((fallback & fb_rvalue) || !(fallback & fb_lvalue))
3034 	  && !TREE_ADDRESSABLE (type))
3035 	{
3036 	  if (gimplify_ctxp->allow_rhs_cond_expr
3037 	      /* If either branch has side effects or could trap, it can't be
3038 		 evaluated unconditionally.  */
3039 	      && !TREE_SIDE_EFFECTS (then_)
3040 	      && !generic_expr_could_trap_p (then_)
3041 	      && !TREE_SIDE_EFFECTS (else_)
3042 	      && !generic_expr_could_trap_p (else_))
3043 	    return gimplify_pure_cond_expr (expr_p, pre_p);
3044 
3045 	  tmp = create_tmp_var (type, "iftmp");
3046 	  result = tmp;
3047 	}
3048 
3049       /* Otherwise, only create and copy references to the values.  */
3050       else
3051 	{
3052 	  type = build_pointer_type (type);
3053 
3054 	  if (!VOID_TYPE_P (TREE_TYPE (then_)))
3055 	    then_ = build_fold_addr_expr_loc (loc, then_);
3056 
3057 	  if (!VOID_TYPE_P (TREE_TYPE (else_)))
3058 	    else_ = build_fold_addr_expr_loc (loc, else_);
3059 
3060 	  expr
3061 	    = build3 (COND_EXPR, type, TREE_OPERAND (expr, 0), then_, else_);
3062 
3063 	  tmp = create_tmp_var (type, "iftmp");
3064 	  result = build_simple_mem_ref_loc (loc, tmp);
3065 	}
3066 
3067       /* Build the new then clause, `tmp = then_;'.  But don't build the
3068 	 assignment if the value is void; in C++ it can be if it's a throw.  */
3069       if (!VOID_TYPE_P (TREE_TYPE (then_)))
3070 	TREE_OPERAND (expr, 1) = build2 (MODIFY_EXPR, type, tmp, then_);
3071 
3072       /* Similarly, build the new else clause, `tmp = else_;'.  */
3073       if (!VOID_TYPE_P (TREE_TYPE (else_)))
3074 	TREE_OPERAND (expr, 2) = build2 (MODIFY_EXPR, type, tmp, else_);
3075 
3076       TREE_TYPE (expr) = void_type_node;
3077       recalculate_side_effects (expr);
3078 
3079       /* Move the COND_EXPR to the prequeue.  */
3080       gimplify_stmt (&expr, pre_p);
3081 
3082       *expr_p = result;
3083       return GS_ALL_DONE;
3084     }
3085 
3086   /* Remove any COMPOUND_EXPR so the following cases will be caught.  */
3087   STRIP_TYPE_NOPS (TREE_OPERAND (expr, 0));
3088   if (TREE_CODE (TREE_OPERAND (expr, 0)) == COMPOUND_EXPR)
3089     gimplify_compound_expr (&TREE_OPERAND (expr, 0), pre_p, true);
3090 
3091   /* Make sure the condition has BOOLEAN_TYPE.  */
3092   TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
3093 
3094   /* Break apart && and || conditions.  */
3095   if (TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ANDIF_EXPR
3096       || TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ORIF_EXPR)
3097     {
3098       expr = shortcut_cond_expr (expr);
3099 
3100       if (expr != *expr_p)
3101 	{
3102 	  *expr_p = expr;
3103 
3104 	  /* We can't rely on gimplify_expr to re-gimplify the expanded
3105 	     form properly, as cleanups might cause the target labels to be
3106 	     wrapped in a TRY_FINALLY_EXPR.  To prevent that, we need to
3107 	     set up a conditional context.  */
3108 	  gimple_push_condition ();
3109 	  gimplify_stmt (expr_p, &seq);
3110 	  gimple_pop_condition (pre_p);
3111 	  gimple_seq_add_seq (pre_p, seq);
3112 
3113 	  return GS_ALL_DONE;
3114 	}
3115     }
3116 
3117   /* Now do the normal gimplification.  */
3118 
3119   /* Gimplify condition.  */
3120   ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL, is_gimple_condexpr,
3121 		       fb_rvalue);
3122   if (ret == GS_ERROR)
3123     return GS_ERROR;
3124   gcc_assert (TREE_OPERAND (expr, 0) != NULL_TREE);
3125 
3126   gimple_push_condition ();
3127 
3128   have_then_clause_p = have_else_clause_p = false;
3129   if (TREE_OPERAND (expr, 1) != NULL
3130       && TREE_CODE (TREE_OPERAND (expr, 1)) == GOTO_EXPR
3131       && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 1))) == LABEL_DECL
3132       && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 1)))
3133 	  == current_function_decl)
3134       /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
3135 	 have different locations, otherwise we end up with incorrect
3136 	 location information on the branches.  */
3137       && (optimize
3138 	  || !EXPR_HAS_LOCATION (expr)
3139 	  || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 1))
3140 	  || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 1))))
3141     {
3142       label_true = GOTO_DESTINATION (TREE_OPERAND (expr, 1));
3143       have_then_clause_p = true;
3144     }
3145   else
3146     label_true = create_artificial_label (UNKNOWN_LOCATION);
3147   if (TREE_OPERAND (expr, 2) != NULL
3148       && TREE_CODE (TREE_OPERAND (expr, 2)) == GOTO_EXPR
3149       && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 2))) == LABEL_DECL
3150       && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 2)))
3151 	  == current_function_decl)
3152       /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
3153 	 have different locations, otherwise we end up with incorrect
3154 	 location information on the branches.  */
3155       && (optimize
3156 	  || !EXPR_HAS_LOCATION (expr)
3157 	  || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 2))
3158 	  || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 2))))
3159     {
3160       label_false = GOTO_DESTINATION (TREE_OPERAND (expr, 2));
3161       have_else_clause_p = true;
3162     }
3163   else
3164     label_false = create_artificial_label (UNKNOWN_LOCATION);
3165 
3166   gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr), &pred_code, &arm1,
3167 				 &arm2);
3168 
3169   gimple_cond = gimple_build_cond (pred_code, arm1, arm2, label_true,
3170                                    label_false);
3171 
3172   gimplify_seq_add_stmt (&seq, gimple_cond);
3173   label_cont = NULL_TREE;
3174   if (!have_then_clause_p)
3175     {
3176       /* For if (...) {} else { code; } put label_true after
3177 	 the else block.  */
3178       if (TREE_OPERAND (expr, 1) == NULL_TREE
3179 	  && !have_else_clause_p
3180 	  && TREE_OPERAND (expr, 2) != NULL_TREE)
3181 	label_cont = label_true;
3182       else
3183 	{
3184 	  gimplify_seq_add_stmt (&seq, gimple_build_label (label_true));
3185 	  have_then_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 1), &seq);
3186 	  /* For if (...) { code; } else {} or
3187 	     if (...) { code; } else goto label; or
3188 	     if (...) { code; return; } else { ... }
3189 	     label_cont isn't needed.  */
3190 	  if (!have_else_clause_p
3191 	      && TREE_OPERAND (expr, 2) != NULL_TREE
3192 	      && gimple_seq_may_fallthru (seq))
3193 	    {
3194 	      gimple g;
3195 	      label_cont = create_artificial_label (UNKNOWN_LOCATION);
3196 
3197 	      g = gimple_build_goto (label_cont);
3198 
3199 	      /* GIMPLE_COND's are very low level; they have embedded
3200 		 gotos.  This particular embedded goto should not be marked
3201 		 with the location of the original COND_EXPR, as it would
3202 		 correspond to the COND_EXPR's condition, not the ELSE or the
3203 		 THEN arms.  To avoid marking it with the wrong location, flag
3204 		 it as "no location".  */
3205 	      gimple_set_do_not_emit_location (g);
3206 
3207 	      gimplify_seq_add_stmt (&seq, g);
3208 	    }
3209 	}
3210     }
3211   if (!have_else_clause_p)
3212     {
3213       gimplify_seq_add_stmt (&seq, gimple_build_label (label_false));
3214       have_else_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 2), &seq);
3215     }
3216   if (label_cont)
3217     gimplify_seq_add_stmt (&seq, gimple_build_label (label_cont));
3218 
3219   gimple_pop_condition (pre_p);
3220   gimple_seq_add_seq (pre_p, seq);
3221 
3222   if (ret == GS_ERROR)
3223     ; /* Do nothing.  */
3224   else if (have_then_clause_p || have_else_clause_p)
3225     ret = GS_ALL_DONE;
3226   else
3227     {
3228       /* Both arms are empty; replace the COND_EXPR with its predicate.  */
3229       expr = TREE_OPERAND (expr, 0);
3230       gimplify_stmt (&expr, pre_p);
3231     }
3232 
3233   *expr_p = NULL;
3234   return ret;
3235 }
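
/* For illustration (approximate sketch, label names are placeholders):
   for GENERIC input roughly equivalent to

       if (a > b)
	 x = 1;
       else
	 x = 2;

   the code above emits a GIMPLE_COND with explicit labels and gotos,
   approximately

       if (a > b) goto <then_lab>; else goto <else_lab>;
       <then_lab>:
	 x = 1;
	 goto <cont_lab>;
       <else_lab>:
	 x = 2;
       <cont_lab>:

   where <cont_lab> is only created when the then-arm may fall through
   and an else-arm exists, as handled via label_cont above.  */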
3236 
3237 /* Prepare the node pointed to by EXPR_P, an is_gimple_addressable expression,
3238    to be marked addressable.
3239 
3240    We cannot rely on such an expression being directly markable if a temporary
3241    has been created by the gimplification.  In this case, we create another
3242    temporary and initialize it with a copy, which will become a store after we
3243    mark it addressable.  This can happen if the front-end passed us something
3244    that it could not mark addressable yet, like a Fortran pass-by-reference
3245    parameter (int) floatvar.  */
3246 
3247 static void
3248 prepare_gimple_addressable (tree *expr_p, gimple_seq *seq_p)
3249 {
3250   while (handled_component_p (*expr_p))
3251     expr_p = &TREE_OPERAND (*expr_p, 0);
3252   if (is_gimple_reg (*expr_p))
3253     *expr_p = get_initialized_tmp_var (*expr_p, seq_p, NULL);
3254 }
3255 
3256 /* A subroutine of gimplify_modify_expr.  Replace a MODIFY_EXPR with
3257    a call to __builtin_memcpy.  */
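
/* For illustration (approximate sketch): for a block copy such as

       struct S a, b;
       ...
       a = b;

   where the copy cannot be done as a plain assignment, the MODIFY_EXPR is
   rewritten as, roughly,

       __builtin_memcpy (&a, &b, size);

   with SIZE supplied by the caller, typically from a WITH_SIZE_EXPR.  */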
3258 
3259 static enum gimplify_status
3260 gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value,
3261     				gimple_seq *seq_p)
3262 {
3263   tree t, to, to_ptr, from, from_ptr;
3264   gimple gs;
3265   location_t loc = EXPR_LOCATION (*expr_p);
3266 
3267   to = TREE_OPERAND (*expr_p, 0);
3268   from = TREE_OPERAND (*expr_p, 1);
3269 
3270   /* Mark the RHS addressable.  Beware that it may not be possible to do so
3271      directly if a temporary has been created by the gimplification.  */
3272   prepare_gimple_addressable (&from, seq_p);
3273 
3274   mark_addressable (from);
3275   from_ptr = build_fold_addr_expr_loc (loc, from);
3276   gimplify_arg (&from_ptr, seq_p, loc);
3277 
3278   mark_addressable (to);
3279   to_ptr = build_fold_addr_expr_loc (loc, to);
3280   gimplify_arg (&to_ptr, seq_p, loc);
3281 
3282   t = builtin_decl_implicit (BUILT_IN_MEMCPY);
3283 
3284   gs = gimple_build_call (t, 3, to_ptr, from_ptr, size);
3285 
3286   if (want_value)
3287     {
3288       /* tmp = memcpy() */
3289       t = create_tmp_var (TREE_TYPE (to_ptr), NULL);
3290       gimple_call_set_lhs (gs, t);
3291       gimplify_seq_add_stmt (seq_p, gs);
3292 
3293       *expr_p = build_simple_mem_ref (t);
3294       return GS_ALL_DONE;
3295     }
3296 
3297   gimplify_seq_add_stmt (seq_p, gs);
3298   *expr_p = NULL;
3299   return GS_ALL_DONE;
3300 }
3301 
3302 /* A subroutine of gimplify_modify_expr.  Replace a MODIFY_EXPR with
3303    a call to __builtin_memset.  In this case we know that the RHS is
3304    a CONSTRUCTOR with an empty element list.  */
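
/* For illustration (approximate sketch): an assignment whose RHS is an
   empty CONSTRUCTOR, i.e. whole-object zero-initialization of A, is
   rewritten as, roughly,

       __builtin_memset (&a, 0, size);

   with SIZE supplied by the caller.  */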
3305 
3306 static enum gimplify_status
3307 gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value,
3308     				gimple_seq *seq_p)
3309 {
3310   tree t, from, to, to_ptr;
3311   gimple gs;
3312   location_t loc = EXPR_LOCATION (*expr_p);
3313 
3314   /* Assert our assumptions, to abort instead of producing wrong code
3315      silently if they are not met.  Beware that the RHS CONSTRUCTOR might
3316      not be immediately exposed.  */
3317   from = TREE_OPERAND (*expr_p, 1);
3318   if (TREE_CODE (from) == WITH_SIZE_EXPR)
3319     from = TREE_OPERAND (from, 0);
3320 
3321   gcc_assert (TREE_CODE (from) == CONSTRUCTOR
3322 	      && VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (from)));
3323 
3324   /* Now proceed.  */
3325   to = TREE_OPERAND (*expr_p, 0);
3326 
3327   to_ptr = build_fold_addr_expr_loc (loc, to);
3328   gimplify_arg (&to_ptr, seq_p, loc);
3329   t = builtin_decl_implicit (BUILT_IN_MEMSET);
3330 
3331   gs = gimple_build_call (t, 3, to_ptr, integer_zero_node, size);
3332 
3333   if (want_value)
3334     {
3335       /* tmp = memset() */
3336       t = create_tmp_var (TREE_TYPE (to_ptr), NULL);
3337       gimple_call_set_lhs (gs, t);
3338       gimplify_seq_add_stmt (seq_p, gs);
3339 
3340       *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t);
3341       return GS_ALL_DONE;
3342     }
3343 
3344   gimplify_seq_add_stmt (seq_p, gs);
3345   *expr_p = NULL;
3346   return GS_ALL_DONE;
3347 }
3348 
3349 /* A subroutine of gimplify_init_ctor_preeval.  Called via walk_tree,
3350    determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
3351    assignment.  Return non-null if we detect a potential overlap.  */
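
/* For illustration (approximate sketch): for an assignment such as

       a = (struct S) { .x = *p, .y = f (q) };

   the walk below reports a potential overlap when *p, or a pointer
   argument of f, might alias the storage of A, so that the caller can
   pre-evaluate the value into a temporary before A is written.  */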
3352 
3353 struct gimplify_init_ctor_preeval_data
3354 {
3355   /* The base decl of the lhs object.  May be NULL, in which case we
3356      have to assume the lhs is indirect.  */
3357   tree lhs_base_decl;
3358 
3359   /* The alias set of the lhs object.  */
3360   alias_set_type lhs_alias_set;
3361 };
3362 
3363 static tree
3364 gimplify_init_ctor_preeval_1 (tree *tp, int *walk_subtrees, void *xdata)
3365 {
3366   struct gimplify_init_ctor_preeval_data *data
3367     = (struct gimplify_init_ctor_preeval_data *) xdata;
3368   tree t = *tp;
3369 
3370   /* If we find the base object, obviously we have overlap.  */
3371   if (data->lhs_base_decl == t)
3372     return t;
3373 
3374   /* If the constructor component is indirect, determine if we have a
3375      potential overlap with the lhs.  The only bits of information we
3376      have to go on at this point are addressability and alias sets.  */
3377   if ((INDIRECT_REF_P (t)
3378        || TREE_CODE (t) == MEM_REF)
3379       && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
3380       && alias_sets_conflict_p (data->lhs_alias_set, get_alias_set (t)))
3381     return t;
3382 
3383   /* If the constructor component is a call, determine if it can hide a
3384      potential overlap with the lhs through an INDIRECT_REF like above.
3385      ??? Ugh - this is completely broken.  In fact this whole analysis
3386      doesn't look conservative.  */
3387   if (TREE_CODE (t) == CALL_EXPR)
3388     {
3389       tree type, fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t)));
3390 
3391       for (type = TYPE_ARG_TYPES (fntype); type; type = TREE_CHAIN (type))
3392 	if (POINTER_TYPE_P (TREE_VALUE (type))
3393 	    && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
3394 	    && alias_sets_conflict_p (data->lhs_alias_set,
3395 				      get_alias_set
3396 				        (TREE_TYPE (TREE_VALUE (type)))))
3397 	  return t;
3398     }
3399 
3400   if (IS_TYPE_OR_DECL_P (t))
3401     *walk_subtrees = 0;
3402   return NULL;
3403 }
3404 
3405 /* A subroutine of gimplify_init_constructor.  Pre-evaluate EXPR,
3406    force values that overlap with the lhs (as described by *DATA)
3407    into temporaries.  */
3408 
3409 static void
3410 gimplify_init_ctor_preeval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
3411 			    struct gimplify_init_ctor_preeval_data *data)
3412 {
3413   enum gimplify_status one;
3414 
3415   /* If the value is constant, then there's nothing to pre-evaluate.  */
3416   if (TREE_CONSTANT (*expr_p))
3417     {
3418       /* Ensure it does not have side effects; it might contain a reference to
3419 	 the object we're initializing.  */
3420       gcc_assert (!TREE_SIDE_EFFECTS (*expr_p));
3421       return;
3422     }
3423 
3424   /* If the type has non-trivial constructors, we can't pre-evaluate.  */
3425   if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p)))
3426     return;
3427 
3428   /* Recurse for nested constructors.  */
3429   if (TREE_CODE (*expr_p) == CONSTRUCTOR)
3430     {
3431       unsigned HOST_WIDE_INT ix;
3432       constructor_elt *ce;
3433       VEC(constructor_elt,gc) *v = CONSTRUCTOR_ELTS (*expr_p);
3434 
3435       FOR_EACH_VEC_ELT (constructor_elt, v, ix, ce)
3436 	gimplify_init_ctor_preeval (&ce->value, pre_p, post_p, data);
3437 
3438       return;
3439     }
3440 
3441   /* If this is a variable sized type, we must remember the size.  */
3442   maybe_with_size_expr (expr_p);
3443 
3444   /* Gimplify the constructor element to something appropriate for the rhs
3445      of a MODIFY_EXPR.  Given that we know the LHS is an aggregate, we know
3446      the gimplifier will consider this a store to memory.  Doing this
3447      gimplification now means that we won't have to deal with complicated
3448      language-specific trees, nor trees like SAVE_EXPR that can induce
3449      exponential search behavior.  */
3450   one = gimplify_expr (expr_p, pre_p, post_p, is_gimple_mem_rhs, fb_rvalue);
3451   if (one == GS_ERROR)
3452     {
3453       *expr_p = NULL;
3454       return;
3455     }
3456 
3457   /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
3458      with the lhs, since "a = { .x=a }" doesn't make sense.  This will
3459      always be true for all scalars, since is_gimple_mem_rhs insists on a
3460      temporary variable for them.  */
3461   if (DECL_P (*expr_p))
3462     return;
3463 
3464   /* If this is of variable size, we have no choice but to assume it doesn't
3465      overlap since we can't make a temporary for it.  */
3466   if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p))) != INTEGER_CST)
3467     return;
3468 
3469   /* Otherwise, we must search for overlap ...  */
3470   if (!walk_tree (expr_p, gimplify_init_ctor_preeval_1, data, NULL))
3471     return;
3472 
3473   /* ... and if found, force the value into a temporary.  */
3474   *expr_p = get_formal_tmp_var (*expr_p, pre_p);
3475 }
3476 
3477 /* A subroutine of gimplify_init_ctor_eval.  Create a loop for
3478    a RANGE_EXPR in a CONSTRUCTOR for an array.
3479 
3480       var = lower;
3481     loop_entry:
3482       object[var] = value;
3483       if (var == upper)
3484 	goto loop_exit;
3485       var = var + 1;
3486       goto loop_entry;
3487     loop_exit:
3488 
3489    We increment var _after_ the loop exit check because we might otherwise
3490    fail if upper == TYPE_MAX_VALUE (type for upper).
3491 
3492    Note that we never have to deal with SAVE_EXPRs here, because this has
3493    already been taken care of for us, in gimplify_init_ctor_preeval().  */
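
/* For illustration (approximate sketch): a GNU C designated range
   initializer such as

       int a[16] = { [4 ... 11] = 7 };

   reaches this function as a RANGE_EXPR index and is expanded into the
   loop shown above rather than into eight separate stores.  */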
3494 
3495 static void gimplify_init_ctor_eval (tree, VEC(constructor_elt,gc) *,
3496 				     gimple_seq *, bool);
3497 
3498 static void
3499 gimplify_init_ctor_eval_range (tree object, tree lower, tree upper,
3500 			       tree value, tree array_elt_type,
3501 			       gimple_seq *pre_p, bool cleared)
3502 {
3503   tree loop_entry_label, loop_exit_label, fall_thru_label;
3504   tree var, var_type, cref, tmp;
3505 
3506   loop_entry_label = create_artificial_label (UNKNOWN_LOCATION);
3507   loop_exit_label = create_artificial_label (UNKNOWN_LOCATION);
3508   fall_thru_label = create_artificial_label (UNKNOWN_LOCATION);
3509 
3510   /* Create and initialize the index variable.  */
3511   var_type = TREE_TYPE (upper);
3512   var = create_tmp_var (var_type, NULL);
3513   gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, lower));
3514 
3515   /* Add the loop entry label.  */
3516   gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_entry_label));
3517 
3518   /* Build the reference.  */
3519   cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
3520 		 var, NULL_TREE, NULL_TREE);
3521 
3522   /* If the value is a CONSTRUCTOR, just call gimplify_init_ctor_eval to do
3523      the store.  Otherwise just assign value to the reference.  */
3524 
3525   if (TREE_CODE (value) == CONSTRUCTOR)
3526     /* NB we might have to call ourselves recursively through
3527        gimplify_init_ctor_eval if the value is a constructor.  */
3528     gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
3529 			     pre_p, cleared);
3530   else
3531     gimplify_seq_add_stmt (pre_p, gimple_build_assign (cref, value));
3532 
3533   /* We exit the loop when the index var is equal to the upper bound.  */
3534   gimplify_seq_add_stmt (pre_p,
3535 			 gimple_build_cond (EQ_EXPR, var, upper,
3536 					    loop_exit_label, fall_thru_label));
3537 
3538   gimplify_seq_add_stmt (pre_p, gimple_build_label (fall_thru_label));
3539 
3540   /* Otherwise, increment the index var...  */
3541   tmp = build2 (PLUS_EXPR, var_type, var,
3542 		fold_convert (var_type, integer_one_node));
3543   gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, tmp));
3544 
3545   /* ...and jump back to the loop entry.  */
3546   gimplify_seq_add_stmt (pre_p, gimple_build_goto (loop_entry_label));
3547 
3548   /* Add the loop exit label.  */
3549   gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_exit_label));
3550 }
3551 
3552 /* Return true if FDECL is accessing a field that is zero sized.  */
3553 
3554 static bool
3555 zero_sized_field_decl (const_tree fdecl)
3556 {
3557   if (TREE_CODE (fdecl) == FIELD_DECL && DECL_SIZE (fdecl)
3558       && integer_zerop (DECL_SIZE (fdecl)))
3559     return true;
3560   return false;
3561 }
3562 
3563 /* Return true if TYPE is zero sized.  */
3564 
3565 static bool
3566 zero_sized_type (const_tree type)
3567 {
3568   if (AGGREGATE_TYPE_P (type) && TYPE_SIZE (type)
3569       && integer_zerop (TYPE_SIZE (type)))
3570     return true;
3571   return false;
3572 }
3573 
3574 /* A subroutine of gimplify_init_constructor.  Generate individual
3575    MODIFY_EXPRs for a CONSTRUCTOR.  OBJECT is the LHS against which the
3576    assignments should happen.  ELTS is the CONSTRUCTOR_ELTS of the
3577    CONSTRUCTOR.  CLEARED is true if the entire LHS object has been
3578    zeroed first.  */
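
/* For illustration (approximate sketch): for

       struct P { int x; int y; } p = { 1, 2 };

   this emits, roughly,

       p.x = 1;
       p.y = 2;

   and when CLEARED is true, elements whose initializer is zero are
   skipped because the preceding block clear already set them.  */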
3579 
3580 static void
3581 gimplify_init_ctor_eval (tree object, VEC(constructor_elt,gc) *elts,
3582 			 gimple_seq *pre_p, bool cleared)
3583 {
3584   tree array_elt_type = NULL;
3585   unsigned HOST_WIDE_INT ix;
3586   tree purpose, value;
3587 
3588   if (TREE_CODE (TREE_TYPE (object)) == ARRAY_TYPE)
3589     array_elt_type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object)));
3590 
3591   FOR_EACH_CONSTRUCTOR_ELT (elts, ix, purpose, value)
3592     {
3593       tree cref;
3594 
3595       /* NULL values are created above for gimplification errors.  */
3596       if (value == NULL)
3597 	continue;
3598 
3599       if (cleared && initializer_zerop (value))
3600 	continue;
3601 
3602       /* ??? Here's to hoping the front end fills in all of the indices,
3603 	 so we don't have to figure out what's missing ourselves.  */
3604       gcc_assert (purpose);
3605 
3606       /* Skip zero-sized fields, unless value has side-effects.  This can
3607 	 happen with calls to functions returning a zero-sized type, which
3608 	 we shouldn't discard.  As a number of downstream passes don't
3609 	 expect sets of zero-sized fields, we rely on the gimplification of
3610 	 the MODIFY_EXPR we make below to drop the assignment statement.  */
3611       if (! TREE_SIDE_EFFECTS (value) && zero_sized_field_decl (purpose))
3612 	continue;
3613 
3614       /* If we have a RANGE_EXPR, we have to build a loop to assign the
3615 	 whole range.  */
3616       if (TREE_CODE (purpose) == RANGE_EXPR)
3617 	{
3618 	  tree lower = TREE_OPERAND (purpose, 0);
3619 	  tree upper = TREE_OPERAND (purpose, 1);
3620 
3621 	  /* If the lower bound is equal to upper, just treat it as if
3622 	     upper was the index.  */
3623 	  if (simple_cst_equal (lower, upper))
3624 	    purpose = upper;
3625 	  else
3626 	    {
3627 	      gimplify_init_ctor_eval_range (object, lower, upper, value,
3628 					     array_elt_type, pre_p, cleared);
3629 	      continue;
3630 	    }
3631 	}
3632 
3633       if (array_elt_type)
3634 	{
3635 	  /* Do not use bitsizetype for ARRAY_REF indices.  */
3636 	  if (TYPE_DOMAIN (TREE_TYPE (object)))
3637 	    purpose
3638 	      = fold_convert (TREE_TYPE (TYPE_DOMAIN (TREE_TYPE (object))),
3639 			      purpose);
3640 	  cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
3641 			 purpose, NULL_TREE, NULL_TREE);
3642 	}
3643       else
3644 	{
3645 	  gcc_assert (TREE_CODE (purpose) == FIELD_DECL);
3646 	  cref = build3 (COMPONENT_REF, TREE_TYPE (purpose),
3647 			 unshare_expr (object), purpose, NULL_TREE);
3648 	}
3649 
3650       if (TREE_CODE (value) == CONSTRUCTOR
3651 	  && TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE)
3652 	gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
3653 				 pre_p, cleared);
3654       else
3655 	{
3656 	  tree init = build2 (INIT_EXPR, TREE_TYPE (cref), cref, value);
3657 	  gimplify_and_add (init, pre_p);
3658 	  ggc_free (init);
3659 	}
3660     }
3661 }
3662 
3663 /* Return the appropriate RHS predicate for this LHS.  */
3664 
3665 gimple_predicate
3666 rhs_predicate_for (tree lhs)
3667 {
3668   if (is_gimple_reg (lhs))
3669     return is_gimple_reg_rhs_or_call;
3670   else
3671     return is_gimple_mem_rhs_or_call;
3672 }
3673 
3674 /* Gimplify a C99 compound literal expression.  This just means adding
3675    the DECL_EXPR before the current statement and using its anonymous
3676    decl instead.  */
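
/* For illustration (approximate sketch, D is a placeholder name): for

       int *p = (int []) { 1, 2, 3 };

   the compound literal's anonymous decl D is declared and initialized
   before the current statement and the expression is replaced by D
   itself, so the result is roughly

       int D[3] = { 1, 2, 3 };
       int *p = D;  */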
3677 
3678 static enum gimplify_status
3679 gimplify_compound_literal_expr (tree *expr_p, gimple_seq *pre_p)
3680 {
3681   tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (*expr_p);
3682   tree decl = DECL_EXPR_DECL (decl_s);
3683   /* Mark the decl as addressable if the compound literal
3684      expression is addressable now, otherwise it is marked too late
3685      after we gimplify the initialization expression.  */
3686   if (TREE_ADDRESSABLE (*expr_p))
3687     TREE_ADDRESSABLE (decl) = 1;
3688 
3689   /* Preliminarily mark non-addressed complex variables as eligible
3690      for promotion to gimple registers.  We'll transform their uses
3691      as we find them.  */
3692   if ((TREE_CODE (TREE_TYPE (decl)) == COMPLEX_TYPE
3693        || TREE_CODE (TREE_TYPE (decl)) == VECTOR_TYPE)
3694       && !TREE_THIS_VOLATILE (decl)
3695       && !needs_to_live_in_memory (decl))
3696     DECL_GIMPLE_REG_P (decl) = 1;
3697 
3698   /* This decl isn't mentioned in the enclosing block, so add it to the
3699      list of temps.  FIXME it seems a bit of a kludge to say that
3700      anonymous artificial vars aren't pushed, but everything else is.  */
3701   if (DECL_NAME (decl) == NULL_TREE && !DECL_SEEN_IN_BIND_EXPR_P (decl))
3702     gimple_add_tmp_var (decl);
3703 
3704   gimplify_and_add (decl_s, pre_p);
3705   *expr_p = decl;
3706   return GS_OK;
3707 }
3708 
3709 /* Optimize embedded COMPOUND_LITERAL_EXPRs within a CONSTRUCTOR,
3710    return a new CONSTRUCTOR if something changed.  */
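
/* For illustration (approximate sketch): an initializer such as

       struct T t = { .s = (struct S) { 1, 2 } };

   is rewritten, when the compound literal's address is never taken, into
   the equivalent nested form

       struct T t = { .s = { 1, 2 } };

   so that a single CONSTRUCTOR can be handled by
   gimplify_init_constructor.  */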
3711 
3712 static tree
3713 optimize_compound_literals_in_ctor (tree orig_ctor)
3714 {
3715   tree ctor = orig_ctor;
3716   VEC(constructor_elt,gc) *elts = CONSTRUCTOR_ELTS (ctor);
3717   unsigned int idx, num = VEC_length (constructor_elt, elts);
3718 
3719   for (idx = 0; idx < num; idx++)
3720     {
3721       tree value = VEC_index (constructor_elt, elts, idx)->value;
3722       tree newval = value;
3723       if (TREE_CODE (value) == CONSTRUCTOR)
3724 	newval = optimize_compound_literals_in_ctor (value);
3725       else if (TREE_CODE (value) == COMPOUND_LITERAL_EXPR)
3726 	{
3727 	  tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (value);
3728 	  tree decl = DECL_EXPR_DECL (decl_s);
3729 	  tree init = DECL_INITIAL (decl);
3730 
3731 	  if (!TREE_ADDRESSABLE (value)
3732 	      && !TREE_ADDRESSABLE (decl)
3733 	      && init
3734 	      && TREE_CODE (init) == CONSTRUCTOR)
3735 	    newval = optimize_compound_literals_in_ctor (init);
3736 	}
3737       if (newval == value)
3738 	continue;
3739 
3740       if (ctor == orig_ctor)
3741 	{
3742 	  ctor = copy_node (orig_ctor);
3743 	  CONSTRUCTOR_ELTS (ctor) = VEC_copy (constructor_elt, gc, elts);
3744 	  elts = CONSTRUCTOR_ELTS (ctor);
3745 	}
3746       VEC_index (constructor_elt, elts, idx)->value = newval;
3747     }
3748   return ctor;
3749 }
3750 
3751 /* A subroutine of gimplify_modify_expr.  Break out elements of a
3752    CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.
3753 
3754    Note that we still need to clear any elements that don't have explicit
3755    initializers, so if not all elements are initialized we keep the
3756    original MODIFY_EXPR, we just remove all of the constructor elements.
3757 
3758    If NOTIFY_TEMP_CREATION is true, do not gimplify, just return
3759    GS_ERROR if we would have to create a temporary when gimplifying
3760    this constructor.  Otherwise, return GS_OK.
3761 
3762    If NOTIFY_TEMP_CREATION is false, just do the gimplification.  */
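
/* For illustration (approximate sketch): for a sparse aggregate
   initializer such as

       int a[100] = { [3] = 1, [60] = 2 };

   the whole object is typically block-cleared first and only the nonzero
   elements are then assigned individually, roughly

       a[3] = 1;
       a[60] = 2;  */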
3763 
3764 static enum gimplify_status
3765 gimplify_init_constructor (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
3766 			   bool want_value, bool notify_temp_creation)
3767 {
3768   tree object, ctor, type;
3769   enum gimplify_status ret;
3770   VEC(constructor_elt,gc) *elts;
3771 
3772   gcc_assert (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == CONSTRUCTOR);
3773 
3774   if (!notify_temp_creation)
3775     {
3776       ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
3777 			   is_gimple_lvalue, fb_lvalue);
3778       if (ret == GS_ERROR)
3779 	return ret;
3780     }
3781 
3782   object = TREE_OPERAND (*expr_p, 0);
3783   ctor = TREE_OPERAND (*expr_p, 1) =
3784     optimize_compound_literals_in_ctor (TREE_OPERAND (*expr_p, 1));
3785   type = TREE_TYPE (ctor);
3786   elts = CONSTRUCTOR_ELTS (ctor);
3787   ret = GS_ALL_DONE;
3788 
3789   switch (TREE_CODE (type))
3790     {
3791     case RECORD_TYPE:
3792     case UNION_TYPE:
3793     case QUAL_UNION_TYPE:
3794     case ARRAY_TYPE:
3795       {
3796 	struct gimplify_init_ctor_preeval_data preeval_data;
3797 	HOST_WIDE_INT num_ctor_elements, num_nonzero_elements;
3798 	bool cleared, complete_p, valid_const_initializer;
3799 
3800 	/* Aggregate types must lower constructors to initialization of
3801 	   individual elements.  The exception is that a CONSTRUCTOR node
3802 	   with no elements indicates zero-initialization of the whole.  */
3803 	if (VEC_empty (constructor_elt, elts))
3804 	  {
3805 	    if (notify_temp_creation)
3806 	      return GS_OK;
3807 	    break;
3808 	  }
3809 
3810 	/* Fetch information about the constructor to direct later processing.
3811 	   We might want to make static versions of it in various cases, and
3812 	   can only do so if it is known to be a valid constant initializer.  */
3813 	valid_const_initializer
3814 	  = categorize_ctor_elements (ctor, &num_nonzero_elements,
3815 				      &num_ctor_elements, &complete_p);
3816 
3817 	/* If a const aggregate variable is being initialized, then it
3818 	   should never be a loss to promote the variable to be static.  */
3819 	if (valid_const_initializer
3820 	    && num_nonzero_elements > 1
3821 	    && TREE_READONLY (object)
3822 	    && TREE_CODE (object) == VAR_DECL
3823 	    && (flag_merge_constants >= 2 || !TREE_ADDRESSABLE (object)))
3824 	  {
3825 	    if (notify_temp_creation)
3826 	      return GS_ERROR;
3827 	    DECL_INITIAL (object) = ctor;
3828 	    TREE_STATIC (object) = 1;
3829 	    if (!DECL_NAME (object))
3830 	      DECL_NAME (object) = create_tmp_var_name ("C");
3831 	    walk_tree (&DECL_INITIAL (object), force_labels_r, NULL, NULL);
3832 
3833 	    /* ??? C++ doesn't automatically append a .<number> to the
3834 	       assembler name, and even when it does, it looks at FE private
3835 	       data structures to figure out what that number should be,
3836 	       which are not set for this variable.  I suppose this is
3837 	       important for local statics for inline functions, which aren't
3838 	       "local" in the object file sense.  So in order to get a unique
3839 	       TU-local symbol, we must invoke the lhd version now.  */
3840 	    lhd_set_decl_assembler_name (object);
3841 
3842 	    *expr_p = NULL_TREE;
3843 	    break;
3844 	  }
3845 
3846 	/* If there are "lots" of initialized elements, even discounting
3847 	   those that are not address constants (and thus *must* be
3848 	   computed at runtime), then partition the constructor into
3849 	   constant and non-constant parts.  Block copy the constant
3850 	   parts in, then generate code for the non-constant parts.  */
3851 	/* TODO.  There's code in cp/typeck.c to do this.  */
3852 
3853 	if (int_size_in_bytes (TREE_TYPE (ctor)) < 0)
3854 	  /* store_constructor will ignore the clearing of variable-sized
3855 	     objects.  Initializers for such objects must explicitly set
3856 	     every field that needs to be set.  */
3857 	  cleared = false;
3858 	else if (!complete_p)
3859 	  /* If the constructor isn't complete, clear the whole object
3860 	     beforehand.
3861 
3862 	     ??? This ought not to be needed.  For any element not present
3863 	     in the initializer, we should simply set them to zero.  Except
3864 	     we'd need to *find* the elements that are not present, and that
3865 	     requires trickery to avoid quadratic compile-time behavior in
3866 	     large cases or excessive memory use in small cases.  */
3867 	  cleared = true;
3868 	else if (num_ctor_elements - num_nonzero_elements
3869 		 > CLEAR_RATIO (optimize_function_for_speed_p (cfun))
3870 		 && num_nonzero_elements < num_ctor_elements / 4)
3871 	  /* If there are "lots" of zeros, it's more efficient to clear
3872 	     the memory and then set the nonzero elements.  */
3873 	  cleared = true;
3874 	else
3875 	  cleared = false;
3876 
3877 	/* If there are "lots" of initialized elements, and all of them
3878 	   are valid address constants, then the entire initializer can
3879 	   be dropped to memory, and then memcpy'd out.  Don't do this
3880 	   for sparse arrays, though, as it's more efficient to follow
3881 	   the standard CONSTRUCTOR behavior of memset followed by
3882 	   individual element initialization.  Also don't do this for small
3883 	   all-zero initializers (which aren't big enough to merit
3884 	   clearing), and don't try to make bitwise copies of
3885 	   TREE_ADDRESSABLE types.  */
3886 	if (valid_const_initializer
3887 	    && !(cleared || num_nonzero_elements == 0)
3888 	    && !TREE_ADDRESSABLE (type))
3889 	  {
3890 	    HOST_WIDE_INT size = int_size_in_bytes (type);
3891 	    unsigned int align;
3892 
3893 	    /* ??? We can still get unbounded array types, at least
3894 	       from the C++ front end.  This seems wrong, but attempt
3895 	       to work around it for now.  */
3896 	    if (size < 0)
3897 	      {
3898 		size = int_size_in_bytes (TREE_TYPE (object));
3899 		if (size >= 0)
3900 		  TREE_TYPE (ctor) = type = TREE_TYPE (object);
3901 	      }
3902 
3903 	    /* Find the maximum alignment we can assume for the object.  */
3904 	    /* ??? Make use of DECL_OFFSET_ALIGN.  */
3905 	    if (DECL_P (object))
3906 	      align = DECL_ALIGN (object);
3907 	    else
3908 	      align = TYPE_ALIGN (type);
3909 
3910 	    if (size > 0
3911 		&& num_nonzero_elements > 1
3912 		&& !can_move_by_pieces (size, align))
3913 	      {
3914 		if (notify_temp_creation)
3915 		  return GS_ERROR;
3916 
3917 		walk_tree (&ctor, force_labels_r, NULL, NULL);
3918 		ctor = tree_output_constant_def (ctor);
3919 		if (!useless_type_conversion_p (type, TREE_TYPE (ctor)))
3920 		  ctor = build1 (VIEW_CONVERT_EXPR, type, ctor);
3921 		TREE_OPERAND (*expr_p, 1) = ctor;
3922 
3923 		/* This is no longer an assignment of a CONSTRUCTOR, but
3924 		   we still may have processing to do on the LHS.  So
3925 		   pretend we didn't do anything here to let that happen.  */
3926 		return GS_UNHANDLED;
3927 	      }
3928 	  }
3929 
3930 	/* If the target is volatile, we have non-zero elements, and more than
3931 	   one field to assign, initialize the target from a temporary.  */
3932 	if (TREE_THIS_VOLATILE (object)
3933 	    && !TREE_ADDRESSABLE (type)
3934 	    && num_nonzero_elements > 0
3935 	    && VEC_length (constructor_elt, elts) > 1)
3936 	  {
3937 	    tree temp = create_tmp_var (TYPE_MAIN_VARIANT (type), NULL);
3938 	    TREE_OPERAND (*expr_p, 0) = temp;
3939 	    *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
3940 			      *expr_p,
3941 			      build2 (MODIFY_EXPR, void_type_node,
3942 				      object, temp));
3943 	    return GS_OK;
3944 	  }
3945 
3946 	if (notify_temp_creation)
3947 	  return GS_OK;
3948 
3949 	/* If there are nonzero elements and if needed, pre-evaluate to capture
3950 	   elements overlapping with the lhs into temporaries.  We must do this
3951 	   before clearing to fetch the values before they are zeroed-out.  */
3952 	if (num_nonzero_elements > 0 && TREE_CODE (*expr_p) != INIT_EXPR)
3953 	  {
3954 	    preeval_data.lhs_base_decl = get_base_address (object);
3955 	    if (!DECL_P (preeval_data.lhs_base_decl))
3956 	      preeval_data.lhs_base_decl = NULL;
3957 	    preeval_data.lhs_alias_set = get_alias_set (object);
3958 
3959 	    gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p, 1),
3960 					pre_p, post_p, &preeval_data);
3961 	  }
3962 
3963 	if (cleared)
3964 	  {
3965 	    /* Zap the CONSTRUCTOR element list, which simplifies this case.
3966 	       Note that we still have to gimplify, in order to handle the
3967 	       case of variable sized types.  Avoid shared tree structures.  */
3968 	    CONSTRUCTOR_ELTS (ctor) = NULL;
3969 	    TREE_SIDE_EFFECTS (ctor) = 0;
3970 	    object = unshare_expr (object);
3971 	    gimplify_stmt (expr_p, pre_p);
3972 	  }
3973 
3974 	/* If we have not block cleared the object, or if there are nonzero
3975 	   elements in the constructor, add assignments to the individual
3976 	   scalar fields of the object.  */
3977 	if (!cleared || num_nonzero_elements > 0)
3978 	  gimplify_init_ctor_eval (object, elts, pre_p, cleared);
3979 
3980 	*expr_p = NULL_TREE;
3981       }
3982       break;
3983 
3984     case COMPLEX_TYPE:
3985       {
3986 	tree r, i;
3987 
3988 	if (notify_temp_creation)
3989 	  return GS_OK;
3990 
3991 	/* Extract the real and imaginary parts out of the ctor.  */
3992 	gcc_assert (VEC_length (constructor_elt, elts) == 2);
3993 	r = VEC_index (constructor_elt, elts, 0)->value;
3994 	i = VEC_index (constructor_elt, elts, 1)->value;
3995 	if (r == NULL || i == NULL)
3996 	  {
3997 	    tree zero = build_zero_cst (TREE_TYPE (type));
3998 	    if (r == NULL)
3999 	      r = zero;
4000 	    if (i == NULL)
4001 	      i = zero;
4002 	  }
4003 
4004 	/* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
4005 	   represent creation of a complex value.  */
4006 	if (TREE_CONSTANT (r) && TREE_CONSTANT (i))
4007 	  {
4008 	    ctor = build_complex (type, r, i);
4009 	    TREE_OPERAND (*expr_p, 1) = ctor;
4010 	  }
4011 	else
4012 	  {
4013 	    ctor = build2 (COMPLEX_EXPR, type, r, i);
4014 	    TREE_OPERAND (*expr_p, 1) = ctor;
4015 	    ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1),
4016 				 pre_p,
4017 				 post_p,
4018 				 rhs_predicate_for (TREE_OPERAND (*expr_p, 0)),
4019 				 fb_rvalue);
4020 	  }
4021       }
4022       break;
4023 
4024     case VECTOR_TYPE:
4025       {
4026 	unsigned HOST_WIDE_INT ix;
4027 	constructor_elt *ce;
4028 
4029 	if (notify_temp_creation)
4030 	  return GS_OK;
4031 
4032 	/* Go ahead and simplify constant constructors to VECTOR_CST.  */
4033 	if (TREE_CONSTANT (ctor))
4034 	  {
4035 	    bool constant_p = true;
4036 	    tree value;
4037 
4038 	    /* Even when ctor is constant, it might contain non-*_CST
4039 	       elements, such as addresses or trapping values like
4040 	       1.0/0.0 - 1.0/0.0.  Such expressions don't belong
4041 	       in VECTOR_CST nodes.  */
4042 	    FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
4043 	      if (!CONSTANT_CLASS_P (value))
4044 		{
4045 		  constant_p = false;
4046 		  break;
4047 		}
4048 
4049 	    if (constant_p)
4050 	      {
4051 		TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts);
4052 		break;
4053 	      }
4054 
4055 	    /* Don't reduce an initializer constant even if we can't
4056 	       make a VECTOR_CST.  It won't do anything for us, and it'll
4057 	       prevent us from representing it as a single constant.  */
4058 	    if (initializer_constant_valid_p (ctor, type))
4059 	      break;
4060 
4061 	    TREE_CONSTANT (ctor) = 0;
4062 	  }
4063 
4064 	/* Vector types use CONSTRUCTOR all the way through gimple
4065 	  compilation as a general initializer.  */
4066 	FOR_EACH_VEC_ELT (constructor_elt, elts, ix, ce)
4067 	  {
4068 	    enum gimplify_status tret;
4069 	    tret = gimplify_expr (&ce->value, pre_p, post_p, is_gimple_val,
4070 				  fb_rvalue);
4071 	    if (tret == GS_ERROR)
4072 	      ret = GS_ERROR;
4073 	  }
4074 	if (!is_gimple_reg (TREE_OPERAND (*expr_p, 0)))
4075 	  TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (ctor, pre_p);
4076       }
4077       break;
4078 
4079     default:
4080       /* So how did we get a CONSTRUCTOR for a scalar type?  */
4081       gcc_unreachable ();
4082     }
4083 
4084   if (ret == GS_ERROR)
4085     return GS_ERROR;
4086   else if (want_value)
4087     {
4088       *expr_p = object;
4089       return GS_OK;
4090     }
4091   else
4092     {
4093       /* If we have gimplified both sides of the initializer but have
4094 	 not emitted an assignment, do so now.  */
4095       if (*expr_p)
4096 	{
4097 	  tree lhs = TREE_OPERAND (*expr_p, 0);
4098 	  tree rhs = TREE_OPERAND (*expr_p, 1);
4099 	  gimple init = gimple_build_assign (lhs, rhs);
4100 	  gimplify_seq_add_stmt (pre_p, init);
4101 	  *expr_p = NULL;
4102 	}
4103 
4104       return GS_ALL_DONE;
4105     }
4106 }
4107 
4108 /* Given a pointer value OP0, return a simplified version of an
4109    indirection through OP0, or NULL_TREE if no simplification is
4110    possible.  Note that the resulting type may differ from the type
4111    pointed to, but it will still be compatible from the langhooks
4112    point of view.  */
4113 
4114 tree
4115 gimple_fold_indirect_ref (tree t)
4116 {
4117   tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
4118   tree sub = t;
4119   tree subtype;
4120 
4121   STRIP_NOPS (sub);
4122   subtype = TREE_TYPE (sub);
4123   if (!POINTER_TYPE_P (subtype))
4124     return NULL_TREE;
4125 
4126   if (TREE_CODE (sub) == ADDR_EXPR)
4127     {
4128       tree op = TREE_OPERAND (sub, 0);
4129       tree optype = TREE_TYPE (op);
4130       /* *&p => p */
4131       if (useless_type_conversion_p (type, optype))
4132         return op;
4133 
4134       /* *(foo *)&fooarray => fooarray[0] */
4135       if (TREE_CODE (optype) == ARRAY_TYPE
4136 	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
4137 	  && useless_type_conversion_p (type, TREE_TYPE (optype)))
4138        {
4139          tree type_domain = TYPE_DOMAIN (optype);
4140          tree min_val = size_zero_node;
4141          if (type_domain && TYPE_MIN_VALUE (type_domain))
4142            min_val = TYPE_MIN_VALUE (type_domain);
4143 	 if (TREE_CODE (min_val) == INTEGER_CST)
4144 	   return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
4145        }
4146       /* *(foo *)&complexfoo => __real__ complexfoo */
4147       else if (TREE_CODE (optype) == COMPLEX_TYPE
4148                && useless_type_conversion_p (type, TREE_TYPE (optype)))
4149         return fold_build1 (REALPART_EXPR, type, op);
4150       /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
4151       else if (TREE_CODE (optype) == VECTOR_TYPE
4152                && useless_type_conversion_p (type, TREE_TYPE (optype)))
4153         {
4154           tree part_width = TYPE_SIZE (type);
4155           tree index = bitsize_int (0);
4156           return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
4157         }
4158     }
4159 
4160   /* *(p + CST) -> ...  */
4161   if (TREE_CODE (sub) == POINTER_PLUS_EXPR
4162       && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
4163     {
4164       tree addr = TREE_OPERAND (sub, 0);
4165       tree off = TREE_OPERAND (sub, 1);
4166       tree addrtype;
4167 
4168       STRIP_NOPS (addr);
4169       addrtype = TREE_TYPE (addr);
4170 
4171       /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
4172       if (TREE_CODE (addr) == ADDR_EXPR
4173 	  && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
4174 	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
4175 	  && host_integerp (off, 1))
4176 	{
4177           unsigned HOST_WIDE_INT offset = tree_low_cst (off, 1);
4178           tree part_width = TYPE_SIZE (type);
4179           unsigned HOST_WIDE_INT part_widthi
4180             = tree_low_cst (part_width, 0) / BITS_PER_UNIT;
4181           unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
4182           tree index = bitsize_int (indexi);
4183           if (offset / part_widthi
4184               <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype)))
4185             return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
4186                                 part_width, index);
4187 	}
4188 
4189       /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
4190       if (TREE_CODE (addr) == ADDR_EXPR
4191 	  && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
4192 	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
4193         {
4194           tree size = TYPE_SIZE_UNIT (type);
4195           if (tree_int_cst_equal (size, off))
4196             return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
4197         }
4198 
4199       /* *(p + CST) -> MEM_REF <p, CST>.  */
4200       if (TREE_CODE (addr) != ADDR_EXPR
4201 	  || DECL_P (TREE_OPERAND (addr, 0)))
4202 	return fold_build2 (MEM_REF, type,
4203 			    addr,
4204 			    build_int_cst_wide (ptype,
4205 						TREE_INT_CST_LOW (off),
4206 						TREE_INT_CST_HIGH (off)));
4207     }
4208 
4209   /* *(foo *)fooarrptr => (*fooarrptr)[0] */
4210   if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
4211       && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
4212       && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
4213     {
4214       tree type_domain;
4215       tree min_val = size_zero_node;
4216       tree osub = sub;
4217       sub = gimple_fold_indirect_ref (sub);
4218       if (! sub)
4219 	sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
4220       type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
4221       if (type_domain && TYPE_MIN_VALUE (type_domain))
4222         min_val = TYPE_MIN_VALUE (type_domain);
4223       if (TREE_CODE (min_val) == INTEGER_CST)
4224 	return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
4225     }
4226 
4227   return NULL_TREE;
4228 }
4229 
4230 /* Given a pointer value OP0, return a simplified version of an
4231    indirection through OP0, or NULL_TREE if no simplification is
4232    possible.  This may only be applied to a rhs of an expression.
4233    Note that the resulting type may differ from the type pointed
4234    to, but it will still be compatible from the langhooks point
4235    of view.  */
4236 
4237 static tree
4238 gimple_fold_indirect_ref_rhs (tree t)
4239 {
4240   return gimple_fold_indirect_ref (t);
4241 }
4242 
4243 /* Subroutine of gimplify_modify_expr to do simplifications of
4244    MODIFY_EXPRs based on the code of the RHS.  We loop for as long as
4245    something changes.  */
4246 
4247 static enum gimplify_status
4248 gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p,
4249 			  gimple_seq *pre_p, gimple_seq *post_p,
4250 			  bool want_value)
4251 {
4252   enum gimplify_status ret = GS_UNHANDLED;
4253   bool changed;
4254 
4255   do
4256     {
4257       changed = false;
4258       switch (TREE_CODE (*from_p))
4259 	{
4260 	case VAR_DECL:
4261 	  /* If we're assigning from a read-only variable initialized with
4262 	     a constructor, do the direct assignment from the constructor,
4263 	     but only if neither source nor target are volatile since this
4264 	     latter assignment might end up being done on a per-field basis.  */
4265 	  if (DECL_INITIAL (*from_p)
4266 	      && TREE_READONLY (*from_p)
4267 	      && !TREE_THIS_VOLATILE (*from_p)
4268 	      && !TREE_THIS_VOLATILE (*to_p)
4269 	      && TREE_CODE (DECL_INITIAL (*from_p)) == CONSTRUCTOR)
4270 	    {
4271 	      tree old_from = *from_p;
4272 	      enum gimplify_status subret;
4273 
4274 	      /* Move the constructor into the RHS.  */
4275 	      *from_p = unshare_expr (DECL_INITIAL (*from_p));
4276 
4277 	      /* Let's see if gimplify_init_constructor will need to put
4278 		 it in memory.  */
4279 	      subret = gimplify_init_constructor (expr_p, NULL, NULL,
4280 						  false, true);
4281 	      if (subret == GS_ERROR)
4282 		{
4283 		  /* If so, revert the change.  */
4284 		  *from_p = old_from;
4285 		}
4286 	      else
4287 		{
4288 		  ret = GS_OK;
4289 		  changed = true;
4290 		}
4291 	    }
4292 	  break;
4293 	case INDIRECT_REF:
4294 	  {
4295 	    /* If we have code like
4296 
4297 	     *(const A*)(A*)&x
4298 
4299 	     where the type of "x" is a (possibly cv-qualified variant
4300 	     of "A"), treat the entire expression as identical to "x".
4301 	     This kind of code arises in C++ when an object is bound
4302 	     to a const reference, and if "x" is a TARGET_EXPR we want
4303 	     to take advantage of the optimization below.  */
4304 	    bool volatile_p = TREE_THIS_VOLATILE (*from_p);
4305 	    tree t = gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0));
4306 	    if (t)
4307 	      {
4308 		if (TREE_THIS_VOLATILE (t) != volatile_p)
4309 		  {
4310 		    if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
4311 		      t = build_simple_mem_ref_loc (EXPR_LOCATION (*from_p),
4312 						    build_fold_addr_expr (t));
4313 		    if (REFERENCE_CLASS_P (t))
4314 		      TREE_THIS_VOLATILE (t) = volatile_p;
4315 		  }
4316 		*from_p = t;
4317 		ret = GS_OK;
4318 		changed = true;
4319 	      }
4320 	    break;
4321 	  }
4322 
4323 	case TARGET_EXPR:
4324 	  {
4325 	    /* If we are initializing something from a TARGET_EXPR, strip the
4326 	       TARGET_EXPR and initialize it directly, if possible.  This can't
4327 	       be done if the initializer is void, since that implies that the
4328 	       temporary is set in some non-trivial way.
4329 
4330 	       ??? What about code that pulls out the temp and uses it
4331 	       elsewhere? I think that such code never uses the TARGET_EXPR as
4332 	       an initializer.  If I'm wrong, we'll die because the temp won't
4333 	       have any RTL.  In that case, I guess we'll need to replace
4334 	       references somehow.  */
4335 	    tree init = TARGET_EXPR_INITIAL (*from_p);
4336 
4337 	    if (init
4338 		&& !VOID_TYPE_P (TREE_TYPE (init)))
4339 	      {
4340 		*from_p = init;
4341 		ret = GS_OK;
4342 		changed = true;
4343 	      }
4344 	  }
4345 	  break;
4346 
4347 	case COMPOUND_EXPR:
4348 	  /* Remove any COMPOUND_EXPR in the RHS so the following cases will be
4349 	     caught.  */
4350 	  gimplify_compound_expr (from_p, pre_p, true);
4351 	  ret = GS_OK;
4352 	  changed = true;
4353 	  break;
4354 
4355 	case CONSTRUCTOR:
4356 	  /* If we already made some changes, let the front end have a
4357 	     crack at this before we break it down.  */
4358 	  if (ret != GS_UNHANDLED)
4359 	    break;
4360 	  /* If we're initializing from a CONSTRUCTOR, break this into
4361 	     individual MODIFY_EXPRs.  */
4362 	  return gimplify_init_constructor (expr_p, pre_p, post_p, want_value,
4363 					    false);
4364 
4365 	case COND_EXPR:
4366 	  /* If we're assigning to a non-register type, push the assignment
4367 	     down into the branches.  This is mandatory for ADDRESSABLE types,
4368 	     since we cannot generate temporaries for such, but it saves a
4369 	     copy in other cases as well.  */
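	  /* For illustration (approximate sketch): for an aggregate X,

		 x = c ? f () : g ();

	     is rewritten below into

		 if (c) x = f (); else x = g ();

	     so that no aggregate temporary is needed for the result.  */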
4370 	  if (!is_gimple_reg_type (TREE_TYPE (*from_p)))
4371 	    {
4372 	      /* This code should mirror the code in gimplify_cond_expr. */
4373 	      enum tree_code code = TREE_CODE (*expr_p);
4374 	      tree cond = *from_p;
4375 	      tree result = *to_p;
4376 
4377 	      ret = gimplify_expr (&result, pre_p, post_p,
4378 				   is_gimple_lvalue, fb_lvalue);
4379 	      if (ret != GS_ERROR)
4380 		ret = GS_OK;
4381 
4382 	      if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node)
4383 		TREE_OPERAND (cond, 1)
4384 		  = build2 (code, void_type_node, result,
4385 			    TREE_OPERAND (cond, 1));
4386 	      if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
4387 		TREE_OPERAND (cond, 2)
4388 		  = build2 (code, void_type_node, unshare_expr (result),
4389 			    TREE_OPERAND (cond, 2));
4390 
4391 	      TREE_TYPE (cond) = void_type_node;
4392 	      recalculate_side_effects (cond);
4393 
4394 	      if (want_value)
4395 		{
4396 		  gimplify_and_add (cond, pre_p);
4397 		  *expr_p = unshare_expr (result);
4398 		}
4399 	      else
4400 		*expr_p = cond;
4401 	      return ret;
4402 	    }
4403 	  break;
4404 
4405 	case CALL_EXPR:
4406 	  /* For calls that return in memory, give *to_p as the CALL_EXPR's
4407 	     return slot so that we don't generate a temporary.  */
4408 	  if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p)
4409 	      && aggregate_value_p (*from_p, *from_p))
4410 	    {
4411 	      bool use_target;
4412 
4413 	      if (!(rhs_predicate_for (*to_p))(*from_p))
4414 		/* If we need a temporary, *to_p isn't accurate.  */
4415 		use_target = false;
4416 	      /* It's OK to use the return slot directly unless it's an NRV. */
4417 	      else if (TREE_CODE (*to_p) == RESULT_DECL
4418 		       && DECL_NAME (*to_p) == NULL_TREE
4419 		       && needs_to_live_in_memory (*to_p))
4420 		use_target = true;
4421 	      else if (is_gimple_reg_type (TREE_TYPE (*to_p))
4422 		       || (DECL_P (*to_p) && DECL_REGISTER (*to_p)))
4423 		/* Don't force regs into memory.  */
4424 		use_target = false;
4425 	      else if (TREE_CODE (*expr_p) == INIT_EXPR)
4426 		/* It's OK to use the target directly if it's being
4427 		   initialized. */
4428 		use_target = true;
4429 	      else if (variably_modified_type_p (TREE_TYPE (*to_p), NULL_TREE))
4430 		/* Always use the target and thus RSO for variable-sized types.
4431 		   GIMPLE cannot deal with a variable-sized assignment
4432 		   embedded in a call statement.  */
4433 		use_target = true;
4434 	      else if (TREE_CODE (*to_p) != SSA_NAME
4435 		      && (!is_gimple_variable (*to_p)
4436 			  || needs_to_live_in_memory (*to_p)))
4437 		/* Don't use the original target if it's already addressable;
4438 		   if its address escapes, and the called function uses the
4439 		   NRV optimization, a conforming program could see *to_p
4440 		   change before the called function returns; see c++/19317.
4441 		   When optimizing, the return_slot pass marks more functions
4442 		   as safe after we have escape info.  */
4443 		use_target = false;
4444 	      else
4445 		use_target = true;
4446 
4447 	      if (use_target)
4448 		{
4449 		  CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1;
4450 		  mark_addressable (*to_p);
4451 		}
4452 	    }
4453 	  break;
4454 
4455 	case WITH_SIZE_EXPR:
4456 	  /* Likewise for calls that return an aggregate of non-constant size,
4457 	     since we would not be able to generate a temporary at all.  */
4458 	  if (TREE_CODE (TREE_OPERAND (*from_p, 0)) == CALL_EXPR)
4459 	    {
4460 	      *from_p = TREE_OPERAND (*from_p, 0);
4461 	      /* We don't change ret in this case because the
4462 		 WITH_SIZE_EXPR might have been added in
4463 		 gimplify_modify_expr, so returning GS_OK would lead to an
4464 		 infinite loop.  */
4465 	      changed = true;
4466 	    }
4467 	  break;
4468 
4469 	  /* If we're initializing from a container, push the initialization
4470 	     inside it.  */
4471 	case CLEANUP_POINT_EXPR:
4472 	case BIND_EXPR:
4473 	case STATEMENT_LIST:
4474 	  {
4475 	    tree wrap = *from_p;
4476 	    tree t;
4477 
4478 	    ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_min_lval,
4479 				 fb_lvalue);
4480 	    if (ret != GS_ERROR)
4481 	      ret = GS_OK;
4482 
4483 	    t = voidify_wrapper_expr (wrap, *expr_p);
4484 	    gcc_assert (t == *expr_p);
4485 
4486 	    if (want_value)
4487 	      {
4488 		gimplify_and_add (wrap, pre_p);
4489 		*expr_p = unshare_expr (*to_p);
4490 	      }
4491 	    else
4492 	      *expr_p = wrap;
4493 	    return GS_OK;
4494 	  }
4495 
4496 	case COMPOUND_LITERAL_EXPR:
4497 	  {
4498 	    tree complit = TREE_OPERAND (*expr_p, 1);
4499 	    tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (complit);
4500 	    tree decl = DECL_EXPR_DECL (decl_s);
4501 	    tree init = DECL_INITIAL (decl);
4502 
4503 	    /* struct T x = (struct T) { 0, 1, 2 } can be optimized
4504 	       into struct T x = { 0, 1, 2 } if the address of the
4505 	       compound literal has never been taken.  */
4506 	    if (!TREE_ADDRESSABLE (complit)
4507 		&& !TREE_ADDRESSABLE (decl)
4508 		&& init)
4509 	      {
4510 		*expr_p = copy_node (*expr_p);
4511 		TREE_OPERAND (*expr_p, 1) = init;
4512 		return GS_OK;
4513 	      }
4514 	  }
4515 
4516 	default:
4517 	  break;
4518 	}
4519     }
4520   while (changed);
4521 
4522   return ret;
4523 }
4524 
4525 /* Promote partial stores to COMPLEX variables to total stores.  *EXPR_P is
4526    a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a variable with
4527    DECL_GIMPLE_REG_P set.
4528 
4529    IMPORTANT NOTE: This promotion is performed by introducing a load of the
4530    other, unmodified part of the complex object just before the total store.
4531    As a consequence, if the object is still uninitialized, an undefined value
4532    will be loaded into a register, which may result in a spurious exception
4533    if the register is floating-point and the value happens to be a signaling
4534    NaN for example.  Then the fully-fledged complex operations lowering pass
4535    followed by a DCE pass are necessary in order to fix things up.  */
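
/* For illustration (approximate sketch): for a complex variable C with
   DECL_GIMPLE_REG_P set, the partial store

       __real__ c = x;

   is rewritten as a load of the untouched part followed by a total
   store, roughly

       tmp = __imag__ c;
       c = COMPLEX_EXPR <x, tmp>;  */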
4536 
4537 static enum gimplify_status
4538 gimplify_modify_expr_complex_part (tree *expr_p, gimple_seq *pre_p,
4539                                    bool want_value)
4540 {
4541   enum tree_code code, ocode;
4542   tree lhs, rhs, new_rhs, other, realpart, imagpart;
4543 
4544   lhs = TREE_OPERAND (*expr_p, 0);
4545   rhs = TREE_OPERAND (*expr_p, 1);
4546   code = TREE_CODE (lhs);
4547   lhs = TREE_OPERAND (lhs, 0);
4548 
4549   ocode = code == REALPART_EXPR ? IMAGPART_EXPR : REALPART_EXPR;
4550   other = build1 (ocode, TREE_TYPE (rhs), lhs);
4551   TREE_NO_WARNING (other) = 1;
4552   other = get_formal_tmp_var (other, pre_p);
4553 
4554   realpart = code == REALPART_EXPR ? rhs : other;
4555   imagpart = code == REALPART_EXPR ? other : rhs;
4556 
4557   if (TREE_CONSTANT (realpart) && TREE_CONSTANT (imagpart))
4558     new_rhs = build_complex (TREE_TYPE (lhs), realpart, imagpart);
4559   else
4560     new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart);
4561 
4562   gimplify_seq_add_stmt (pre_p, gimple_build_assign (lhs, new_rhs));
4563   *expr_p = (want_value) ? rhs : NULL_TREE;
4564 
4565   return GS_ALL_DONE;
4566 }
4567 
4568 /* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.
4569 
4570       modify_expr
4571 	      : varname '=' rhs
4572 	      | '*' ID '=' rhs
4573 
4574     PRE_P points to the list where side effects that must happen before
4575 	*EXPR_P should be stored.
4576 
4577     POST_P points to the list where side effects that must happen after
4578 	*EXPR_P should be stored.
4579 
4580     WANT_VALUE is nonzero iff we want to use the value of this expression
4581 	in another expression.  */
4582 
4583 static enum gimplify_status
4584 gimplify_modify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
4585 		      bool want_value)
4586 {
4587   tree *from_p = &TREE_OPERAND (*expr_p, 1);
4588   tree *to_p = &TREE_OPERAND (*expr_p, 0);
4589   enum gimplify_status ret = GS_UNHANDLED;
4590   gimple assign;
4591   location_t loc = EXPR_LOCATION (*expr_p);
4592 
4593   gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR
4594 	      || TREE_CODE (*expr_p) == INIT_EXPR);
4595 
4596   /* Trying to simplify a clobber using normal logic doesn't work,
4597      so handle it here.  */
4598   if (TREE_CLOBBER_P (*from_p))
4599     {
4600       gcc_assert (!want_value && TREE_CODE (*to_p) == VAR_DECL);
4601       gimplify_seq_add_stmt (pre_p, gimple_build_assign (*to_p, *from_p));
4602       *expr_p = NULL;
4603       return GS_ALL_DONE;
4604     }
4605 
4606   /* Insert pointer conversions required by the middle-end that are not
4607      required by the frontend.  This fixes middle-end type checking for,
4608      for example, gcc.dg/redecl-6.c.  */
4609   if (POINTER_TYPE_P (TREE_TYPE (*to_p)))
4610     {
4611       STRIP_USELESS_TYPE_CONVERSION (*from_p);
4612       if (!useless_type_conversion_p (TREE_TYPE (*to_p), TREE_TYPE (*from_p)))
4613 	*from_p = fold_convert_loc (loc, TREE_TYPE (*to_p), *from_p);
4614     }
4615 
4616   /* See if any simplifications can be done based on what the RHS is.  */
4617   ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
4618 				  want_value);
4619   if (ret != GS_UNHANDLED)
4620     return ret;
4621 
4622   /* For zero sized types only gimplify the left hand side and right hand
4623      side as statements and throw away the assignment.  Do this after
4624      gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable
4625      types properly.  */
4626   if (zero_sized_type (TREE_TYPE (*from_p)) && !want_value)
4627     {
4628       gimplify_stmt (from_p, pre_p);
4629       gimplify_stmt (to_p, pre_p);
4630       *expr_p = NULL_TREE;
4631       return GS_ALL_DONE;
4632     }
4633 
4634   /* If the value being copied is of variable width, compute the length
4635      of the copy into a WITH_SIZE_EXPR.   Note that we need to do this
4636      before gimplifying any of the operands so that we can resolve any
4637      PLACEHOLDER_EXPRs in the size.  Also note that the RTL expander uses
4638      the size of the expression to be copied, not of the destination, so
4639      that is what we must do here.  */
4640   maybe_with_size_expr (from_p);
4641 
4642   ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
4643   if (ret == GS_ERROR)
4644     return ret;
4645 
4646   /* As a special case, we have to temporarily allow for assignments
4647      with a CALL_EXPR on the RHS.  Since in GIMPLE a function call is
4648      a toplevel statement, when gimplifying the GENERIC expression
4649      MODIFY_EXPR <a, CALL_EXPR <foo>>, we cannot create the tuple
4650      GIMPLE_ASSIGN <a, GIMPLE_CALL <foo>>.
4651 
4652      Instead, we need to create the tuple GIMPLE_CALL <a, foo>.  To
4653      prevent gimplify_expr from trying to create a new temporary for
4654      foo's LHS, we tell it that it should only gimplify until it
4655      reaches the CALL_EXPR.  On return from gimplify_expr, the newly
4656      created GIMPLE_CALL <foo> will be the last statement in *PRE_P
4657      and all we need to do here is set 'a' to be its LHS.  */
4658   ret = gimplify_expr (from_p, pre_p, post_p, rhs_predicate_for (*to_p),
4659 		       fb_rvalue);
4660   if (ret == GS_ERROR)
4661     return ret;
4662 
4663   /* Now see if the above changed *from_p to something we handle specially.  */
4664   ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
4665 				  want_value);
4666   if (ret != GS_UNHANDLED)
4667     return ret;
4668 
4669   /* If we've got a variable sized assignment between two lvalues (i.e. one that
4670      does not involve a call), then we can make things a bit more straightforward
4671      by converting the assignment to memcpy or memset.  */
4672   if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
4673     {
4674       tree from = TREE_OPERAND (*from_p, 0);
4675       tree size = TREE_OPERAND (*from_p, 1);
4676 
4677       if (TREE_CODE (from) == CONSTRUCTOR)
4678 	return gimplify_modify_expr_to_memset (expr_p, size, want_value, pre_p);
4679 
4680       if (is_gimple_addressable (from))
4681 	{
4682 	  *from_p = from;
4683 	  return gimplify_modify_expr_to_memcpy (expr_p, size, want_value,
4684 	      					 pre_p);
4685 	}
4686     }
4687 
4688   /* Transform partial stores to non-addressable complex variables into
4689      total stores.  This allows us to use real instead of virtual operands
4690      for these variables, which improves optimization.  */
4691   if ((TREE_CODE (*to_p) == REALPART_EXPR
4692        || TREE_CODE (*to_p) == IMAGPART_EXPR)
4693       && is_gimple_reg (TREE_OPERAND (*to_p, 0)))
4694     return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value);
4695 
4696   /* Try to alleviate the effects of the gimplification creating artificial
4697      temporaries (see for example is_gimple_reg_rhs) on the debug info.  */
4698   if (!gimplify_ctxp->into_ssa
4699       && TREE_CODE (*from_p) == VAR_DECL
4700       && DECL_IGNORED_P (*from_p)
4701       && DECL_P (*to_p)
4702       && !DECL_IGNORED_P (*to_p))
4703     {
4704       if (!DECL_NAME (*from_p) && DECL_NAME (*to_p))
4705 	DECL_NAME (*from_p)
4706 	  = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p)));
4707       DECL_DEBUG_EXPR_IS_FROM (*from_p) = 1;
4708       SET_DECL_DEBUG_EXPR (*from_p, *to_p);
4709    }
4710 
4711   if (want_value && TREE_THIS_VOLATILE (*to_p))
4712     *from_p = get_initialized_tmp_var (*from_p, pre_p, post_p);
4713 
4714   if (TREE_CODE (*from_p) == CALL_EXPR)
4715     {
4716       /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL
4717 	 instead of a GIMPLE_ASSIGN.  */
4718       tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*from_p));
4719       CALL_EXPR_FN (*from_p) = TREE_OPERAND (CALL_EXPR_FN (*from_p), 0);
4720       STRIP_USELESS_TYPE_CONVERSION (CALL_EXPR_FN (*from_p));
4721       assign = gimple_build_call_from_tree (*from_p);
4722       gimple_call_set_fntype (assign, TREE_TYPE (fnptrtype));
4723       if (!gimple_call_noreturn_p (assign))
4724 	gimple_call_set_lhs (assign, *to_p);
4725     }
4726   else
4727     {
4728       assign = gimple_build_assign (*to_p, *from_p);
4729       gimple_set_location (assign, EXPR_LOCATION (*expr_p));
4730     }
4731 
4732   gimplify_seq_add_stmt (pre_p, assign);
4733 
4734   if (gimplify_ctxp->into_ssa && is_gimple_reg (*to_p))
4735     {
4736       /* If we've somehow already got an SSA_NAME on the LHS, then
4737 	 we've probably modified it twice.  Not good.  */
4738       gcc_assert (TREE_CODE (*to_p) != SSA_NAME);
4739       *to_p = make_ssa_name (*to_p, assign);
4740       gimple_set_lhs (assign, *to_p);
4741     }
4742 
4743   if (want_value)
4744     {
4745       *expr_p = TREE_THIS_VOLATILE (*to_p) ? *from_p : unshare_expr (*to_p);
4746       return GS_OK;
4747     }
4748   else
4749     *expr_p = NULL;
4750 
4751   return GS_ALL_DONE;
4752 }
4753 
4754 /* Gimplify a comparison between two variable-sized objects.  Do this
4755    with a call to BUILT_IN_MEMCMP.  */
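/* For instance (an illustrative sketch only): for objects A and B whose
   type has a non-constant size SZ, the comparison "A == B" is rewritten
   into roughly

       __builtin_memcmp (&A, &B, SZ) == 0

   where SZ is the TYPE_SIZE_UNIT of A's type with any PLACEHOLDER_EXPRs
   substituted against A.  */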
4756 
4757 static enum gimplify_status
4758 gimplify_variable_sized_compare (tree *expr_p)
4759 {
4760   location_t loc = EXPR_LOCATION (*expr_p);
4761   tree op0 = TREE_OPERAND (*expr_p, 0);
4762   tree op1 = TREE_OPERAND (*expr_p, 1);
4763   tree t, arg, dest, src, expr;
4764 
4765   arg = TYPE_SIZE_UNIT (TREE_TYPE (op0));
4766   arg = unshare_expr (arg);
4767   arg = SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg, op0);
4768   src = build_fold_addr_expr_loc (loc, op1);
4769   dest = build_fold_addr_expr_loc (loc, op0);
4770   t = builtin_decl_implicit (BUILT_IN_MEMCMP);
4771   t = build_call_expr_loc (loc, t, 3, dest, src, arg);
4772 
4773   expr
4774     = build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node);
4775   SET_EXPR_LOCATION (expr, loc);
4776   *expr_p = expr;
4777 
4778   return GS_OK;
4779 }
4780 
4781 /* Gimplify a comparison between two aggregate objects of integral scalar
4782    mode as a comparison between the bitwise equivalent scalar values.  */
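/* As an illustrative sketch: if A and B are four-byte structures whose
   machine mode is a 32-bit integer mode, "A == B" becomes roughly

       VIEW_CONVERT_EXPR <unsigned-32-bit-type> (A)
	 == VIEW_CONVERT_EXPR <unsigned-32-bit-type> (B)

   using whatever unsigned scalar type the language hook provides for
   that mode.  */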
4783 
4784 static enum gimplify_status
4785 gimplify_scalar_mode_aggregate_compare (tree *expr_p)
4786 {
4787   location_t loc = EXPR_LOCATION (*expr_p);
4788   tree op0 = TREE_OPERAND (*expr_p, 0);
4789   tree op1 = TREE_OPERAND (*expr_p, 1);
4790 
4791   tree type = TREE_TYPE (op0);
4792   tree scalar_type = lang_hooks.types.type_for_mode (TYPE_MODE (type), 1);
4793 
4794   op0 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op0);
4795   op1 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op1);
4796 
4797   *expr_p
4798     = fold_build2_loc (loc, TREE_CODE (*expr_p), TREE_TYPE (*expr_p), op0, op1);
4799 
4800   return GS_OK;
4801 }
4802 
4803 /* Gimplify an expression sequence.  This function gimplifies each
4804    expression and rewrites the original expression with the last
4805    expression of the sequence in GIMPLE form.
4806 
4807    PRE_P points to the list where the side effects for all the
4808        expressions in the sequence will be emitted.
4809 
4810    WANT_VALUE is true when the result of the last COMPOUND_EXPR is used.  */
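/* For example (illustrative): gimplifying "(a = 1, b = 2, c)" emits the
   statements for "a = 1" and "b = 2" into *PRE_P and leaves "c" as the
   value of *EXPR_P; when the value is not wanted, "c" is gimplified as a
   statement as well.  */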
4811 
4812 static enum gimplify_status
4813 gimplify_compound_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
4814 {
4815   tree t = *expr_p;
4816 
4817   do
4818     {
4819       tree *sub_p = &TREE_OPERAND (t, 0);
4820 
4821       if (TREE_CODE (*sub_p) == COMPOUND_EXPR)
4822 	gimplify_compound_expr (sub_p, pre_p, false);
4823       else
4824 	gimplify_stmt (sub_p, pre_p);
4825 
4826       t = TREE_OPERAND (t, 1);
4827     }
4828   while (TREE_CODE (t) == COMPOUND_EXPR);
4829 
4830   *expr_p = t;
4831   if (want_value)
4832     return GS_OK;
4833   else
4834     {
4835       gimplify_stmt (expr_p, pre_p);
4836       return GS_ALL_DONE;
4837     }
4838 }
4839 
4840 /* Gimplify a SAVE_EXPR node.  EXPR_P points to the expression to
4841    gimplify.  After gimplification, EXPR_P will point to a new temporary
4842    that holds the original value of the SAVE_EXPR node.
4843 
4844    PRE_P points to the list where side effects that must happen before
4845    *EXPR_P should be stored.  */
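/* E.g. (illustrative): the first time "SAVE_EXPR <x + y>" is gimplified,
   a statement along the lines of "tmp = x + y" is added to *PRE_P, the
   SAVE_EXPR is marked resolved with "tmp" as its operand, and any later
   occurrence simply gimplifies to "tmp".  */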
4846 
4847 static enum gimplify_status
4848 gimplify_save_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4849 {
4850   enum gimplify_status ret = GS_ALL_DONE;
4851   tree val;
4852 
4853   gcc_assert (TREE_CODE (*expr_p) == SAVE_EXPR);
4854   val = TREE_OPERAND (*expr_p, 0);
4855 
4856   /* If the SAVE_EXPR has not been resolved, then evaluate it once.  */
4857   if (!SAVE_EXPR_RESOLVED_P (*expr_p))
4858     {
4859       /* The operand may be a void-valued expression, such as the SAVE_EXPRs
4860 	 generated by the Java frontend for class initialization.  It is
4861 	 executed only for its side effects.  */
4862       if (TREE_TYPE (val) == void_type_node)
4863 	{
4864 	  ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
4865 			       is_gimple_stmt, fb_none);
4866 	  val = NULL;
4867 	}
4868       else
4869 	val = get_initialized_tmp_var (val, pre_p, post_p);
4870 
4871       TREE_OPERAND (*expr_p, 0) = val;
4872       SAVE_EXPR_RESOLVED_P (*expr_p) = 1;
4873     }
4874 
4875   *expr_p = val;
4876 
4877   return ret;
4878 }
4879 
4880 /* Rewrite the ADDR_EXPR node pointed to by EXPR_P
4881 
4882       unary_expr
4883 	      : ...
4884 	      | '&' varname
4885 	      ...
4886 
4887     PRE_P points to the list where side effects that must happen before
4888 	*EXPR_P should be stored.
4889 
4890     POST_P points to the list where side effects that must happen after
4891 	*EXPR_P should be stored.  */
4892 
4893 static enum gimplify_status
4894 gimplify_addr_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4895 {
4896   tree expr = *expr_p;
4897   tree op0 = TREE_OPERAND (expr, 0);
4898   enum gimplify_status ret;
4899   location_t loc = EXPR_LOCATION (*expr_p);
4900 
4901   switch (TREE_CODE (op0))
4902     {
4903     case INDIRECT_REF:
4904     do_indirect_ref:
4905       /* Check if we are dealing with an expression of the form '&*ptr'.
4906 	 While the front end folds away '&*ptr' into 'ptr', these
4907 	 expressions may be generated internally by the compiler (e.g.,
4908 	 builtins like __builtin_va_end).  */
4909       /* Caution: the silent array decomposition semantics we allow for
4910 	 ADDR_EXPR means we can't always discard the pair.  */
4911       /* Gimplification of the ADDR_EXPR operand may drop
4912 	 cv-qualification conversions, so make sure we add them if
4913 	 needed.  */
4914       {
4915 	tree op00 = TREE_OPERAND (op0, 0);
4916 	tree t_expr = TREE_TYPE (expr);
4917 	tree t_op00 = TREE_TYPE (op00);
4918 
4919         if (!useless_type_conversion_p (t_expr, t_op00))
4920 	  op00 = fold_convert_loc (loc, TREE_TYPE (expr), op00);
4921         *expr_p = op00;
4922         ret = GS_OK;
4923       }
4924       break;
4925 
4926     case VIEW_CONVERT_EXPR:
4927       /* Take the address of our operand and then convert it to the type of
4928 	 this ADDR_EXPR.
4929 
4930 	 ??? The interactions of VIEW_CONVERT_EXPR and aliasing are not at
4931 	 all clear.  The impact of this transformation is even less clear.  */
4932 
4933       /* If the operand is a useless conversion, look through it.  Doing so
4934 	 guarantees that the ADDR_EXPR and its operand will remain of the
4935 	 same type.  */
4936       if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0, 0)))
4937 	op0 = TREE_OPERAND (op0, 0);
4938 
4939       *expr_p = fold_convert_loc (loc, TREE_TYPE (expr),
4940 				  build_fold_addr_expr_loc (loc,
4941 							TREE_OPERAND (op0, 0)));
4942       ret = GS_OK;
4943       break;
4944 
4945     default:
4946       /* We use fb_either here because the C frontend sometimes takes
4947 	 the address of a call that returns a struct; see
4948 	 gcc.dg/c99-array-lval-1.c.  The gimplifier will correctly make
4949 	 the implied temporary explicit.  */
4950 
4951       /* Make the operand addressable.  */
4952       ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, post_p,
4953 			   is_gimple_addressable, fb_either);
4954       if (ret == GS_ERROR)
4955 	break;
4956 
4957       /* Then mark it.  Beware that it may not be possible to do so directly
4958 	 if a temporary has been created by the gimplification.  */
4959       prepare_gimple_addressable (&TREE_OPERAND (expr, 0), pre_p);
4960 
4961       op0 = TREE_OPERAND (expr, 0);
4962 
4963       /* For various reasons, the gimplification of the expression
4964 	 may have made a new INDIRECT_REF.  */
4965       if (TREE_CODE (op0) == INDIRECT_REF)
4966 	goto do_indirect_ref;
4967 
4968       mark_addressable (TREE_OPERAND (expr, 0));
4969 
4970       /* The FEs may end up building ADDR_EXPRs early on a decl with
4971 	 an incomplete type.  Re-build ADDR_EXPRs in canonical form
4972 	 here.  */
4973       if (!types_compatible_p (TREE_TYPE (op0), TREE_TYPE (TREE_TYPE (expr))))
4974 	*expr_p = build_fold_addr_expr (op0);
4975 
4976       /* Make sure TREE_CONSTANT and TREE_SIDE_EFFECTS are set properly.  */
4977       recompute_tree_invariant_for_addr_expr (*expr_p);
4978 
4979       /* If we re-built the ADDR_EXPR, add a conversion to the original type
4980          if required.  */
4981       if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
4982 	*expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
4983 
4984       break;
4985     }
4986 
4987   return ret;
4988 }
4989 
4990 /* Gimplify the operands of an ASM_EXPR.  Input operands should be a gimple
4991    value; output operands should be a gimple lvalue.  */
4992 
4993 static enum gimplify_status
4994 gimplify_asm_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4995 {
4996   tree expr;
4997   int noutputs;
4998   const char **oconstraints;
4999   int i;
5000   tree link;
5001   const char *constraint;
5002   bool allows_mem, allows_reg, is_inout;
5003   enum gimplify_status ret, tret;
5004   gimple stmt;
5005   VEC(tree, gc) *inputs;
5006   VEC(tree, gc) *outputs;
5007   VEC(tree, gc) *clobbers;
5008   VEC(tree, gc) *labels;
5009   tree link_next;
5010 
5011   expr = *expr_p;
5012   noutputs = list_length (ASM_OUTPUTS (expr));
5013   oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));
5014 
5015   inputs = outputs = clobbers = labels = NULL;
5016 
5017   ret = GS_ALL_DONE;
5018   link_next = NULL_TREE;
5019   for (i = 0, link = ASM_OUTPUTS (expr); link; ++i, link = link_next)
5020     {
5021       bool ok;
5022       size_t constraint_len;
5023 
5024       link_next = TREE_CHAIN (link);
5025 
5026       oconstraints[i]
5027 	= constraint
5028 	= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
5029       constraint_len = strlen (constraint);
5030       if (constraint_len == 0)
5031         continue;
5032 
5033       ok = parse_output_constraint (&constraint, i, 0, 0,
5034 				    &allows_mem, &allows_reg, &is_inout);
5035       if (!ok)
5036 	{
5037 	  ret = GS_ERROR;
5038 	  is_inout = false;
5039 	}
5040 
5041       if (!allows_reg && allows_mem)
5042 	mark_addressable (TREE_VALUE (link));
5043 
5044       tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
5045 			    is_inout ? is_gimple_min_lval : is_gimple_lvalue,
5046 			    fb_lvalue | fb_mayfail);
5047       if (tret == GS_ERROR)
5048 	{
5049 	  error ("invalid lvalue in asm output %d", i);
5050 	  ret = tret;
5051 	}
5052 
5053       VEC_safe_push (tree, gc, outputs, link);
5054       TREE_CHAIN (link) = NULL_TREE;
5055 
5056       if (is_inout)
5057 	{
5058 	  /* An input/output operand.  To give the optimizers more
5059 	     flexibility, split it into separate input and output
5060  	     operands.  */
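	  /* E.g. (illustrative): an operand written "+r" (x) becomes the
	     output "=r" (x) plus a matching input "0" (x); with several
	     alternatives such as "+r,m" the matching input becomes "0,m",
	     leaving the non-register alternatives unchanged.  */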
5061 	  tree input;
5062 	  char buf[10];
5063 
5064 	  /* Turn the in/out constraint into an output constraint.  */
5065 	  char *p = xstrdup (constraint);
5066 	  p[0] = '=';
5067 	  TREE_VALUE (TREE_PURPOSE (link)) = build_string (constraint_len, p);
5068 
5069 	  /* And add a matching input constraint.  */
5070 	  if (allows_reg)
5071 	    {
5072 	      sprintf (buf, "%d", i);
5073 
5074 	      /* If there are multiple alternatives in the constraint,
5075 		 handle each of them individually.  Those that allow a register
5076 		 will be replaced with the operand number; the others will stay
5077 		 unchanged.  */
5078 	      if (strchr (p, ',') != NULL)
5079 		{
5080 		  size_t len = 0, buflen = strlen (buf);
5081 		  char *beg, *end, *str, *dst;
5082 
5083 		  for (beg = p + 1;;)
5084 		    {
5085 		      end = strchr (beg, ',');
5086 		      if (end == NULL)
5087 			end = strchr (beg, '\0');
5088 		      if ((size_t) (end - beg) < buflen)
5089 			len += buflen + 1;
5090 		      else
5091 			len += end - beg + 1;
5092 		      if (*end)
5093 			beg = end + 1;
5094 		      else
5095 			break;
5096 		    }
5097 
5098 		  str = (char *) alloca (len);
5099 		  for (beg = p + 1, dst = str;;)
5100 		    {
5101 		      const char *tem;
5102 		      bool mem_p, reg_p, inout_p;
5103 
5104 		      end = strchr (beg, ',');
5105 		      if (end)
5106 			*end = '\0';
5107 		      beg[-1] = '=';
5108 		      tem = beg - 1;
5109 		      parse_output_constraint (&tem, i, 0, 0,
5110 					       &mem_p, &reg_p, &inout_p);
5111 		      if (dst != str)
5112 			*dst++ = ',';
5113 		      if (reg_p)
5114 			{
5115 			  memcpy (dst, buf, buflen);
5116 			  dst += buflen;
5117 			}
5118 		      else
5119 			{
5120 			  if (end)
5121 			    len = end - beg;
5122 			  else
5123 			    len = strlen (beg);
5124 			  memcpy (dst, beg, len);
5125 			  dst += len;
5126 			}
5127 		      if (end)
5128 			beg = end + 1;
5129 		      else
5130 			break;
5131 		    }
5132 		  *dst = '\0';
5133 		  input = build_string (dst - str, str);
5134 		}
5135 	      else
5136 		input = build_string (strlen (buf), buf);
5137 	    }
5138 	  else
5139 	    input = build_string (constraint_len - 1, constraint + 1);
5140 
5141 	  free (p);
5142 
5143 	  input = build_tree_list (build_tree_list (NULL_TREE, input),
5144 				   unshare_expr (TREE_VALUE (link)));
5145 	  ASM_INPUTS (expr) = chainon (ASM_INPUTS (expr), input);
5146 	}
5147     }
5148 
5149   link_next = NULL_TREE;
5150   for (link = ASM_INPUTS (expr); link; ++i, link = link_next)
5151     {
5152       link_next = TREE_CHAIN (link);
5153       constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
5154       parse_input_constraint (&constraint, 0, 0, noutputs, 0,
5155 			      oconstraints, &allows_mem, &allows_reg);
5156 
5157       /* If we can't make copies, we can only accept memory.  */
5158       if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (link))))
5159 	{
5160 	  if (allows_mem)
5161 	    allows_reg = 0;
5162 	  else
5163 	    {
5164 	      error ("impossible constraint in %<asm%>");
5165 	      error ("non-memory input %d must stay in memory", i);
5166 	      return GS_ERROR;
5167 	    }
5168 	}
5169 
5170       /* If the operand is a memory input, it should be an lvalue.  */
5171       if (!allows_reg && allows_mem)
5172 	{
5173 	  tree inputv = TREE_VALUE (link);
5174 	  STRIP_NOPS (inputv);
5175 	  if (TREE_CODE (inputv) == PREDECREMENT_EXPR
5176 	      || TREE_CODE (inputv) == PREINCREMENT_EXPR
5177 	      || TREE_CODE (inputv) == POSTDECREMENT_EXPR
5178 	      || TREE_CODE (inputv) == POSTINCREMENT_EXPR)
5179 	    TREE_VALUE (link) = error_mark_node;
5180 	  tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
5181 				is_gimple_lvalue, fb_lvalue | fb_mayfail);
5182 	  mark_addressable (TREE_VALUE (link));
5183 	  if (tret == GS_ERROR)
5184 	    {
5185 	      if (EXPR_HAS_LOCATION (TREE_VALUE (link)))
5186 	        input_location = EXPR_LOCATION (TREE_VALUE (link));
5187 	      error ("memory input %d is not directly addressable", i);
5188 	      ret = tret;
5189 	    }
5190 	}
5191       else
5192 	{
5193 	  tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
5194 				is_gimple_asm_val, fb_rvalue);
5195 	  if (tret == GS_ERROR)
5196 	    ret = tret;
5197 	}
5198 
5199       TREE_CHAIN (link) = NULL_TREE;
5200       VEC_safe_push (tree, gc, inputs, link);
5201     }
5202 
5203   for (link = ASM_CLOBBERS (expr); link; ++i, link = TREE_CHAIN (link))
5204     VEC_safe_push (tree, gc, clobbers, link);
5205 
5206   for (link = ASM_LABELS (expr); link; ++i, link = TREE_CHAIN (link))
5207     VEC_safe_push (tree, gc, labels, link);
5208 
5209   /* Do not add ASMs with errors to the gimple IL stream.  */
5210   if (ret != GS_ERROR)
5211     {
5212       stmt = gimple_build_asm_vec (TREE_STRING_POINTER (ASM_STRING (expr)),
5213 				   inputs, outputs, clobbers, labels);
5214 
5215       gimple_asm_set_volatile (stmt, ASM_VOLATILE_P (expr));
5216       gimple_asm_set_input (stmt, ASM_INPUT_P (expr));
5217 
5218       gimplify_seq_add_stmt (pre_p, stmt);
5219     }
5220 
5221   return ret;
5222 }
5223 
5224 /* Gimplify a CLEANUP_POINT_EXPR.  Currently this works by adding
5225    GIMPLE_WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
5226    gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
5227    return to this function.
5228 
5229    FIXME should we complexify the prequeue handling instead?  Or use flags
5230    for all the cleanups and let the optimizer tighten them up?  The current
5231    code seems pretty fragile; it will break on a cleanup within any
5232    non-conditional nesting.  But any such nesting would be broken, anyway;
5233    we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
5234    and continues out of it.  We can do that at the RTL level, though, so
5235    having an optimizer to tighten up try/finally regions would be a Good
5236    Thing.  */
5237 
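/* Roughly (an illustrative sketch), a gimplified body of the form

       stmt1;
       WITH_CLEANUP_EXPR <cleanup>;
       stmt2;
       stmt3;

   is rewritten below into

       stmt1;
       try { stmt2; stmt3; } finally { cleanup; }

   while a cleanup that is already the last statement is simply spliced in
   place (or dropped from the normal path when it is EH-only).  */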
5238 static enum gimplify_status
5239 gimplify_cleanup_point_expr (tree *expr_p, gimple_seq *pre_p)
5240 {
5241   gimple_stmt_iterator iter;
5242   gimple_seq body_sequence = NULL;
5243 
5244   tree temp = voidify_wrapper_expr (*expr_p, NULL);
5245 
5246   /* We only care about the number of conditions between the innermost
5247      CLEANUP_POINT_EXPR and the cleanup.  So save and reset the count and
5248      any cleanups collected outside the CLEANUP_POINT_EXPR.  */
5249   int old_conds = gimplify_ctxp->conditions;
5250   gimple_seq old_cleanups = gimplify_ctxp->conditional_cleanups;
5251   bool old_in_cleanup_point_expr = gimplify_ctxp->in_cleanup_point_expr;
5252   gimplify_ctxp->conditions = 0;
5253   gimplify_ctxp->conditional_cleanups = NULL;
5254   gimplify_ctxp->in_cleanup_point_expr = true;
5255 
5256   gimplify_stmt (&TREE_OPERAND (*expr_p, 0), &body_sequence);
5257 
5258   gimplify_ctxp->conditions = old_conds;
5259   gimplify_ctxp->conditional_cleanups = old_cleanups;
5260   gimplify_ctxp->in_cleanup_point_expr = old_in_cleanup_point_expr;
5261 
5262   for (iter = gsi_start (body_sequence); !gsi_end_p (iter); )
5263     {
5264       gimple wce = gsi_stmt (iter);
5265 
5266       if (gimple_code (wce) == GIMPLE_WITH_CLEANUP_EXPR)
5267 	{
5268 	  if (gsi_one_before_end_p (iter))
5269 	    {
5270               /* Note that gsi_insert_seq_before and gsi_remove do not
5271                  scan operands, unlike some other sequence mutators.  */
5272 	      if (!gimple_wce_cleanup_eh_only (wce))
5273 		gsi_insert_seq_before_without_update (&iter,
5274 						      gimple_wce_cleanup (wce),
5275 						      GSI_SAME_STMT);
5276 	      gsi_remove (&iter, true);
5277 	      break;
5278 	    }
5279 	  else
5280 	    {
5281 	      gimple gtry;
5282 	      gimple_seq seq;
5283 	      enum gimple_try_flags kind;
5284 
5285 	      if (gimple_wce_cleanup_eh_only (wce))
5286 		kind = GIMPLE_TRY_CATCH;
5287 	      else
5288 		kind = GIMPLE_TRY_FINALLY;
5289 	      seq = gsi_split_seq_after (iter);
5290 
5291 	      gtry = gimple_build_try (seq, gimple_wce_cleanup (wce), kind);
5292               /* Do not use gsi_replace here, as it may scan operands.
5293                  We want to do a simple structural modification only.  */
5294               *gsi_stmt_ptr (&iter) = gtry;
5295 	      iter = gsi_start (seq);
5296 	    }
5297 	}
5298       else
5299 	gsi_next (&iter);
5300     }
5301 
5302   gimplify_seq_add_seq (pre_p, body_sequence);
5303   if (temp)
5304     {
5305       *expr_p = temp;
5306       return GS_OK;
5307     }
5308   else
5309     {
5310       *expr_p = NULL;
5311       return GS_ALL_DONE;
5312     }
5313 }
5314 
5315 /* Insert a cleanup marker for gimplify_cleanup_point_expr.  CLEANUP
5316    is the cleanup action required.  EH_ONLY is true if the cleanup should
5317    only be executed if an exception is thrown, not on normal exit.  */
5318 
5319 static void
5320 gimple_push_cleanup (tree var, tree cleanup, bool eh_only, gimple_seq *pre_p)
5321 {
5322   gimple wce;
5323   gimple_seq cleanup_stmts = NULL;
5324 
5325   /* Errors can result in improperly nested cleanups.  Which results in
5326      confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR.  */
5327   if (seen_error ())
5328     return;
5329 
5330   if (gimple_conditional_context ())
5331     {
5332       /* If we're in a conditional context, this is more complex.  We only
5333 	 want to run the cleanup if we actually ran the initialization that
5334 	 necessitates it, but we want to run it after the end of the
5335 	 conditional context.  So we wrap the try/finally around the
5336 	 condition and use a flag to determine whether or not to actually
5337 	 run the destructor.  Thus
5338 
5339 	   test ? f(A()) : 0
5340 
5341 	 becomes (approximately)
5342 
5343 	   flag = 0;
5344 	   try {
5345 	     if (test) { A::A(temp); flag = 1; val = f(temp); }
5346 	     else { val = 0; }
5347 	   } finally {
5348 	     if (flag) A::~A(temp);
5349 	   }
5350 	   val
5351       */
5352       tree flag = create_tmp_var (boolean_type_node, "cleanup");
5353       gimple ffalse = gimple_build_assign (flag, boolean_false_node);
5354       gimple ftrue = gimple_build_assign (flag, boolean_true_node);
5355 
5356       cleanup = build3 (COND_EXPR, void_type_node, flag, cleanup, NULL);
5357       gimplify_stmt (&cleanup, &cleanup_stmts);
5358       wce = gimple_build_wce (cleanup_stmts);
5359 
5360       gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, ffalse);
5361       gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
5362       gimplify_seq_add_stmt (pre_p, ftrue);
5363 
5364       /* Because of this manipulation, and the EH edges that jump
5365 	 threading cannot redirect, the temporary (VAR) will appear
5366 	 to be used uninitialized.  Don't warn.  */
5367       TREE_NO_WARNING (var) = 1;
5368     }
5369   else
5370     {
5371       gimplify_stmt (&cleanup, &cleanup_stmts);
5372       wce = gimple_build_wce (cleanup_stmts);
5373       gimple_wce_set_cleanup_eh_only (wce, eh_only);
5374       gimplify_seq_add_stmt (pre_p, wce);
5375     }
5376 }
5377 
5378 /* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR.  */
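/* E.g. (an illustrative sketch, with a made-up slot name): for
   "TARGET_EXPR <D.1234, init, cleanup>", D.1234 is registered as a
   temporary, roughly "D.1234 = init" is gimplified into *PRE_P, the
   cleanup (plus, possibly, a clobber of D.1234) is pushed via
   gimple_push_cleanup, and D.1234 becomes the value of the whole
   expression.  */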
5379 
5380 static enum gimplify_status
5381 gimplify_target_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
5382 {
5383   tree targ = *expr_p;
5384   tree temp = TARGET_EXPR_SLOT (targ);
5385   tree init = TARGET_EXPR_INITIAL (targ);
5386   enum gimplify_status ret;
5387 
5388   if (init)
5389     {
5390       tree cleanup = NULL_TREE;
5391 
5392       /* TARGET_EXPR temps aren't part of the enclosing block, so add this one
5393 	 to the temps list.  Also handle variable-length TARGET_EXPRs.  */
5394       if (TREE_CODE (DECL_SIZE (temp)) != INTEGER_CST)
5395 	{
5396 	  if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (temp)))
5397 	    gimplify_type_sizes (TREE_TYPE (temp), pre_p);
5398 	  gimplify_vla_decl (temp, pre_p);
5399 	}
5400       else
5401 	gimple_add_tmp_var (temp);
5402 
5403       /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
5404 	 expression is supposed to initialize the slot.  */
5405       if (VOID_TYPE_P (TREE_TYPE (init)))
5406 	ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
5407       else
5408 	{
5409 	  tree init_expr = build2 (INIT_EXPR, void_type_node, temp, init);
5410 	  init = init_expr;
5411 	  ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
5412 	  init = NULL;
5413 	  ggc_free (init_expr);
5414 	}
5415       if (ret == GS_ERROR)
5416 	{
5417 	  /* PR c++/28266 Make sure this is expanded only once. */
5418 	  TARGET_EXPR_INITIAL (targ) = NULL_TREE;
5419 	  return GS_ERROR;
5420 	}
5421       if (init)
5422 	gimplify_and_add (init, pre_p);
5423 
5424       /* If needed, push the cleanup for the temp.  */
5425       if (TARGET_EXPR_CLEANUP (targ))
5426 	{
5427 	  if (CLEANUP_EH_ONLY (targ))
5428 	    gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ),
5429 				 CLEANUP_EH_ONLY (targ), pre_p);
5430 	  else
5431 	    cleanup = TARGET_EXPR_CLEANUP (targ);
5432 	}
5433 
5434       /* Add a clobber for the temporary going out of scope, like
5435 	 gimplify_bind_expr.  */
5436       if (gimplify_ctxp->in_cleanup_point_expr
5437 	  && needs_to_live_in_memory (temp))
5438 	{
5439 	  tree clobber = build_constructor (TREE_TYPE (temp), NULL);
5440 	  TREE_THIS_VOLATILE (clobber) = true;
5441 	  clobber = build2 (MODIFY_EXPR, TREE_TYPE (temp), temp, clobber);
5442 	  if (cleanup)
5443 	    cleanup = build2 (COMPOUND_EXPR, void_type_node, cleanup,
5444 			      clobber);
5445 	  else
5446 	    cleanup = clobber;
5447 	}
5448 
5449       if (cleanup)
5450 	gimple_push_cleanup (temp, cleanup, false, pre_p);
5451 
5452       /* Only expand this once.  */
5453       TREE_OPERAND (targ, 3) = init;
5454       TARGET_EXPR_INITIAL (targ) = NULL_TREE;
5455     }
5456   else
5457     /* We should have expanded this before.  */
5458     gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp));
5459 
5460   *expr_p = temp;
5461   return GS_OK;
5462 }
5463 
5464 /* Gimplification of expression trees.  */
5465 
5466 /* Gimplify an expression which appears at statement context.  The
5467    corresponding GIMPLE statements are added to *SEQ_P.  If *SEQ_P is
5468    NULL, a new sequence is allocated.
5469 
5470    Return true if we actually added a statement to the queue.  */
5471 
5472 bool
5473 gimplify_stmt (tree *stmt_p, gimple_seq *seq_p)
5474 {
5475   gimple_seq_node last;
5476 
5477   if (!*seq_p)
5478     *seq_p = gimple_seq_alloc ();
5479 
5480   last = gimple_seq_last (*seq_p);
5481   gimplify_expr (stmt_p, seq_p, NULL, is_gimple_stmt, fb_none);
5482   return last != gimple_seq_last (*seq_p);
5483 }
5484 
5485 /* Add FIRSTPRIVATE entries for DECL in the surrounding OpenMP parallels
5486    to CTX.  If entries already exist, force them to be some flavor of private.
5487    If there is no enclosing parallel, do nothing.  */
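/* E.g. (illustrative): if a variable-length array that is private to a
   parallel region has a saved size expression "n", walking the enclosing
   contexts here adds "n" as firstprivate on the parallel (or demotes an
   existing shared entry to firstprivate), so each thread gets its own
   copy of the size.  */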
5488 
5489 void
5490 omp_firstprivatize_variable (struct gimplify_omp_ctx *ctx, tree decl)
5491 {
5492   splay_tree_node n;
5493 
5494   if (decl == NULL || !DECL_P (decl))
5495     return;
5496 
5497   do
5498     {
5499       n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5500       if (n != NULL)
5501 	{
5502 	  if (n->value & GOVD_SHARED)
5503 	    n->value = GOVD_FIRSTPRIVATE | (n->value & GOVD_SEEN);
5504 	  else
5505 	    return;
5506 	}
5507       else if (ctx->region_type != ORT_WORKSHARE)
5508 	omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
5509 
5510       ctx = ctx->outer_context;
5511     }
5512   while (ctx);
5513 }
5514 
5515 /* Similarly for each of the type sizes of TYPE.  */
5516 
5517 static void
5518 omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
5519 {
5520   if (type == NULL || type == error_mark_node)
5521     return;
5522   type = TYPE_MAIN_VARIANT (type);
5523 
5524   if (pointer_set_insert (ctx->privatized_types, type))
5525     return;
5526 
5527   switch (TREE_CODE (type))
5528     {
5529     case INTEGER_TYPE:
5530     case ENUMERAL_TYPE:
5531     case BOOLEAN_TYPE:
5532     case REAL_TYPE:
5533     case FIXED_POINT_TYPE:
5534       omp_firstprivatize_variable (ctx, TYPE_MIN_VALUE (type));
5535       omp_firstprivatize_variable (ctx, TYPE_MAX_VALUE (type));
5536       break;
5537 
5538     case ARRAY_TYPE:
5539       omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
5540       omp_firstprivatize_type_sizes (ctx, TYPE_DOMAIN (type));
5541       break;
5542 
5543     case RECORD_TYPE:
5544     case UNION_TYPE:
5545     case QUAL_UNION_TYPE:
5546       {
5547 	tree field;
5548 	for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
5549 	  if (TREE_CODE (field) == FIELD_DECL)
5550 	    {
5551 	      omp_firstprivatize_variable (ctx, DECL_FIELD_OFFSET (field));
5552 	      omp_firstprivatize_type_sizes (ctx, TREE_TYPE (field));
5553 	    }
5554       }
5555       break;
5556 
5557     case POINTER_TYPE:
5558     case REFERENCE_TYPE:
5559       omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
5560       break;
5561 
5562     default:
5563       break;
5564     }
5565 
5566   omp_firstprivatize_variable (ctx, TYPE_SIZE (type));
5567   omp_firstprivatize_variable (ctx, TYPE_SIZE_UNIT (type));
5568   lang_hooks.types.omp_firstprivatize_type_sizes (ctx, type);
5569 }
5570 
5571 /* Add an entry for DECL in the OpenMP context CTX with FLAGS.  */
5572 
5573 static void
5574 omp_add_variable (struct gimplify_omp_ctx *ctx, tree decl, unsigned int flags)
5575 {
5576   splay_tree_node n;
5577   unsigned int nflags;
5578   tree t;
5579 
5580   if (error_operand_p (decl))
5581     return;
5582 
5583   /* Never elide decls whose type has TREE_ADDRESSABLE set.  This means
5584      there are constructors involved somewhere.  */
5585   if (TREE_ADDRESSABLE (TREE_TYPE (decl))
5586       || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl)))
5587     flags |= GOVD_SEEN;
5588 
5589   n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5590   if (n != NULL)
5591     {
5592       /* We shouldn't be re-adding the decl with the same data
5593 	 sharing class.  */
5594       gcc_assert ((n->value & GOVD_DATA_SHARE_CLASS & flags) == 0);
5595       /* The only combination of data sharing classes we should see is
5596 	 FIRSTPRIVATE and LASTPRIVATE.  */
5597       nflags = n->value | flags;
5598       gcc_assert ((nflags & GOVD_DATA_SHARE_CLASS)
5599 		  == (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE));
5600       n->value = nflags;
5601       return;
5602     }
5603 
5604   /* When adding a variable-sized variable, we have to handle all sorts
5605      of additional bits of data: the pointer replacement variable, and
5606      the parameters of the type.  */
5607   if (DECL_SIZE (decl) && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
5608     {
5609       /* Add the pointer replacement variable as PRIVATE if the variable
5610 	 replacement is private, else FIRSTPRIVATE since we'll need the
5611 	 address of the original variable either for SHARED, or for the
5612 	 copy into or out of the context.  */
5613       if (!(flags & GOVD_LOCAL))
5614 	{
5615 	  nflags = flags & GOVD_PRIVATE ? GOVD_PRIVATE : GOVD_FIRSTPRIVATE;
5616 	  nflags |= flags & GOVD_SEEN;
5617 	  t = DECL_VALUE_EXPR (decl);
5618 	  gcc_assert (TREE_CODE (t) == INDIRECT_REF);
5619 	  t = TREE_OPERAND (t, 0);
5620 	  gcc_assert (DECL_P (t));
5621 	  omp_add_variable (ctx, t, nflags);
5622 	}
5623 
5624       /* Add all of the variable and type parameters (which should have
5625 	 been gimplified to a formal temporary) as FIRSTPRIVATE.  */
5626       omp_firstprivatize_variable (ctx, DECL_SIZE_UNIT (decl));
5627       omp_firstprivatize_variable (ctx, DECL_SIZE (decl));
5628       omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
5629 
5630       /* The variable-sized variable itself is never SHARED, only some form
5631 	 of PRIVATE.  The sharing would take place via the pointer variable
5632 	 which we remapped above.  */
5633       if (flags & GOVD_SHARED)
5634 	flags = GOVD_PRIVATE | GOVD_DEBUG_PRIVATE
5635 		| (flags & (GOVD_SEEN | GOVD_EXPLICIT));
5636 
5637       /* We're going to make use of the TYPE_SIZE_UNIT at least in the
5638 	 alloca statement we generate for the variable, so make sure it
5639 	 is available.  This isn't automatically needed for the SHARED
5640 	 case, since we won't be allocating local storage then.
5641 	 For local variables TYPE_SIZE_UNIT might not be gimplified yet;
5642 	 in that case omp_notice_variable will be called later
5643 	 on when it is gimplified.  */
5644       else if (! (flags & GOVD_LOCAL)
5645 	       && DECL_P (TYPE_SIZE_UNIT (TREE_TYPE (decl))))
5646 	omp_notice_variable (ctx, TYPE_SIZE_UNIT (TREE_TYPE (decl)), true);
5647     }
5648   else if (lang_hooks.decls.omp_privatize_by_reference (decl))
5649     {
5650       gcc_assert ((flags & GOVD_LOCAL) == 0);
5651       omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
5652 
5653       /* Similar to the direct variable sized case above, we'll need the
5654 	 size of references being privatized.  */
5655       if ((flags & GOVD_SHARED) == 0)
5656 	{
5657 	  t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
5658 	  if (TREE_CODE (t) != INTEGER_CST)
5659 	    omp_notice_variable (ctx, t, true);
5660 	}
5661     }
5662 
5663   splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags);
5664 }
5665 
5666 /* Notice a threadprivate variable DECL used in OpenMP context CTX.
5667    This just prints out diagnostics about threadprivate variable uses
5668    in untied tasks.  If DECL2 is non-NULL, prevent this warning
5669    on that variable.  */
5670 
5671 static bool
5672 omp_notice_threadprivate_variable (struct gimplify_omp_ctx *ctx, tree decl,
5673 				   tree decl2)
5674 {
5675   splay_tree_node n;
5676 
5677   if (ctx->region_type != ORT_UNTIED_TASK)
5678     return false;
5679   n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5680   if (n == NULL)
5681     {
5682       error ("threadprivate variable %qE used in untied task",
5683 	     DECL_NAME (decl));
5684       error_at (ctx->location, "enclosing task");
5685       splay_tree_insert (ctx->variables, (splay_tree_key)decl, 0);
5686     }
5687   if (decl2)
5688     splay_tree_insert (ctx->variables, (splay_tree_key)decl2, 0);
5689   return false;
5690 }
5691 
5692 /* Record the fact that DECL was used within the OpenMP context CTX.
5693    IN_CODE is true when real code uses DECL, and false when we should
5694    merely emit default(none) errors.  Return true if DECL is going to
5695    be remapped and thus DECL shouldn't be gimplified into its
5696    DECL_VALUE_EXPR (if any).  */
5697 
5698 static bool
5699 omp_notice_variable (struct gimplify_omp_ctx *ctx, tree decl, bool in_code)
5700 {
5701   splay_tree_node n;
5702   unsigned flags = in_code ? GOVD_SEEN : 0;
5703   bool ret = false, shared;
5704 
5705   if (error_operand_p (decl))
5706     return false;
5707 
5708   /* Threadprivate variables are predetermined.  */
5709   if (is_global_var (decl))
5710     {
5711       if (DECL_THREAD_LOCAL_P (decl))
5712 	return omp_notice_threadprivate_variable (ctx, decl, NULL_TREE);
5713 
5714       if (DECL_HAS_VALUE_EXPR_P (decl))
5715 	{
5716 	  tree value = get_base_address (DECL_VALUE_EXPR (decl));
5717 
5718 	  if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value))
5719 	    return omp_notice_threadprivate_variable (ctx, decl, value);
5720 	}
5721     }
5722 
5723   n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5724   if (n == NULL)
5725     {
5726       enum omp_clause_default_kind default_kind, kind;
5727       struct gimplify_omp_ctx *octx;
5728 
5729       if (ctx->region_type == ORT_WORKSHARE)
5730 	goto do_outer;
5731 
5732       /* ??? Some compiler-generated variables (like SAVE_EXPRs) could be
5733 	 remapped firstprivate instead of shared.  To some extent this is
5734 	 addressed in omp_firstprivatize_type_sizes, but not effectively.  */
5735       default_kind = ctx->default_kind;
5736       kind = lang_hooks.decls.omp_predetermined_sharing (decl);
5737       if (kind != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
5738 	default_kind = kind;
5739 
5740       switch (default_kind)
5741 	{
5742 	case OMP_CLAUSE_DEFAULT_NONE:
5743 	  error ("%qE not specified in enclosing parallel",
5744 		 DECL_NAME (lang_hooks.decls.omp_report_decl (decl)));
5745 	  if ((ctx->region_type & ORT_TASK) != 0)
5746 	    error_at (ctx->location, "enclosing task");
5747 	  else
5748 	    error_at (ctx->location, "enclosing parallel");
5749 	  /* FALLTHRU */
5750 	case OMP_CLAUSE_DEFAULT_SHARED:
5751 	  flags |= GOVD_SHARED;
5752 	  break;
5753 	case OMP_CLAUSE_DEFAULT_PRIVATE:
5754 	  flags |= GOVD_PRIVATE;
5755 	  break;
5756 	case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE:
5757 	  flags |= GOVD_FIRSTPRIVATE;
5758 	  break;
5759 	case OMP_CLAUSE_DEFAULT_UNSPECIFIED:
5760 	  /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED.  */
5761 	  gcc_assert ((ctx->region_type & ORT_TASK) != 0);
5762 	  if (ctx->outer_context)
5763 	    omp_notice_variable (ctx->outer_context, decl, in_code);
5764 	  for (octx = ctx->outer_context; octx; octx = octx->outer_context)
5765 	    {
5766 	      splay_tree_node n2;
5767 
5768 	      n2 = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
5769 	      if (n2 && (n2->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED)
5770 		{
5771 		  flags |= GOVD_FIRSTPRIVATE;
5772 		  break;
5773 		}
5774 	      if ((octx->region_type & ORT_PARALLEL) != 0)
5775 		break;
5776 	    }
5777 	  if (flags & GOVD_FIRSTPRIVATE)
5778 	    break;
5779 	  if (octx == NULL
5780 	      && (TREE_CODE (decl) == PARM_DECL
5781 		  || (!is_global_var (decl)
5782 		      && DECL_CONTEXT (decl) == current_function_decl)))
5783 	    {
5784 	      flags |= GOVD_FIRSTPRIVATE;
5785 	      break;
5786 	    }
5787 	  flags |= GOVD_SHARED;
5788 	  break;
5789 	default:
5790 	  gcc_unreachable ();
5791 	}
5792 
5793       if ((flags & GOVD_PRIVATE)
5794 	  && lang_hooks.decls.omp_private_outer_ref (decl))
5795 	flags |= GOVD_PRIVATE_OUTER_REF;
5796 
5797       omp_add_variable (ctx, decl, flags);
5798 
5799       shared = (flags & GOVD_SHARED) != 0;
5800       ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
5801       goto do_outer;
5802     }
5803 
5804   if ((n->value & (GOVD_SEEN | GOVD_LOCAL)) == 0
5805       && (flags & (GOVD_SEEN | GOVD_LOCAL)) == GOVD_SEEN
5806       && DECL_SIZE (decl)
5807       && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
5808     {
5809       splay_tree_node n2;
5810       tree t = DECL_VALUE_EXPR (decl);
5811       gcc_assert (TREE_CODE (t) == INDIRECT_REF);
5812       t = TREE_OPERAND (t, 0);
5813       gcc_assert (DECL_P (t));
5814       n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
5815       n2->value |= GOVD_SEEN;
5816     }
5817 
5818   shared = ((flags | n->value) & GOVD_SHARED) != 0;
5819   ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
5820 
5821   /* If nothing changed, there's nothing left to do.  */
5822   if ((n->value & flags) == flags)
5823     return ret;
5824   flags |= n->value;
5825   n->value = flags;
5826 
5827  do_outer:
5828   /* If the variable is private in the current context, then we don't
5829      need to propagate anything to an outer context.  */
5830   if ((flags & GOVD_PRIVATE) && !(flags & GOVD_PRIVATE_OUTER_REF))
5831     return ret;
5832   if (ctx->outer_context
5833       && omp_notice_variable (ctx->outer_context, decl, in_code))
5834     return true;
5835   return ret;
5836 }
5837 
5838 /* Verify that DECL is private within CTX.  If there's specific information
5839    to the contrary in the innermost scope, generate an error.  */
5840 
5841 static bool
5842 omp_is_private (struct gimplify_omp_ctx *ctx, tree decl)
5843 {
5844   splay_tree_node n;
5845 
5846   n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5847   if (n != NULL)
5848     {
5849       if (n->value & GOVD_SHARED)
5850 	{
5851 	  if (ctx == gimplify_omp_ctxp)
5852 	    {
5853 	      error ("iteration variable %qE should be private",
5854 		     DECL_NAME (decl));
5855 	      n->value = GOVD_PRIVATE;
5856 	      return true;
5857 	    }
5858 	  else
5859 	    return false;
5860 	}
5861       else if ((n->value & GOVD_EXPLICIT) != 0
5862 	       && (ctx == gimplify_omp_ctxp
5863 		   || (ctx->region_type == ORT_COMBINED_PARALLEL
5864 		       && gimplify_omp_ctxp->outer_context == ctx)))
5865 	{
5866 	  if ((n->value & GOVD_FIRSTPRIVATE) != 0)
5867 	    error ("iteration variable %qE should not be firstprivate",
5868 		   DECL_NAME (decl));
5869 	  else if ((n->value & GOVD_REDUCTION) != 0)
5870 	    error ("iteration variable %qE should not be reduction",
5871 		   DECL_NAME (decl));
5872 	}
5873       return (ctx == gimplify_omp_ctxp
5874 	      || (ctx->region_type == ORT_COMBINED_PARALLEL
5875 		  && gimplify_omp_ctxp->outer_context == ctx));
5876     }
5877 
5878   if (ctx->region_type != ORT_WORKSHARE)
5879     return false;
5880   else if (ctx->outer_context)
5881     return omp_is_private (ctx->outer_context, decl);
5882   return false;
5883 }
5884 
5885 /* Return true if DECL is private within a parallel region
5886    that binds to the current construct's context, or appears in such a
5887    parallel region's REDUCTION clause.  */
5888 
5889 static bool
5890 omp_check_private (struct gimplify_omp_ctx *ctx, tree decl)
5891 {
5892   splay_tree_node n;
5893 
5894   do
5895     {
5896       ctx = ctx->outer_context;
5897       if (ctx == NULL)
5898 	return !(is_global_var (decl)
5899 		 /* References might be private, but might be shared too.  */
5900 		 || lang_hooks.decls.omp_privatize_by_reference (decl));
5901 
5902       n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
5903       if (n != NULL)
5904 	return (n->value & GOVD_SHARED) == 0;
5905     }
5906   while (ctx->region_type == ORT_WORKSHARE);
5907   return false;
5908 }
5909 
5910 /* Scan the OpenMP clauses in *LIST_P, installing mappings into a new
5911    omp context and into the enclosing omp contexts.  */
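/* E.g. (illustrative): for "#pragma omp parallel firstprivate (x)
   reduction (+:s)", the loop below records x as explicitly firstprivate
   and s as an explicit reduction in the new context, gimplifies operands
   of clauses such as if/num_threads into *PRE_P, and drops clauses whose
   decls turn out to be erroneous.  */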
5912 
5913 static void
5914 gimplify_scan_omp_clauses (tree *list_p, gimple_seq *pre_p,
5915 			   enum omp_region_type region_type)
5916 {
5917   struct gimplify_omp_ctx *ctx, *outer_ctx;
5918   struct gimplify_ctx gctx;
5919   tree c;
5920 
5921   ctx = new_omp_context (region_type);
5922   outer_ctx = ctx->outer_context;
5923 
5924   while ((c = *list_p) != NULL)
5925     {
5926       bool remove = false;
5927       bool notice_outer = true;
5928       const char *check_non_private = NULL;
5929       unsigned int flags;
5930       tree decl;
5931 
5932       switch (OMP_CLAUSE_CODE (c))
5933 	{
5934 	case OMP_CLAUSE_PRIVATE:
5935 	  flags = GOVD_PRIVATE | GOVD_EXPLICIT;
5936 	  if (lang_hooks.decls.omp_private_outer_ref (OMP_CLAUSE_DECL (c)))
5937 	    {
5938 	      flags |= GOVD_PRIVATE_OUTER_REF;
5939 	      OMP_CLAUSE_PRIVATE_OUTER_REF (c) = 1;
5940 	    }
5941 	  else
5942 	    notice_outer = false;
5943 	  goto do_add;
5944 	case OMP_CLAUSE_SHARED:
5945 	  flags = GOVD_SHARED | GOVD_EXPLICIT;
5946 	  goto do_add;
5947 	case OMP_CLAUSE_FIRSTPRIVATE:
5948 	  flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
5949 	  check_non_private = "firstprivate";
5950 	  goto do_add;
5951 	case OMP_CLAUSE_LASTPRIVATE:
5952 	  flags = GOVD_LASTPRIVATE | GOVD_SEEN | GOVD_EXPLICIT;
5953 	  check_non_private = "lastprivate";
5954 	  goto do_add;
5955 	case OMP_CLAUSE_REDUCTION:
5956 	  flags = GOVD_REDUCTION | GOVD_SEEN | GOVD_EXPLICIT;
5957 	  check_non_private = "reduction";
5958 	  goto do_add;
5959 
5960 	do_add:
5961 	  decl = OMP_CLAUSE_DECL (c);
5962 	  if (error_operand_p (decl))
5963 	    {
5964 	      remove = true;
5965 	      break;
5966 	    }
5967 	  omp_add_variable (ctx, decl, flags);
5968 	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
5969 	      && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
5970 	    {
5971 	      omp_add_variable (ctx, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
5972 				GOVD_LOCAL | GOVD_SEEN);
5973 	      gimplify_omp_ctxp = ctx;
5974 	      push_gimplify_context (&gctx);
5975 
5976 	      OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = gimple_seq_alloc ();
5977 	      OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = gimple_seq_alloc ();
5978 
5979 	      gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c),
5980 		  		&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c));
5981 	      pop_gimplify_context
5982 		(gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c)));
5983 	      push_gimplify_context (&gctx);
5984 	      gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c),
5985 		  		&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
5986 	      pop_gimplify_context
5987 		(gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c)));
5988 	      OMP_CLAUSE_REDUCTION_INIT (c) = NULL_TREE;
5989 	      OMP_CLAUSE_REDUCTION_MERGE (c) = NULL_TREE;
5990 
5991 	      gimplify_omp_ctxp = outer_ctx;
5992 	    }
5993 	  else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
5994 		   && OMP_CLAUSE_LASTPRIVATE_STMT (c))
5995 	    {
5996 	      gimplify_omp_ctxp = ctx;
5997 	      push_gimplify_context (&gctx);
5998 	      if (TREE_CODE (OMP_CLAUSE_LASTPRIVATE_STMT (c)) != BIND_EXPR)
5999 		{
6000 		  tree bind = build3 (BIND_EXPR, void_type_node, NULL,
6001 				      NULL, NULL);
6002 		  TREE_SIDE_EFFECTS (bind) = 1;
6003 		  BIND_EXPR_BODY (bind) = OMP_CLAUSE_LASTPRIVATE_STMT (c);
6004 		  OMP_CLAUSE_LASTPRIVATE_STMT (c) = bind;
6005 		}
6006 	      gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c),
6007 				&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
6008 	      pop_gimplify_context
6009 		(gimple_seq_first_stmt (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c)));
6010 	      OMP_CLAUSE_LASTPRIVATE_STMT (c) = NULL_TREE;
6011 
6012 	      gimplify_omp_ctxp = outer_ctx;
6013 	    }
6014 	  if (notice_outer)
6015 	    goto do_notice;
6016 	  break;
6017 
6018 	case OMP_CLAUSE_COPYIN:
6019 	case OMP_CLAUSE_COPYPRIVATE:
6020 	  decl = OMP_CLAUSE_DECL (c);
6021 	  if (error_operand_p (decl))
6022 	    {
6023 	      remove = true;
6024 	      break;
6025 	    }
6026 	do_notice:
6027 	  if (outer_ctx)
6028 	    omp_notice_variable (outer_ctx, decl, true);
6029 	  if (check_non_private
6030 	      && region_type == ORT_WORKSHARE
6031 	      && omp_check_private (ctx, decl))
6032 	    {
6033 	      error ("%s variable %qE is private in outer context",
6034 		     check_non_private, DECL_NAME (decl));
6035 	      remove = true;
6036 	    }
6037 	  break;
6038 
6039 	case OMP_CLAUSE_FINAL:
6040 	case OMP_CLAUSE_IF:
6041 	  OMP_CLAUSE_OPERAND (c, 0)
6042 	    = gimple_boolify (OMP_CLAUSE_OPERAND (c, 0));
6043 	  /* Fall through.  */
6044 
6045 	case OMP_CLAUSE_SCHEDULE:
6046 	case OMP_CLAUSE_NUM_THREADS:
6047 	  if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
6048 			     is_gimple_val, fb_rvalue) == GS_ERROR)
6049 	      remove = true;
6050 	  break;
6051 
6052 	case OMP_CLAUSE_NOWAIT:
6053 	case OMP_CLAUSE_ORDERED:
6054 	case OMP_CLAUSE_UNTIED:
6055 	case OMP_CLAUSE_COLLAPSE:
6056 	case OMP_CLAUSE_MERGEABLE:
6057 	  break;
6058 
6059 	case OMP_CLAUSE_DEFAULT:
6060 	  ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
6061 	  break;
6062 
6063 	default:
6064 	  gcc_unreachable ();
6065 	}
6066 
6067       if (remove)
6068 	*list_p = OMP_CLAUSE_CHAIN (c);
6069       else
6070 	list_p = &OMP_CLAUSE_CHAIN (c);
6071     }
6072 
6073   gimplify_omp_ctxp = ctx;
6074 }
6075 
6076 /* Helper for gimplify_adjust_omp_clauses: for each variable seen in the
6077    context without an explicit clause, add an implicit data-sharing clause.  */
6078 
6079 static int
6080 gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data)
6081 {
6082   tree *list_p = (tree *) data;
6083   tree decl = (tree) n->key;
6084   unsigned flags = n->value;
6085   enum omp_clause_code code;
6086   tree clause;
6087   bool private_debug;
6088 
6089   if (flags & (GOVD_EXPLICIT | GOVD_LOCAL))
6090     return 0;
6091   if ((flags & GOVD_SEEN) == 0)
6092     return 0;
6093   if (flags & GOVD_DEBUG_PRIVATE)
6094     {
6095       gcc_assert ((flags & GOVD_DATA_SHARE_CLASS) == GOVD_PRIVATE);
6096       private_debug = true;
6097     }
6098   else
6099     private_debug
6100       = lang_hooks.decls.omp_private_debug_clause (decl,
6101 						   !!(flags & GOVD_SHARED));
6102   if (private_debug)
6103     code = OMP_CLAUSE_PRIVATE;
6104   else if (flags & GOVD_SHARED)
6105     {
6106       if (is_global_var (decl))
6107 	{
6108 	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
6109 	  while (ctx != NULL)
6110 	    {
6111 	      splay_tree_node on
6112 		= splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
6113 	      if (on && (on->value & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
6114 				      | GOVD_PRIVATE | GOVD_REDUCTION)) != 0)
6115 		break;
6116 	      ctx = ctx->outer_context;
6117 	    }
6118 	  if (ctx == NULL)
6119 	    return 0;
6120 	}
6121       code = OMP_CLAUSE_SHARED;
6122     }
6123   else if (flags & GOVD_PRIVATE)
6124     code = OMP_CLAUSE_PRIVATE;
6125   else if (flags & GOVD_FIRSTPRIVATE)
6126     code = OMP_CLAUSE_FIRSTPRIVATE;
6127   else
6128     gcc_unreachable ();
6129 
6130   clause = build_omp_clause (input_location, code);
6131   OMP_CLAUSE_DECL (clause) = decl;
6132   OMP_CLAUSE_CHAIN (clause) = *list_p;
6133   if (private_debug)
6134     OMP_CLAUSE_PRIVATE_DEBUG (clause) = 1;
6135   else if (code == OMP_CLAUSE_PRIVATE && (flags & GOVD_PRIVATE_OUTER_REF))
6136     OMP_CLAUSE_PRIVATE_OUTER_REF (clause) = 1;
6137   *list_p = clause;
6138   lang_hooks.decls.omp_finish_clause (clause);
6139 
6140   return 0;
6141 }
6142 
6143 static void
6144 gimplify_adjust_omp_clauses (tree *list_p)
6145 {
6146   struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
6147   tree c, decl;
6148 
6149   while ((c = *list_p) != NULL)
6150     {
6151       splay_tree_node n;
6152       bool remove = false;
6153 
6154       switch (OMP_CLAUSE_CODE (c))
6155 	{
6156 	case OMP_CLAUSE_PRIVATE:
6157 	case OMP_CLAUSE_SHARED:
6158 	case OMP_CLAUSE_FIRSTPRIVATE:
6159 	  decl = OMP_CLAUSE_DECL (c);
6160 	  n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
6161 	  remove = !(n->value & GOVD_SEEN);
6162 	  if (! remove)
6163 	    {
6164 	      bool shared = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED;
6165 	      if ((n->value & GOVD_DEBUG_PRIVATE)
6166 		  || lang_hooks.decls.omp_private_debug_clause (decl, shared))
6167 		{
6168 		  gcc_assert ((n->value & GOVD_DEBUG_PRIVATE) == 0
6169 			      || ((n->value & GOVD_DATA_SHARE_CLASS)
6170 				  == GOVD_PRIVATE));
6171 		  OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_PRIVATE);
6172 		  OMP_CLAUSE_PRIVATE_DEBUG (c) = 1;
6173 		}
6174 	    }
6175 	  break;
6176 
6177 	case OMP_CLAUSE_LASTPRIVATE:
6178 	  /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
6179 	     accurately reflect the presence of a FIRSTPRIVATE clause.  */
6180 	  decl = OMP_CLAUSE_DECL (c);
6181 	  n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
6182 	  OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
6183 	    = (n->value & GOVD_FIRSTPRIVATE) != 0;
6184 	  break;
6185 
6186 	case OMP_CLAUSE_REDUCTION:
6187 	case OMP_CLAUSE_COPYIN:
6188 	case OMP_CLAUSE_COPYPRIVATE:
6189 	case OMP_CLAUSE_IF:
6190 	case OMP_CLAUSE_NUM_THREADS:
6191 	case OMP_CLAUSE_SCHEDULE:
6192 	case OMP_CLAUSE_NOWAIT:
6193 	case OMP_CLAUSE_ORDERED:
6194 	case OMP_CLAUSE_DEFAULT:
6195 	case OMP_CLAUSE_UNTIED:
6196 	case OMP_CLAUSE_COLLAPSE:
6197 	case OMP_CLAUSE_FINAL:
6198 	case OMP_CLAUSE_MERGEABLE:
6199 	  break;
6200 
6201 	default:
6202 	  gcc_unreachable ();
6203 	}
6204 
6205       if (remove)
6206 	*list_p = OMP_CLAUSE_CHAIN (c);
6207       else
6208 	list_p = &OMP_CLAUSE_CHAIN (c);
6209     }
6210 
6211   /* Add in any implicit data sharing.  */
6212   splay_tree_foreach (ctx->variables, gimplify_adjust_omp_clauses_1, list_p);
6213 
6214   gimplify_omp_ctxp = ctx->outer_context;
6215   delete_omp_context (ctx);
6216 }
6217 
6218 /* Gimplify the contents of an OMP_PARALLEL statement.  This involves
6219    gimplification of the body, as well as scanning the body for used
6220    variables.  We need to do this scan now, because variable-sized
6221    decls will be decomposed during gimplification.  */
6222 
6223 static void
6224 gimplify_omp_parallel (tree *expr_p, gimple_seq *pre_p)
6225 {
6226   tree expr = *expr_p;
6227   gimple g;
6228   gimple_seq body = NULL;
6229   struct gimplify_ctx gctx;
6230 
6231   gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p,
6232 			     OMP_PARALLEL_COMBINED (expr)
6233 			     ? ORT_COMBINED_PARALLEL
6234 			     : ORT_PARALLEL);
6235 
6236   push_gimplify_context (&gctx);
6237 
6238   g = gimplify_and_return_first (OMP_PARALLEL_BODY (expr), &body);
6239   if (gimple_code (g) == GIMPLE_BIND)
6240     pop_gimplify_context (g);
6241   else
6242     pop_gimplify_context (NULL);
6243 
6244   gimplify_adjust_omp_clauses (&OMP_PARALLEL_CLAUSES (expr));
6245 
6246   g = gimple_build_omp_parallel (body,
6247 				 OMP_PARALLEL_CLAUSES (expr),
6248 				 NULL_TREE, NULL_TREE);
6249   if (OMP_PARALLEL_COMBINED (expr))
6250     gimple_omp_set_subcode (g, GF_OMP_PARALLEL_COMBINED);
6251   gimplify_seq_add_stmt (pre_p, g);
6252   *expr_p = NULL_TREE;
6253 }
6254 
6255 /* Gimplify the contents of an OMP_TASK statement.  This involves
6256    gimplification of the body, as well as scanning the body for used
6257    variables.  We need to do this scan now, because variable-sized
6258    decls will be decomposed during gimplification.  */
6259 
6260 static void
6261 gimplify_omp_task (tree *expr_p, gimple_seq *pre_p)
6262 {
6263   tree expr = *expr_p;
6264   gimple g;
6265   gimple_seq body = NULL;
6266   struct gimplify_ctx gctx;
6267 
6268   gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr), pre_p,
6269 			     find_omp_clause (OMP_TASK_CLAUSES (expr),
6270 					      OMP_CLAUSE_UNTIED)
6271 			     ? ORT_UNTIED_TASK : ORT_TASK);
6272 
6273   push_gimplify_context (&gctx);
6274 
6275   g = gimplify_and_return_first (OMP_TASK_BODY (expr), &body);
6276   if (gimple_code (g) == GIMPLE_BIND)
6277     pop_gimplify_context (g);
6278   else
6279     pop_gimplify_context (NULL);
6280 
6281   gimplify_adjust_omp_clauses (&OMP_TASK_CLAUSES (expr));
6282 
6283   g = gimple_build_omp_task (body,
6284 			     OMP_TASK_CLAUSES (expr),
6285 			     NULL_TREE, NULL_TREE,
6286 			     NULL_TREE, NULL_TREE, NULL_TREE);
6287   gimplify_seq_add_stmt (pre_p, g);
6288   *expr_p = NULL_TREE;
6289 }
6290 
6291 /* Gimplify the gross structure of an OMP_FOR statement.  */
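/* E.g. (an illustrative sketch, with i.N a made-up temporary): for
   "#pragma omp for" over "for (i = lo; i < hi; i++)" where i is not a
   gimple register, a private temporary i.N is introduced, the init, cond
   and incr operands are gimplified into the pre-body, the increment is
   rewritten as "i.N = i.N + 1", and "i = i.N" is emitted at the top of
   the loop body so user code still sees i.  */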
6292 
6293 static enum gimplify_status
6294 gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
6295 {
6296   tree for_stmt, decl, var, t;
6297   enum gimplify_status ret = GS_ALL_DONE;
6298   enum gimplify_status tret;
6299   gimple gfor;
6300   gimple_seq for_body, for_pre_body;
6301   int i;
6302 
6303   for_stmt = *expr_p;
6304 
6305   gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt), pre_p,
6306 			     ORT_WORKSHARE);
6307 
6308   /* Handle OMP_FOR_INIT.  */
6309   for_pre_body = NULL;
6310   gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
6311   OMP_FOR_PRE_BODY (for_stmt) = NULL_TREE;
6312 
6313   for_body = gimple_seq_alloc ();
6314   gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
6315 	      == TREE_VEC_LENGTH (OMP_FOR_COND (for_stmt)));
6316   gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
6317 	      == TREE_VEC_LENGTH (OMP_FOR_INCR (for_stmt)));
6318   for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
6319     {
6320       t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
6321       gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
6322       decl = TREE_OPERAND (t, 0);
6323       gcc_assert (DECL_P (decl));
6324       gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl))
6325 		  || POINTER_TYPE_P (TREE_TYPE (decl)));
6326 
6327       /* Make sure the iteration variable is private.  */
6328       if (omp_is_private (gimplify_omp_ctxp, decl))
6329 	omp_notice_variable (gimplify_omp_ctxp, decl, true);
6330       else
6331 	omp_add_variable (gimplify_omp_ctxp, decl, GOVD_PRIVATE | GOVD_SEEN);
6332 
6333       /* If DECL is not a gimple register, create a temporary variable to act
6334 	 as an iteration counter.  This is valid, since DECL cannot be
6335 	 modified in the body of the loop.  */
6336       if (!is_gimple_reg (decl))
6337 	{
6338 	  var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
6339 	  TREE_OPERAND (t, 0) = var;
6340 
6341 	  gimplify_seq_add_stmt (&for_body, gimple_build_assign (decl, var));
6342 
6343 	  omp_add_variable (gimplify_omp_ctxp, var, GOVD_PRIVATE | GOVD_SEEN);
6344 	}
6345       else
6346 	var = decl;
6347 
6348       tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
6349 			    is_gimple_val, fb_rvalue);
6350       ret = MIN (ret, tret);
6351       if (ret == GS_ERROR)
6352 	return ret;
6353 
6354       /* Handle OMP_FOR_COND.  */
6355       t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
6356       gcc_assert (COMPARISON_CLASS_P (t));
6357       gcc_assert (TREE_OPERAND (t, 0) == decl);
6358 
6359       tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
6360 			    is_gimple_val, fb_rvalue);
6361       ret = MIN (ret, tret);
6362 
6363       /* Handle OMP_FOR_INCR.  */
6364       t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
6365       switch (TREE_CODE (t))
6366 	{
6367 	case PREINCREMENT_EXPR:
6368 	case POSTINCREMENT_EXPR:
6369 	  t = build_int_cst (TREE_TYPE (decl), 1);
6370 	  t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
6371 	  t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
6372 	  TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
6373 	  break;
6374 
6375 	case PREDECREMENT_EXPR:
6376 	case POSTDECREMENT_EXPR:
6377 	  t = build_int_cst (TREE_TYPE (decl), -1);
6378 	  t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
6379 	  t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
6380 	  TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
6381 	  break;
6382 
6383 	case MODIFY_EXPR:
6384 	  gcc_assert (TREE_OPERAND (t, 0) == decl);
6385 	  TREE_OPERAND (t, 0) = var;
6386 
6387 	  t = TREE_OPERAND (t, 1);
6388 	  switch (TREE_CODE (t))
6389 	    {
6390 	    case PLUS_EXPR:
6391 	      if (TREE_OPERAND (t, 1) == decl)
6392 		{
6393 		  TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0);
6394 		  TREE_OPERAND (t, 0) = var;
6395 		  break;
6396 		}
6397 
6398 	      /* Fallthru.  */
6399 	    case MINUS_EXPR:
6400 	    case POINTER_PLUS_EXPR:
6401 	      gcc_assert (TREE_OPERAND (t, 0) == decl);
6402 	      TREE_OPERAND (t, 0) = var;
6403 	      break;
6404 	    default:
6405 	      gcc_unreachable ();
6406 	    }
6407 
6408 	  tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
6409 				is_gimple_val, fb_rvalue);
6410 	  ret = MIN (ret, tret);
6411 	  break;
6412 
6413 	default:
6414 	  gcc_unreachable ();
6415 	}
6416 
6417       if (var != decl || TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) > 1)
6418 	{
6419 	  tree c;
6420 	  for (c = OMP_FOR_CLAUSES (for_stmt); c ; c = OMP_CLAUSE_CHAIN (c))
6421 	    if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6422 		&& OMP_CLAUSE_DECL (c) == decl
6423 		&& OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) == NULL)
6424 	      {
6425 		t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
6426 		gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
6427 		gcc_assert (TREE_OPERAND (t, 0) == var);
6428 		t = TREE_OPERAND (t, 1);
6429 		gcc_assert (TREE_CODE (t) == PLUS_EXPR
6430 			    || TREE_CODE (t) == MINUS_EXPR
6431 			    || TREE_CODE (t) == POINTER_PLUS_EXPR);
6432 		gcc_assert (TREE_OPERAND (t, 0) == var);
6433 		t = build2 (TREE_CODE (t), TREE_TYPE (decl), decl,
6434 			    TREE_OPERAND (t, 1));
6435 		gimplify_assign (decl, t,
6436 				 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
6437 	    }
6438 	}
6439     }
6440 
6441   gimplify_and_add (OMP_FOR_BODY (for_stmt), &for_body);
6442 
6443   gimplify_adjust_omp_clauses (&OMP_FOR_CLAUSES (for_stmt));
6444 
6445   gfor = gimple_build_omp_for (for_body, OMP_FOR_CLAUSES (for_stmt),
6446 			       TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)),
6447 			       for_pre_body);
6448 
6449   for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
6450     {
6451       t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
6452       gimple_omp_for_set_index (gfor, i, TREE_OPERAND (t, 0));
6453       gimple_omp_for_set_initial (gfor, i, TREE_OPERAND (t, 1));
6454       t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
6455       gimple_omp_for_set_cond (gfor, i, TREE_CODE (t));
6456       gimple_omp_for_set_final (gfor, i, TREE_OPERAND (t, 1));
6457       t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
6458       gimple_omp_for_set_incr (gfor, i, TREE_OPERAND (t, 1));
6459     }
6460 
6461   gimplify_seq_add_stmt (pre_p, gfor);
6462   return ret == GS_ALL_DONE ? GS_ALL_DONE : GS_ERROR;
6463 }
6464 
6465 /* Gimplify the gross structure of other OpenMP worksharing constructs.
6466    In particular, OMP_SECTIONS and OMP_SINGLE.  */
6467 
6468 static void
6469 gimplify_omp_workshare (tree *expr_p, gimple_seq *pre_p)
6470 {
6471   tree expr = *expr_p;
6472   gimple stmt;
6473   gimple_seq body = NULL;
6474 
6475   gimplify_scan_omp_clauses (&OMP_CLAUSES (expr), pre_p, ORT_WORKSHARE);
6476   gimplify_and_add (OMP_BODY (expr), &body);
6477   gimplify_adjust_omp_clauses (&OMP_CLAUSES (expr));
6478 
6479   if (TREE_CODE (expr) == OMP_SECTIONS)
6480     stmt = gimple_build_omp_sections (body, OMP_CLAUSES (expr));
6481   else if (TREE_CODE (expr) == OMP_SINGLE)
6482     stmt = gimple_build_omp_single (body, OMP_CLAUSES (expr));
6483   else
6484     gcc_unreachable ();
6485 
6486   gimplify_seq_add_stmt (pre_p, stmt);
6487 }
6488 
6489 /* A subroutine of gimplify_omp_atomic.  The front end is supposed to have
6490    stabilized the lhs of the atomic operation as *ADDR.  Return true if
6491    EXPR is this stabilized form.  */
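
/* For example (an illustrative sketch): for

       #pragma omp atomic
       x += 1;

   the front end presents the lhs as *&x, so with ADDR == &x this
   predicate accepts both *&x and x itself, looking through useless
   type conversions along the way.  */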
6492 
6493 static bool
6494 goa_lhs_expr_p (tree expr, tree addr)
6495 {
6496   /* Also include casts to other type variants.  The C front end is fond
6497      of adding these for e.g. volatile variables.  This is like
6498      STRIP_TYPE_NOPS but includes the main variant lookup.  */
6499   STRIP_USELESS_TYPE_CONVERSION (expr);
6500 
6501   if (TREE_CODE (expr) == INDIRECT_REF)
6502     {
6503       expr = TREE_OPERAND (expr, 0);
6504       while (expr != addr
6505 	     && (CONVERT_EXPR_P (expr)
6506 		 || TREE_CODE (expr) == NON_LVALUE_EXPR)
6507 	     && TREE_CODE (expr) == TREE_CODE (addr)
6508 	     && types_compatible_p (TREE_TYPE (expr), TREE_TYPE (addr)))
6509 	{
6510 	  expr = TREE_OPERAND (expr, 0);
6511 	  addr = TREE_OPERAND (addr, 0);
6512 	}
6513       if (expr == addr)
6514 	return true;
6515       return (TREE_CODE (addr) == ADDR_EXPR
6516 	      && TREE_CODE (expr) == ADDR_EXPR
6517 	      && TREE_OPERAND (addr, 0) == TREE_OPERAND (expr, 0));
6518     }
6519   if (TREE_CODE (addr) == ADDR_EXPR && expr == TREE_OPERAND (addr, 0))
6520     return true;
6521   return false;
6522 }
6523 
6524 /* Walk *EXPR_P and replace appearances of *LHS_ADDR with LHS_VAR.  If an
6525    expression does not involve the lhs, evaluate it into a temporary.
6526    Return 1 if the lhs appeared as a subexpression, 0 if it did not,
6527    or -1 if an error was encountered.  */
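
/* As an illustration (a sketch only): stabilizing the rhs of

       #pragma omp atomic
       x = x + foo ();

   with LHS_ADDR == &x replaces the appearance of x by LHS_VAR and
   evaluates foo () into a temporary emitted in PRE_P, so 1 is returned;
   an rhs that never mentions the lhs is instead gimplified into a
   single value and 0 is returned.  */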
6528 
6529 static int
6530 goa_stabilize_expr (tree *expr_p, gimple_seq *pre_p, tree lhs_addr,
6531 		    tree lhs_var)
6532 {
6533   tree expr = *expr_p;
6534   int saw_lhs;
6535 
6536   if (goa_lhs_expr_p (expr, lhs_addr))
6537     {
6538       *expr_p = lhs_var;
6539       return 1;
6540     }
6541   if (is_gimple_val (expr))
6542     return 0;
6543 
6544   saw_lhs = 0;
6545   switch (TREE_CODE_CLASS (TREE_CODE (expr)))
6546     {
6547     case tcc_binary:
6548     case tcc_comparison:
6549       saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p, lhs_addr,
6550 				     lhs_var);
      /* Fallthru.  */
6551     case tcc_unary:
6552       saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p, lhs_addr,
6553 				     lhs_var);
6554       break;
6555     case tcc_expression:
6556       switch (TREE_CODE (expr))
6557 	{
6558 	case TRUTH_ANDIF_EXPR:
6559 	case TRUTH_ORIF_EXPR:
6560 	case TRUTH_AND_EXPR:
6561 	case TRUTH_OR_EXPR:
6562 	case TRUTH_XOR_EXPR:
6563 	  saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
6564 					 lhs_addr, lhs_var);
	  /* Fallthru.  */
6565 	case TRUTH_NOT_EXPR:
6566 	  saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
6567 					 lhs_addr, lhs_var);
6568 	  break;
6569 	case COMPOUND_EXPR:
6570 	  /* Break out any preevaluations from cp_build_modify_expr.  */
6571 	  for (; TREE_CODE (expr) == COMPOUND_EXPR;
6572 	       expr = TREE_OPERAND (expr, 1))
6573 	    gimplify_stmt (&TREE_OPERAND (expr, 0), pre_p);
6574 	  *expr_p = expr;
6575 	  return goa_stabilize_expr (expr_p, pre_p, lhs_addr, lhs_var);
6576 	default:
6577 	  break;
6578 	}
6579       break;
6580     default:
6581       break;
6582     }
6583 
6584   if (saw_lhs == 0)
6585     {
6586       enum gimplify_status gs;
6587       gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_val, fb_rvalue);
6588       if (gs != GS_ALL_DONE)
6589 	saw_lhs = -1;
6590     }
6591 
6592   return saw_lhs;
6593 }
6594 
6595 /* Gimplify an OMP_ATOMIC statement.  */
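
/* Roughly speaking (an illustrative sketch, not a literal dump),

       #pragma omp atomic
       x += 1;

   is lowered into a GIMPLE_OMP_ATOMIC_LOAD of *&x into a temporary,
   followed by a GIMPLE_OMP_ATOMIC_STORE of that temporary plus 1; the
   capture forms additionally mark the load or the store as needing its
   value, which then becomes the value of the whole expression.  */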
6596 
6597 static enum gimplify_status
6598 gimplify_omp_atomic (tree *expr_p, gimple_seq *pre_p)
6599 {
6600   tree addr = TREE_OPERAND (*expr_p, 0);
6601   tree rhs = TREE_CODE (*expr_p) == OMP_ATOMIC_READ
6602 	     ? NULL : TREE_OPERAND (*expr_p, 1);
6603   tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
6604   tree tmp_load;
6605   gimple loadstmt, storestmt;
6606 
6607   tmp_load = create_tmp_reg (type, NULL);
6608   if (rhs && goa_stabilize_expr (&rhs, pre_p, addr, tmp_load) < 0)
6609     return GS_ERROR;
6610 
6611   if (gimplify_expr (&addr, pre_p, NULL, is_gimple_val, fb_rvalue)
6612       != GS_ALL_DONE)
6613     return GS_ERROR;
6614 
6615   loadstmt = gimple_build_omp_atomic_load (tmp_load, addr);
6616   gimplify_seq_add_stmt (pre_p, loadstmt);
6617   if (rhs && gimplify_expr (&rhs, pre_p, NULL, is_gimple_val, fb_rvalue)
6618       != GS_ALL_DONE)
6619     return GS_ERROR;
6620 
6621   if (TREE_CODE (*expr_p) == OMP_ATOMIC_READ)
6622     rhs = tmp_load;
6623   storestmt = gimple_build_omp_atomic_store (rhs);
6624   gimplify_seq_add_stmt (pre_p, storestmt);
6625   switch (TREE_CODE (*expr_p))
6626     {
6627     case OMP_ATOMIC_READ:
6628     case OMP_ATOMIC_CAPTURE_OLD:
6629       *expr_p = tmp_load;
6630       gimple_omp_atomic_set_need_value (loadstmt);
6631       break;
6632     case OMP_ATOMIC_CAPTURE_NEW:
6633       *expr_p = rhs;
6634       gimple_omp_atomic_set_need_value (storestmt);
6635       break;
6636     default:
6637       *expr_p = NULL;
6638       break;
6639     }
6640 
6641    return GS_ALL_DONE;
6642 }
6643 
6644 /* Gimplify a TRANSACTION_EXPR.  This involves gimplification of the
6645    body, and adding some EH bits.  */
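
/* A sketch for illustration: a source-level

       __transaction_atomic { x++; }

   has its body wrapped in a BIND_EXPR if necessary, gimplified in its
   own context, and emitted as a single GIMPLE_TRANSACTION statement
   whose subcode records whether the transaction was declared outer or
   relaxed.  */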
6646 
6647 static enum gimplify_status
6648 gimplify_transaction (tree *expr_p, gimple_seq *pre_p)
6649 {
6650   tree expr = *expr_p, temp, tbody = TRANSACTION_EXPR_BODY (expr);
6651   gimple g;
6652   gimple_seq body = NULL;
6653   struct gimplify_ctx gctx;
6654   int subcode = 0;
6655 
6656   /* Wrap the transaction body in a BIND_EXPR so we have a context
6657      where to put decls for OpenMP.  */
6658   if (TREE_CODE (tbody) != BIND_EXPR)
6659     {
6660       tree bind = build3 (BIND_EXPR, void_type_node, NULL, tbody, NULL);
6661       TREE_SIDE_EFFECTS (bind) = 1;
6662       SET_EXPR_LOCATION (bind, EXPR_LOCATION (tbody));
6663       TRANSACTION_EXPR_BODY (expr) = bind;
6664     }
6665 
6666   push_gimplify_context (&gctx);
6667   temp = voidify_wrapper_expr (*expr_p, NULL);
6668 
6669   g = gimplify_and_return_first (TRANSACTION_EXPR_BODY (expr), &body);
6670   pop_gimplify_context (g);
6671 
6672   g = gimple_build_transaction (body, NULL);
6673   if (TRANSACTION_EXPR_OUTER (expr))
6674     subcode = GTMA_IS_OUTER;
6675   else if (TRANSACTION_EXPR_RELAXED (expr))
6676     subcode = GTMA_IS_RELAXED;
6677   gimple_transaction_set_subcode (g, subcode);
6678 
6679   gimplify_seq_add_stmt (pre_p, g);
6680 
6681   if (temp)
6682     {
6683       *expr_p = temp;
6684       return GS_OK;
6685     }
6686 
6687   *expr_p = NULL_TREE;
6688   return GS_ALL_DONE;
6689 }
6690 
6691 /* Convert the GENERIC expression tree *EXPR_P to GIMPLE.  If the
6692    expression produces a value to be used as an operand inside a GIMPLE
6693    statement, the value will be stored back in *EXPR_P.  This value will
6694    be a tree of class tcc_declaration, tcc_constant, tcc_reference or
6695    an SSA_NAME.  The corresponding sequence of GIMPLE statements is
6696    emitted in PRE_P and POST_P.
6697 
6698    Additionally, this process may overwrite parts of the input
6699    expression during gimplification.  Ideally, it should be
6700    possible to do non-destructive gimplification.
6701 
6702    EXPR_P points to the GENERIC expression to convert to GIMPLE.  If
6703       the expression needs to evaluate to a value to be used as
6704       an operand in a GIMPLE statement, this value will be stored in
6705       *EXPR_P on exit.  This happens when the caller specifies one
6706       of fb_lvalue or fb_rvalue fallback flags.
6707 
6708    PRE_P will contain the sequence of GIMPLE statements corresponding
6709        to the evaluation of EXPR and all the side-effects that must
6710        be executed before the main expression.  On exit, the last
6711        statement of PRE_P is the core statement being gimplified.  For
6712        instance, when gimplifying 'if (++a)' the last statement in
6713        PRE_P will be 'if (t.1)' where t.1 is the result of
6714        pre-incrementing 'a'.
6715 
6716    POST_P will contain the sequence of GIMPLE statements corresponding
6717        to the evaluation of all the side-effects that must be executed
6718        after the main expression.  If this is NULL, the post
6719        side-effects are stored at the end of PRE_P.
6720 
6721        The reason why the output is split in two is to handle post
6722        side-effects explicitly.  In some cases, an expression may have
6723        inner and outer post side-effects which need to be emitted in
6724        an order different from the one given by the recursive
6725        traversal.  For instance, for the expression (*p--)++ the post
6726        side-effects of '--' must actually occur *after* the post
6727        side-effects of '++'.  However, gimplification will first visit
6728        the inner expression, so if a separate POST sequence was not
6729        used, the resulting sequence would be:
6730 
6731        	    1	t.1 = *p
6732        	    2	p = p - 1
6733        	    3	t.2 = t.1 + 1
6734        	    4	*p = t.2
6735 
6736        However, the post-decrement operation in line #2 must not be
6737        evaluated until after the store to *p at line #4, so the
6738        correct sequence should be:
6739 
6740        	    1	t.1 = *p
6741        	    2	t.2 = t.1 + 1
6742        	    3	*p = t.2
6743        	    4	p = p - 1
6744 
6745        So, by specifying a separate post queue, it is possible
6746        to emit the post side-effects in the correct order.
6747        If POST_P is NULL, an internal queue will be used.  Before
6748        returning to the caller, the sequence POST_P is appended to
6749        the main output sequence PRE_P.
6750 
6751    GIMPLE_TEST_F points to a function that takes a tree T and
6752        returns nonzero if T is in the GIMPLE form requested by the
6753        caller.  The GIMPLE predicates are in gimple.c.
6754 
6755    FALLBACK tells the function what sort of a temporary we want if
6756        gimplification cannot produce an expression that complies with
6757        GIMPLE_TEST_F.
6758 
6759        fb_none means that no temporary should be generated
6760        fb_rvalue means that an rvalue is OK to generate
6761        fb_lvalue means that an lvalue is OK to generate
6762        fb_either means that either is OK, but an lvalue is preferable.
6763        fb_mayfail means that gimplification may fail (in which case
6764        GS_ERROR will be returned)
6765 
6766    The return value is either GS_ERROR or GS_ALL_DONE, since this
6767    function iterates until EXPR is completely gimplified or an error
6768    occurs.  */
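
/* A typical use, shown here only for illustration, is forcing an operand
   into an rvalue that satisfies is_gimple_val:

       ret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, NULL,
			    is_gimple_val, fb_rvalue);

   which leaves a register, constant or SSA_NAME in the operand slot and
   appends any statements needed to compute it to PRE_P.  */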
6769 
6770 enum gimplify_status
6771 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
6772 	       bool (*gimple_test_f) (tree), fallback_t fallback)
6773 {
6774   tree tmp;
6775   gimple_seq internal_pre = NULL;
6776   gimple_seq internal_post = NULL;
6777   tree save_expr;
6778   bool is_statement;
6779   location_t saved_location;
6780   enum gimplify_status ret;
6781   gimple_stmt_iterator pre_last_gsi, post_last_gsi;
6782 
6783   save_expr = *expr_p;
6784   if (save_expr == NULL_TREE)
6785     return GS_ALL_DONE;
6786 
6787   /* If we are gimplifying a top-level statement, PRE_P must be valid.  */
6788   is_statement = gimple_test_f == is_gimple_stmt;
6789   if (is_statement)
6790     gcc_assert (pre_p);
6791 
6792   /* Consistency checks.  */
6793   if (gimple_test_f == is_gimple_reg)
6794     gcc_assert (fallback & (fb_rvalue | fb_lvalue));
6795   else if (gimple_test_f == is_gimple_val
6796            || gimple_test_f == is_gimple_call_addr
6797            || gimple_test_f == is_gimple_condexpr
6798            || gimple_test_f == is_gimple_mem_rhs
6799            || gimple_test_f == is_gimple_mem_rhs_or_call
6800            || gimple_test_f == is_gimple_reg_rhs
6801            || gimple_test_f == is_gimple_reg_rhs_or_call
6802            || gimple_test_f == is_gimple_asm_val
6803 	   || gimple_test_f == is_gimple_mem_ref_addr)
6804     gcc_assert (fallback & fb_rvalue);
6805   else if (gimple_test_f == is_gimple_min_lval
6806 	   || gimple_test_f == is_gimple_lvalue)
6807     gcc_assert (fallback & fb_lvalue);
6808   else if (gimple_test_f == is_gimple_addressable)
6809     gcc_assert (fallback & fb_either);
6810   else if (gimple_test_f == is_gimple_stmt)
6811     gcc_assert (fallback == fb_none);
6812   else
6813     {
6814       /* We should have recognized the GIMPLE_TEST_F predicate to
6815 	 know what kind of fallback to use in case a temporary is
6816 	 needed to hold the value or address of *EXPR_P.  */
6817       gcc_unreachable ();
6818     }
6819 
6820   /* We used to check the predicate here and return immediately if it
6821      succeeds.  This is wrong; the design is for gimplification to be
6822      idempotent, and for the predicates to only test for valid forms, not
6823      whether they are fully simplified.  */
6824   if (pre_p == NULL)
6825     pre_p = &internal_pre;
6826 
6827   if (post_p == NULL)
6828     post_p = &internal_post;
6829 
6830   /* Remember the last statements added to PRE_P and POST_P.  Every
6831      new statement added by the gimplification helpers needs to be
6832      annotated with location information.  To centralize the
6833      responsibility, we remember the last statement that had been
6834      added to both queues before gimplifying *EXPR_P.  If
6835      gimplification produces new statements in PRE_P and POST_P, those
6836      statements will be annotated with the same location information
6837      as *EXPR_P.  */
6838   pre_last_gsi = gsi_last (*pre_p);
6839   post_last_gsi = gsi_last (*post_p);
6840 
6841   saved_location = input_location;
6842   if (save_expr != error_mark_node
6843       && EXPR_HAS_LOCATION (*expr_p))
6844     input_location = EXPR_LOCATION (*expr_p);
6845 
6846   /* Loop over the specific gimplifiers until the toplevel node
6847      remains the same.  */
6848   do
6849     {
6850       /* Strip away as many useless type conversions as possible
6851 	 at the toplevel.  */
6852       STRIP_USELESS_TYPE_CONVERSION (*expr_p);
6853 
6854       /* Remember the expr.  */
6855       save_expr = *expr_p;
6856 
6857       /* Die, die, die, my darling.  */
6858       if (save_expr == error_mark_node
6859 	  || (TREE_TYPE (save_expr)
6860 	      && TREE_TYPE (save_expr) == error_mark_node))
6861 	{
6862 	  ret = GS_ERROR;
6863 	  break;
6864 	}
6865 
6866       /* Do any language-specific gimplification.  */
6867       ret = ((enum gimplify_status)
6868 	     lang_hooks.gimplify_expr (expr_p, pre_p, post_p));
6869       if (ret == GS_OK)
6870 	{
6871 	  if (*expr_p == NULL_TREE)
6872 	    break;
6873 	  if (*expr_p != save_expr)
6874 	    continue;
6875 	}
6876       else if (ret != GS_UNHANDLED)
6877 	break;
6878 
6879       /* Make sure that all the cases set 'ret' appropriately.  */
6880       ret = GS_UNHANDLED;
6881       switch (TREE_CODE (*expr_p))
6882 	{
6883 	  /* First deal with the special cases.  */
6884 
6885 	case POSTINCREMENT_EXPR:
6886 	case POSTDECREMENT_EXPR:
6887 	case PREINCREMENT_EXPR:
6888 	case PREDECREMENT_EXPR:
6889 	  ret = gimplify_self_mod_expr (expr_p, pre_p, post_p,
6890 					fallback != fb_none);
6891 	  break;
6892 
6893 	case ARRAY_REF:
6894 	case ARRAY_RANGE_REF:
6895 	case REALPART_EXPR:
6896 	case IMAGPART_EXPR:
6897 	case COMPONENT_REF:
6898 	case VIEW_CONVERT_EXPR:
6899 	  ret = gimplify_compound_lval (expr_p, pre_p, post_p,
6900 					fallback ? fallback : fb_rvalue);
6901 	  break;
6902 
6903 	case COND_EXPR:
6904 	  ret = gimplify_cond_expr (expr_p, pre_p, fallback);
6905 
6906 	  /* C99 code may assign to an array in a structure value of a
6907 	     conditional expression, and this has undefined behavior
6908 	     only on execution, so create a temporary if an lvalue is
6909 	     required.  */
6910 	  if (fallback == fb_lvalue)
6911 	    {
6912 	      *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
6913 	      mark_addressable (*expr_p);
6914 	      ret = GS_OK;
6915 	    }
6916 	  break;
6917 
6918 	case CALL_EXPR:
6919 	  ret = gimplify_call_expr (expr_p, pre_p, fallback != fb_none);
6920 
6921 	  /* C99 code may assign to an array in a structure returned
6922 	     from a function, and this has undefined behavior only on
6923 	     execution, so create a temporary if an lvalue is
6924 	     required.  */
6925 	  if (fallback == fb_lvalue)
6926 	    {
6927 	      *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
6928 	      mark_addressable (*expr_p);
6929 	      ret = GS_OK;
6930 	    }
6931 	  break;
6932 
6933 	case TREE_LIST:
6934 	  gcc_unreachable ();
6935 
6936 	case COMPOUND_EXPR:
6937 	  ret = gimplify_compound_expr (expr_p, pre_p, fallback != fb_none);
6938 	  break;
6939 
6940 	case COMPOUND_LITERAL_EXPR:
6941 	  ret = gimplify_compound_literal_expr (expr_p, pre_p);
6942 	  break;
6943 
6944 	case MODIFY_EXPR:
6945 	case INIT_EXPR:
6946 	  ret = gimplify_modify_expr (expr_p, pre_p, post_p,
6947 				      fallback != fb_none);
6948 	  break;
6949 
6950 	case TRUTH_ANDIF_EXPR:
6951 	case TRUTH_ORIF_EXPR:
6952 	  {
6953 	    /* Preserve the original type of the expression and the
6954 	       source location of the outer expression.  */
6955 	    tree org_type = TREE_TYPE (*expr_p);
6956 	    *expr_p = gimple_boolify (*expr_p);
6957 	    *expr_p = build3_loc (input_location, COND_EXPR,
6958 				  org_type, *expr_p,
6959 				  fold_convert_loc
6960 				    (input_location,
6961 				     org_type, boolean_true_node),
6962 				  fold_convert_loc
6963 				    (input_location,
6964 				     org_type, boolean_false_node));
6965 	    ret = GS_OK;
6966 	    break;
6967 	  }
6968 
6969 	case TRUTH_NOT_EXPR:
6970 	  {
6971 	    tree type = TREE_TYPE (*expr_p);
6972 	    /* The parsers are careful to generate TRUTH_NOT_EXPR
6973 	       only with operands that are always zero or one.
6974 	       We do not fold here but handle the only interesting case
6975 	       manually, as fold may re-introduce the TRUTH_NOT_EXPR.  */
6976 	    *expr_p = gimple_boolify (*expr_p);
6977 	    if (TYPE_PRECISION (TREE_TYPE (*expr_p)) == 1)
6978 	      *expr_p = build1_loc (input_location, BIT_NOT_EXPR,
6979 				    TREE_TYPE (*expr_p),
6980 				    TREE_OPERAND (*expr_p, 0));
6981 	    else
6982 	      *expr_p = build2_loc (input_location, BIT_XOR_EXPR,
6983 				    TREE_TYPE (*expr_p),
6984 				    TREE_OPERAND (*expr_p, 0),
6985 				    build_int_cst (TREE_TYPE (*expr_p), 1));
6986 	    if (!useless_type_conversion_p (type, TREE_TYPE (*expr_p)))
6987 	      *expr_p = fold_convert_loc (input_location, type, *expr_p);
6988 	    ret = GS_OK;
6989 	    break;
6990 	  }
6991 
6992 	case ADDR_EXPR:
6993 	  ret = gimplify_addr_expr (expr_p, pre_p, post_p);
6994 	  break;
6995 
6996 	case VA_ARG_EXPR:
6997 	  ret = gimplify_va_arg_expr (expr_p, pre_p, post_p);
6998 	  break;
6999 
7000 	CASE_CONVERT:
7001 	  if (IS_EMPTY_STMT (*expr_p))
7002 	    {
7003 	      ret = GS_ALL_DONE;
7004 	      break;
7005 	    }
7006 
7007 	  if (VOID_TYPE_P (TREE_TYPE (*expr_p))
7008 	      || fallback == fb_none)
7009 	    {
7010 	      /* Just strip a conversion to void (or in void context) and
7011 		 try again.  */
7012 	      *expr_p = TREE_OPERAND (*expr_p, 0);
7013 	      ret = GS_OK;
7014 	      break;
7015 	    }
7016 
7017 	  ret = gimplify_conversion (expr_p);
7018 	  if (ret == GS_ERROR)
7019 	    break;
7020 	  if (*expr_p != save_expr)
7021 	    break;
7022 	  /* FALLTHRU */
7023 
7024 	case FIX_TRUNC_EXPR:
7025 	  /* unary_expr: ... | '(' cast ')' val | ...  */
7026 	  ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
7027 			       is_gimple_val, fb_rvalue);
7028 	  recalculate_side_effects (*expr_p);
7029 	  break;
7030 
7031 	case INDIRECT_REF:
7032 	  {
7033 	    bool volatilep = TREE_THIS_VOLATILE (*expr_p);
7034 	    bool notrap = TREE_THIS_NOTRAP (*expr_p);
7035 	    tree saved_ptr_type = TREE_TYPE (TREE_OPERAND (*expr_p, 0));
7036 
7037 	    *expr_p = fold_indirect_ref_loc (input_location, *expr_p);
7038 	    if (*expr_p != save_expr)
7039 	      {
7040 		ret = GS_OK;
7041 		break;
7042 	      }
7043 
7044 	    ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
7045 				 is_gimple_reg, fb_rvalue);
7046 	    if (ret == GS_ERROR)
7047 	      break;
7048 
7049 	    recalculate_side_effects (*expr_p);
7050 	    *expr_p = fold_build2_loc (input_location, MEM_REF,
7051 				       TREE_TYPE (*expr_p),
7052 				       TREE_OPERAND (*expr_p, 0),
7053 				       build_int_cst (saved_ptr_type, 0));
7054 	    TREE_THIS_VOLATILE (*expr_p) = volatilep;
7055 	    TREE_THIS_NOTRAP (*expr_p) = notrap;
7056 	    ret = GS_OK;
7057 	    break;
7058 	  }
7059 
7060 	/* We arrive here through the various re-gimplification paths.  */
7061 	case MEM_REF:
7062 	  /* First try re-folding the whole thing.  */
7063 	  tmp = fold_binary (MEM_REF, TREE_TYPE (*expr_p),
7064 			     TREE_OPERAND (*expr_p, 0),
7065 			     TREE_OPERAND (*expr_p, 1));
7066 	  if (tmp)
7067 	    {
7068 	      *expr_p = tmp;
7069 	      recalculate_side_effects (*expr_p);
7070 	      ret = GS_OK;
7071 	      break;
7072 	    }
7073 	  /* Avoid re-gimplifying the address operand if it is already
7074 	     in suitable form.  Re-gimplifying would mark the address
7075 	     operand addressable.  Always gimplify when not in SSA form
7076 	     as we still may have to gimplify decls with value-exprs.  */
7077 	  if (!gimplify_ctxp || !gimplify_ctxp->into_ssa
7078 	      || !is_gimple_mem_ref_addr (TREE_OPERAND (*expr_p, 0)))
7079 	    {
7080 	      ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
7081 				   is_gimple_mem_ref_addr, fb_rvalue);
7082 	      if (ret == GS_ERROR)
7083 		break;
7084 	    }
7085 	  recalculate_side_effects (*expr_p);
7086 	  ret = GS_ALL_DONE;
7087 	  break;
7088 
7089 	/* Constants need not be gimplified.  */
7090 	case INTEGER_CST:
7091 	case REAL_CST:
7092 	case FIXED_CST:
7093 	case STRING_CST:
7094 	case COMPLEX_CST:
7095 	case VECTOR_CST:
7096 	  ret = GS_ALL_DONE;
7097 	  break;
7098 
7099 	case CONST_DECL:
7100 	  /* If we require an lvalue, such as for ADDR_EXPR, retain the
7101 	     CONST_DECL node.  Otherwise the decl is replaceable by its
7102 	     value.  */
7103 	  /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either.  */
7104 	  if (fallback & fb_lvalue)
7105 	    ret = GS_ALL_DONE;
7106 	  else
7107 	    {
7108 	      *expr_p = DECL_INITIAL (*expr_p);
7109 	      ret = GS_OK;
7110 	    }
7111 	  break;
7112 
7113 	case DECL_EXPR:
7114 	  ret = gimplify_decl_expr (expr_p, pre_p);
7115 	  break;
7116 
7117 	case BIND_EXPR:
7118 	  ret = gimplify_bind_expr (expr_p, pre_p);
7119 	  break;
7120 
7121 	case LOOP_EXPR:
7122 	  ret = gimplify_loop_expr (expr_p, pre_p);
7123 	  break;
7124 
7125 	case SWITCH_EXPR:
7126 	  ret = gimplify_switch_expr (expr_p, pre_p);
7127 	  break;
7128 
7129 	case EXIT_EXPR:
7130 	  ret = gimplify_exit_expr (expr_p);
7131 	  break;
7132 
7133 	case GOTO_EXPR:
7134 	  /* If the target is not a LABEL_DECL, then it is a computed jump
7135 	     and the target needs to be gimplified.  */
7136 	  if (TREE_CODE (GOTO_DESTINATION (*expr_p)) != LABEL_DECL)
7137 	    {
7138 	      ret = gimplify_expr (&GOTO_DESTINATION (*expr_p), pre_p,
7139 				   NULL, is_gimple_val, fb_rvalue);
7140 	      if (ret == GS_ERROR)
7141 		break;
7142 	    }
7143 	  gimplify_seq_add_stmt (pre_p,
7144 			  gimple_build_goto (GOTO_DESTINATION (*expr_p)));
7145 	  ret = GS_ALL_DONE;
7146 	  break;
7147 
7148 	case PREDICT_EXPR:
7149 	  gimplify_seq_add_stmt (pre_p,
7150 			gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p),
7151 					      PREDICT_EXPR_OUTCOME (*expr_p)));
7152 	  ret = GS_ALL_DONE;
7153 	  break;
7154 
7155 	case LABEL_EXPR:
7156 	  ret = GS_ALL_DONE;
7157 	  gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p))
7158 		      == current_function_decl);
7159 	  gimplify_seq_add_stmt (pre_p,
7160 			  gimple_build_label (LABEL_EXPR_LABEL (*expr_p)));
7161 	  break;
7162 
7163 	case CASE_LABEL_EXPR:
7164 	  ret = gimplify_case_label_expr (expr_p, pre_p);
7165 	  break;
7166 
7167 	case RETURN_EXPR:
7168 	  ret = gimplify_return_expr (*expr_p, pre_p);
7169 	  break;
7170 
7171 	case CONSTRUCTOR:
7172 	  /* Don't reduce this in place; let gimplify_init_constructor work its
7173 	     magic.  But if we're just elaborating this for side effects, just
7174 	     gimplify any element that has side-effects.  */
7175 	  if (fallback == fb_none)
7176 	    {
7177 	      unsigned HOST_WIDE_INT ix;
7178 	      tree val;
7179 	      tree temp = NULL_TREE;
7180 	      FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (*expr_p), ix, val)
7181 		if (TREE_SIDE_EFFECTS (val))
7182 		  append_to_statement_list (val, &temp);
7183 
7184 	      *expr_p = temp;
7185 	      ret = temp ? GS_OK : GS_ALL_DONE;
7186 	    }
7187 	  /* C99 code may assign to an array in a constructed
7188 	     structure or union, and this has undefined behavior only
7189 	     on execution, so create a temporary if an lvalue is
7190 	     required.  */
7191 	  else if (fallback == fb_lvalue)
7192 	    {
7193 	      *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
7194 	      mark_addressable (*expr_p);
7195 	      ret = GS_OK;
7196 	    }
7197 	  else
7198 	    ret = GS_ALL_DONE;
7199 	  break;
7200 
7201 	  /* The following are special cases that are not handled by the
7202 	     original GIMPLE grammar.  */
7203 
7204 	  /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
7205 	     eliminated.  */
7206 	case SAVE_EXPR:
7207 	  ret = gimplify_save_expr (expr_p, pre_p, post_p);
7208 	  break;
7209 
7210 	case BIT_FIELD_REF:
7211 	  {
7212 	    enum gimplify_status r0, r1, r2;
7213 
7214 	    r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
7215 				post_p, is_gimple_lvalue, fb_either);
7216 	    r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
7217 				post_p, is_gimple_val, fb_rvalue);
7218 	    r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
7219 				post_p, is_gimple_val, fb_rvalue);
7220 	    recalculate_side_effects (*expr_p);
7221 
7222 	    ret = MIN (r0, MIN (r1, r2));
7223 	  }
7224 	  break;
7225 
7226 	case TARGET_MEM_REF:
7227 	  {
7228 	    enum gimplify_status r0 = GS_ALL_DONE, r1 = GS_ALL_DONE;
7229 
7230 	    if (TMR_BASE (*expr_p))
7231 	      r0 = gimplify_expr (&TMR_BASE (*expr_p), pre_p,
7232 				  post_p, is_gimple_mem_ref_addr, fb_either);
7233 	    if (TMR_INDEX (*expr_p))
7234 	      r1 = gimplify_expr (&TMR_INDEX (*expr_p), pre_p,
7235 				  post_p, is_gimple_val, fb_rvalue);
7236 	    if (TMR_INDEX2 (*expr_p))
7237 	      r1 = gimplify_expr (&TMR_INDEX2 (*expr_p), pre_p,
7238 				  post_p, is_gimple_val, fb_rvalue);
7239 	    /* TMR_STEP and TMR_OFFSET are always integer constants.  */
7240 	    ret = MIN (r0, r1);
7241 	  }
7242 	  break;
7243 
7244 	case NON_LVALUE_EXPR:
7245 	  /* This should have been stripped above.  */
7246 	  gcc_unreachable ();
7247 
7248 	case ASM_EXPR:
7249 	  ret = gimplify_asm_expr (expr_p, pre_p, post_p);
7250 	  break;
7251 
7252 	case TRY_FINALLY_EXPR:
7253 	case TRY_CATCH_EXPR:
7254 	  {
7255 	    gimple_seq eval, cleanup;
7256 	    gimple try_;
7257 
7258 	    eval = cleanup = NULL;
7259 	    gimplify_and_add (TREE_OPERAND (*expr_p, 0), &eval);
7260 	    gimplify_and_add (TREE_OPERAND (*expr_p, 1), &cleanup);
7261 	    /* Don't create bogus GIMPLE_TRY with empty cleanup.  */
7262 	    if (gimple_seq_empty_p (cleanup))
7263 	      {
7264 		gimple_seq_add_seq (pre_p, eval);
7265 		ret = GS_ALL_DONE;
7266 		break;
7267 	      }
7268 	    try_ = gimple_build_try (eval, cleanup,
7269 				     TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
7270 				     ? GIMPLE_TRY_FINALLY
7271 				     : GIMPLE_TRY_CATCH);
7272 	    if (TREE_CODE (*expr_p) == TRY_CATCH_EXPR)
7273 	      gimple_try_set_catch_is_cleanup (try_,
7274 					       TRY_CATCH_IS_CLEANUP (*expr_p));
7275 	    gimplify_seq_add_stmt (pre_p, try_);
7276 	    ret = GS_ALL_DONE;
7277 	    break;
7278 	  }
7279 
7280 	case CLEANUP_POINT_EXPR:
7281 	  ret = gimplify_cleanup_point_expr (expr_p, pre_p);
7282 	  break;
7283 
7284 	case TARGET_EXPR:
7285 	  ret = gimplify_target_expr (expr_p, pre_p, post_p);
7286 	  break;
7287 
7288 	case CATCH_EXPR:
7289 	  {
7290 	    gimple c;
7291 	    gimple_seq handler = NULL;
7292 	    gimplify_and_add (CATCH_BODY (*expr_p), &handler);
7293 	    c = gimple_build_catch (CATCH_TYPES (*expr_p), handler);
7294 	    gimplify_seq_add_stmt (pre_p, c);
7295 	    ret = GS_ALL_DONE;
7296 	    break;
7297 	  }
7298 
7299 	case EH_FILTER_EXPR:
7300 	  {
7301 	    gimple ehf;
7302 	    gimple_seq failure = NULL;
7303 
7304 	    gimplify_and_add (EH_FILTER_FAILURE (*expr_p), &failure);
7305 	    ehf = gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p), failure);
7306 	    gimple_set_no_warning (ehf, TREE_NO_WARNING (*expr_p));
7307 	    gimplify_seq_add_stmt (pre_p, ehf);
7308 	    ret = GS_ALL_DONE;
7309 	    break;
7310 	  }
7311 
7312 	case OBJ_TYPE_REF:
7313 	  {
7314 	    enum gimplify_status r0, r1;
7315 	    r0 = gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p), pre_p,
7316 				post_p, is_gimple_val, fb_rvalue);
7317 	    r1 = gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p), pre_p,
7318 				post_p, is_gimple_val, fb_rvalue);
7319 	    TREE_SIDE_EFFECTS (*expr_p) = 0;
7320 	    ret = MIN (r0, r1);
7321 	  }
7322 	  break;
7323 
7324 	case LABEL_DECL:
7325 	  /* We get here when taking the address of a label.  We mark
7326 	     the label as "forced", meaning it can never be removed and
7327 	     it is a potential target for any computed goto.  */
7328 	  FORCED_LABEL (*expr_p) = 1;
7329 	  ret = GS_ALL_DONE;
7330 	  break;
7331 
7332 	case STATEMENT_LIST:
7333 	  ret = gimplify_statement_list (expr_p, pre_p);
7334 	  break;
7335 
7336 	case WITH_SIZE_EXPR:
7337 	  {
7338 	    gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
7339 			   post_p == &internal_post ? NULL : post_p,
7340 			   gimple_test_f, fallback);
7341 	    gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
7342 			   is_gimple_val, fb_rvalue);
7343 	    ret = GS_ALL_DONE;
7344 	  }
7345 	  break;
7346 
7347 	case VAR_DECL:
7348 	case PARM_DECL:
7349 	  ret = gimplify_var_or_parm_decl (expr_p);
7350 	  break;
7351 
7352 	case RESULT_DECL:
7353 	  /* When within an OpenMP context, notice uses of variables.  */
7354 	  if (gimplify_omp_ctxp)
7355 	    omp_notice_variable (gimplify_omp_ctxp, *expr_p, true);
7356 	  ret = GS_ALL_DONE;
7357 	  break;
7358 
7359 	case SSA_NAME:
7360 	  /* Allow callbacks into the gimplifier during optimization.  */
7361 	  ret = GS_ALL_DONE;
7362 	  break;
7363 
7364 	case OMP_PARALLEL:
7365 	  gimplify_omp_parallel (expr_p, pre_p);
7366 	  ret = GS_ALL_DONE;
7367 	  break;
7368 
7369 	case OMP_TASK:
7370 	  gimplify_omp_task (expr_p, pre_p);
7371 	  ret = GS_ALL_DONE;
7372 	  break;
7373 
7374 	case OMP_FOR:
7375 	  ret = gimplify_omp_for (expr_p, pre_p);
7376 	  break;
7377 
7378 	case OMP_SECTIONS:
7379 	case OMP_SINGLE:
7380 	  gimplify_omp_workshare (expr_p, pre_p);
7381 	  ret = GS_ALL_DONE;
7382 	  break;
7383 
7384 	case OMP_SECTION:
7385 	case OMP_MASTER:
7386 	case OMP_ORDERED:
7387 	case OMP_CRITICAL:
7388 	  {
7389 	    gimple_seq body = NULL;
7390 	    gimple g;
7391 
7392 	    gimplify_and_add (OMP_BODY (*expr_p), &body);
7393 	    switch (TREE_CODE (*expr_p))
7394 	      {
7395 	      case OMP_SECTION:
7396 	        g = gimple_build_omp_section (body);
7397 	        break;
7398 	      case OMP_MASTER:
7399 	        g = gimple_build_omp_master (body);
7400 		break;
7401 	      case OMP_ORDERED:
7402 		g = gimple_build_omp_ordered (body);
7403 		break;
7404 	      case OMP_CRITICAL:
7405 		g = gimple_build_omp_critical (body,
7406 		    			       OMP_CRITICAL_NAME (*expr_p));
7407 		break;
7408 	      default:
7409 		gcc_unreachable ();
7410 	      }
7411 	    gimplify_seq_add_stmt (pre_p, g);
7412 	    ret = GS_ALL_DONE;
7413 	    break;
7414 	  }
7415 
7416 	case OMP_ATOMIC:
7417 	case OMP_ATOMIC_READ:
7418 	case OMP_ATOMIC_CAPTURE_OLD:
7419 	case OMP_ATOMIC_CAPTURE_NEW:
7420 	  ret = gimplify_omp_atomic (expr_p, pre_p);
7421 	  break;
7422 
7423 	case TRANSACTION_EXPR:
7424 	  ret = gimplify_transaction (expr_p, pre_p);
7425 	  break;
7426 
7427 	case TRUTH_AND_EXPR:
7428 	case TRUTH_OR_EXPR:
7429 	case TRUTH_XOR_EXPR:
7430 	  {
7431 	    tree orig_type = TREE_TYPE (*expr_p);
7432 	    tree new_type, xop0, xop1;
7433 	    *expr_p = gimple_boolify (*expr_p);
7434 	    new_type = TREE_TYPE (*expr_p);
7435 	    if (!useless_type_conversion_p (orig_type, new_type))
7436 	      {
7437 		*expr_p = fold_convert_loc (input_location, orig_type, *expr_p);
7438 		ret = GS_OK;
7439 		break;
7440 	      }
7441 
7442 	    /* Boolified binary truth expressions are semantically equivalent
7443 	       to bitwise binary expressions.  Canonicalize them to the
7444 	       bitwise variant.  */
7445 	    switch (TREE_CODE (*expr_p))
7446 	      {
7447 	      case TRUTH_AND_EXPR:
7448 		TREE_SET_CODE (*expr_p, BIT_AND_EXPR);
7449 		break;
7450 	      case TRUTH_OR_EXPR:
7451 		TREE_SET_CODE (*expr_p, BIT_IOR_EXPR);
7452 		break;
7453 	      case TRUTH_XOR_EXPR:
7454 		TREE_SET_CODE (*expr_p, BIT_XOR_EXPR);
7455 		break;
7456 	      default:
7457 		break;
7458 	      }
7459 	    /* Now make sure that operands have compatible type to
7460 	       expression's new_type.  */
7461 	    xop0 = TREE_OPERAND (*expr_p, 0);
7462 	    xop1 = TREE_OPERAND (*expr_p, 1);
7463 	    if (!useless_type_conversion_p (new_type, TREE_TYPE (xop0)))
7464 	      TREE_OPERAND (*expr_p, 0) = fold_convert_loc (input_location,
7465 							    new_type,
7466 	      						    xop0);
7467 	    if (!useless_type_conversion_p (new_type, TREE_TYPE (xop1)))
7468 	      TREE_OPERAND (*expr_p, 1) = fold_convert_loc (input_location,
7469 							    new_type,
7470 	      						    xop1);
7471 	    /* Continue classified as tcc_binary.  */
7472 	    goto expr_2;
7473 	  }
7474 
7475 	case FMA_EXPR:
7476 	case VEC_PERM_EXPR:
7477 	  /* Classified as tcc_expression.  */
7478 	  goto expr_3;
7479 
7480 	case POINTER_PLUS_EXPR:
7481 	  {
7482 	    enum gimplify_status r0, r1;
7483 	    r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
7484 				post_p, is_gimple_val, fb_rvalue);
7485 	    r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
7486 				post_p, is_gimple_val, fb_rvalue);
7487 	    recalculate_side_effects (*expr_p);
7488 	    ret = MIN (r0, r1);
7489 	    /* Convert &X + CST to invariant &MEM[&X, CST].  Do this
7490 	       after gimplifying operands - this is similar to how
7491 	       it would be folding all gimplified stmts on creation
7492 	       to have them canonicalized, which is what we eventually
7493 	       should do anyway.  */
7494 	    if (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == INTEGER_CST
7495 		&& is_gimple_min_invariant (TREE_OPERAND (*expr_p, 0)))
7496 	      {
7497 		*expr_p = build_fold_addr_expr_with_type_loc
7498 		   (input_location,
7499 		    fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (*expr_p)),
7500 				 TREE_OPERAND (*expr_p, 0),
7501 				 fold_convert (ptr_type_node,
7502 					       TREE_OPERAND (*expr_p, 1))),
7503 		    TREE_TYPE (*expr_p));
7504 		ret = MIN (ret, GS_OK);
7505 	      }
7506 	    break;
7507 	  }
7508 
7509 	default:
7510 	  switch (TREE_CODE_CLASS (TREE_CODE (*expr_p)))
7511 	    {
7512 	    case tcc_comparison:
7513 	      /* Handle comparison of objects of non scalar mode aggregates
7514 	     	 with a call to memcmp.  It would be nice to only have to do
7515 	     	 this for variable-sized objects, but then we'd have to allow
7516 	     	 the same nest of reference nodes we allow for MODIFY_EXPR and
7517 	     	 that's too complex.
7518 
7519 		 Compare scalar mode aggregates as scalar mode values.  Using
7520 		 memcmp for them would be very inefficient at best, and is
7521 		 plain wrong if bitfields are involved.  */
7522 		{
7523 		  tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1));
7524 
7525 		  /* Vector comparisons need no boolification.  */
7526 		  if (TREE_CODE (type) == VECTOR_TYPE)
7527 		    goto expr_2;
7528 		  else if (!AGGREGATE_TYPE_P (type))
7529 		    {
7530 		      tree org_type = TREE_TYPE (*expr_p);
7531 		      *expr_p = gimple_boolify (*expr_p);
7532 		      if (!useless_type_conversion_p (org_type,
7533 						      TREE_TYPE (*expr_p)))
7534 			{
7535 			  *expr_p = fold_convert_loc (input_location,
7536 						      org_type, *expr_p);
7537 			  ret = GS_OK;
7538 			}
7539 		      else
7540 			goto expr_2;
7541 		    }
7542 		  else if (TYPE_MODE (type) != BLKmode)
7543 		    ret = gimplify_scalar_mode_aggregate_compare (expr_p);
7544 		  else
7545 		    ret = gimplify_variable_sized_compare (expr_p);
7546 
7547 		  break;
7548 		}
7549 
7550 	    /* If *EXPR_P does not need to be special-cased, handle it
7551 	       according to its class.  */
7552 	    case tcc_unary:
7553 	      ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
7554 				   post_p, is_gimple_val, fb_rvalue);
7555 	      break;
7556 
7557 	    case tcc_binary:
7558 	    expr_2:
7559 	      {
7560 		enum gimplify_status r0, r1;
7561 
7562 		r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
7563 		                    post_p, is_gimple_val, fb_rvalue);
7564 		r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
7565 				    post_p, is_gimple_val, fb_rvalue);
7566 
7567 		ret = MIN (r0, r1);
7568 		break;
7569 	      }
7570 
7571 	    expr_3:
7572 	      {
7573 		enum gimplify_status r0, r1, r2;
7574 
7575 		r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
7576 		                    post_p, is_gimple_val, fb_rvalue);
7577 		r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
7578 				    post_p, is_gimple_val, fb_rvalue);
7579 		r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
7580 				    post_p, is_gimple_val, fb_rvalue);
7581 
7582 		ret = MIN (MIN (r0, r1), r2);
7583 		break;
7584 	      }
7585 
7586 	    case tcc_declaration:
7587 	    case tcc_constant:
7588 	      ret = GS_ALL_DONE;
7589 	      goto dont_recalculate;
7590 
7591 	    default:
7592 	      gcc_unreachable ();
7593 	    }
7594 
7595 	  recalculate_side_effects (*expr_p);
7596 
7597 	dont_recalculate:
7598 	  break;
7599 	}
7600 
7601       gcc_assert (*expr_p || ret != GS_OK);
7602     }
7603   while (ret == GS_OK);
7604 
7605   /* If we encountered an error_mark somewhere nested inside, either
7606      stub out the statement or propagate the error back out.  */
7607   if (ret == GS_ERROR)
7608     {
7609       if (is_statement)
7610 	*expr_p = NULL;
7611       goto out;
7612     }
7613 
7614   /* This was only valid as a return value from the langhook, which
7615      we handled.  Make sure it doesn't escape from any other context.  */
7616   gcc_assert (ret != GS_UNHANDLED);
7617 
7618   if (fallback == fb_none && *expr_p && !is_gimple_stmt (*expr_p))
7619     {
7620       /* We aren't looking for a value, and we don't have a valid
7621 	 statement.  If it doesn't have side-effects, throw it away.  */
7622       if (!TREE_SIDE_EFFECTS (*expr_p))
7623 	*expr_p = NULL;
7624       else if (!TREE_THIS_VOLATILE (*expr_p))
7625 	{
7626 	  /* This is probably a _REF that contains something nested that
7627 	     has side effects.  Recurse through the operands to find it.  */
7628 	  enum tree_code code = TREE_CODE (*expr_p);
7629 
7630 	  switch (code)
7631 	    {
7632 	    case COMPONENT_REF:
7633 	    case REALPART_EXPR:
7634 	    case IMAGPART_EXPR:
7635 	    case VIEW_CONVERT_EXPR:
7636 	      gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
7637 			     gimple_test_f, fallback);
7638 	      break;
7639 
7640 	    case ARRAY_REF:
7641 	    case ARRAY_RANGE_REF:
7642 	      gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
7643 			     gimple_test_f, fallback);
7644 	      gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
7645 			     gimple_test_f, fallback);
7646 	      break;
7647 
7648 	    default:
7649 	       /* Anything else with side-effects must be converted to
7650 		  a valid statement before we get here.  */
7651 	      gcc_unreachable ();
7652 	    }
7653 
7654 	  *expr_p = NULL;
7655 	}
7656       else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p))
7657 	       && TYPE_MODE (TREE_TYPE (*expr_p)) != BLKmode)
7658 	{
7659 	  /* Historically, the compiler has treated a bare reference
7660 	     to a non-BLKmode volatile lvalue as forcing a load.  */
7661 	  tree type = TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p));
7662 
7663 	  /* Normally, we do not want to create a temporary for a
7664 	     TREE_ADDRESSABLE type because such a type should not be
7665 	     copied by bitwise-assignment.  However, we make an
7666 	     exception here, as all we are doing here is ensuring that
7667 	     we read the bytes that make up the type.  We use
7668 	     create_tmp_var_raw because create_tmp_var will abort when
7669 	     given a TREE_ADDRESSABLE type.  */
7670 	  tree tmp = create_tmp_var_raw (type, "vol");
7671 	  gimple_add_tmp_var (tmp);
7672 	  gimplify_assign (tmp, *expr_p, pre_p);
7673 	  *expr_p = NULL;
7674 	}
7675       else
7676 	/* We can't do anything useful with a volatile reference to
7677 	   an incomplete type, so just throw it away.  Likewise for
7678 	   a BLKmode type, since any implicit inner load should
7679 	   already have been turned into an explicit one by the
7680 	   gimplification process.  */
7681 	*expr_p = NULL;
7682     }
7683 
7684   /* If we are gimplifying at the statement level, we're done.  Tack
7685      everything together and return.  */
7686   if (fallback == fb_none || is_statement)
7687     {
7688       /* Since *EXPR_P has been converted into a GIMPLE tuple, clear
7689          it out for GC to reclaim it.  */
7690       *expr_p = NULL_TREE;
7691 
7692       if (!gimple_seq_empty_p (internal_pre)
7693 	  || !gimple_seq_empty_p (internal_post))
7694 	{
7695 	  gimplify_seq_add_seq (&internal_pre, internal_post);
7696 	  gimplify_seq_add_seq (pre_p, internal_pre);
7697 	}
7698 
7699       /* The result of gimplifying *EXPR_P is going to be the last few
7700 	 statements in *PRE_P and *POST_P.  Add location information
7701 	 to all the statements that were added by the gimplification
7702 	 helpers.  */
7703       if (!gimple_seq_empty_p (*pre_p))
7704 	annotate_all_with_location_after (*pre_p, pre_last_gsi, input_location);
7705 
7706       if (!gimple_seq_empty_p (*post_p))
7707 	annotate_all_with_location_after (*post_p, post_last_gsi,
7708 					  input_location);
7709 
7710       goto out;
7711     }
7712 
7713 #ifdef ENABLE_GIMPLE_CHECKING
7714   if (*expr_p)
7715     {
7716       enum tree_code code = TREE_CODE (*expr_p);
7717       /* These expressions should already be in gimple IR form.  */
7718       gcc_assert (code != MODIFY_EXPR
7719 		  && code != ASM_EXPR
7720 		  && code != BIND_EXPR
7721 		  && code != CATCH_EXPR
7722 		  && (code != COND_EXPR || gimplify_ctxp->allow_rhs_cond_expr)
7723 		  && code != EH_FILTER_EXPR
7724 		  && code != GOTO_EXPR
7725 		  && code != LABEL_EXPR
7726 		  && code != LOOP_EXPR
7727 		  && code != SWITCH_EXPR
7728 		  && code != TRY_FINALLY_EXPR
7729 		  && code != OMP_CRITICAL
7730 		  && code != OMP_FOR
7731 		  && code != OMP_MASTER
7732 		  && code != OMP_ORDERED
7733 		  && code != OMP_PARALLEL
7734 		  && code != OMP_SECTIONS
7735 		  && code != OMP_SECTION
7736 		  && code != OMP_SINGLE);
7737     }
7738 #endif
7739 
7740   /* Otherwise we're gimplifying a subexpression, so the resulting
7741      value is interesting.  If it's a valid operand that matches
7742      GIMPLE_TEST_F, we're done, unless we are handling some
7743      post-effects internally; in that case, we need to copy the value into
7744      a temporary before adding the post-effects to POST_P.  */
7745   if (gimple_seq_empty_p (internal_post) && (*gimple_test_f) (*expr_p))
7746     goto out;
7747 
7748   /* Otherwise, we need to create a new temporary for the gimplified
7749      expression.  */
7750 
7751   /* We can't return an lvalue if we have an internal postqueue.  The
7752      object the lvalue refers to would (probably) be modified by the
7753      postqueue; we need to copy the value out first, which means an
7754      rvalue.  */
7755   if ((fallback & fb_lvalue)
7756       && gimple_seq_empty_p (internal_post)
7757       && is_gimple_addressable (*expr_p))
7758     {
7759       /* An lvalue will do.  Take the address of the expression, store it
7760 	 in a temporary, and replace the expression with an INDIRECT_REF of
7761 	 that temporary.  */
7762       tmp = build_fold_addr_expr_loc (input_location, *expr_p);
7763       gimplify_expr (&tmp, pre_p, post_p, is_gimple_reg, fb_rvalue);
7764       *expr_p = build_simple_mem_ref (tmp);
7765     }
7766   else if ((fallback & fb_rvalue) && is_gimple_reg_rhs_or_call (*expr_p))
7767     {
7768       /* An rvalue will do.  Assign the gimplified expression into a
7769 	 new temporary TMP and replace the original expression with
7770 	 TMP.  First, make sure that the expression has a type so that
7771 	 it can be assigned into a temporary.  */
7772       gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p)));
7773 
7774       if (!gimple_seq_empty_p (internal_post) || (fallback & fb_lvalue))
7775 	/* The postqueue might change the value of the expression between
7776 	   the initialization and use of the temporary, so we can't use a
7777 	   formal temp.  FIXME do we care?  */
7778 	{
7779 	  *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
7780 	  if (TREE_CODE (TREE_TYPE (*expr_p)) == COMPLEX_TYPE
7781 	      || TREE_CODE (TREE_TYPE (*expr_p)) == VECTOR_TYPE)
7782 	    DECL_GIMPLE_REG_P (*expr_p) = 1;
7783 	}
7784       else
7785 	*expr_p = get_formal_tmp_var (*expr_p, pre_p);
7786     }
7787   else
7788     {
7789 #ifdef ENABLE_GIMPLE_CHECKING
7790       if (!(fallback & fb_mayfail))
7791 	{
7792 	  fprintf (stderr, "gimplification failed:\n");
7793 	  print_generic_expr (stderr, *expr_p, 0);
7794 	  debug_tree (*expr_p);
7795 	  internal_error ("gimplification failed");
7796 	}
7797 #endif
7798       gcc_assert (fallback & fb_mayfail);
7799 
7800       /* If this is an asm statement, and the user asked for the
7801 	 impossible, don't die.  Fail and let gimplify_asm_expr
7802 	 issue an error.  */
7803       ret = GS_ERROR;
7804       goto out;
7805     }
7806 
7807   /* Make sure the temporary matches our predicate.  */
7808   gcc_assert ((*gimple_test_f) (*expr_p));
7809 
7810   if (!gimple_seq_empty_p (internal_post))
7811     {
7812       annotate_all_with_location (internal_post, input_location);
7813       gimplify_seq_add_seq (pre_p, internal_post);
7814     }
7815 
7816  out:
7817   input_location = saved_location;
7818   return ret;
7819 }
7820 
7821 /* Look through TYPE for variable-sized objects and gimplify each such
7822    size that we find.  Add to LIST_P any statements generated.  */
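
/* For example (illustrative only): for a variably modified type such as

       int a[n + 1];

   the array type's TYPE_SIZE and domain bounds are expressions involving
   n; walking the type gimplifies each such size or bound into a simple
   gimple value, appends the computing statements to LIST_P, and copies
   the results to the other variants of the type.  */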
7823 
7824 void
7825 gimplify_type_sizes (tree type, gimple_seq *list_p)
7826 {
7827   tree field, t;
7828 
7829   if (type == NULL || type == error_mark_node)
7830     return;
7831 
7832   /* We first do the main variant, then copy into any other variants.  */
7833   type = TYPE_MAIN_VARIANT (type);
7834 
7835   /* Avoid infinite recursion.  */
7836   if (TYPE_SIZES_GIMPLIFIED (type))
7837     return;
7838 
7839   TYPE_SIZES_GIMPLIFIED (type) = 1;
7840 
7841   switch (TREE_CODE (type))
7842     {
7843     case INTEGER_TYPE:
7844     case ENUMERAL_TYPE:
7845     case BOOLEAN_TYPE:
7846     case REAL_TYPE:
7847     case FIXED_POINT_TYPE:
7848       gimplify_one_sizepos (&TYPE_MIN_VALUE (type), list_p);
7849       gimplify_one_sizepos (&TYPE_MAX_VALUE (type), list_p);
7850 
7851       for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
7852 	{
7853 	  TYPE_MIN_VALUE (t) = TYPE_MIN_VALUE (type);
7854 	  TYPE_MAX_VALUE (t) = TYPE_MAX_VALUE (type);
7855 	}
7856       break;
7857 
7858     case ARRAY_TYPE:
7859       /* These types may not have declarations, so handle them here.  */
7860       gimplify_type_sizes (TREE_TYPE (type), list_p);
7861       gimplify_type_sizes (TYPE_DOMAIN (type), list_p);
7862       /* Ensure VLA bounds aren't removed, for -O0 they should be variables
7863 	 with assigned stack slots, for -O1+ -g they should be tracked
7864 	 by VTA.  */
7865       if (!(TYPE_NAME (type)
7866 	    && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
7867 	    && DECL_IGNORED_P (TYPE_NAME (type)))
7868 	  && TYPE_DOMAIN (type)
7869 	  && INTEGRAL_TYPE_P (TYPE_DOMAIN (type)))
7870 	{
7871 	  t = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
7872 	  if (t && TREE_CODE (t) == VAR_DECL && DECL_ARTIFICIAL (t))
7873 	    DECL_IGNORED_P (t) = 0;
7874 	  t = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
7875 	  if (t && TREE_CODE (t) == VAR_DECL && DECL_ARTIFICIAL (t))
7876 	    DECL_IGNORED_P (t) = 0;
7877 	}
7878       break;
7879 
7880     case RECORD_TYPE:
7881     case UNION_TYPE:
7882     case QUAL_UNION_TYPE:
7883       for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
7884 	if (TREE_CODE (field) == FIELD_DECL)
7885 	  {
7886 	    gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), list_p);
7887 	    gimplify_one_sizepos (&DECL_SIZE (field), list_p);
7888 	    gimplify_one_sizepos (&DECL_SIZE_UNIT (field), list_p);
7889 	    gimplify_type_sizes (TREE_TYPE (field), list_p);
7890 	  }
7891       break;
7892 
7893     case POINTER_TYPE:
7894     case REFERENCE_TYPE:
7895 	/* We used to recurse on the pointed-to type here, which turned out to
7896 	   be incorrect because its definition might refer to variables not
7897 	   yet initialized at this point if a forward declaration is involved.
7898 
7899 	   It was actually useful for anonymous pointed-to types to ensure
7900 	   that the sizes evaluation dominates every possible later use of the
7901 	   values.  Restricting to such types here would be safe since there
7902 	   is no possible forward declaration around, but would introduce an
7903 	   undesirable middle-end semantic to anonymity.  We then defer to
7904 	   front-ends the responsibility of ensuring that the sizes are
7905 	   evaluated both early and late enough, e.g. by attaching artificial
7906 	   type declarations to the tree.  */
7907       break;
7908 
7909     default:
7910       break;
7911     }
7912 
7913   gimplify_one_sizepos (&TYPE_SIZE (type), list_p);
7914   gimplify_one_sizepos (&TYPE_SIZE_UNIT (type), list_p);
7915 
7916   for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
7917     {
7918       TYPE_SIZE (t) = TYPE_SIZE (type);
7919       TYPE_SIZE_UNIT (t) = TYPE_SIZE_UNIT (type);
7920       TYPE_SIZES_GIMPLIFIED (t) = 1;
7921     }
7922 }
7923 
7924 /* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
7925    a size or position, has had all of its SAVE_EXPRs evaluated.
7926    We add any required statements to *STMT_P.  */
7927 
7928 void
7929 gimplify_one_sizepos (tree *expr_p, gimple_seq *stmt_p)
7930 {
7931   tree type, expr = *expr_p;
7932 
7933   /* We don't do anything if the value isn't there, is constant, or contains
7934      a PLACEHOLDER_EXPR.  We also don't want to do anything if it's already
7935      a VAR_DECL.  If it's a VAR_DECL from another function, the gimplifier
7936      will want to replace it with a new variable, but that will cause problems
7937      if this type is from outside the function.  It's OK to have that here.  */
7938   if (expr == NULL_TREE || TREE_CONSTANT (expr)
7939       || TREE_CODE (expr) == VAR_DECL
7940       || CONTAINS_PLACEHOLDER_P (expr))
7941     return;
7942 
7943   type = TREE_TYPE (expr);
7944   *expr_p = unshare_expr (expr);
7945 
7946   gimplify_expr (expr_p, stmt_p, NULL, is_gimple_val, fb_rvalue);
7947   expr = *expr_p;
7948 
7949   /* Verify that we have an exact type match with the original expression.
7950      In particular, we do not wish to drop a "sizetype" in favour of a
7951      type of similar dimensions.  We don't want to pollute the generic
7952      type-stripping code with this knowledge because it doesn't matter
7953      for the bulk of GENERIC/GIMPLE.  It only matters that TYPE_SIZE_UNIT
7954      and friends retain their "sizetype-ness".  */
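  /* Concretely (a sketch of the intent, not a specific testcase): if
     gimplification replaced a "sizetype" size with a temporary of a plain
     integer type of the same width, copy it back below into a fresh
     "sizetype" temporary through a NOP_EXPR so that TYPE_SIZE and
     TYPE_SIZE_UNIT keep referring to a value of the expected type.  */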
7955   if (TREE_TYPE (expr) != type
7956       && TREE_CODE (type) == INTEGER_TYPE
7957       && TYPE_IS_SIZETYPE (type))
7958     {
7959       tree tmp;
7960       gimple stmt;
7961 
7962       *expr_p = create_tmp_var (type, NULL);
7963       tmp = build1 (NOP_EXPR, type, expr);
7964       stmt = gimplify_assign (*expr_p, tmp, stmt_p);
7965       gimple_set_location (stmt, EXPR_LOC_OR_HERE (expr));
7966     }
7967 }
7968 
7969 /* Gimplify the body of statements of FNDECL and return a GIMPLE_BIND node
7970    containing the sequence of corresponding GIMPLE statements.  If DO_PARMS
7971    is true, also gimplify the parameters.  */
7972 
7973 gimple
7974 gimplify_body (tree fndecl, bool do_parms)
7975 {
7976   location_t saved_location = input_location;
7977   gimple_seq parm_stmts, seq;
7978   gimple outer_bind;
7979   struct gimplify_ctx gctx;
7980   struct cgraph_node *cgn;
7981 
7982   timevar_push (TV_TREE_GIMPLIFY);
7983 
7984   /* Initialize for optimize_insn_for_s{ize,peed}_p possibly called during
7985      gimplification.  */
7986   default_rtl_profile ();
7987 
7988   gcc_assert (gimplify_ctxp == NULL);
7989   push_gimplify_context (&gctx);
7990 
7991   /* Unshare most shared trees in the body and in that of any nested functions.
7992      It would seem we don't have to do this for nested functions because
7993      they are supposed to be output and then the outer function gimplified
7994      first, but the g++ front end doesn't always do it that way.  */
7995   unshare_body (fndecl);
7996   unvisit_body (fndecl);
7997 
7998   cgn = cgraph_get_node (fndecl);
7999   if (cgn && cgn->origin)
8000     nonlocal_vlas = pointer_set_create ();
8001 
8002   /* Make sure input_location isn't set to something weird.  */
8003   input_location = DECL_SOURCE_LOCATION (fndecl);
8004 
8005   /* Resolve callee-copies.  This has to be done before processing
8006      the body so that DECL_VALUE_EXPR gets processed correctly.  */
8007   parm_stmts = do_parms ? gimplify_parameters () : NULL;
8008 
8009   /* Gimplify the function's body.  */
8010   seq = NULL;
8011   gimplify_stmt (&DECL_SAVED_TREE (fndecl), &seq);
8012   outer_bind = gimple_seq_first_stmt (seq);
8013   if (!outer_bind)
8014     {
8015       outer_bind = gimple_build_nop ();
8016       gimplify_seq_add_stmt (&seq, outer_bind);
8017     }
8018 
8019   /* The body must contain exactly one statement, a GIMPLE_BIND.  If this is
8020      not the case, wrap everything in a GIMPLE_BIND to make it so.  */
8021   if (gimple_code (outer_bind) == GIMPLE_BIND
8022       && gimple_seq_first (seq) == gimple_seq_last (seq))
8023     ;
8024   else
8025     outer_bind = gimple_build_bind (NULL_TREE, seq, NULL);
8026 
8027   DECL_SAVED_TREE (fndecl) = NULL_TREE;
8028 
8029   /* If we had callee-copies statements, insert them at the beginning
8030      of the function and clear DECL_HAS_VALUE_EXPR_P on the parameters.  */
8031   if (!gimple_seq_empty_p (parm_stmts))
8032     {
8033       tree parm;
8034 
8035       gimplify_seq_add_seq (&parm_stmts, gimple_bind_body (outer_bind));
8036       gimple_bind_set_body (outer_bind, parm_stmts);
8037 
8038       for (parm = DECL_ARGUMENTS (current_function_decl);
8039 	   parm; parm = DECL_CHAIN (parm))
8040 	if (DECL_HAS_VALUE_EXPR_P (parm))
8041 	  {
8042 	    DECL_HAS_VALUE_EXPR_P (parm) = 0;
8043 	    DECL_IGNORED_P (parm) = 0;
8044 	  }
8045     }
8046 
8047   if (nonlocal_vlas)
8048     {
8049       pointer_set_destroy (nonlocal_vlas);
8050       nonlocal_vlas = NULL;
8051     }
8052 
8053   pop_gimplify_context (outer_bind);
8054   gcc_assert (gimplify_ctxp == NULL);
8055 
8056   if (!seen_error ())
8057     verify_gimple_in_seq (gimple_bind_body (outer_bind));
8058 
8059   timevar_pop (TV_TREE_GIMPLIFY);
8060   input_location = saved_location;
8061 
8062   return outer_bind;
8063 }
8064 
8065 typedef char *char_p; /* For DEF_VEC_P.  */
8066 DEF_VEC_P(char_p);
8067 DEF_VEC_ALLOC_P(char_p,heap);
8068 
8069 /* Return whether we should exclude FNDECL from instrumentation.  */
8070 
8071 static bool
8072 flag_instrument_functions_exclude_p (tree fndecl)
8073 {
8074   VEC(char_p,heap) *vec;
8075 
8076   vec = (VEC(char_p,heap) *) flag_instrument_functions_exclude_functions;
8077   if (VEC_length (char_p, vec) > 0)
8078     {
8079       const char *name;
8080       int i;
8081       char *s;
8082 
8083       name = lang_hooks.decl_printable_name (fndecl, 0);
8084       FOR_EACH_VEC_ELT (char_p, vec, i, s)
8085 	if (strstr (name, s) != NULL)
8086 	  return true;
8087     }
8088 
8089   vec = (VEC(char_p,heap) *) flag_instrument_functions_exclude_files;
8090   if (VEC_length (char_p, vec) > 0)
8091     {
8092       const char *name;
8093       int i;
8094       char *s;
8095 
8096       name = DECL_SOURCE_FILE (fndecl);
8097       FOR_EACH_VEC_ELT (char_p, vec, i, s)
8098 	if (strstr (name, s) != NULL)
8099 	  return true;
8100     }
8101 
8102   return false;
8103 }
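
/* For example (a sketch): with
   -finstrument-functions-exclude-function-list=foo,bar
   any function whose printable name contains "foo" or "bar" as a substring
   is excluded, since the matching above is done with strstr; the file-list
   variant matches against DECL_SOURCE_FILE in the same way.  */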
8104 
8105 /* Entry point to the gimplification pass.  FNDECL is the FUNCTION_DECL
8106    node for the function we want to gimplify.
8107 
8108    Return the sequence of GIMPLE statements corresponding to the body
8109    of FNDECL.  */
8110 
8111 void
8112 gimplify_function_tree (tree fndecl)
8113 {
8114   tree oldfn, parm, ret;
8115   gimple_seq seq;
8116   gimple bind;
8117 
8118   gcc_assert (!gimple_body (fndecl));
8119 
8120   oldfn = current_function_decl;
8121   current_function_decl = fndecl;
8122   if (DECL_STRUCT_FUNCTION (fndecl))
8123     push_cfun (DECL_STRUCT_FUNCTION (fndecl));
8124   else
8125     push_struct_function (fndecl);
8126 
8127   for (parm = DECL_ARGUMENTS (fndecl); parm ; parm = DECL_CHAIN (parm))
8128     {
8129       /* Preliminarily mark non-addressed complex variables as eligible
8130          for promotion to gimple registers.  We'll transform their uses
8131          as we find them.  */
8132       if ((TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
8133 	   || TREE_CODE (TREE_TYPE (parm)) == VECTOR_TYPE)
8134           && !TREE_THIS_VOLATILE (parm)
8135           && !needs_to_live_in_memory (parm))
8136         DECL_GIMPLE_REG_P (parm) = 1;
8137     }
8138 
8139   ret = DECL_RESULT (fndecl);
8140   if ((TREE_CODE (TREE_TYPE (ret)) == COMPLEX_TYPE
8141        || TREE_CODE (TREE_TYPE (ret)) == VECTOR_TYPE)
8142       && !needs_to_live_in_memory (ret))
8143     DECL_GIMPLE_REG_P (ret) = 1;
8144 
8145   bind = gimplify_body (fndecl, true);
8146 
8147   /* The tree body of the function is no longer needed, replace it
8148      with the new GIMPLE body.  */
8149   seq = gimple_seq_alloc ();
8150   gimple_seq_add_stmt (&seq, bind);
8151   gimple_set_body (fndecl, seq);
8152 
8153   /* If we're instrumenting function entry/exit, then prepend the call to
8154      the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
8155      catch the exit hook.  */
8156   /* ??? Add some way to ignore exceptions for this TFE.  */
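  /* Roughly, the wrapped function gets this shape (a sketch, not literal
     dump output):

	 t = __builtin_return_address (0);
	 __cyg_profile_func_enter (<this function>, t);
	 try
	   {
	     <original GIMPLE_BIND of the body>
	   }
	 finally
	   {
	     t = __builtin_return_address (0);
	     __cyg_profile_func_exit (<this function>, t);
	   }  */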
8157   if (flag_instrument_function_entry_exit
8158       && !DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl)
8159       && !flag_instrument_functions_exclude_p (fndecl))
8160     {
8161       tree x;
8162       gimple new_bind;
8163       gimple tf;
8164       gimple_seq cleanup = NULL, body = NULL;
8165       tree tmp_var;
8166       gimple call;
8167 
8168       x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
8169       call = gimple_build_call (x, 1, integer_zero_node);
8170       tmp_var = create_tmp_var (ptr_type_node, "return_addr");
8171       gimple_call_set_lhs (call, tmp_var);
8172       gimplify_seq_add_stmt (&cleanup, call);
8173       x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_EXIT);
8174       call = gimple_build_call (x, 2,
8175 				build_fold_addr_expr (current_function_decl),
8176 				tmp_var);
8177       gimplify_seq_add_stmt (&cleanup, call);
8178       tf = gimple_build_try (seq, cleanup, GIMPLE_TRY_FINALLY);
8179 
8180       x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
8181       call = gimple_build_call (x, 1, integer_zero_node);
8182       tmp_var = create_tmp_var (ptr_type_node, "return_addr");
8183       gimple_call_set_lhs (call, tmp_var);
8184       gimplify_seq_add_stmt (&body, call);
8185       x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_ENTER);
8186       call = gimple_build_call (x, 2,
8187 				build_fold_addr_expr (current_function_decl),
8188 				tmp_var);
8189       gimplify_seq_add_stmt (&body, call);
8190       gimplify_seq_add_stmt (&body, tf);
8191       new_bind = gimple_build_bind (NULL, body, gimple_bind_block (bind));
8192       /* Clear the block for BIND, since it is no longer directly inside
8193          the function, but within a try block.  */
8194       gimple_bind_set_block (bind, NULL);
8195 
8196       /* Replace the current function body with the body
8197          wrapped in the try/finally TF.  */
8198       seq = gimple_seq_alloc ();
8199       gimple_seq_add_stmt (&seq, new_bind);
8200       gimple_set_body (fndecl, seq);
8201     }
8202 
8203   DECL_SAVED_TREE (fndecl) = NULL_TREE;
8204   cfun->curr_properties = PROP_gimple_any;
8205 
8206   current_function_decl = oldfn;
8207   pop_cfun ();
8208 }
8209 
8210 /* Some transformations like inlining may invalidate the GIMPLE form
8211    for operands.  This function traverses all the operands in STMT and
8212    gimplifies anything that is not a valid gimple operand.  Any new
8213    GIMPLE statements are inserted before *GSI_P.  */
8214 
8215 void
8216 gimple_regimplify_operands (gimple stmt, gimple_stmt_iterator *gsi_p)
8217 {
8218   size_t i, num_ops;
8219   tree orig_lhs = NULL_TREE, lhs, t;
8220   gimple_seq pre = NULL;
8221   gimple post_stmt = NULL;
8222   struct gimplify_ctx gctx;
8223 
8224   push_gimplify_context (&gctx);
8225   gimplify_ctxp->into_ssa = gimple_in_ssa_p (cfun);
8226 
8227   switch (gimple_code (stmt))
8228     {
8229     case GIMPLE_COND:
8230       gimplify_expr (gimple_cond_lhs_ptr (stmt), &pre, NULL,
8231 		     is_gimple_val, fb_rvalue);
8232       gimplify_expr (gimple_cond_rhs_ptr (stmt), &pre, NULL,
8233 		     is_gimple_val, fb_rvalue);
8234       break;
8235     case GIMPLE_SWITCH:
8236       gimplify_expr (gimple_switch_index_ptr (stmt), &pre, NULL,
8237 		     is_gimple_val, fb_rvalue);
8238       break;
8239     case GIMPLE_OMP_ATOMIC_LOAD:
8240       gimplify_expr (gimple_omp_atomic_load_rhs_ptr (stmt), &pre, NULL,
8241 		     is_gimple_val, fb_rvalue);
8242       break;
8243     case GIMPLE_ASM:
8244       {
8245 	size_t i, noutputs = gimple_asm_noutputs (stmt);
8246 	const char *constraint, **oconstraints;
8247 	bool allows_mem, allows_reg, is_inout;
8248 
8249 	oconstraints
8250 	  = (const char **) alloca ((noutputs) * sizeof (const char *));
8251 	for (i = 0; i < noutputs; i++)
8252 	  {
8253 	    tree op = gimple_asm_output_op (stmt, i);
8254 	    constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
8255 	    oconstraints[i] = constraint;
8256 	    parse_output_constraint (&constraint, i, 0, 0, &allows_mem,
8257 				     &allows_reg, &is_inout);
8258 	    gimplify_expr (&TREE_VALUE (op), &pre, NULL,
8259 			   is_inout ? is_gimple_min_lval : is_gimple_lvalue,
8260 			   fb_lvalue | fb_mayfail);
8261 	  }
8262 	for (i = 0; i < gimple_asm_ninputs (stmt); i++)
8263 	  {
8264 	    tree op = gimple_asm_input_op (stmt, i);
8265 	    constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (op)));
8266 	    parse_input_constraint (&constraint, 0, 0, noutputs, 0,
8267 				    oconstraints, &allows_mem, &allows_reg);
8268 	    if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (op))) && allows_mem)
8269 	      allows_reg = 0;
8270 	    if (!allows_reg && allows_mem)
8271 	      gimplify_expr (&TREE_VALUE (op), &pre, NULL,
8272 			     is_gimple_lvalue, fb_lvalue | fb_mayfail);
8273 	    else
8274 	      gimplify_expr (&TREE_VALUE (op), &pre, NULL,
8275 			     is_gimple_asm_val, fb_rvalue);
8276 	  }
8277       }
8278       break;
8279     default:
8280       /* NOTE: We start gimplifying operands from last to first to
8281 	 make sure that side-effects on the RHS of calls, assignments
8282 	 and ASMs are executed before the LHS.  The ordering is not
8283 	 important for other statements.  */
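      /* For instance (a sketch, not from this file): when regimplifying
	 something like  *p = f (x);  the statements produced for the RHS
	 (evaluating the call into a temporary) must land in PRE before any
	 statements produced for the LHS address, hence the reverse walk.  */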
8284       num_ops = gimple_num_ops (stmt);
8285       orig_lhs = gimple_get_lhs (stmt);
8286       for (i = num_ops; i > 0; i--)
8287 	{
8288 	  tree op = gimple_op (stmt, i - 1);
8289 	  if (op == NULL_TREE)
8290 	    continue;
8291 	  if (i == 1 && (is_gimple_call (stmt) || is_gimple_assign (stmt)))
8292 	    gimplify_expr (&op, &pre, NULL, is_gimple_lvalue, fb_lvalue);
8293 	  else if (i == 2
8294 		   && is_gimple_assign (stmt)
8295 		   && num_ops == 2
8296 		   && get_gimple_rhs_class (gimple_expr_code (stmt))
8297 		      == GIMPLE_SINGLE_RHS)
8298 	    gimplify_expr (&op, &pre, NULL,
8299 			   rhs_predicate_for (gimple_assign_lhs (stmt)),
8300 			   fb_rvalue);
8301 	  else if (i == 2 && is_gimple_call (stmt))
8302 	    {
8303 	      if (TREE_CODE (op) == FUNCTION_DECL)
8304 		continue;
8305 	      gimplify_expr (&op, &pre, NULL, is_gimple_call_addr, fb_rvalue);
8306 	    }
8307 	  else
8308 	    gimplify_expr (&op, &pre, NULL, is_gimple_val, fb_rvalue);
8309 	  gimple_set_op (stmt, i - 1, op);
8310 	}
8311 
8312       lhs = gimple_get_lhs (stmt);
8313       /* If the LHS changed in a way that requires a simple RHS,
8314 	 create a temporary.  */
8315       if (lhs && !is_gimple_reg (lhs))
8316 	{
8317 	  bool need_temp = false;
8318 
8319 	  if (is_gimple_assign (stmt)
8320 	      && num_ops == 2
8321 	      && get_gimple_rhs_class (gimple_expr_code (stmt))
8322 		 == GIMPLE_SINGLE_RHS)
8323 	    gimplify_expr (gimple_assign_rhs1_ptr (stmt), &pre, NULL,
8324 			   rhs_predicate_for (gimple_assign_lhs (stmt)),
8325 			   fb_rvalue);
8326 	  else if (is_gimple_reg (lhs))
8327 	    {
8328 	      if (is_gimple_reg_type (TREE_TYPE (lhs)))
8329 		{
8330 		  if (is_gimple_call (stmt))
8331 		    {
8332 		      i = gimple_call_flags (stmt);
8333 		      if ((i & ECF_LOOPING_CONST_OR_PURE)
8334 			  || !(i & (ECF_CONST | ECF_PURE)))
8335 			need_temp = true;
8336 		    }
8337 		  if (stmt_can_throw_internal (stmt))
8338 		    need_temp = true;
8339 		}
8340 	    }
8341 	  else
8342 	    {
8343 	      if (is_gimple_reg_type (TREE_TYPE (lhs)))
8344 		need_temp = true;
8345 	      else if (TYPE_MODE (TREE_TYPE (lhs)) != BLKmode)
8346 		{
8347 		  if (is_gimple_call (stmt))
8348 		    {
8349 		      tree fndecl = gimple_call_fndecl (stmt);
8350 
8351 		      if (!aggregate_value_p (TREE_TYPE (lhs), fndecl)
8352 			  && !(fndecl && DECL_RESULT (fndecl)
8353 			       && DECL_BY_REFERENCE (DECL_RESULT (fndecl))))
8354 			need_temp = true;
8355 		    }
8356 		  else
8357 		    need_temp = true;
8358 		}
8359 	    }
8360 	  if (need_temp)
8361 	    {
8362 	      tree temp = create_tmp_reg (TREE_TYPE (lhs), NULL);
8363 
8364 	      if (TREE_CODE (orig_lhs) == SSA_NAME)
8365 		orig_lhs = SSA_NAME_VAR (orig_lhs);
8366 
8367 	      if (gimple_in_ssa_p (cfun))
8368 		temp = make_ssa_name (temp, NULL);
8369 	      gimple_set_lhs (stmt, temp);
8370 	      post_stmt = gimple_build_assign (lhs, temp);
8371 	      if (TREE_CODE (lhs) == SSA_NAME)
8372 		SSA_NAME_DEF_STMT (lhs) = post_stmt;
8373 	    }
8374 	}
8375       break;
8376     }
8377 
8378   if (gimple_referenced_vars (cfun))
8379     for (t = gimplify_ctxp->temps; t ; t = TREE_CHAIN (t))
8380       add_referenced_var (t);
8381 
8382   if (!gimple_seq_empty_p (pre))
8383     {
8384       if (gimple_in_ssa_p (cfun))
8385 	{
8386 	  gimple_stmt_iterator i;
8387 
8388 	  for (i = gsi_start (pre); !gsi_end_p (i); gsi_next (&i))
8389 	    mark_symbols_for_renaming (gsi_stmt (i));
8390 	}
8391       gsi_insert_seq_before (gsi_p, pre, GSI_SAME_STMT);
8392     }
8393   if (post_stmt)
8394     gsi_insert_after (gsi_p, post_stmt, GSI_NEW_STMT);
8395 
8396   pop_gimplify_context (NULL);
8397 }
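
/* A hedged caller-side sketch (names are illustrative): after rewriting an
   operand of STMT in place, e.g.

       gimple_assign_set_rhs1 (stmt, new_rhs);
       gimple_regimplify_operands (stmt, &gsi);

   any part of NEW_RHS that is not a valid GIMPLE operand is split out into
   new statements inserted before GSI.  */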
8398 
8399 /* Expand EXPR to list of gimple statements STMTS.  GIMPLE_TEST_F specifies
8400    the predicate that will hold for the result.  If VAR is not NULL, make the
8401    base variable of the final destination be VAR if suitable.  */
8402 
8403 tree
8404 force_gimple_operand_1 (tree expr, gimple_seq *stmts,
8405 			gimple_predicate gimple_test_f, tree var)
8406 {
8407   tree t;
8408   enum gimplify_status ret;
8409   struct gimplify_ctx gctx;
8410 
8411   *stmts = NULL;
8412 
8413   /* gimple_test_f might be more strict than is_gimple_val, make
8414      sure we pass both.  Just checking gimple_test_f doesn't work
8415      because most gimple predicates do not work recursively.  */
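  /* Example of the non-recursive pitfall (a sketch): with GIMPLE_TEST_F
     == is_gimple_reg_rhs, an expression such as  a + b * c  satisfies the
     predicate, which only inspects the outermost tree code, yet it is not
     flat GIMPLE; also requiring is_gimple_val keeps us from returning such
     an expression unmodified.  */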
8416   if (is_gimple_val (expr)
8417       && (*gimple_test_f) (expr))
8418     return expr;
8419 
8420   push_gimplify_context (&gctx);
8421   gimplify_ctxp->into_ssa = gimple_in_ssa_p (cfun);
8422   gimplify_ctxp->allow_rhs_cond_expr = true;
8423 
8424   if (var)
8425     expr = build2 (MODIFY_EXPR, TREE_TYPE (var), var, expr);
8426 
8427   if (TREE_CODE (expr) != MODIFY_EXPR
8428       && TREE_TYPE (expr) == void_type_node)
8429     {
8430       gimplify_and_add (expr, stmts);
8431       expr = NULL_TREE;
8432     }
8433   else
8434     {
8435       ret = gimplify_expr (&expr, stmts, NULL, gimple_test_f, fb_rvalue);
8436       gcc_assert (ret != GS_ERROR);
8437     }
8438 
8439   if (gimple_referenced_vars (cfun))
8440     for (t = gimplify_ctxp->temps; t ; t = DECL_CHAIN (t))
8441       add_referenced_var (t);
8442 
8443   pop_gimplify_context (NULL);
8444 
8445   return expr;
8446 }
8447 
8448 /* Expand EXPR to a list of gimple statements STMTS.  If SIMPLE is true,
8449    force the result to be either an SSA_NAME or an invariant, otherwise
8450    just force it to be a rhs expression.  If VAR is not NULL, make the
8451    base variable of the final destination be VAR if suitable.  */
8452 
8453 tree
8454 force_gimple_operand (tree expr, gimple_seq *stmts, bool simple, tree var)
8455 {
8456   return force_gimple_operand_1 (expr, stmts,
8457 				 simple ? is_gimple_val : is_gimple_reg_rhs,
8458 				 var);
8459 }
8460 
8461 /* Invoke force_gimple_operand_1 for EXPR with parameters GIMPLE_TEST_F
8462    and VAR.  If some statements are produced, they are emitted at GSI.
8463    If BEFORE is true, the statements are appended before GSI, otherwise
8464    they are appended after it.  M specifies the way GSI moves after
8465    insertion (GSI_SAME_STMT or GSI_CONTINUE_LINKING are the usual values).  */
8466 
8467 tree
8468 force_gimple_operand_gsi_1 (gimple_stmt_iterator *gsi, tree expr,
8469 			    gimple_predicate gimple_test_f,
8470 			    tree var, bool before,
8471 			    enum gsi_iterator_update m)
8472 {
8473   gimple_seq stmts;
8474 
8475   expr = force_gimple_operand_1 (expr, &stmts, gimple_test_f, var);
8476 
8477   if (!gimple_seq_empty_p (stmts))
8478     {
8479       if (gimple_in_ssa_p (cfun))
8480 	{
8481 	  gimple_stmt_iterator i;
8482 
8483 	  for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
8484 	    mark_symbols_for_renaming (gsi_stmt (i));
8485 	}
8486 
8487       if (before)
8488 	gsi_insert_seq_before (gsi, stmts, m);
8489       else
8490 	gsi_insert_seq_after (gsi, stmts, m);
8491     }
8492 
8493   return expr;
8494 }
8495 
8496 /* Invoke force_gimple_operand_1 for EXPR with parameter VAR.
8497    If SIMPLE is true, force the result to be either an SSA_NAME or an invariant,
8498    otherwise just force it to be a rhs expression.  If some statements are
8499    produced, emits them at GSI.  If BEFORE is true, the statements are
8500    appended before GSI, otherwise they are appended after it.  M specifies
8501    the way GSI moves after insertion (GSI_SAME_STMT or GSI_CONTINUE_LINKING
8502    are the usual values).  */
8503 
8504 tree
8505 force_gimple_operand_gsi (gimple_stmt_iterator *gsi, tree expr,
8506 			  bool simple_p, tree var, bool before,
8507 			  enum gsi_iterator_update m)
8508 {
8509   return force_gimple_operand_gsi_1 (gsi, expr,
8510 				     simple_p
8511 				     ? is_gimple_val : is_gimple_reg_rhs,
8512 				     var, before, m);
8513 }
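
/* Typical caller-side use (a hedged sketch; names are illustrative):

       gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
       tree val = force_gimple_operand_gsi (&gsi, expr, true, NULL_TREE,
					    true, GSI_SAME_STMT);

   gimplifies EXPR, inserts any statements it needed right before STMT and
   yields a value that is usable directly as a GIMPLE operand.  */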
8514 
8515 
8516 #include "gt-gimplify.h"
8517