1 /* Tree lowering pass.  This pass converts the GENERIC functions-as-trees
2    tree representation into the GIMPLE form.
3    Copyright (C) 2002-2014 Free Software Foundation, Inc.
4    Major work done by Sebastian Pop <s.pop@laposte.net>,
5    Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.
6 
7 This file is part of GCC.
8 
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13 
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
17 for more details.
18 
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3.  If not see
21 <http://www.gnu.org/licenses/>.  */
22 
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tree.h"
27 #include "expr.h"
28 #include "pointer-set.h"
29 #include "hash-table.h"
30 #include "basic-block.h"
31 #include "tree-ssa-alias.h"
32 #include "internal-fn.h"
33 #include "gimple-fold.h"
34 #include "tree-eh.h"
35 #include "gimple-expr.h"
36 #include "is-a.h"
37 #include "gimple.h"
38 #include "gimplify.h"
39 #include "gimple-iterator.h"
40 #include "stringpool.h"
41 #include "calls.h"
42 #include "varasm.h"
43 #include "stor-layout.h"
44 #include "stmt.h"
45 #include "print-tree.h"
46 #include "tree-iterator.h"
47 #include "tree-inline.h"
48 #include "tree-pretty-print.h"
49 #include "langhooks.h"
50 #include "bitmap.h"
51 #include "gimple-ssa.h"
52 #include "cgraph.h"
53 #include "tree-cfg.h"
54 #include "tree-ssanames.h"
55 #include "tree-ssa.h"
56 #include "diagnostic-core.h"
57 #include "target.h"
58 #include "splay-tree.h"
59 #include "omp-low.h"
60 #include "gimple-low.h"
61 #include "cilk.h"
62 
63 #include "langhooks-def.h"	/* FIXME: for lhd_set_decl_assembler_name */
64 #include "tree-pass.h"		/* FIXME: only for PROP_gimple_any */
65 
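/* Flags the gimplifier records for each variable seen in an OpenMP
   construct.  One entry per variable is kept in the splay tree of the
   enclosing gimplify_omp_ctx, describing how the variable is shared,
   privatized or mapped.  */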
66 enum gimplify_omp_var_data
67 {
68   GOVD_SEEN = 1,
69   GOVD_EXPLICIT = 2,
70   GOVD_SHARED = 4,
71   GOVD_PRIVATE = 8,
72   GOVD_FIRSTPRIVATE = 16,
73   GOVD_LASTPRIVATE = 32,
74   GOVD_REDUCTION = 64,
75   GOVD_LOCAL = 128,
76   GOVD_MAP = 256,
77   GOVD_DEBUG_PRIVATE = 512,
78   GOVD_PRIVATE_OUTER_REF = 1024,
79   GOVD_LINEAR = 2048,
80   GOVD_ALIGNED = 4096,
81   GOVD_MAP_TO_ONLY = 8192,
82   GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE
83 			   | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LINEAR
84 			   | GOVD_LOCAL)
85 };
86 
87 
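/* The kind of OMP construct a gimplify_omp_ctx describes.  Note that
   ORT_COMBINED_PARALLEL and ORT_UNTIED_TASK are ORT_PARALLEL and ORT_TASK
   with the low bit set, so tests such as (region_type & ORT_TASK) match
   both the plain and the combined/untied variants.  */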
88 enum omp_region_type
89 {
90   ORT_WORKSHARE = 0,
91   ORT_SIMD = 1,
92   ORT_PARALLEL = 2,
93   ORT_COMBINED_PARALLEL = 3,
94   ORT_TASK = 4,
95   ORT_UNTIED_TASK = 5,
96   ORT_TEAMS = 8,
97   ORT_TARGET_DATA = 16,
98   ORT_TARGET = 32
99 };
100 
101 /* Gimplify hashtable helper.  */
102 
103 struct gimplify_hasher : typed_free_remove <elt_t>
104 {
105   typedef elt_t value_type;
106   typedef elt_t compare_type;
107   static inline hashval_t hash (const value_type *);
108   static inline bool equal (const value_type *, const compare_type *);
109 };
110 
111 struct gimplify_ctx
112 {
113   struct gimplify_ctx *prev_context;
114 
115   vec<gimple> bind_expr_stack;
116   tree temps;
117   gimple_seq conditional_cleanups;
118   tree exit_label;
119   tree return_temp;
120 
121   vec<tree> case_labels;
122   /* The formal temporary table.  Should this be persistent?  */
123   hash_table <gimplify_hasher> temp_htab;
124 
125   int conditions;
126   bool save_stack;
127   bool into_ssa;
128   bool allow_rhs_cond_expr;
129   bool in_cleanup_point_expr;
130 };
131 
132 struct gimplify_omp_ctx
133 {
134   struct gimplify_omp_ctx *outer_context;
135   splay_tree variables;
136   struct pointer_set_t *privatized_types;
137   location_t location;
138   enum omp_clause_default_kind default_kind;
139   enum omp_region_type region_type;
140   bool combined_loop;
141   bool distribute;
142 };
143 
144 static struct gimplify_ctx *gimplify_ctxp;
145 static struct gimplify_omp_ctx *gimplify_omp_ctxp;
146 
147 /* Forward declaration.  */
148 static enum gimplify_status gimplify_compound_expr (tree *, gimple_seq *, bool);
149 
150 /* Shorter alias name for the above function for use in gimplify.c
151    only.  */
152 
153 static inline void
154 gimplify_seq_add_stmt (gimple_seq *seq_p, gimple gs)
155 {
156   gimple_seq_add_stmt_without_update (seq_p, gs);
157 }
158 
159 /* Append sequence SRC to the end of sequence *DST_P.  If *DST_P is
160    NULL, a new sequence is allocated.   This function is
161    similar to gimple_seq_add_seq, but does not scan the operands.
162    During gimplification, we need to manipulate statement sequences
163    before the def/use vectors have been constructed.  */
164 
165 static void
166 gimplify_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
167 {
168   gimple_stmt_iterator si;
169 
170   if (src == NULL)
171     return;
172 
173   si = gsi_last (*dst_p);
174   gsi_insert_seq_after_without_update (&si, src, GSI_NEW_STMT);
175 }
176 
177 
178 /* Pointer to a list of allocated gimplify_ctx structs to be used for pushing
179    and popping gimplify contexts.  */
180 
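/* The pool is a simple LIFO free list threaded through prev_context:
   ctx_alloc pops a struct from it (or allocates a fresh one) and ctx_free
   pushes it back, so contexts are recycled rather than reallocated.  */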
181 static struct gimplify_ctx *ctx_pool = NULL;
182 
183 /* Return a gimplify context struct from the pool.  */
184 
185 static inline struct gimplify_ctx *
186 ctx_alloc (void)
187 {
188   struct gimplify_ctx * c = ctx_pool;
189 
190   if (c)
191     ctx_pool = c->prev_context;
192   else
193     c = XNEW (struct gimplify_ctx);
194 
195   memset (c, '\0', sizeof (*c));
196   return c;
197 }
198 
199 /* Put gimplify context C back into the pool.  */
200 
201 static inline void
202 ctx_free (struct gimplify_ctx *c)
203 {
204   c->prev_context = ctx_pool;
205   ctx_pool = c;
206 }
207 
208 /* Free allocated ctx stack memory.  */
209 
210 void
211 free_gimplify_stack (void)
212 {
213   struct gimplify_ctx *c;
214 
215   while ((c = ctx_pool))
216     {
217       ctx_pool = c->prev_context;
218       free (c);
219     }
220 }
221 
222 
223 /* Set up a context for the gimplifier.  */
224 
225 void
226 push_gimplify_context (bool in_ssa, bool rhs_cond_ok)
227 {
228   struct gimplify_ctx *c = ctx_alloc ();
229 
230   c->prev_context = gimplify_ctxp;
231   gimplify_ctxp = c;
232   gimplify_ctxp->into_ssa = in_ssa;
233   gimplify_ctxp->allow_rhs_cond_expr = rhs_cond_ok;
234 }
235 
236 /* Tear down a context for the gimplifier.  If BODY is non-null, then
237    put the temporaries into the outer BIND_EXPR.  Otherwise, put them
238    in the local_decls.
239 
240    BODY is not a sequence, but the first tuple in a sequence.  */
241 
242 void
243 pop_gimplify_context (gimple body)
244 {
245   struct gimplify_ctx *c = gimplify_ctxp;
246 
247   gcc_assert (c
248               && (!c->bind_expr_stack.exists ()
249 		  || c->bind_expr_stack.is_empty ()));
250   c->bind_expr_stack.release ();
251   gimplify_ctxp = c->prev_context;
252 
253   if (body)
254     declare_vars (c->temps, body, false);
255   else
256     record_vars (c->temps);
257 
258   if (c->temp_htab.is_created ())
259     c->temp_htab.dispose ();
260   ctx_free (c);
261 }
262 
263 /* Push a GIMPLE_BIND tuple onto the stack of bindings.  */
264 
265 static void
266 gimple_push_bind_expr (gimple gimple_bind)
267 {
268   gimplify_ctxp->bind_expr_stack.reserve (8);
269   gimplify_ctxp->bind_expr_stack.safe_push (gimple_bind);
270 }
271 
272 /* Pop the first element off the stack of bindings.  */
273 
274 static void
275 gimple_pop_bind_expr (void)
276 {
277   gimplify_ctxp->bind_expr_stack.pop ();
278 }
279 
280 /* Return the first element of the stack of bindings.  */
281 
282 gimple
283 gimple_current_bind_expr (void)
284 {
285   return gimplify_ctxp->bind_expr_stack.last ();
286 }
287 
288 /* Return the stack of bindings created during gimplification.  */
289 
290 vec<gimple>
291 gimple_bind_expr_stack (void)
292 {
293   return gimplify_ctxp->bind_expr_stack;
294 }
295 
296 /* Return true iff there is a COND_EXPR between us and the innermost
297    CLEANUP_POINT_EXPR.  This info is used by gimple_push_cleanup.  */
298 
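/* Cleanups seen while inside a COND_EXPR may not run unconditionally; they
   must fire only on the branch that was actually executed, so
   gimple_push_cleanup uses this predicate to decide whether a cleanup needs
   to be guarded.  */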
299 static bool
300 gimple_conditional_context (void)
301 {
302   return gimplify_ctxp->conditions > 0;
303 }
304 
305 /* Note that we've entered a COND_EXPR.  */
306 
307 static void
308 gimple_push_condition (void)
309 {
310 #ifdef ENABLE_GIMPLE_CHECKING
311   if (gimplify_ctxp->conditions == 0)
312     gcc_assert (gimple_seq_empty_p (gimplify_ctxp->conditional_cleanups));
313 #endif
314   ++(gimplify_ctxp->conditions);
315 }
316 
317 /* Note that we've left a COND_EXPR.  If we're back at unconditional scope
318    now, add any conditional cleanups we've seen to the prequeue.  */
319 
320 static void
321 gimple_pop_condition (gimple_seq *pre_p)
322 {
323   int conds = --(gimplify_ctxp->conditions);
324 
325   gcc_assert (conds >= 0);
326   if (conds == 0)
327     {
328       gimplify_seq_add_seq (pre_p, gimplify_ctxp->conditional_cleanups);
329       gimplify_ctxp->conditional_cleanups = NULL;
330     }
331 }
332 
333 /* A stable comparison routine for use with splay trees and DECLs.  */
334 
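/* Comparing DECL_UIDs rather than pointer values keeps the splay tree's
   ordering deterministic, so gimplification decisions do not depend on the
   memory layout of a particular run.  */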
335 static int
336 splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
337 {
338   tree a = (tree) xa;
339   tree b = (tree) xb;
340 
341   return DECL_UID (a) - DECL_UID (b);
342 }
343 
344 /* Create a new omp construct that deals with variable remapping.  */
345 
346 static struct gimplify_omp_ctx *
347 new_omp_context (enum omp_region_type region_type)
348 {
349   struct gimplify_omp_ctx *c;
350 
351   c = XCNEW (struct gimplify_omp_ctx);
352   c->outer_context = gimplify_omp_ctxp;
353   c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
354   c->privatized_types = pointer_set_create ();
355   c->location = input_location;
356   c->region_type = region_type;
357   if ((region_type & ORT_TASK) == 0)
358     c->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
359   else
360     c->default_kind = OMP_CLAUSE_DEFAULT_UNSPECIFIED;
361 
362   return c;
363 }
364 
365 /* Destroy an omp construct that deals with variable remapping.  */
366 
367 static void
368 delete_omp_context (struct gimplify_omp_ctx *c)
369 {
370   splay_tree_delete (c->variables);
371   pointer_set_destroy (c->privatized_types);
372   XDELETE (c);
373 }
374 
375 static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int);
376 static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool);
377 
378 /* Both gimplify the statement T and append it to *SEQ_P.  This function
379    behaves exactly as gimplify_stmt, but you don't have to pass T as a
380    reference.  */
381 
382 void
383 gimplify_and_add (tree t, gimple_seq *seq_p)
384 {
385   gimplify_stmt (&t, seq_p);
386 }
387 
388 /* Gimplify statement T into sequence *SEQ_P, and return the first
389    tuple in the sequence of generated tuples for this statement.
390    Return NULL if gimplifying T produced no tuples.  */
391 
392 static gimple
393 gimplify_and_return_first (tree t, gimple_seq *seq_p)
394 {
395   gimple_stmt_iterator last = gsi_last (*seq_p);
396 
397   gimplify_and_add (t, seq_p);
398 
399   if (!gsi_end_p (last))
400     {
401       gsi_next (&last);
402       return gsi_stmt (last);
403     }
404   else
405     return gimple_seq_first_stmt (*seq_p);
406 }
407 
408 /* Returns true iff T is a valid RHS for an assignment to an un-renamed
409    LHS, or for a call argument.  */
410 
411 static bool
412 is_gimple_mem_rhs (tree t)
413 {
414   /* If we're dealing with a renamable type, either source or dest must be
415      a renamed variable.  */
416   if (is_gimple_reg_type (TREE_TYPE (t)))
417     return is_gimple_val (t);
418   else
419     return is_gimple_val (t) || is_gimple_lvalue (t);
420 }
421 
422 /* Return true if T is a CALL_EXPR or an expression that can be
423    assigned to a temporary.  Note that this predicate should only be
424    used during gimplification.  See the rationale for this in
425    gimplify_modify_expr.  */
426 
427 static bool
428 is_gimple_reg_rhs_or_call (tree t)
429 {
430   return (get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS
431 	  || TREE_CODE (t) == CALL_EXPR);
432 }
433 
434 /* Return true if T is a valid memory RHS or a CALL_EXPR.  Note that
435    this predicate should only be used during gimplification.  See the
436    rationale for this in gimplify_modify_expr.  */
437 
438 static bool
439 is_gimple_mem_rhs_or_call (tree t)
440 {
441   /* If we're dealing with a renamable type, either source or dest must be
442      a renamed variable.  */
443   if (is_gimple_reg_type (TREE_TYPE (t)))
444     return is_gimple_val (t);
445   else
446     return (is_gimple_val (t) || is_gimple_lvalue (t)
447 	    || TREE_CODE (t) == CALL_EXPR);
448 }
449 
450 /* Create a temporary with a name derived from VAL.  Subroutine of
451    lookup_tmp_var; nobody else should call this function.  */
452 
453 static inline tree
454 create_tmp_from_val (tree val, bool is_formal)
455 {
456   /* Drop all qualifiers and address-space information from the value type.  */
457   tree type = TYPE_MAIN_VARIANT (TREE_TYPE (val));
458   tree var = create_tmp_var (type, get_name (val));
459   if (is_formal
460       && (TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
461 	  || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE))
462     DECL_GIMPLE_REG_P (var) = 1;
463   return var;
464 }
465 
466 /* Create a temporary to hold the value of VAL.  If IS_FORMAL, try to reuse
467    an existing expression temporary.  */
468 
469 static tree
470 lookup_tmp_var (tree val, bool is_formal)
471 {
472   tree ret;
473 
474   /* If not optimizing, never really reuse a temporary.  local-alloc
475      won't allocate any variable that is used in more than one basic
476      block, which means it will go into memory, causing much extra
477      work in reload and final and poorer code generation, outweighing
478      the extra memory allocation here.  */
479   if (!optimize || !is_formal || TREE_SIDE_EFFECTS (val))
480     ret = create_tmp_from_val (val, is_formal);
481   else
482     {
483       elt_t elt, *elt_p;
484       elt_t **slot;
485 
486       elt.val = val;
487       if (!gimplify_ctxp->temp_htab.is_created ())
488         gimplify_ctxp->temp_htab.create (1000);
489       slot = gimplify_ctxp->temp_htab.find_slot (&elt, INSERT);
490       if (*slot == NULL)
491 	{
492 	  elt_p = XNEW (elt_t);
493 	  elt_p->val = val;
494 	  elt_p->temp = ret = create_tmp_from_val (val, is_formal);
495 	  *slot = elt_p;
496 	}
497       else
498 	{
499 	  elt_p = *slot;
500           ret = elt_p->temp;
501 	}
502     }
503 
504   return ret;
505 }
506 
507 /* Helper for get_formal_tmp_var and get_initialized_tmp_var.  */
508 
509 static tree
510 internal_get_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
511                       bool is_formal)
512 {
513   tree t, mod;
514 
515   /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we
516      can create an INIT_EXPR and convert it into a GIMPLE_CALL below.  */
517   gimplify_expr (&val, pre_p, post_p, is_gimple_reg_rhs_or_call,
518 		 fb_rvalue);
519 
520   if (gimplify_ctxp->into_ssa
521       && is_gimple_reg_type (TREE_TYPE (val)))
522     t = make_ssa_name (TYPE_MAIN_VARIANT (TREE_TYPE (val)), NULL);
523   else
524     t = lookup_tmp_var (val, is_formal);
525 
526   mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val));
527 
528   SET_EXPR_LOCATION (mod, EXPR_LOC_OR_LOC (val, input_location));
529 
530   /* gimplify_modify_expr might want to reduce this further.  */
531   gimplify_and_add (mod, pre_p);
532   ggc_free (mod);
533 
534   return t;
535 }
536 
537 /* Return a formal temporary variable initialized with VAL.  PRE_P is as
538    in gimplify_expr.  Only use this function if:
539 
540    1) The value of the unfactored expression represented by VAL will not
541       change between the initialization and use of the temporary, and
542    2) The temporary will not be otherwise modified.
543 
544    For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
545    and #2 means it is inappropriate for && temps.
546 
547    For other cases, use get_initialized_tmp_var instead.  */
548 
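/* For example, when gimplifying "a = (b + c) + d", the subexpression b + c
   is not a GIMPLE value, so it is typically replaced by a formal temporary:
   "t = b + c" is emitted to PRE_P and the statement becomes "a = t + d".  */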
549 tree
550 get_formal_tmp_var (tree val, gimple_seq *pre_p)
551 {
552   return internal_get_tmp_var (val, pre_p, NULL, true);
553 }
554 
555 /* Return a temporary variable initialized with VAL.  PRE_P and POST_P
556    are as in gimplify_expr.  */
557 
558 tree
559 get_initialized_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p)
560 {
561   return internal_get_tmp_var (val, pre_p, post_p, false);
562 }
563 
564 /* Declare all the variables in VARS in SCOPE.  If DEBUG_INFO is true,
565    generate debug info for them; otherwise don't.  */
566 
567 void
568 declare_vars (tree vars, gimple scope, bool debug_info)
569 {
570   tree last = vars;
571   if (last)
572     {
573       tree temps, block;
574 
575       gcc_assert (gimple_code (scope) == GIMPLE_BIND);
576 
577       temps = nreverse (last);
578 
579       block = gimple_bind_block (scope);
580       gcc_assert (!block || TREE_CODE (block) == BLOCK);
581       if (!block || !debug_info)
582 	{
583 	  DECL_CHAIN (last) = gimple_bind_vars (scope);
584 	  gimple_bind_set_vars (scope, temps);
585 	}
586       else
587 	{
588 	  /* We need to attach the nodes both to the BIND_EXPR and to its
589 	     associated BLOCK for debugging purposes.  The key point here
590 	     is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
591 	     is a subchain of the BIND_EXPR_VARS of the BIND_EXPR.  */
592 	  if (BLOCK_VARS (block))
593 	    BLOCK_VARS (block) = chainon (BLOCK_VARS (block), temps);
594 	  else
595 	    {
596 	      gimple_bind_set_vars (scope,
597 	      			    chainon (gimple_bind_vars (scope), temps));
598 	      BLOCK_VARS (block) = temps;
599 	    }
600 	}
601     }
602 }
603 
604 /* For VAR a VAR_DECL of variable size, try to find a constant upper bound
605    for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly.  Abort if
606    no such upper bound can be obtained.  */
607 
608 static void
609 force_constant_size (tree var)
610 {
611   /* The only attempt we make is by querying the maximum size of objects
612      of the variable's type.  */
613 
614   HOST_WIDE_INT max_size;
615 
616   gcc_assert (TREE_CODE (var) == VAR_DECL);
617 
618   max_size = max_int_size_in_bytes (TREE_TYPE (var));
619 
620   gcc_assert (max_size >= 0);
621 
622   DECL_SIZE_UNIT (var)
623     = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var)), max_size);
624   DECL_SIZE (var)
625     = build_int_cst (TREE_TYPE (DECL_SIZE (var)), max_size * BITS_PER_UNIT);
626 }
627 
628 /* Push the temporary variable TMP into the current binding.  */
629 
630 void
631 gimple_add_tmp_var_fn (struct function *fn, tree tmp)
632 {
633   gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));
634 
635   /* Later processing assumes that the object size is constant, which might
636      not be true at this point.  Force the use of a constant upper bound in
637      this case.  */
638   if (!tree_fits_uhwi_p (DECL_SIZE_UNIT (tmp)))
639     force_constant_size (tmp);
640 
641   DECL_CONTEXT (tmp) = fn->decl;
642   DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;
643 
644   record_vars_into (tmp, fn->decl);
645 }
646 
647 /* Push the temporary variable TMP into the current binding.  */
648 
649 void
650 gimple_add_tmp_var (tree tmp)
651 {
652   gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));
653 
654   /* Later processing assumes that the object size is constant, which might
655      not be true at this point.  Force the use of a constant upper bound in
656      this case.  */
657   if (!tree_fits_uhwi_p (DECL_SIZE_UNIT (tmp)))
658     force_constant_size (tmp);
659 
660   DECL_CONTEXT (tmp) = current_function_decl;
661   DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;
662 
663   if (gimplify_ctxp)
664     {
665       DECL_CHAIN (tmp) = gimplify_ctxp->temps;
666       gimplify_ctxp->temps = tmp;
667 
668       /* Mark temporaries local within the nearest enclosing parallel.  */
669       if (gimplify_omp_ctxp)
670 	{
671 	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
672 	  while (ctx
673 		 && (ctx->region_type == ORT_WORKSHARE
674 		     || ctx->region_type == ORT_SIMD))
675 	    ctx = ctx->outer_context;
676 	  if (ctx)
677 	    omp_add_variable (ctx, tmp, GOVD_LOCAL | GOVD_SEEN);
678 	}
679     }
680   else if (cfun)
681     record_vars (tmp);
682   else
683     {
684       gimple_seq body_seq;
685 
686       /* This case is for nested functions.  We need to expose the locals
687 	 they create.  */
688       body_seq = gimple_body (current_function_decl);
689       declare_vars (tmp, gimple_seq_first_stmt (body_seq), false);
690     }
691 }
692 
693 
694 
695 /* This page contains routines to unshare tree nodes, i.e. to duplicate tree
696    nodes that are referenced more than once in GENERIC functions.  This is
697    necessary because gimplification (translation into GIMPLE) is performed
698    by modifying tree nodes in-place, so gimplification of a shared node in a
699    first context could generate an invalid GIMPLE form in a second context.
700 
701    This is achieved with a simple mark/copy/unmark algorithm that walks the
702    GENERIC representation top-down, marks nodes with TREE_VISITED the first
703    time it encounters them, duplicates them if they already have TREE_VISITED
704    set, and finally removes the TREE_VISITED marks it has set.
705 
706    The algorithm works only at the function level, i.e. it generates a GENERIC
707    representation of a function with no nodes shared within the function when
708    passed a GENERIC function (except for nodes that are allowed to be shared).
709 
710    At the global level, it is also necessary to unshare tree nodes that are
711    referenced in more than one function, for the same aforementioned reason.
712    This requires some cooperation from the front-end.  There are 2 strategies:
713 
714      1. Manual unsharing.  The front-end needs to call unshare_expr on every
715         expression that might end up being shared across functions.
716 
717      2. Deep unsharing.  This is an extension of regular unsharing.  Instead
718         of calling unshare_expr on expressions that might be shared across
719         functions, the front-end pre-marks them with TREE_VISITED.  This will
720         ensure that they are unshared on the first reference within functions
721         when the regular unsharing algorithm runs.  The counterpart is that
722         this algorithm must look deeper than for manual unsharing, which is
723         specified by LANG_HOOKS_DEEP_UNSHARING.
724 
725   If there are only a few specific cases of node sharing across functions, it is
726   probably easier for a front-end to unshare the expressions manually.  On the
727   contrary, if the expressions generated at the global level are as widespread
728   as expressions generated within functions, deep unsharing is very likely the
729   way to go.  */
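/* As a concrete example, if a front end builds a single tree for "i + 1"
   and reuses it in two statements, gimplifying the first occurrence in
   place would leave the second statement pointing at the already-rewritten
   form; copying shared nodes up front avoids that.  */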
730 
731 /* Similar to copy_tree_r but do not copy SAVE_EXPR or TARGET_EXPR nodes.
732    These nodes model computations that must be done once.  If we were to
733    unshare something like SAVE_EXPR(i++), the gimplification process would
734    create wrong code.  However, if DATA is non-null, it must hold a pointer
735    set that is used to unshare the subtrees of these nodes.  */
736 
737 static tree
738 mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data)
739 {
740   tree t = *tp;
741   enum tree_code code = TREE_CODE (t);
742 
743   /* Do not copy SAVE_EXPR, TARGET_EXPR or BIND_EXPR nodes themselves, but
744      copy their subtrees if we can make sure to do it only once.  */
745   if (code == SAVE_EXPR || code == TARGET_EXPR || code == BIND_EXPR)
746     {
747       if (data && !pointer_set_insert ((struct pointer_set_t *)data, t))
748 	;
749       else
750 	*walk_subtrees = 0;
751     }
752 
753   /* Stop at types, decls, constants like copy_tree_r.  */
754   else if (TREE_CODE_CLASS (code) == tcc_type
755 	   || TREE_CODE_CLASS (code) == tcc_declaration
756 	   || TREE_CODE_CLASS (code) == tcc_constant
757 	   /* We can't do anything sensible with a BLOCK used as an
758 	      expression, but we also can't just die when we see it
759 	      because of non-expression uses.  So we avert our eyes
760 	      and cross our fingers.  Silly Java.  */
761 	   || code == BLOCK)
762     *walk_subtrees = 0;
763 
764   /* Cope with the statement expression extension.  */
765   else if (code == STATEMENT_LIST)
766     ;
767 
768   /* Leave the bulk of the work to copy_tree_r itself.  */
769   else
770     copy_tree_r (tp, walk_subtrees, NULL);
771 
772   return NULL_TREE;
773 }
774 
775 /* Callback for walk_tree to unshare most of the shared trees rooted at *TP.
776    If *TP has been visited already, then *TP is deeply copied by calling
777    mostly_copy_tree_r.  DATA is passed to mostly_copy_tree_r unmodified.  */
778 
779 static tree
780 copy_if_shared_r (tree *tp, int *walk_subtrees, void *data)
781 {
782   tree t = *tp;
783   enum tree_code code = TREE_CODE (t);
784 
785   /* Skip types, decls, and constants.  But we do want to look at their
786      types and the bounds of types.  Mark them as visited so we properly
787      unmark their subtrees on the unmark pass.  If we've already seen them,
788      don't look down further.  */
789   if (TREE_CODE_CLASS (code) == tcc_type
790       || TREE_CODE_CLASS (code) == tcc_declaration
791       || TREE_CODE_CLASS (code) == tcc_constant)
792     {
793       if (TREE_VISITED (t))
794 	*walk_subtrees = 0;
795       else
796 	TREE_VISITED (t) = 1;
797     }
798 
799   /* If this node has been visited already, unshare it and don't look
800      any deeper.  */
801   else if (TREE_VISITED (t))
802     {
803       walk_tree (tp, mostly_copy_tree_r, data, NULL);
804       *walk_subtrees = 0;
805     }
806 
807   /* Otherwise, mark the node as visited and keep looking.  */
808   else
809     TREE_VISITED (t) = 1;
810 
811   return NULL_TREE;
812 }
813 
814 /* Unshare most of the shared trees rooted at *TP.  DATA is passed to the
815    copy_if_shared_r callback unmodified.  */
816 
817 static inline void
818 copy_if_shared (tree *tp, void *data)
819 {
820   walk_tree (tp, copy_if_shared_r, data, NULL);
821 }
822 
823 /* Unshare all the trees in the body of FNDECL, as well as in the bodies of
824    any nested functions.  */
825 
826 static void
827 unshare_body (tree fndecl)
828 {
829   struct cgraph_node *cgn = cgraph_get_node (fndecl);
830   /* If the language requires deep unsharing, we need a pointer set to make
831      sure we don't repeatedly unshare subtrees of unshareable nodes.  */
832   struct pointer_set_t *visited
833     = lang_hooks.deep_unsharing ? pointer_set_create () : NULL;
834 
835   copy_if_shared (&DECL_SAVED_TREE (fndecl), visited);
836   copy_if_shared (&DECL_SIZE (DECL_RESULT (fndecl)), visited);
837   copy_if_shared (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)), visited);
838 
839   if (visited)
840     pointer_set_destroy (visited);
841 
842   if (cgn)
843     for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
844       unshare_body (cgn->decl);
845 }
846 
847 /* Callback for walk_tree to unmark the visited trees rooted at *TP.
848    Subtrees are walked until the first unvisited node is encountered.  */
849 
850 static tree
851 unmark_visited_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
852 {
853   tree t = *tp;
854 
855   /* If this node has been visited, unmark it and keep looking.  */
856   if (TREE_VISITED (t))
857     TREE_VISITED (t) = 0;
858 
859   /* Otherwise, don't look any deeper.  */
860   else
861     *walk_subtrees = 0;
862 
863   return NULL_TREE;
864 }
865 
866 /* Unmark the visited trees rooted at *TP.  */
867 
868 static inline void
869 unmark_visited (tree *tp)
870 {
871   walk_tree (tp, unmark_visited_r, NULL, NULL);
872 }
873 
874 /* Likewise, but mark all trees as not visited.  */
875 
876 static void
877 unvisit_body (tree fndecl)
878 {
879   struct cgraph_node *cgn = cgraph_get_node (fndecl);
880 
881   unmark_visited (&DECL_SAVED_TREE (fndecl));
882   unmark_visited (&DECL_SIZE (DECL_RESULT (fndecl)));
883   unmark_visited (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)));
884 
885   if (cgn)
886     for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
887       unvisit_body (cgn->decl);
888 }
889 
890 /* Unconditionally make an unshared copy of EXPR.  This is used when using
891    stored expressions which span multiple functions, such as BINFO_VTABLE,
892    as the normal unsharing process can't tell that they're shared.  */
893 
894 tree
895 unshare_expr (tree expr)
896 {
897   walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
898   return expr;
899 }
900 
901 /* Worker for unshare_expr_without_location.  */
902 
903 static tree
904 prune_expr_location (tree *tp, int *walk_subtrees, void *)
905 {
906   if (EXPR_P (*tp))
907     SET_EXPR_LOCATION (*tp, UNKNOWN_LOCATION);
908   else
909     *walk_subtrees = 0;
910   return NULL_TREE;
911 }
912 
913 /* Similar to unshare_expr but also prune all expression locations
914    from EXPR.  */
915 
916 tree
917 unshare_expr_without_location (tree expr)
918 {
919   walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
920   if (EXPR_P (expr))
921     walk_tree (&expr, prune_expr_location, NULL, NULL);
922   return expr;
923 }
924 
925 /* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
926    contain statements and have a value.  Assign its value to a temporary
927    and give it void_type_node.  Return the temporary, or NULL_TREE if
928    WRAPPER was already void.  */
929 
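/* For instance, a non-void BIND_EXPR used as an rvalue has its innermost
   value-producing expression rewritten into an initialization of a "retval"
   temporary (or into the MODIFY_EXPR/INIT_EXPR given as TEMP), and that
   temporary is what the caller substitutes for the wrapper's value.  */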
930 tree
931 voidify_wrapper_expr (tree wrapper, tree temp)
932 {
933   tree type = TREE_TYPE (wrapper);
934   if (type && !VOID_TYPE_P (type))
935     {
936       tree *p;
937 
938       /* Set p to point to the body of the wrapper.  Loop until we find
939 	 something that isn't a wrapper.  */
940       for (p = &wrapper; p && *p; )
941 	{
942 	  switch (TREE_CODE (*p))
943 	    {
944 	    case BIND_EXPR:
945 	      TREE_SIDE_EFFECTS (*p) = 1;
946 	      TREE_TYPE (*p) = void_type_node;
947 	      /* For a BIND_EXPR, the body is operand 1.  */
948 	      p = &BIND_EXPR_BODY (*p);
949 	      break;
950 
951 	    case CLEANUP_POINT_EXPR:
952 	    case TRY_FINALLY_EXPR:
953 	    case TRY_CATCH_EXPR:
954 	      TREE_SIDE_EFFECTS (*p) = 1;
955 	      TREE_TYPE (*p) = void_type_node;
956 	      p = &TREE_OPERAND (*p, 0);
957 	      break;
958 
959 	    case STATEMENT_LIST:
960 	      {
961 		tree_stmt_iterator i = tsi_last (*p);
962 		TREE_SIDE_EFFECTS (*p) = 1;
963 		TREE_TYPE (*p) = void_type_node;
964 		p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i);
965 	      }
966 	      break;
967 
968 	    case COMPOUND_EXPR:
969 	      /* Advance to the last statement.  Set all container types to
970 		 void.  */
971 	      for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1))
972 		{
973 		  TREE_SIDE_EFFECTS (*p) = 1;
974 		  TREE_TYPE (*p) = void_type_node;
975 		}
976 	      break;
977 
978 	    case TRANSACTION_EXPR:
979 	      TREE_SIDE_EFFECTS (*p) = 1;
980 	      TREE_TYPE (*p) = void_type_node;
981 	      p = &TRANSACTION_EXPR_BODY (*p);
982 	      break;
983 
984 	    default:
985 	      /* Assume that any tree upon which voidify_wrapper_expr is
986 		 directly called is a wrapper, and that its body is op0.  */
987 	      if (p == &wrapper)
988 		{
989 		  TREE_SIDE_EFFECTS (*p) = 1;
990 		  TREE_TYPE (*p) = void_type_node;
991 		  p = &TREE_OPERAND (*p, 0);
992 		  break;
993 		}
994 	      goto out;
995 	    }
996 	}
997 
998     out:
999       if (p == NULL || IS_EMPTY_STMT (*p))
1000 	temp = NULL_TREE;
1001       else if (temp)
1002 	{
1003 	  /* The wrapper is on the RHS of an assignment that we're pushing
1004 	     down.  */
1005 	  gcc_assert (TREE_CODE (temp) == INIT_EXPR
1006 		      || TREE_CODE (temp) == MODIFY_EXPR);
1007 	  TREE_OPERAND (temp, 1) = *p;
1008 	  *p = temp;
1009 	}
1010       else
1011 	{
1012 	  temp = create_tmp_var (type, "retval");
1013 	  *p = build2 (INIT_EXPR, type, temp, *p);
1014 	}
1015 
1016       return temp;
1017     }
1018 
1019   return NULL_TREE;
1020 }
1021 
1022 /* Prepare calls to builtins to SAVE and RESTORE the stack as well as
1023    a temporary through which they communicate.  */
1024 
1025 static void
1026 build_stack_save_restore (gimple *save, gimple *restore)
1027 {
1028   tree tmp_var;
1029 
1030   *save = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_SAVE), 0);
1031   tmp_var = create_tmp_var (ptr_type_node, "saved_stack");
1032   gimple_call_set_lhs (*save, tmp_var);
1033 
1034   *restore
1035     = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_RESTORE),
1036 			 1, tmp_var);
1037 }
1038 
1039 /* Gimplify a BIND_EXPR.  Just voidify and recurse.  */
1040 
1041 static enum gimplify_status
1042 gimplify_bind_expr (tree *expr_p, gimple_seq *pre_p)
1043 {
1044   tree bind_expr = *expr_p;
1045   bool old_save_stack = gimplify_ctxp->save_stack;
1046   tree t;
1047   gimple gimple_bind;
1048   gimple_seq body, cleanup;
1049   gimple stack_save;
1050 
1051   tree temp = voidify_wrapper_expr (bind_expr, NULL);
1052 
1053   /* Mark variables seen in this bind expr.  */
1054   for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
1055     {
1056       if (TREE_CODE (t) == VAR_DECL)
1057 	{
1058 	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
1059 
1060 	  /* Mark variable as local.  */
1061 	  if (ctx && !DECL_EXTERNAL (t)
1062 	      && (! DECL_SEEN_IN_BIND_EXPR_P (t)
1063 		  || splay_tree_lookup (ctx->variables,
1064 					(splay_tree_key) t) == NULL))
1065 	    {
1066 	      if (ctx->region_type == ORT_SIMD
1067 		  && TREE_ADDRESSABLE (t)
1068 		  && !TREE_STATIC (t))
1069 		omp_add_variable (ctx, t, GOVD_PRIVATE | GOVD_SEEN);
1070 	      else
1071 		omp_add_variable (ctx, t, GOVD_LOCAL | GOVD_SEEN);
1072 	    }
1073 
1074 	  DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
1075 
1076 	  if (DECL_HARD_REGISTER (t) && !is_global_var (t) && cfun)
1077 	    cfun->has_local_explicit_reg_vars = true;
1078 	}
1079 
1080       /* Preliminarily mark non-addressed complex variables as eligible
1081 	 for promotion to gimple registers.  We'll transform their uses
1082 	 as we find them.  */
1083       if ((TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
1084 	   || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
1085 	  && !TREE_THIS_VOLATILE (t)
1086 	  && (TREE_CODE (t) == VAR_DECL && !DECL_HARD_REGISTER (t))
1087 	  && !needs_to_live_in_memory (t))
1088 	DECL_GIMPLE_REG_P (t) = 1;
1089     }
1090 
1091   gimple_bind = gimple_build_bind (BIND_EXPR_VARS (bind_expr), NULL,
1092                                    BIND_EXPR_BLOCK (bind_expr));
1093   gimple_push_bind_expr (gimple_bind);
1094 
1095   gimplify_ctxp->save_stack = false;
1096 
1097   /* Gimplify the body into the GIMPLE_BIND tuple's body.  */
1098   body = NULL;
1099   gimplify_stmt (&BIND_EXPR_BODY (bind_expr), &body);
1100   gimple_bind_set_body (gimple_bind, body);
1101 
1102   cleanup = NULL;
1103   stack_save = NULL;
1104   if (gimplify_ctxp->save_stack)
1105     {
1106       gimple stack_restore;
1107 
1108       /* Save stack on entry and restore it on exit.  Add a try_finally
1109 	 block to achieve this.  */
1110       build_stack_save_restore (&stack_save, &stack_restore);
1111 
1112       gimplify_seq_add_stmt (&cleanup, stack_restore);
1113     }
1114 
1115   /* Add clobbers for all variables that go out of scope.  */
1116   for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
1117     {
1118       if (TREE_CODE (t) == VAR_DECL
1119 	  && !is_global_var (t)
1120 	  && DECL_CONTEXT (t) == current_function_decl
1121 	  && !DECL_HARD_REGISTER (t)
1122 	  && !TREE_THIS_VOLATILE (t)
1123 	  && !DECL_HAS_VALUE_EXPR_P (t)
1124 	  /* Only care for variables that have to be in memory.  Others
1125 	     will be rewritten into SSA names, hence moved to the top-level.  */
1126 	  && !is_gimple_reg (t)
1127 	  && flag_stack_reuse != SR_NONE)
1128 	{
1129 	  tree clobber = build_constructor (TREE_TYPE (t),
1130 					    NULL);
1131 	  TREE_THIS_VOLATILE (clobber) = 1;
1132 	  gimplify_seq_add_stmt (&cleanup, gimple_build_assign (t, clobber));
1133 	}
1134     }
1135 
1136   if (cleanup)
1137     {
1138       gimple gs;
1139       gimple_seq new_body;
1140 
1141       new_body = NULL;
1142       gs = gimple_build_try (gimple_bind_body (gimple_bind), cleanup,
1143 	  		     GIMPLE_TRY_FINALLY);
1144 
1145       if (stack_save)
1146 	gimplify_seq_add_stmt (&new_body, stack_save);
1147       gimplify_seq_add_stmt (&new_body, gs);
1148       gimple_bind_set_body (gimple_bind, new_body);
1149     }
1150 
1151   gimplify_ctxp->save_stack = old_save_stack;
1152   gimple_pop_bind_expr ();
1153 
1154   gimplify_seq_add_stmt (pre_p, gimple_bind);
1155 
1156   if (temp)
1157     {
1158       *expr_p = temp;
1159       return GS_OK;
1160     }
1161 
1162   *expr_p = NULL_TREE;
1163   return GS_ALL_DONE;
1164 }
1165 
1166 /* Gimplify a RETURN_EXPR.  If the expression to be returned is not a
1167    GIMPLE value, it is assigned to a new temporary and the statement is
1168    re-written to return the temporary.
1169 
1170    PRE_P points to the sequence where side effects that must happen before
1171    STMT should be stored.  */
1172 
1173 static enum gimplify_status
1174 gimplify_return_expr (tree stmt, gimple_seq *pre_p)
1175 {
1176   gimple ret;
1177   tree ret_expr = TREE_OPERAND (stmt, 0);
1178   tree result_decl, result;
1179 
1180   if (ret_expr == error_mark_node)
1181     return GS_ERROR;
1182 
1183   /* Implicit _Cilk_sync must be inserted right before any return statement
1184      if there is a _Cilk_spawn in the function.  If the user has provided a
1185      _Cilk_sync, the optimizer should remove this duplicate one.  */
1186   if (fn_contains_cilk_spawn_p (cfun))
1187     {
1188       tree impl_sync = build0 (CILK_SYNC_STMT, void_type_node);
1189       gimplify_and_add (impl_sync, pre_p);
1190     }
1191 
1192   if (!ret_expr
1193       || TREE_CODE (ret_expr) == RESULT_DECL
1194       || ret_expr == error_mark_node)
1195     {
1196       gimple ret = gimple_build_return (ret_expr);
1197       gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
1198       gimplify_seq_add_stmt (pre_p, ret);
1199       return GS_ALL_DONE;
1200     }
1201 
1202   if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
1203     result_decl = NULL_TREE;
1204   else
1205     {
1206       result_decl = TREE_OPERAND (ret_expr, 0);
1207 
1208       /* See through a return by reference.  */
1209       if (TREE_CODE (result_decl) == INDIRECT_REF)
1210 	result_decl = TREE_OPERAND (result_decl, 0);
1211 
1212       gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR
1213 		   || TREE_CODE (ret_expr) == INIT_EXPR)
1214 		  && TREE_CODE (result_decl) == RESULT_DECL);
1215     }
1216 
1217   /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
1218      Recall that aggregate_value_p is FALSE for any aggregate type that is
1219      returned in registers.  If we're returning values in registers, then
1220      we don't want to extend the lifetime of the RESULT_DECL, particularly
1221      across another call.  In addition, for those aggregates for which
1222      hard_function_value generates a PARALLEL, we'll die during normal
1223      expansion of structure assignments; there's special code in expand_return
1224      to handle this case that does not exist in expand_expr.  */
1225   if (!result_decl)
1226     result = NULL_TREE;
1227   else if (aggregate_value_p (result_decl, TREE_TYPE (current_function_decl)))
1228     {
1229       if (TREE_CODE (DECL_SIZE (result_decl)) != INTEGER_CST)
1230 	{
1231 	  if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (result_decl)))
1232 	    gimplify_type_sizes (TREE_TYPE (result_decl), pre_p);
1233 	  /* Note that we don't use gimplify_vla_decl because the RESULT_DECL
1234 	     should be effectively allocated by the caller, i.e. all calls to
1235 	     this function must be subject to the Return Slot Optimization.  */
1236 	  gimplify_one_sizepos (&DECL_SIZE (result_decl), pre_p);
1237 	  gimplify_one_sizepos (&DECL_SIZE_UNIT (result_decl), pre_p);
1238 	}
1239       result = result_decl;
1240     }
1241   else if (gimplify_ctxp->return_temp)
1242     result = gimplify_ctxp->return_temp;
1243   else
1244     {
1245       result = create_tmp_reg (TREE_TYPE (result_decl), NULL);
1246 
1247       /* ??? With complex control flow (usually involving abnormal edges),
1248 	 we can wind up warning about an uninitialized value for this.  Due
1249 	 to how this variable is constructed and initialized, this is never
1250 	 true.  Give up and never warn.  */
1251       TREE_NO_WARNING (result) = 1;
1252 
1253       gimplify_ctxp->return_temp = result;
1254     }
1255 
1256   /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
1257      Then gimplify the whole thing.  */
1258   if (result != result_decl)
1259     TREE_OPERAND (ret_expr, 0) = result;
1260 
1261   gimplify_and_add (TREE_OPERAND (stmt, 0), pre_p);
1262 
1263   ret = gimple_build_return (result);
1264   gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
1265   gimplify_seq_add_stmt (pre_p, ret);
1266 
1267   return GS_ALL_DONE;
1268 }
1269 
1270 /* Gimplify a variable-length array DECL.  */
1271 
1272 static void
1273 gimplify_vla_decl (tree decl, gimple_seq *seq_p)
1274 {
1275   /* This is a variable-sized decl.  Simplify its size and mark it
1276      for deferred expansion.  */
1277   tree t, addr, ptr_type;
1278 
1279   gimplify_one_sizepos (&DECL_SIZE (decl), seq_p);
1280   gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), seq_p);
1281 
1282   /* Don't mess with a DECL_VALUE_EXPR set by the front-end.  */
1283   if (DECL_HAS_VALUE_EXPR_P (decl))
1284     return;
1285 
1286   /* All occurrences of this decl in final gimplified code will be
1287      replaced by indirection.  Setting DECL_VALUE_EXPR does two
1288      things: First, it lets the rest of the gimplifier know what
1289      replacement to use.  Second, it lets the debug info know
1290      where to find the value.  */
1291   ptr_type = build_pointer_type (TREE_TYPE (decl));
1292   addr = create_tmp_var (ptr_type, get_name (decl));
1293   DECL_IGNORED_P (addr) = 0;
1294   t = build_fold_indirect_ref (addr);
1295   TREE_THIS_NOTRAP (t) = 1;
1296   SET_DECL_VALUE_EXPR (decl, t);
1297   DECL_HAS_VALUE_EXPR_P (decl) = 1;
1298 
1299   t = builtin_decl_explicit (BUILT_IN_ALLOCA_WITH_ALIGN);
1300   t = build_call_expr (t, 2, DECL_SIZE_UNIT (decl),
1301 		       size_int (DECL_ALIGN (decl)));
1302   /* The call has been built for a variable-sized object.  */
1303   CALL_ALLOCA_FOR_VAR_P (t) = 1;
1304   t = fold_convert (ptr_type, t);
1305   t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);
1306 
1307   gimplify_and_add (t, seq_p);
1308 
1309   /* Indicate that we need to restore the stack level when the
1310      enclosing BIND_EXPR is exited.  */
1311   gimplify_ctxp->save_stack = true;
1312 }
1313 
1314 /* A helper function to be called via walk_tree.  Mark all labels under *TP
1315    as being forced.  To be called for DECL_INITIAL of static variables.  */
1316 
1317 static tree
1318 force_labels_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1319 {
1320   if (TYPE_P (*tp))
1321     *walk_subtrees = 0;
1322   if (TREE_CODE (*tp) == LABEL_DECL)
1323     FORCED_LABEL (*tp) = 1;
1324 
1325   return NULL_TREE;
1326 }
1327 
1328 /* Gimplify a DECL_EXPR node *STMT_P by making any necessary allocation
1329    and initialization explicit.  */
1330 
1331 static enum gimplify_status
1332 gimplify_decl_expr (tree *stmt_p, gimple_seq *seq_p)
1333 {
1334   tree stmt = *stmt_p;
1335   tree decl = DECL_EXPR_DECL (stmt);
1336 
1337   *stmt_p = NULL_TREE;
1338 
1339   if (TREE_TYPE (decl) == error_mark_node)
1340     return GS_ERROR;
1341 
1342   if ((TREE_CODE (decl) == TYPE_DECL
1343        || TREE_CODE (decl) == VAR_DECL)
1344       && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl)))
1345     gimplify_type_sizes (TREE_TYPE (decl), seq_p);
1346 
1347   /* ??? DECL_ORIGINAL_TYPE is streamed for LTO so it needs to be gimplified
1348      in case its size expressions contain problematic nodes like CALL_EXPR.  */
1349   if (TREE_CODE (decl) == TYPE_DECL
1350       && DECL_ORIGINAL_TYPE (decl)
1351       && !TYPE_SIZES_GIMPLIFIED (DECL_ORIGINAL_TYPE (decl)))
1352     gimplify_type_sizes (DECL_ORIGINAL_TYPE (decl), seq_p);
1353 
1354   if (TREE_CODE (decl) == VAR_DECL && !DECL_EXTERNAL (decl))
1355     {
1356       tree init = DECL_INITIAL (decl);
1357 
1358       if (TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
1359 	  || (!TREE_STATIC (decl)
1360 	      && flag_stack_check == GENERIC_STACK_CHECK
1361 	      && compare_tree_int (DECL_SIZE_UNIT (decl),
1362 				   STACK_CHECK_MAX_VAR_SIZE) > 0))
1363 	gimplify_vla_decl (decl, seq_p);
1364 
1365       /* Some front ends do not explicitly declare all anonymous
1366 	 artificial variables.  We compensate here by declaring the
1367 	 variables, though it would be better if the front ends would
1368 	 explicitly declare them.  */
1369       if (!DECL_SEEN_IN_BIND_EXPR_P (decl)
1370 	  && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
1371 	gimple_add_tmp_var (decl);
1372 
1373       if (init && init != error_mark_node)
1374 	{
1375 	  if (!TREE_STATIC (decl))
1376 	    {
1377 	      DECL_INITIAL (decl) = NULL_TREE;
1378 	      init = build2 (INIT_EXPR, void_type_node, decl, init);
1379 	      gimplify_and_add (init, seq_p);
1380 	      ggc_free (init);
1381 	    }
1382 	  else
1383 	    /* We must still examine initializers for static variables
1384 	       as they may contain a label address.  */
1385 	    walk_tree (&init, force_labels_r, NULL, NULL);
1386 	}
1387     }
1388 
1389   return GS_ALL_DONE;
1390 }
1391 
1392 /* Gimplify a LOOP_EXPR.  Normally this just involves gimplifying the body
1393    and replacing the LOOP_EXPR with goto, but if the loop contains an
1394    EXIT_EXPR, we need to append a label for it to jump to.  */
1395 
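/* Roughly, LOOP_EXPR <body> lowers to

     start_label:
       body
       goto start_label;
     exit_label:

   where exit_label is emitted only if the body contained an EXIT_EXPR.  */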
1396 static enum gimplify_status
1397 gimplify_loop_expr (tree *expr_p, gimple_seq *pre_p)
1398 {
1399   tree saved_label = gimplify_ctxp->exit_label;
1400   tree start_label = create_artificial_label (UNKNOWN_LOCATION);
1401 
1402   gimplify_seq_add_stmt (pre_p, gimple_build_label (start_label));
1403 
1404   gimplify_ctxp->exit_label = NULL_TREE;
1405 
1406   gimplify_and_add (LOOP_EXPR_BODY (*expr_p), pre_p);
1407 
1408   gimplify_seq_add_stmt (pre_p, gimple_build_goto (start_label));
1409 
1410   if (gimplify_ctxp->exit_label)
1411     gimplify_seq_add_stmt (pre_p,
1412 			   gimple_build_label (gimplify_ctxp->exit_label));
1413 
1414   gimplify_ctxp->exit_label = saved_label;
1415 
1416   *expr_p = NULL;
1417   return GS_ALL_DONE;
1418 }
1419 
1420 /* Gimplify a statement list onto a sequence.  These may be created either
1421    by an enlightened front-end, or by shortcut_cond_expr.  */
1422 
1423 static enum gimplify_status
1424 gimplify_statement_list (tree *expr_p, gimple_seq *pre_p)
1425 {
1426   tree temp = voidify_wrapper_expr (*expr_p, NULL);
1427 
1428   tree_stmt_iterator i = tsi_start (*expr_p);
1429 
1430   while (!tsi_end_p (i))
1431     {
1432       gimplify_stmt (tsi_stmt_ptr (i), pre_p);
1433       tsi_delink (&i);
1434     }
1435 
1436   if (temp)
1437     {
1438       *expr_p = temp;
1439       return GS_OK;
1440     }
1441 
1442   return GS_ALL_DONE;
1443 }
1444 
1445 
1446 /* Gimplify a SWITCH_EXPR, and collect the vector of labels it can
1447    branch to.  */
1448 
1449 static enum gimplify_status
1450 gimplify_switch_expr (tree *expr_p, gimple_seq *pre_p)
1451 {
1452   tree switch_expr = *expr_p;
1453   gimple_seq switch_body_seq = NULL;
1454   enum gimplify_status ret;
1455   tree index_type = TREE_TYPE (switch_expr);
1456   if (index_type == NULL_TREE)
1457     index_type = TREE_TYPE (SWITCH_COND (switch_expr));
1458 
1459   ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL, is_gimple_val,
1460                        fb_rvalue);
1461   if (ret == GS_ERROR || ret == GS_UNHANDLED)
1462     return ret;
1463 
1464   if (SWITCH_BODY (switch_expr))
1465     {
1466       vec<tree> labels;
1467       vec<tree> saved_labels;
1468       tree default_case = NULL_TREE;
1469       gimple gimple_switch;
1470 
1471       /* If someone can be bothered to fill in the labels, they can
1472 	 be bothered to null out the body too.  */
1473       gcc_assert (!SWITCH_LABELS (switch_expr));
1474 
1475       /* Save old labels, get new ones from body, then restore the old
1476          labels.  Save all the things from the switch body to append after.  */
1477       saved_labels = gimplify_ctxp->case_labels;
1478       gimplify_ctxp->case_labels.create (8);
1479 
1480       gimplify_stmt (&SWITCH_BODY (switch_expr), &switch_body_seq);
1481       labels = gimplify_ctxp->case_labels;
1482       gimplify_ctxp->case_labels = saved_labels;
1483 
1484       preprocess_case_label_vec_for_gimple (labels, index_type,
1485 					    &default_case);
1486 
1487       if (!default_case)
1488 	{
1489 	  gimple new_default;
1490 
1491 	  default_case
1492 	    = build_case_label (NULL_TREE, NULL_TREE,
1493 				create_artificial_label (UNKNOWN_LOCATION));
1494 	  new_default = gimple_build_label (CASE_LABEL (default_case));
1495 	  gimplify_seq_add_stmt (&switch_body_seq, new_default);
1496 	}
1497 
1498       gimple_switch = gimple_build_switch (SWITCH_COND (switch_expr),
1499 					   default_case, labels);
1500       gimplify_seq_add_stmt (pre_p, gimple_switch);
1501       gimplify_seq_add_seq (pre_p, switch_body_seq);
1502       labels.release ();
1503     }
1504   else
1505     gcc_assert (SWITCH_LABELS (switch_expr));
1506 
1507   return GS_ALL_DONE;
1508 }
1509 
1510 /* Gimplify the CASE_LABEL_EXPR pointed to by EXPR_P.  */
1511 
1512 static enum gimplify_status
1513 gimplify_case_label_expr (tree *expr_p, gimple_seq *pre_p)
1514 {
1515   struct gimplify_ctx *ctxp;
1516   gimple gimple_label;
1517 
1518   /* Invalid OpenMP programs can play Duff's Device type games with
1519      #pragma omp parallel.  At least in the C front end, we don't
1520      detect such invalid branches until after gimplification.  */
1521   for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context)
1522     if (ctxp->case_labels.exists ())
1523       break;
1524 
1525   gimple_label = gimple_build_label (CASE_LABEL (*expr_p));
1526   ctxp->case_labels.safe_push (*expr_p);
1527   gimplify_seq_add_stmt (pre_p, gimple_label);
1528 
1529   return GS_ALL_DONE;
1530 }
1531 
1532 /* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
1533    if necessary.  */
1534 
1535 tree
1536 build_and_jump (tree *label_p)
1537 {
1538   if (label_p == NULL)
1539     /* If there's nowhere to jump, just fall through.  */
1540     return NULL_TREE;
1541 
1542   if (*label_p == NULL_TREE)
1543     {
1544       tree label = create_artificial_label (UNKNOWN_LOCATION);
1545       *label_p = label;
1546     }
1547 
1548   return build1 (GOTO_EXPR, void_type_node, *label_p);
1549 }
1550 
1551 /* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
1552    This also involves building a label to jump to and communicating it to
1553    gimplify_loop_expr through gimplify_ctxp->exit_label.  */
1554 
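/* That is, EXIT_EXPR <cond> becomes "if (cond) goto exit_label;", with
   exit_label created on demand and later emitted by gimplify_loop_expr.  */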
1555 static enum gimplify_status
1556 gimplify_exit_expr (tree *expr_p)
1557 {
1558   tree cond = TREE_OPERAND (*expr_p, 0);
1559   tree expr;
1560 
1561   expr = build_and_jump (&gimplify_ctxp->exit_label);
1562   expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE);
1563   *expr_p = expr;
1564 
1565   return GS_OK;
1566 }
1567 
1568 /* *EXPR_P is a COMPONENT_REF being used as an rvalue.  If its type is
1569    different from its canonical type, wrap the whole thing inside a
1570    NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
1571    type.
1572 
1573    The canonical type of a COMPONENT_REF is the type of the field being
1574    referenced--unless the field is a bit-field which can be read directly
1575    in a smaller mode, in which case the canonical type is the
1576    sign-appropriate type corresponding to that mode.  */
1577 
1578 static void
1579 canonicalize_component_ref (tree *expr_p)
1580 {
1581   tree expr = *expr_p;
1582   tree type;
1583 
1584   gcc_assert (TREE_CODE (expr) == COMPONENT_REF);
1585 
1586   if (INTEGRAL_TYPE_P (TREE_TYPE (expr)))
1587     type = TREE_TYPE (get_unwidened (expr, NULL_TREE));
1588   else
1589     type = TREE_TYPE (TREE_OPERAND (expr, 1));
1590 
1591   /* One could argue that all the stuff below is not necessary for
1592      the non-bitfield case and declare it a FE error if type
1593      adjustment would be needed.  */
1594   if (TREE_TYPE (expr) != type)
1595     {
1596 #ifdef ENABLE_TYPES_CHECKING
1597       tree old_type = TREE_TYPE (expr);
1598 #endif
1599       int type_quals;
1600 
1601       /* We need to preserve qualifiers and propagate them from
1602 	 operand 0.  */
1603       type_quals = TYPE_QUALS (type)
1604 	| TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr, 0)));
1605       if (TYPE_QUALS (type) != type_quals)
1606 	type = build_qualified_type (TYPE_MAIN_VARIANT (type), type_quals);
1607 
1608       /* Set the type of the COMPONENT_REF to the underlying type.  */
1609       TREE_TYPE (expr) = type;
1610 
1611 #ifdef ENABLE_TYPES_CHECKING
1612       /* It is now a FE error, if the conversion from the canonical
1613 	 type to the original expression type is not useless.  */
1614       gcc_assert (useless_type_conversion_p (old_type, type));
1615 #endif
1616     }
1617 }
1618 
1619 /* If a NOP conversion is changing a pointer to array of foo to a pointer
1620    to foo, embed that change in the ADDR_EXPR by converting
1621       T array[U];
1622       (T *)&array
1623    ==>
1624       &array[L]
1625    where L is the lower bound.  For simplicity, only do this for constant
1626    lower bound.
1627    The constraint is that the type of &array[L] is trivially convertible
1628    to T *.  */
1629 
1630 static void
1631 canonicalize_addr_expr (tree *expr_p)
1632 {
1633   tree expr = *expr_p;
1634   tree addr_expr = TREE_OPERAND (expr, 0);
1635   tree datype, ddatype, pddatype;
1636 
1637   /* We simplify only conversions from an ADDR_EXPR to a pointer type.  */
1638   if (!POINTER_TYPE_P (TREE_TYPE (expr))
1639       || TREE_CODE (addr_expr) != ADDR_EXPR)
1640     return;
1641 
1642   /* The addr_expr type should be a pointer to an array.  */
1643   datype = TREE_TYPE (TREE_TYPE (addr_expr));
1644   if (TREE_CODE (datype) != ARRAY_TYPE)
1645     return;
1646 
1647   /* The pointer to element type shall be trivially convertible to
1648      the expression pointer type.  */
1649   ddatype = TREE_TYPE (datype);
1650   pddatype = build_pointer_type (ddatype);
1651   if (!useless_type_conversion_p (TYPE_MAIN_VARIANT (TREE_TYPE (expr)),
1652 				  pddatype))
1653     return;
1654 
1655   /* The lower bound and element sizes must be constant.  */
1656   if (!TYPE_SIZE_UNIT (ddatype)
1657       || TREE_CODE (TYPE_SIZE_UNIT (ddatype)) != INTEGER_CST
1658       || !TYPE_DOMAIN (datype) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype))
1659       || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype))) != INTEGER_CST)
1660     return;
1661 
1662   /* All checks succeeded.  Build a new node to merge the cast.  */
1663   *expr_p = build4 (ARRAY_REF, ddatype, TREE_OPERAND (addr_expr, 0),
1664 		    TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
1665 		    NULL_TREE, NULL_TREE);
1666   *expr_p = build1 (ADDR_EXPR, pddatype, *expr_p);
1667 
1668   /* We may have stripped a required restrict qualifier above.  */
1669   if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
1670     *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
1671 }
1672 
1673 /* *EXPR_P is a NOP_EXPR or CONVERT_EXPR.  Remove it and/or other conversions
1674    underneath as appropriate.  */
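/* As a rough illustration: a useless chain of conversions such as

       (int) (int) x            with x of type int

   is reduced to plain x: the inner conversion is stripped first, and
   the remaining outer one is removed because it is a useless type
   conversion.  Conversions that survive may still be canonicalized
   below (COMPONENT_REF and ADDR_EXPR operands) or turned into a
   VIEW_CONVERT_EXPR for non-register types.  */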
1675 
1676 static enum gimplify_status
1677 gimplify_conversion (tree *expr_p)
1678 {
1679   location_t loc = EXPR_LOCATION (*expr_p);
1680   gcc_assert (CONVERT_EXPR_P (*expr_p));
1681 
1682   /* Then strip away all but the outermost conversion.  */
1683   STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p, 0));
1684 
1685   /* And remove the outermost conversion if it's useless.  */
1686   if (tree_ssa_useless_type_conversion (*expr_p))
1687     *expr_p = TREE_OPERAND (*expr_p, 0);
1688 
1689   /* If we still have a conversion at the toplevel,
1690      then canonicalize some constructs.  */
1691   if (CONVERT_EXPR_P (*expr_p))
1692     {
1693       tree sub = TREE_OPERAND (*expr_p, 0);
1694 
1695       /* If a NOP conversion is changing the type of a COMPONENT_REF
1696 	 expression, then canonicalize its type now in order to expose more
1697 	 redundant conversions.  */
1698       if (TREE_CODE (sub) == COMPONENT_REF)
1699 	canonicalize_component_ref (&TREE_OPERAND (*expr_p, 0));
1700 
1701       /* If a NOP conversion is changing a pointer to array of foo
1702 	 to a pointer to foo, embed that change in the ADDR_EXPR.  */
1703       else if (TREE_CODE (sub) == ADDR_EXPR)
1704 	canonicalize_addr_expr (expr_p);
1705     }
1706 
1707   /* If we have a conversion to a non-register type force the
1708      use of a VIEW_CONVERT_EXPR instead.  */
1709   if (CONVERT_EXPR_P (*expr_p) && !is_gimple_reg_type (TREE_TYPE (*expr_p)))
1710     *expr_p = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (*expr_p),
1711 			       TREE_OPERAND (*expr_p, 0));
1712 
1713   return GS_OK;
1714 }
1715 
1716 /* Nonlocal VLAs seen in the current function.  */
1717 static struct pointer_set_t *nonlocal_vlas;
1718 
1719 /* The VAR_DECLs created for nonlocal VLAs for debug info purposes.  */
1720 static tree nonlocal_vla_vars;
1721 
1722 /* Gimplify a VAR_DECL or PARM_DECL.  Return GS_OK if we expanded a
1723    DECL_VALUE_EXPR, and it's worth re-examining things.  */
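/* As a rough illustration: a C99 VLA

       void f (int n) { char a[n]; ... a[0] ... }

   has its VAR_DECL given a DECL_VALUE_EXPR of the form *<pointer temp>
   earlier in gimplification, so each use of `a' below is replaced by an
   unshared copy of that dereference.  The nonlocal-VLA bookkeeping only
   applies when the VLA belongs to an enclosing function (nested
   functions).  */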
1724 
1725 static enum gimplify_status
1726 gimplify_var_or_parm_decl (tree *expr_p)
1727 {
1728   tree decl = *expr_p;
1729 
1730   /* ??? If this is a local variable, and it has not been seen in any
1731      outer BIND_EXPR, then it's probably the result of a duplicate
1732      declaration, for which we've already issued an error.  It would
1733      be really nice if the front end wouldn't leak these at all.
1734      Currently the only known culprit is C++ destructors, as seen
1735      in g++.old-deja/g++.jason/binding.C.  */
1736   if (TREE_CODE (decl) == VAR_DECL
1737       && !DECL_SEEN_IN_BIND_EXPR_P (decl)
1738       && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
1739       && decl_function_context (decl) == current_function_decl)
1740     {
1741       gcc_assert (seen_error ());
1742       return GS_ERROR;
1743     }
1744 
1745   /* When within an OpenMP context, notice uses of variables.  */
1746   if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true))
1747     return GS_ALL_DONE;
1748 
1749   /* If the decl is an alias for another expression, substitute it now.  */
1750   if (DECL_HAS_VALUE_EXPR_P (decl))
1751     {
1752       tree value_expr = DECL_VALUE_EXPR (decl);
1753 
1754       /* For referenced nonlocal VLAs add a decl for debugging purposes
1755 	 to the current function.  */
1756       if (TREE_CODE (decl) == VAR_DECL
1757 	  && TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
1758 	  && nonlocal_vlas != NULL
1759 	  && TREE_CODE (value_expr) == INDIRECT_REF
1760 	  && TREE_CODE (TREE_OPERAND (value_expr, 0)) == VAR_DECL
1761 	  && decl_function_context (decl) != current_function_decl)
1762 	{
1763 	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
1764 	  while (ctx
1765 		 && (ctx->region_type == ORT_WORKSHARE
1766 		     || ctx->region_type == ORT_SIMD))
1767 	    ctx = ctx->outer_context;
1768 	  if (!ctx && !pointer_set_insert (nonlocal_vlas, decl))
1769 	    {
1770 	      tree copy = copy_node (decl);
1771 
1772 	      lang_hooks.dup_lang_specific_decl (copy);
1773 	      SET_DECL_RTL (copy, 0);
1774 	      TREE_USED (copy) = 1;
1775 	      DECL_CHAIN (copy) = nonlocal_vla_vars;
1776 	      nonlocal_vla_vars = copy;
1777 	      SET_DECL_VALUE_EXPR (copy, unshare_expr (value_expr));
1778 	      DECL_HAS_VALUE_EXPR_P (copy) = 1;
1779 	    }
1780 	}
1781 
1782       *expr_p = unshare_expr (value_expr);
1783       return GS_OK;
1784     }
1785 
1786   return GS_ALL_DONE;
1787 }
1788 
1789 /* Recalculate the value of the TREE_SIDE_EFFECTS flag for T.  */
1790 
1791 static void
1792 recalculate_side_effects (tree t)
1793 {
1794   enum tree_code code = TREE_CODE (t);
1795   int len = TREE_OPERAND_LENGTH (t);
1796   int i;
1797 
1798   switch (TREE_CODE_CLASS (code))
1799     {
1800     case tcc_expression:
1801       switch (code)
1802 	{
1803 	case INIT_EXPR:
1804 	case MODIFY_EXPR:
1805 	case VA_ARG_EXPR:
1806 	case PREDECREMENT_EXPR:
1807 	case PREINCREMENT_EXPR:
1808 	case POSTDECREMENT_EXPR:
1809 	case POSTINCREMENT_EXPR:
1810 	  /* All of these have side-effects, no matter what their
1811 	     operands are.  */
1812 	  return;
1813 
1814 	default:
1815 	  break;
1816 	}
1817       /* Fall through.  */
1818 
1819     case tcc_comparison:  /* a comparison expression */
1820     case tcc_unary:       /* a unary arithmetic expression */
1821     case tcc_binary:      /* a binary arithmetic expression */
1822     case tcc_reference:   /* a reference */
1823     case tcc_vl_exp:        /* a function call */
1824       TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t);
1825       for (i = 0; i < len; ++i)
1826 	{
1827 	  tree op = TREE_OPERAND (t, i);
1828 	  if (op && TREE_SIDE_EFFECTS (op))
1829 	    TREE_SIDE_EFFECTS (t) = 1;
1830 	}
1831       break;
1832 
1833     case tcc_constant:
1834       /* No side-effects.  */
1835       return;
1836 
1837     default:
1838       gcc_unreachable ();
1839    }
1840 }
1841 
1842 /* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
1843    node *EXPR_P.
1844 
1845       compound_lval
1846 	      : min_lval '[' val ']'
1847 	      | min_lval '.' ID
1848 	      | compound_lval '[' val ']'
1849 	      | compound_lval '.' ID
1850 
1851    This is not part of the original SIMPLE definition, which separates
1852    array and member references, but it seems reasonable to handle them
1853    together.  Also, this way we don't run into problems with union
1854    aliasing; gcc requires that for accesses through a union to alias, the
1855    union reference must be explicit, which was not always the case when we
1856    were splitting up array and member refs.
1857 
1858    PRE_P points to the sequence where side effects that must happen before
1859      *EXPR_P should be stored.
1860 
1861    POST_P points to the sequence where side effects that must happen after
1862      *EXPR_P should be stored.  */
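/* As a rough illustration: for an access such as

       s.a[i + j]

   the stack built below holds the ARRAY_REF and the COMPONENT_REF.
   Variable bounds and sizes are handled first, then the base `s' is
   gimplified as an lvalue, and finally the index is reduced to a
   simple value, giving something like

       D.1 = i + j;
       ... s.a[D.1] ...  */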
1863 
1864 static enum gimplify_status
1865 gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
1866 			fallback_t fallback)
1867 {
1868   tree *p;
1869   enum gimplify_status ret = GS_ALL_DONE, tret;
1870   int i;
1871   location_t loc = EXPR_LOCATION (*expr_p);
1872   tree expr = *expr_p;
1873 
1874   /* Create a stack of the subexpressions so later we can walk them in
1875      order from inner to outer.  */
1876   auto_vec<tree, 10> expr_stack;
1877 
1878   /* We can handle anything that get_inner_reference can deal with.  */
1879   for (p = expr_p; ; p = &TREE_OPERAND (*p, 0))
1880     {
1881     restart:
1882       /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs.  */
1883       if (TREE_CODE (*p) == INDIRECT_REF)
1884 	*p = fold_indirect_ref_loc (loc, *p);
1885 
1886       if (handled_component_p (*p))
1887 	;
1888       /* Expand DECL_VALUE_EXPR now.  In some cases that may expose
1889 	 additional COMPONENT_REFs.  */
1890       else if ((TREE_CODE (*p) == VAR_DECL || TREE_CODE (*p) == PARM_DECL)
1891 	       && gimplify_var_or_parm_decl (p) == GS_OK)
1892 	goto restart;
1893       else
1894 	break;
1895 
1896       expr_stack.safe_push (*p);
1897     }
1898 
1899   gcc_assert (expr_stack.length ());
1900 
1901   /* Now EXPR_STACK is a stack of pointers to all the refs we've
1902      walked through and P points to the innermost expression.
1903 
1904      Java requires that we elaborate nodes in source order.  That
1905      means we must gimplify the inner expression followed by each of
1906      the indices, in order.  But we can't gimplify the inner
1907      expression until we deal with any variable bounds, sizes, or
1908      positions in order to deal with PLACEHOLDER_EXPRs.
1909 
1910      So we do this in three steps.  First we deal with the annotations
1911      for any variables in the components, then we gimplify the base,
1912      then we gimplify any indices, from left to right.  */
1913   for (i = expr_stack.length () - 1; i >= 0; i--)
1914     {
1915       tree t = expr_stack[i];
1916 
1917       if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
1918 	{
1919 	  /* Gimplify the low bound and element type size and put them into
1920 	     the ARRAY_REF.  If these values are set, they have already been
1921 	     gimplified.  */
1922 	  if (TREE_OPERAND (t, 2) == NULL_TREE)
1923 	    {
1924 	      tree low = unshare_expr (array_ref_low_bound (t));
1925 	      if (!is_gimple_min_invariant (low))
1926 		{
1927 		  TREE_OPERAND (t, 2) = low;
1928 		  tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
1929 					post_p, is_gimple_reg,
1930 					fb_rvalue);
1931 		  ret = MIN (ret, tret);
1932 		}
1933 	    }
1934 	  else
1935 	    {
1936 	      tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
1937 				    is_gimple_reg, fb_rvalue);
1938 	      ret = MIN (ret, tret);
1939 	    }
1940 
1941 	  if (TREE_OPERAND (t, 3) == NULL_TREE)
1942 	    {
1943 	      tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0)));
1944 	      tree elmt_size = unshare_expr (array_ref_element_size (t));
1945 	      tree factor = size_int (TYPE_ALIGN_UNIT (elmt_type));
1946 
1947 	      /* Divide the element size by the alignment of the element
1948 		 type (above).  */
1949 	      elmt_size
1950 		= size_binop_loc (loc, EXACT_DIV_EXPR, elmt_size, factor);
1951 
1952 	      if (!is_gimple_min_invariant (elmt_size))
1953 		{
1954 		  TREE_OPERAND (t, 3) = elmt_size;
1955 		  tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p,
1956 					post_p, is_gimple_reg,
1957 					fb_rvalue);
1958 		  ret = MIN (ret, tret);
1959 		}
1960 	    }
1961 	  else
1962 	    {
1963 	      tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p, post_p,
1964 				    is_gimple_reg, fb_rvalue);
1965 	      ret = MIN (ret, tret);
1966 	    }
1967 	}
1968       else if (TREE_CODE (t) == COMPONENT_REF)
1969 	{
1970 	  /* Set the field offset into T and gimplify it.  */
1971 	  if (TREE_OPERAND (t, 2) == NULL_TREE)
1972 	    {
1973 	      tree offset = unshare_expr (component_ref_field_offset (t));
1974 	      tree field = TREE_OPERAND (t, 1);
1975 	      tree factor
1976 		= size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT);
1977 
1978 	      /* Divide the offset by its alignment.  */
1979 	      offset = size_binop_loc (loc, EXACT_DIV_EXPR, offset, factor);
1980 
1981 	      if (!is_gimple_min_invariant (offset))
1982 		{
1983 		  TREE_OPERAND (t, 2) = offset;
1984 		  tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
1985 					post_p, is_gimple_reg,
1986 					fb_rvalue);
1987 		  ret = MIN (ret, tret);
1988 		}
1989 	    }
1990 	  else
1991 	    {
1992 	      tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
1993 				    is_gimple_reg, fb_rvalue);
1994 	      ret = MIN (ret, tret);
1995 	    }
1996 	}
1997     }
1998 
1999   /* Step 2 is to gimplify the base expression.  Make sure lvalue is set
2000      so as to match the min_lval predicate.  Failure to do so may result
2001      in the creation of large aggregate temporaries.  */
2002   tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval,
2003 			fallback | fb_lvalue);
2004   ret = MIN (ret, tret);
2005 
2006   /* And finally, the indices and operands of ARRAY_REF.  During this
2007      loop we also remove any useless conversions.  */
2008   for (; expr_stack.length () > 0; )
2009     {
2010       tree t = expr_stack.pop ();
2011 
2012       if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
2013 	{
2014 	  /* Gimplify the dimension.  */
2015 	  if (!is_gimple_min_invariant (TREE_OPERAND (t, 1)))
2016 	    {
2017 	      tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
2018 				    is_gimple_val, fb_rvalue);
2019 	      ret = MIN (ret, tret);
2020 	    }
2021 	}
2022 
2023       STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t, 0));
2024 
2025       /* The innermost expression P may have originally had
2026 	 TREE_SIDE_EFFECTS set which would have caused all the outer
2027 	 expressions in *EXPR_P leading to P to also have had
2028 	 TREE_SIDE_EFFECTS set.  */
2029       recalculate_side_effects (t);
2030     }
2031 
2032   /* If the outermost expression is a COMPONENT_REF, canonicalize its type.  */
2033   if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF)
2034     {
2035       canonicalize_component_ref (expr_p);
2036     }
2037 
2038   expr_stack.release ();
2039 
2040   gcc_assert (*expr_p == expr || ret != GS_ALL_DONE);
2041 
2042   return ret;
2043 }
2044 
2045 /*  Gimplify the self modifying expression pointed to by EXPR_P
2046     (++, --, +=, -=).
2047 
2048     PRE_P points to the list where side effects that must happen before
2049 	*EXPR_P should be stored.
2050 
2051     POST_P points to the list where side effects that must happen after
2052 	*EXPR_P should be stored.
2053 
2054     WANT_VALUE is nonzero iff we want to use the value of this expression
2055 	in another expression.
2056 
2057     ARITH_TYPE is the type the computation should be performed in.  */
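/* As a rough illustration: for the postfix use

       x = b++;

   WANT_VALUE is true, so the code below produces approximately

       b.0 = b;
       b = b.0 + 1;
       x = b.0;

   whereas a prefix `++b' (or a postfix increment whose value is unused)
   simply becomes `b = b + 1'.  */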
2058 
2059 enum gimplify_status
2060 gimplify_self_mod_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
2061 			bool want_value, tree arith_type)
2062 {
2063   enum tree_code code;
2064   tree lhs, lvalue, rhs, t1;
2065   gimple_seq post = NULL, *orig_post_p = post_p;
2066   bool postfix;
2067   enum tree_code arith_code;
2068   enum gimplify_status ret;
2069   location_t loc = EXPR_LOCATION (*expr_p);
2070 
2071   code = TREE_CODE (*expr_p);
2072 
2073   gcc_assert (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR
2074 	      || code == PREINCREMENT_EXPR || code == PREDECREMENT_EXPR);
2075 
2076   /* Prefix or postfix?  */
2077   if (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR)
2078     /* Faster to treat as prefix if result is not used.  */
2079     postfix = want_value;
2080   else
2081     postfix = false;
2082 
2083   /* For postfix, make sure the inner expression's post side effects
2084      are executed after side effects from this expression.  */
2085   if (postfix)
2086     post_p = &post;
2087 
2088   /* Add or subtract?  */
2089   if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
2090     arith_code = PLUS_EXPR;
2091   else
2092     arith_code = MINUS_EXPR;
2093 
2094   /* Gimplify the LHS into a GIMPLE lvalue.  */
2095   lvalue = TREE_OPERAND (*expr_p, 0);
2096   ret = gimplify_expr (&lvalue, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
2097   if (ret == GS_ERROR)
2098     return ret;
2099 
2100   /* Extract the operands to the arithmetic operation.  */
2101   lhs = lvalue;
2102   rhs = TREE_OPERAND (*expr_p, 1);
2103 
2104   /* For a postfix operator, we evaluate the LHS to an rvalue and then use
2105      that as the result value and in the postqueue operation.  */
2106   if (postfix)
2107     {
2108       ret = gimplify_expr (&lhs, pre_p, post_p, is_gimple_val, fb_rvalue);
2109       if (ret == GS_ERROR)
2110 	return ret;
2111 
2112       lhs = get_initialized_tmp_var (lhs, pre_p, NULL);
2113     }
2114 
2115   /* For pointer increment and decrement, use POINTER_PLUS_EXPR.  */
2116   if (POINTER_TYPE_P (TREE_TYPE (lhs)))
2117     {
2118       rhs = convert_to_ptrofftype_loc (loc, rhs);
2119       if (arith_code == MINUS_EXPR)
2120 	rhs = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (rhs), rhs);
2121       t1 = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (*expr_p), lhs, rhs);
2122     }
2123   else
2124     t1 = fold_convert (TREE_TYPE (*expr_p),
2125 		       fold_build2 (arith_code, arith_type,
2126 				    fold_convert (arith_type, lhs),
2127 				    fold_convert (arith_type, rhs)));
2128 
2129   if (postfix)
2130     {
2131       gimplify_assign (lvalue, t1, pre_p);
2132       gimplify_seq_add_seq (orig_post_p, post);
2133       *expr_p = lhs;
2134       return GS_ALL_DONE;
2135     }
2136   else
2137     {
2138       *expr_p = build2 (MODIFY_EXPR, TREE_TYPE (lvalue), lvalue, t1);
2139       return GS_OK;
2140     }
2141 }
2142 
2143 /* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR.  */
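/* As a rough illustration: when an object of variable-sized type is
   used where its size must be remembered, e.g.

       void g (int n) { struct S { char c[n]; } s; f (s); }

   the argument is wrapped as WITH_SIZE_EXPR <s, SIZE>, where SIZE is
   the (non-constant) TYPE_SIZE_UNIT of the type with any
   PLACEHOLDER_EXPRs substituted.  */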
2144 
2145 static void
2146 maybe_with_size_expr (tree *expr_p)
2147 {
2148   tree expr = *expr_p;
2149   tree type = TREE_TYPE (expr);
2150   tree size;
2151 
2152   /* If we've already wrapped this or the type is error_mark_node, we can't do
2153      anything.  */
2154   if (TREE_CODE (expr) == WITH_SIZE_EXPR
2155       || type == error_mark_node)
2156     return;
2157 
2158   /* If the size isn't known or is a constant, we have nothing to do.  */
2159   size = TYPE_SIZE_UNIT (type);
2160   if (!size || TREE_CODE (size) == INTEGER_CST)
2161     return;
2162 
2163   /* Otherwise, make a WITH_SIZE_EXPR.  */
2164   size = unshare_expr (size);
2165   size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, expr);
2166   *expr_p = build2 (WITH_SIZE_EXPR, type, expr, size);
2167 }
2168 
2169 /* Helper for gimplify_call_expr.  Gimplify a single argument *ARG_P.
2170    Store any side-effects in PRE_P.  CALL_LOCATION is the location of
2171    the CALL_EXPR.  */
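/* As a rough illustration: in a call

       struct big { int a[100]; } b;
       f (b, i + 1);

   the aggregate `b' may be passed as an lvalue directly, avoiding a
   copy into a temporary, while the register-typed `i + 1' is evaluated
   into a temporary which then becomes the argument.  */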
2172 
2173 enum gimplify_status
2174 gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location)
2175 {
2176   bool (*test) (tree);
2177   fallback_t fb;
2178 
2179   /* In general, we allow lvalues for function arguments to avoid
2180      extra overhead of copying large aggregates out of even larger
2181      aggregates into temporaries only to copy the temporaries to
2182      the argument list.  Make optimizers happy by pulling out to
2183      temporaries those types that fit in registers.  */
2184   if (is_gimple_reg_type (TREE_TYPE (*arg_p)))
2185     test = is_gimple_val, fb = fb_rvalue;
2186   else
2187     {
2188       test = is_gimple_lvalue, fb = fb_either;
2189       /* Also strip a TARGET_EXPR that would force an extra copy.  */
2190       if (TREE_CODE (*arg_p) == TARGET_EXPR)
2191 	{
2192 	  tree init = TARGET_EXPR_INITIAL (*arg_p);
2193 	  if (init
2194 	      && !VOID_TYPE_P (TREE_TYPE (init)))
2195 	    *arg_p = init;
2196 	}
2197     }
2198 
2199   /* If this is a variable sized type, we must remember the size.  */
2200   maybe_with_size_expr (arg_p);
2201 
2202   /* FIXME diagnostics: This will mess up gcc.dg/Warray-bounds.c.  */
2203   /* Make sure arguments have the same location as the function call
2204      itself.  */
2205   protected_set_expr_location (*arg_p, call_location);
2206 
2207   /* There is a sequence point before a function call.  Side effects in
2208      the argument list must occur before the actual call. So, when
2209      gimplifying arguments, force gimplify_expr to use an internal
2210      post queue which is then appended to the end of PRE_P.  */
2211   return gimplify_expr (arg_p, pre_p, NULL, test, fb);
2212 }
2213 
2214 /* Don't fold STMT inside ORT_TARGET, because it can break code by adding decl
2215    references that weren't in the source.  We'll do it during the omplower pass
2216    instead.  */
2217 
2218 static bool
2219 maybe_fold_stmt (gimple_stmt_iterator *gsi)
2220 {
2221   struct gimplify_omp_ctx *ctx;
2222   for (ctx = gimplify_omp_ctxp; ctx; ctx = ctx->outer_context)
2223     if (ctx->region_type == ORT_TARGET)
2224       return false;
2225   return fold_stmt (gsi);
2226 }
2227 
2228 /* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P.
2229    WANT_VALUE is true if the result of the call is desired.  */
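/* As a rough illustration of the pre-gimplification folding mentioned
   below: a call such as

       printf ("hello\n");

   may be replaced by the cheaper

       puts ("hello");

   by fold_call_expr, both before the operands are gimplified and again
   afterwards.  */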
2230 
2231 static enum gimplify_status
2232 gimplify_call_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
2233 {
2234   tree fndecl, parms, p, fnptrtype;
2235   enum gimplify_status ret;
2236   int i, nargs;
2237   gimple call;
2238   bool builtin_va_start_p = false;
2239   location_t loc = EXPR_LOCATION (*expr_p);
2240 
2241   gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR);
2242 
2243   /* For reliable diagnostics during inlining, it is necessary that
2244      every call_expr be annotated with file and line.  */
2245   if (! EXPR_HAS_LOCATION (*expr_p))
2246     SET_EXPR_LOCATION (*expr_p, input_location);
2247 
2248   /* This may be a call to a builtin function.
2249 
2250      Builtin function calls may be transformed into different
2251      (and more efficient) builtin function calls under certain
2252      circumstances.  Unfortunately, gimplification can muck things
2253      up enough that the builtin expanders are not aware that certain
2254      transformations are still valid.
2255 
2256      So we attempt transformation/gimplification of the call before
2257      we gimplify the CALL_EXPR.  At this time we do not manage to
2258      transform all calls in the same manner as the expanders do, but
2259      we do transform most of them.  */
2260   fndecl = get_callee_fndecl (*expr_p);
2261   if (fndecl
2262       && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
2263     switch (DECL_FUNCTION_CODE (fndecl))
2264       {
2265       case BUILT_IN_VA_START:
2266         {
2267 	  builtin_va_start_p = TRUE;
2268 	  if (call_expr_nargs (*expr_p) < 2)
2269 	    {
2270 	      error ("too few arguments to function %<va_start%>");
2271 	      *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
2272 	      return GS_OK;
2273 	    }
2274 
2275 	  if (fold_builtin_next_arg (*expr_p, true))
2276 	    {
2277 	      *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
2278 	      return GS_OK;
2279 	    }
2280 	  break;
2281 	}
2282       case BUILT_IN_LINE:
2283 	{
2284 	  expanded_location loc = expand_location (EXPR_LOCATION (*expr_p));
2285 	  *expr_p = build_int_cst (TREE_TYPE (*expr_p), loc.line);
2286 	  return GS_OK;
2287 	}
2288       case BUILT_IN_FILE:
2289 	{
2290 	  expanded_location loc = expand_location (EXPR_LOCATION (*expr_p));
2291 	  *expr_p = build_string_literal (strlen (loc.file) + 1, loc.file);
2292 	  return GS_OK;
2293 	}
2294       case BUILT_IN_FUNCTION:
2295 	{
2296 	  const char *function;
2297 	  function = IDENTIFIER_POINTER (DECL_NAME (current_function_decl));
2298 	  *expr_p = build_string_literal (strlen (function) + 1, function);
2299 	  return GS_OK;
2300 	}
2301       default:
2302         ;
2303       }
2304   if (fndecl && DECL_BUILT_IN (fndecl))
2305     {
2306       tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
2307       if (new_tree && new_tree != *expr_p)
2308 	{
2309 	  /* There was a transformation of this call which computes the
2310 	     same value, but in a more efficient way.  Return and try
2311 	     again.  */
2312 	  *expr_p = new_tree;
2313 	  return GS_OK;
2314 	}
2315     }
2316 
2317   /* Remember the original function pointer type.  */
2318   fnptrtype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
2319 
2320   /* There is a sequence point before the call, so any side effects in
2321      the calling expression must occur before the actual call.  Force
2322      gimplify_expr to use an internal post queue.  */
2323   ret = gimplify_expr (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
2324 		       is_gimple_call_addr, fb_rvalue);
2325 
2326   nargs = call_expr_nargs (*expr_p);
2327 
2328   /* Get argument types for verification.  */
2329   fndecl = get_callee_fndecl (*expr_p);
2330   parms = NULL_TREE;
2331   if (fndecl)
2332     parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
2333   else if (POINTER_TYPE_P (TREE_TYPE (CALL_EXPR_FN (*expr_p))))
2334     parms = TYPE_ARG_TYPES (TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (*expr_p))));
2335 
2336   if (fndecl && DECL_ARGUMENTS (fndecl))
2337     p = DECL_ARGUMENTS (fndecl);
2338   else if (parms)
2339     p = parms;
2340   else
2341     p = NULL_TREE;
2342   for (i = 0; i < nargs && p; i++, p = TREE_CHAIN (p))
2343     ;
2344 
2345   /* If the last argument is __builtin_va_arg_pack () and it is not
2346      passed as a named argument, decrease the number of CALL_EXPR
2347      arguments and set instead the CALL_EXPR_VA_ARG_PACK flag.  */
2348   if (!p
2349       && i < nargs
2350       && TREE_CODE (CALL_EXPR_ARG (*expr_p, nargs - 1)) == CALL_EXPR)
2351     {
2352       tree last_arg = CALL_EXPR_ARG (*expr_p, nargs - 1);
2353       tree last_arg_fndecl = get_callee_fndecl (last_arg);
2354 
2355       if (last_arg_fndecl
2356 	  && TREE_CODE (last_arg_fndecl) == FUNCTION_DECL
2357 	  && DECL_BUILT_IN_CLASS (last_arg_fndecl) == BUILT_IN_NORMAL
2358 	  && DECL_FUNCTION_CODE (last_arg_fndecl) == BUILT_IN_VA_ARG_PACK)
2359 	{
2360 	  tree call = *expr_p;
2361 
2362 	  --nargs;
2363 	  *expr_p = build_call_array_loc (loc, TREE_TYPE (call),
2364 					  CALL_EXPR_FN (call),
2365 					  nargs, CALL_EXPR_ARGP (call));
2366 
2367 	  /* Copy all CALL_EXPR flags, location and block, except
2368 	     CALL_EXPR_VA_ARG_PACK flag.  */
2369 	  CALL_EXPR_STATIC_CHAIN (*expr_p) = CALL_EXPR_STATIC_CHAIN (call);
2370 	  CALL_EXPR_TAILCALL (*expr_p) = CALL_EXPR_TAILCALL (call);
2371 	  CALL_EXPR_RETURN_SLOT_OPT (*expr_p)
2372 	    = CALL_EXPR_RETURN_SLOT_OPT (call);
2373 	  CALL_FROM_THUNK_P (*expr_p) = CALL_FROM_THUNK_P (call);
2374 	  SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (call));
2375 
2376 	  /* Set CALL_EXPR_VA_ARG_PACK.  */
2377 	  CALL_EXPR_VA_ARG_PACK (*expr_p) = 1;
2378 	}
2379     }
2380 
2381   /* Finally, gimplify the function arguments.  */
2382   if (nargs > 0)
2383     {
2384       for (i = (PUSH_ARGS_REVERSED ? nargs - 1 : 0);
2385            PUSH_ARGS_REVERSED ? i >= 0 : i < nargs;
2386            PUSH_ARGS_REVERSED ? i-- : i++)
2387         {
2388           enum gimplify_status t;
2389 
2390           /* Avoid gimplifying the second argument to va_start, which needs to
2391              be the plain PARM_DECL.  */
2392           if ((i != 1) || !builtin_va_start_p)
2393             {
2394               t = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
2395 				EXPR_LOCATION (*expr_p));
2396 
2397               if (t == GS_ERROR)
2398                 ret = GS_ERROR;
2399             }
2400         }
2401     }
2402 
2403   /* Verify the function result.  */
2404   if (want_value && fndecl
2405       && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fnptrtype))))
2406     {
2407       error_at (loc, "using result of function returning %<void%>");
2408       ret = GS_ERROR;
2409     }
2410 
2411   /* Try this again in case gimplification exposed something.  */
2412   if (ret != GS_ERROR)
2413     {
2414       tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
2415 
2416       if (new_tree && new_tree != *expr_p)
2417 	{
2418 	  /* There was a transformation of this call which computes the
2419 	     same value, but in a more efficient way.  Return and try
2420 	     again.  */
2421 	  *expr_p = new_tree;
2422 	  return GS_OK;
2423 	}
2424     }
2425   else
2426     {
2427       *expr_p = error_mark_node;
2428       return GS_ERROR;
2429     }
2430 
2431   /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
2432      decl.  This allows us to eliminate redundant or useless
2433      calls to "const" functions.  */
2434   if (TREE_CODE (*expr_p) == CALL_EXPR)
2435     {
2436       int flags = call_expr_flags (*expr_p);
2437       if (flags & (ECF_CONST | ECF_PURE)
2438 	  /* An infinite loop is considered a side effect.  */
2439 	  && !(flags & (ECF_LOOPING_CONST_OR_PURE)))
2440 	TREE_SIDE_EFFECTS (*expr_p) = 0;
2441     }
2442 
2443   /* If the value is not needed by the caller, emit a new GIMPLE_CALL
2444      and clear *EXPR_P.  Otherwise, leave *EXPR_P in its gimplified
2445      form and delegate the creation of a GIMPLE_CALL to
2446      gimplify_modify_expr.  This is always possible because when
2447      WANT_VALUE is true, the caller wants the result of this call into
2448      a temporary, which means that we will emit an INIT_EXPR in
2449      internal_get_tmp_var which will then be handled by
2450      gimplify_modify_expr.  */
2451   if (!want_value)
2452     {
2453       /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we
2454 	 have to do is replicate it as a GIMPLE_CALL tuple.  */
2455       gimple_stmt_iterator gsi;
2456       call = gimple_build_call_from_tree (*expr_p);
2457       gimple_call_set_fntype (call, TREE_TYPE (fnptrtype));
2458       notice_special_calls (call);
2459       gimplify_seq_add_stmt (pre_p, call);
2460       gsi = gsi_last (*pre_p);
2461       maybe_fold_stmt (&gsi);
2462       *expr_p = NULL_TREE;
2463     }
2464   else
2465     /* Remember the original function type.  */
2466     CALL_EXPR_FN (*expr_p) = build1 (NOP_EXPR, fnptrtype,
2467 				     CALL_EXPR_FN (*expr_p));
2468 
2469   return ret;
2470 }
2471 
2472 /* Handle shortcut semantics in the predicate operand of a COND_EXPR by
2473    rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.
2474 
2475    TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
2476    condition is true or false, respectively.  If null, we should generate
2477    our own to skip over the evaluation of this specific expression.
2478 
2479    LOCUS is the source location of the COND_EXPR.
2480 
2481    This function is the tree equivalent of do_jump.
2482 
2483    shortcut_cond_r should only be called by shortcut_cond_expr.  */
2484 
2485 static tree
2486 shortcut_cond_r (tree pred, tree *true_label_p, tree *false_label_p,
2487 		 location_t locus)
2488 {
2489   tree local_label = NULL_TREE;
2490   tree t, expr = NULL;
2491 
2492   /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
2493      retain the shortcut semantics.  Just insert the gotos here;
2494      shortcut_cond_expr will append the real blocks later.  */
2495   if (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
2496     {
2497       location_t new_locus;
2498 
2499       /* Turn if (a && b) into
2500 
2501 	 if (a); else goto no;
2502 	 if (b) goto yes; else goto no;
2503 	 (no:) */
2504 
2505       if (false_label_p == NULL)
2506 	false_label_p = &local_label;
2507 
2508       /* Keep the original source location on the first 'if'.  */
2509       t = shortcut_cond_r (TREE_OPERAND (pred, 0), NULL, false_label_p, locus);
2510       append_to_statement_list (t, &expr);
2511 
2512       /* Set the source location of the && on the second 'if'.  */
2513       new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
2514       t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
2515 			   new_locus);
2516       append_to_statement_list (t, &expr);
2517     }
2518   else if (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
2519     {
2520       location_t new_locus;
2521 
2522       /* Turn if (a || b) into
2523 
2524 	 if (a) goto yes;
2525 	 if (b) goto yes; else goto no;
2526 	 (yes:) */
2527 
2528       if (true_label_p == NULL)
2529 	true_label_p = &local_label;
2530 
2531       /* Keep the original source location on the first 'if'.  */
2532       t = shortcut_cond_r (TREE_OPERAND (pred, 0), true_label_p, NULL, locus);
2533       append_to_statement_list (t, &expr);
2534 
2535       /* Set the source location of the || on the second 'if'.  */
2536       new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
2537       t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
2538 			   new_locus);
2539       append_to_statement_list (t, &expr);
2540     }
2541   else if (TREE_CODE (pred) == COND_EXPR
2542 	   && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 1)))
2543 	   && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 2))))
2544     {
2545       location_t new_locus;
2546 
2547       /* As long as we're messing with gotos, turn if (a ? b : c) into
2548 	 if (a)
2549 	   if (b) goto yes; else goto no;
2550 	 else
2551 	   if (c) goto yes; else goto no;
2552 
2553 	 Don't do this if one of the arms has void type, which can happen
2554 	 in C++ when the arm is throw.  */
2555 
2556       /* Keep the original source location on the first 'if'.  Set the source
2557 	 location of the ? on the second 'if'.  */
2558       new_locus = EXPR_HAS_LOCATION (pred) ? EXPR_LOCATION (pred) : locus;
2559       expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (pred, 0),
2560 		     shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
2561 				      false_label_p, locus),
2562 		     shortcut_cond_r (TREE_OPERAND (pred, 2), true_label_p,
2563 				      false_label_p, new_locus));
2564     }
2565   else
2566     {
2567       expr = build3 (COND_EXPR, void_type_node, pred,
2568 		     build_and_jump (true_label_p),
2569 		     build_and_jump (false_label_p));
2570       SET_EXPR_LOCATION (expr, locus);
2571     }
2572 
2573   if (local_label)
2574     {
2575       t = build1 (LABEL_EXPR, void_type_node, local_label);
2576       append_to_statement_list (t, &expr);
2577     }
2578 
2579   return expr;
2580 }
2581 
2582 /* Given a conditional expression EXPR with short-circuit boolean
2583    predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the
2584    predicate apart into the equivalent sequence of conditionals.  */
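/* As a rough illustration:

       if (a && b) c (); else d ();

   becomes approximately

       if (a) ; else goto no;
       if (b) ; else goto no;
       c ();
       goto end;
       no: d ();
       end: ;

   after which each simple condition is gimplified on its own.  */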
2585 
2586 static tree
2587 shortcut_cond_expr (tree expr)
2588 {
2589   tree pred = TREE_OPERAND (expr, 0);
2590   tree then_ = TREE_OPERAND (expr, 1);
2591   tree else_ = TREE_OPERAND (expr, 2);
2592   tree true_label, false_label, end_label, t;
2593   tree *true_label_p;
2594   tree *false_label_p;
2595   bool emit_end, emit_false, jump_over_else;
2596   bool then_se = then_ && TREE_SIDE_EFFECTS (then_);
2597   bool else_se = else_ && TREE_SIDE_EFFECTS (else_);
2598 
2599   /* First do simple transformations.  */
2600   if (!else_se)
2601     {
2602       /* If there is no 'else', turn
2603 	   if (a && b) then c
2604 	 into
2605 	   if (a) if (b) then c.  */
2606       while (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
2607 	{
2608 	  /* Keep the original source location on the first 'if'.  */
2609 	  location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
2610 	  TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
2611 	  /* Set the source location of the && on the second 'if'.  */
2612 	  if (EXPR_HAS_LOCATION (pred))
2613 	    SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
2614 	  then_ = shortcut_cond_expr (expr);
2615 	  then_se = then_ && TREE_SIDE_EFFECTS (then_);
2616 	  pred = TREE_OPERAND (pred, 0);
2617 	  expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE);
2618 	  SET_EXPR_LOCATION (expr, locus);
2619 	}
2620     }
2621 
2622   if (!then_se)
2623     {
2624       /* If there is no 'then', turn
2625 	   if (a || b); else d
2626 	 into
2627 	   if (a); else if (b); else d.  */
2628       while (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
2629 	{
2630 	  /* Keep the original source location on the first 'if'.  */
2631 	  location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
2632 	  TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
2633 	  /* Set the source location of the || on the second 'if'.  */
2634 	  if (EXPR_HAS_LOCATION (pred))
2635 	    SET_EXPR_LOCATION (expr, EXPR_LOCATION (pred));
2636 	  else_ = shortcut_cond_expr (expr);
2637 	  else_se = else_ && TREE_SIDE_EFFECTS (else_);
2638 	  pred = TREE_OPERAND (pred, 0);
2639 	  expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_);
2640 	  SET_EXPR_LOCATION (expr, locus);
2641 	}
2642     }
2643 
2644   /* If we're done, great.  */
2645   if (TREE_CODE (pred) != TRUTH_ANDIF_EXPR
2646       && TREE_CODE (pred) != TRUTH_ORIF_EXPR)
2647     return expr;
2648 
2649   /* Otherwise we need to mess with gotos.  Change
2650        if (a) c; else d;
2651      to
2652        if (a); else goto no;
2653        c; goto end;
2654        no: d; end:
2655      and recursively gimplify the condition.  */
2656 
2657   true_label = false_label = end_label = NULL_TREE;
2658 
2659   /* If our arms just jump somewhere, hijack those labels so we don't
2660      generate jumps to jumps.  */
2661 
2662   if (then_
2663       && TREE_CODE (then_) == GOTO_EXPR
2664       && TREE_CODE (GOTO_DESTINATION (then_)) == LABEL_DECL)
2665     {
2666       true_label = GOTO_DESTINATION (then_);
2667       then_ = NULL;
2668       then_se = false;
2669     }
2670 
2671   if (else_
2672       && TREE_CODE (else_) == GOTO_EXPR
2673       && TREE_CODE (GOTO_DESTINATION (else_)) == LABEL_DECL)
2674     {
2675       false_label = GOTO_DESTINATION (else_);
2676       else_ = NULL;
2677       else_se = false;
2678     }
2679 
2680   /* If we aren't hijacking a label for the 'then' branch, it falls through.  */
2681   if (true_label)
2682     true_label_p = &true_label;
2683   else
2684     true_label_p = NULL;
2685 
2686   /* The 'else' branch also needs a label if it contains interesting code.  */
2687   if (false_label || else_se)
2688     false_label_p = &false_label;
2689   else
2690     false_label_p = NULL;
2691 
2692   /* If there was nothing else in our arms, just forward the label(s).  */
2693   if (!then_se && !else_se)
2694     return shortcut_cond_r (pred, true_label_p, false_label_p,
2695 			    EXPR_LOC_OR_LOC (expr, input_location));
2696 
2697   /* If our last subexpression already has a terminal label, reuse it.  */
2698   if (else_se)
2699     t = expr_last (else_);
2700   else if (then_se)
2701     t = expr_last (then_);
2702   else
2703     t = NULL;
2704   if (t && TREE_CODE (t) == LABEL_EXPR)
2705     end_label = LABEL_EXPR_LABEL (t);
2706 
2707   /* If we don't care about jumping to the 'else' branch, jump to the end
2708      if the condition is false.  */
2709   if (!false_label_p)
2710     false_label_p = &end_label;
2711 
2712   /* We only want to emit these labels if we aren't hijacking them.  */
2713   emit_end = (end_label == NULL_TREE);
2714   emit_false = (false_label == NULL_TREE);
2715 
2716   /* We only emit the jump over the else clause if we have to--if the
2717      then clause may fall through.  Otherwise we can wind up with a
2718      useless jump and a useless label at the end of gimplified code,
2719      which will cause us to think that this conditional as a whole
2720      falls through even if it doesn't.  If we then inline a function
2721      which ends with such a condition, that can cause us to issue an
2722      inappropriate warning about control reaching the end of a
2723      non-void function.  */
2724   jump_over_else = block_may_fallthru (then_);
2725 
2726   pred = shortcut_cond_r (pred, true_label_p, false_label_p,
2727 			  EXPR_LOC_OR_LOC (expr, input_location));
2728 
2729   expr = NULL;
2730   append_to_statement_list (pred, &expr);
2731 
2732   append_to_statement_list (then_, &expr);
2733   if (else_se)
2734     {
2735       if (jump_over_else)
2736 	{
2737 	  tree last = expr_last (expr);
2738 	  t = build_and_jump (&end_label);
2739 	  if (EXPR_HAS_LOCATION (last))
2740 	    SET_EXPR_LOCATION (t, EXPR_LOCATION (last));
2741 	  append_to_statement_list (t, &expr);
2742 	}
2743       if (emit_false)
2744 	{
2745 	  t = build1 (LABEL_EXPR, void_type_node, false_label);
2746 	  append_to_statement_list (t, &expr);
2747 	}
2748       append_to_statement_list (else_, &expr);
2749     }
2750   if (emit_end && end_label)
2751     {
2752       t = build1 (LABEL_EXPR, void_type_node, end_label);
2753       append_to_statement_list (t, &expr);
2754     }
2755 
2756   return expr;
2757 }
2758 
2759 /* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE.  */
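/* As a rough illustration: in

       if (n && p) ...

   both `n' (an int) and `p' (a pointer) are boolified, so the
   TRUTH_ANDIF_EXPR ends up operating on BOOLEAN_TYPE operands,
   conceptually (n != 0) && (p != 0).  */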
2760 
2761 tree
2762 gimple_boolify (tree expr)
2763 {
2764   tree type = TREE_TYPE (expr);
2765   location_t loc = EXPR_LOCATION (expr);
2766 
2767   if (TREE_CODE (expr) == NE_EXPR
2768       && TREE_CODE (TREE_OPERAND (expr, 0)) == CALL_EXPR
2769       && integer_zerop (TREE_OPERAND (expr, 1)))
2770     {
2771       tree call = TREE_OPERAND (expr, 0);
2772       tree fn = get_callee_fndecl (call);
2773 
2774       /* For __builtin_expect ((long) (x), y) recurse into x as well
2775 	 if x is truth_value_p.  */
2776       if (fn
2777 	  && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL
2778 	  && DECL_FUNCTION_CODE (fn) == BUILT_IN_EXPECT
2779 	  && call_expr_nargs (call) == 2)
2780 	{
2781 	  tree arg = CALL_EXPR_ARG (call, 0);
2782 	  if (arg)
2783 	    {
2784 	      if (TREE_CODE (arg) == NOP_EXPR
2785 		  && TREE_TYPE (arg) == TREE_TYPE (call))
2786 		arg = TREE_OPERAND (arg, 0);
2787 	      if (truth_value_p (TREE_CODE (arg)))
2788 		{
2789 		  arg = gimple_boolify (arg);
2790 		  CALL_EXPR_ARG (call, 0)
2791 		    = fold_convert_loc (loc, TREE_TYPE (call), arg);
2792 		}
2793 	    }
2794 	}
2795     }
2796 
2797   switch (TREE_CODE (expr))
2798     {
2799     case TRUTH_AND_EXPR:
2800     case TRUTH_OR_EXPR:
2801     case TRUTH_XOR_EXPR:
2802     case TRUTH_ANDIF_EXPR:
2803     case TRUTH_ORIF_EXPR:
2804       /* Also boolify the arguments of truth exprs.  */
2805       TREE_OPERAND (expr, 1) = gimple_boolify (TREE_OPERAND (expr, 1));
2806       /* FALLTHRU */
2807 
2808     case TRUTH_NOT_EXPR:
2809       TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
2810 
2811       /* These expressions always produce boolean results.  */
2812       if (TREE_CODE (type) != BOOLEAN_TYPE)
2813 	TREE_TYPE (expr) = boolean_type_node;
2814       return expr;
2815 
2816     case ANNOTATE_EXPR:
2817       if ((enum annot_expr_kind) TREE_INT_CST_LOW (TREE_OPERAND (expr, 1))
2818 	  == annot_expr_ivdep_kind)
2819 	{
2820 	  TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
2821 	  if (TREE_CODE (type) != BOOLEAN_TYPE)
2822 	    TREE_TYPE (expr) = boolean_type_node;
2823 	  return expr;
2824 	}
2825       /* FALLTHRU */
2826 
2827     default:
2828       if (COMPARISON_CLASS_P (expr))
2829 	{
2830 	  /* These expressions always produce boolean results.  */
2831 	  if (TREE_CODE (type) != BOOLEAN_TYPE)
2832 	    TREE_TYPE (expr) = boolean_type_node;
2833 	  return expr;
2834 	}
2835       /* Other expressions that get here must have boolean values, but
2836 	 might need to be converted to the appropriate mode.  */
2837       if (TREE_CODE (type) == BOOLEAN_TYPE)
2838 	return expr;
2839       return fold_convert_loc (loc, boolean_type_node, expr);
2840     }
2841 }
2842 
2843 /* Given a conditional expression *EXPR_P without side effects, gimplify
2844    its operands.  New statements are inserted to PRE_P.  */
2845 
2846 static enum gimplify_status
2847 gimplify_pure_cond_expr (tree *expr_p, gimple_seq *pre_p)
2848 {
2849   tree expr = *expr_p, cond;
2850   enum gimplify_status ret, tret;
2851   enum tree_code code;
2852 
2853   cond = gimple_boolify (COND_EXPR_COND (expr));
2854 
2855   /* We need to handle && and || specially, as their gimplification
2856      creates a pure cond_expr, which would otherwise lead to an infinite cycle.  */
2857   code = TREE_CODE (cond);
2858   if (code == TRUTH_ANDIF_EXPR)
2859     TREE_SET_CODE (cond, TRUTH_AND_EXPR);
2860   else if (code == TRUTH_ORIF_EXPR)
2861     TREE_SET_CODE (cond, TRUTH_OR_EXPR);
2862   ret = gimplify_expr (&cond, pre_p, NULL, is_gimple_condexpr, fb_rvalue);
2863   COND_EXPR_COND (*expr_p) = cond;
2864 
2865   tret = gimplify_expr (&COND_EXPR_THEN (expr), pre_p, NULL,
2866 				   is_gimple_val, fb_rvalue);
2867   ret = MIN (ret, tret);
2868   tret = gimplify_expr (&COND_EXPR_ELSE (expr), pre_p, NULL,
2869 				   is_gimple_val, fb_rvalue);
2870 
2871   return MIN (ret, tret);
2872 }
2873 
2874 /* Return true if evaluating EXPR could trap.
2875    EXPR is GENERIC, while tree_could_trap_p can be called
2876    only on GIMPLE.  */
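/* As a rough illustration: `x / y' with integer operands could trap on
   division by zero and `*p' could trap on an invalid pointer, so either
   occurring anywhere inside EXPR makes this return true, whereas plain
   arithmetic like `x + 1' normally does not.  */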
2877 
2878 static bool
2879 generic_expr_could_trap_p (tree expr)
2880 {
2881   unsigned i, n;
2882 
2883   if (!expr || is_gimple_val (expr))
2884     return false;
2885 
2886   if (!EXPR_P (expr) || tree_could_trap_p (expr))
2887     return true;
2888 
2889   n = TREE_OPERAND_LENGTH (expr);
2890   for (i = 0; i < n; i++)
2891     if (generic_expr_could_trap_p (TREE_OPERAND (expr, i)))
2892       return true;
2893 
2894   return false;
2895 }
2896 
2897 /*  Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
2898     into
2899 
2900     if (p)			if (p)
2901       t1 = a;			  a;
2902     else		or	else
2903       t1 = b;			  b;
2904     t1;
2905 
2906     The second form is used when *EXPR_P is of type void.
2907 
2908     PRE_P points to the list where side effects that must happen before
2909       *EXPR_P should be stored.  */
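/* As a rough illustration of the value-producing form: for

       x = p ? a : b;

   the code below creates the temporary "iftmp" and emits approximately

       if (p) iftmp.0 = a; else iftmp.0 = b;
       x = iftmp.0;

   falling back to a pointer temporary when the type is addressable.  */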
2910 
2911 static enum gimplify_status
2912 gimplify_cond_expr (tree *expr_p, gimple_seq *pre_p, fallback_t fallback)
2913 {
2914   tree expr = *expr_p;
2915   tree type = TREE_TYPE (expr);
2916   location_t loc = EXPR_LOCATION (expr);
2917   tree tmp, arm1, arm2;
2918   enum gimplify_status ret;
2919   tree label_true, label_false, label_cont;
2920   bool have_then_clause_p, have_else_clause_p;
2921   gimple gimple_cond;
2922   enum tree_code pred_code;
2923   gimple_seq seq = NULL;
2924 
2925   /* If this COND_EXPR has a value, copy the values into a temporary within
2926      the arms.  */
2927   if (!VOID_TYPE_P (type))
2928     {
2929       tree then_ = TREE_OPERAND (expr, 1), else_ = TREE_OPERAND (expr, 2);
2930       tree result;
2931 
2932       /* If either an rvalue is ok or we do not require an lvalue, create the
2933 	 temporary.  But we cannot do that if the type is addressable.  */
2934       if (((fallback & fb_rvalue) || !(fallback & fb_lvalue))
2935 	  && !TREE_ADDRESSABLE (type))
2936 	{
2937 	  if (gimplify_ctxp->allow_rhs_cond_expr
2938 	      /* If either branch has side effects or could trap, it can't be
2939 		 evaluated unconditionally.  */
2940 	      && !TREE_SIDE_EFFECTS (then_)
2941 	      && !generic_expr_could_trap_p (then_)
2942 	      && !TREE_SIDE_EFFECTS (else_)
2943 	      && !generic_expr_could_trap_p (else_))
2944 	    return gimplify_pure_cond_expr (expr_p, pre_p);
2945 
2946 	  tmp = create_tmp_var (type, "iftmp");
2947 	  result = tmp;
2948 	}
2949 
2950       /* Otherwise, only create and copy references to the values.  */
2951       else
2952 	{
2953 	  type = build_pointer_type (type);
2954 
2955 	  if (!VOID_TYPE_P (TREE_TYPE (then_)))
2956 	    then_ = build_fold_addr_expr_loc (loc, then_);
2957 
2958 	  if (!VOID_TYPE_P (TREE_TYPE (else_)))
2959 	    else_ = build_fold_addr_expr_loc (loc, else_);
2960 
2961 	  expr
2962 	    = build3 (COND_EXPR, type, TREE_OPERAND (expr, 0), then_, else_);
2963 
2964 	  tmp = create_tmp_var (type, "iftmp");
2965 	  result = build_simple_mem_ref_loc (loc, tmp);
2966 	}
2967 
2968       /* Build the new then clause, `tmp = then_;'.  But don't build the
2969 	 assignment if the value is void; in C++ it can be if it's a throw.  */
2970       if (!VOID_TYPE_P (TREE_TYPE (then_)))
2971 	TREE_OPERAND (expr, 1) = build2 (MODIFY_EXPR, type, tmp, then_);
2972 
2973       /* Similarly, build the new else clause, `tmp = else_;'.  */
2974       if (!VOID_TYPE_P (TREE_TYPE (else_)))
2975 	TREE_OPERAND (expr, 2) = build2 (MODIFY_EXPR, type, tmp, else_);
2976 
2977       TREE_TYPE (expr) = void_type_node;
2978       recalculate_side_effects (expr);
2979 
2980       /* Move the COND_EXPR to the prequeue.  */
2981       gimplify_stmt (&expr, pre_p);
2982 
2983       *expr_p = result;
2984       return GS_ALL_DONE;
2985     }
2986 
2987   /* Remove any COMPOUND_EXPR so the following cases will be caught.  */
2988   STRIP_TYPE_NOPS (TREE_OPERAND (expr, 0));
2989   if (TREE_CODE (TREE_OPERAND (expr, 0)) == COMPOUND_EXPR)
2990     gimplify_compound_expr (&TREE_OPERAND (expr, 0), pre_p, true);
2991 
2992   /* Make sure the condition has BOOLEAN_TYPE.  */
2993   TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
2994 
2995   /* Break apart && and || conditions.  */
2996   if (TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ANDIF_EXPR
2997       || TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ORIF_EXPR)
2998     {
2999       expr = shortcut_cond_expr (expr);
3000 
3001       if (expr != *expr_p)
3002 	{
3003 	  *expr_p = expr;
3004 
3005 	  /* We can't rely on gimplify_expr to re-gimplify the expanded
3006 	     form properly, as cleanups might cause the target labels to be
3007 	     wrapped in a TRY_FINALLY_EXPR.  To prevent that, we need to
3008 	     set up a conditional context.  */
3009 	  gimple_push_condition ();
3010 	  gimplify_stmt (expr_p, &seq);
3011 	  gimple_pop_condition (pre_p);
3012 	  gimple_seq_add_seq (pre_p, seq);
3013 
3014 	  return GS_ALL_DONE;
3015 	}
3016     }
3017 
3018   /* Now do the normal gimplification.  */
3019 
3020   /* Gimplify condition.  */
3021   ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL, is_gimple_condexpr,
3022 		       fb_rvalue);
3023   if (ret == GS_ERROR)
3024     return GS_ERROR;
3025   gcc_assert (TREE_OPERAND (expr, 0) != NULL_TREE);
3026 
3027   gimple_push_condition ();
3028 
3029   have_then_clause_p = have_else_clause_p = false;
3030   if (TREE_OPERAND (expr, 1) != NULL
3031       && TREE_CODE (TREE_OPERAND (expr, 1)) == GOTO_EXPR
3032       && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 1))) == LABEL_DECL
3033       && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 1)))
3034 	  == current_function_decl)
3035       /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
3036 	 have different locations, otherwise we end up with incorrect
3037 	 location information on the branches.  */
3038       && (optimize
3039 	  || !EXPR_HAS_LOCATION (expr)
3040 	  || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 1))
3041 	  || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 1))))
3042     {
3043       label_true = GOTO_DESTINATION (TREE_OPERAND (expr, 1));
3044       have_then_clause_p = true;
3045     }
3046   else
3047     label_true = create_artificial_label (UNKNOWN_LOCATION);
3048   if (TREE_OPERAND (expr, 2) != NULL
3049       && TREE_CODE (TREE_OPERAND (expr, 2)) == GOTO_EXPR
3050       && TREE_CODE (GOTO_DESTINATION (TREE_OPERAND (expr, 2))) == LABEL_DECL
3051       && (DECL_CONTEXT (GOTO_DESTINATION (TREE_OPERAND (expr, 2)))
3052 	  == current_function_decl)
3053       /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
3054 	 have different locations, otherwise we end up with incorrect
3055 	 location information on the branches.  */
3056       && (optimize
3057 	  || !EXPR_HAS_LOCATION (expr)
3058 	  || !EXPR_HAS_LOCATION (TREE_OPERAND (expr, 2))
3059 	  || EXPR_LOCATION (expr) == EXPR_LOCATION (TREE_OPERAND (expr, 2))))
3060     {
3061       label_false = GOTO_DESTINATION (TREE_OPERAND (expr, 2));
3062       have_else_clause_p = true;
3063     }
3064   else
3065     label_false = create_artificial_label (UNKNOWN_LOCATION);
3066 
3067   gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr), &pred_code, &arm1,
3068 				 &arm2);
3069 
3070   gimple_cond = gimple_build_cond (pred_code, arm1, arm2, label_true,
3071                                    label_false);
3072 
3073   gimplify_seq_add_stmt (&seq, gimple_cond);
3074   label_cont = NULL_TREE;
3075   if (!have_then_clause_p)
3076     {
3077       /* For if (...) {} else { code; } put label_true after
3078 	 the else block.  */
3079       if (TREE_OPERAND (expr, 1) == NULL_TREE
3080 	  && !have_else_clause_p
3081 	  && TREE_OPERAND (expr, 2) != NULL_TREE)
3082 	label_cont = label_true;
3083       else
3084 	{
3085 	  gimplify_seq_add_stmt (&seq, gimple_build_label (label_true));
3086 	  have_then_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 1), &seq);
3087 	  /* For if (...) { code; } else {} or
3088 	     if (...) { code; } else goto label; or
3089 	     if (...) { code; return; } else { ... }
3090 	     label_cont isn't needed.  */
3091 	  if (!have_else_clause_p
3092 	      && TREE_OPERAND (expr, 2) != NULL_TREE
3093 	      && gimple_seq_may_fallthru (seq))
3094 	    {
3095 	      gimple g;
3096 	      label_cont = create_artificial_label (UNKNOWN_LOCATION);
3097 
3098 	      g = gimple_build_goto (label_cont);
3099 
3100 	      /* GIMPLE_COND's are very low level; they have embedded
3101 		 gotos.  This particular embedded goto should not be marked
3102 		 with the location of the original COND_EXPR, as it would
3103 		 correspond to the COND_EXPR's condition, not the ELSE or the
3104 		 THEN arms.  To avoid marking it with the wrong location, flag
3105 		 it as "no location".  */
3106 	      gimple_set_do_not_emit_location (g);
3107 
3108 	      gimplify_seq_add_stmt (&seq, g);
3109 	    }
3110 	}
3111     }
3112   if (!have_else_clause_p)
3113     {
3114       gimplify_seq_add_stmt (&seq, gimple_build_label (label_false));
3115       have_else_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 2), &seq);
3116     }
3117   if (label_cont)
3118     gimplify_seq_add_stmt (&seq, gimple_build_label (label_cont));
3119 
3120   gimple_pop_condition (pre_p);
3121   gimple_seq_add_seq (pre_p, seq);
3122 
3123   if (ret == GS_ERROR)
3124     ; /* Do nothing.  */
3125   else if (have_then_clause_p || have_else_clause_p)
3126     ret = GS_ALL_DONE;
3127   else
3128     {
3129       /* Both arms are empty; replace the COND_EXPR with its predicate.  */
3130       expr = TREE_OPERAND (expr, 0);
3131       gimplify_stmt (&expr, pre_p);
3132     }
3133 
3134   *expr_p = NULL;
3135   return ret;
3136 }
3137 
3138 /* Prepare the node pointed to by EXPR_P, an is_gimple_addressable expression,
3139    to be marked addressable.
3140 
3141    We cannot rely on such an expression being directly markable if a temporary
3142    has been created by the gimplification.  In this case, we create another
3143    temporary and initialize it with a copy, which will become a store after we
3144    mark it addressable.  This can happen if the front-end passed us something
3145    that it could not mark addressable yet, like a Fortran pass-by-reference
3146    parameter (int) floatvar.  */
3147 
3148 static void
3149 prepare_gimple_addressable (tree *expr_p, gimple_seq *seq_p)
3150 {
3151   while (handled_component_p (*expr_p))
3152     expr_p = &TREE_OPERAND (*expr_p, 0);
3153   if (is_gimple_reg (*expr_p))
3154     *expr_p = get_initialized_tmp_var (*expr_p, seq_p, NULL);
3155 }
3156 
3157 /* A subroutine of gimplify_modify_expr.  Replace a MODIFY_EXPR with
3158    a call to __builtin_memcpy.  */
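/* Sketch of the lowering (illustrative only; the exact trees depend on the
   front end): for a block copy whose size is only known at run time, e.g.
   between two objects of a variably-sized record type, the MODIFY_EXPR
   carries a WITH_SIZE_EXPR and is rewritten into roughly

       __builtin_memcpy (&<lhs>, &<rhs>, <size>);

   after making the RHS addressable if necessary.  */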
3159 
3160 static enum gimplify_status
3161 gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value,
3162     				gimple_seq *seq_p)
3163 {
3164   tree t, to, to_ptr, from, from_ptr;
3165   gimple gs;
3166   location_t loc = EXPR_LOCATION (*expr_p);
3167 
3168   to = TREE_OPERAND (*expr_p, 0);
3169   from = TREE_OPERAND (*expr_p, 1);
3170 
3171   /* Mark the RHS addressable.  Beware that it may not be possible to do so
3172      directly if a temporary has been created by the gimplification.  */
3173   prepare_gimple_addressable (&from, seq_p);
3174 
3175   mark_addressable (from);
3176   from_ptr = build_fold_addr_expr_loc (loc, from);
3177   gimplify_arg (&from_ptr, seq_p, loc);
3178 
3179   mark_addressable (to);
3180   to_ptr = build_fold_addr_expr_loc (loc, to);
3181   gimplify_arg (&to_ptr, seq_p, loc);
3182 
3183   t = builtin_decl_implicit (BUILT_IN_MEMCPY);
3184 
3185   gs = gimple_build_call (t, 3, to_ptr, from_ptr, size);
3186 
3187   if (want_value)
3188     {
3189       /* tmp = memcpy() */
3190       t = create_tmp_var (TREE_TYPE (to_ptr), NULL);
3191       gimple_call_set_lhs (gs, t);
3192       gimplify_seq_add_stmt (seq_p, gs);
3193 
3194       *expr_p = build_simple_mem_ref (t);
3195       return GS_ALL_DONE;
3196     }
3197 
3198   gimplify_seq_add_stmt (seq_p, gs);
3199   *expr_p = NULL;
3200   return GS_ALL_DONE;
3201 }
3202 
3203 /* A subroutine of gimplify_modify_expr.  Replace a MODIFY_EXPR with
3204    a call to __builtin_memset.  In this case we know that the RHS is
3205    a CONSTRUCTOR with an empty element list.  */
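/* Sketch of the lowering (details vary by front end): an assignment whose
   RHS is the empty CONSTRUCTOR {} and whose size is only known at run time
   becomes roughly

       __builtin_memset (&<lhs>, 0, <size>);  */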
3206 
3207 static enum gimplify_status
3208 gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value,
3209     				gimple_seq *seq_p)
3210 {
3211   tree t, from, to, to_ptr;
3212   gimple gs;
3213   location_t loc = EXPR_LOCATION (*expr_p);
3214 
3215   /* Assert our assumptions, to abort instead of producing wrong code
3216      silently if they are not met.  Beware that the RHS CONSTRUCTOR might
3217      not be immediately exposed.  */
3218   from = TREE_OPERAND (*expr_p, 1);
3219   if (TREE_CODE (from) == WITH_SIZE_EXPR)
3220     from = TREE_OPERAND (from, 0);
3221 
3222   gcc_assert (TREE_CODE (from) == CONSTRUCTOR
3223 	      && vec_safe_is_empty (CONSTRUCTOR_ELTS (from)));
3224 
3225   /* Now proceed.  */
3226   to = TREE_OPERAND (*expr_p, 0);
3227 
3228   to_ptr = build_fold_addr_expr_loc (loc, to);
3229   gimplify_arg (&to_ptr, seq_p, loc);
3230   t = builtin_decl_implicit (BUILT_IN_MEMSET);
3231 
3232   gs = gimple_build_call (t, 3, to_ptr, integer_zero_node, size);
3233 
3234   if (want_value)
3235     {
3236       /* tmp = memset() */
3237       t = create_tmp_var (TREE_TYPE (to_ptr), NULL);
3238       gimple_call_set_lhs (gs, t);
3239       gimplify_seq_add_stmt (seq_p, gs);
3240 
3241       *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t);
3242       return GS_ALL_DONE;
3243     }
3244 
3245   gimplify_seq_add_stmt (seq_p, gs);
3246   *expr_p = NULL;
3247   return GS_ALL_DONE;
3248 }
3249 
3250 /* A subroutine of gimplify_init_ctor_preeval.  Called via walk_tree,
3251    determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
3252    assignment.  Return non-null if we detect a potential overlap.  */
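/* Illustrative (hypothetical) source showing the overlap being detected:

       struct S s = { 1, 2 };
       s = (struct S) { s.b, s.a };

   The element values reference the lhs object `s', so storing them
   directly would clobber fields before they are read; the walker below
   reports such references so they can be pre-evaluated.  */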
3253 
3254 struct gimplify_init_ctor_preeval_data
3255 {
3256   /* The base decl of the lhs object.  May be NULL, in which case we
3257      have to assume the lhs is indirect.  */
3258   tree lhs_base_decl;
3259 
3260   /* The alias set of the lhs object.  */
3261   alias_set_type lhs_alias_set;
3262 };
3263 
3264 static tree
3265 gimplify_init_ctor_preeval_1 (tree *tp, int *walk_subtrees, void *xdata)
3266 {
3267   struct gimplify_init_ctor_preeval_data *data
3268     = (struct gimplify_init_ctor_preeval_data *) xdata;
3269   tree t = *tp;
3270 
3271   /* If we find the base object, obviously we have overlap.  */
3272   if (data->lhs_base_decl == t)
3273     return t;
3274 
3275   /* If the constructor component is indirect, determine if we have a
3276      potential overlap with the lhs.  The only bits of information we
3277      have to go on at this point are addressability and alias sets.  */
3278   if ((INDIRECT_REF_P (t)
3279        || TREE_CODE (t) == MEM_REF)
3280       && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
3281       && alias_sets_conflict_p (data->lhs_alias_set, get_alias_set (t)))
3282     return t;
3283 
3284   /* If the constructor component is a call, determine if it can hide a
3285      potential overlap with the lhs through an INDIRECT_REF like above.
3286      ??? Ugh - this is completely broken.  In fact this whole analysis
3287      doesn't look conservative.  */
3288   if (TREE_CODE (t) == CALL_EXPR)
3289     {
3290       tree type, fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t)));
3291 
3292       for (type = TYPE_ARG_TYPES (fntype); type; type = TREE_CHAIN (type))
3293 	if (POINTER_TYPE_P (TREE_VALUE (type))
3294 	    && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
3295 	    && alias_sets_conflict_p (data->lhs_alias_set,
3296 				      get_alias_set
3297 				        (TREE_TYPE (TREE_VALUE (type)))))
3298 	  return t;
3299     }
3300 
3301   if (IS_TYPE_OR_DECL_P (t))
3302     *walk_subtrees = 0;
3303   return NULL;
3304 }
3305 
3306 /* A subroutine of gimplify_init_constructor.  Pre-evaluate EXPR,
3307    force values that overlap with the lhs (as described by *DATA)
3308    into temporaries.  */
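/* Continuing the illustrative example above, pre-evaluation yields roughly

       D.1 = s.b;
       D.2 = s.a;
       s.a = D.1;
       s.b = D.2;

   so the old field values are read before any store into `s' happens.  */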
3309 
3310 static void
3311 gimplify_init_ctor_preeval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
3312 			    struct gimplify_init_ctor_preeval_data *data)
3313 {
3314   enum gimplify_status one;
3315 
3316   /* If the value is constant, then there's nothing to pre-evaluate.  */
3317   if (TREE_CONSTANT (*expr_p))
3318     {
3319       /* Ensure it does not have side effects, it might contain a reference to
3320 	 the object we're initializing.  */
3321       gcc_assert (!TREE_SIDE_EFFECTS (*expr_p));
3322       return;
3323     }
3324 
3325   /* If the type has non-trivial constructors, we can't pre-evaluate.  */
3326   if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p)))
3327     return;
3328 
3329   /* Recurse for nested constructors.  */
3330   if (TREE_CODE (*expr_p) == CONSTRUCTOR)
3331     {
3332       unsigned HOST_WIDE_INT ix;
3333       constructor_elt *ce;
3334       vec<constructor_elt, va_gc> *v = CONSTRUCTOR_ELTS (*expr_p);
3335 
3336       FOR_EACH_VEC_SAFE_ELT (v, ix, ce)
3337 	gimplify_init_ctor_preeval (&ce->value, pre_p, post_p, data);
3338 
3339       return;
3340     }
3341 
3342   /* If this is a variable sized type, we must remember the size.  */
3343   maybe_with_size_expr (expr_p);
3344 
3345   /* Gimplify the constructor element to something appropriate for the rhs
3346      of a MODIFY_EXPR.  Given that we know the LHS is an aggregate, we know
3347      the gimplifier will consider this a store to memory.  Doing this
3348      gimplification now means that we won't have to deal with complicated
3349      language-specific trees, nor trees like SAVE_EXPR that can induce
3350      exponential search behavior.  */
3351   one = gimplify_expr (expr_p, pre_p, post_p, is_gimple_mem_rhs, fb_rvalue);
3352   if (one == GS_ERROR)
3353     {
3354       *expr_p = NULL;
3355       return;
3356     }
3357 
3358   /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
3359      with the lhs, since "a = { .x=a }" doesn't make sense.  This will
3360      always be true for all scalars, since is_gimple_mem_rhs insists on a
3361      temporary variable for them.  */
3362   if (DECL_P (*expr_p))
3363     return;
3364 
3365   /* If this is of variable size, we have no choice but to assume it doesn't
3366      overlap since we can't make a temporary for it.  */
3367   if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p))) != INTEGER_CST)
3368     return;
3369 
3370   /* Otherwise, we must search for overlap ...  */
3371   if (!walk_tree (expr_p, gimplify_init_ctor_preeval_1, data, NULL))
3372     return;
3373 
3374   /* ... and if found, force the value into a temporary.  */
3375   *expr_p = get_formal_tmp_var (*expr_p, pre_p);
3376 }
3377 
3378 /* A subroutine of gimplify_init_ctor_eval.  Create a loop for
3379    a RANGE_EXPR in a CONSTRUCTOR for an array.
3380 
3381       var = lower;
3382     loop_entry:
3383       object[var] = value;
3384       if (var == upper)
3385 	goto loop_exit;
3386       var = var + 1;
3387       goto loop_entry;
3388     loop_exit:
3389 
3390    We increment var _after_ the loop exit check because we might otherwise
3391    fail if upper == TYPE_MAX_VALUE (type for upper).
3392 
3393    Note that we never have to deal with SAVE_EXPRs here, because this has
3394    already been taken care of for us, in gimplify_init_ctor_preeval().  */
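/* RANGE_EXPRs typically come from GNU C designated range initializers,
   e.g. (illustrative)

       int a[100] = { [10 ... 90] = v };

   which would be unreasonable to expand element by element, hence the
   loop sketched above.  */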
3395 
3396 static void gimplify_init_ctor_eval (tree, vec<constructor_elt, va_gc> *,
3397 				     gimple_seq *, bool);
3398 
3399 static void
3400 gimplify_init_ctor_eval_range (tree object, tree lower, tree upper,
3401 			       tree value, tree array_elt_type,
3402 			       gimple_seq *pre_p, bool cleared)
3403 {
3404   tree loop_entry_label, loop_exit_label, fall_thru_label;
3405   tree var, var_type, cref, tmp;
3406 
3407   loop_entry_label = create_artificial_label (UNKNOWN_LOCATION);
3408   loop_exit_label = create_artificial_label (UNKNOWN_LOCATION);
3409   fall_thru_label = create_artificial_label (UNKNOWN_LOCATION);
3410 
3411   /* Create and initialize the index variable.  */
3412   var_type = TREE_TYPE (upper);
3413   var = create_tmp_var (var_type, NULL);
3414   gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, lower));
3415 
3416   /* Add the loop entry label.  */
3417   gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_entry_label));
3418 
3419   /* Build the reference.  */
3420   cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
3421 		 var, NULL_TREE, NULL_TREE);
3422 
3423   /* If we are a constructor, just call gimplify_init_ctor_eval to do
3424      the store.  Otherwise just assign value to the reference.  */
3425 
3426   if (TREE_CODE (value) == CONSTRUCTOR)
3427     /* NB we might have to call ourselves recursively through
3428        gimplify_init_ctor_eval if the value is a constructor.  */
3429     gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
3430 			     pre_p, cleared);
3431   else
3432     gimplify_seq_add_stmt (pre_p, gimple_build_assign (cref, value));
3433 
3434   /* We exit the loop when the index var is equal to the upper bound.  */
3435   gimplify_seq_add_stmt (pre_p,
3436 			 gimple_build_cond (EQ_EXPR, var, upper,
3437 					    loop_exit_label, fall_thru_label));
3438 
3439   gimplify_seq_add_stmt (pre_p, gimple_build_label (fall_thru_label));
3440 
3441   /* Otherwise, increment the index var...  */
3442   tmp = build2 (PLUS_EXPR, var_type, var,
3443 		fold_convert (var_type, integer_one_node));
3444   gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, tmp));
3445 
3446   /* ...and jump back to the loop entry.  */
3447   gimplify_seq_add_stmt (pre_p, gimple_build_goto (loop_entry_label));
3448 
3449   /* Add the loop exit label.  */
3450   gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_exit_label));
3451 }
3452 
3453 /* Return true if FDECL is accessing a field that is zero sized.  */
3454 
3455 static bool
3456 zero_sized_field_decl (const_tree fdecl)
3457 {
3458   if (TREE_CODE (fdecl) == FIELD_DECL && DECL_SIZE (fdecl)
3459       && integer_zerop (DECL_SIZE (fdecl)))
3460     return true;
3461   return false;
3462 }
3463 
3464 /* Return true if TYPE is zero sized.  */
3465 
3466 static bool
3467 zero_sized_type (const_tree type)
3468 {
3469   if (AGGREGATE_TYPE_P (type) && TYPE_SIZE (type)
3470       && integer_zerop (TYPE_SIZE (type)))
3471     return true;
3472   return false;
3473 }
3474 
3475 /* A subroutine of gimplify_init_constructor.  Generate individual
3476    MODIFY_EXPRs for a CONSTRUCTOR.  OBJECT is the LHS against which the
3477    assignments should happen.  ELTS is the CONSTRUCTOR_ELTS of the
3478    CONSTRUCTOR.  CLEARED is true if the entire LHS object has been
3479    zeroed first.  */
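/* For example (sketch), with OBJECT `s' of a struct type with fields
   `a' and `b' and a CONSTRUCTOR { .a = x, .b = y }, this emits roughly

       s.a = x;
       s.b = y;

   skipping zero-valued elements when CLEARED says the whole object has
   already been block-cleared.  */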
3480 
3481 static void
3482 gimplify_init_ctor_eval (tree object, vec<constructor_elt, va_gc> *elts,
3483 			 gimple_seq *pre_p, bool cleared)
3484 {
3485   tree array_elt_type = NULL;
3486   unsigned HOST_WIDE_INT ix;
3487   tree purpose, value;
3488 
3489   if (TREE_CODE (TREE_TYPE (object)) == ARRAY_TYPE)
3490     array_elt_type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object)));
3491 
3492   FOR_EACH_CONSTRUCTOR_ELT (elts, ix, purpose, value)
3493     {
3494       tree cref;
3495 
3496       /* NULL values are created above for gimplification errors.  */
3497       if (value == NULL)
3498 	continue;
3499 
3500       if (cleared && initializer_zerop (value))
3501 	continue;
3502 
3503       /* ??? Here's to hoping the front end fills in all of the indices,
3504 	 so we don't have to figure out what's missing ourselves.  */
3505       gcc_assert (purpose);
3506 
3507       /* Skip zero-sized fields, unless value has side-effects.  This can
3508 	 happen with calls to functions returning a zero-sized type, which
3509 	 we shouldn't discard.  As a number of downstream passes don't
3510 	 expect sets of zero-sized fields, we rely on the gimplification of
3511 	 the MODIFY_EXPR we make below to drop the assignment statement.  */
3512       if (! TREE_SIDE_EFFECTS (value) && zero_sized_field_decl (purpose))
3513 	continue;
3514 
3515       /* If we have a RANGE_EXPR, we have to build a loop to assign the
3516 	 whole range.  */
3517       if (TREE_CODE (purpose) == RANGE_EXPR)
3518 	{
3519 	  tree lower = TREE_OPERAND (purpose, 0);
3520 	  tree upper = TREE_OPERAND (purpose, 1);
3521 
3522 	  /* If the lower bound is equal to upper, just treat it as if
3523 	     upper was the index.  */
3524 	  if (simple_cst_equal (lower, upper))
3525 	    purpose = upper;
3526 	  else
3527 	    {
3528 	      gimplify_init_ctor_eval_range (object, lower, upper, value,
3529 					     array_elt_type, pre_p, cleared);
3530 	      continue;
3531 	    }
3532 	}
3533 
3534       if (array_elt_type)
3535 	{
3536 	  /* Do not use bitsizetype for ARRAY_REF indices.  */
3537 	  if (TYPE_DOMAIN (TREE_TYPE (object)))
3538 	    purpose
3539 	      = fold_convert (TREE_TYPE (TYPE_DOMAIN (TREE_TYPE (object))),
3540 			      purpose);
3541 	  cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
3542 			 purpose, NULL_TREE, NULL_TREE);
3543 	}
3544       else
3545 	{
3546 	  gcc_assert (TREE_CODE (purpose) == FIELD_DECL);
3547 	  cref = build3 (COMPONENT_REF, TREE_TYPE (purpose),
3548 			 unshare_expr (object), purpose, NULL_TREE);
3549 	}
3550 
3551       if (TREE_CODE (value) == CONSTRUCTOR
3552 	  && TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE)
3553 	gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
3554 				 pre_p, cleared);
3555       else
3556 	{
3557 	  tree init = build2 (INIT_EXPR, TREE_TYPE (cref), cref, value);
3558 	  gimplify_and_add (init, pre_p);
3559 	  ggc_free (init);
3560 	}
3561     }
3562 }
3563 
3564 /* Return the appropriate RHS predicate for this LHS.  */
3565 
3566 gimple_predicate
3567 rhs_predicate_for (tree lhs)
3568 {
3569   if (is_gimple_reg (lhs))
3570     return is_gimple_reg_rhs_or_call;
3571   else
3572     return is_gimple_mem_rhs_or_call;
3573 }
3574 
3575 /* Gimplify a C99 compound literal expression.  This just means adding
3576    the DECL_EXPR before the current statement and using its anonymous
3577    decl instead.  */
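/* Sketch: for the (illustrative) C99 source

       int *p = (int []) { 1, 2, 3 };

   the compound literal's anonymous decl D is emitted through its
   DECL_EXPR, initialized from { 1, 2, 3 }, and *EXPR_P is replaced by D,
   so the enclosing expression simply refers to the decl.  */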
3578 
3579 static enum gimplify_status
3580 gimplify_compound_literal_expr (tree *expr_p, gimple_seq *pre_p,
3581 				bool (*gimple_test_f) (tree),
3582 				fallback_t fallback)
3583 {
3584   tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (*expr_p);
3585   tree decl = DECL_EXPR_DECL (decl_s);
3586   tree init = DECL_INITIAL (decl);
3587   /* Mark the decl as addressable if the compound literal
3588      expression is addressable now, otherwise it is marked too late
3589      after we gimplify the initialization expression.  */
3590   if (TREE_ADDRESSABLE (*expr_p))
3591     TREE_ADDRESSABLE (decl) = 1;
3592   /* Otherwise, if we don't need an lvalue and have a literal directly
3593      substitute it.  Check if it matches the gimple predicate, as
3594      otherwise we'd generate a new temporary, and we can as well just
3595      use the decl we already have.  */
3596   else if (!TREE_ADDRESSABLE (decl)
3597 	   && init
3598 	   && (fallback & fb_lvalue) == 0
3599 	   && gimple_test_f (init))
3600     {
3601       *expr_p = init;
3602       return GS_OK;
3603     }
3604 
3605   /* Preliminarily mark non-addressed complex variables as eligible
3606      for promotion to gimple registers.  We'll transform their uses
3607      as we find them.  */
3608   if ((TREE_CODE (TREE_TYPE (decl)) == COMPLEX_TYPE
3609        || TREE_CODE (TREE_TYPE (decl)) == VECTOR_TYPE)
3610       && !TREE_THIS_VOLATILE (decl)
3611       && !needs_to_live_in_memory (decl))
3612     DECL_GIMPLE_REG_P (decl) = 1;
3613 
3614   /* If the decl is not addressable, then it is being used in some
3615      expression or on the right hand side of a statement, and it can
3616      be put into a readonly data section.  */
3617   if (!TREE_ADDRESSABLE (decl) && (fallback & fb_lvalue) == 0)
3618     TREE_READONLY (decl) = 1;
3619 
3620   /* This decl isn't mentioned in the enclosing block, so add it to the
3621      list of temps.  FIXME it seems a bit of a kludge to say that
3622      anonymous artificial vars aren't pushed, but everything else is.  */
3623   if (DECL_NAME (decl) == NULL_TREE && !DECL_SEEN_IN_BIND_EXPR_P (decl))
3624     gimple_add_tmp_var (decl);
3625 
3626   gimplify_and_add (decl_s, pre_p);
3627   *expr_p = decl;
3628   return GS_OK;
3629 }
3630 
3631 /* Optimize embedded COMPOUND_LITERAL_EXPRs within a CONSTRUCTOR,
3632    return a new CONSTRUCTOR if something changed.  */
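/* E.g. (sketch) an initializer such as

       { .t = (struct T) { 0, 1 } }

   is flattened into

       { .t = { 0, 1 } }

   when the compound literal's address is never taken, so the outer
   CONSTRUCTOR can be treated as a single constant initializer.  */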
3633 
3634 static tree
3635 optimize_compound_literals_in_ctor (tree orig_ctor)
3636 {
3637   tree ctor = orig_ctor;
3638   vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (ctor);
3639   unsigned int idx, num = vec_safe_length (elts);
3640 
3641   for (idx = 0; idx < num; idx++)
3642     {
3643       tree value = (*elts)[idx].value;
3644       tree newval = value;
3645       if (TREE_CODE (value) == CONSTRUCTOR)
3646 	newval = optimize_compound_literals_in_ctor (value);
3647       else if (TREE_CODE (value) == COMPOUND_LITERAL_EXPR)
3648 	{
3649 	  tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (value);
3650 	  tree decl = DECL_EXPR_DECL (decl_s);
3651 	  tree init = DECL_INITIAL (decl);
3652 
3653 	  if (!TREE_ADDRESSABLE (value)
3654 	      && !TREE_ADDRESSABLE (decl)
3655 	      && init
3656 	      && TREE_CODE (init) == CONSTRUCTOR)
3657 	    newval = optimize_compound_literals_in_ctor (init);
3658 	}
3659       if (newval == value)
3660 	continue;
3661 
3662       if (ctor == orig_ctor)
3663 	{
3664 	  ctor = copy_node (orig_ctor);
3665 	  CONSTRUCTOR_ELTS (ctor) = vec_safe_copy (elts);
3666 	  elts = CONSTRUCTOR_ELTS (ctor);
3667 	}
3668       (*elts)[idx].value = newval;
3669     }
3670   return ctor;
3671 }
3672 
3673 /* A subroutine of gimplify_modify_expr.  Break out elements of a
3674    CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.
3675 
3676    Note that we still need to clear any elements that don't have explicit
3677    initializers, so if not all elements are initialized we keep the
3678    original MODIFY_EXPR, we just remove all of the constructor elements.
3679 
3680    If NOTIFY_TEMP_CREATION is true, do not gimplify, just return
3681    GS_ERROR if we would have to create a temporary when gimplifying
3682    this constructor.  Otherwise, return GS_OK.
3683 
3684    If NOTIFY_TEMP_CREATION is false, just do the gimplification.  */
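/* Rough picture of the strategies chosen below (the exact thresholds live
   in the code): a mostly-zero initializer is handled by clearing the
   object and then storing the few nonzero fields; a large, fully constant
   initializer may be dropped to static storage and block-copied; anything
   else is broken into per-field MODIFY_EXPRs by gimplify_init_ctor_eval.  */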
3685 
3686 static enum gimplify_status
3687 gimplify_init_constructor (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
3688 			   bool want_value, bool notify_temp_creation)
3689 {
3690   tree object, ctor, type;
3691   enum gimplify_status ret;
3692   vec<constructor_elt, va_gc> *elts;
3693 
3694   gcc_assert (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == CONSTRUCTOR);
3695 
3696   if (!notify_temp_creation)
3697     {
3698       ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
3699 			   is_gimple_lvalue, fb_lvalue);
3700       if (ret == GS_ERROR)
3701 	return ret;
3702     }
3703 
3704   object = TREE_OPERAND (*expr_p, 0);
3705   ctor = TREE_OPERAND (*expr_p, 1) =
3706     optimize_compound_literals_in_ctor (TREE_OPERAND (*expr_p, 1));
3707   type = TREE_TYPE (ctor);
3708   elts = CONSTRUCTOR_ELTS (ctor);
3709   ret = GS_ALL_DONE;
3710 
3711   switch (TREE_CODE (type))
3712     {
3713     case RECORD_TYPE:
3714     case UNION_TYPE:
3715     case QUAL_UNION_TYPE:
3716     case ARRAY_TYPE:
3717       {
3718 	struct gimplify_init_ctor_preeval_data preeval_data;
3719 	HOST_WIDE_INT num_ctor_elements, num_nonzero_elements;
3720 	bool cleared, complete_p, valid_const_initializer;
3721 
3722 	/* Aggregate types must lower constructors to initialization of
3723 	   individual elements.  The exception is that a CONSTRUCTOR node
3724 	   with no elements indicates zero-initialization of the whole.  */
3725 	if (vec_safe_is_empty (elts))
3726 	  {
3727 	    if (notify_temp_creation)
3728 	      return GS_OK;
3729 	    break;
3730 	  }
3731 
3732 	/* Fetch information about the constructor to direct later processing.
3733 	   We might want to make static versions of it in various cases, and
3734 	   can only do so if it is known to be a valid constant initializer.  */
3735 	valid_const_initializer
3736 	  = categorize_ctor_elements (ctor, &num_nonzero_elements,
3737 				      &num_ctor_elements, &complete_p);
3738 
3739 	/* If a const aggregate variable is being initialized, then it
3740 	   should never be a loss to promote the variable to be static.  */
3741 	if (valid_const_initializer
3742 	    && num_nonzero_elements > 1
3743 	    && TREE_READONLY (object)
3744 	    && TREE_CODE (object) == VAR_DECL
3745 	    && (flag_merge_constants >= 2 || !TREE_ADDRESSABLE (object)))
3746 	  {
3747 	    if (notify_temp_creation)
3748 	      return GS_ERROR;
3749 	    DECL_INITIAL (object) = ctor;
3750 	    TREE_STATIC (object) = 1;
3751 	    if (!DECL_NAME (object))
3752 	      DECL_NAME (object) = create_tmp_var_name ("C");
3753 	    walk_tree (&DECL_INITIAL (object), force_labels_r, NULL, NULL);
3754 
3755 	    /* ??? C++ doesn't automatically append a .<number> to the
3756 	       assembler name, and even when it does, it looks at FE private
3757 	       data structures to figure out what that number should be,
3758 	       which are not set for this variable.  I suppose this is
3759 	       important for local statics for inline functions, which aren't
3760 	       "local" in the object file sense.  So in order to get a unique
3761 	       TU-local symbol, we must invoke the lhd version now.  */
3762 	    lhd_set_decl_assembler_name (object);
3763 
3764 	    *expr_p = NULL_TREE;
3765 	    break;
3766 	  }
3767 
3768 	/* If there are "lots" of initialized elements, even discounting
3769 	   those that are not address constants (and thus *must* be
3770 	   computed at runtime), then partition the constructor into
3771 	   constant and non-constant parts.  Block copy the constant
3772 	   parts in, then generate code for the non-constant parts.  */
3773 	/* TODO.  There's code in cp/typeck.c to do this.  */
3774 
3775 	if (int_size_in_bytes (TREE_TYPE (ctor)) < 0)
3776 	  /* store_constructor will ignore the clearing of variable-sized
3777 	     objects.  Initializers for such objects must explicitly set
3778 	     every field that needs to be set.  */
3779 	  cleared = false;
3780 	else if (!complete_p && !CONSTRUCTOR_NO_CLEARING (ctor))
3781 	  /* If the constructor isn't complete, clear the whole object
3782 	     beforehand, unless CONSTRUCTOR_NO_CLEARING is set on it.
3783 
3784 	     ??? This ought not to be needed.  For any element not present
3785 	     in the initializer, we should simply set them to zero.  Except
3786 	     we'd need to *find* the elements that are not present, and that
3787 	     requires trickery to avoid quadratic compile-time behavior in
3788 	     large cases or excessive memory use in small cases.  */
3789 	  cleared = true;
3790 	else if (num_ctor_elements - num_nonzero_elements
3791 		 > CLEAR_RATIO (optimize_function_for_speed_p (cfun))
3792 		 && num_nonzero_elements < num_ctor_elements / 4)
3793 	  /* If there are "lots" of zeros, it's more efficient to clear
3794 	     the memory and then set the nonzero elements.  */
3795 	  cleared = true;
3796 	else
3797 	  cleared = false;
3798 
3799 	/* If there are "lots" of initialized elements, and all of them
3800 	   are valid address constants, then the entire initializer can
3801 	   be dropped to memory, and then memcpy'd out.  Don't do this
3802 	   for sparse arrays, though, as it's more efficient to follow
3803 	   the standard CONSTRUCTOR behavior of memset followed by
3804 	   individual element initialization.  Also don't do this for small
3805 	   all-zero initializers (which aren't big enough to merit
3806 	   clearing), and don't try to make bitwise copies of
3807 	   TREE_ADDRESSABLE types.  */
3808 	if (valid_const_initializer
3809 	    && !(cleared || num_nonzero_elements == 0)
3810 	    && !TREE_ADDRESSABLE (type))
3811 	  {
3812 	    HOST_WIDE_INT size = int_size_in_bytes (type);
3813 	    unsigned int align;
3814 
3815 	    /* ??? We can still get unbounded array types, at least
3816 	       from the C++ front end.  This seems wrong, but attempt
3817 	       to work around it for now.  */
3818 	    if (size < 0)
3819 	      {
3820 		size = int_size_in_bytes (TREE_TYPE (object));
3821 		if (size >= 0)
3822 		  TREE_TYPE (ctor) = type = TREE_TYPE (object);
3823 	      }
3824 
3825 	    /* Find the maximum alignment we can assume for the object.  */
3826 	    /* ??? Make use of DECL_OFFSET_ALIGN.  */
3827 	    if (DECL_P (object))
3828 	      align = DECL_ALIGN (object);
3829 	    else
3830 	      align = TYPE_ALIGN (type);
3831 
3832 	    /* Do a block move either if the size is so small as to make
3833 	       each individual move a sub-unit move on average, or if it
3834 	       is so large as to make individual moves inefficient.  */
3835 	    if (size > 0
3836 		&& num_nonzero_elements > 1
3837 		&& (size < num_nonzero_elements
3838 		    || !can_move_by_pieces (size, align)))
3839 	      {
3840 		if (notify_temp_creation)
3841 		  return GS_ERROR;
3842 
3843 		walk_tree (&ctor, force_labels_r, NULL, NULL);
3844 		ctor = tree_output_constant_def (ctor);
3845 		if (!useless_type_conversion_p (type, TREE_TYPE (ctor)))
3846 		  ctor = build1 (VIEW_CONVERT_EXPR, type, ctor);
3847 		TREE_OPERAND (*expr_p, 1) = ctor;
3848 
3849 		/* This is no longer an assignment of a CONSTRUCTOR, but
3850 		   we still may have processing to do on the LHS.  So
3851 		   pretend we didn't do anything here to let that happen.  */
3852 		return GS_UNHANDLED;
3853 	      }
3854 	  }
3855 
3856 	/* If the target is volatile, we have non-zero elements and more than
3857 	   one field to assign, initialize the target from a temporary.  */
3858 	if (TREE_THIS_VOLATILE (object)
3859 	    && !TREE_ADDRESSABLE (type)
3860 	    && num_nonzero_elements > 0
3861 	    && vec_safe_length (elts) > 1)
3862 	  {
3863 	    tree temp = create_tmp_var (TYPE_MAIN_VARIANT (type), NULL);
3864 	    TREE_OPERAND (*expr_p, 0) = temp;
3865 	    *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
3866 			      *expr_p,
3867 			      build2 (MODIFY_EXPR, void_type_node,
3868 				      object, temp));
3869 	    return GS_OK;
3870 	  }
3871 
3872 	if (notify_temp_creation)
3873 	  return GS_OK;
3874 
3875 	/* If there are nonzero elements and if needed, pre-evaluate to capture
3876 	   elements overlapping with the lhs into temporaries.  We must do this
3877 	   before clearing to fetch the values before they are zeroed-out.  */
3878 	if (num_nonzero_elements > 0 && TREE_CODE (*expr_p) != INIT_EXPR)
3879 	  {
3880 	    preeval_data.lhs_base_decl = get_base_address (object);
3881 	    if (!DECL_P (preeval_data.lhs_base_decl))
3882 	      preeval_data.lhs_base_decl = NULL;
3883 	    preeval_data.lhs_alias_set = get_alias_set (object);
3884 
3885 	    gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p, 1),
3886 					pre_p, post_p, &preeval_data);
3887 	  }
3888 
3889 	if (cleared)
3890 	  {
3891 	    /* Zap the CONSTRUCTOR element list, which simplifies this case.
3892 	       Note that we still have to gimplify, in order to handle the
3893 	       case of variable sized types.  Avoid shared tree structures.  */
3894 	    CONSTRUCTOR_ELTS (ctor) = NULL;
3895 	    TREE_SIDE_EFFECTS (ctor) = 0;
3896 	    object = unshare_expr (object);
3897 	    gimplify_stmt (expr_p, pre_p);
3898 	  }
3899 
3900 	/* If we have not block cleared the object, or if there are nonzero
3901 	   elements in the constructor, add assignments to the individual
3902 	   scalar fields of the object.  */
3903 	if (!cleared || num_nonzero_elements > 0)
3904 	  gimplify_init_ctor_eval (object, elts, pre_p, cleared);
3905 
3906 	*expr_p = NULL_TREE;
3907       }
3908       break;
3909 
3910     case COMPLEX_TYPE:
3911       {
3912 	tree r, i;
3913 
3914 	if (notify_temp_creation)
3915 	  return GS_OK;
3916 
3917 	/* Extract the real and imaginary parts out of the ctor.  */
3918 	gcc_assert (elts->length () == 2);
3919 	r = (*elts)[0].value;
3920 	i = (*elts)[1].value;
3921 	if (r == NULL || i == NULL)
3922 	  {
3923 	    tree zero = build_zero_cst (TREE_TYPE (type));
3924 	    if (r == NULL)
3925 	      r = zero;
3926 	    if (i == NULL)
3927 	      i = zero;
3928 	  }
3929 
3930 	/* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
3931 	   represent creation of a complex value.  */
3932 	if (TREE_CONSTANT (r) && TREE_CONSTANT (i))
3933 	  {
3934 	    ctor = build_complex (type, r, i);
3935 	    TREE_OPERAND (*expr_p, 1) = ctor;
3936 	  }
3937 	else
3938 	  {
3939 	    ctor = build2 (COMPLEX_EXPR, type, r, i);
3940 	    TREE_OPERAND (*expr_p, 1) = ctor;
3941 	    ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1),
3942 				 pre_p,
3943 				 post_p,
3944 				 rhs_predicate_for (TREE_OPERAND (*expr_p, 0)),
3945 				 fb_rvalue);
3946 	  }
3947       }
3948       break;
3949 
3950     case VECTOR_TYPE:
3951       {
3952 	unsigned HOST_WIDE_INT ix;
3953 	constructor_elt *ce;
3954 
3955 	if (notify_temp_creation)
3956 	  return GS_OK;
3957 
3958 	/* Go ahead and simplify constant constructors to VECTOR_CST.  */
3959 	if (TREE_CONSTANT (ctor))
3960 	  {
3961 	    bool constant_p = true;
3962 	    tree value;
3963 
3964 	    /* Even when ctor is constant, it might contain non-*_CST
3965 	       elements, such as addresses or trapping values like
3966 	       1.0/0.0 - 1.0/0.0.  Such expressions don't belong
3967 	       in VECTOR_CST nodes.  */
3968 	    FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
3969 	      if (!CONSTANT_CLASS_P (value))
3970 		{
3971 		  constant_p = false;
3972 		  break;
3973 		}
3974 
3975 	    if (constant_p)
3976 	      {
3977 		TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts);
3978 		break;
3979 	      }
3980 
3981 	    /* Don't reduce an initializer constant even if we can't
3982 	       make a VECTOR_CST.  It won't do anything for us, and it'll
3983 	       prevent us from representing it as a single constant.  */
3984 	    if (initializer_constant_valid_p (ctor, type))
3985 	      break;
3986 
3987 	    TREE_CONSTANT (ctor) = 0;
3988 	  }
3989 
3990 	/* Vector types use CONSTRUCTOR all the way through gimple
3991 	  compilation as a general initializer.  */
3992 	FOR_EACH_VEC_SAFE_ELT (elts, ix, ce)
3993 	  {
3994 	    enum gimplify_status tret;
3995 	    tret = gimplify_expr (&ce->value, pre_p, post_p, is_gimple_val,
3996 				  fb_rvalue);
3997 	    if (tret == GS_ERROR)
3998 	      ret = GS_ERROR;
3999 	  }
4000 	if (!is_gimple_reg (TREE_OPERAND (*expr_p, 0)))
4001 	  TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (ctor, pre_p);
4002       }
4003       break;
4004 
4005     default:
4006       /* So how did we get a CONSTRUCTOR for a scalar type?  */
4007       gcc_unreachable ();
4008     }
4009 
4010   if (ret == GS_ERROR)
4011     return GS_ERROR;
4012   else if (want_value)
4013     {
4014       *expr_p = object;
4015       return GS_OK;
4016     }
4017   else
4018     {
4019       /* If we have gimplified both sides of the initializer but have
4020 	 not emitted an assignment, do so now.  */
4021       if (*expr_p)
4022 	{
4023 	  tree lhs = TREE_OPERAND (*expr_p, 0);
4024 	  tree rhs = TREE_OPERAND (*expr_p, 1);
4025 	  gimple init = gimple_build_assign (lhs, rhs);
4026 	  gimplify_seq_add_stmt (pre_p, init);
4027 	  *expr_p = NULL;
4028 	}
4029 
4030       return GS_ALL_DONE;
4031     }
4032 }
4033 
4034 /* Given a pointer value OP0, return a simplified version of an
4035    indirection through OP0, or NULL_TREE if no simplification is
4036    possible.  This may only be applied to a rhs of an expression.
4037    Note that the resulting type may be different from the type pointed
4038    to in the sense that it is still compatible from the langhooks
4039    point of view. */
4040 
4041 static tree
4042 gimple_fold_indirect_ref_rhs (tree t)
4043 {
4044   return gimple_fold_indirect_ref (t);
4045 }
4046 
4047 /* Subroutine of gimplify_modify_expr to do simplifications of
4048    MODIFY_EXPRs based on the code of the RHS.  We loop for as long as
4049    something changes.  */
4050 
4051 static enum gimplify_status
4052 gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p,
4053 			  gimple_seq *pre_p, gimple_seq *post_p,
4054 			  bool want_value)
4055 {
4056   enum gimplify_status ret = GS_UNHANDLED;
4057   bool changed;
4058 
4059   do
4060     {
4061       changed = false;
4062       switch (TREE_CODE (*from_p))
4063 	{
4064 	case VAR_DECL:
4065 	  /* If we're assigning from a read-only variable initialized with
4066 	     a constructor, do the direct assignment from the constructor,
4067 	     but only if neither source nor target are volatile since this
4068 	     latter assignment might end up being done on a per-field basis.  */
4069 	  if (DECL_INITIAL (*from_p)
4070 	      && TREE_READONLY (*from_p)
4071 	      && !TREE_THIS_VOLATILE (*from_p)
4072 	      && !TREE_THIS_VOLATILE (*to_p)
4073 	      && TREE_CODE (DECL_INITIAL (*from_p)) == CONSTRUCTOR)
4074 	    {
4075 	      tree old_from = *from_p;
4076 	      enum gimplify_status subret;
4077 
4078 	      /* Move the constructor into the RHS.  */
4079 	      *from_p = unshare_expr (DECL_INITIAL (*from_p));
4080 
4081 	      /* Let's see if gimplify_init_constructor will need to put
4082 		 it in memory.  */
4083 	      subret = gimplify_init_constructor (expr_p, NULL, NULL,
4084 						  false, true);
4085 	      if (subret == GS_ERROR)
4086 		{
4087 		  /* If so, revert the change.  */
4088 		  *from_p = old_from;
4089 		}
4090 	      else
4091 		{
4092 		  ret = GS_OK;
4093 		  changed = true;
4094 		}
4095 	    }
4096 	  break;
4097 	case INDIRECT_REF:
4098 	  {
4099 	    /* If we have code like
4100 
4101 	     *(const A*)(A*)&x
4102 
4103 	     where the type of "x" is a (possibly cv-qualified variant
4104 	     of "A"), treat the entire expression as identical to "x".
4105 	     This kind of code arises in C++ when an object is bound
4106 	     to a const reference, and if "x" is a TARGET_EXPR we want
4107 	     to take advantage of the optimization below.  */
4108 	    bool volatile_p = TREE_THIS_VOLATILE (*from_p);
4109 	    tree t = gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0));
4110 	    if (t)
4111 	      {
4112 		if (TREE_THIS_VOLATILE (t) != volatile_p)
4113 		  {
4114 		    if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
4115 		      t = build_simple_mem_ref_loc (EXPR_LOCATION (*from_p),
4116 						    build_fold_addr_expr (t));
4117 		    if (REFERENCE_CLASS_P (t))
4118 		      TREE_THIS_VOLATILE (t) = volatile_p;
4119 		  }
4120 		*from_p = t;
4121 		ret = GS_OK;
4122 		changed = true;
4123 	      }
4124 	    break;
4125 	  }
4126 
4127 	case TARGET_EXPR:
4128 	  {
4129 	    /* If we are initializing something from a TARGET_EXPR, strip the
4130 	       TARGET_EXPR and initialize it directly, if possible.  This can't
4131 	       be done if the initializer is void, since that implies that the
4132 	       temporary is set in some non-trivial way.
4133 
4134 	       ??? What about code that pulls out the temp and uses it
4135 	       elsewhere? I think that such code never uses the TARGET_EXPR as
4136 	       an initializer.  If I'm wrong, we'll die because the temp won't
4137 	       have any RTL.  In that case, I guess we'll need to replace
4138 	       references somehow.  */
4139 	    tree init = TARGET_EXPR_INITIAL (*from_p);
4140 
4141 	    if (init
4142 		&& !VOID_TYPE_P (TREE_TYPE (init)))
4143 	      {
4144 		*from_p = init;
4145 		ret = GS_OK;
4146 		changed = true;
4147 	      }
4148 	  }
4149 	  break;
4150 
4151 	case COMPOUND_EXPR:
4152 	  /* Remove any COMPOUND_EXPR in the RHS so the following cases will be
4153 	     caught.  */
4154 	  gimplify_compound_expr (from_p, pre_p, true);
4155 	  ret = GS_OK;
4156 	  changed = true;
4157 	  break;
4158 
4159 	case CONSTRUCTOR:
4160 	  /* If we already made some changes, let the front end have a
4161 	     crack at this before we break it down.  */
4162 	  if (ret != GS_UNHANDLED)
4163 	    break;
4164 	  /* If we're initializing from a CONSTRUCTOR, break this into
4165 	     individual MODIFY_EXPRs.  */
4166 	  return gimplify_init_constructor (expr_p, pre_p, post_p, want_value,
4167 					    false);
4168 
4169 	case COND_EXPR:
4170 	  /* If we're assigning to a non-register type, push the assignment
4171 	     down into the branches.  This is mandatory for ADDRESSABLE types,
4172 	     since we cannot generate temporaries for such, but it saves a
4173 	     copy in other cases as well.  */
4174 	  if (!is_gimple_reg_type (TREE_TYPE (*from_p)))
4175 	    {
4176 	      /* This code should mirror the code in gimplify_cond_expr. */
4177 	      enum tree_code code = TREE_CODE (*expr_p);
4178 	      tree cond = *from_p;
4179 	      tree result = *to_p;
4180 
4181 	      ret = gimplify_expr (&result, pre_p, post_p,
4182 				   is_gimple_lvalue, fb_lvalue);
4183 	      if (ret != GS_ERROR)
4184 		ret = GS_OK;
4185 
4186 	      if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node)
4187 		TREE_OPERAND (cond, 1)
4188 		  = build2 (code, void_type_node, result,
4189 			    TREE_OPERAND (cond, 1));
4190 	      if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
4191 		TREE_OPERAND (cond, 2)
4192 		  = build2 (code, void_type_node, unshare_expr (result),
4193 			    TREE_OPERAND (cond, 2));
4194 
4195 	      TREE_TYPE (cond) = void_type_node;
4196 	      recalculate_side_effects (cond);
4197 
4198 	      if (want_value)
4199 		{
4200 		  gimplify_and_add (cond, pre_p);
4201 		  *expr_p = unshare_expr (result);
4202 		}
4203 	      else
4204 		*expr_p = cond;
4205 	      return ret;
4206 	    }
4207 	  break;
4208 
4209 	case CALL_EXPR:
4210 	  /* For calls that return in memory, give *to_p as the CALL_EXPR's
4211 	     return slot so that we don't generate a temporary.  */
4212 	  if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p)
4213 	      && aggregate_value_p (*from_p, *from_p))
4214 	    {
4215 	      bool use_target;
4216 
4217 	      if (!(rhs_predicate_for (*to_p))(*from_p))
4218 		/* If we need a temporary, *to_p isn't accurate.  */
4219 		use_target = false;
4220 	      /* It's OK to use the return slot directly unless it's an NRV. */
4221 	      else if (TREE_CODE (*to_p) == RESULT_DECL
4222 		       && DECL_NAME (*to_p) == NULL_TREE
4223 		       && needs_to_live_in_memory (*to_p))
4224 		use_target = true;
4225 	      else if (is_gimple_reg_type (TREE_TYPE (*to_p))
4226 		       || (DECL_P (*to_p) && DECL_REGISTER (*to_p)))
4227 		/* Don't force regs into memory.  */
4228 		use_target = false;
4229 	      else if (TREE_CODE (*expr_p) == INIT_EXPR)
4230 		/* It's OK to use the target directly if it's being
4231 		   initialized. */
4232 		use_target = true;
4233 	      else if (variably_modified_type_p (TREE_TYPE (*to_p), NULL_TREE))
4234 		/* Always use the target and thus RSO for variable-sized types.
4235 		   GIMPLE cannot deal with a variable-sized assignment
4236 		   embedded in a call statement.  */
4237 		use_target = true;
4238 	      else if (TREE_CODE (*to_p) != SSA_NAME
4239 		      && (!is_gimple_variable (*to_p)
4240 			  || needs_to_live_in_memory (*to_p)))
4241 		/* Don't use the original target if it's already addressable;
4242 		   if its address escapes, and the called function uses the
4243 		   NRV optimization, a conforming program could see *to_p
4244 		   change before the called function returns; see c++/19317.
4245 		   When optimizing, the return_slot pass marks more functions
4246 		   as safe after we have escape info.  */
4247 		use_target = false;
4248 	      else
4249 		use_target = true;
4250 
4251 	      if (use_target)
4252 		{
4253 		  CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1;
4254 		  mark_addressable (*to_p);
4255 		}
4256 	    }
4257 	  break;
4258 
4259 	case WITH_SIZE_EXPR:
4260 	  /* Likewise for calls that return an aggregate of non-constant size,
4261 	     since we would not be able to generate a temporary at all.  */
4262 	  if (TREE_CODE (TREE_OPERAND (*from_p, 0)) == CALL_EXPR)
4263 	    {
4264 	      *from_p = TREE_OPERAND (*from_p, 0);
4265 	      /* We don't change ret in this case because the
4266 		 WITH_SIZE_EXPR might have been added in
4267 		 gimplify_modify_expr, so returning GS_OK would lead to an
4268 		 infinite loop.  */
4269 	      changed = true;
4270 	    }
4271 	  break;
4272 
4273 	  /* If we're initializing from a container, push the initialization
4274 	     inside it.  */
4275 	case CLEANUP_POINT_EXPR:
4276 	case BIND_EXPR:
4277 	case STATEMENT_LIST:
4278 	  {
4279 	    tree wrap = *from_p;
4280 	    tree t;
4281 
4282 	    ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_min_lval,
4283 				 fb_lvalue);
4284 	    if (ret != GS_ERROR)
4285 	      ret = GS_OK;
4286 
4287 	    t = voidify_wrapper_expr (wrap, *expr_p);
4288 	    gcc_assert (t == *expr_p);
4289 
4290 	    if (want_value)
4291 	      {
4292 		gimplify_and_add (wrap, pre_p);
4293 		*expr_p = unshare_expr (*to_p);
4294 	      }
4295 	    else
4296 	      *expr_p = wrap;
4297 	    return GS_OK;
4298 	  }
4299 
4300 	case COMPOUND_LITERAL_EXPR:
4301 	  {
4302 	    tree complit = TREE_OPERAND (*expr_p, 1);
4303 	    tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (complit);
4304 	    tree decl = DECL_EXPR_DECL (decl_s);
4305 	    tree init = DECL_INITIAL (decl);
4306 
4307 	    /* struct T x = (struct T) { 0, 1, 2 } can be optimized
4308 	       into struct T x = { 0, 1, 2 } if the address of the
4309 	       compound literal has never been taken.  */
4310 	    if (!TREE_ADDRESSABLE (complit)
4311 		&& !TREE_ADDRESSABLE (decl)
4312 		&& init)
4313 	      {
4314 		*expr_p = copy_node (*expr_p);
4315 		TREE_OPERAND (*expr_p, 1) = init;
4316 		return GS_OK;
4317 	      }
4318 	  }
4319 
4320 	default:
4321 	  break;
4322 	}
4323     }
4324   while (changed);
4325 
4326   return ret;
4327 }
4328 
4329 
4330 /* Return true if T looks like a valid GIMPLE statement.  */
4331 
4332 static bool
4333 is_gimple_stmt (tree t)
4334 {
4335   const enum tree_code code = TREE_CODE (t);
4336 
4337   switch (code)
4338     {
4339     case NOP_EXPR:
4340       /* The only valid NOP_EXPR is the empty statement.  */
4341       return IS_EMPTY_STMT (t);
4342 
4343     case BIND_EXPR:
4344     case COND_EXPR:
4345       /* These are only valid if they're void.  */
4346       return TREE_TYPE (t) == NULL || VOID_TYPE_P (TREE_TYPE (t));
4347 
4348     case SWITCH_EXPR:
4349     case GOTO_EXPR:
4350     case RETURN_EXPR:
4351     case LABEL_EXPR:
4352     case CASE_LABEL_EXPR:
4353     case TRY_CATCH_EXPR:
4354     case TRY_FINALLY_EXPR:
4355     case EH_FILTER_EXPR:
4356     case CATCH_EXPR:
4357     case ASM_EXPR:
4358     case STATEMENT_LIST:
4359     case OMP_PARALLEL:
4360     case OMP_FOR:
4361     case OMP_SIMD:
4362     case CILK_SIMD:
4363     case OMP_DISTRIBUTE:
4364     case OMP_SECTIONS:
4365     case OMP_SECTION:
4366     case OMP_SINGLE:
4367     case OMP_MASTER:
4368     case OMP_TASKGROUP:
4369     case OMP_ORDERED:
4370     case OMP_CRITICAL:
4371     case OMP_TASK:
4372       /* These are always void.  */
4373       return true;
4374 
4375     case CALL_EXPR:
4376     case MODIFY_EXPR:
4377     case PREDICT_EXPR:
4378       /* These are valid regardless of their type.  */
4379       return true;
4380 
4381     default:
4382       return false;
4383     }
4384 }
4385 
4386 
4387 /* Promote partial stores to COMPLEX variables to total stores.  *EXPR_P is
4388    a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a variable with
4389    DECL_GIMPLE_REG_P set.
4390 
4391    IMPORTANT NOTE: This promotion is performed by introducing a load of the
4392    other, unmodified part of the complex object just before the total store.
4393    As a consequence, if the object is still uninitialized, an undefined value
4394    will be loaded into a register, which may result in a spurious exception
4395    if the register is floating-point and the value happens to be a signaling
4396    NaN for example.  Then the fully-fledged complex operations lowering pass
4397    followed by a DCE pass are necessary in order to fix things up.  */
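/* Sketch: for a complex variable `c' with DECL_GIMPLE_REG_P set, the
   partial store

       __real c = r;

   becomes a total store, roughly

       D.1 = __imag c;
       c = COMPLEX_EXPR <r, D.1>;  */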
4398 
4399 static enum gimplify_status
4400 gimplify_modify_expr_complex_part (tree *expr_p, gimple_seq *pre_p,
4401                                    bool want_value)
4402 {
4403   enum tree_code code, ocode;
4404   tree lhs, rhs, new_rhs, other, realpart, imagpart;
4405 
4406   lhs = TREE_OPERAND (*expr_p, 0);
4407   rhs = TREE_OPERAND (*expr_p, 1);
4408   code = TREE_CODE (lhs);
4409   lhs = TREE_OPERAND (lhs, 0);
4410 
4411   ocode = code == REALPART_EXPR ? IMAGPART_EXPR : REALPART_EXPR;
4412   other = build1 (ocode, TREE_TYPE (rhs), lhs);
4413   TREE_NO_WARNING (other) = 1;
4414   other = get_formal_tmp_var (other, pre_p);
4415 
4416   realpart = code == REALPART_EXPR ? rhs : other;
4417   imagpart = code == REALPART_EXPR ? other : rhs;
4418 
4419   if (TREE_CONSTANT (realpart) && TREE_CONSTANT (imagpart))
4420     new_rhs = build_complex (TREE_TYPE (lhs), realpart, imagpart);
4421   else
4422     new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart);
4423 
4424   gimplify_seq_add_stmt (pre_p, gimple_build_assign (lhs, new_rhs));
4425   *expr_p = (want_value) ? rhs : NULL_TREE;
4426 
4427   return GS_ALL_DONE;
4428 }
4429 
4430 /* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.
4431 
4432       modify_expr
4433 	      : varname '=' rhs
4434 	      | '*' ID '=' rhs
4435 
4436     PRE_P points to the list where side effects that must happen before
4437 	*EXPR_P should be stored.
4438 
4439     POST_P points to the list where side effects that must happen after
4440 	*EXPR_P should be stored.
4441 
4442     WANT_VALUE is nonzero iff we want to use the value of this expression
4443 	in another expression.  */
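/* For instance (sketch), gimplifying

       a = foo (x) + 1;

   emits the call separately and assigns through a temporary, roughly

       D.1 = foo (x);
       a = D.1 + 1;

   whereas a plain `a = foo (x);' becomes a single GIMPLE_CALL with `a'
   as its LHS, as explained in the body below.  */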
4444 
4445 static enum gimplify_status
4446 gimplify_modify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
4447 		      bool want_value)
4448 {
4449   tree *from_p = &TREE_OPERAND (*expr_p, 1);
4450   tree *to_p = &TREE_OPERAND (*expr_p, 0);
4451   enum gimplify_status ret = GS_UNHANDLED;
4452   gimple assign;
4453   location_t loc = EXPR_LOCATION (*expr_p);
4454   gimple_stmt_iterator gsi;
4455 
4456   gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR
4457 	      || TREE_CODE (*expr_p) == INIT_EXPR);
4458 
4459   /* Trying to simplify a clobber using normal logic doesn't work,
4460      so handle it here.  */
4461   if (TREE_CLOBBER_P (*from_p))
4462     {
4463       ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
4464       if (ret == GS_ERROR)
4465 	return ret;
4466       gcc_assert (!want_value
4467 		  && (TREE_CODE (*to_p) == VAR_DECL
4468 		      || TREE_CODE (*to_p) == MEM_REF));
4469       gimplify_seq_add_stmt (pre_p, gimple_build_assign (*to_p, *from_p));
4470       *expr_p = NULL;
4471       return GS_ALL_DONE;
4472     }
4473 
4474   /* Insert pointer conversions required by the middle-end that are not
4475      required by the frontend.  This fixes middle-end type checking for
4476      test cases such as gcc.dg/redecl-6.c.  */
4477   if (POINTER_TYPE_P (TREE_TYPE (*to_p)))
4478     {
4479       STRIP_USELESS_TYPE_CONVERSION (*from_p);
4480       if (!useless_type_conversion_p (TREE_TYPE (*to_p), TREE_TYPE (*from_p)))
4481 	*from_p = fold_convert_loc (loc, TREE_TYPE (*to_p), *from_p);
4482     }
4483 
4484   /* See if any simplifications can be done based on what the RHS is.  */
4485   ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
4486 				  want_value);
4487   if (ret != GS_UNHANDLED)
4488     return ret;
4489 
4490   /* For zero sized types only gimplify the left hand side and right hand
4491      side as statements and throw away the assignment.  Do this after
4492      gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable
4493      types properly.  */
4494   if (zero_sized_type (TREE_TYPE (*from_p)) && !want_value)
4495     {
4496       gimplify_stmt (from_p, pre_p);
4497       gimplify_stmt (to_p, pre_p);
4498       *expr_p = NULL_TREE;
4499       return GS_ALL_DONE;
4500     }
4501 
4502   /* If the value being copied is of variable width, compute the length
4503      of the copy into a WITH_SIZE_EXPR.   Note that we need to do this
4504      before gimplifying any of the operands so that we can resolve any
4505      PLACEHOLDER_EXPRs in the size.  Also note that the RTL expander uses
4506      the size of the expression to be copied, not of the destination, so
4507      that is what we must do here.  */
4508   maybe_with_size_expr (from_p);
4509 
4510   ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
4511   if (ret == GS_ERROR)
4512     return ret;
4513 
4514   /* As a special case, we have to temporarily allow for assignments
4515      with a CALL_EXPR on the RHS.  Since in GIMPLE a function call is
4516      a toplevel statement, when gimplifying the GENERIC expression
4517      MODIFY_EXPR <a, CALL_EXPR <foo>>, we cannot create the tuple
4518      GIMPLE_ASSIGN <a, GIMPLE_CALL <foo>>.
4519 
4520      Instead, we need to create the tuple GIMPLE_CALL <a, foo>.  To
4521      prevent gimplify_expr from trying to create a new temporary for
4522      foo's LHS, we tell it that it should only gimplify until it
4523      reaches the CALL_EXPR.  On return from gimplify_expr, the newly
4524      created GIMPLE_CALL <foo> will be the last statement in *PRE_P
4525      and all we need to do here is set 'a' to be its LHS.  */
4526   ret = gimplify_expr (from_p, pre_p, post_p, rhs_predicate_for (*to_p),
4527 		       fb_rvalue);
4528   if (ret == GS_ERROR)
4529     return ret;
4530 
4531   /* Now see if the above changed *from_p to something we handle specially.  */
4532   ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
4533 				  want_value);
4534   if (ret != GS_UNHANDLED)
4535     return ret;
4536 
4537   /* If we've got a variable sized assignment between two lvalues (i.e. does
4538      not involve a call), then we can make things a bit more straightforward
4539      by converting the assignment to memcpy or memset.  */
4540   if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
4541     {
4542       tree from = TREE_OPERAND (*from_p, 0);
4543       tree size = TREE_OPERAND (*from_p, 1);
4544 
4545       if (TREE_CODE (from) == CONSTRUCTOR)
4546 	return gimplify_modify_expr_to_memset (expr_p, size, want_value, pre_p);
4547 
4548       if (is_gimple_addressable (from))
4549 	{
4550 	  *from_p = from;
4551 	  return gimplify_modify_expr_to_memcpy (expr_p, size, want_value,
4552 	      					 pre_p);
4553 	}
4554     }
4555 
4556   /* Transform partial stores to non-addressable complex variables into
4557      total stores.  This allows us to use real instead of virtual operands
4558      for these variables, which improves optimization.  */
4559   if ((TREE_CODE (*to_p) == REALPART_EXPR
4560        || TREE_CODE (*to_p) == IMAGPART_EXPR)
4561       && is_gimple_reg (TREE_OPERAND (*to_p, 0)))
4562     return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value);
4563 
4564   /* Try to alleviate the effects of the gimplification creating artificial
4565      temporaries (see for example is_gimple_reg_rhs) on the debug info.  */
4566   if (!gimplify_ctxp->into_ssa
4567       && TREE_CODE (*from_p) == VAR_DECL
4568       && DECL_IGNORED_P (*from_p)
4569       && DECL_P (*to_p)
4570       && !DECL_IGNORED_P (*to_p))
4571     {
4572       if (!DECL_NAME (*from_p) && DECL_NAME (*to_p))
4573 	DECL_NAME (*from_p)
4574 	  = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p)));
4575       DECL_HAS_DEBUG_EXPR_P (*from_p) = 1;
4576       SET_DECL_DEBUG_EXPR (*from_p, *to_p);
4577    }
4578 
4579   if (want_value && TREE_THIS_VOLATILE (*to_p))
4580     *from_p = get_initialized_tmp_var (*from_p, pre_p, post_p);
4581 
4582   if (TREE_CODE (*from_p) == CALL_EXPR)
4583     {
4584       /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL
4585 	 instead of a GIMPLE_ASSIGN.  */
4586       tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*from_p));
4587       CALL_EXPR_FN (*from_p) = TREE_OPERAND (CALL_EXPR_FN (*from_p), 0);
4588       STRIP_USELESS_TYPE_CONVERSION (CALL_EXPR_FN (*from_p));
4589       tree fndecl = get_callee_fndecl (*from_p);
4590       if (fndecl
4591 	  && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
4592 	  && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
4593 	  && call_expr_nargs (*from_p) == 3)
4594 	assign = gimple_build_call_internal (IFN_BUILTIN_EXPECT, 3,
4595 					     CALL_EXPR_ARG (*from_p, 0),
4596 					     CALL_EXPR_ARG (*from_p, 1),
4597 					     CALL_EXPR_ARG (*from_p, 2));
4598       else
4599 	{
4600 	  assign = gimple_build_call_from_tree (*from_p);
4601 	  gimple_call_set_fntype (assign, TREE_TYPE (fnptrtype));
4602 	}
4603       notice_special_calls (assign);
4604       if (!gimple_call_noreturn_p (assign))
4605 	gimple_call_set_lhs (assign, *to_p);
4606     }
4607   else
4608     {
4609       assign = gimple_build_assign (*to_p, *from_p);
4610       gimple_set_location (assign, EXPR_LOCATION (*expr_p));
4611     }
4612 
4613   if (gimplify_ctxp->into_ssa && is_gimple_reg (*to_p))
4614     {
4615       /* We should have got an SSA name from the start.  */
4616       gcc_assert (TREE_CODE (*to_p) == SSA_NAME);
4617     }
4618 
4619   gimplify_seq_add_stmt (pre_p, assign);
4620   gsi = gsi_last (*pre_p);
4621   maybe_fold_stmt (&gsi);
4622 
4623   if (want_value)
4624     {
4625       *expr_p = TREE_THIS_VOLATILE (*to_p) ? *from_p : unshare_expr (*to_p);
4626       return GS_OK;
4627     }
4628   else
4629     *expr_p = NULL;
4630 
4631   return GS_ALL_DONE;
4632 }
4633 
4634 /* Gimplify a comparison between two variable-sized objects.  Do this
4635    with a call to BUILT_IN_MEMCMP.  */
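/* Illustrative sketch of the rewrite (tree level, not source code): an
   equality test

       OP0 == OP1

   on two objects of variable size becomes

       __builtin_memcmp (&OP0, &OP1, TYPE_SIZE_UNIT (TREE_TYPE (OP0))) == 0

   (and likewise != maps to a != 0 test), with any PLACEHOLDER_EXPRs in the
   size substituted from OP0.  */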
4636 
4637 static enum gimplify_status
4638 gimplify_variable_sized_compare (tree *expr_p)
4639 {
4640   location_t loc = EXPR_LOCATION (*expr_p);
4641   tree op0 = TREE_OPERAND (*expr_p, 0);
4642   tree op1 = TREE_OPERAND (*expr_p, 1);
4643   tree t, arg, dest, src, expr;
4644 
4645   arg = TYPE_SIZE_UNIT (TREE_TYPE (op0));
4646   arg = unshare_expr (arg);
4647   arg = SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg, op0);
4648   src = build_fold_addr_expr_loc (loc, op1);
4649   dest = build_fold_addr_expr_loc (loc, op0);
4650   t = builtin_decl_implicit (BUILT_IN_MEMCMP);
4651   t = build_call_expr_loc (loc, t, 3, dest, src, arg);
4652 
4653   expr
4654     = build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node);
4655   SET_EXPR_LOCATION (expr, loc);
4656   *expr_p = expr;
4657 
4658   return GS_OK;
4659 }
4660 
4661 /* Gimplify a comparison between two aggregate objects of integral scalar
4662    mode as a comparison between the bitwise equivalent scalar values.  */
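/* Sketch: for aggregates whose TYPE_MODE is an integer mode (e.g. a small
   struct that fits in a machine word), a comparison

       OP0 == OP1

   is rewritten as

       VIEW_CONVERT_EXPR<scalar_type> (OP0) == VIEW_CONVERT_EXPR<scalar_type> (OP1)

   where scalar_type is the integer type the language hook returns for that
   mode, turning the test into a plain scalar comparison.  */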
4663 
4664 static enum gimplify_status
4665 gimplify_scalar_mode_aggregate_compare (tree *expr_p)
4666 {
4667   location_t loc = EXPR_LOCATION (*expr_p);
4668   tree op0 = TREE_OPERAND (*expr_p, 0);
4669   tree op1 = TREE_OPERAND (*expr_p, 1);
4670 
4671   tree type = TREE_TYPE (op0);
4672   tree scalar_type = lang_hooks.types.type_for_mode (TYPE_MODE (type), 1);
4673 
4674   op0 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op0);
4675   op1 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op1);
4676 
4677   *expr_p
4678     = fold_build2_loc (loc, TREE_CODE (*expr_p), TREE_TYPE (*expr_p), op0, op1);
4679 
4680   return GS_OK;
4681 }
4682 
4683 /* Gimplify an expression sequence.  This function gimplifies each
4684    expression and rewrites the original expression with the last
4685    expression of the sequence in GIMPLE form.
4686 
4687    PRE_P points to the list where the side effects for all the
4688        expressions in the sequence will be emitted.
4689 
4690    WANT_VALUE is true when the result of the last COMPOUND_EXPR is used.  */
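/* For example, in the C expression "a (), b (), c ()" the calls to a and b
   are emitted to *PRE_P as statements and the call to c is left in *EXPR_P
   as the value of the whole sequence; if WANT_VALUE is false, it too is
   gimplified as a statement.  */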
4691 
4692 static enum gimplify_status
4693 gimplify_compound_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
4694 {
4695   tree t = *expr_p;
4696 
4697   do
4698     {
4699       tree *sub_p = &TREE_OPERAND (t, 0);
4700 
4701       if (TREE_CODE (*sub_p) == COMPOUND_EXPR)
4702 	gimplify_compound_expr (sub_p, pre_p, false);
4703       else
4704 	gimplify_stmt (sub_p, pre_p);
4705 
4706       t = TREE_OPERAND (t, 1);
4707     }
4708   while (TREE_CODE (t) == COMPOUND_EXPR);
4709 
4710   *expr_p = t;
4711   if (want_value)
4712     return GS_OK;
4713   else
4714     {
4715       gimplify_stmt (expr_p, pre_p);
4716       return GS_ALL_DONE;
4717     }
4718 }
4719 
4720 /* Gimplify a SAVE_EXPR node.  EXPR_P points to the expression to
4721    gimplify.  After gimplification, EXPR_P will point to a new temporary
4722    that holds the original value of the SAVE_EXPR node.
4723 
4724    PRE_P points to the list where side effects that must happen before
4725    *EXPR_P should be stored.  */
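/* Note that a SAVE_EXPR node is shared among all of its uses: the first time
   it is gimplified, its operand is evaluated into a temporary and the node
   is marked SAVE_EXPR_RESOLVED_P; every later occurrence simply reuses that
   temporary.  */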
4726 
4727 static enum gimplify_status
4728 gimplify_save_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4729 {
4730   enum gimplify_status ret = GS_ALL_DONE;
4731   tree val;
4732 
4733   gcc_assert (TREE_CODE (*expr_p) == SAVE_EXPR);
4734   val = TREE_OPERAND (*expr_p, 0);
4735 
4736   /* If the SAVE_EXPR has not been resolved, then evaluate it once.  */
4737   if (!SAVE_EXPR_RESOLVED_P (*expr_p))
4738     {
4739       /* The operand may be a void-valued expression, such as the SAVE_EXPRs
4740 	 generated by the Java frontend for class initialization.  It is
4741 	 executed only for its side effects.  */
4742       if (TREE_TYPE (val) == void_type_node)
4743 	{
4744 	  ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
4745 			       is_gimple_stmt, fb_none);
4746 	  val = NULL;
4747 	}
4748       else
4749 	val = get_initialized_tmp_var (val, pre_p, post_p);
4750 
4751       TREE_OPERAND (*expr_p, 0) = val;
4752       SAVE_EXPR_RESOLVED_P (*expr_p) = 1;
4753     }
4754 
4755   *expr_p = val;
4756 
4757   return ret;
4758 }
4759 
4760 /* Rewrite the ADDR_EXPR node pointed to by EXPR_P
4761 
4762       unary_expr
4763 	      : ...
4764 	      | '&' varname
4765 	      ...
4766 
4767     PRE_P points to the list where side effects that must happen before
4768 	*EXPR_P should be stored.
4769 
4770     POST_P points to the list where side effects that must happen after
4771 	*EXPR_P should be stored.  */
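/* For instance, '&*ptr' simplifies to plain 'ptr' (with a qualification
   conversion added back if gimplification dropped one), and the address of
   a VIEW_CONVERT_EXPR becomes the address of its operand converted to the
   type of the ADDR_EXPR.  */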
4772 
4773 static enum gimplify_status
4774 gimplify_addr_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4775 {
4776   tree expr = *expr_p;
4777   tree op0 = TREE_OPERAND (expr, 0);
4778   enum gimplify_status ret;
4779   location_t loc = EXPR_LOCATION (*expr_p);
4780 
4781   switch (TREE_CODE (op0))
4782     {
4783     case INDIRECT_REF:
4784     do_indirect_ref:
4785       /* Check if we are dealing with an expression of the form '&*ptr'.
4786 	 While the front end folds away '&*ptr' into 'ptr', these
4787 	 expressions may be generated internally by the compiler (e.g.,
4788 	 builtins like __builtin_va_end).  */
4789       /* Caution: the silent array decomposition semantics we allow for
4790 	 ADDR_EXPR means we can't always discard the pair.  */
4791       /* Gimplification of the ADDR_EXPR operand may drop
4792 	 cv-qualification conversions, so make sure we add them if
4793 	 needed.  */
4794       {
4795 	tree op00 = TREE_OPERAND (op0, 0);
4796 	tree t_expr = TREE_TYPE (expr);
4797 	tree t_op00 = TREE_TYPE (op00);
4798 
4799         if (!useless_type_conversion_p (t_expr, t_op00))
4800 	  op00 = fold_convert_loc (loc, TREE_TYPE (expr), op00);
4801         *expr_p = op00;
4802         ret = GS_OK;
4803       }
4804       break;
4805 
4806     case VIEW_CONVERT_EXPR:
4807       /* Take the address of our operand and then convert it to the type of
4808 	 this ADDR_EXPR.
4809 
4810 	 ??? The interactions of VIEW_CONVERT_EXPR and aliasing is not at
4811 	 all clear.  The impact of this transformation is even less clear.  */
4812 
4813       /* If the operand is a useless conversion, look through it.  Doing so
4814 	 guarantees that the ADDR_EXPR and its operand will remain of the
4815 	 same type.  */
4816       if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0, 0)))
4817 	op0 = TREE_OPERAND (op0, 0);
4818 
4819       *expr_p = fold_convert_loc (loc, TREE_TYPE (expr),
4820 				  build_fold_addr_expr_loc (loc,
4821 							TREE_OPERAND (op0, 0)));
4822       ret = GS_OK;
4823       break;
4824 
4825     default:
4826       /* We use fb_either here because the C frontend sometimes takes
4827 	 the address of a call that returns a struct; see
4828 	 gcc.dg/c99-array-lval-1.c.  The gimplifier will correctly make
4829 	 the implied temporary explicit.  */
4830 
4831       /* Make the operand addressable.  */
4832       ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, post_p,
4833 			   is_gimple_addressable, fb_either);
4834       if (ret == GS_ERROR)
4835 	break;
4836 
4837       /* Then mark it.  Beware that it may not be possible to do so directly
4838 	 if a temporary has been created by the gimplification.  */
4839       prepare_gimple_addressable (&TREE_OPERAND (expr, 0), pre_p);
4840 
4841       op0 = TREE_OPERAND (expr, 0);
4842 
4843       /* For various reasons, the gimplification of the expression
4844 	 may have made a new INDIRECT_REF.  */
4845       if (TREE_CODE (op0) == INDIRECT_REF)
4846 	goto do_indirect_ref;
4847 
4848       mark_addressable (TREE_OPERAND (expr, 0));
4849 
4850       /* The FEs may end up building ADDR_EXPRs early on a decl with
4851 	 an incomplete type.  Re-build ADDR_EXPRs in canonical form
4852 	 here.  */
4853       if (!types_compatible_p (TREE_TYPE (op0), TREE_TYPE (TREE_TYPE (expr))))
4854 	*expr_p = build_fold_addr_expr (op0);
4855 
4856       /* Make sure TREE_CONSTANT and TREE_SIDE_EFFECTS are set properly.  */
4857       recompute_tree_invariant_for_addr_expr (*expr_p);
4858 
4859       /* If we re-built the ADDR_EXPR add a conversion to the original type
4860          if required.  */
4861       if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
4862 	*expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
4863 
4864       break;
4865     }
4866 
4867   return ret;
4868 }
4869 
4870 /* Gimplify the operands of an ASM_EXPR.  Input operands should be a gimple
4871    value; output operands should be a gimple lvalue.  */
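/* In particular, an in/out ("+") output operand such as

       asm ("..." : "+r" (x));

   is split below into a plain output plus a matching input, conceptually

       asm ("..." : "=r" (x) : "0" (x));

   so the rest of the compiler only ever sees separate input and output
   operands.  */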
4872 
4873 static enum gimplify_status
4874 gimplify_asm_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
4875 {
4876   tree expr;
4877   int noutputs;
4878   const char **oconstraints;
4879   int i;
4880   tree link;
4881   const char *constraint;
4882   bool allows_mem, allows_reg, is_inout;
4883   enum gimplify_status ret, tret;
4884   gimple stmt;
4885   vec<tree, va_gc> *inputs;
4886   vec<tree, va_gc> *outputs;
4887   vec<tree, va_gc> *clobbers;
4888   vec<tree, va_gc> *labels;
4889   tree link_next;
4890 
4891   expr = *expr_p;
4892   noutputs = list_length (ASM_OUTPUTS (expr));
4893   oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));
4894 
4895   inputs = NULL;
4896   outputs = NULL;
4897   clobbers = NULL;
4898   labels = NULL;
4899 
4900   ret = GS_ALL_DONE;
4901   link_next = NULL_TREE;
4902   for (i = 0, link = ASM_OUTPUTS (expr); link; ++i, link = link_next)
4903     {
4904       bool ok;
4905       size_t constraint_len;
4906 
4907       link_next = TREE_CHAIN (link);
4908 
4909       oconstraints[i]
4910 	= constraint
4911 	= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
4912       constraint_len = strlen (constraint);
4913       if (constraint_len == 0)
4914         continue;
4915 
4916       ok = parse_output_constraint (&constraint, i, 0, 0,
4917 				    &allows_mem, &allows_reg, &is_inout);
4918       if (!ok)
4919 	{
4920 	  ret = GS_ERROR;
4921 	  is_inout = false;
4922 	}
4923 
4924       if (!allows_reg && allows_mem)
4925 	mark_addressable (TREE_VALUE (link));
4926 
4927       tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
4928 			    is_inout ? is_gimple_min_lval : is_gimple_lvalue,
4929 			    fb_lvalue | fb_mayfail);
4930       if (tret == GS_ERROR)
4931 	{
4932 	  error ("invalid lvalue in asm output %d", i);
4933 	  ret = tret;
4934 	}
4935 
4936       vec_safe_push (outputs, link);
4937       TREE_CHAIN (link) = NULL_TREE;
4938 
4939       if (is_inout)
4940 	{
4941 	  /* An input/output operand.  To give the optimizers more
4942 	     flexibility, split it into separate input and output
4943  	     operands.  */
4944 	  tree input;
4945 	  char buf[10];
4946 
4947 	  /* Turn the in/out constraint into an output constraint.  */
4948 	  char *p = xstrdup (constraint);
4949 	  p[0] = '=';
4950 	  TREE_VALUE (TREE_PURPOSE (link)) = build_string (constraint_len, p);
4951 
4952 	  /* And add a matching input constraint.  */
4953 	  if (allows_reg)
4954 	    {
4955 	      sprintf (buf, "%d", i);
4956 
4957 	      /* If there are multiple alternatives in the constraint,
4958 		 handle each of them individually.  Those that allow register
4959 		 will be replaced with operand number, the others will stay
4960 		 unchanged.  */
4961 	      if (strchr (p, ',') != NULL)
4962 		{
4963 		  size_t len = 0, buflen = strlen (buf);
4964 		  char *beg, *end, *str, *dst;
4965 
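		  /* First pass: compute an upper bound on the length of the
		     rewritten constraint string.  */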
4966 		  for (beg = p + 1;;)
4967 		    {
4968 		      end = strchr (beg, ',');
4969 		      if (end == NULL)
4970 			end = strchr (beg, '\0');
4971 		      if ((size_t) (end - beg) < buflen)
4972 			len += buflen + 1;
4973 		      else
4974 			len += end - beg + 1;
4975 		      if (*end)
4976 			beg = end + 1;
4977 		      else
4978 			break;
4979 		    }
4980 
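		  /* Second pass: build the rewritten constraint in STR,
		     replacing register alternatives with the operand
		     number.  */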
4981 		  str = (char *) alloca (len);
4982 		  for (beg = p + 1, dst = str;;)
4983 		    {
4984 		      const char *tem;
4985 		      bool mem_p, reg_p, inout_p;
4986 
4987 		      end = strchr (beg, ',');
4988 		      if (end)
4989 			*end = '\0';
4990 		      beg[-1] = '=';
4991 		      tem = beg - 1;
4992 		      parse_output_constraint (&tem, i, 0, 0,
4993 					       &mem_p, &reg_p, &inout_p);
4994 		      if (dst != str)
4995 			*dst++ = ',';
4996 		      if (reg_p)
4997 			{
4998 			  memcpy (dst, buf, buflen);
4999 			  dst += buflen;
5000 			}
5001 		      else
5002 			{
5003 			  if (end)
5004 			    len = end - beg;
5005 			  else
5006 			    len = strlen (beg);
5007 			  memcpy (dst, beg, len);
5008 			  dst += len;
5009 			}
5010 		      if (end)
5011 			beg = end + 1;
5012 		      else
5013 			break;
5014 		    }
5015 		  *dst = '\0';
5016 		  input = build_string (dst - str, str);
5017 		}
5018 	      else
5019 		input = build_string (strlen (buf), buf);
5020 	    }
5021 	  else
5022 	    input = build_string (constraint_len - 1, constraint + 1);
5023 
5024 	  free (p);
5025 
5026 	  input = build_tree_list (build_tree_list (NULL_TREE, input),
5027 				   unshare_expr (TREE_VALUE (link)));
5028 	  ASM_INPUTS (expr) = chainon (ASM_INPUTS (expr), input);
5029 	}
5030     }
5031 
5032   link_next = NULL_TREE;
5033   for (link = ASM_INPUTS (expr); link; ++i, link = link_next)
5034     {
5035       link_next = TREE_CHAIN (link);
5036       constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
5037       parse_input_constraint (&constraint, 0, 0, noutputs, 0,
5038 			      oconstraints, &allows_mem, &allows_reg);
5039 
5040       /* If we can't make copies, we can only accept memory.  */
5041       if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (link))))
5042 	{
5043 	  if (allows_mem)
5044 	    allows_reg = 0;
5045 	  else
5046 	    {
5047 	      error ("impossible constraint in %<asm%>");
5048 	      error ("non-memory input %d must stay in memory", i);
5049 	      return GS_ERROR;
5050 	    }
5051 	}
5052 
5053       /* If the operand is a memory input, it should be an lvalue.  */
5054       if (!allows_reg && allows_mem)
5055 	{
5056 	  tree inputv = TREE_VALUE (link);
5057 	  STRIP_NOPS (inputv);
5058 	  if (TREE_CODE (inputv) == PREDECREMENT_EXPR
5059 	      || TREE_CODE (inputv) == PREINCREMENT_EXPR
5060 	      || TREE_CODE (inputv) == POSTDECREMENT_EXPR
5061 	      || TREE_CODE (inputv) == POSTINCREMENT_EXPR)
5062 	    TREE_VALUE (link) = error_mark_node;
5063 	  tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
5064 				is_gimple_lvalue, fb_lvalue | fb_mayfail);
5065 	  mark_addressable (TREE_VALUE (link));
5066 	  if (tret == GS_ERROR)
5067 	    {
5068 	      if (EXPR_HAS_LOCATION (TREE_VALUE (link)))
5069 	        input_location = EXPR_LOCATION (TREE_VALUE (link));
5070 	      error ("memory input %d is not directly addressable", i);
5071 	      ret = tret;
5072 	    }
5073 	}
5074       else
5075 	{
5076 	  tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
5077 				is_gimple_asm_val, fb_rvalue);
5078 	  if (tret == GS_ERROR)
5079 	    ret = tret;
5080 	}
5081 
5082       TREE_CHAIN (link) = NULL_TREE;
5083       vec_safe_push (inputs, link);
5084     }
5085 
5086   link_next = NULL_TREE;
5087   for (link = ASM_CLOBBERS (expr); link; ++i, link = link_next)
5088     {
5089       link_next = TREE_CHAIN (link);
5090       TREE_CHAIN (link) = NULL_TREE;
5091       vec_safe_push (clobbers, link);
5092     }
5093 
5094   link_next = NULL_TREE;
5095   for (link = ASM_LABELS (expr); link; ++i, link = link_next)
5096     {
5097       link_next = TREE_CHAIN (link);
5098       TREE_CHAIN (link) = NULL_TREE;
5099       vec_safe_push (labels, link);
5100     }
5101 
5102   /* Do not add ASMs with errors to the gimple IL stream.  */
5103   if (ret != GS_ERROR)
5104     {
5105       stmt = gimple_build_asm_vec (TREE_STRING_POINTER (ASM_STRING (expr)),
5106 				   inputs, outputs, clobbers, labels);
5107 
5108       gimple_asm_set_volatile (stmt, ASM_VOLATILE_P (expr));
5109       gimple_asm_set_input (stmt, ASM_INPUT_P (expr));
5110 
5111       gimplify_seq_add_stmt (pre_p, stmt);
5112     }
5113 
5114   return ret;
5115 }
5116 
5117 /* Gimplify a CLEANUP_POINT_EXPR.  Currently this works by adding
5118    GIMPLE_WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
5119    gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
5120    return to this function.
5121 
5122    FIXME should we complexify the prequeue handling instead?  Or use flags
5123    for all the cleanups and let the optimizer tighten them up?  The current
5124    code seems pretty fragile; it will break on a cleanup within any
5125    non-conditional nesting.  But any such nesting would be broken, anyway;
5126    we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
5127    and continues out of it.  We can do that at the RTL level, though, so
5128    having an optimizer to tighten up try/finally regions would be a Good
5129    Thing.  */
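/* Schematically, a gimplified body of the form

       stmt1;
       GIMPLE_WITH_CLEANUP_EXPR <cleanup>;
       stmt2;

   is rewritten below into

       stmt1;
       try { stmt2; } finally { cleanup; }

   (a try/catch instead of a try/finally when the cleanup is EH-only), while
   a cleanup that is already the last statement is emitted in place, or
   dropped if it is EH-only.  */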
5130 
5131 static enum gimplify_status
5132 gimplify_cleanup_point_expr (tree *expr_p, gimple_seq *pre_p)
5133 {
5134   gimple_stmt_iterator iter;
5135   gimple_seq body_sequence = NULL;
5136 
5137   tree temp = voidify_wrapper_expr (*expr_p, NULL);
5138 
5139   /* We only care about the number of conditions between the innermost
5140      CLEANUP_POINT_EXPR and the cleanup.  So save and reset the count and
5141      any cleanups collected outside the CLEANUP_POINT_EXPR.  */
5142   int old_conds = gimplify_ctxp->conditions;
5143   gimple_seq old_cleanups = gimplify_ctxp->conditional_cleanups;
5144   bool old_in_cleanup_point_expr = gimplify_ctxp->in_cleanup_point_expr;
5145   gimplify_ctxp->conditions = 0;
5146   gimplify_ctxp->conditional_cleanups = NULL;
5147   gimplify_ctxp->in_cleanup_point_expr = true;
5148 
5149   gimplify_stmt (&TREE_OPERAND (*expr_p, 0), &body_sequence);
5150 
5151   gimplify_ctxp->conditions = old_conds;
5152   gimplify_ctxp->conditional_cleanups = old_cleanups;
5153   gimplify_ctxp->in_cleanup_point_expr = old_in_cleanup_point_expr;
5154 
5155   for (iter = gsi_start (body_sequence); !gsi_end_p (iter); )
5156     {
5157       gimple wce = gsi_stmt (iter);
5158 
5159       if (gimple_code (wce) == GIMPLE_WITH_CLEANUP_EXPR)
5160 	{
5161 	  if (gsi_one_before_end_p (iter))
5162 	    {
5163               /* Note that gsi_insert_seq_before and gsi_remove do not
5164                  scan operands, unlike some other sequence mutators.  */
5165 	      if (!gimple_wce_cleanup_eh_only (wce))
5166 		gsi_insert_seq_before_without_update (&iter,
5167 						      gimple_wce_cleanup (wce),
5168 						      GSI_SAME_STMT);
5169 	      gsi_remove (&iter, true);
5170 	      break;
5171 	    }
5172 	  else
5173 	    {
5174 	      gimple_statement_try *gtry;
5175 	      gimple_seq seq;
5176 	      enum gimple_try_flags kind;
5177 
5178 	      if (gimple_wce_cleanup_eh_only (wce))
5179 		kind = GIMPLE_TRY_CATCH;
5180 	      else
5181 		kind = GIMPLE_TRY_FINALLY;
5182 	      seq = gsi_split_seq_after (iter);
5183 
5184 	      gtry = gimple_build_try (seq, gimple_wce_cleanup (wce), kind);
5185               /* Do not use gsi_replace here, as it may scan operands.
5186                  We want to do a simple structural modification only.  */
5187 	      gsi_set_stmt (&iter, gtry);
5188 	      iter = gsi_start (gtry->eval);
5189 	    }
5190 	}
5191       else
5192 	gsi_next (&iter);
5193     }
5194 
5195   gimplify_seq_add_seq (pre_p, body_sequence);
5196   if (temp)
5197     {
5198       *expr_p = temp;
5199       return GS_OK;
5200     }
5201   else
5202     {
5203       *expr_p = NULL;
5204       return GS_ALL_DONE;
5205     }
5206 }
5207 
5208 /* Insert a cleanup marker for gimplify_cleanup_point_expr.  CLEANUP
5209    is the cleanup action required.  EH_ONLY is true if the cleanup should
5210    only be executed if an exception is thrown, not on normal exit.  */
5211 
5212 static void
5213 gimple_push_cleanup (tree var, tree cleanup, bool eh_only, gimple_seq *pre_p)
5214 {
5215   gimple wce;
5216   gimple_seq cleanup_stmts = NULL;
5217 
5218   /* Errors can result in improperly nested cleanups, which causes
5219      confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR.  */
5220   if (seen_error ())
5221     return;
5222 
5223   if (gimple_conditional_context ())
5224     {
5225       /* If we're in a conditional context, this is more complex.  We only
5226 	 want to run the cleanup if we actually ran the initialization that
5227 	 necessitates it, but we want to run it after the end of the
5228 	 conditional context.  So we wrap the try/finally around the
5229 	 condition and use a flag to determine whether or not to actually
5230 	 run the destructor.  Thus
5231 
5232 	   test ? f(A()) : 0
5233 
5234 	 becomes (approximately)
5235 
5236 	   flag = 0;
5237 	   try {
5238 	     if (test) { A::A(temp); flag = 1; val = f(temp); }
5239 	     else { val = 0; }
5240 	   } finally {
5241 	     if (flag) A::~A(temp);
5242 	   }
5243 	   val
5244       */
5245       tree flag = create_tmp_var (boolean_type_node, "cleanup");
5246       gimple ffalse = gimple_build_assign (flag, boolean_false_node);
5247       gimple ftrue = gimple_build_assign (flag, boolean_true_node);
5248 
5249       cleanup = build3 (COND_EXPR, void_type_node, flag, cleanup, NULL);
5250       gimplify_stmt (&cleanup, &cleanup_stmts);
5251       wce = gimple_build_wce (cleanup_stmts);
5252 
5253       gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, ffalse);
5254       gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
5255       gimplify_seq_add_stmt (pre_p, ftrue);
5256 
5257       /* Because of this manipulation, and the EH edges that jump
5258 	 threading cannot redirect, the temporary (VAR) will appear
5259 	 to be used uninitialized.  Don't warn.  */
5260       TREE_NO_WARNING (var) = 1;
5261     }
5262   else
5263     {
5264       gimplify_stmt (&cleanup, &cleanup_stmts);
5265       wce = gimple_build_wce (cleanup_stmts);
5266       gimple_wce_set_cleanup_eh_only (wce, eh_only);
5267       gimplify_seq_add_stmt (pre_p, wce);
5268     }
5269 }
5270 
5271 /* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR.  */
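/* Roughly (the name D.1234 below is illustrative): for
   TARGET_EXPR <D.1234, init, cleanup> the slot D.1234 is added as a local
   temporary, "D.1234 = init" is gimplified into *PRE_P, the cleanup (plus a
   stack-reuse clobber when appropriate) is queued via gimple_push_cleanup,
   and *EXPR_P is replaced by the slot itself.  */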
5272 
5273 static enum gimplify_status
5274 gimplify_target_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
5275 {
5276   tree targ = *expr_p;
5277   tree temp = TARGET_EXPR_SLOT (targ);
5278   tree init = TARGET_EXPR_INITIAL (targ);
5279   enum gimplify_status ret;
5280 
5281   if (init)
5282     {
5283       tree cleanup = NULL_TREE;
5284 
5285       /* The TARGET_EXPR temp isn't part of the enclosing block, so add it
5286 	 to the temps list.  Also handle variable-length TARGET_EXPRs.  */
5287       if (TREE_CODE (DECL_SIZE (temp)) != INTEGER_CST)
5288 	{
5289 	  if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (temp)))
5290 	    gimplify_type_sizes (TREE_TYPE (temp), pre_p);
5291 	  gimplify_vla_decl (temp, pre_p);
5292 	}
5293       else
5294 	gimple_add_tmp_var (temp);
5295 
5296       /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
5297 	 expression is supposed to initialize the slot.  */
5298       if (VOID_TYPE_P (TREE_TYPE (init)))
5299 	ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
5300       else
5301 	{
5302 	  tree init_expr = build2 (INIT_EXPR, void_type_node, temp, init);
5303 	  init = init_expr;
5304 	  ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
5305 	  init = NULL;
5306 	  ggc_free (init_expr);
5307 	}
5308       if (ret == GS_ERROR)
5309 	{
5310 	  /* PR c++/28266 Make sure this is expanded only once. */
5311 	  TARGET_EXPR_INITIAL (targ) = NULL_TREE;
5312 	  return GS_ERROR;
5313 	}
5314       if (init)
5315 	gimplify_and_add (init, pre_p);
5316 
5317       /* If needed, push the cleanup for the temp.  */
5318       if (TARGET_EXPR_CLEANUP (targ))
5319 	{
5320 	  if (CLEANUP_EH_ONLY (targ))
5321 	    gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ),
5322 				 CLEANUP_EH_ONLY (targ), pre_p);
5323 	  else
5324 	    cleanup = TARGET_EXPR_CLEANUP (targ);
5325 	}
5326 
5327       /* Add a clobber for the temporary going out of scope, like
5328 	 gimplify_bind_expr.  */
5329       if (gimplify_ctxp->in_cleanup_point_expr
5330 	  && needs_to_live_in_memory (temp)
5331 	  && flag_stack_reuse == SR_ALL)
5332 	{
5333 	  tree clobber = build_constructor (TREE_TYPE (temp),
5334 					    NULL);
5335 	  TREE_THIS_VOLATILE (clobber) = true;
5336 	  clobber = build2 (MODIFY_EXPR, TREE_TYPE (temp), temp, clobber);
5337 	  if (cleanup)
5338 	    cleanup = build2 (COMPOUND_EXPR, void_type_node, cleanup,
5339 			      clobber);
5340 	  else
5341 	    cleanup = clobber;
5342 	}
5343 
5344       if (cleanup)
5345 	gimple_push_cleanup (temp, cleanup, false, pre_p);
5346 
5347       /* Only expand this once.  */
5348       TREE_OPERAND (targ, 3) = init;
5349       TARGET_EXPR_INITIAL (targ) = NULL_TREE;
5350     }
5351   else
5352     /* We should have expanded this before.  */
5353     gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp));
5354 
5355   *expr_p = temp;
5356   return GS_OK;
5357 }
5358 
5359 /* Gimplification of expression trees.  */
5360 
5361 /* Gimplify an expression which appears in statement context.  The
5362    corresponding GIMPLE statements are added to *SEQ_P.  If *SEQ_P is
5363    NULL, a new sequence is allocated.
5364 
5365    Return true if we actually added a statement to the queue.  */
5366 
5367 bool
5368 gimplify_stmt (tree *stmt_p, gimple_seq *seq_p)
5369 {
5370   gimple_seq_node last;
5371 
5372   last = gimple_seq_last (*seq_p);
5373   gimplify_expr (stmt_p, seq_p, NULL, is_gimple_stmt, fb_none);
5374   return last != gimple_seq_last (*seq_p);
5375 }
5376 
5377 /* Add FIRSTPRIVATE entries for DECL in CTX and the surrounding OpenMP
5378    parallels.  If entries already exist, force them to be some flavor of
5379    private.  If there is no enclosing parallel, do nothing.  */
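/* For example, the hidden size variable of a VLA referenced inside
   "#pragma omp parallel" ends up firstprivate in that parallel and in every
   enclosing parallel, so the size expression can be evaluated in each
   context.  */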
5380 
5381 void
5382 omp_firstprivatize_variable (struct gimplify_omp_ctx *ctx, tree decl)
5383 {
5384   splay_tree_node n;
5385 
5386   if (decl == NULL || !DECL_P (decl))
5387     return;
5388 
5389   do
5390     {
5391       n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5392       if (n != NULL)
5393 	{
5394 	  if (n->value & GOVD_SHARED)
5395 	    n->value = GOVD_FIRSTPRIVATE | (n->value & GOVD_SEEN);
5396 	  else if (n->value & GOVD_MAP)
5397 	    n->value |= GOVD_MAP_TO_ONLY;
5398 	  else
5399 	    return;
5400 	}
5401       else if (ctx->region_type == ORT_TARGET)
5402 	omp_add_variable (ctx, decl, GOVD_MAP | GOVD_MAP_TO_ONLY);
5403       else if (ctx->region_type != ORT_WORKSHARE
5404 	       && ctx->region_type != ORT_SIMD
5405 	       && ctx->region_type != ORT_TARGET_DATA)
5406 	omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
5407 
5408       ctx = ctx->outer_context;
5409     }
5410   while (ctx);
5411 }
5412 
5413 /* Similarly for each of the type sizes of TYPE.  */
5414 
5415 static void
5416 omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
5417 {
5418   if (type == NULL || type == error_mark_node)
5419     return;
5420   type = TYPE_MAIN_VARIANT (type);
5421 
5422   if (pointer_set_insert (ctx->privatized_types, type))
5423     return;
5424 
5425   switch (TREE_CODE (type))
5426     {
5427     case INTEGER_TYPE:
5428     case ENUMERAL_TYPE:
5429     case BOOLEAN_TYPE:
5430     case REAL_TYPE:
5431     case FIXED_POINT_TYPE:
5432       omp_firstprivatize_variable (ctx, TYPE_MIN_VALUE (type));
5433       omp_firstprivatize_variable (ctx, TYPE_MAX_VALUE (type));
5434       break;
5435 
5436     case ARRAY_TYPE:
5437       omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
5438       omp_firstprivatize_type_sizes (ctx, TYPE_DOMAIN (type));
5439       break;
5440 
5441     case RECORD_TYPE:
5442     case UNION_TYPE:
5443     case QUAL_UNION_TYPE:
5444       {
5445 	tree field;
5446 	for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
5447 	  if (TREE_CODE (field) == FIELD_DECL)
5448 	    {
5449 	      omp_firstprivatize_variable (ctx, DECL_FIELD_OFFSET (field));
5450 	      omp_firstprivatize_type_sizes (ctx, TREE_TYPE (field));
5451 	    }
5452       }
5453       break;
5454 
5455     case POINTER_TYPE:
5456     case REFERENCE_TYPE:
5457       omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
5458       break;
5459 
5460     default:
5461       break;
5462     }
5463 
5464   omp_firstprivatize_variable (ctx, TYPE_SIZE (type));
5465   omp_firstprivatize_variable (ctx, TYPE_SIZE_UNIT (type));
5466   lang_hooks.types.omp_firstprivatize_type_sizes (ctx, type);
5467 }
5468 
5469 /* Add an entry for DECL in the OpenMP context CTX with FLAGS.  */
5470 
5471 static void
5472 omp_add_variable (struct gimplify_omp_ctx *ctx, tree decl, unsigned int flags)
5473 {
5474   splay_tree_node n;
5475   unsigned int nflags;
5476   tree t;
5477 
5478   if (error_operand_p (decl))
5479     return;
5480 
5481   /* Never elide decls whose type has TREE_ADDRESSABLE set.  This means
5482      there are constructors involved somewhere.  */
5483   if (TREE_ADDRESSABLE (TREE_TYPE (decl))
5484       || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl)))
5485     flags |= GOVD_SEEN;
5486 
5487   n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5488   if (n != NULL && n->value != GOVD_ALIGNED)
5489     {
5490       /* We shouldn't be re-adding the decl with the same data
5491 	 sharing class.  */
5492       gcc_assert ((n->value & GOVD_DATA_SHARE_CLASS & flags) == 0);
5493       /* The only combination of data sharing classes we should see is
5494 	 FIRSTPRIVATE and LASTPRIVATE.  */
5495       nflags = n->value | flags;
5496       gcc_assert ((nflags & GOVD_DATA_SHARE_CLASS)
5497 		  == (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE)
5498 		  || (flags & GOVD_DATA_SHARE_CLASS) == 0);
5499       n->value = nflags;
5500       return;
5501     }
5502 
5503   /* When adding a variable-sized variable, we have to handle all sorts
5504      of additional bits of data: the pointer replacement variable, and
5505      the parameters of the type.  */
5506   if (DECL_SIZE (decl) && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
5507     {
5508       /* Add the pointer replacement variable as PRIVATE if the variable
5509 	 replacement is private, else FIRSTPRIVATE since we'll need the
5510 	 address of the original variable either for SHARED, or for the
5511 	 copy into or out of the context.  */
5512       if (!(flags & GOVD_LOCAL))
5513 	{
5514 	  nflags = flags & GOVD_MAP
5515 		   ? GOVD_MAP | GOVD_MAP_TO_ONLY | GOVD_EXPLICIT
5516 		   : flags & GOVD_PRIVATE ? GOVD_PRIVATE : GOVD_FIRSTPRIVATE;
5517 	  nflags |= flags & GOVD_SEEN;
5518 	  t = DECL_VALUE_EXPR (decl);
5519 	  gcc_assert (TREE_CODE (t) == INDIRECT_REF);
5520 	  t = TREE_OPERAND (t, 0);
5521 	  gcc_assert (DECL_P (t));
5522 	  omp_add_variable (ctx, t, nflags);
5523 	}
5524 
5525       /* Add all of the variable and type parameters (which should have
5526 	 been gimplified to a formal temporary) as FIRSTPRIVATE.  */
5527       omp_firstprivatize_variable (ctx, DECL_SIZE_UNIT (decl));
5528       omp_firstprivatize_variable (ctx, DECL_SIZE (decl));
5529       omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
5530 
5531       /* The variable-sized variable itself is never SHARED, only some form
5532 	 of PRIVATE.  The sharing would take place via the pointer variable
5533 	 which we remapped above.  */
5534       if (flags & GOVD_SHARED)
5535 	flags = GOVD_PRIVATE | GOVD_DEBUG_PRIVATE
5536 		| (flags & (GOVD_SEEN | GOVD_EXPLICIT));
5537 
5538       /* We're going to make use of the TYPE_SIZE_UNIT at least in the
5539 	 alloca statement we generate for the variable, so make sure it
5540 	 is available.  This isn't automatically needed for the SHARED
5541 	 case, since we won't be allocating local storage then.
5542 	 For local variables TYPE_SIZE_UNIT might not be gimplified yet,
5543 	 in this case omp_notice_variable will be called later
5544 	 on when it is gimplified.  */
5545       else if (! (flags & (GOVD_LOCAL | GOVD_MAP))
5546 	       && DECL_P (TYPE_SIZE_UNIT (TREE_TYPE (decl))))
5547 	omp_notice_variable (ctx, TYPE_SIZE_UNIT (TREE_TYPE (decl)), true);
5548     }
5549   else if ((flags & (GOVD_MAP | GOVD_LOCAL)) == 0
5550 	   && lang_hooks.decls.omp_privatize_by_reference (decl))
5551     {
5552       omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
5553 
5554       /* Similar to the direct variable sized case above, we'll need the
5555 	 size of references being privatized.  */
5556       if ((flags & GOVD_SHARED) == 0)
5557 	{
5558 	  t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
5559 	  if (TREE_CODE (t) != INTEGER_CST)
5560 	    omp_notice_variable (ctx, t, true);
5561 	}
5562     }
5563 
5564   if (n != NULL)
5565     n->value |= flags;
5566   else
5567     splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags);
5568 }
5569 
5570 /* Notice a threadprivate variable DECL used in OpenMP context CTX.
5571    This just prints out diagnostics about threadprivate variable uses
5572    in untied tasks.  If DECL2 is non-NULL, prevent this warning
5573    on that variable.  */
5574 
5575 static bool
5576 omp_notice_threadprivate_variable (struct gimplify_omp_ctx *ctx, tree decl,
5577 				   tree decl2)
5578 {
5579   splay_tree_node n;
5580   struct gimplify_omp_ctx *octx;
5581 
5582   for (octx = ctx; octx; octx = octx->outer_context)
5583     if (octx->region_type == ORT_TARGET)
5584       {
5585 	n = splay_tree_lookup (octx->variables, (splay_tree_key)decl);
5586 	if (n == NULL)
5587 	  {
5588 	    error ("threadprivate variable %qE used in target region",
5589 		   DECL_NAME (decl));
5590 	    error_at (octx->location, "enclosing target region");
5591 	    splay_tree_insert (octx->variables, (splay_tree_key)decl, 0);
5592 	  }
5593 	if (decl2)
5594 	  splay_tree_insert (octx->variables, (splay_tree_key)decl2, 0);
5595       }
5596 
5597   if (ctx->region_type != ORT_UNTIED_TASK)
5598     return false;
5599   n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5600   if (n == NULL)
5601     {
5602       error ("threadprivate variable %qE used in untied task",
5603 	     DECL_NAME (decl));
5604       error_at (ctx->location, "enclosing task");
5605       splay_tree_insert (ctx->variables, (splay_tree_key)decl, 0);
5606     }
5607   if (decl2)
5608     splay_tree_insert (ctx->variables, (splay_tree_key)decl2, 0);
5609   return false;
5610 }
5611 
5612 /* Record the fact that DECL was used within the OpenMP context CTX.
5613    IN_CODE is true when real code uses DECL, and false when we should
5614    merely emit default(none) errors.  Return true if DECL is going to
5615    be remapped and thus DECL shouldn't be gimplified into its
5616    DECL_VALUE_EXPR (if any).  */
5617 
5618 static bool
5619 omp_notice_variable (struct gimplify_omp_ctx *ctx, tree decl, bool in_code)
5620 {
5621   splay_tree_node n;
5622   unsigned flags = in_code ? GOVD_SEEN : 0;
5623   bool ret = false, shared;
5624 
5625   if (error_operand_p (decl))
5626     return false;
5627 
5628   /* Threadprivate variables are predetermined.  */
5629   if (is_global_var (decl))
5630     {
5631       if (DECL_THREAD_LOCAL_P (decl))
5632 	return omp_notice_threadprivate_variable (ctx, decl, NULL_TREE);
5633 
5634       if (DECL_HAS_VALUE_EXPR_P (decl))
5635 	{
5636 	  tree value = get_base_address (DECL_VALUE_EXPR (decl));
5637 
5638 	  if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value))
5639 	    return omp_notice_threadprivate_variable (ctx, decl, value);
5640 	}
5641     }
5642 
5643   n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5644   if (ctx->region_type == ORT_TARGET)
5645     {
5646       ret = lang_hooks.decls.omp_disregard_value_expr (decl, true);
5647       if (n == NULL)
5648 	{
5649 	  if (!lang_hooks.types.omp_mappable_type (TREE_TYPE (decl)))
5650 	    {
5651 	      error ("%qD referenced in target region does not have "
5652 		     "a mappable type", decl);
5653 	      omp_add_variable (ctx, decl, GOVD_MAP | GOVD_EXPLICIT | flags);
5654 	    }
5655 	  else
5656 	    omp_add_variable (ctx, decl, GOVD_MAP | flags);
5657 	}
5658       else
5659 	{
5660 	  /* If nothing changed, there's nothing left to do.  */
5661 	  if ((n->value & flags) == flags)
5662 	    return ret;
5663 	  n->value |= flags;
5664 	}
5665       goto do_outer;
5666     }
5667 
5668   if (n == NULL)
5669     {
5670       enum omp_clause_default_kind default_kind, kind;
5671       struct gimplify_omp_ctx *octx;
5672 
5673       if (ctx->region_type == ORT_WORKSHARE
5674 	  || ctx->region_type == ORT_SIMD
5675 	  || ctx->region_type == ORT_TARGET_DATA)
5676 	goto do_outer;
5677 
5678       /* ??? Some compiler-generated variables (like SAVE_EXPRs) could be
5679 	 remapped firstprivate instead of shared.  To some extent this is
5680 	 addressed in omp_firstprivatize_type_sizes, but not effectively.  */
5681       default_kind = ctx->default_kind;
5682       kind = lang_hooks.decls.omp_predetermined_sharing (decl);
5683       if (kind != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
5684 	default_kind = kind;
5685 
5686       switch (default_kind)
5687 	{
5688 	case OMP_CLAUSE_DEFAULT_NONE:
5689 	  if ((ctx->region_type & ORT_TASK) != 0)
5690 	    {
5691 	      error ("%qE not specified in enclosing task",
5692 		     DECL_NAME (lang_hooks.decls.omp_report_decl (decl)));
5693 	      error_at (ctx->location, "enclosing task");
5694 	    }
5695 	  else if (ctx->region_type == ORT_TEAMS)
5696 	    {
5697 	      error ("%qE not specified in enclosing teams construct",
5698 		     DECL_NAME (lang_hooks.decls.omp_report_decl (decl)));
5699 	      error_at (ctx->location, "enclosing teams construct");
5700 	    }
5701 	  else
5702 	    {
5703 	      error ("%qE not specified in enclosing parallel",
5704 		     DECL_NAME (lang_hooks.decls.omp_report_decl (decl)));
5705 	      error_at (ctx->location, "enclosing parallel");
5706 	    }
5707 	  /* FALLTHRU */
5708 	case OMP_CLAUSE_DEFAULT_SHARED:
5709 	  flags |= GOVD_SHARED;
5710 	  break;
5711 	case OMP_CLAUSE_DEFAULT_PRIVATE:
5712 	  flags |= GOVD_PRIVATE;
5713 	  break;
5714 	case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE:
5715 	  flags |= GOVD_FIRSTPRIVATE;
5716 	  break;
5717 	case OMP_CLAUSE_DEFAULT_UNSPECIFIED:
5718 	  /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED.  */
5719 	  gcc_assert ((ctx->region_type & ORT_TASK) != 0);
5720 	  if (ctx->outer_context)
5721 	    omp_notice_variable (ctx->outer_context, decl, in_code);
5722 	  for (octx = ctx->outer_context; octx; octx = octx->outer_context)
5723 	    {
5724 	      splay_tree_node n2;
5725 
5726 	      if ((octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)) != 0)
5727 		continue;
5728 	      n2 = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
5729 	      if (n2 && (n2->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED)
5730 		{
5731 		  flags |= GOVD_FIRSTPRIVATE;
5732 		  break;
5733 		}
5734 	      if ((octx->region_type & (ORT_PARALLEL | ORT_TEAMS)) != 0)
5735 		break;
5736 	    }
5737 	  if (flags & GOVD_FIRSTPRIVATE)
5738 	    break;
5739 	  if (octx == NULL
5740 	      && (TREE_CODE (decl) == PARM_DECL
5741 		  || (!is_global_var (decl)
5742 		      && DECL_CONTEXT (decl) == current_function_decl)))
5743 	    {
5744 	      flags |= GOVD_FIRSTPRIVATE;
5745 	      break;
5746 	    }
5747 	  flags |= GOVD_SHARED;
5748 	  break;
5749 	default:
5750 	  gcc_unreachable ();
5751 	}
5752 
5753       if ((flags & GOVD_PRIVATE)
5754 	  && lang_hooks.decls.omp_private_outer_ref (decl))
5755 	flags |= GOVD_PRIVATE_OUTER_REF;
5756 
5757       omp_add_variable (ctx, decl, flags);
5758 
5759       shared = (flags & GOVD_SHARED) != 0;
5760       ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
5761       goto do_outer;
5762     }
5763 
5764   if ((n->value & (GOVD_SEEN | GOVD_LOCAL)) == 0
5765       && (flags & (GOVD_SEEN | GOVD_LOCAL)) == GOVD_SEEN
5766       && DECL_SIZE (decl)
5767       && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
5768     {
5769       splay_tree_node n2;
5770       tree t = DECL_VALUE_EXPR (decl);
5771       gcc_assert (TREE_CODE (t) == INDIRECT_REF);
5772       t = TREE_OPERAND (t, 0);
5773       gcc_assert (DECL_P (t));
5774       n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
5775       n2->value |= GOVD_SEEN;
5776     }
5777 
5778   shared = ((flags | n->value) & GOVD_SHARED) != 0;
5779   ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
5780 
5781   /* If nothing changed, there's nothing left to do.  */
5782   if ((n->value & flags) == flags)
5783     return ret;
5784   flags |= n->value;
5785   n->value = flags;
5786 
5787  do_outer:
5788   /* If the variable is private in the current context, then we don't
5789      need to propagate anything to an outer context.  */
5790   if ((flags & GOVD_PRIVATE) && !(flags & GOVD_PRIVATE_OUTER_REF))
5791     return ret;
5792   if (ctx->outer_context
5793       && omp_notice_variable (ctx->outer_context, decl, in_code))
5794     return true;
5795   return ret;
5796 }
5797 
5798 /* Verify that DECL is private within CTX.  If there's specific information
5799    to the contrary in the innermost scope, generate an error.  */
5800 
5801 static bool
5802 omp_is_private (struct gimplify_omp_ctx *ctx, tree decl, int simd)
5803 {
5804   splay_tree_node n;
5805 
5806   n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
5807   if (n != NULL)
5808     {
5809       if (n->value & GOVD_SHARED)
5810 	{
5811 	  if (ctx == gimplify_omp_ctxp)
5812 	    {
5813 	      if (simd)
5814 		error ("iteration variable %qE is predetermined linear",
5815 		       DECL_NAME (decl));
5816 	      else
5817 		error ("iteration variable %qE should be private",
5818 		       DECL_NAME (decl));
5819 	      n->value = GOVD_PRIVATE;
5820 	      return true;
5821 	    }
5822 	  else
5823 	    return false;
5824 	}
5825       else if ((n->value & GOVD_EXPLICIT) != 0
5826 	       && (ctx == gimplify_omp_ctxp
5827 		   || (ctx->region_type == ORT_COMBINED_PARALLEL
5828 		       && gimplify_omp_ctxp->outer_context == ctx)))
5829 	{
5830 	  if ((n->value & GOVD_FIRSTPRIVATE) != 0)
5831 	    error ("iteration variable %qE should not be firstprivate",
5832 		   DECL_NAME (decl));
5833 	  else if ((n->value & GOVD_REDUCTION) != 0)
5834 	    error ("iteration variable %qE should not be reduction",
5835 		   DECL_NAME (decl));
5836 	  else if (simd == 1 && (n->value & GOVD_LASTPRIVATE) != 0)
5837 	    error ("iteration variable %qE should not be lastprivate",
5838 		   DECL_NAME (decl));
5839 	  else if (simd && (n->value & GOVD_PRIVATE) != 0)
5840 	    error ("iteration variable %qE should not be private",
5841 		   DECL_NAME (decl));
5842 	  else if (simd == 2 && (n->value & GOVD_LINEAR) != 0)
5843 	    error ("iteration variable %qE is predetermined linear",
5844 		   DECL_NAME (decl));
5845 	}
5846       return (ctx == gimplify_omp_ctxp
5847 	      || (ctx->region_type == ORT_COMBINED_PARALLEL
5848 		  && gimplify_omp_ctxp->outer_context == ctx));
5849     }
5850 
5851   if (ctx->region_type != ORT_WORKSHARE
5852       && ctx->region_type != ORT_SIMD)
5853     return false;
5854   else if (ctx->outer_context)
5855     return omp_is_private (ctx->outer_context, decl, simd);
5856   return false;
5857 }
5858 
5859 /* Return true if DECL is private within a parallel region
5860    that binds to the current construct's context, or appears in that
5861    parallel region's REDUCTION clause.  */
5862 
5863 static bool
5864 omp_check_private (struct gimplify_omp_ctx *ctx, tree decl, bool copyprivate)
5865 {
5866   splay_tree_node n;
5867 
5868   do
5869     {
5870       ctx = ctx->outer_context;
5871       if (ctx == NULL)
5872 	return !(is_global_var (decl)
5873 		 /* References might be private, but they might be shared too;
5874 		    when checking for copyprivate, assume they might be
5875 		    private, otherwise assume they might be shared.  */
5876 		 || (!copyprivate
5877 		     && lang_hooks.decls.omp_privatize_by_reference (decl)));
5878 
5879       if ((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0)
5880 	continue;
5881 
5882       n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
5883       if (n != NULL)
5884 	return (n->value & GOVD_SHARED) == 0;
5885     }
5886   while (ctx->region_type == ORT_WORKSHARE
5887 	 || ctx->region_type == ORT_SIMD);
5888   return false;
5889 }
5890 
5891 /* Scan the OpenMP clauses in *LIST_P, installing mappings into a new
5892    and previous omp contexts.  */
5893 
5894 static void
5895 gimplify_scan_omp_clauses (tree *list_p, gimple_seq *pre_p,
5896 			   enum omp_region_type region_type)
5897 {
5898   struct gimplify_omp_ctx *ctx, *outer_ctx;
5899   tree c;
5900 
5901   ctx = new_omp_context (region_type);
5902   outer_ctx = ctx->outer_context;
5903 
5904   while ((c = *list_p) != NULL)
5905     {
5906       bool remove = false;
5907       bool notice_outer = true;
5908       const char *check_non_private = NULL;
5909       unsigned int flags;
5910       tree decl;
5911 
5912       switch (OMP_CLAUSE_CODE (c))
5913 	{
5914 	case OMP_CLAUSE_PRIVATE:
5915 	  flags = GOVD_PRIVATE | GOVD_EXPLICIT;
5916 	  if (lang_hooks.decls.omp_private_outer_ref (OMP_CLAUSE_DECL (c)))
5917 	    {
5918 	      flags |= GOVD_PRIVATE_OUTER_REF;
5919 	      OMP_CLAUSE_PRIVATE_OUTER_REF (c) = 1;
5920 	    }
5921 	  else
5922 	    notice_outer = false;
5923 	  goto do_add;
5924 	case OMP_CLAUSE_SHARED:
5925 	  flags = GOVD_SHARED | GOVD_EXPLICIT;
5926 	  goto do_add;
5927 	case OMP_CLAUSE_FIRSTPRIVATE:
5928 	  flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
5929 	  check_non_private = "firstprivate";
5930 	  goto do_add;
5931 	case OMP_CLAUSE_LASTPRIVATE:
5932 	  flags = GOVD_LASTPRIVATE | GOVD_SEEN | GOVD_EXPLICIT;
5933 	  check_non_private = "lastprivate";
5934 	  goto do_add;
5935 	case OMP_CLAUSE_REDUCTION:
5936 	  flags = GOVD_REDUCTION | GOVD_SEEN | GOVD_EXPLICIT;
5937 	  check_non_private = "reduction";
5938 	  goto do_add;
5939 	case OMP_CLAUSE_LINEAR:
5940 	  if (gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c), pre_p, NULL,
5941 			     is_gimple_val, fb_rvalue) == GS_ERROR)
5942 	    {
5943 	      remove = true;
5944 	      break;
5945 	    }
5946 	  flags = GOVD_LINEAR | GOVD_EXPLICIT;
5947 	  goto do_add;
5948 
5949 	case OMP_CLAUSE_MAP:
5950 	  decl = OMP_CLAUSE_DECL (c);
5951 	  if (error_operand_p (decl))
5952 	    {
5953 	      remove = true;
5954 	      break;
5955 	    }
5956 	  if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
5957 	    OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
5958 				  : TYPE_SIZE_UNIT (TREE_TYPE (decl));
5959 	  if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
5960 			     NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
5961 	    {
5962 	      remove = true;
5963 	      break;
5964 	    }
5965 	  if (!DECL_P (decl))
5966 	    {
5967 	      if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p,
5968 				 NULL, is_gimple_lvalue, fb_lvalue)
5969 		  == GS_ERROR)
5970 		{
5971 		  remove = true;
5972 		  break;
5973 		}
5974 	      break;
5975 	    }
5976 	  flags = GOVD_MAP | GOVD_EXPLICIT;
5977 	  goto do_add;
5978 
5979 	case OMP_CLAUSE_DEPEND:
5980 	  if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
5981 	    {
5982 	      gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
5983 			     NULL, is_gimple_val, fb_rvalue);
5984 	      OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
5985 	    }
5986 	  if (error_operand_p (OMP_CLAUSE_DECL (c)))
5987 	    {
5988 	      remove = true;
5989 	      break;
5990 	    }
5991 	  OMP_CLAUSE_DECL (c) = build_fold_addr_expr (OMP_CLAUSE_DECL (c));
5992 	  if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
5993 			     is_gimple_val, fb_rvalue) == GS_ERROR)
5994 	    {
5995 	      remove = true;
5996 	      break;
5997 	    }
5998 	  break;
5999 
6000 	case OMP_CLAUSE_TO:
6001 	case OMP_CLAUSE_FROM:
6002 	  decl = OMP_CLAUSE_DECL (c);
6003 	  if (error_operand_p (decl))
6004 	    {
6005 	      remove = true;
6006 	      break;
6007 	    }
6008 	  if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
6009 	    OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
6010 				  : TYPE_SIZE_UNIT (TREE_TYPE (decl));
6011 	  if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
6012 			     NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
6013 	    {
6014 	      remove = true;
6015 	      break;
6016 	    }
6017 	  if (!DECL_P (decl))
6018 	    {
6019 	      if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p,
6020 				 NULL, is_gimple_lvalue, fb_lvalue)
6021 		  == GS_ERROR)
6022 		{
6023 		  remove = true;
6024 		  break;
6025 		}
6026 	      break;
6027 	    }
6028 	  goto do_notice;
6029 
6030 	do_add:
6031 	  decl = OMP_CLAUSE_DECL (c);
6032 	  if (error_operand_p (decl))
6033 	    {
6034 	      remove = true;
6035 	      break;
6036 	    }
6037 	  omp_add_variable (ctx, decl, flags);
6038 	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
6039 	      && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
6040 	    {
6041 	      omp_add_variable (ctx, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
6042 				GOVD_LOCAL | GOVD_SEEN);
6043 	      gimplify_omp_ctxp = ctx;
6044 	      push_gimplify_context ();
6045 
6046 	      OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
6047 	      OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
6048 
6049 	      gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c),
6050 		  		&OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c));
6051 	      pop_gimplify_context
6052 		(gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c)));
6053 	      push_gimplify_context ();
6054 	      gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c),
6055 		  		&OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
6056 	      pop_gimplify_context
6057 		(gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c)));
6058 	      OMP_CLAUSE_REDUCTION_INIT (c) = NULL_TREE;
6059 	      OMP_CLAUSE_REDUCTION_MERGE (c) = NULL_TREE;
6060 
6061 	      gimplify_omp_ctxp = outer_ctx;
6062 	    }
6063 	  else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6064 		   && OMP_CLAUSE_LASTPRIVATE_STMT (c))
6065 	    {
6066 	      gimplify_omp_ctxp = ctx;
6067 	      push_gimplify_context ();
6068 	      if (TREE_CODE (OMP_CLAUSE_LASTPRIVATE_STMT (c)) != BIND_EXPR)
6069 		{
6070 		  tree bind = build3 (BIND_EXPR, void_type_node, NULL,
6071 				      NULL, NULL);
6072 		  TREE_SIDE_EFFECTS (bind) = 1;
6073 		  BIND_EXPR_BODY (bind) = OMP_CLAUSE_LASTPRIVATE_STMT (c);
6074 		  OMP_CLAUSE_LASTPRIVATE_STMT (c) = bind;
6075 		}
6076 	      gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c),
6077 				&OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
6078 	      pop_gimplify_context
6079 		(gimple_seq_first_stmt (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c)));
6080 	      OMP_CLAUSE_LASTPRIVATE_STMT (c) = NULL_TREE;
6081 
6082 	      gimplify_omp_ctxp = outer_ctx;
6083 	    }
6084 	  else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
6085 		   && OMP_CLAUSE_LINEAR_STMT (c))
6086 	    {
6087 	      gimplify_omp_ctxp = ctx;
6088 	      push_gimplify_context ();
6089 	      if (TREE_CODE (OMP_CLAUSE_LINEAR_STMT (c)) != BIND_EXPR)
6090 		{
6091 		  tree bind = build3 (BIND_EXPR, void_type_node, NULL,
6092 				      NULL, NULL);
6093 		  TREE_SIDE_EFFECTS (bind) = 1;
6094 		  BIND_EXPR_BODY (bind) = OMP_CLAUSE_LINEAR_STMT (c);
6095 		  OMP_CLAUSE_LINEAR_STMT (c) = bind;
6096 		}
6097 	      gimplify_and_add (OMP_CLAUSE_LINEAR_STMT (c),
6098 				&OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
6099 	      pop_gimplify_context
6100 		(gimple_seq_first_stmt (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c)));
6101 	      OMP_CLAUSE_LINEAR_STMT (c) = NULL_TREE;
6102 
6103 	      gimplify_omp_ctxp = outer_ctx;
6104 	    }
6105 	  if (notice_outer)
6106 	    goto do_notice;
6107 	  break;
6108 
6109 	case OMP_CLAUSE_COPYIN:
6110 	case OMP_CLAUSE_COPYPRIVATE:
6111 	  decl = OMP_CLAUSE_DECL (c);
6112 	  if (error_operand_p (decl))
6113 	    {
6114 	      remove = true;
6115 	      break;
6116 	    }
6117 	  if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_COPYPRIVATE
6118 	      && !remove
6119 	      && !omp_check_private (ctx, decl, true))
6120 	    {
6121 	      remove = true;
6122 	      if (is_global_var (decl))
6123 		{
6124 		  if (DECL_THREAD_LOCAL_P (decl))
6125 		    remove = false;
6126 		  else if (DECL_HAS_VALUE_EXPR_P (decl))
6127 		    {
6128 		      tree value = get_base_address (DECL_VALUE_EXPR (decl));
6129 
6130 		      if (value
6131 			  && DECL_P (value)
6132 			  && DECL_THREAD_LOCAL_P (value))
6133 			remove = false;
6134 		    }
6135 		}
6136 	      if (remove)
6137 		error_at (OMP_CLAUSE_LOCATION (c),
6138 			  "copyprivate variable %qE is not threadprivate"
6139 			  " or private in outer context", DECL_NAME (decl));
6140 	    }
6141 	do_notice:
6142 	  if (outer_ctx)
6143 	    omp_notice_variable (outer_ctx, decl, true);
6144 	  if (check_non_private
6145 	      && region_type == ORT_WORKSHARE
6146 	      && omp_check_private (ctx, decl, false))
6147 	    {
6148 	      error ("%s variable %qE is private in outer context",
6149 		     check_non_private, DECL_NAME (decl));
6150 	      remove = true;
6151 	    }
6152 	  break;
6153 
6154 	case OMP_CLAUSE_FINAL:
6155 	case OMP_CLAUSE_IF:
6156 	  OMP_CLAUSE_OPERAND (c, 0)
6157 	    = gimple_boolify (OMP_CLAUSE_OPERAND (c, 0));
6158 	  /* Fall through.  */
6159 
6160 	case OMP_CLAUSE_SCHEDULE:
6161 	case OMP_CLAUSE_NUM_THREADS:
6162 	case OMP_CLAUSE_NUM_TEAMS:
6163 	case OMP_CLAUSE_THREAD_LIMIT:
6164 	case OMP_CLAUSE_DIST_SCHEDULE:
6165 	case OMP_CLAUSE_DEVICE:
6166 	  if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
6167 			     is_gimple_val, fb_rvalue) == GS_ERROR)
6168 	    remove = true;
6169 	  break;
6170 
6171 	case OMP_CLAUSE_NOWAIT:
6172 	case OMP_CLAUSE_ORDERED:
6173 	case OMP_CLAUSE_UNTIED:
6174 	case OMP_CLAUSE_COLLAPSE:
6175 	case OMP_CLAUSE_MERGEABLE:
6176 	case OMP_CLAUSE_PROC_BIND:
6177 	case OMP_CLAUSE_SAFELEN:
6178 	  break;
6179 
6180 	case OMP_CLAUSE_ALIGNED:
6181 	  decl = OMP_CLAUSE_DECL (c);
6182 	  if (error_operand_p (decl))
6183 	    {
6184 	      remove = true;
6185 	      break;
6186 	    }
6187 	  if (gimplify_expr (&OMP_CLAUSE_ALIGNED_ALIGNMENT (c), pre_p, NULL,
6188 			     is_gimple_val, fb_rvalue) == GS_ERROR)
6189 	    {
6190 	      remove = true;
6191 	      break;
6192 	    }
6193 	  if (!is_global_var (decl)
6194 	      && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
6195 	    omp_add_variable (ctx, decl, GOVD_ALIGNED);
6196 	  break;
6197 
6198 	case OMP_CLAUSE_DEFAULT:
6199 	  ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
6200 	  break;
6201 
6202 	default:
6203 	  gcc_unreachable ();
6204 	}
6205 
6206       if (remove)
6207 	*list_p = OMP_CLAUSE_CHAIN (c);
6208       else
6209 	list_p = &OMP_CLAUSE_CHAIN (c);
6210     }
6211 
6212   gimplify_omp_ctxp = ctx;
6213 }
6214 
6215 struct gimplify_adjust_omp_clauses_data
6216 {
6217   tree *list_p;
6218   gimple_seq *pre_p;
6219 };
6220 
6221 /* For all variables that were not actually used within the context,
6222    remove PRIVATE, SHARED, and FIRSTPRIVATE clauses.  */
6223 
6224 static int
6225 gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data)
6226 {
6227   tree *list_p = ((struct gimplify_adjust_omp_clauses_data *) data)->list_p;
6228   gimple_seq *pre_p
6229     = ((struct gimplify_adjust_omp_clauses_data *) data)->pre_p;
6230   tree decl = (tree) n->key;
6231   unsigned flags = n->value;
6232   enum omp_clause_code code;
6233   tree clause;
6234   bool private_debug;
6235 
6236   if (flags & (GOVD_EXPLICIT | GOVD_LOCAL))
6237     return 0;
6238   if ((flags & GOVD_SEEN) == 0)
6239     return 0;
6240   if (flags & GOVD_DEBUG_PRIVATE)
6241     {
6242       gcc_assert ((flags & GOVD_DATA_SHARE_CLASS) == GOVD_PRIVATE);
6243       private_debug = true;
6244     }
6245   else if (flags & GOVD_MAP)
6246     private_debug = false;
6247   else
6248     private_debug
6249       = lang_hooks.decls.omp_private_debug_clause (decl,
6250 						   !!(flags & GOVD_SHARED));
6251   if (private_debug)
6252     code = OMP_CLAUSE_PRIVATE;
6253   else if (flags & GOVD_MAP)
6254     code = OMP_CLAUSE_MAP;
6255   else if (flags & GOVD_SHARED)
6256     {
6257       if (is_global_var (decl))
6258 	{
6259 	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
6260 	  while (ctx != NULL)
6261 	    {
6262 	      splay_tree_node on
6263 		= splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
6264 	      if (on && (on->value & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
6265 				      | GOVD_PRIVATE | GOVD_REDUCTION
6266 				      | GOVD_LINEAR | GOVD_MAP)) != 0)
6267 		break;
6268 	      ctx = ctx->outer_context;
6269 	    }
6270 	  if (ctx == NULL)
6271 	    return 0;
6272 	}
6273       code = OMP_CLAUSE_SHARED;
6274     }
6275   else if (flags & GOVD_PRIVATE)
6276     code = OMP_CLAUSE_PRIVATE;
6277   else if (flags & GOVD_FIRSTPRIVATE)
6278     code = OMP_CLAUSE_FIRSTPRIVATE;
6279   else if (flags & GOVD_LASTPRIVATE)
6280     code = OMP_CLAUSE_LASTPRIVATE;
6281   else if (flags & GOVD_ALIGNED)
6282     return 0;
6283   else
6284     gcc_unreachable ();
6285 
6286   clause = build_omp_clause (input_location, code);
6287   OMP_CLAUSE_DECL (clause) = decl;
6288   OMP_CLAUSE_CHAIN (clause) = *list_p;
6289   if (private_debug)
6290     OMP_CLAUSE_PRIVATE_DEBUG (clause) = 1;
6291   else if (code == OMP_CLAUSE_PRIVATE && (flags & GOVD_PRIVATE_OUTER_REF))
6292     OMP_CLAUSE_PRIVATE_OUTER_REF (clause) = 1;
6293   else if (code == OMP_CLAUSE_MAP)
6294     {
6295       OMP_CLAUSE_MAP_KIND (clause) = flags & GOVD_MAP_TO_ONLY
6296 				     ? OMP_CLAUSE_MAP_TO
6297 				     : OMP_CLAUSE_MAP_TOFROM;
6298       if (DECL_SIZE (decl)
6299 	  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
6300 	{
6301 	  tree decl2 = DECL_VALUE_EXPR (decl);
6302 	  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
6303 	  decl2 = TREE_OPERAND (decl2, 0);
6304 	  gcc_assert (DECL_P (decl2));
6305 	  tree mem = build_simple_mem_ref (decl2);
6306 	  OMP_CLAUSE_DECL (clause) = mem;
6307 	  OMP_CLAUSE_SIZE (clause) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
6308 	  if (gimplify_omp_ctxp->outer_context)
6309 	    {
6310 	      struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
6311 	      omp_notice_variable (ctx, decl2, true);
6312 	      omp_notice_variable (ctx, OMP_CLAUSE_SIZE (clause), true);
6313 	    }
6314 	  tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
6315 				      OMP_CLAUSE_MAP);
6316 	  OMP_CLAUSE_DECL (nc) = decl;
6317 	  OMP_CLAUSE_SIZE (nc) = size_zero_node;
6318 	  OMP_CLAUSE_MAP_KIND (nc) = OMP_CLAUSE_MAP_POINTER;
6319 	  OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
6320 	  OMP_CLAUSE_CHAIN (clause) = nc;
6321 	}
6322       else
6323 	OMP_CLAUSE_SIZE (clause) = DECL_SIZE_UNIT (decl);
6324     }
6325   if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_LASTPRIVATE) != 0)
6326     {
6327       tree nc = build_omp_clause (input_location, OMP_CLAUSE_LASTPRIVATE);
6328       OMP_CLAUSE_DECL (nc) = decl;
6329       OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (nc) = 1;
6330       OMP_CLAUSE_CHAIN (nc) = *list_p;
6331       OMP_CLAUSE_CHAIN (clause) = nc;
6332       struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
6333       gimplify_omp_ctxp = ctx->outer_context;
6334       lang_hooks.decls.omp_finish_clause (nc, pre_p);
6335       gimplify_omp_ctxp = ctx;
6336     }
6337   *list_p = clause;
6338   struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
6339   gimplify_omp_ctxp = ctx->outer_context;
6340   lang_hooks.decls.omp_finish_clause (clause, pre_p);
6341   gimplify_omp_ctxp = ctx;
6342   return 0;
6343 }
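
/* Illustrative sketch (assumed example, not taken from any test case): given

       int x = 1;
       #pragma omp parallel
         use (x);

   no explicit clause mentions 'x', so scanning the body records it in the
   context's splay tree with GOVD_SHARED | GOVD_SEEN, and the callback above
   turns that entry into an implicit shared(x) clause added to the
   directive's clause list.  */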
6344 
6345 static void
6346 gimplify_adjust_omp_clauses (gimple_seq *pre_p, tree *list_p)
6347 {
6348   struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
6349   tree c, decl;
6350 
6351   while ((c = *list_p) != NULL)
6352     {
6353       splay_tree_node n;
6354       bool remove = false;
6355 
6356       switch (OMP_CLAUSE_CODE (c))
6357 	{
6358 	case OMP_CLAUSE_PRIVATE:
6359 	case OMP_CLAUSE_SHARED:
6360 	case OMP_CLAUSE_FIRSTPRIVATE:
6361 	case OMP_CLAUSE_LINEAR:
6362 	  decl = OMP_CLAUSE_DECL (c);
6363 	  n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
6364 	  remove = !(n->value & GOVD_SEEN);
6365 	  if (! remove)
6366 	    {
6367 	      bool shared = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED;
6368 	      if ((n->value & GOVD_DEBUG_PRIVATE)
6369 		  || lang_hooks.decls.omp_private_debug_clause (decl, shared))
6370 		{
6371 		  gcc_assert ((n->value & GOVD_DEBUG_PRIVATE) == 0
6372 			      || ((n->value & GOVD_DATA_SHARE_CLASS)
6373 				  == GOVD_PRIVATE));
6374 		  OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_PRIVATE);
6375 		  OMP_CLAUSE_PRIVATE_DEBUG (c) = 1;
6376 		}
6377 	      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
6378 		  && ctx->outer_context
6379 		  && !(OMP_CLAUSE_LINEAR_NO_COPYIN (c)
6380 		       && OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
6381 		{
6382 		  if (ctx->outer_context->combined_loop
6383 		      && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
6384 		    {
6385 		      n = splay_tree_lookup (ctx->outer_context->variables,
6386 					     (splay_tree_key) decl);
6387 		      if (n == NULL
6388 			  || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
6389 			{
6390 			  int flags = GOVD_FIRSTPRIVATE;
6391 			  /* #pragma omp distribute does not allow
6392 			     lastprivate clause.  */
6393 			  if (!ctx->outer_context->distribute)
6394 			    flags |= GOVD_LASTPRIVATE;
6395 			  if (n == NULL)
6396 			    omp_add_variable (ctx->outer_context, decl,
6397 					      flags | GOVD_SEEN);
6398 			  else
6399 			    n->value |= flags | GOVD_SEEN;
6400 			}
6401 		    }
6402 		  else if (!is_global_var (decl))
6403 		    omp_notice_variable (ctx->outer_context, decl, true);
6404 		}
6405 	    }
6406 	  break;
6407 
6408 	case OMP_CLAUSE_LASTPRIVATE:
6409 	  /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
6410 	     accurately reflect the presence of a FIRSTPRIVATE clause.  */
6411 	  decl = OMP_CLAUSE_DECL (c);
6412 	  n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
6413 	  OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
6414 	    = (n->value & GOVD_FIRSTPRIVATE) != 0;
6415 	  break;
6416 
6417 	case OMP_CLAUSE_ALIGNED:
6418 	  decl = OMP_CLAUSE_DECL (c);
6419 	  if (!is_global_var (decl))
6420 	    {
6421 	      n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
6422 	      remove = n == NULL || !(n->value & GOVD_SEEN);
6423 	      if (!remove && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
6424 		{
6425 		  struct gimplify_omp_ctx *octx;
6426 		  if (n != NULL
6427 		      && (n->value & (GOVD_DATA_SHARE_CLASS
6428 				      & ~GOVD_FIRSTPRIVATE)))
6429 		    remove = true;
6430 		  else
6431 		    for (octx = ctx->outer_context; octx;
6432 			 octx = octx->outer_context)
6433 		      {
6434 			n = splay_tree_lookup (octx->variables,
6435 					       (splay_tree_key) decl);
6436 			if (n == NULL)
6437 			  continue;
6438 			if (n->value & GOVD_LOCAL)
6439 			  break;
6440 			/* We have to avoid assigning a shared variable
6441 			   to itself when trying to add
6442 			   __builtin_assume_aligned.  */
6443 			if (n->value & GOVD_SHARED)
6444 			  {
6445 			    remove = true;
6446 			    break;
6447 			  }
6448 		      }
6449 		}
6450 	    }
6451 	  else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
6452 	    {
6453 	      n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
6454 	      if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
6455 		remove = true;
6456 	    }
6457 	  break;
6458 
6459 	case OMP_CLAUSE_MAP:
6460 	  decl = OMP_CLAUSE_DECL (c);
6461 	  if (!DECL_P (decl))
6462 	    break;
6463 	  n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
6464 	  if (ctx->region_type == ORT_TARGET && !(n->value & GOVD_SEEN))
6465 	    remove = true;
6466 	  else if (DECL_SIZE (decl)
6467 		   && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST
6468 		   && OMP_CLAUSE_MAP_KIND (c) != OMP_CLAUSE_MAP_POINTER)
6469 	    {
6470 	      tree decl2 = DECL_VALUE_EXPR (decl);
6471 	      gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
6472 	      decl2 = TREE_OPERAND (decl2, 0);
6473 	      gcc_assert (DECL_P (decl2));
6474 	      tree mem = build_simple_mem_ref (decl2);
6475 	      OMP_CLAUSE_DECL (c) = mem;
6476 	      OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
6477 	      if (ctx->outer_context)
6478 		{
6479 		  omp_notice_variable (ctx->outer_context, decl2, true);
6480 		  omp_notice_variable (ctx->outer_context,
6481 				       OMP_CLAUSE_SIZE (c), true);
6482 		}
6483 	      tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (c),
6484 					  OMP_CLAUSE_MAP);
6485 	      OMP_CLAUSE_DECL (nc) = decl;
6486 	      OMP_CLAUSE_SIZE (nc) = size_zero_node;
6487 	      OMP_CLAUSE_MAP_KIND (nc) = OMP_CLAUSE_MAP_POINTER;
6488 	      OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (c);
6489 	      OMP_CLAUSE_CHAIN (c) = nc;
6490 	      c = nc;
6491 	    }
6492 	  else if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
6493 	    OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
6494 	  break;
6495 
6496 	case OMP_CLAUSE_TO:
6497 	case OMP_CLAUSE_FROM:
6498 	  decl = OMP_CLAUSE_DECL (c);
6499 	  if (!DECL_P (decl))
6500 	    break;
6501 	  if (DECL_SIZE (decl)
6502 	      && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
6503 	    {
6504 	      tree decl2 = DECL_VALUE_EXPR (decl);
6505 	      gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
6506 	      decl2 = TREE_OPERAND (decl2, 0);
6507 	      gcc_assert (DECL_P (decl2));
6508 	      tree mem = build_simple_mem_ref (decl2);
6509 	      OMP_CLAUSE_DECL (c) = mem;
6510 	      OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
6511 	      if (ctx->outer_context)
6512 		{
6513 		  omp_notice_variable (ctx->outer_context, decl2, true);
6514 		  omp_notice_variable (ctx->outer_context,
6515 				       OMP_CLAUSE_SIZE (c), true);
6516 		}
6517 	    }
6518 	  else if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
6519 	    OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
6520 	  break;
6521 
6522 	case OMP_CLAUSE_REDUCTION:
6523 	case OMP_CLAUSE_COPYIN:
6524 	case OMP_CLAUSE_COPYPRIVATE:
6525 	case OMP_CLAUSE_IF:
6526 	case OMP_CLAUSE_NUM_THREADS:
6527 	case OMP_CLAUSE_NUM_TEAMS:
6528 	case OMP_CLAUSE_THREAD_LIMIT:
6529 	case OMP_CLAUSE_DIST_SCHEDULE:
6530 	case OMP_CLAUSE_DEVICE:
6531 	case OMP_CLAUSE_SCHEDULE:
6532 	case OMP_CLAUSE_NOWAIT:
6533 	case OMP_CLAUSE_ORDERED:
6534 	case OMP_CLAUSE_DEFAULT:
6535 	case OMP_CLAUSE_UNTIED:
6536 	case OMP_CLAUSE_COLLAPSE:
6537 	case OMP_CLAUSE_FINAL:
6538 	case OMP_CLAUSE_MERGEABLE:
6539 	case OMP_CLAUSE_PROC_BIND:
6540 	case OMP_CLAUSE_SAFELEN:
6541 	case OMP_CLAUSE_DEPEND:
6542 	  break;
6543 
6544 	default:
6545 	  gcc_unreachable ();
6546 	}
6547 
6548       if (remove)
6549 	*list_p = OMP_CLAUSE_CHAIN (c);
6550       else
6551 	list_p = &OMP_CLAUSE_CHAIN (c);
6552     }
6553 
6554   /* Add in any implicit data sharing.  */
6555   struct gimplify_adjust_omp_clauses_data data;
6556   data.list_p = list_p;
6557   data.pre_p = pre_p;
6558   splay_tree_foreach (ctx->variables, gimplify_adjust_omp_clauses_1, &data);
6559 
6560   gimplify_omp_ctxp = ctx->outer_context;
6561   delete_omp_context (ctx);
6562 }
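
/* Hedged example of the pruning loop above: in

       int x, y;
       #pragma omp parallel private (x, y)
         use (y);

   'x' is never referenced inside the region, so its splay-tree entry lacks
   GOVD_SEEN and private(x) is removed, while private(y) is kept.  */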
6563 
6564 /* Gimplify the contents of an OMP_PARALLEL statement.  This involves
6565    gimplification of the body, as well as scanning the body for used
6566    variables.  We need to do this scan now, because variable-sized
6567    decls will be decomposed during gimplification.  */
6568 
6569 static void
6570 gimplify_omp_parallel (tree *expr_p, gimple_seq *pre_p)
6571 {
6572   tree expr = *expr_p;
6573   gimple g;
6574   gimple_seq body = NULL;
6575 
6576   gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p,
6577 			     OMP_PARALLEL_COMBINED (expr)
6578 			     ? ORT_COMBINED_PARALLEL
6579 			     : ORT_PARALLEL);
6580 
6581   push_gimplify_context ();
6582 
6583   g = gimplify_and_return_first (OMP_PARALLEL_BODY (expr), &body);
6584   if (gimple_code (g) == GIMPLE_BIND)
6585     pop_gimplify_context (g);
6586   else
6587     pop_gimplify_context (NULL);
6588 
6589   gimplify_adjust_omp_clauses (pre_p, &OMP_PARALLEL_CLAUSES (expr));
6590 
6591   g = gimple_build_omp_parallel (body,
6592 				 OMP_PARALLEL_CLAUSES (expr),
6593 				 NULL_TREE, NULL_TREE);
6594   if (OMP_PARALLEL_COMBINED (expr))
6595     gimple_omp_set_subcode (g, GF_OMP_PARALLEL_COMBINED);
6596   gimplify_seq_add_stmt (pre_p, g);
6597   *expr_p = NULL_TREE;
6598 }
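
/* Rough sketch of the effect of gimplify_omp_parallel (illustrative only):

       #pragma omp parallel shared(a)
         a++;

   becomes a single GIMPLE_OMP_PARALLEL statement carrying the (possibly
   adjusted) clause list, whose body is the gimplified statement sequence
   for 'a++'.  */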
6599 
6600 /* Gimplify the contents of an OMP_TASK statement.  This involves
6601    gimplification of the body, as well as scanning the body for used
6602    variables.  We need to do this scan now, because variable-sized
6603    decls will be decomposed during gimplification.  */
6604 
6605 static void
6606 gimplify_omp_task (tree *expr_p, gimple_seq *pre_p)
6607 {
6608   tree expr = *expr_p;
6609   gimple g;
6610   gimple_seq body = NULL;
6611 
6612   gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr), pre_p,
6613 			     find_omp_clause (OMP_TASK_CLAUSES (expr),
6614 					      OMP_CLAUSE_UNTIED)
6615 			     ? ORT_UNTIED_TASK : ORT_TASK);
6616 
6617   push_gimplify_context ();
6618 
6619   g = gimplify_and_return_first (OMP_TASK_BODY (expr), &body);
6620   if (gimple_code (g) == GIMPLE_BIND)
6621     pop_gimplify_context (g);
6622   else
6623     pop_gimplify_context (NULL);
6624 
6625   gimplify_adjust_omp_clauses (pre_p, &OMP_TASK_CLAUSES (expr));
6626 
6627   g = gimple_build_omp_task (body,
6628 			     OMP_TASK_CLAUSES (expr),
6629 			     NULL_TREE, NULL_TREE,
6630 			     NULL_TREE, NULL_TREE, NULL_TREE);
6631   gimplify_seq_add_stmt (pre_p, g);
6632   *expr_p = NULL_TREE;
6633 }
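
/* Illustrative note: gimplify_omp_task differs from the parallel case mainly
   in the region type; e.g.

       #pragma omp task untied
         work ();

   is scanned with ORT_UNTIED_TASK rather than ORT_TASK because an 'untied'
   clause is present on the directive (see the find_omp_clause test above).  */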
6634 
6635 /* Helper function of gimplify_omp_for: find an OMP_FOR or OMP_SIMD
6636    with non-NULL OMP_FOR_INIT.  */
6637 
6638 static tree
6639 find_combined_omp_for (tree *tp, int *walk_subtrees, void *)
6640 {
6641   *walk_subtrees = 0;
6642   switch (TREE_CODE (*tp))
6643     {
6644     case OMP_FOR:
6645       *walk_subtrees = 1;
6646       /* FALLTHRU */
6647     case OMP_SIMD:
6648       if (OMP_FOR_INIT (*tp) != NULL_TREE)
6649 	return *tp;
6650       break;
6651     case BIND_EXPR:
6652     case STATEMENT_LIST:
6653     case OMP_PARALLEL:
6654       *walk_subtrees = 1;
6655       break;
6656     default:
6657       break;
6658     }
6659   return NULL_TREE;
6660 }
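
/* Hedged example: for a combined loop construct such as

       #pragma omp for simd
       for (i = 0; i < n; i++) ...

   the front end leaves OMP_FOR_INIT of the outer OMP_FOR node NULL and puts
   the actual loop on the inner OMP_SIMD; the walker above digs through the
   intervening BIND_EXPR/STATEMENT_LIST wrappers to find that innermost node,
   which gimplify_omp_for then uses as FOR_STMT.  */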
6661 
6662 /* Gimplify the gross structure of an OMP_FOR statement.  */
6663 
6664 static enum gimplify_status
6665 gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
6666 {
6667   tree for_stmt, orig_for_stmt, decl, var, t;
6668   enum gimplify_status ret = GS_ALL_DONE;
6669   enum gimplify_status tret;
6670   gimple gfor;
6671   gimple_seq for_body, for_pre_body;
6672   int i;
6673   bool simd;
6674   bitmap has_decl_expr = NULL;
6675 
6676   orig_for_stmt = for_stmt = *expr_p;
6677 
6678   simd = (TREE_CODE (for_stmt) == OMP_SIMD
6679 	  || TREE_CODE (for_stmt) == CILK_SIMD);
6680   gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt), pre_p,
6681 			     simd ? ORT_SIMD : ORT_WORKSHARE);
6682   if (TREE_CODE (for_stmt) == OMP_DISTRIBUTE)
6683     gimplify_omp_ctxp->distribute = true;
6684 
6685   /* Handle OMP_FOR_INIT.  */
6686   for_pre_body = NULL;
6687   if (simd && OMP_FOR_PRE_BODY (for_stmt))
6688     {
6689       has_decl_expr = BITMAP_ALLOC (NULL);
6690       if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == DECL_EXPR
6691 	  && TREE_CODE (DECL_EXPR_DECL (OMP_FOR_PRE_BODY (for_stmt)))
6692 	     == VAR_DECL)
6693 	{
6694 	  t = OMP_FOR_PRE_BODY (for_stmt);
6695 	  bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
6696 	}
6697       else if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == STATEMENT_LIST)
6698 	{
6699 	  tree_stmt_iterator si;
6700 	  for (si = tsi_start (OMP_FOR_PRE_BODY (for_stmt)); !tsi_end_p (si);
6701 	       tsi_next (&si))
6702 	    {
6703 	      t = tsi_stmt (si);
6704 	      if (TREE_CODE (t) == DECL_EXPR
6705 		  && TREE_CODE (DECL_EXPR_DECL (t)) == VAR_DECL)
6706 		bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
6707 	    }
6708 	}
6709     }
6710   gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
6711   OMP_FOR_PRE_BODY (for_stmt) = NULL_TREE;
6712 
6713   if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
6714     {
6715       for_stmt = walk_tree (&OMP_FOR_BODY (for_stmt), find_combined_omp_for,
6716 			    NULL, NULL);
6717       gcc_assert (for_stmt != NULL_TREE);
6718       gimplify_omp_ctxp->combined_loop = true;
6719     }
6720 
6721   for_body = NULL;
6722   gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
6723 	      == TREE_VEC_LENGTH (OMP_FOR_COND (for_stmt)));
6724   gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
6725 	      == TREE_VEC_LENGTH (OMP_FOR_INCR (for_stmt)));
6726   for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
6727     {
6728       t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
6729       gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
6730       decl = TREE_OPERAND (t, 0);
6731       gcc_assert (DECL_P (decl));
6732       gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl))
6733 		  || POINTER_TYPE_P (TREE_TYPE (decl)));
6734 
6735       /* Make sure the iteration variable is private.  */
6736       tree c = NULL_TREE;
6737       tree c2 = NULL_TREE;
6738       if (orig_for_stmt != for_stmt)
6739 	/* Do this only on innermost construct for combined ones.  */;
6740       else if (simd)
6741 	{
6742 	  splay_tree_node n = splay_tree_lookup (gimplify_omp_ctxp->variables,
6743 						 (splay_tree_key)decl);
6744 	  omp_is_private (gimplify_omp_ctxp, decl,
6745 			  1 + (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
6746 			       != 1));
6747 	  if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
6748 	    omp_notice_variable (gimplify_omp_ctxp, decl, true);
6749 	  else if (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
6750 	    {
6751 	      c = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
6752 	      OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
6753 	      if (has_decl_expr
6754 		  && bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
6755 		OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
6756 	      OMP_CLAUSE_DECL (c) = decl;
6757 	      OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
6758 	      OMP_FOR_CLAUSES (for_stmt) = c;
6759 	      omp_add_variable (gimplify_omp_ctxp, decl,
6760 				GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN);
6761 	    }
6762 	  else
6763 	    {
6764 	      bool lastprivate
6765 		= (!has_decl_expr
6766 		   || !bitmap_bit_p (has_decl_expr, DECL_UID (decl)));
6767 	      if (lastprivate
6768 		  && gimplify_omp_ctxp->outer_context
6769 		  && gimplify_omp_ctxp->outer_context->region_type
6770 		     == ORT_WORKSHARE
6771 		  && gimplify_omp_ctxp->outer_context->combined_loop
6772 		  && !gimplify_omp_ctxp->outer_context->distribute)
6773 		{
6774 		  struct gimplify_omp_ctx *outer
6775 		    = gimplify_omp_ctxp->outer_context;
6776 		  n = splay_tree_lookup (outer->variables,
6777 					 (splay_tree_key) decl);
6778 		  if (n != NULL
6779 		      && (n->value & GOVD_DATA_SHARE_CLASS) == GOVD_LOCAL)
6780 		    lastprivate = false;
6781 		  else if (omp_check_private (outer, decl, false))
6782 		    error ("lastprivate variable %qE is private in outer "
6783 			   "context", DECL_NAME (decl));
6784 		  else
6785 		    {
6786 		      omp_add_variable (outer, decl,
6787 					GOVD_LASTPRIVATE | GOVD_SEEN);
6788 		      if (outer->outer_context)
6789 			omp_notice_variable (outer->outer_context, decl, true);
6790 		    }
6791 		}
6792 	      c = build_omp_clause (input_location,
6793 				    lastprivate ? OMP_CLAUSE_LASTPRIVATE
6794 						: OMP_CLAUSE_PRIVATE);
6795 	      OMP_CLAUSE_DECL (c) = decl;
6796 	      OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
6797 	      OMP_FOR_CLAUSES (for_stmt) = c;
6798 	      omp_add_variable (gimplify_omp_ctxp, decl,
6799 				(lastprivate ? GOVD_LASTPRIVATE : GOVD_PRIVATE)
6800 				| GOVD_EXPLICIT | GOVD_SEEN);
6801 	      c = NULL_TREE;
6802 	    }
6803 	}
6804       else if (omp_is_private (gimplify_omp_ctxp, decl, 0))
6805 	omp_notice_variable (gimplify_omp_ctxp, decl, true);
6806       else
6807 	omp_add_variable (gimplify_omp_ctxp, decl, GOVD_PRIVATE | GOVD_SEEN);
6808 
6809       /* If DECL is not a gimple register, create a temporary variable to act
6810 	 as an iteration counter.  This is valid, since DECL cannot be
6811 	 modified in the body of the loop.  The same is done for the iteration
6812 	 variables of a simd loop with collapse > 1, which must be
6813 	 lastprivate.  */
6814       if (orig_for_stmt != for_stmt)
6815 	var = decl;
6816       else if (!is_gimple_reg (decl)
6817 	       || (simd && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) > 1))
6818 	{
6819 	  var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
6820 	  TREE_OPERAND (t, 0) = var;
6821 
6822 	  gimplify_seq_add_stmt (&for_body, gimple_build_assign (decl, var));
6823 
6824 	  if (simd && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
6825 	    {
6826 	      c2 = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
6827 	      OMP_CLAUSE_LINEAR_NO_COPYIN (c2) = 1;
6828 	      OMP_CLAUSE_LINEAR_NO_COPYOUT (c2) = 1;
6829 	      OMP_CLAUSE_DECL (c2) = var;
6830 	      OMP_CLAUSE_CHAIN (c2) = OMP_FOR_CLAUSES (for_stmt);
6831 	      OMP_FOR_CLAUSES (for_stmt) = c2;
6832 	      omp_add_variable (gimplify_omp_ctxp, var,
6833 				GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN);
6834 	      if (c == NULL_TREE)
6835 		{
6836 		  c = c2;
6837 		  c2 = NULL_TREE;
6838 		}
6839 	    }
6840 	  else
6841 	    omp_add_variable (gimplify_omp_ctxp, var,
6842 			      GOVD_PRIVATE | GOVD_SEEN);
6843 	}
6844       else
6845 	var = decl;
6846 
6847       tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
6848 			    is_gimple_val, fb_rvalue);
6849       ret = MIN (ret, tret);
6850       if (ret == GS_ERROR)
6851 	return ret;
6852 
6853       /* Handle OMP_FOR_COND.  */
6854       t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
6855       gcc_assert (COMPARISON_CLASS_P (t));
6856       gcc_assert (TREE_OPERAND (t, 0) == decl);
6857 
6858       tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
6859 			    is_gimple_val, fb_rvalue);
6860       ret = MIN (ret, tret);
6861 
6862       /* Handle OMP_FOR_INCR.  */
6863       t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
6864       switch (TREE_CODE (t))
6865 	{
6866 	case PREINCREMENT_EXPR:
6867 	case POSTINCREMENT_EXPR:
6868 	  {
6869 	    tree decl = TREE_OPERAND (t, 0);
6870 	    /* c_omp_for_incr_canonicalize_ptr() should have been
6871 	       called to massage things appropriately.  */
6872 	    gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
6873 
6874 	    if (orig_for_stmt != for_stmt)
6875 	      break;
6876 	    t = build_int_cst (TREE_TYPE (decl), 1);
6877 	    if (c)
6878 	      OMP_CLAUSE_LINEAR_STEP (c) = t;
6879 	    t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
6880 	    t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
6881 	    TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
6882 	    break;
6883 	  }
6884 
6885 	case PREDECREMENT_EXPR:
6886 	case POSTDECREMENT_EXPR:
6887 	  /* c_omp_for_incr_canonicalize_ptr() should have been
6888 	     called to massage things appropriately.  */
6889 	  gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
6890 	  if (orig_for_stmt != for_stmt)
6891 	    break;
6892 	  t = build_int_cst (TREE_TYPE (decl), -1);
6893 	  if (c)
6894 	    OMP_CLAUSE_LINEAR_STEP (c) = t;
6895 	  t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
6896 	  t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
6897 	  TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
6898 	  break;
6899 
6900 	case MODIFY_EXPR:
6901 	  gcc_assert (TREE_OPERAND (t, 0) == decl);
6902 	  TREE_OPERAND (t, 0) = var;
6903 
6904 	  t = TREE_OPERAND (t, 1);
6905 	  switch (TREE_CODE (t))
6906 	    {
6907 	    case PLUS_EXPR:
6908 	      if (TREE_OPERAND (t, 1) == decl)
6909 		{
6910 		  TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0);
6911 		  TREE_OPERAND (t, 0) = var;
6912 		  break;
6913 		}
6914 
6915 	      /* Fallthru.  */
6916 	    case MINUS_EXPR:
6917 	    case POINTER_PLUS_EXPR:
6918 	      gcc_assert (TREE_OPERAND (t, 0) == decl);
6919 	      TREE_OPERAND (t, 0) = var;
6920 	      break;
6921 	    default:
6922 	      gcc_unreachable ();
6923 	    }
6924 
6925 	  tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
6926 				is_gimple_val, fb_rvalue);
6927 	  ret = MIN (ret, tret);
6928 	  if (c)
6929 	    {
6930 	      tree step = TREE_OPERAND (t, 1);
6931 	      tree stept = TREE_TYPE (decl);
6932 	      if (POINTER_TYPE_P (stept))
6933 		stept = sizetype;
6934 	      step = fold_convert (stept, step);
6935 	      if (TREE_CODE (t) == MINUS_EXPR)
6936 		step = fold_build1 (NEGATE_EXPR, stept, step);
6937 	      OMP_CLAUSE_LINEAR_STEP (c) = step;
6938 	      if (step != TREE_OPERAND (t, 1))
6939 		{
6940 		  tret = gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c),
6941 					&for_pre_body, NULL,
6942 					is_gimple_val, fb_rvalue);
6943 		  ret = MIN (ret, tret);
6944 		}
6945 	    }
6946 	  break;
6947 
6948 	default:
6949 	  gcc_unreachable ();
6950 	}
6951 
6952       if (c2)
6953 	{
6954 	  gcc_assert (c);
6955 	  OMP_CLAUSE_LINEAR_STEP (c2) = OMP_CLAUSE_LINEAR_STEP (c);
6956 	}
6957 
6958       if ((var != decl || TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) > 1)
6959 	  && orig_for_stmt == for_stmt)
6960 	{
6961 	  for (c = OMP_FOR_CLAUSES (for_stmt); c ; c = OMP_CLAUSE_CHAIN (c))
6962 	    if (((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
6963 		  && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) == NULL)
6964 		 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
6965 		     && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)
6966 		     && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) == NULL))
6967 		&& OMP_CLAUSE_DECL (c) == decl)
6968 	      {
6969 		t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
6970 		gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
6971 		gcc_assert (TREE_OPERAND (t, 0) == var);
6972 		t = TREE_OPERAND (t, 1);
6973 		gcc_assert (TREE_CODE (t) == PLUS_EXPR
6974 			    || TREE_CODE (t) == MINUS_EXPR
6975 			    || TREE_CODE (t) == POINTER_PLUS_EXPR);
6976 		gcc_assert (TREE_OPERAND (t, 0) == var);
6977 		t = build2 (TREE_CODE (t), TREE_TYPE (decl), decl,
6978 			    TREE_OPERAND (t, 1));
6979 		gimple_seq *seq;
6980 		if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
6981 		  seq = &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c);
6982 		else
6983 		  seq = &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c);
6984 		gimplify_assign (decl, t, seq);
6985 	    }
6986 	}
6987     }
6988 
6989   BITMAP_FREE (has_decl_expr);
6990 
6991   gimplify_and_add (OMP_FOR_BODY (orig_for_stmt), &for_body);
6992 
6993   if (orig_for_stmt != for_stmt)
6994     for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
6995       {
6996 	t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
6997 	decl = TREE_OPERAND (t, 0);
6998 	var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
6999 	omp_add_variable (gimplify_omp_ctxp, var, GOVD_PRIVATE | GOVD_SEEN);
7000 	TREE_OPERAND (t, 0) = var;
7001 	t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
7002 	TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
7003 	TREE_OPERAND (TREE_OPERAND (t, 1), 0) = var;
7004       }
7005 
7006   gimplify_adjust_omp_clauses (pre_p, &OMP_FOR_CLAUSES (orig_for_stmt));
7007 
7008   int kind;
7009   switch (TREE_CODE (orig_for_stmt))
7010     {
7011     case OMP_FOR: kind = GF_OMP_FOR_KIND_FOR; break;
7012     case OMP_SIMD: kind = GF_OMP_FOR_KIND_SIMD; break;
7013     case CILK_SIMD: kind = GF_OMP_FOR_KIND_CILKSIMD; break;
7014     case OMP_DISTRIBUTE: kind = GF_OMP_FOR_KIND_DISTRIBUTE; break;
7015     default:
7016       gcc_unreachable ();
7017     }
7018   gfor = gimple_build_omp_for (for_body, kind, OMP_FOR_CLAUSES (orig_for_stmt),
7019 			       TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)),
7020 			       for_pre_body);
7021   if (orig_for_stmt != for_stmt)
7022     gimple_omp_for_set_combined_p (gfor, true);
7023   if (gimplify_omp_ctxp
7024       && (gimplify_omp_ctxp->combined_loop
7025 	  || (gimplify_omp_ctxp->region_type == ORT_COMBINED_PARALLEL
7026 	      && gimplify_omp_ctxp->outer_context
7027 	      && gimplify_omp_ctxp->outer_context->combined_loop)))
7028     {
7029       gimple_omp_for_set_combined_into_p (gfor, true);
7030       if (gimplify_omp_ctxp->combined_loop)
7031 	gcc_assert (TREE_CODE (orig_for_stmt) == OMP_SIMD);
7032       else
7033 	gcc_assert (TREE_CODE (orig_for_stmt) == OMP_FOR);
7034     }
7035 
7036   for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
7037     {
7038       t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
7039       gimple_omp_for_set_index (gfor, i, TREE_OPERAND (t, 0));
7040       gimple_omp_for_set_initial (gfor, i, TREE_OPERAND (t, 1));
7041       t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
7042       gimple_omp_for_set_cond (gfor, i, TREE_CODE (t));
7043       gimple_omp_for_set_final (gfor, i, TREE_OPERAND (t, 1));
7044       t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
7045       gimple_omp_for_set_incr (gfor, i, TREE_OPERAND (t, 1));
7046     }
7047 
7048   gimplify_seq_add_stmt (pre_p, gfor);
7049   if (ret != GS_ALL_DONE)
7050     return GS_ERROR;
7051   *expr_p = NULL_TREE;
7052   return GS_ALL_DONE;
7053 }
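
/* Hedged illustration of the iteration-variable handling above: for

       #pragma omp simd
       for (i = 0; i < n; i++) ...

   the single iteration variable 'i' receives an implicit linear clause
   (with no copy-in) whose step is taken from the increment, whereas for a
   plain worksharing loop

       #pragma omp for
       for (i = 0; i < n; i++) ...

   'i' is simply made private to the construct.  */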
7054 
7055 /* Gimplify the gross structure of other OpenMP constructs.
7056    In particular, OMP_SECTIONS, OMP_SINGLE, OMP_TARGET, OMP_TARGET_DATA
7057    and OMP_TEAMS.  */
7058 
7059 static void
7060 gimplify_omp_workshare (tree *expr_p, gimple_seq *pre_p)
7061 {
7062   tree expr = *expr_p;
7063   gimple stmt;
7064   gimple_seq body = NULL;
7065   enum omp_region_type ort = ORT_WORKSHARE;
7066 
7067   switch (TREE_CODE (expr))
7068     {
7069     case OMP_SECTIONS:
7070     case OMP_SINGLE:
7071       break;
7072     case OMP_TARGET:
7073       ort = ORT_TARGET;
7074       break;
7075     case OMP_TARGET_DATA:
7076       ort = ORT_TARGET_DATA;
7077       break;
7078     case OMP_TEAMS:
7079       ort = ORT_TEAMS;
7080       break;
7081     default:
7082       gcc_unreachable ();
7083     }
7084   gimplify_scan_omp_clauses (&OMP_CLAUSES (expr), pre_p, ort);
7085   if (ort == ORT_TARGET || ort == ORT_TARGET_DATA)
7086     {
7087       push_gimplify_context ();
7088       gimple g = gimplify_and_return_first (OMP_BODY (expr), &body);
7089       if (gimple_code (g) == GIMPLE_BIND)
7090 	pop_gimplify_context (g);
7091       else
7092 	pop_gimplify_context (NULL);
7093       if (ort == ORT_TARGET_DATA)
7094 	{
7095 	  gimple_seq cleanup = NULL;
7096 	  tree fn = builtin_decl_explicit (BUILT_IN_GOMP_TARGET_END_DATA);
7097 	  g = gimple_build_call (fn, 0);
7098 	  gimple_seq_add_stmt (&cleanup, g);
7099 	  g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
7100 	  body = NULL;
7101 	  gimple_seq_add_stmt (&body, g);
7102 	}
7103     }
7104   else
7105     gimplify_and_add (OMP_BODY (expr), &body);
7106   gimplify_adjust_omp_clauses (pre_p, &OMP_CLAUSES (expr));
7107 
7108   switch (TREE_CODE (expr))
7109     {
7110     case OMP_SECTIONS:
7111       stmt = gimple_build_omp_sections (body, OMP_CLAUSES (expr));
7112       break;
7113     case OMP_SINGLE:
7114       stmt = gimple_build_omp_single (body, OMP_CLAUSES (expr));
7115       break;
7116     case OMP_TARGET:
7117       stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_REGION,
7118 				      OMP_CLAUSES (expr));
7119       break;
7120     case OMP_TARGET_DATA:
7121       stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_DATA,
7122 				      OMP_CLAUSES (expr));
7123       break;
7124     case OMP_TEAMS:
7125       stmt = gimple_build_omp_teams (body, OMP_CLAUSES (expr));
7126       break;
7127     default:
7128       gcc_unreachable ();
7129     }
7130 
7131   gimplify_seq_add_stmt (pre_p, stmt);
7132   *expr_p = NULL_TREE;
7133 }
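
/* Hedged sketch of the OMP_TARGET_DATA case above:

       #pragma omp target data map(tofrom: a)
         { ... }

   is lowered to a GIMPLE_OMP_TARGET of kind DATA whose body is wrapped in a
   try/finally region calling BUILT_IN_GOMP_TARGET_END_DATA, so the device
   data environment is torn down on every exit path.  */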
7134 
7135 /* Gimplify the gross structure of an OpenMP target update construct.  */
7136 
7137 static void
7138 gimplify_omp_target_update (tree *expr_p, gimple_seq *pre_p)
7139 {
7140   tree expr = *expr_p;
7141   gimple stmt;
7142 
7143   gimplify_scan_omp_clauses (&OMP_TARGET_UPDATE_CLAUSES (expr), pre_p,
7144 			     ORT_WORKSHARE);
7145   gimplify_adjust_omp_clauses (pre_p, &OMP_TARGET_UPDATE_CLAUSES (expr));
7146   stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_UPDATE,
7147 				  OMP_TARGET_UPDATE_CLAUSES (expr));
7148 
7149   gimplify_seq_add_stmt (pre_p, stmt);
7150   *expr_p = NULL_TREE;
7151 }
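
/* Illustrative: '#pragma omp target update to(a)' has no body, so only its
   clauses are scanned and adjusted, and a bodyless GIMPLE_OMP_TARGET of
   kind UPDATE is emitted.  */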
7152 
7153 /* A subroutine of gimplify_omp_atomic.  The front end is supposed to have
7154    stabilized the lhs of the atomic operation as *ADDR.  Return true if
7155    EXPR is this stabilized form.  */
7156 
7157 static bool
7158 goa_lhs_expr_p (tree expr, tree addr)
7159 {
7160   /* Also include casts to other type variants.  The C front end is fond
7161      of adding these for e.g. volatile variables.  This is like
7162      STRIP_TYPE_NOPS but includes the main variant lookup.  */
7163   STRIP_USELESS_TYPE_CONVERSION (expr);
7164 
7165   if (TREE_CODE (expr) == INDIRECT_REF)
7166     {
7167       expr = TREE_OPERAND (expr, 0);
7168       while (expr != addr
7169 	     && (CONVERT_EXPR_P (expr)
7170 		 || TREE_CODE (expr) == NON_LVALUE_EXPR)
7171 	     && TREE_CODE (expr) == TREE_CODE (addr)
7172 	     && types_compatible_p (TREE_TYPE (expr), TREE_TYPE (addr)))
7173 	{
7174 	  expr = TREE_OPERAND (expr, 0);
7175 	  addr = TREE_OPERAND (addr, 0);
7176 	}
7177       if (expr == addr)
7178 	return true;
7179       return (TREE_CODE (addr) == ADDR_EXPR
7180 	      && TREE_CODE (expr) == ADDR_EXPR
7181 	      && TREE_OPERAND (addr, 0) == TREE_OPERAND (expr, 0));
7182     }
7183   if (TREE_CODE (addr) == ADDR_EXPR && expr == TREE_OPERAND (addr, 0))
7184     return true;
7185   return false;
7186 }
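
/* Illustrative: with ADDR being '&x', both the plain reference 'x' and the
   dereference '*&x' (possibly behind useless type conversions the C front
   end adds for volatile accesses) are recognized as the stabilized lhs.  */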
7187 
7188 /* Walk *EXPR_P and replace appearances of *LHS_ADDR with LHS_VAR.  If an
7189    expression does not involve the lhs, evaluate it into a temporary.
7190    Return 1 if the lhs appeared as a subexpression, 0 if it did not,
7191    or -1 if an error was encountered.  */
7192 
7193 static int
7194 goa_stabilize_expr (tree *expr_p, gimple_seq *pre_p, tree lhs_addr,
7195 		    tree lhs_var)
7196 {
7197   tree expr = *expr_p;
7198   int saw_lhs;
7199 
7200   if (goa_lhs_expr_p (expr, lhs_addr))
7201     {
7202       *expr_p = lhs_var;
7203       return 1;
7204     }
7205   if (is_gimple_val (expr))
7206     return 0;
7207 
7208   saw_lhs = 0;
7209   switch (TREE_CODE_CLASS (TREE_CODE (expr)))
7210     {
7211     case tcc_binary:
7212     case tcc_comparison:
7213       saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p, lhs_addr,
7214 				     lhs_var);
7215     case tcc_unary:
7216       saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p, lhs_addr,
7217 				     lhs_var);
7218       break;
7219     case tcc_expression:
7220       switch (TREE_CODE (expr))
7221 	{
7222 	case TRUTH_ANDIF_EXPR:
7223 	case TRUTH_ORIF_EXPR:
7224 	case TRUTH_AND_EXPR:
7225 	case TRUTH_OR_EXPR:
7226 	case TRUTH_XOR_EXPR:
7227 	  saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
7228 					 lhs_addr, lhs_var);
7229 	case TRUTH_NOT_EXPR:
7230 	  saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
7231 					 lhs_addr, lhs_var);
7232 	  break;
7233 	case COMPOUND_EXPR:
7234 	  /* Break out any preevaluations from cp_build_modify_expr.  */
7235 	  for (; TREE_CODE (expr) == COMPOUND_EXPR;
7236 	       expr = TREE_OPERAND (expr, 1))
7237 	    gimplify_stmt (&TREE_OPERAND (expr, 0), pre_p);
7238 	  *expr_p = expr;
7239 	  return goa_stabilize_expr (expr_p, pre_p, lhs_addr, lhs_var);
7240 	default:
7241 	  break;
7242 	}
7243       break;
7244     default:
7245       break;
7246     }
7247 
7248   if (saw_lhs == 0)
7249     {
7250       enum gimplify_status gs;
7251       gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_val, fb_rvalue);
7252       if (gs != GS_ALL_DONE)
7253 	saw_lhs = -1;
7254     }
7255 
7256   return saw_lhs;
7257 }
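
/* Hedged example: stabilizing 'x + foo ()' against LHS_ADDR == &x replaces
   the reference to 'x' with LHS_VAR (so 1 is returned), while the call
   'foo ()', which does not involve the lhs, is evaluated into a temporary
   on PRE_P by the recursive walk.  */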
7258 
7259 /* Gimplify an OMP_ATOMIC statement.  */
7260 
7261 static enum gimplify_status
7262 gimplify_omp_atomic (tree *expr_p, gimple_seq *pre_p)
7263 {
7264   tree addr = TREE_OPERAND (*expr_p, 0);
7265   tree rhs = TREE_CODE (*expr_p) == OMP_ATOMIC_READ
7266 	     ? NULL : TREE_OPERAND (*expr_p, 1);
7267   tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
7268   tree tmp_load;
7269   gimple loadstmt, storestmt;
7270 
7271   tmp_load = create_tmp_reg (type, NULL);
7272   if (rhs && goa_stabilize_expr (&rhs, pre_p, addr, tmp_load) < 0)
7273     return GS_ERROR;
7274 
7275   if (gimplify_expr (&addr, pre_p, NULL, is_gimple_val, fb_rvalue)
7276       != GS_ALL_DONE)
7277     return GS_ERROR;
7278 
7279   loadstmt = gimple_build_omp_atomic_load (tmp_load, addr);
7280   gimplify_seq_add_stmt (pre_p, loadstmt);
7281   if (rhs && gimplify_expr (&rhs, pre_p, NULL, is_gimple_val, fb_rvalue)
7282       != GS_ALL_DONE)
7283     return GS_ERROR;
7284 
7285   if (TREE_CODE (*expr_p) == OMP_ATOMIC_READ)
7286     rhs = tmp_load;
7287   storestmt = gimple_build_omp_atomic_store (rhs);
7288   gimplify_seq_add_stmt (pre_p, storestmt);
7289   if (OMP_ATOMIC_SEQ_CST (*expr_p))
7290     {
7291       gimple_omp_atomic_set_seq_cst (loadstmt);
7292       gimple_omp_atomic_set_seq_cst (storestmt);
7293     }
7294   switch (TREE_CODE (*expr_p))
7295     {
7296     case OMP_ATOMIC_READ:
7297     case OMP_ATOMIC_CAPTURE_OLD:
7298       *expr_p = tmp_load;
7299       gimple_omp_atomic_set_need_value (loadstmt);
7300       break;
7301     case OMP_ATOMIC_CAPTURE_NEW:
7302       *expr_p = rhs;
7303       gimple_omp_atomic_set_need_value (storestmt);
7304       break;
7305     default:
7306       *expr_p = NULL;
7307       break;
7308     }
7309 
7310   return GS_ALL_DONE;
7311 }
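
/* Hedged example of the lowering above:

       #pragma omp atomic
         x = x + 1;

   is emitted as the pair

       tmp = GIMPLE_OMP_ATOMIC_LOAD <&x>
       GIMPLE_OMP_ATOMIC_STORE <tmp + 1>

   with the seq_cst flag copied to both statements when requested, and with
   the loaded or stored value returned in *EXPR_P for the capture forms.  */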
7312 
7313 /* Gimplify a TRANSACTION_EXPR.  This involves gimplification of the
7314    body, and adding some EH bits.  */
7315 
7316 static enum gimplify_status
7317 gimplify_transaction (tree *expr_p, gimple_seq *pre_p)
7318 {
7319   tree expr = *expr_p, temp, tbody = TRANSACTION_EXPR_BODY (expr);
7320   gimple g;
7321   gimple_seq body = NULL;
7322   int subcode = 0;
7323 
7324   /* Wrap the transaction body in a BIND_EXPR so we have a context
7325      in which to put decls for OpenMP.  */
7326   if (TREE_CODE (tbody) != BIND_EXPR)
7327     {
7328       tree bind = build3 (BIND_EXPR, void_type_node, NULL, tbody, NULL);
7329       TREE_SIDE_EFFECTS (bind) = 1;
7330       SET_EXPR_LOCATION (bind, EXPR_LOCATION (tbody));
7331       TRANSACTION_EXPR_BODY (expr) = bind;
7332     }
7333 
7334   push_gimplify_context ();
7335   temp = voidify_wrapper_expr (*expr_p, NULL);
7336 
7337   g = gimplify_and_return_first (TRANSACTION_EXPR_BODY (expr), &body);
7338   pop_gimplify_context (g);
7339 
7340   g = gimple_build_transaction (body, NULL);
7341   if (TRANSACTION_EXPR_OUTER (expr))
7342     subcode = GTMA_IS_OUTER;
7343   else if (TRANSACTION_EXPR_RELAXED (expr))
7344     subcode = GTMA_IS_RELAXED;
7345   gimple_transaction_set_subcode (g, subcode);
7346 
7347   gimplify_seq_add_stmt (pre_p, g);
7348 
7349   if (temp)
7350     {
7351       *expr_p = temp;
7352       return GS_OK;
7353     }
7354 
7355   *expr_p = NULL_TREE;
7356   return GS_ALL_DONE;
7357 }
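
/* Illustrative: '__transaction_atomic { body }' becomes a GIMPLE_TRANSACTION
   statement holding the gimplified body; an outer-qualified transaction
   selects the GTMA_IS_OUTER subcode and __transaction_relaxed selects
   GTMA_IS_RELAXED.  */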
7358 
7359 /* Convert the GENERIC expression tree *EXPR_P to GIMPLE.  If the
7360    expression produces a value to be used as an operand inside a GIMPLE
7361    statement, the value will be stored back in *EXPR_P.  This value will
7362    be a tree of class tcc_declaration, tcc_constant, tcc_reference or
7363    an SSA_NAME.  The corresponding sequence of GIMPLE statements is
7364    emitted in PRE_P and POST_P.
7365 
7366    Additionally, this process may overwrite parts of the input
7367    expression during gimplification.  Ideally, it should be
7368    possible to do non-destructive gimplification.
7369 
7370    EXPR_P points to the GENERIC expression to convert to GIMPLE.  If
7371       the expression needs to evaluate to a value to be used as
7372       an operand in a GIMPLE statement, this value will be stored in
7373       *EXPR_P on exit.  This happens when the caller specifies one
7374       of fb_lvalue or fb_rvalue fallback flags.
7375 
7376    PRE_P will contain the sequence of GIMPLE statements corresponding
7377        to the evaluation of EXPR and all the side-effects that must
7378        be executed before the main expression.  On exit, the last
7379        statement of PRE_P is the core statement being gimplified.  For
7380        instance, when gimplifying 'if (++a)' the last statement in
7381        PRE_P will be 'if (t.1)' where t.1 is the result of
7382        pre-incrementing 'a'.
7383 
7384    POST_P will contain the sequence of GIMPLE statements corresponding
7385        to the evaluation of all the side-effects that must be executed
7386        after the main expression.  If this is NULL, the post
7387        side-effects are stored at the end of PRE_P.
7388 
7389        The reason why the output is split in two is to handle post
7390        side-effects explicitly.  In some cases, an expression may have
7391        inner and outer post side-effects which need to be emitted in
7392        an order different from the one given by the recursive
7393        traversal.  For instance, for the expression (*p--)++ the post
7394        side-effects of '--' must actually occur *after* the post
7395        side-effects of '++'.  However, gimplification will first visit
7396        the inner expression, so if a separate POST sequence was not
7397        used, the resulting sequence would be:
7398 
7399        	    1	t.1 = *p
7400        	    2	p = p - 1
7401        	    3	t.2 = t.1 + 1
7402        	    4	*p = t.2
7403 
7404        However, the post-decrement operation in line #2 must not be
7405        evaluated until after the store to *p at line #4, so the
7406        correct sequence should be:
7407 
7408        	    1	t.1 = *p
7409        	    2	t.2 = t.1 + 1
7410        	    3	*p = t.2
7411        	    4	p = p - 1
7412 
7413        So, by specifying a separate post queue, it is possible
7414        to emit the post side-effects in the correct order.
7415        If POST_P is NULL, an internal queue will be used.  Before
7416        returning to the caller, the sequence POST_P is appended to
7417        the main output sequence PRE_P.
7418 
7419    GIMPLE_TEST_F points to a function that takes a tree T and
7420        returns nonzero if T is in the GIMPLE form requested by the
7421        caller.  The GIMPLE predicates are in gimple.c.
7422 
7423    FALLBACK tells the function what sort of a temporary we want if
7424        gimplification cannot produce an expression that complies with
7425        GIMPLE_TEST_F.
7426 
7427        fb_none means that no temporary should be generated
7428        fb_rvalue means that an rvalue is OK to generate
7429        fb_lvalue means that an lvalue is OK to generate
7430        fb_either means that either is OK, but an lvalue is preferable.
7431        fb_mayfail means that gimplification may fail (in which case
7432        GS_ERROR will be returned)
7433 
7434    The return value is either GS_ERROR or GS_ALL_DONE, since this
7435    function iterates until EXPR is completely gimplified or an error
7436    occurs.  */
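
/* A minimal usage sketch, following the pattern of callers elsewhere in
   this file: force an operand into a GIMPLE value, queueing any needed
   statements on PRE_P:

       if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
			  is_gimple_val, fb_rvalue) == GS_ERROR)
	 remove = true;
*/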
7437 
7438 enum gimplify_status
7439 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
7440 	       bool (*gimple_test_f) (tree), fallback_t fallback)
7441 {
7442   tree tmp;
7443   gimple_seq internal_pre = NULL;
7444   gimple_seq internal_post = NULL;
7445   tree save_expr;
7446   bool is_statement;
7447   location_t saved_location;
7448   enum gimplify_status ret;
7449   gimple_stmt_iterator pre_last_gsi, post_last_gsi;
7450 
7451   save_expr = *expr_p;
7452   if (save_expr == NULL_TREE)
7453     return GS_ALL_DONE;
7454 
7455   /* If we are gimplifying a top-level statement, PRE_P must be valid.  */
7456   is_statement = gimple_test_f == is_gimple_stmt;
7457   if (is_statement)
7458     gcc_assert (pre_p);
7459 
7460   /* Consistency checks.  */
7461   if (gimple_test_f == is_gimple_reg)
7462     gcc_assert (fallback & (fb_rvalue | fb_lvalue));
7463   else if (gimple_test_f == is_gimple_val
7464            || gimple_test_f == is_gimple_call_addr
7465            || gimple_test_f == is_gimple_condexpr
7466            || gimple_test_f == is_gimple_mem_rhs
7467            || gimple_test_f == is_gimple_mem_rhs_or_call
7468            || gimple_test_f == is_gimple_reg_rhs
7469            || gimple_test_f == is_gimple_reg_rhs_or_call
7470            || gimple_test_f == is_gimple_asm_val
7471 	   || gimple_test_f == is_gimple_mem_ref_addr)
7472     gcc_assert (fallback & fb_rvalue);
7473   else if (gimple_test_f == is_gimple_min_lval
7474 	   || gimple_test_f == is_gimple_lvalue)
7475     gcc_assert (fallback & fb_lvalue);
7476   else if (gimple_test_f == is_gimple_addressable)
7477     gcc_assert (fallback & fb_either);
7478   else if (gimple_test_f == is_gimple_stmt)
7479     gcc_assert (fallback == fb_none);
7480   else
7481     {
7482       /* We should have recognized the GIMPLE_TEST_F predicate to
7483 	 know what kind of fallback to use in case a temporary is
7484 	 needed to hold the value or address of *EXPR_P.  */
7485       gcc_unreachable ();
7486     }
7487 
7488   /* We used to check the predicate here and return immediately if it
7489      succeeds.  This is wrong; the design is for gimplification to be
7490      idempotent, and for the predicates to only test for valid forms, not
7491      whether they are fully simplified.  */
7492   if (pre_p == NULL)
7493     pre_p = &internal_pre;
7494 
7495   if (post_p == NULL)
7496     post_p = &internal_post;
7497 
7498   /* Remember the last statements added to PRE_P and POST_P.  Every
7499      new statement added by the gimplification helpers needs to be
7500      annotated with location information.  To centralize the
7501      responsibility, we remember the last statement that had been
7502      added to both queues before gimplifying *EXPR_P.  If
7503      gimplification produces new statements in PRE_P and POST_P, those
7504      statements will be annotated with the same location information
7505      as *EXPR_P.  */
7506   pre_last_gsi = gsi_last (*pre_p);
7507   post_last_gsi = gsi_last (*post_p);
7508 
7509   saved_location = input_location;
7510   if (save_expr != error_mark_node
7511       && EXPR_HAS_LOCATION (*expr_p))
7512     input_location = EXPR_LOCATION (*expr_p);
7513 
7514   /* Loop over the specific gimplifiers until the toplevel node
7515      remains the same.  */
7516   do
7517     {
7518       /* Strip away as many useless type conversions as possible
7519 	 at the toplevel.  */
7520       STRIP_USELESS_TYPE_CONVERSION (*expr_p);
7521 
7522       /* Remember the expr.  */
7523       save_expr = *expr_p;
7524 
7525       /* Die, die, die, my darling.  */
7526       if (save_expr == error_mark_node
7527 	  || (TREE_TYPE (save_expr)
7528 	      && TREE_TYPE (save_expr) == error_mark_node))
7529 	{
7530 	  ret = GS_ERROR;
7531 	  break;
7532 	}
7533 
7534       /* Do any language-specific gimplification.  */
7535       ret = ((enum gimplify_status)
7536 	     lang_hooks.gimplify_expr (expr_p, pre_p, post_p));
7537       if (ret == GS_OK)
7538 	{
7539 	  if (*expr_p == NULL_TREE)
7540 	    break;
7541 	  if (*expr_p != save_expr)
7542 	    continue;
7543 	}
7544       else if (ret != GS_UNHANDLED)
7545 	break;
7546 
7547       /* Make sure that all the cases set 'ret' appropriately.  */
7548       ret = GS_UNHANDLED;
7549       switch (TREE_CODE (*expr_p))
7550 	{
7551 	  /* First deal with the special cases.  */
7552 
7553 	case POSTINCREMENT_EXPR:
7554 	case POSTDECREMENT_EXPR:
7555 	case PREINCREMENT_EXPR:
7556 	case PREDECREMENT_EXPR:
7557 	  ret = gimplify_self_mod_expr (expr_p, pre_p, post_p,
7558 					fallback != fb_none,
7559 					TREE_TYPE (*expr_p));
7560 	  break;
7561 
7562 	case VIEW_CONVERT_EXPR:
7563 	  if (is_gimple_reg_type (TREE_TYPE (*expr_p))
7564 	      && is_gimple_reg_type (TREE_TYPE (TREE_OPERAND (*expr_p, 0))))
7565 	    {
7566 	      ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
7567 				   post_p, is_gimple_val, fb_rvalue);
7568 	      recalculate_side_effects (*expr_p);
7569 	      break;
7570 	    }
7571 	  /* Fallthru.  */
7572 
7573 	case ARRAY_REF:
7574 	case ARRAY_RANGE_REF:
7575 	case REALPART_EXPR:
7576 	case IMAGPART_EXPR:
7577 	case COMPONENT_REF:
7578 	  ret = gimplify_compound_lval (expr_p, pre_p, post_p,
7579 					fallback ? fallback : fb_rvalue);
7580 	  break;
7581 
7582 	case COND_EXPR:
7583 	  ret = gimplify_cond_expr (expr_p, pre_p, fallback);
7584 
7585 	  /* C99 code may assign to an array in a structure value of a
7586 	     conditional expression, and this has undefined behavior
7587 	     only on execution, so create a temporary if an lvalue is
7588 	     required.  */
7589 	  if (fallback == fb_lvalue)
7590 	    {
7591 	      *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
7592 	      mark_addressable (*expr_p);
7593 	      ret = GS_OK;
7594 	    }
7595 	  break;
7596 
7597 	case CALL_EXPR:
7598 	  ret = gimplify_call_expr (expr_p, pre_p, fallback != fb_none);
7599 
7600 	  /* C99 code may assign to an array in a structure returned
7601 	     from a function, and this has undefined behavior only on
7602 	     execution, so create a temporary if an lvalue is
7603 	     required.  */
7604 	  if (fallback == fb_lvalue)
7605 	    {
7606 	      *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
7607 	      mark_addressable (*expr_p);
7608 	      ret = GS_OK;
7609 	    }
7610 	  break;
7611 
7612 	case TREE_LIST:
7613 	  gcc_unreachable ();
7614 
7615 	case COMPOUND_EXPR:
7616 	  ret = gimplify_compound_expr (expr_p, pre_p, fallback != fb_none);
7617 	  break;
7618 
7619 	case COMPOUND_LITERAL_EXPR:
7620 	  ret = gimplify_compound_literal_expr (expr_p, pre_p,
7621 						gimple_test_f, fallback);
7622 	  break;
7623 
7624 	case MODIFY_EXPR:
7625 	case INIT_EXPR:
7626 	  ret = gimplify_modify_expr (expr_p, pre_p, post_p,
7627 				      fallback != fb_none);
7628 	  break;
7629 
7630 	case TRUTH_ANDIF_EXPR:
7631 	case TRUTH_ORIF_EXPR:
7632 	  {
7633 	    /* Preserve the original type of the expression and the
7634 	       source location of the outer expression.  */
7635 	    tree org_type = TREE_TYPE (*expr_p);
7636 	    *expr_p = gimple_boolify (*expr_p);
7637 	    *expr_p = build3_loc (input_location, COND_EXPR,
7638 				  org_type, *expr_p,
7639 				  fold_convert_loc
7640 				    (input_location,
7641 				     org_type, boolean_true_node),
7642 				  fold_convert_loc
7643 				    (input_location,
7644 				     org_type, boolean_false_node));
7645 	    ret = GS_OK;
7646 	    break;
7647 	  }
7648 
7649 	case TRUTH_NOT_EXPR:
7650 	  {
7651 	    tree type = TREE_TYPE (*expr_p);
7652 	    /* The parsers are careful to generate TRUTH_NOT_EXPR
7653 	       only with operands that are always zero or one.
7654 	       We do not fold here but handle the only interesting case
7655 	       manually, as fold may re-introduce the TRUTH_NOT_EXPR.  */
7656 	    *expr_p = gimple_boolify (*expr_p);
7657 	    if (TYPE_PRECISION (TREE_TYPE (*expr_p)) == 1)
7658 	      *expr_p = build1_loc (input_location, BIT_NOT_EXPR,
7659 				    TREE_TYPE (*expr_p),
7660 				    TREE_OPERAND (*expr_p, 0));
7661 	    else
7662 	      *expr_p = build2_loc (input_location, BIT_XOR_EXPR,
7663 				    TREE_TYPE (*expr_p),
7664 				    TREE_OPERAND (*expr_p, 0),
7665 				    build_int_cst (TREE_TYPE (*expr_p), 1));
7666 	    if (!useless_type_conversion_p (type, TREE_TYPE (*expr_p)))
7667 	      *expr_p = fold_convert_loc (input_location, type, *expr_p);
7668 	    ret = GS_OK;
7669 	    break;
7670 	  }
7671 
7672 	case ADDR_EXPR:
7673 	  ret = gimplify_addr_expr (expr_p, pre_p, post_p);
7674 	  break;
7675 
7676 	case ANNOTATE_EXPR:
7677 	  {
7678 	    tree cond = TREE_OPERAND (*expr_p, 0);
7679 	    tree id = TREE_OPERAND (*expr_p, 1);
7680 	    tree type = TREE_TYPE (cond);
7681 	    if (!INTEGRAL_TYPE_P (type))
7682 	      {
7683 		*expr_p = cond;
7684 		ret = GS_OK;
7685 		break;
7686 	      }
7687 	    tree tmp = create_tmp_var (type, NULL);
7688 	    gimplify_arg (&cond, pre_p, EXPR_LOCATION (*expr_p));
7689 	    gimple call = gimple_build_call_internal (IFN_ANNOTATE, 2,
7690 						      cond, id);
7691 	    gimple_call_set_lhs (call, tmp);
7692 	    gimplify_seq_add_stmt (pre_p, call);
7693 	    *expr_p = tmp;
7694 	    ret = GS_ALL_DONE;
7695 	    break;
7696 	  }
7697 
7698 	case VA_ARG_EXPR:
7699 	  ret = gimplify_va_arg_expr (expr_p, pre_p, post_p);
7700 	  break;
7701 
7702 	CASE_CONVERT:
7703 	  if (IS_EMPTY_STMT (*expr_p))
7704 	    {
7705 	      ret = GS_ALL_DONE;
7706 	      break;
7707 	    }
7708 
7709 	  if (VOID_TYPE_P (TREE_TYPE (*expr_p))
7710 	      || fallback == fb_none)
7711 	    {
7712 	      /* Just strip a conversion to void (or in void context) and
7713 		 try again.  */
7714 	      *expr_p = TREE_OPERAND (*expr_p, 0);
7715 	      ret = GS_OK;
7716 	      break;
7717 	    }
7718 
7719 	  ret = gimplify_conversion (expr_p);
7720 	  if (ret == GS_ERROR)
7721 	    break;
7722 	  if (*expr_p != save_expr)
7723 	    break;
7724 	  /* FALLTHRU */
7725 
7726 	case FIX_TRUNC_EXPR:
7727 	  /* unary_expr: ... | '(' cast ')' val | ...  */
7728 	  ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
7729 			       is_gimple_val, fb_rvalue);
7730 	  recalculate_side_effects (*expr_p);
7731 	  break;
7732 
7733 	case INDIRECT_REF:
7734 	  {
7735 	    bool volatilep = TREE_THIS_VOLATILE (*expr_p);
7736 	    bool notrap = TREE_THIS_NOTRAP (*expr_p);
7737 	    tree saved_ptr_type = TREE_TYPE (TREE_OPERAND (*expr_p, 0));
7738 
7739 	    *expr_p = fold_indirect_ref_loc (input_location, *expr_p);
7740 	    if (*expr_p != save_expr)
7741 	      {
7742 		ret = GS_OK;
7743 		break;
7744 	      }
7745 
7746 	    ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
7747 				 is_gimple_reg, fb_rvalue);
7748 	    if (ret == GS_ERROR)
7749 	      break;
7750 
7751 	    recalculate_side_effects (*expr_p);
7752 	    *expr_p = fold_build2_loc (input_location, MEM_REF,
7753 				       TREE_TYPE (*expr_p),
7754 				       TREE_OPERAND (*expr_p, 0),
7755 				       build_int_cst (saved_ptr_type, 0));
7756 	    TREE_THIS_VOLATILE (*expr_p) = volatilep;
7757 	    TREE_THIS_NOTRAP (*expr_p) = notrap;
7758 	    ret = GS_OK;
7759 	    break;
7760 	  }
7761 
7762 	/* We arrive here through the various re-gimplification paths.  */
7763 	case MEM_REF:
7764 	  /* First try re-folding the whole thing.  */
7765 	  tmp = fold_binary (MEM_REF, TREE_TYPE (*expr_p),
7766 			     TREE_OPERAND (*expr_p, 0),
7767 			     TREE_OPERAND (*expr_p, 1));
7768 	  if (tmp)
7769 	    {
7770 	      *expr_p = tmp;
7771 	      recalculate_side_effects (*expr_p);
7772 	      ret = GS_OK;
7773 	      break;
7774 	    }
7775 	  /* Avoid re-gimplifying the address operand if it is already
7776 	     in suitable form.  Re-gimplifying would mark the address
7777 	     operand addressable.  Always gimplify when not in SSA form
7778 	     as we still may have to gimplify decls with value-exprs.  */
7779 	  if (!gimplify_ctxp || !gimplify_ctxp->into_ssa
7780 	      || !is_gimple_mem_ref_addr (TREE_OPERAND (*expr_p, 0)))
7781 	    {
7782 	      ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
7783 				   is_gimple_mem_ref_addr, fb_rvalue);
7784 	      if (ret == GS_ERROR)
7785 		break;
7786 	    }
7787 	  recalculate_side_effects (*expr_p);
7788 	  ret = GS_ALL_DONE;
7789 	  break;
7790 
7791 	/* Constants need not be gimplified.  */
7792 	case INTEGER_CST:
7793 	case REAL_CST:
7794 	case FIXED_CST:
7795 	case STRING_CST:
7796 	case COMPLEX_CST:
7797 	case VECTOR_CST:
7798 	  /* Drop the overflow flag on constants, we do not want
7799 	     that in the GIMPLE IL.  */
7800 	  if (TREE_OVERFLOW_P (*expr_p))
7801 	    *expr_p = drop_tree_overflow (*expr_p);
7802 	  ret = GS_ALL_DONE;
7803 	  break;
7804 
7805 	case CONST_DECL:
7806 	  /* If we require an lvalue, such as for ADDR_EXPR, retain the
7807 	     CONST_DECL node.  Otherwise the decl is replaceable by its
7808 	     value.  */
7809 	  /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either.  */
7810 	  if (fallback & fb_lvalue)
7811 	    ret = GS_ALL_DONE;
7812 	  else
7813 	    {
7814 	      *expr_p = DECL_INITIAL (*expr_p);
7815 	      ret = GS_OK;
7816 	    }
7817 	  break;
7818 
7819 	case DECL_EXPR:
7820 	  ret = gimplify_decl_expr (expr_p, pre_p);
7821 	  break;
7822 
7823 	case BIND_EXPR:
7824 	  ret = gimplify_bind_expr (expr_p, pre_p);
7825 	  break;
7826 
7827 	case LOOP_EXPR:
7828 	  ret = gimplify_loop_expr (expr_p, pre_p);
7829 	  break;
7830 
7831 	case SWITCH_EXPR:
7832 	  ret = gimplify_switch_expr (expr_p, pre_p);
7833 	  break;
7834 
7835 	case EXIT_EXPR:
7836 	  ret = gimplify_exit_expr (expr_p);
7837 	  break;
7838 
7839 	case GOTO_EXPR:
7840 	  /* If the target is not a LABEL_DECL, then it is a computed jump
7841 	     and the target needs to be gimplified.  */
7842 	  if (TREE_CODE (GOTO_DESTINATION (*expr_p)) != LABEL_DECL)
7843 	    {
7844 	      ret = gimplify_expr (&GOTO_DESTINATION (*expr_p), pre_p,
7845 				   NULL, is_gimple_val, fb_rvalue);
7846 	      if (ret == GS_ERROR)
7847 		break;
7848 	    }
7849 	  gimplify_seq_add_stmt (pre_p,
7850 			  gimple_build_goto (GOTO_DESTINATION (*expr_p)));
7851 	  ret = GS_ALL_DONE;
7852 	  break;
7853 
7854 	case PREDICT_EXPR:
7855 	  gimplify_seq_add_stmt (pre_p,
7856 			gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p),
7857 					      PREDICT_EXPR_OUTCOME (*expr_p)));
7858 	  ret = GS_ALL_DONE;
7859 	  break;
7860 
7861 	case LABEL_EXPR:
7862 	  ret = GS_ALL_DONE;
7863 	  gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p))
7864 		      == current_function_decl);
7865 	  gimplify_seq_add_stmt (pre_p,
7866 			  gimple_build_label (LABEL_EXPR_LABEL (*expr_p)));
7867 	  break;
7868 
7869 	case CASE_LABEL_EXPR:
7870 	  ret = gimplify_case_label_expr (expr_p, pre_p);
7871 	  break;
7872 
7873 	case RETURN_EXPR:
7874 	  ret = gimplify_return_expr (*expr_p, pre_p);
7875 	  break;
7876 
7877 	case CONSTRUCTOR:
7878 	  /* Don't reduce this in place; let gimplify_init_constructor work its
7879 	     magic.  But if we're just elaborating this for side effects,
7880 	     gimplify only those elements that have side effects.  */
7881 	  if (fallback == fb_none)
7882 	    {
7883 	      unsigned HOST_WIDE_INT ix;
7884 	      tree val;
7885 	      tree temp = NULL_TREE;
7886 	      FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (*expr_p), ix, val)
7887 		if (TREE_SIDE_EFFECTS (val))
7888 		  append_to_statement_list (val, &temp);
7889 
7890 	      *expr_p = temp;
7891 	      ret = temp ? GS_OK : GS_ALL_DONE;
7892 	    }
7893 	  /* C99 code may assign to an array in a constructed
7894 	     structure or union, and this has undefined behavior only
7895 	     on execution, so create a temporary if an lvalue is
7896 	     required.  */
7897 	  else if (fallback == fb_lvalue)
7898 	    {
7899 	      *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
7900 	      mark_addressable (*expr_p);
7901 	      ret = GS_OK;
7902 	    }
7903 	  else
7904 	    ret = GS_ALL_DONE;
7905 	  break;
7906 
7907 	  /* The following are special cases that are not handled by the
7908 	     original GIMPLE grammar.  */
7909 
7910 	  /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
7911 	     eliminated.  */
7912 	case SAVE_EXPR:
7913 	  ret = gimplify_save_expr (expr_p, pre_p, post_p);
7914 	  break;
7915 
7916 	case BIT_FIELD_REF:
7917 	  ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
7918 			       post_p, is_gimple_lvalue, fb_either);
7919 	  recalculate_side_effects (*expr_p);
7920 	  break;
7921 
7922 	case TARGET_MEM_REF:
7923 	  {
7924 	    enum gimplify_status r0 = GS_ALL_DONE, r1 = GS_ALL_DONE;
7925 
7926 	    if (TMR_BASE (*expr_p))
7927 	      r0 = gimplify_expr (&TMR_BASE (*expr_p), pre_p,
7928 				  post_p, is_gimple_mem_ref_addr, fb_either);
7929 	    if (TMR_INDEX (*expr_p))
7930 	      r1 = gimplify_expr (&TMR_INDEX (*expr_p), pre_p,
7931 				  post_p, is_gimple_val, fb_rvalue);
7932 	    if (TMR_INDEX2 (*expr_p))
7933 	      r1 = gimplify_expr (&TMR_INDEX2 (*expr_p), pre_p,
7934 				  post_p, is_gimple_val, fb_rvalue);
7935 	    /* TMR_STEP and TMR_OFFSET are always integer constants.  */
7936 	    ret = MIN (r0, r1);
7937 	  }
7938 	  break;
7939 
7940 	case NON_LVALUE_EXPR:
7941 	  /* This should have been stripped above.  */
7942 	  gcc_unreachable ();
7943 
7944 	case ASM_EXPR:
7945 	  ret = gimplify_asm_expr (expr_p, pre_p, post_p);
7946 	  break;
7947 
7948 	case TRY_FINALLY_EXPR:
7949 	case TRY_CATCH_EXPR:
7950 	  {
7951 	    gimple_seq eval, cleanup;
7952 	    gimple try_;
7953 
7954 	    /* Calls to destructors are generated automatically in the
7955 	       FINALLY/CATCH block.  They should carry UNKNOWN_LOCATION.
7956 	       However, gimplify_call_expr resets a call stmt's location to
7957 	       input_location if it finds the location unknown.  To prevent
7958 	       that for destructors, set input_location to UNKNOWN_LOCATION.
7959 	       This only affects the destructor calls in the FINALLY/CATCH
7960 	       block; input_location is restored to its original value by
7961 	       the end of gimplify_expr.  */
7962 	    input_location = UNKNOWN_LOCATION;
7963 	    eval = cleanup = NULL;
7964 	    gimplify_and_add (TREE_OPERAND (*expr_p, 0), &eval);
7965 	    gimplify_and_add (TREE_OPERAND (*expr_p, 1), &cleanup);
7966 	    /* Don't create bogus GIMPLE_TRY with empty cleanup.  */
7967 	    if (gimple_seq_empty_p (cleanup))
7968 	      {
7969 		gimple_seq_add_seq (pre_p, eval);
7970 		ret = GS_ALL_DONE;
7971 		break;
7972 	      }
7973 	    try_ = gimple_build_try (eval, cleanup,
7974 				     TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
7975 				     ? GIMPLE_TRY_FINALLY
7976 				     : GIMPLE_TRY_CATCH);
7977 	    if (LOCATION_LOCUS (saved_location) != UNKNOWN_LOCATION)
7978 	      gimple_set_location (try_, saved_location);
7979 	    else
7980 	      gimple_set_location (try_, EXPR_LOCATION (save_expr));
7981 	    if (TREE_CODE (*expr_p) == TRY_CATCH_EXPR)
7982 	      gimple_try_set_catch_is_cleanup (try_,
7983 					       TRY_CATCH_IS_CLEANUP (*expr_p));
7984 	    gimplify_seq_add_stmt (pre_p, try_);
7985 	    ret = GS_ALL_DONE;
7986 	    break;
7987 	  }
7988 
7989 	case CLEANUP_POINT_EXPR:
7990 	  ret = gimplify_cleanup_point_expr (expr_p, pre_p);
7991 	  break;
7992 
7993 	case TARGET_EXPR:
7994 	  ret = gimplify_target_expr (expr_p, pre_p, post_p);
7995 	  break;
7996 
7997 	case CATCH_EXPR:
7998 	  {
7999 	    gimple c;
8000 	    gimple_seq handler = NULL;
8001 	    gimplify_and_add (CATCH_BODY (*expr_p), &handler);
8002 	    c = gimple_build_catch (CATCH_TYPES (*expr_p), handler);
8003 	    gimplify_seq_add_stmt (pre_p, c);
8004 	    ret = GS_ALL_DONE;
8005 	    break;
8006 	  }
8007 
8008 	case EH_FILTER_EXPR:
8009 	  {
8010 	    gimple ehf;
8011 	    gimple_seq failure = NULL;
8012 
8013 	    gimplify_and_add (EH_FILTER_FAILURE (*expr_p), &failure);
8014 	    ehf = gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p), failure);
8015 	    gimple_set_no_warning (ehf, TREE_NO_WARNING (*expr_p));
8016 	    gimplify_seq_add_stmt (pre_p, ehf);
8017 	    ret = GS_ALL_DONE;
8018 	    break;
8019 	  }
8020 
8021 	case OBJ_TYPE_REF:
8022 	  {
8023 	    enum gimplify_status r0, r1;
8024 	    r0 = gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p), pre_p,
8025 				post_p, is_gimple_val, fb_rvalue);
8026 	    r1 = gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p), pre_p,
8027 				post_p, is_gimple_val, fb_rvalue);
8028 	    TREE_SIDE_EFFECTS (*expr_p) = 0;
8029 	    ret = MIN (r0, r1);
8030 	  }
8031 	  break;
8032 
8033 	case LABEL_DECL:
8034 	  /* We get here when taking the address of a label.  We mark
8035 	     the label as "forced": it can never be removed and it is a
8036 	     potential target for any computed goto.  */
8037 	  FORCED_LABEL (*expr_p) = 1;
8038 	  ret = GS_ALL_DONE;
8039 	  break;
8040 
8041 	case STATEMENT_LIST:
8042 	  ret = gimplify_statement_list (expr_p, pre_p);
8043 	  break;
8044 
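	/* WITH_SIZE_EXPR wraps operand 0 together with its (possibly
	   non-constant) size in operand 1; both are gimplified below and
	   the wrapper itself is kept.  */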
8045 	case WITH_SIZE_EXPR:
8046 	  {
8047 	    gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
8048 			   post_p == &internal_post ? NULL : post_p,
8049 			   gimple_test_f, fallback);
8050 	    gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
8051 			   is_gimple_val, fb_rvalue);
8052 	    ret = GS_ALL_DONE;
8053 	  }
8054 	  break;
8055 
8056 	case VAR_DECL:
8057 	case PARM_DECL:
8058 	  ret = gimplify_var_or_parm_decl (expr_p);
8059 	  break;
8060 
8061 	case RESULT_DECL:
8062 	  /* When within an OpenMP context, notice uses of variables.  */
8063 	  if (gimplify_omp_ctxp)
8064 	    omp_notice_variable (gimplify_omp_ctxp, *expr_p, true);
8065 	  ret = GS_ALL_DONE;
8066 	  break;
8067 
8068 	case SSA_NAME:
8069 	  /* Allow callbacks into the gimplifier during optimization.  */
8070 	  ret = GS_ALL_DONE;
8071 	  break;
8072 
8073 	case OMP_PARALLEL:
8074 	  gimplify_omp_parallel (expr_p, pre_p);
8075 	  ret = GS_ALL_DONE;
8076 	  break;
8077 
8078 	case OMP_TASK:
8079 	  gimplify_omp_task (expr_p, pre_p);
8080 	  ret = GS_ALL_DONE;
8081 	  break;
8082 
8083 	case OMP_FOR:
8084 	case OMP_SIMD:
8085 	case CILK_SIMD:
8086 	case OMP_DISTRIBUTE:
8087 	  ret = gimplify_omp_for (expr_p, pre_p);
8088 	  break;
8089 
8090 	case OMP_SECTIONS:
8091 	case OMP_SINGLE:
8092 	case OMP_TARGET:
8093 	case OMP_TARGET_DATA:
8094 	case OMP_TEAMS:
8095 	  gimplify_omp_workshare (expr_p, pre_p);
8096 	  ret = GS_ALL_DONE;
8097 	  break;
8098 
8099 	case OMP_TARGET_UPDATE:
8100 	  gimplify_omp_target_update (expr_p, pre_p);
8101 	  ret = GS_ALL_DONE;
8102 	  break;
8103 
8104 	case OMP_SECTION:
8105 	case OMP_MASTER:
8106 	case OMP_TASKGROUP:
8107 	case OMP_ORDERED:
8108 	case OMP_CRITICAL:
8109 	  {
8110 	    gimple_seq body = NULL;
8111 	    gimple g;
8112 
8113 	    gimplify_and_add (OMP_BODY (*expr_p), &body);
8114 	    switch (TREE_CODE (*expr_p))
8115 	      {
8116 	      case OMP_SECTION:
8117 	        g = gimple_build_omp_section (body);
8118 	        break;
8119 	      case OMP_MASTER:
8120 	        g = gimple_build_omp_master (body);
8121 		break;
8122 	      case OMP_TASKGROUP:
8123 		{
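		  /* Wrap the taskgroup body in a GIMPLE_TRY_FINALLY whose
		     cleanup calls GOMP_taskgroup_end, so the end call also
		     runs when the body exits abnormally.  */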
8124 		  gimple_seq cleanup = NULL;
8125 		  tree fn
8126 		    = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_END);
8127 		  g = gimple_build_call (fn, 0);
8128 		  gimple_seq_add_stmt (&cleanup, g);
8129 		  g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
8130 		  body = NULL;
8131 		  gimple_seq_add_stmt (&body, g);
8132 		  g = gimple_build_omp_taskgroup (body);
8133 		}
8134 		break;
8135 	      case OMP_ORDERED:
8136 		g = gimple_build_omp_ordered (body);
8137 		break;
8138 	      case OMP_CRITICAL:
8139 		g = gimple_build_omp_critical (body,
8140 		    			       OMP_CRITICAL_NAME (*expr_p));
8141 		break;
8142 	      default:
8143 		gcc_unreachable ();
8144 	      }
8145 	    gimplify_seq_add_stmt (pre_p, g);
8146 	    ret = GS_ALL_DONE;
8147 	    break;
8148 	  }
8149 
8150 	case OMP_ATOMIC:
8151 	case OMP_ATOMIC_READ:
8152 	case OMP_ATOMIC_CAPTURE_OLD:
8153 	case OMP_ATOMIC_CAPTURE_NEW:
8154 	  ret = gimplify_omp_atomic (expr_p, pre_p);
8155 	  break;
8156 
8157 	case TRANSACTION_EXPR:
8158 	  ret = gimplify_transaction (expr_p, pre_p);
8159 	  break;
8160 
8161 	case TRUTH_AND_EXPR:
8162 	case TRUTH_OR_EXPR:
8163 	case TRUTH_XOR_EXPR:
8164 	  {
8165 	    tree orig_type = TREE_TYPE (*expr_p);
8166 	    tree new_type, xop0, xop1;
8167 	    *expr_p = gimple_boolify (*expr_p);
8168 	    new_type = TREE_TYPE (*expr_p);
8169 	    if (!useless_type_conversion_p (orig_type, new_type))
8170 	      {
8171 		*expr_p = fold_convert_loc (input_location, orig_type, *expr_p);
8172 		ret = GS_OK;
8173 		break;
8174 	      }
8175 
8176 	    /* Boolified binary truth expressions are semantically equivalent
8177 	       to bitwise binary expressions.  Canonicalize them to the
8178 	       bitwise variant.  */
8179 	    switch (TREE_CODE (*expr_p))
8180 	      {
8181 	      case TRUTH_AND_EXPR:
8182 		TREE_SET_CODE (*expr_p, BIT_AND_EXPR);
8183 		break;
8184 	      case TRUTH_OR_EXPR:
8185 		TREE_SET_CODE (*expr_p, BIT_IOR_EXPR);
8186 		break;
8187 	      case TRUTH_XOR_EXPR:
8188 		TREE_SET_CODE (*expr_p, BIT_XOR_EXPR);
8189 		break;
8190 	      default:
8191 		break;
8192 	      }
8193 	    /* Now make sure that operands have compatible type to
8194 	       expression's new_type.  */
8195 	    xop0 = TREE_OPERAND (*expr_p, 0);
8196 	    xop1 = TREE_OPERAND (*expr_p, 1);
8197 	    if (!useless_type_conversion_p (new_type, TREE_TYPE (xop0)))
8198 	      TREE_OPERAND (*expr_p, 0) = fold_convert_loc (input_location,
8199 							    new_type,
8200 	      						    xop0);
8201 	    if (!useless_type_conversion_p (new_type, TREE_TYPE (xop1)))
8202 	      TREE_OPERAND (*expr_p, 1) = fold_convert_loc (input_location,
8203 							    new_type,
8204 	      						    xop1);
8205 	    /* Continue classified as tcc_binary.  */
8206 	    goto expr_2;
8207 	  }
8208 
8209 	case FMA_EXPR:
8210 	case VEC_COND_EXPR:
8211 	case VEC_PERM_EXPR:
8212 	  /* Classified as tcc_expression.  */
8213 	  goto expr_3;
8214 
8215 	case POINTER_PLUS_EXPR:
8216 	  {
8217 	    enum gimplify_status r0, r1;
8218 	    r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
8219 				post_p, is_gimple_val, fb_rvalue);
8220 	    r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
8221 				post_p, is_gimple_val, fb_rvalue);
8222 	    recalculate_side_effects (*expr_p);
8223 	    ret = MIN (r0, r1);
8224 	    /* Convert &X + CST to invariant &MEM[&X, CST].  Do this
8225 	       after gimplifying operands - this is similar to how
8226 	       it would be folding all gimplified stmts on creation
8227 	       to have them canonicalized, which is what we eventually
8228 	       should do anyway.  */
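	    /* Illustrative sketch: "&x + 8", i.e. a POINTER_PLUS_EXPR of an
	       ADDR_EXPR and an INTEGER_CST, becomes a single invariant
	       address, printed in GIMPLE dumps roughly as
	       "&MEM[(T *)&x + 8B]" where T is the pointed-to type.  */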
8229 	    if (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == INTEGER_CST
8230 		&& is_gimple_min_invariant (TREE_OPERAND (*expr_p, 0)))
8231 	      {
8232 		*expr_p = build_fold_addr_expr_with_type_loc
8233 		   (input_location,
8234 		    fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (*expr_p)),
8235 				 TREE_OPERAND (*expr_p, 0),
8236 				 fold_convert (ptr_type_node,
8237 					       TREE_OPERAND (*expr_p, 1))),
8238 		    TREE_TYPE (*expr_p));
8239 		ret = MIN (ret, GS_OK);
8240 	      }
8241 	    break;
8242 	  }
8243 
8244 	case CILK_SYNC_STMT:
8245 	  {
8246 	    if (!fn_contains_cilk_spawn_p (cfun))
8247 	      {
8248 		error_at (EXPR_LOCATION (*expr_p),
8249 			  "expected %<_Cilk_spawn%> before %<_Cilk_sync%>");
8250 		ret = GS_ERROR;
8251 	      }
8252 	    else
8253 	      {
8254 		gimplify_cilk_sync (expr_p, pre_p);
8255 		ret = GS_ALL_DONE;
8256 	      }
8257 	    break;
8258 	  }
8259 
8260 	default:
8261 	  switch (TREE_CODE_CLASS (TREE_CODE (*expr_p)))
8262 	    {
8263 	    case tcc_comparison:
8264 	      /* Handle comparison of objects of non-scalar-mode aggregates
8265 		 with a call to memcmp.  It would be nice to only have to do
8266 		 this for variable-sized objects, but then we'd have to allow
8267 		 the same nest of reference nodes we allow for MODIFY_EXPR and
8268 		 that's too complex.
8269 
8270 		 Compare scalar mode aggregates as scalar mode values.  Using
8271 		 memcmp for them would be very inefficient at best, and is
8272 		 plain wrong if bitfields are involved.  */
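	      /* Illustrative dispatch (front ends such as Ada can produce
		 aggregate equality tests): an aggregate small enough to
		 have a scalar mode, e.g. SImode, is compared as a scalar
		 value, while a BLKmode or variable-sized aggregate is
		 compared with a call to memcmp.  */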
8273 		{
8274 		  tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1));
8275 
8276 		  /* Vector comparisons need no boolification.  */
8277 		  if (TREE_CODE (type) == VECTOR_TYPE)
8278 		    goto expr_2;
8279 		  else if (!AGGREGATE_TYPE_P (type))
8280 		    {
8281 		      tree org_type = TREE_TYPE (*expr_p);
8282 		      *expr_p = gimple_boolify (*expr_p);
8283 		      if (!useless_type_conversion_p (org_type,
8284 						      TREE_TYPE (*expr_p)))
8285 			{
8286 			  *expr_p = fold_convert_loc (input_location,
8287 						      org_type, *expr_p);
8288 			  ret = GS_OK;
8289 			}
8290 		      else
8291 			goto expr_2;
8292 		    }
8293 		  else if (TYPE_MODE (type) != BLKmode)
8294 		    ret = gimplify_scalar_mode_aggregate_compare (expr_p);
8295 		  else
8296 		    ret = gimplify_variable_sized_compare (expr_p);
8297 
8298 		  break;
8299 		}
8300 
8301 	    /* If *EXPR_P does not need to be special-cased, handle it
8302 	       according to its class.  */
8303 	    case tcc_unary:
8304 	      ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
8305 				   post_p, is_gimple_val, fb_rvalue);
8306 	      break;
8307 
8308 	    case tcc_binary:
8309 	    expr_2:
8310 	      {
8311 		enum gimplify_status r0, r1;
8312 
8313 		r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
8314 		                    post_p, is_gimple_val, fb_rvalue);
8315 		r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
8316 				    post_p, is_gimple_val, fb_rvalue);
8317 
8318 		ret = MIN (r0, r1);
8319 		break;
8320 	      }
8321 
8322 	    expr_3:
8323 	      {
8324 		enum gimplify_status r0, r1, r2;
8325 
8326 		r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
8327 		                    post_p, is_gimple_val, fb_rvalue);
8328 		r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
8329 				    post_p, is_gimple_val, fb_rvalue);
8330 		r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
8331 				    post_p, is_gimple_val, fb_rvalue);
8332 
8333 		ret = MIN (MIN (r0, r1), r2);
8334 		break;
8335 	      }
8336 
8337 	    case tcc_declaration:
8338 	    case tcc_constant:
8339 	      ret = GS_ALL_DONE;
8340 	      goto dont_recalculate;
8341 
8342 	    default:
8343 	      gcc_unreachable ();
8344 	    }
8345 
8346 	  recalculate_side_effects (*expr_p);
8347 
8348 	dont_recalculate:
8349 	  break;
8350 	}
8351 
8352       gcc_assert (*expr_p || ret != GS_OK);
8353     }
8354   while (ret == GS_OK);
8355 
8356   /* If we encountered an error_mark somewhere nested inside, either
8357      stub out the statement or propagate the error back out.  */
8358   if (ret == GS_ERROR)
8359     {
8360       if (is_statement)
8361 	*expr_p = NULL;
8362       goto out;
8363     }
8364 
8365   /* This was only valid as a return value from the langhook, which
8366      we handled.  Make sure it doesn't escape from any other context.  */
8367   gcc_assert (ret != GS_UNHANDLED);
8368 
8369   if (fallback == fb_none && *expr_p && !is_gimple_stmt (*expr_p))
8370     {
8371       /* We aren't looking for a value, and we don't have a valid
8372 	 statement.  If it doesn't have side-effects, throw it away.  */
8373       if (!TREE_SIDE_EFFECTS (*expr_p))
8374 	*expr_p = NULL;
8375       else if (!TREE_THIS_VOLATILE (*expr_p))
8376 	{
8377 	  /* This is probably a _REF that contains something nested that
8378 	     has side effects.  Recurse through the operands to find it.  */
8379 	  enum tree_code code = TREE_CODE (*expr_p);
8380 
8381 	  switch (code)
8382 	    {
8383 	    case COMPONENT_REF:
8384 	    case REALPART_EXPR:
8385 	    case IMAGPART_EXPR:
8386 	    case VIEW_CONVERT_EXPR:
8387 	      gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
8388 			     gimple_test_f, fallback);
8389 	      break;
8390 
8391 	    case ARRAY_REF:
8392 	    case ARRAY_RANGE_REF:
8393 	      gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
8394 			     gimple_test_f, fallback);
8395 	      gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
8396 			     gimple_test_f, fallback);
8397 	      break;
8398 
8399 	    default:
8400 	       /* Anything else with side-effects must be converted to
8401 		  a valid statement before we get here.  */
8402 	      gcc_unreachable ();
8403 	    }
8404 
8405 	  *expr_p = NULL;
8406 	}
8407       else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p))
8408 	       && TYPE_MODE (TREE_TYPE (*expr_p)) != BLKmode)
8409 	{
8410 	  /* Historically, the compiler has treated a bare reference
8411 	     to a non-BLKmode volatile lvalue as forcing a load.  */
8412 	  tree type = TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p));
8413 
8414 	  /* Normally, we do not want to create a temporary for a
8415 	     TREE_ADDRESSABLE type because such a type should not be
8416 	     copied by bitwise-assignment.  However, we make an
8417 	     exception here, as all we are doing here is ensuring that
8418 	     we read the bytes that make up the type.  We use
8419 	     create_tmp_var_raw because create_tmp_var will abort when
8420 	     given a TREE_ADDRESSABLE type.  */
8421 	  tree tmp = create_tmp_var_raw (type, "vol");
8422 	  gimple_add_tmp_var (tmp);
8423 	  gimplify_assign (tmp, *expr_p, pre_p);
8424 	  *expr_p = NULL;
8425 	}
8426       else
8427 	/* We can't do anything useful with a volatile reference to
8428 	   an incomplete type, so just throw it away.  Likewise for
8429 	   a BLKmode type, since any implicit inner load should
8430 	   already have been turned into an explicit one by the
8431 	   gimplification process.  */
8432 	*expr_p = NULL;
8433     }
8434 
8435   /* If we are gimplifying at the statement level, we're done.  Tack
8436      everything together and return.  */
8437   if (fallback == fb_none || is_statement)
8438     {
8439       /* Since *EXPR_P has been converted into a GIMPLE tuple, clear
8440          it out for GC to reclaim it.  */
8441       *expr_p = NULL_TREE;
8442 
8443       if (!gimple_seq_empty_p (internal_pre)
8444 	  || !gimple_seq_empty_p (internal_post))
8445 	{
8446 	  gimplify_seq_add_seq (&internal_pre, internal_post);
8447 	  gimplify_seq_add_seq (pre_p, internal_pre);
8448 	}
8449 
8450       /* The result of gimplifying *EXPR_P is going to be the last few
8451 	 statements in *PRE_P and *POST_P.  Add location information
8452 	 to all the statements that were added by the gimplification
8453 	 helpers.  */
8454       if (!gimple_seq_empty_p (*pre_p))
8455 	annotate_all_with_location_after (*pre_p, pre_last_gsi, input_location);
8456 
8457       if (!gimple_seq_empty_p (*post_p))
8458 	annotate_all_with_location_after (*post_p, post_last_gsi,
8459 					  input_location);
8460 
8461       goto out;
8462     }
8463 
8464 #ifdef ENABLE_GIMPLE_CHECKING
8465   if (*expr_p)
8466     {
8467       enum tree_code code = TREE_CODE (*expr_p);
8468       /* These expressions should already be in gimple IR form.  */
8469       gcc_assert (code != MODIFY_EXPR
8470 		  && code != ASM_EXPR
8471 		  && code != BIND_EXPR
8472 		  && code != CATCH_EXPR
8473 		  && (code != COND_EXPR || gimplify_ctxp->allow_rhs_cond_expr)
8474 		  && code != EH_FILTER_EXPR
8475 		  && code != GOTO_EXPR
8476 		  && code != LABEL_EXPR
8477 		  && code != LOOP_EXPR
8478 		  && code != SWITCH_EXPR
8479 		  && code != TRY_FINALLY_EXPR
8480 		  && code != OMP_CRITICAL
8481 		  && code != OMP_FOR
8482 		  && code != OMP_MASTER
8483 		  && code != OMP_TASKGROUP
8484 		  && code != OMP_ORDERED
8485 		  && code != OMP_PARALLEL
8486 		  && code != OMP_SECTIONS
8487 		  && code != OMP_SECTION
8488 		  && code != OMP_SINGLE);
8489     }
8490 #endif
8491 
8492   /* Otherwise we're gimplifying a subexpression, so the resulting
8493      value is interesting.  If it's a valid operand that matches
8494      GIMPLE_TEST_F, we're done, unless we are handling some
8495      post-effects internally; in that case, we need to copy into
8496      a temporary before adding the post-effects to POST_P.  */
8497   if (gimple_seq_empty_p (internal_post) && (*gimple_test_f) (*expr_p))
8498     goto out;
8499 
8500   /* Otherwise, we need to create a new temporary for the gimplified
8501      expression.  */
8502 
8503   /* We can't return an lvalue if we have an internal postqueue.  The
8504      object the lvalue refers to would (probably) be modified by the
8505      postqueue; we need to copy the value out first, which means an
8506      rvalue.  */
8507   if ((fallback & fb_lvalue)
8508       && gimple_seq_empty_p (internal_post)
8509       && is_gimple_addressable (*expr_p))
8510     {
8511       /* An lvalue will do.  Take the address of the expression, store it
8512 	 in a temporary, and replace the expression with an INDIRECT_REF of
8513 	 that temporary.  */
8514       tmp = build_fold_addr_expr_loc (input_location, *expr_p);
8515       gimplify_expr (&tmp, pre_p, post_p, is_gimple_reg, fb_rvalue);
8516       *expr_p = build_simple_mem_ref (tmp);
8517     }
8518   else if ((fallback & fb_rvalue) && is_gimple_reg_rhs_or_call (*expr_p))
8519     {
8520       /* An rvalue will do.  Assign the gimplified expression into a
8521 	 new temporary TMP and replace the original expression with
8522 	 TMP.  First, make sure that the expression has a type so that
8523 	 it can be assigned into a temporary.  */
8524       gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p)));
8525       *expr_p = get_formal_tmp_var (*expr_p, pre_p);
8526     }
8527   else
8528     {
8529 #ifdef ENABLE_GIMPLE_CHECKING
8530       if (!(fallback & fb_mayfail))
8531 	{
8532 	  fprintf (stderr, "gimplification failed:\n");
8533 	  print_generic_expr (stderr, *expr_p, 0);
8534 	  debug_tree (*expr_p);
8535 	  internal_error ("gimplification failed");
8536 	}
8537 #endif
8538       gcc_assert (fallback & fb_mayfail);
8539 
8540       /* If this is an asm statement, and the user asked for the
8541 	 impossible, don't die.  Fail and let gimplify_asm_expr
8542 	 issue an error.  */
8543       ret = GS_ERROR;
8544       goto out;
8545     }
8546 
8547   /* Make sure the temporary matches our predicate.  */
8548   gcc_assert ((*gimple_test_f) (*expr_p));
8549 
8550   if (!gimple_seq_empty_p (internal_post))
8551     {
8552       annotate_all_with_location (internal_post, input_location);
8553       gimplify_seq_add_seq (pre_p, internal_post);
8554     }
8555 
8556  out:
8557   input_location = saved_location;
8558   return ret;
8559 }
8560 
8561 /* Look through TYPE for variable-sized objects and gimplify each such
8562    size that we find.  Add to LIST_P any statements generated.  */
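/* For example, for a C99 VLA "char buf[n + 1]" the array type's size is
   an expression involving N; gimplify_one_sizepos below emits statements
   that compute such sizes into temporaries on LIST_P, so later uses of
   the type see simple variables.  (Illustrative sketch; the exact trees
   depend on the front end.)  */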
8563 
8564 void
8565 gimplify_type_sizes (tree type, gimple_seq *list_p)
8566 {
8567   tree field, t;
8568 
8569   if (type == NULL || type == error_mark_node)
8570     return;
8571 
8572   /* We first do the main variant, then copy into any other variants.  */
8573   type = TYPE_MAIN_VARIANT (type);
8574 
8575   /* Avoid infinite recursion.  */
8576   if (TYPE_SIZES_GIMPLIFIED (type))
8577     return;
8578 
8579   TYPE_SIZES_GIMPLIFIED (type) = 1;
8580 
8581   switch (TREE_CODE (type))
8582     {
8583     case INTEGER_TYPE:
8584     case ENUMERAL_TYPE:
8585     case BOOLEAN_TYPE:
8586     case REAL_TYPE:
8587     case FIXED_POINT_TYPE:
8588       gimplify_one_sizepos (&TYPE_MIN_VALUE (type), list_p);
8589       gimplify_one_sizepos (&TYPE_MAX_VALUE (type), list_p);
8590 
8591       for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
8592 	{
8593 	  TYPE_MIN_VALUE (t) = TYPE_MIN_VALUE (type);
8594 	  TYPE_MAX_VALUE (t) = TYPE_MAX_VALUE (type);
8595 	}
8596       break;
8597 
8598     case ARRAY_TYPE:
8599       /* These types may not have declarations, so handle them here.  */
8600       gimplify_type_sizes (TREE_TYPE (type), list_p);
8601       gimplify_type_sizes (TYPE_DOMAIN (type), list_p);
8602       /* Ensure VLA bounds aren't removed: at -O0 they should be variables
8603 	 with assigned stack slots, and at -O1+ with -g they should be
8604 	 tracked by VTA.  */
8605       if (!(TYPE_NAME (type)
8606 	    && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
8607 	    && DECL_IGNORED_P (TYPE_NAME (type)))
8608 	  && TYPE_DOMAIN (type)
8609 	  && INTEGRAL_TYPE_P (TYPE_DOMAIN (type)))
8610 	{
8611 	  t = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
8612 	  if (t && TREE_CODE (t) == VAR_DECL && DECL_ARTIFICIAL (t))
8613 	    DECL_IGNORED_P (t) = 0;
8614 	  t = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
8615 	  if (t && TREE_CODE (t) == VAR_DECL && DECL_ARTIFICIAL (t))
8616 	    DECL_IGNORED_P (t) = 0;
8617 	}
8618       break;
8619 
8620     case RECORD_TYPE:
8621     case UNION_TYPE:
8622     case QUAL_UNION_TYPE:
8623       for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
8624 	if (TREE_CODE (field) == FIELD_DECL)
8625 	  {
8626 	    gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), list_p);
8627 	    gimplify_one_sizepos (&DECL_SIZE (field), list_p);
8628 	    gimplify_one_sizepos (&DECL_SIZE_UNIT (field), list_p);
8629 	    gimplify_type_sizes (TREE_TYPE (field), list_p);
8630 	  }
8631       break;
8632 
8633     case POINTER_TYPE:
8634     case REFERENCE_TYPE:
8635 	/* We used to recurse on the pointed-to type here, which turned out to
8636 	   be incorrect because its definition might refer to variables not
8637 	   yet initialized at this point if a forward declaration is involved.
8638 
8639 	   It was actually useful for anonymous pointed-to types to ensure
8640 	   that the sizes evaluation dominates every possible later use of the
8641 	   values.  Restricting to such types here would be safe since there
8642 	   is no possible forward declaration around, but would introduce an
8643 	   undesirable middle-end semantic to anonymity.  We then defer to
8644 	   front-ends the responsibility of ensuring that the sizes are
8645 	   evaluated both early and late enough, e.g. by attaching artificial
8646 	   type declarations to the tree.  */
8647       break;
8648 
8649     default:
8650       break;
8651     }
8652 
8653   gimplify_one_sizepos (&TYPE_SIZE (type), list_p);
8654   gimplify_one_sizepos (&TYPE_SIZE_UNIT (type), list_p);
8655 
8656   for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
8657     {
8658       TYPE_SIZE (t) = TYPE_SIZE (type);
8659       TYPE_SIZE_UNIT (t) = TYPE_SIZE_UNIT (type);
8660       TYPE_SIZES_GIMPLIFIED (t) = 1;
8661     }
8662 }
8663 
8664 /* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
8665    a size or position, has had all of its SAVE_EXPRs evaluated.
8666    We add any required statements to *STMT_P.  */
8667 
8668 void
8669 gimplify_one_sizepos (tree *expr_p, gimple_seq *stmt_p)
8670 {
8671   tree expr = *expr_p;
8672 
8673   /* We don't do anything if the value isn't there, is constant, or contains
8674      a PLACEHOLDER_EXPR.  We also don't want to do anything if it's already
8675      a VAR_DECL.  If it's a VAR_DECL from another function, the gimplifier
8676      will want to replace it with a new variable, but that will cause problems
8677      if this type is from outside the function.  It's OK to have that here.  */
8678   if (is_gimple_sizepos (expr))
8679     return;
8680 
8681   *expr_p = unshare_expr (expr);
8682 
8683   gimplify_expr (expr_p, stmt_p, NULL, is_gimple_val, fb_rvalue);
8684 }
8685 
8686 /* Gimplify the statements in the body of FNDECL and return a GIMPLE_BIND
8687    containing the sequence of corresponding GIMPLE statements.  If DO_PARMS
8688    is true, also gimplify the parameters.  */
8689 
8690 gimple
8691 gimplify_body (tree fndecl, bool do_parms)
8692 {
8693   location_t saved_location = input_location;
8694   gimple_seq parm_stmts, seq;
8695   gimple outer_bind;
8696   struct cgraph_node *cgn;
8697 
8698   timevar_push (TV_TREE_GIMPLIFY);
8699 
8700   /* Initialize for optimize_insn_for_s{ize,peed}_p possibly called during
8701      gimplification.  */
8702   default_rtl_profile ();
8703 
8704   gcc_assert (gimplify_ctxp == NULL);
8705   push_gimplify_context ();
8706 
8707   if (flag_openmp)
8708     {
8709       gcc_assert (gimplify_omp_ctxp == NULL);
8710       if (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (fndecl)))
8711 	gimplify_omp_ctxp = new_omp_context (ORT_TARGET);
8712     }
8713 
8714   /* Unshare most shared trees in the body and in that of any nested functions.
8715      It would seem we don't have to do this for nested functions because
8716      they are supposed to be output and then the outer function gimplified
8717      first, but the g++ front end doesn't always do it that way.  */
8718   unshare_body (fndecl);
8719   unvisit_body (fndecl);
8720 
8721   cgn = cgraph_get_node (fndecl);
8722   if (cgn && cgn->origin)
8723     nonlocal_vlas = pointer_set_create ();
8724 
8725   /* Make sure input_location isn't set to something weird.  */
8726   input_location = DECL_SOURCE_LOCATION (fndecl);
8727 
8728   /* Resolve callee-copies.  This has to be done before processing
8729      the body so that DECL_VALUE_EXPR gets processed correctly.  */
8730   parm_stmts = do_parms ? gimplify_parameters () : NULL;
8731 
8732   /* Gimplify the function's body.  */
8733   seq = NULL;
8734   gimplify_stmt (&DECL_SAVED_TREE (fndecl), &seq);
8735   outer_bind = gimple_seq_first_stmt (seq);
8736   if (!outer_bind)
8737     {
8738       outer_bind = gimple_build_nop ();
8739       gimplify_seq_add_stmt (&seq, outer_bind);
8740     }
8741 
8742   /* The body must contain exactly one statement, a GIMPLE_BIND.  If this is
8743      not the case, wrap everything in a GIMPLE_BIND to make it so.  */
8744   if (gimple_code (outer_bind) == GIMPLE_BIND
8745       && gimple_seq_first (seq) == gimple_seq_last (seq))
8746     ;
8747   else
8748     outer_bind = gimple_build_bind (NULL_TREE, seq, NULL);
8749 
8750   DECL_SAVED_TREE (fndecl) = NULL_TREE;
8751 
8752   /* If we had callee-copies statements, insert them at the beginning
8753      of the function and clear DECL_VALUE_EXPR_P on the parameters.  */
8754   if (!gimple_seq_empty_p (parm_stmts))
8755     {
8756       tree parm;
8757 
8758       gimplify_seq_add_seq (&parm_stmts, gimple_bind_body (outer_bind));
8759       gimple_bind_set_body (outer_bind, parm_stmts);
8760 
8761       for (parm = DECL_ARGUMENTS (current_function_decl);
8762 	   parm; parm = DECL_CHAIN (parm))
8763 	if (DECL_HAS_VALUE_EXPR_P (parm))
8764 	  {
8765 	    DECL_HAS_VALUE_EXPR_P (parm) = 0;
8766 	    DECL_IGNORED_P (parm) = 0;
8767 	  }
8768     }
8769 
8770   if (nonlocal_vlas)
8771     {
8772       if (nonlocal_vla_vars)
8773 	{
8774 	  /* tree-nested.c may later on call declare_vars (..., true);
8775 	     which relies on BLOCK_VARS chain to be the tail of the
8776 	     gimple_bind_vars chain.  Ensure we don't violate that
8777 	     assumption.  */
8778 	  if (gimple_bind_block (outer_bind)
8779 	      == DECL_INITIAL (current_function_decl))
8780 	    declare_vars (nonlocal_vla_vars, outer_bind, true);
8781 	  else
8782 	    BLOCK_VARS (DECL_INITIAL (current_function_decl))
8783 	      = chainon (BLOCK_VARS (DECL_INITIAL (current_function_decl)),
8784 			 nonlocal_vla_vars);
8785 	  nonlocal_vla_vars = NULL_TREE;
8786 	}
8787       pointer_set_destroy (nonlocal_vlas);
8788       nonlocal_vlas = NULL;
8789     }
8790 
8791   if ((flag_openmp || flag_openmp_simd) && gimplify_omp_ctxp)
8792     {
8793       delete_omp_context (gimplify_omp_ctxp);
8794       gimplify_omp_ctxp = NULL;
8795     }
8796 
8797   pop_gimplify_context (outer_bind);
8798   gcc_assert (gimplify_ctxp == NULL);
8799 
8800 #ifdef ENABLE_CHECKING
8801   if (!seen_error ())
8802     verify_gimple_in_seq (gimple_bind_body (outer_bind));
8803 #endif
8804 
8805   timevar_pop (TV_TREE_GIMPLIFY);
8806   input_location = saved_location;
8807 
8808   return outer_bind;
8809 }
8810 
8811 typedef char *char_p; /* For DEF_VEC_P.  */
8812 
8813 /* Return whether we should exclude FNDECL from instrumentation.  */
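/* For example, with "-finstrument-functions-exclude-function-list=foo,bar"
   any function whose printable name contains the substring "foo" or "bar"
   is excluded; "-finstrument-functions-exclude-file-list" works the same
   way on DECL_SOURCE_FILE.  */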
8814 
8815 static bool
8816 flag_instrument_functions_exclude_p (tree fndecl)
8817 {
8818   vec<char_p> *v;
8819 
8820   v = (vec<char_p> *) flag_instrument_functions_exclude_functions;
8821   if (v && v->length () > 0)
8822     {
8823       const char *name;
8824       int i;
8825       char *s;
8826 
8827       name = lang_hooks.decl_printable_name (fndecl, 0);
8828       FOR_EACH_VEC_ELT (*v, i, s)
8829 	if (strstr (name, s) != NULL)
8830 	  return true;
8831     }
8832 
8833   v = (vec<char_p> *) flag_instrument_functions_exclude_files;
8834   if (v && v->length () > 0)
8835     {
8836       const char *name;
8837       int i;
8838       char *s;
8839 
8840       name = DECL_SOURCE_FILE (fndecl);
8841       FOR_EACH_VEC_ELT (*v, i, s)
8842 	if (strstr (name, s) != NULL)
8843 	  return true;
8844     }
8845 
8846   return false;
8847 }
8848 
8849 /* Entry point to the gimplification pass.  FNDECL is the FUNCTION_DECL
8850    node for the function we want to gimplify.
8851 
8852    The sequence of GIMPLE statements corresponding to the body of FNDECL
8853    is stored into FNDECL with gimple_set_body; nothing is returned.  */
8854 
8855 void
8856 gimplify_function_tree (tree fndecl)
8857 {
8858   tree parm, ret;
8859   gimple_seq seq;
8860   gimple bind;
8861 
8862   gcc_assert (!gimple_body (fndecl));
8863 
8864   if (DECL_STRUCT_FUNCTION (fndecl))
8865     push_cfun (DECL_STRUCT_FUNCTION (fndecl));
8866   else
8867     push_struct_function (fndecl);
8868 
8869   for (parm = DECL_ARGUMENTS (fndecl); parm ; parm = DECL_CHAIN (parm))
8870     {
8871       /* Preliminarily mark non-addressed complex and vector variables as
8872          eligible for promotion to gimple registers.  We'll transform
8873          their uses as we find them.  */
8874       if ((TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
8875 	   || TREE_CODE (TREE_TYPE (parm)) == VECTOR_TYPE)
8876           && !TREE_THIS_VOLATILE (parm)
8877           && !needs_to_live_in_memory (parm))
8878         DECL_GIMPLE_REG_P (parm) = 1;
8879     }
8880 
8881   ret = DECL_RESULT (fndecl);
8882   if ((TREE_CODE (TREE_TYPE (ret)) == COMPLEX_TYPE
8883        || TREE_CODE (TREE_TYPE (ret)) == VECTOR_TYPE)
8884       && !needs_to_live_in_memory (ret))
8885     DECL_GIMPLE_REG_P (ret) = 1;
8886 
8887   bind = gimplify_body (fndecl, true);
8888 
8889   /* The tree body of the function is no longer needed, replace it
8890      with the new GIMPLE body.  */
8891   seq = NULL;
8892   gimple_seq_add_stmt (&seq, bind);
8893   gimple_set_body (fndecl, seq);
8894 
8895   /* If we're instrumenting function entry/exit, then prepend the call to
8896      the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
8897      catch the exit hook.  */
8898   /* ??? Add some way to ignore exceptions for this TFE.  */
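  /* Roughly, the instrumented function then has the shape (illustrative
     GIMPLE pseudo-dump):

	 return_addr = __builtin_return_address (0);
	 __cyg_profile_func_enter (&this_fn, return_addr);
	 try
	   {
	     <original function body>
	   }
	 finally
	   {
	     return_addr = __builtin_return_address (0);
	     __cyg_profile_func_exit (&this_fn, return_addr);
	   }
  */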
8899   if (flag_instrument_function_entry_exit
8900       && !DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl)
8901       && !flag_instrument_functions_exclude_p (fndecl))
8902     {
8903       tree x;
8904       gimple new_bind;
8905       gimple tf;
8906       gimple_seq cleanup = NULL, body = NULL;
8907       tree tmp_var;
8908       gimple call;
8909 
8910       x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
8911       call = gimple_build_call (x, 1, integer_zero_node);
8912       tmp_var = create_tmp_var (ptr_type_node, "return_addr");
8913       gimple_call_set_lhs (call, tmp_var);
8914       gimplify_seq_add_stmt (&cleanup, call);
8915       x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_EXIT);
8916       call = gimple_build_call (x, 2,
8917 				build_fold_addr_expr (current_function_decl),
8918 				tmp_var);
8919       gimplify_seq_add_stmt (&cleanup, call);
8920       tf = gimple_build_try (seq, cleanup, GIMPLE_TRY_FINALLY);
8921 
8922       x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
8923       call = gimple_build_call (x, 1, integer_zero_node);
8924       tmp_var = create_tmp_var (ptr_type_node, "return_addr");
8925       gimple_call_set_lhs (call, tmp_var);
8926       gimplify_seq_add_stmt (&body, call);
8927       x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_ENTER);
8928       call = gimple_build_call (x, 2,
8929 				build_fold_addr_expr (current_function_decl),
8930 				tmp_var);
8931       gimplify_seq_add_stmt (&body, call);
8932       gimplify_seq_add_stmt (&body, tf);
8933       new_bind = gimple_build_bind (NULL, body, gimple_bind_block (bind));
8934       /* Clear the block for BIND, since it is no longer directly inside
8935          the function, but within a try block.  */
8936       gimple_bind_set_block (bind, NULL);
8937 
8938       /* Replace the current function body with the body
8939          wrapped in the try/finally TF.  */
8940       seq = NULL;
8941       gimple_seq_add_stmt (&seq, new_bind);
8942       gimple_set_body (fndecl, seq);
8943     }
8944 
8945   DECL_SAVED_TREE (fndecl) = NULL_TREE;
8946   cfun->curr_properties = PROP_gimple_any;
8947 
8948   pop_cfun ();
8949 }
8950 
8951 /* Return a dummy expression of type TYPE in order to keep going after an
8952    error.  */
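/* The result is effectively "*(TYPE *)0": a MEM_REF whose base and offset
   operands are both a zero constant of pointer-to-TYPE type.  */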
8953 
8954 static tree
8955 dummy_object (tree type)
8956 {
8957   tree t = build_int_cst (build_pointer_type (type), 0);
8958   return build2 (MEM_REF, type, t, t);
8959 }
8960 
8961 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
8962    builtin function, but a very special sort of operator.  */
8963 
8964 enum gimplify_status
8965 gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
8966 {
8967   tree promoted_type, have_va_type;
8968   tree valist = TREE_OPERAND (*expr_p, 0);
8969   tree type = TREE_TYPE (*expr_p);
8970   tree t;
8971   location_t loc = EXPR_LOCATION (*expr_p);
8972 
8973   /* Verify that valist is of the proper type.  */
8974   have_va_type = TREE_TYPE (valist);
8975   if (have_va_type == error_mark_node)
8976     return GS_ERROR;
8977   have_va_type = targetm.canonical_va_list_type (have_va_type);
8978 
8979   if (have_va_type == NULL_TREE)
8980     {
8981       error_at (loc, "first argument to %<va_arg%> not of type %<va_list%>");
8982       return GS_ERROR;
8983     }
8984 
8985   /* Generate a diagnostic for requesting data of a type that cannot
8986      be passed through `...' due to type promotion at the call site.  */
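  /* Classic example: "va_arg (ap, char)" is diagnosed, because a char
     argument is promoted to int at the call site, so the callee would
     have to read it with "va_arg (ap, int)".  */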
8987   if ((promoted_type = lang_hooks.types.type_promotes_to (type))
8988 	   != type)
8989     {
8990       static bool gave_help;
8991       bool warned;
8992 
8993       /* Unfortunately, this is merely undefined, rather than a constraint
8994 	 violation, so we cannot make this an error.  If this call is never
8995 	 executed, the program is still strictly conforming.  */
8996       warned = warning_at (loc, 0,
8997 	  		   "%qT is promoted to %qT when passed through %<...%>",
8998 			   type, promoted_type);
8999       if (!gave_help && warned)
9000 	{
9001 	  gave_help = true;
9002 	  inform (loc, "(so you should pass %qT not %qT to %<va_arg%>)",
9003 		  promoted_type, type);
9004 	}
9005 
9006       /* We can, however, treat "undefined" any way we please.
9007 	 Call abort to encourage the user to fix the program.  */
9008       if (warned)
9009 	inform (loc, "if this code is reached, the program will abort");
9010       /* Before the abort, allow the evaluation of the va_list
9011 	 expression to exit or longjmp.  */
9012       gimplify_and_add (valist, pre_p);
9013       t = build_call_expr_loc (loc,
9014 			       builtin_decl_implicit (BUILT_IN_TRAP), 0);
9015       gimplify_and_add (t, pre_p);
9016 
9017       /* This is dead code, but go ahead and finish so that the
9018 	 mode of the result comes out right.  */
9019       *expr_p = dummy_object (type);
9020       return GS_ALL_DONE;
9021     }
9022   else
9023     {
9024       /* Make it easier for the backends by protecting the valist argument
9025 	 from multiple evaluations.  */
9026       if (TREE_CODE (have_va_type) == ARRAY_TYPE)
9027 	{
9028 	  /* For this case, the backends will be expecting a pointer to
9029 	     TREE_TYPE (have_va_type), but it's possible we've actually been
9030 	     given an array (an actual TARGET_FN_ABI_VA_LIST).  So fix it
9031 	     here.  */
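	  /* E.g. on x86_64 the ABI va_list type is the one-element array
	     "struct __va_list_tag[1]", so VALIST may be such an array; we
	     take its address and convert it to a pointer to the element
	     type here.  (Illustrative; the exact type is target-defined.)  */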
9032 	  if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
9033 	    {
9034 	      tree p1 = build_pointer_type (TREE_TYPE (have_va_type));
9035 	      valist = fold_convert_loc (loc, p1,
9036 					 build_fold_addr_expr_loc (loc, valist));
9037 	    }
9038 
9039 	  gimplify_expr (&valist, pre_p, post_p, is_gimple_val, fb_rvalue);
9040 	}
9041       else
9042 	gimplify_expr (&valist, pre_p, post_p, is_gimple_min_lval, fb_lvalue);
9043 
9044       if (!targetm.gimplify_va_arg_expr)
9045 	/* FIXME: Once most targets are converted we should merely
9046 	   assert this is non-null.  */
9047 	return GS_ALL_DONE;
9048 
9049       *expr_p = targetm.gimplify_va_arg_expr (valist, type, pre_p, post_p);
9050       return GS_OK;
9051     }
9052 }
9053 
9054 /* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.
9055 
9056    DST/SRC are the destination and source respectively.  You can pass
9057    ungimplified trees in DST or SRC, in which case they will be
9058    converted to a gimple operand if necessary.
9059 
9060    This function returns the newly created GIMPLE_ASSIGN tuple.  */
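/* Usage sketch (DST_VAR and SRC_EXPR are hypothetical trees built by the
   caller):

     gimple_seq seq = NULL;
     gimplify_assign (dst_var, src_expr, &seq);

   This appends the statements needed to gimplify SRC_EXPR, followed by
   the final GIMPLE_ASSIGN, to SEQ and returns that last statement.  */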
9061 
9062 gimple
9063 gimplify_assign (tree dst, tree src, gimple_seq *seq_p)
9064 {
9065   tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
9066   gimplify_and_add (t, seq_p);
9067   ggc_free (t);
9068   return gimple_seq_last_stmt (*seq_p);
9069 }
9070 
9071 inline hashval_t
9072 gimplify_hasher::hash (const value_type *p)
9073 {
9074   tree t = p->val;
9075   return iterative_hash_expr (t, 0);
9076 }
9077 
9078 inline bool
9079 gimplify_hasher::equal (const value_type *p1, const compare_type *p2)
9080 {
9081   tree t1 = p1->val;
9082   tree t2 = p2->val;
9083   enum tree_code code = TREE_CODE (t1);
9084 
9085   if (TREE_CODE (t2) != code
9086       || TREE_TYPE (t1) != TREE_TYPE (t2))
9087     return false;
9088 
9089   if (!operand_equal_p (t1, t2, 0))
9090     return false;
9091 
9092 #ifdef ENABLE_CHECKING
9093   /* Only allow them to compare equal if they also hash equal; otherwise
9094      results are nondeterministic, and we fail bootstrap comparison.  */
9095   gcc_assert (hash (p1) == hash (p2));
9096 #endif
9097 
9098   return true;
9099 }
9100