1 /* Tree lowering pass. This pass converts the GENERIC functions-as-trees
2 tree representation into the GIMPLE form.
3 Copyright (C) 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.
4 Major work done by Sebastian Pop <s.pop@laposte.net>,
5 Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 2, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING. If not, write to the Free
21 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
22 02110-1301, USA. */
23
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "tm.h"
28 #include "tree.h"
29 #include "rtl.h"
30 #include "varray.h"
31 #include "tree-gimple.h"
32 #include "tree-inline.h"
33 #include "diagnostic.h"
34 #include "langhooks.h"
35 #include "langhooks-def.h"
36 #include "tree-flow.h"
37 #include "cgraph.h"
38 #include "timevar.h"
39 #include "except.h"
40 #include "hashtab.h"
41 #include "flags.h"
42 #include "real.h"
43 #include "function.h"
44 #include "output.h"
45 #include "expr.h"
46 #include "ggc.h"
47 #include "toplev.h"
48 #include "target.h"
49 #include "optabs.h"
50 #include "pointer-set.h"
51
52
53 enum gimplify_omp_var_data
54 {
55 GOVD_SEEN = 1,
56 GOVD_EXPLICIT = 2,
57 GOVD_SHARED = 4,
58 GOVD_PRIVATE = 8,
59 GOVD_FIRSTPRIVATE = 16,
60 GOVD_LASTPRIVATE = 32,
61 GOVD_REDUCTION = 64,
62 GOVD_LOCAL = 128,
63 GOVD_DEBUG_PRIVATE = 256,
64 GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE
65 | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LOCAL)
66 };
67
68 struct gimplify_omp_ctx
69 {
70 struct gimplify_omp_ctx *outer_context;
71 splay_tree variables;
72 struct pointer_set_t *privatized_types;
73 location_t location;
74 enum omp_clause_default_kind default_kind;
75 bool is_parallel;
76 bool is_combined_parallel;
77 };
78
79 struct gimplify_ctx
80 {
81 struct gimplify_ctx *prev_context;
82
83 tree current_bind_expr;
84 tree temps;
85 tree conditional_cleanups;
86 tree exit_label;
87 tree return_temp;
88
89 VEC(tree,heap) *case_labels;
90 /* The formal temporary table. Should this be persistent? */
91 htab_t temp_htab;
92
93 int conditions;
94 bool save_stack;
95 bool into_ssa;
96 };
97
98 static struct gimplify_ctx *gimplify_ctxp;
99 static struct gimplify_omp_ctx *gimplify_omp_ctxp;
100
101
102
103 /* Formal (expression) temporary table handling: Multiple occurrences of
104 the same scalar expression are evaluated into the same temporary. */
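/* For instance (a sketch): when optimizing, two gimplifications of the
   same "a + b" subexpression get back the same formal temporary from
   lookup_tmp_var, so only one "tmp = a + b" assignment is emitted.  */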
105
106 typedef struct gimple_temp_hash_elt
107 {
108 tree val; /* Key */
109 tree temp; /* Value */
110 } elt_t;
111
112 /* Forward declarations. */
113 static enum gimplify_status gimplify_compound_expr (tree *, tree *, bool);
114 #ifdef ENABLE_CHECKING
115 static bool cpt_same_type (tree a, tree b);
116 #endif
117
118
119 /* Return a hash value for a formal temporary table entry. */
120
121 static hashval_t
122 gimple_tree_hash (const void *p)
123 {
124 tree t = ((const elt_t *) p)->val;
125 return iterative_hash_expr (t, 0);
126 }
127
128 /* Compare two formal temporary table entries. */
129
130 static int
131 gimple_tree_eq (const void *p1, const void *p2)
132 {
133 tree t1 = ((const elt_t *) p1)->val;
134 tree t2 = ((const elt_t *) p2)->val;
135 enum tree_code code = TREE_CODE (t1);
136
137 if (TREE_CODE (t2) != code
138 || TREE_TYPE (t1) != TREE_TYPE (t2))
139 return 0;
140
141 if (!operand_equal_p (t1, t2, 0))
142 return 0;
143
144 /* Only allow them to compare equal if they also hash equal; otherwise
145 results are nondeterministic, and we fail bootstrap comparison. */
146 gcc_assert (gimple_tree_hash (p1) == gimple_tree_hash (p2));
147
148 return 1;
149 }
150
151 /* Set up a context for the gimplifier. */
152
153 void
154 push_gimplify_context (void)
155 {
156 struct gimplify_ctx *c;
157
158 c = (struct gimplify_ctx *) xcalloc (1, sizeof (struct gimplify_ctx));
159 c->prev_context = gimplify_ctxp;
160 if (optimize)
161 c->temp_htab = htab_create (1000, gimple_tree_hash, gimple_tree_eq, free);
162
163 gimplify_ctxp = c;
164 }
165
166 /* Tear down a context for the gimplifier. If BODY is non-null, then
167 put the temporaries into the outer BIND_EXPR. Otherwise, put them
168 in the unexpanded_var_list. */
169
170 void
171 pop_gimplify_context (tree body)
172 {
173 struct gimplify_ctx *c = gimplify_ctxp;
174 tree t;
175
176 gcc_assert (c && !c->current_bind_expr);
177 gimplify_ctxp = c->prev_context;
178
179 for (t = c->temps; t ; t = TREE_CHAIN (t))
180 DECL_GIMPLE_FORMAL_TEMP_P (t) = 0;
181
182 if (body)
183 declare_vars (c->temps, body, false);
184 else
185 record_vars (c->temps);
186
187 if (optimize)
188 htab_delete (c->temp_htab);
189 free (c);
190 }
191
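/* Push BIND, a BIND_EXPR, onto the gimplifier's stack of enclosing bind
   expressions, chaining it through TREE_CHAIN to the previous innermost
   one.  */
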
192 static void
193 gimple_push_bind_expr (tree bind)
194 {
195 TREE_CHAIN (bind) = gimplify_ctxp->current_bind_expr;
196 gimplify_ctxp->current_bind_expr = bind;
197 }
198
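/* Pop the innermost bind expression off the gimplifier's stack.  */
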
199 static void
200 gimple_pop_bind_expr (void)
201 {
202 gimplify_ctxp->current_bind_expr
203 = TREE_CHAIN (gimplify_ctxp->current_bind_expr);
204 }
205
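/* Return the innermost BIND_EXPR currently being gimplified, or NULL_TREE
   if there is none.  */
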
206 tree
207 gimple_current_bind_expr (void)
208 {
209 return gimplify_ctxp->current_bind_expr;
210 }
211
212 /* Returns true iff there is a COND_EXPR between us and the innermost
213 CLEANUP_POINT_EXPR. This info is used by gimple_push_cleanup. */
214
215 static bool
216 gimple_conditional_context (void)
217 {
218 return gimplify_ctxp->conditions > 0;
219 }
220
221 /* Note that we've entered a COND_EXPR. */
222
223 static void
224 gimple_push_condition (void)
225 {
226 #ifdef ENABLE_CHECKING
227 if (gimplify_ctxp->conditions == 0)
228 gcc_assert (!gimplify_ctxp->conditional_cleanups);
229 #endif
230 ++(gimplify_ctxp->conditions);
231 }
232
233 /* Note that we've left a COND_EXPR. If we're back at unconditional scope
234 now, add any conditional cleanups we've seen to the prequeue. */
235
236 static void
237 gimple_pop_condition (tree *pre_p)
238 {
239 int conds = --(gimplify_ctxp->conditions);
240
241 gcc_assert (conds >= 0);
242 if (conds == 0)
243 {
244 append_to_statement_list (gimplify_ctxp->conditional_cleanups, pre_p);
245 gimplify_ctxp->conditional_cleanups = NULL_TREE;
246 }
247 }
248
249 /* A stable comparison routine for use with splay trees and DECLs. */
250
251 static int
252 splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
253 {
254 tree a = (tree) xa;
255 tree b = (tree) xb;
256
257 return DECL_UID (a) - DECL_UID (b);
258 }
259
260 /* Create a new omp construct that deals with variable remapping. */
261
262 static struct gimplify_omp_ctx *
263 new_omp_context (bool is_parallel, bool is_combined_parallel)
264 {
265 struct gimplify_omp_ctx *c;
266
267 c = XCNEW (struct gimplify_omp_ctx);
268 c->outer_context = gimplify_omp_ctxp;
269 c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
270 c->privatized_types = pointer_set_create ();
271 c->location = input_location;
272 c->is_parallel = is_parallel;
273 c->is_combined_parallel = is_combined_parallel;
274 c->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
275
276 return c;
277 }
278
279 /* Destroy an omp construct that deals with variable remapping. */
280
281 static void
282 delete_omp_context (struct gimplify_omp_ctx *c)
283 {
284 splay_tree_delete (c->variables);
285 pointer_set_destroy (c->privatized_types);
286 XDELETE (c);
287 }
288
289 static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int);
290 static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool);
291
292 /* A subroutine of append_to_statement_list{,_force}. T is not NULL. */
293
294 static void
295 append_to_statement_list_1 (tree t, tree *list_p)
296 {
297 tree list = *list_p;
298 tree_stmt_iterator i;
299
300 if (!list)
301 {
302 if (t && TREE_CODE (t) == STATEMENT_LIST)
303 {
304 *list_p = t;
305 return;
306 }
307 *list_p = list = alloc_stmt_list ();
308 }
309
310 i = tsi_last (list);
311 tsi_link_after (&i, t, TSI_CONTINUE_LINKING);
312 }
313
314 /* Add T to the end of the list container pointed to by LIST_P.
315 If T is an expression with no effects, it is ignored. */
316
317 void
318 append_to_statement_list (tree t, tree *list_p)
319 {
320 if (t && TREE_SIDE_EFFECTS (t))
321 append_to_statement_list_1 (t, list_p);
322 }
323
324 /* Similar, but the statement is always added, regardless of side effects. */
325
326 void
327 append_to_statement_list_force (tree t, tree *list_p)
328 {
329 if (t != NULL_TREE)
330 append_to_statement_list_1 (t, list_p);
331 }
332
333 /* Both gimplify the statement T and append it to LIST_P. */
334
335 void
336 gimplify_and_add (tree t, tree *list_p)
337 {
338 gimplify_stmt (&t);
339 append_to_statement_list (t, list_p);
340 }
341
342 /* Strip off a legitimate source ending from the input string NAME of
343 length LEN. Rather than having to know the names used by all of
344 our front ends, we strip off an ending of a period followed by
345 up to five characters. (Java uses ".class".) */
346
347 static inline void
348 remove_suffix (char *name, int len)
349 {
350 int i;
351
352 for (i = 2; i < 8 && len > i; i++)
353 {
354 if (name[len - i] == '.')
355 {
356 name[len - i] = '\0';
357 break;
358 }
359 }
360 }
361
362 /* Create a nameless artificial label and put it in the current function
363 context. Returns the newly created label. */
364
365 tree
366 create_artificial_label (void)
367 {
368 tree lab = build_decl (LABEL_DECL, NULL_TREE, void_type_node);
369
370 DECL_ARTIFICIAL (lab) = 1;
371 DECL_IGNORED_P (lab) = 1;
372 DECL_CONTEXT (lab) = current_function_decl;
373 return lab;
374 }
375
376 /* Subroutine for find_single_pointer_decl. */
377
378 static tree
379 find_single_pointer_decl_1 (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
380 void *data)
381 {
382 tree *pdecl = (tree *) data;
383
384 if (DECL_P (*tp) && POINTER_TYPE_P (TREE_TYPE (*tp)))
385 {
386 if (*pdecl)
387 {
388 /* We already found a pointer decl; return anything other
389 than NULL_TREE to unwind from walk_tree signalling that
390 we have a duplicate. */
391 return *tp;
392 }
393 *pdecl = *tp;
394 }
395
396 return NULL_TREE;
397 }
398
399 /* Find the single DECL of pointer type in the tree T and return it.
400 If there is not exactly one such DECL, return NULL. */
401
402 static tree
403 find_single_pointer_decl (tree t)
404 {
405 tree decl = NULL_TREE;
406
407 if (walk_tree (&t, find_single_pointer_decl_1, &decl, NULL))
408 {
409 /* find_single_pointer_decl_1 returns a nonzero value, causing
410 walk_tree to return a nonzero value, to indicate that it
411 found more than one pointer DECL. */
412 return NULL_TREE;
413 }
414
415 return decl;
416 }
417
418 /* Create a new temporary name with PREFIX. Returns an identifier. */
419
420 static GTY(()) unsigned int tmp_var_id_num;
421
422 tree
423 create_tmp_var_name (const char *prefix)
424 {
425 char *tmp_name;
426
427 if (prefix)
428 {
429 char *preftmp = ASTRDUP (prefix);
430
431 remove_suffix (preftmp, strlen (preftmp));
432 prefix = preftmp;
433 }
434
435 ASM_FORMAT_PRIVATE_NAME (tmp_name, prefix ? prefix : "T", tmp_var_id_num++);
436 return get_identifier (tmp_name);
437 }
438
439
440 /* Create a new temporary variable declaration of type TYPE.
441 Does NOT push it into the current binding. */
442
443 tree
444 create_tmp_var_raw (tree type, const char *prefix)
445 {
446 tree tmp_var;
447 tree new_type;
448
449 /* Make the type of the variable writable. */
450 new_type = build_type_variant (type, 0, 0);
451 TYPE_ATTRIBUTES (new_type) = TYPE_ATTRIBUTES (type);
452
453 tmp_var = build_decl (VAR_DECL, prefix ? create_tmp_var_name (prefix) : NULL,
454 type);
455
456 /* The variable was declared by the compiler. */
457 DECL_ARTIFICIAL (tmp_var) = 1;
458 /* And we don't want debug info for it. */
459 DECL_IGNORED_P (tmp_var) = 1;
460
461 /* Make the variable writable. */
462 TREE_READONLY (tmp_var) = 0;
463
464 DECL_EXTERNAL (tmp_var) = 0;
465 TREE_STATIC (tmp_var) = 0;
466 TREE_USED (tmp_var) = 1;
467
468 return tmp_var;
469 }
470
471 /* Create a new temporary variable declaration of type TYPE. DOES push the
472 variable into the current binding. Further, assume that this is called
473 only from gimplification or optimization, at which point the creation of
474 certain types is a bug. */
475
476 tree
477 create_tmp_var (tree type, const char *prefix)
478 {
479 tree tmp_var;
480
481 /* We don't allow types that are addressable (meaning we can't make copies),
482 or incomplete. We also used to reject variable-sized objects here,
483 but now support those for which a constant upper bound can be obtained.
484 The processing for variable sizes is performed in gimple_add_tmp_var,
485 which is where it really matters and which may be reached via paths not going
486 through this function, e.g. after direct calls to create_tmp_var_raw. */
487 gcc_assert (!TREE_ADDRESSABLE (type) && COMPLETE_TYPE_P (type));
488
489 tmp_var = create_tmp_var_raw (type, prefix);
490 gimple_add_tmp_var (tmp_var);
491 return tmp_var;
492 }
493
494 /* Given a tree, try to return a useful variable name that we can use
495 to prefix a temporary that is being assigned the value of the tree.
496 I.E. given <temp> = &A, return A. */
497
498 const char *
499 get_name (tree t)
500 {
501 tree stripped_decl;
502
503 stripped_decl = t;
504 STRIP_NOPS (stripped_decl);
505 if (DECL_P (stripped_decl) && DECL_NAME (stripped_decl))
506 return IDENTIFIER_POINTER (DECL_NAME (stripped_decl));
507 else
508 {
509 switch (TREE_CODE (stripped_decl))
510 {
511 case ADDR_EXPR:
512 return get_name (TREE_OPERAND (stripped_decl, 0));
513 break;
514 default:
515 return NULL;
516 }
517 }
518 }
519
520 /* Create a temporary with a name derived from VAL. Subroutine of
521 lookup_tmp_var; nobody else should call this function. */
522
523 static inline tree
524 create_tmp_from_val (tree val)
525 {
526 return create_tmp_var (TYPE_MAIN_VARIANT (TREE_TYPE (val)), get_name (val));
527 }
528
529 /* Create a temporary to hold the value of VAL. If IS_FORMAL, try to reuse
530 an existing expression temporary. */
531
532 static tree
533 lookup_tmp_var (tree val, bool is_formal)
534 {
535 tree ret;
536
537 /* If not optimizing, never really reuse a temporary. local-alloc
538 won't allocate any variable that is used in more than one basic
539 block, which means it will go into memory, causing much extra
540 work in reload and final and poorer code generation, outweighing
541 the extra memory allocation here. */
542 if (!optimize || !is_formal || TREE_SIDE_EFFECTS (val))
543 ret = create_tmp_from_val (val);
544 else
545 {
546 elt_t elt, *elt_p;
547 void **slot;
548
549 elt.val = val;
550 slot = htab_find_slot (gimplify_ctxp->temp_htab, (void *)&elt, INSERT);
551 if (*slot == NULL)
552 {
553 elt_p = XNEW (elt_t);
554 elt_p->val = val;
555 elt_p->temp = ret = create_tmp_from_val (val);
556 *slot = (void *) elt_p;
557 }
558 else
559 {
560 elt_p = (elt_t *) *slot;
561 ret = elt_p->temp;
562 }
563 }
564
565 if (is_formal)
566 DECL_GIMPLE_FORMAL_TEMP_P (ret) = 1;
567
568 return ret;
569 }
570
571 /* Returns a formal temporary variable initialized with VAL. PRE_P is as
572 in gimplify_expr. Only use this function if:
573
574 1) The value of the unfactored expression represented by VAL will not
575 change between the initialization and use of the temporary, and
576 2) The temporary will not be otherwise modified.
577
578 For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
579 and #2 means it is inappropriate for && temps.
580
581 For other cases, use get_initialized_tmp_var instead. */
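/* For example (a sketch): gimplifying "x = a + b * c" may first request a
   formal temporary for "b * c", emitting "tmp = b * c" into PRE_P and
   rewriting the expression as "x = a + tmp".  */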
582
583 static tree
584 internal_get_tmp_var (tree val, tree *pre_p, tree *post_p, bool is_formal)
585 {
586 tree t, mod;
587
588 gimplify_expr (&val, pre_p, post_p, is_gimple_formal_tmp_rhs, fb_rvalue);
589
590 t = lookup_tmp_var (val, is_formal);
591
592 if (is_formal)
593 {
594 tree u = find_single_pointer_decl (val);
595
596 if (u && TREE_CODE (u) == VAR_DECL && DECL_BASED_ON_RESTRICT_P (u))
597 u = DECL_GET_RESTRICT_BASE (u);
598 if (u && TYPE_RESTRICT (TREE_TYPE (u)))
599 {
600 if (DECL_BASED_ON_RESTRICT_P (t))
601 gcc_assert (u == DECL_GET_RESTRICT_BASE (t));
602 else
603 {
604 DECL_BASED_ON_RESTRICT_P (t) = 1;
605 SET_DECL_RESTRICT_BASE (t, u);
606 }
607 }
608 }
609
610 if (TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE)
611 DECL_COMPLEX_GIMPLE_REG_P (t) = 1;
612
613 mod = build2 (INIT_EXPR, TREE_TYPE (t), t, val);
614
615 if (EXPR_HAS_LOCATION (val))
616 SET_EXPR_LOCUS (mod, EXPR_LOCUS (val));
617 else
618 SET_EXPR_LOCATION (mod, input_location);
619
620 /* gimplify_modify_expr might want to reduce this further. */
621 gimplify_and_add (mod, pre_p);
622
623 /* If we're gimplifying into ssa, gimplify_modify_expr will have
624 given our temporary an ssa name. Find and return it. */
625 if (gimplify_ctxp->into_ssa)
626 t = TREE_OPERAND (mod, 0);
627
628 return t;
629 }
630
631 /* Returns a formal temporary variable initialized with VAL. PRE_P
632 points to a statement list where side-effects needed to compute VAL
633 should be stored. */
634
635 tree
636 get_formal_tmp_var (tree val, tree *pre_p)
637 {
638 return internal_get_tmp_var (val, pre_p, NULL, true);
639 }
640
641 /* Returns a temporary variable initialized with VAL. PRE_P and POST_P
642 are as in gimplify_expr. */
643
644 tree
645 get_initialized_tmp_var (tree val, tree *pre_p, tree *post_p)
646 {
647 return internal_get_tmp_var (val, pre_p, post_p, false);
648 }
649
650 /* Declares all the variables in VARS in SCOPE. If DEBUG_INFO is
651 true, generate debug info for them; otherwise don't. */
652
653 void
654 declare_vars (tree vars, tree scope, bool debug_info)
655 {
656 tree last = vars;
657 if (last)
658 {
659 tree temps, block;
660
661 /* C99 mode puts the default 'return 0;' for main outside the outer
662 braces. So drill down until we find an actual scope. */
663 while (TREE_CODE (scope) == COMPOUND_EXPR)
664 scope = TREE_OPERAND (scope, 0);
665
666 gcc_assert (TREE_CODE (scope) == BIND_EXPR);
667
668 temps = nreverse (last);
669
670 block = BIND_EXPR_BLOCK (scope);
671 if (!block || !debug_info)
672 {
673 TREE_CHAIN (last) = BIND_EXPR_VARS (scope);
674 BIND_EXPR_VARS (scope) = temps;
675 }
676 else
677 {
678 /* We need to attach the nodes both to the BIND_EXPR and to its
679 associated BLOCK for debugging purposes. The key point here
680 is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
681 is a subchain of the BIND_EXPR_VARS of the BIND_EXPR. */
682 if (BLOCK_VARS (block))
683 BLOCK_VARS (block) = chainon (BLOCK_VARS (block), temps);
684 else
685 {
686 BIND_EXPR_VARS (scope) = chainon (BIND_EXPR_VARS (scope), temps);
687 BLOCK_VARS (block) = temps;
688 }
689 }
690 }
691 }
692
693 /* For VAR a VAR_DECL of variable size, try to find a constant upper bound
694 for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly. Abort if
695 no such upper bound can be obtained. */
696
697 static void
698 force_constant_size (tree var)
699 {
700 /* The only attempt we make is by querying the maximum size of objects
701 of the variable's type. */
702
703 HOST_WIDE_INT max_size;
704
705 gcc_assert (TREE_CODE (var) == VAR_DECL);
706
707 max_size = max_int_size_in_bytes (TREE_TYPE (var));
708
709 gcc_assert (max_size >= 0);
710
711 DECL_SIZE_UNIT (var)
712 = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var)), max_size);
713 DECL_SIZE (var)
714 = build_int_cst (TREE_TYPE (DECL_SIZE (var)), max_size * BITS_PER_UNIT);
715 }
716
717 void
718 gimple_add_tmp_var (tree tmp)
719 {
720 gcc_assert (!TREE_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));
721
722 /* Later processing assumes that the object size is constant, which might
723 not be true at this point. Force the use of a constant upper bound in
724 this case. */
725 if (!host_integerp (DECL_SIZE_UNIT (tmp), 1))
726 force_constant_size (tmp);
727
728 DECL_CONTEXT (tmp) = current_function_decl;
729 DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;
730
731 if (gimplify_ctxp)
732 {
733 TREE_CHAIN (tmp) = gimplify_ctxp->temps;
734 gimplify_ctxp->temps = tmp;
735
736 /* Mark temporaries local within the nearest enclosing parallel. */
737 if (gimplify_omp_ctxp)
738 {
739 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
740 while (ctx && !ctx->is_parallel)
741 ctx = ctx->outer_context;
742 if (ctx)
743 omp_add_variable (ctx, tmp, GOVD_LOCAL | GOVD_SEEN);
744 }
745 }
746 else if (cfun)
747 record_vars (tmp);
748 else
749 declare_vars (tmp, DECL_SAVED_TREE (current_function_decl), false);
750 }
751
752 /* Determines whether to assign a locus to the statement STMT. */
753
754 static bool
755 should_carry_locus_p (tree stmt)
756 {
757 /* Don't emit a line note for a label. We particularly don't want to
758 emit one for the break label, since it doesn't actually correspond
759 to the beginning of the loop/switch. */
760 if (TREE_CODE (stmt) == LABEL_EXPR)
761 return false;
762
763 /* Do not annotate empty statements, since it confuses gcov. */
764 if (!TREE_SIDE_EFFECTS (stmt))
765 return false;
766
767 return true;
768 }
769
770 static void
771 annotate_one_with_locus (tree t, location_t locus)
772 {
773 if (EXPR_P (t) && ! EXPR_HAS_LOCATION (t) && should_carry_locus_p (t))
774 SET_EXPR_LOCATION (t, locus);
775 }
776
777 void
778 annotate_all_with_locus (tree *stmt_p, location_t locus)
779 {
780 tree_stmt_iterator i;
781
782 if (!*stmt_p)
783 return;
784
785 for (i = tsi_start (*stmt_p); !tsi_end_p (i); tsi_next (&i))
786 {
787 tree t = tsi_stmt (i);
788
789 /* Assuming we've already been gimplified, we shouldn't
790 see nested chaining constructs anymore. */
791 gcc_assert (TREE_CODE (t) != STATEMENT_LIST
792 && TREE_CODE (t) != COMPOUND_EXPR);
793
794 annotate_one_with_locus (t, locus);
795 }
796 }
797
798 /* Similar to copy_tree_r() but do not copy SAVE_EXPR or TARGET_EXPR nodes.
799 These nodes model computations that should only be done once. If we
800 were to unshare something like SAVE_EXPR(i++), the gimplification
801 process would create wrong code. */
802
803 static tree
804 mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data)
805 {
806 enum tree_code code = TREE_CODE (*tp);
807 /* Don't unshare types, decls, constants and SAVE_EXPR nodes. */
808 if (TREE_CODE_CLASS (code) == tcc_type
809 || TREE_CODE_CLASS (code) == tcc_declaration
810 || TREE_CODE_CLASS (code) == tcc_constant
811 || code == SAVE_EXPR || code == TARGET_EXPR
812 /* We can't do anything sensible with a BLOCK used as an expression,
813 but we also can't just die when we see it because of non-expression
814 uses. So just avert our eyes and cross our fingers. Silly Java. */
815 || code == BLOCK)
816 *walk_subtrees = 0;
817 else
818 {
819 gcc_assert (code != BIND_EXPR);
820 copy_tree_r (tp, walk_subtrees, data);
821 }
822
823 return NULL_TREE;
824 }
825
826 /* Callback for walk_tree to unshare most of the shared trees rooted at
827 *TP. If *TP has been visited already (i.e., TREE_VISITED (*TP) == 1),
828 then *TP is deep copied by calling copy_tree_r.
829
830 This unshares the same trees as copy_tree_r with the exception of
831 SAVE_EXPR nodes. These nodes model computations that should only be
832 done once. If we were to unshare something like SAVE_EXPR(i++), the
833 gimplification process would create wrong code. */
834
835 static tree
836 copy_if_shared_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
837 void *data ATTRIBUTE_UNUSED)
838 {
839 tree t = *tp;
840 enum tree_code code = TREE_CODE (t);
841
842 /* Skip types, decls, and constants. But we do want to look at their
843 types and the bounds of types. Mark them as visited so we properly
844 unmark their subtrees on the unmark pass. If we've already seen them,
845 don't look down further. */
846 if (TREE_CODE_CLASS (code) == tcc_type
847 || TREE_CODE_CLASS (code) == tcc_declaration
848 || TREE_CODE_CLASS (code) == tcc_constant)
849 {
850 if (TREE_VISITED (t))
851 *walk_subtrees = 0;
852 else
853 TREE_VISITED (t) = 1;
854 }
855
856 /* If this node has been visited already, unshare it and don't look
857 any deeper. */
858 else if (TREE_VISITED (t))
859 {
860 walk_tree (tp, mostly_copy_tree_r, NULL, NULL);
861 *walk_subtrees = 0;
862 }
863
864 /* Otherwise, mark the tree as visited and keep looking. */
865 else
866 TREE_VISITED (t) = 1;
867
868 return NULL_TREE;
869 }
870
871 static tree
872 unmark_visited_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
873 void *data ATTRIBUTE_UNUSED)
874 {
875 if (TREE_VISITED (*tp))
876 TREE_VISITED (*tp) = 0;
877 else
878 *walk_subtrees = 0;
879
880 return NULL_TREE;
881 }
882
883 /* Unshare all the trees in BODY_P, a pointer into the body of FNDECL, and the
884 bodies of any nested functions if we are unsharing the entire body of
885 FNDECL. */
886
887 static void
888 unshare_body (tree *body_p, tree fndecl)
889 {
890 struct cgraph_node *cgn = cgraph_node (fndecl);
891
892 walk_tree (body_p, copy_if_shared_r, NULL, NULL);
893 if (body_p == &DECL_SAVED_TREE (fndecl))
894 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
895 unshare_body (&DECL_SAVED_TREE (cgn->decl), cgn->decl);
896 }
897
898 /* Likewise, but mark all trees as not visited. */
899
900 static void
901 unvisit_body (tree *body_p, tree fndecl)
902 {
903 struct cgraph_node *cgn = cgraph_node (fndecl);
904
905 walk_tree (body_p, unmark_visited_r, NULL, NULL);
906 if (body_p == &DECL_SAVED_TREE (fndecl))
907 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
908 unvisit_body (&DECL_SAVED_TREE (cgn->decl), cgn->decl);
909 }
910
911 /* Unshare T and all the trees reached from T via TREE_CHAIN. */
912
913 static void
914 unshare_all_trees (tree t)
915 {
916 walk_tree (&t, copy_if_shared_r, NULL, NULL);
917 walk_tree (&t, unmark_visited_r, NULL, NULL);
918 }
919
920 /* Unconditionally make an unshared copy of EXPR. This is used when using
921 stored expressions which span multiple functions, such as BINFO_VTABLE,
922 as the normal unsharing process can't tell that they're shared. */
923
924 tree
925 unshare_expr (tree expr)
926 {
927 walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
928 return expr;
929 }
930
931 /* A terser interface for building a representation of an exception
932 specification. */
933
934 tree
935 gimple_build_eh_filter (tree body, tree allowed, tree failure)
936 {
937 tree t;
938
939 /* FIXME should the allowed types go in TREE_TYPE? */
940 t = build2 (EH_FILTER_EXPR, void_type_node, allowed, NULL_TREE);
941 append_to_statement_list (failure, &EH_FILTER_FAILURE (t));
942
943 t = build2 (TRY_CATCH_EXPR, void_type_node, NULL_TREE, t);
944 append_to_statement_list (body, &TREE_OPERAND (t, 0));
945
946 return t;
947 }
948
949
950 /* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
951 contain statements and have a value. Assign its value to a temporary
952 and give it void_type_node. Returns the temporary, or NULL_TREE if
953 WRAPPER was already void. */
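/* For example (a sketch): for a GNU statement expression
     ({ int i = f (); i + 1; })
   used for its value, the BIND_EXPR is given void type and its last
   statement is rewritten as an initialization of a "retval" temporary,
   which is what this function returns.  */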
954
955 tree
956 voidify_wrapper_expr (tree wrapper, tree temp)
957 {
958 tree type = TREE_TYPE (wrapper);
959 if (type && !VOID_TYPE_P (type))
960 {
961 tree *p;
962
963 /* Set p to point to the body of the wrapper. Loop until we find
964 something that isn't a wrapper. */
965 for (p = &wrapper; p && *p; )
966 {
967 switch (TREE_CODE (*p))
968 {
969 case BIND_EXPR:
970 TREE_SIDE_EFFECTS (*p) = 1;
971 TREE_TYPE (*p) = void_type_node;
972 /* For a BIND_EXPR, the body is operand 1. */
973 p = &BIND_EXPR_BODY (*p);
974 break;
975
976 case CLEANUP_POINT_EXPR:
977 case TRY_FINALLY_EXPR:
978 case TRY_CATCH_EXPR:
979 TREE_SIDE_EFFECTS (*p) = 1;
980 TREE_TYPE (*p) = void_type_node;
981 p = &TREE_OPERAND (*p, 0);
982 break;
983
984 case STATEMENT_LIST:
985 {
986 tree_stmt_iterator i = tsi_last (*p);
987 TREE_SIDE_EFFECTS (*p) = 1;
988 TREE_TYPE (*p) = void_type_node;
989 p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i);
990 }
991 break;
992
993 case COMPOUND_EXPR:
994 /* Advance to the last statement. Set all container types to void. */
995 for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1))
996 {
997 TREE_SIDE_EFFECTS (*p) = 1;
998 TREE_TYPE (*p) = void_type_node;
999 }
1000 break;
1001
1002 default:
1003 goto out;
1004 }
1005 }
1006
1007 out:
1008 if (p == NULL || IS_EMPTY_STMT (*p))
1009 temp = NULL_TREE;
1010 else if (temp)
1011 {
1012 /* The wrapper is on the RHS of an assignment that we're pushing
1013 down. */
1014 gcc_assert (TREE_CODE (temp) == INIT_EXPR
1015 || TREE_CODE (temp) == MODIFY_EXPR);
1016 TREE_OPERAND (temp, 1) = *p;
1017 *p = temp;
1018 }
1019 else
1020 {
1021 temp = create_tmp_var (type, "retval");
1022 *p = build2 (INIT_EXPR, type, temp, *p);
1023 }
1024
1025 return temp;
1026 }
1027
1028 return NULL_TREE;
1029 }
1030
1031 /* Prepare calls to builtins to SAVE and RESTORE the stack as well as
1032 a temporary through which they communicate. */
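/* The emitted sequence looks roughly like
     saved_stack.N = __builtin_stack_save ();
     ...
     __builtin_stack_restore (saved_stack.N);
   (a sketch; the temporary is created by create_tmp_var below).  */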
1033
1034 static void
1035 build_stack_save_restore (tree *save, tree *restore)
1036 {
1037 tree save_call, tmp_var;
1038
1039 save_call =
1040 build_function_call_expr (implicit_built_in_decls[BUILT_IN_STACK_SAVE],
1041 NULL_TREE);
1042 tmp_var = create_tmp_var (ptr_type_node, "saved_stack");
1043
1044 *save = build2 (MODIFY_EXPR, ptr_type_node, tmp_var, save_call);
1045 *restore =
1046 build_function_call_expr (implicit_built_in_decls[BUILT_IN_STACK_RESTORE],
1047 tree_cons (NULL_TREE, tmp_var, NULL_TREE));
1048 }
1049
1050 /* Gimplify a BIND_EXPR. Just voidify and recurse. */
1051
1052 static enum gimplify_status
1053 gimplify_bind_expr (tree *expr_p, tree *pre_p)
1054 {
1055 tree bind_expr = *expr_p;
1056 bool old_save_stack = gimplify_ctxp->save_stack;
1057 tree t;
1058
1059 tree temp = voidify_wrapper_expr (bind_expr, NULL);
1060
1061 /* Mark variables seen in this bind expr. */
1062 for (t = BIND_EXPR_VARS (bind_expr); t ; t = TREE_CHAIN (t))
1063 {
1064 if (TREE_CODE (t) == VAR_DECL)
1065 {
1066 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
1067
1068 /* Mark variable as local. */
1069 if (ctx && !is_global_var (t)
1070 && (! DECL_SEEN_IN_BIND_EXPR_P (t)
1071 || splay_tree_lookup (ctx->variables,
1072 (splay_tree_key) t) == NULL))
1073 omp_add_variable (gimplify_omp_ctxp, t, GOVD_LOCAL | GOVD_SEEN);
1074
1075 DECL_SEEN_IN_BIND_EXPR_P (t) = 1;
1076 }
1077
1078 /* Preliminarily mark non-addressed complex variables as eligible
1079 for promotion to gimple registers. We'll transform their uses
1080 as we find them. */
1081 if (TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
1082 && !TREE_THIS_VOLATILE (t)
1083 && (TREE_CODE (t) == VAR_DECL && !DECL_HARD_REGISTER (t))
1084 && !needs_to_live_in_memory (t))
1085 DECL_COMPLEX_GIMPLE_REG_P (t) = 1;
1086 }
1087
1088 gimple_push_bind_expr (bind_expr);
1089 gimplify_ctxp->save_stack = false;
1090
1091 gimplify_to_stmt_list (&BIND_EXPR_BODY (bind_expr));
1092
1093 if (gimplify_ctxp->save_stack)
1094 {
1095 tree stack_save, stack_restore;
1096
1097 /* Save stack on entry and restore it on exit. Add a try_finally
1098 block to achieve this. Note that mudflap depends on the
1099 format of the emitted code: see mx_register_decls(). */
1100 build_stack_save_restore (&stack_save, &stack_restore);
1101
1102 t = build2 (TRY_FINALLY_EXPR, void_type_node,
1103 BIND_EXPR_BODY (bind_expr), NULL_TREE);
1104 append_to_statement_list (stack_restore, &TREE_OPERAND (t, 1));
1105
1106 BIND_EXPR_BODY (bind_expr) = NULL_TREE;
1107 append_to_statement_list (stack_save, &BIND_EXPR_BODY (bind_expr));
1108 append_to_statement_list (t, &BIND_EXPR_BODY (bind_expr));
1109 }
1110
1111 gimplify_ctxp->save_stack = old_save_stack;
1112 gimple_pop_bind_expr ();
1113
1114 if (temp)
1115 {
1116 *expr_p = temp;
1117 append_to_statement_list (bind_expr, pre_p);
1118 return GS_OK;
1119 }
1120 else
1121 return GS_ALL_DONE;
1122 }
1123
1124 /* Gimplify a RETURN_EXPR. If the expression to be returned is not a
1125 GIMPLE value, it is assigned to a new temporary and the statement is
1126 re-written to return the temporary.
1127
1128 PRE_P points to the list where side effects that must happen before
1129 STMT should be stored. */
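/* For example (a sketch): for "return a + b;" the computation of "a + b"
   is gimplified into PRE_P as an assignment to a return temporary, and
   the RETURN_EXPR is rewritten to copy that temporary into the
   RESULT_DECL.  */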
1130
1131 static enum gimplify_status
1132 gimplify_return_expr (tree stmt, tree *pre_p)
1133 {
1134 tree ret_expr = TREE_OPERAND (stmt, 0);
1135 tree result_decl, result;
1136
1137 if (!ret_expr || TREE_CODE (ret_expr) == RESULT_DECL
1138 || ret_expr == error_mark_node)
1139 return GS_ALL_DONE;
1140
1141 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
1142 result_decl = NULL_TREE;
1143 else
1144 {
1145 result_decl = TREE_OPERAND (ret_expr, 0);
1146 if (TREE_CODE (result_decl) == INDIRECT_REF)
1147 /* See through a return by reference. */
1148 result_decl = TREE_OPERAND (result_decl, 0);
1149
1150 gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR
1151 || TREE_CODE (ret_expr) == INIT_EXPR)
1152 && TREE_CODE (result_decl) == RESULT_DECL);
1153 }
1154
1155 /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
1156 Recall that aggregate_value_p is FALSE for any aggregate type that is
1157 returned in registers. If we're returning values in registers, then
1158 we don't want to extend the lifetime of the RESULT_DECL, particularly
1159 across another call. In addition, for those aggregates for which
1160 hard_function_value generates a PARALLEL, we'll die during normal
1161 expansion of structure assignments; there's special code in expand_return
1162 to handle this case that does not exist in expand_expr. */
1163 if (!result_decl
1164 || aggregate_value_p (result_decl, TREE_TYPE (current_function_decl)))
1165 result = result_decl;
1166 else if (gimplify_ctxp->return_temp)
1167 result = gimplify_ctxp->return_temp;
1168 else
1169 {
1170 result = create_tmp_var (TREE_TYPE (result_decl), NULL);
1171
1172 /* ??? With complex control flow (usually involving abnormal edges),
1173 we can wind up warning about an uninitialized value for this. Due
1174 to how this variable is constructed and initialized, this is never
1175 true. Give up and never warn. */
1176 TREE_NO_WARNING (result) = 1;
1177
1178 gimplify_ctxp->return_temp = result;
1179 }
1180
1181 /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
1182 Then gimplify the whole thing. */
1183 if (result != result_decl)
1184 TREE_OPERAND (ret_expr, 0) = result;
1185
1186 gimplify_and_add (TREE_OPERAND (stmt, 0), pre_p);
1187
1188 /* If we didn't use a temporary, then the result is just the result_decl.
1189 Otherwise we need a simple copy. This should already be gimple. */
1190 if (result == result_decl)
1191 ret_expr = result;
1192 else
1193 ret_expr = build2 (MODIFY_EXPR, TREE_TYPE (result), result_decl, result);
1194 TREE_OPERAND (stmt, 0) = ret_expr;
1195
1196 return GS_ALL_DONE;
1197 }
1198
1199 /* Gimplifies a DECL_EXPR node *STMT_P by making any necessary allocation
1200 and initialization explicit. */
1201
1202 static enum gimplify_status
1203 gimplify_decl_expr (tree *stmt_p)
1204 {
1205 tree stmt = *stmt_p;
1206 tree decl = DECL_EXPR_DECL (stmt);
1207
1208 *stmt_p = NULL_TREE;
1209
1210 if (TREE_TYPE (decl) == error_mark_node)
1211 return GS_ERROR;
1212
1213 if ((TREE_CODE (decl) == TYPE_DECL
1214 || TREE_CODE (decl) == VAR_DECL)
1215 && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl)))
1216 gimplify_type_sizes (TREE_TYPE (decl), stmt_p);
1217
1218 if (TREE_CODE (decl) == VAR_DECL && !DECL_EXTERNAL (decl))
1219 {
1220 tree init = DECL_INITIAL (decl);
1221
1222 if (TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
1223 {
1224 /* This is a variable-sized decl. Simplify its size and mark it
1225 for deferred expansion. Note that mudflap depends on the format
1226 of the emitted code: see mx_register_decls(). */
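/* For example (a sketch), for "int a[n]" this emits roughly
     a.N = (int (*)[n]) __builtin_alloca (<size in bytes>);
   and the DECL_VALUE_EXPR set below makes later uses of "a" gimplify
   to "*a.N".  */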
1227 tree t, args, addr, ptr_type;
1228
1229 gimplify_one_sizepos (&DECL_SIZE (decl), stmt_p);
1230 gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), stmt_p);
1231
1232 /* All occurrences of this decl in final gimplified code will be
1233 replaced by indirection. Setting DECL_VALUE_EXPR does two
1234 things: First, it lets the rest of the gimplifier know what
1235 replacement to use. Second, it lets the debug info know
1236 where to find the value. */
1237 ptr_type = build_pointer_type (TREE_TYPE (decl));
1238 addr = create_tmp_var (ptr_type, get_name (decl));
1239 DECL_IGNORED_P (addr) = 0;
1240 t = build_fold_indirect_ref (addr);
1241 SET_DECL_VALUE_EXPR (decl, t);
1242 DECL_HAS_VALUE_EXPR_P (decl) = 1;
1243
1244 args = tree_cons (NULL, DECL_SIZE_UNIT (decl), NULL);
1245 t = built_in_decls[BUILT_IN_ALLOCA];
1246 t = build_function_call_expr (t, args);
1247 t = fold_convert (ptr_type, t);
1248 t = build2 (MODIFY_EXPR, void_type_node, addr, t);
1249
1250 gimplify_and_add (t, stmt_p);
1251
1252 /* Indicate that we need to restore the stack level when the
1253 enclosing BIND_EXPR is exited. */
1254 gimplify_ctxp->save_stack = true;
1255 }
1256
1257 if (init && init != error_mark_node)
1258 {
1259 if (!TREE_STATIC (decl))
1260 {
1261 DECL_INITIAL (decl) = NULL_TREE;
1262 init = build2 (INIT_EXPR, void_type_node, decl, init);
1263 gimplify_and_add (init, stmt_p);
1264 }
1265 else
1266 /* We must still examine initializers for static variables
1267 as they may contain a label address. */
1268 walk_tree (&init, force_labels_r, NULL, NULL);
1269 }
1270
1271 /* Some front ends do not explicitly declare all anonymous
1272 artificial variables. We compensate here by declaring the
1273 variables, though it would be better if the front ends would
1274 explicitly declare them. */
1275 if (!DECL_SEEN_IN_BIND_EXPR_P (decl)
1276 && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
1277 gimple_add_tmp_var (decl);
1278 }
1279
1280 return GS_ALL_DONE;
1281 }
1282
1283 /* Gimplify a LOOP_EXPR. Normally this just involves gimplifying the body
1284 and replacing the LOOP_EXPR with goto, but if the loop contains an
1285 EXIT_EXPR, we need to append a label for it to jump to. */
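/* For example (a sketch):
     LOOP_EXPR <body>
   becomes roughly
     start:;
     <body>          (an EXIT_EXPR inside becomes "if (cond) goto exit_label;")
     goto start;
     exit_label:;    (only emitted if an EXIT_EXPR was seen)  */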
1286
1287 static enum gimplify_status
1288 gimplify_loop_expr (tree *expr_p, tree *pre_p)
1289 {
1290 tree saved_label = gimplify_ctxp->exit_label;
1291 tree start_label = build1 (LABEL_EXPR, void_type_node, NULL_TREE);
1292 tree jump_stmt = build_and_jump (&LABEL_EXPR_LABEL (start_label));
1293
1294 append_to_statement_list (start_label, pre_p);
1295
1296 gimplify_ctxp->exit_label = NULL_TREE;
1297
1298 gimplify_and_add (LOOP_EXPR_BODY (*expr_p), pre_p);
1299
1300 if (gimplify_ctxp->exit_label)
1301 {
1302 append_to_statement_list (jump_stmt, pre_p);
1303 *expr_p = build1 (LABEL_EXPR, void_type_node, gimplify_ctxp->exit_label);
1304 }
1305 else
1306 *expr_p = jump_stmt;
1307
1308 gimplify_ctxp->exit_label = saved_label;
1309
1310 return GS_ALL_DONE;
1311 }
1312
1313 /* Compare two case labels. Because the front end should already have
1314 made sure that case ranges do not overlap, it is enough to only compare
1315 the CASE_LOW values of each case label. */
1316
1317 static int
1318 compare_case_labels (const void *p1, const void *p2)
1319 {
1320 tree case1 = *(tree *)p1;
1321 tree case2 = *(tree *)p2;
1322
1323 return tree_int_cst_compare (CASE_LOW (case1), CASE_LOW (case2));
1324 }
1325
1326 /* Sort the case labels in LABEL_VEC in place in ascending order. */
1327
1328 void
1329 sort_case_labels (tree label_vec)
1330 {
1331 size_t len = TREE_VEC_LENGTH (label_vec);
1332 tree default_case = TREE_VEC_ELT (label_vec, len - 1);
1333
1334 if (CASE_LOW (default_case))
1335 {
1336 size_t i;
1337
1338 /* The last label in the vector should be the default case
1339 but it is not. */
1340 for (i = 0; i < len; ++i)
1341 {
1342 tree t = TREE_VEC_ELT (label_vec, i);
1343 if (!CASE_LOW (t))
1344 {
1345 default_case = t;
1346 TREE_VEC_ELT (label_vec, i) = TREE_VEC_ELT (label_vec, len - 1);
1347 TREE_VEC_ELT (label_vec, len - 1) = default_case;
1348 break;
1349 }
1350 }
1351 }
1352
1353 qsort (&TREE_VEC_ELT (label_vec, 0), len - 1, sizeof (tree),
1354 compare_case_labels);
1355 }
1356
1357 /* Gimplify a SWITCH_EXPR, and collect a TREE_VEC of the labels it can
1358 branch to. */
1359
1360 static enum gimplify_status
1361 gimplify_switch_expr (tree *expr_p, tree *pre_p)
1362 {
1363 tree switch_expr = *expr_p;
1364 enum gimplify_status ret;
1365
1366 ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL,
1367 is_gimple_val, fb_rvalue);
1368
1369 if (SWITCH_BODY (switch_expr))
1370 {
1371 VEC(tree,heap) *labels, *saved_labels;
1372 tree label_vec, default_case = NULL_TREE;
1373 size_t i, len;
1374
1375 /* If someone can be bothered to fill in the labels, they can
1376 be bothered to null out the body too. */
1377 gcc_assert (!SWITCH_LABELS (switch_expr));
1378
1379 saved_labels = gimplify_ctxp->case_labels;
1380 gimplify_ctxp->case_labels = VEC_alloc (tree, heap, 8);
1381
1382 gimplify_to_stmt_list (&SWITCH_BODY (switch_expr));
1383
1384 labels = gimplify_ctxp->case_labels;
1385 gimplify_ctxp->case_labels = saved_labels;
1386
1387 i = 0;
1388 while (i < VEC_length (tree, labels))
1389 {
1390 tree elt = VEC_index (tree, labels, i);
1391 tree low = CASE_LOW (elt);
1392 bool remove_element = FALSE;
1393
1394 if (low)
1395 {
1396 /* Discard empty ranges. */
1397 tree high = CASE_HIGH (elt);
1398 if (high && INT_CST_LT (high, low))
1399 remove_element = TRUE;
1400 }
1401 else
1402 {
1403 /* The default case must be the last label in the list. */
1404 gcc_assert (!default_case);
1405 default_case = elt;
1406 remove_element = TRUE;
1407 }
1408
1409 if (remove_element)
1410 VEC_ordered_remove (tree, labels, i);
1411 else
1412 i++;
1413 }
1414 len = i;
1415
1416 label_vec = make_tree_vec (len + 1);
1417 SWITCH_LABELS (*expr_p) = label_vec;
1418 append_to_statement_list (switch_expr, pre_p);
1419
1420 if (! default_case)
1421 {
1422 /* If the switch has no default label, add one, so that we jump
1423 around the switch body. */
1424 default_case = build3 (CASE_LABEL_EXPR, void_type_node, NULL_TREE,
1425 NULL_TREE, create_artificial_label ());
1426 append_to_statement_list (SWITCH_BODY (switch_expr), pre_p);
1427 *expr_p = build1 (LABEL_EXPR, void_type_node,
1428 CASE_LABEL (default_case));
1429 }
1430 else
1431 *expr_p = SWITCH_BODY (switch_expr);
1432
1433 for (i = 0; i < len; ++i)
1434 TREE_VEC_ELT (label_vec, i) = VEC_index (tree, labels, i);
1435 TREE_VEC_ELT (label_vec, len) = default_case;
1436
1437 VEC_free (tree, heap, labels);
1438
1439 sort_case_labels (label_vec);
1440
1441 SWITCH_BODY (switch_expr) = NULL;
1442 }
1443 else
1444 gcc_assert (SWITCH_LABELS (switch_expr));
1445
1446 return ret;
1447 }
1448
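/* Gimplify a CASE_LABEL_EXPR: record it with the innermost enclosing
   gimplify context that is collecting case labels (see
   gimplify_switch_expr), and replace it with a plain LABEL_EXPR for its
   CASE_LABEL.  */
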
1449 static enum gimplify_status
1450 gimplify_case_label_expr (tree *expr_p)
1451 {
1452 tree expr = *expr_p;
1453 struct gimplify_ctx *ctxp;
1454
1455 /* Invalid OpenMP programs can play Duff's Device type games with
1456 #pragma omp parallel. At least in the C front end, we don't
1457 detect such invalid branches until after gimplification. */
1458 for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context)
1459 if (ctxp->case_labels)
1460 break;
1461
1462 VEC_safe_push (tree, heap, ctxp->case_labels, expr);
1463 *expr_p = build1 (LABEL_EXPR, void_type_node, CASE_LABEL (expr));
1464 return GS_ALL_DONE;
1465 }
1466
1467 /* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
1468 if necessary. */
1469
1470 tree
1471 build_and_jump (tree *label_p)
1472 {
1473 if (label_p == NULL)
1474 /* If there's nowhere to jump, just fall through. */
1475 return NULL_TREE;
1476
1477 if (*label_p == NULL_TREE)
1478 {
1479 tree label = create_artificial_label ();
1480 *label_p = label;
1481 }
1482
1483 return build1 (GOTO_EXPR, void_type_node, *label_p);
1484 }
1485
1486 /* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
1487 This also involves building a label to jump to and communicating it to
1488 gimplify_loop_expr through gimplify_ctxp->exit_label. */
1489
1490 static enum gimplify_status
1491 gimplify_exit_expr (tree *expr_p)
1492 {
1493 tree cond = TREE_OPERAND (*expr_p, 0);
1494 tree expr;
1495
1496 expr = build_and_jump (&gimplify_ctxp->exit_label);
1497 expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE);
1498 *expr_p = expr;
1499
1500 return GS_OK;
1501 }
1502
1503 /* A helper function to be called via walk_tree. Mark all labels under *TP
1504 as being forced. To be called for DECL_INITIAL of static variables. */
1505
1506 tree
1507 force_labels_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1508 {
1509 if (TYPE_P (*tp))
1510 *walk_subtrees = 0;
1511 if (TREE_CODE (*tp) == LABEL_DECL)
1512 FORCED_LABEL (*tp) = 1;
1513
1514 return NULL_TREE;
1515 }
1516
1517 /* *EXPR_P is a COMPONENT_REF being used as an rvalue. If its type is
1518 different from its canonical type, wrap the whole thing inside a
1519 NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
1520 type.
1521
1522 The canonical type of a COMPONENT_REF is the type of the field being
1523 referenced--unless the field is a bit-field which can be read directly
1524 in a smaller mode, in which case the canonical type is the
1525 sign-appropriate type corresponding to that mode. */
1526
1527 static void
1528 canonicalize_component_ref (tree *expr_p)
1529 {
1530 tree expr = *expr_p;
1531 tree type;
1532
1533 gcc_assert (TREE_CODE (expr) == COMPONENT_REF);
1534
1535 if (INTEGRAL_TYPE_P (TREE_TYPE (expr)))
1536 type = TREE_TYPE (get_unwidened (expr, NULL_TREE));
1537 else
1538 type = TREE_TYPE (TREE_OPERAND (expr, 1));
1539
1540 if (TREE_TYPE (expr) != type)
1541 {
1542 tree old_type = TREE_TYPE (expr);
1543
1544 /* Set the type of the COMPONENT_REF to the underlying type. */
1545 TREE_TYPE (expr) = type;
1546
1547 /* And wrap the whole thing inside a NOP_EXPR. */
1548 expr = build1 (NOP_EXPR, old_type, expr);
1549
1550 *expr_p = expr;
1551 }
1552 }
1553
1554 /* If a NOP conversion is changing a pointer to array of foo to a pointer
1555 to foo, embed that change in the ADDR_EXPR by converting
1556 T array[U];
1557 (T *)&array
1558 ==>
1559 &array[L]
1560 where L is the lower bound. For simplicity, only do this for constant
1561 lower bound. */
1562
1563 static void
1564 canonicalize_addr_expr (tree *expr_p)
1565 {
1566 tree expr = *expr_p;
1567 tree ctype = TREE_TYPE (expr);
1568 tree addr_expr = TREE_OPERAND (expr, 0);
1569 tree atype = TREE_TYPE (addr_expr);
1570 tree dctype, datype, ddatype, otype, obj_expr;
1571
1572 /* Both cast and addr_expr types should be pointers. */
1573 if (!POINTER_TYPE_P (ctype) || !POINTER_TYPE_P (atype))
1574 return;
1575
1576 /* The addr_expr type should be a pointer to an array. */
1577 datype = TREE_TYPE (atype);
1578 if (TREE_CODE (datype) != ARRAY_TYPE)
1579 return;
1580
1581 /* Both cast and addr_expr types should address the same object type. */
1582 dctype = TREE_TYPE (ctype);
1583 ddatype = TREE_TYPE (datype);
1584 if (!lang_hooks.types_compatible_p (ddatype, dctype))
1585 return;
1586
1587 /* The addr_expr and the object type should match. */
1588 obj_expr = TREE_OPERAND (addr_expr, 0);
1589 otype = TREE_TYPE (obj_expr);
1590 if (!lang_hooks.types_compatible_p (otype, datype))
1591 return;
1592
1593 /* The lower bound and element sizes must be constant. */
1594 if (!TYPE_SIZE_UNIT (dctype)
1595 || TREE_CODE (TYPE_SIZE_UNIT (dctype)) != INTEGER_CST
1596 || !TYPE_DOMAIN (datype) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype))
1597 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype))) != INTEGER_CST)
1598 return;
1599
1600 /* All checks succeeded. Build a new node to merge the cast. */
1601 *expr_p = build4 (ARRAY_REF, dctype, obj_expr,
1602 TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
1603 TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
1604 size_binop (EXACT_DIV_EXPR, TYPE_SIZE_UNIT (dctype),
1605 size_int (TYPE_ALIGN_UNIT (dctype))));
1606 *expr_p = build1 (ADDR_EXPR, ctype, *expr_p);
1607 }
1608
1609 /* *EXPR_P is a NOP_EXPR or CONVERT_EXPR. Remove it and/or other conversions
1610 underneath as appropriate. */
1611
1612 static enum gimplify_status
1613 gimplify_conversion (tree *expr_p)
1614 {
1615 gcc_assert (TREE_CODE (*expr_p) == NOP_EXPR
1616 || TREE_CODE (*expr_p) == CONVERT_EXPR);
1617
1618 /* Then strip away all but the outermost conversion. */
1619 STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p, 0));
1620
1621 /* And remove the outermost conversion if it's useless. */
1622 if (tree_ssa_useless_type_conversion (*expr_p))
1623 *expr_p = TREE_OPERAND (*expr_p, 0);
1624
1625 /* If we still have a conversion at the toplevel,
1626 then canonicalize some constructs. */
1627 if (TREE_CODE (*expr_p) == NOP_EXPR || TREE_CODE (*expr_p) == CONVERT_EXPR)
1628 {
1629 tree sub = TREE_OPERAND (*expr_p, 0);
1630
1631 /* If a NOP conversion is changing the type of a COMPONENT_REF
1632 expression, then canonicalize its type now in order to expose more
1633 redundant conversions. */
1634 if (TREE_CODE (sub) == COMPONENT_REF)
1635 canonicalize_component_ref (&TREE_OPERAND (*expr_p, 0));
1636
1637 /* If a NOP conversion is changing a pointer to array of foo
1638 to a pointer to foo, embed that change in the ADDR_EXPR. */
1639 else if (TREE_CODE (sub) == ADDR_EXPR)
1640 canonicalize_addr_expr (expr_p);
1641 }
1642
1643 return GS_OK;
1644 }
1645
1646 /* Gimplify a VAR_DECL or PARM_DECL. Returns GS_OK if we expanded a
1647 DECL_VALUE_EXPR, and it's worth re-examining things. */
1648
1649 static enum gimplify_status
1650 gimplify_var_or_parm_decl (tree *expr_p)
1651 {
1652 tree decl = *expr_p;
1653
1654 /* ??? If this is a local variable, and it has not been seen in any
1655 outer BIND_EXPR, then it's probably the result of a duplicate
1656 declaration, for which we've already issued an error. It would
1657 be really nice if the front end wouldn't leak these at all.
1658 Currently the only known culprit is C++ destructors, as seen
1659 in g++.old-deja/g++.jason/binding.C. */
1660 if (TREE_CODE (decl) == VAR_DECL
1661 && !DECL_SEEN_IN_BIND_EXPR_P (decl)
1662 && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
1663 && decl_function_context (decl) == current_function_decl)
1664 {
1665 gcc_assert (errorcount || sorrycount);
1666 return GS_ERROR;
1667 }
1668
1669 /* When within an OpenMP context, notice uses of variables. */
1670 if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true))
1671 return GS_ALL_DONE;
1672
1673 /* If the decl is an alias for another expression, substitute it now. */
1674 if (DECL_HAS_VALUE_EXPR_P (decl))
1675 {
1676 *expr_p = unshare_expr (DECL_VALUE_EXPR (decl));
1677 return GS_OK;
1678 }
1679
1680 return GS_ALL_DONE;
1681 }
1682
1683
1684 /* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
1685 node pointed to by EXPR_P.
1686
1687 compound_lval
1688 : min_lval '[' val ']'
1689 | min_lval '.' ID
1690 | compound_lval '[' val ']'
1691 | compound_lval '.' ID
1692
1693 This is not part of the original SIMPLE definition, which separates
1694 array and member references, but it seems reasonable to handle them
1695 together. Also, this way we don't run into problems with union
1696 aliasing; gcc requires that for accesses through a union to alias, the
1697 union reference must be explicit, which was not always the case when we
1698 were splitting up array and member refs.
1699
1700 PRE_P points to the list where side effects that must happen before
1701 *EXPR_P should be stored.
1702
1703 POST_P points to the list where side effects that must happen after
1704 *EXPR_P should be stored. */
1705
1706 static enum gimplify_status
1707 gimplify_compound_lval (tree *expr_p, tree *pre_p,
1708 tree *post_p, fallback_t fallback)
1709 {
1710 tree *p;
1711 VEC(tree,heap) *stack;
1712 enum gimplify_status ret = GS_OK, tret;
1713 int i;
1714
1715 /* Create a stack of the subexpressions so later we can walk them in
1716 order from inner to outer. */
1717 stack = VEC_alloc (tree, heap, 10);
1718
1719 /* We can handle anything that get_inner_reference can deal with. */
1720 for (p = expr_p; ; p = &TREE_OPERAND (*p, 0))
1721 {
1722 restart:
1723 /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs. */
1724 if (TREE_CODE (*p) == INDIRECT_REF)
1725 *p = fold_indirect_ref (*p);
1726
1727 if (handled_component_p (*p))
1728 ;
1729 /* Expand DECL_VALUE_EXPR now. In some cases that may expose
1730 additional COMPONENT_REFs. */
1731 else if ((TREE_CODE (*p) == VAR_DECL || TREE_CODE (*p) == PARM_DECL)
1732 && gimplify_var_or_parm_decl (p) == GS_OK)
1733 goto restart;
1734 else
1735 break;
1736
1737 VEC_safe_push (tree, heap, stack, *p);
1738 }
1739
1740 gcc_assert (VEC_length (tree, stack));
1741
1742 /* Now STACK is a stack of pointers to all the refs we've walked through
1743 and P points to the innermost expression.
1744
1745 Java requires that we elaborate nodes in source order. That
1746 means we must gimplify the inner expression followed by each of
1747 the indices, in order. But we can't gimplify the inner
1748 expression until we deal with any variable bounds, sizes, or
1749 positions in order to deal with PLACEHOLDER_EXPRs.
1750
1751 So we do this in three steps. First we deal with the annotations
1752 for any variables in the components, then we gimplify the base,
1753 then we gimplify any indices, from left to right. */
1754 for (i = VEC_length (tree, stack) - 1; i >= 0; i--)
1755 {
1756 tree t = VEC_index (tree, stack, i);
1757
1758 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
1759 {
1760 /* Gimplify the low bound and element type size and put them into
1761 the ARRAY_REF. If these values are set, they have already been
1762 gimplified. */
1763 if (!TREE_OPERAND (t, 2))
1764 {
1765 tree low = unshare_expr (array_ref_low_bound (t));
1766 if (!is_gimple_min_invariant (low))
1767 {
1768 TREE_OPERAND (t, 2) = low;
1769 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
1770 is_gimple_formal_tmp_reg, fb_rvalue);
1771 ret = MIN (ret, tret);
1772 }
1773 }
1774
1775 if (!TREE_OPERAND (t, 3))
1776 {
1777 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0)));
1778 tree elmt_size = unshare_expr (array_ref_element_size (t));
1779 tree factor = size_int (TYPE_ALIGN_UNIT (elmt_type));
1780
1781 /* Divide the element size by the alignment of the element
1782 type (above). */
1783 elmt_size = size_binop (EXACT_DIV_EXPR, elmt_size, factor);
1784
1785 if (!is_gimple_min_invariant (elmt_size))
1786 {
1787 TREE_OPERAND (t, 3) = elmt_size;
1788 tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p, post_p,
1789 is_gimple_formal_tmp_reg, fb_rvalue);
1790 ret = MIN (ret, tret);
1791 }
1792 }
1793 }
1794 else if (TREE_CODE (t) == COMPONENT_REF)
1795 {
1796 /* Set the field offset into T and gimplify it. */
1797 if (!TREE_OPERAND (t, 2))
1798 {
1799 tree offset = unshare_expr (component_ref_field_offset (t));
1800 tree field = TREE_OPERAND (t, 1);
1801 tree factor
1802 = size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT);
1803
1804 /* Divide the offset by its alignment. */
1805 offset = size_binop (EXACT_DIV_EXPR, offset, factor);
1806
1807 if (!is_gimple_min_invariant (offset))
1808 {
1809 TREE_OPERAND (t, 2) = offset;
1810 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
1811 is_gimple_formal_tmp_reg, fb_rvalue);
1812 ret = MIN (ret, tret);
1813 }
1814 }
1815 }
1816 }
1817
1818 /* Step 2 is to gimplify the base expression. Make sure lvalue is set
1819 so as to match the min_lval predicate. Failure to do so may result
1820 in the creation of large aggregate temporaries. */
1821 tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval,
1822 fallback | fb_lvalue);
1823 ret = MIN (ret, tret);
1824
1825 /* And finally, the indices and operands to BIT_FIELD_REF. During this
1826 loop we also remove any useless conversions. */
1827 for (; VEC_length (tree, stack) > 0; )
1828 {
1829 tree t = VEC_pop (tree, stack);
1830
1831 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
1832 {
1833 /* Gimplify the dimension.
1834 Temporary fix for gcc.c-torture/execute/20040313-1.c.
1835 Gimplify non-constant array indices into a temporary
1836 variable.
1837 FIXME - The real fix is to gimplify post-modify
1838 expressions into a minimal gimple lvalue. However, that
1839 exposes bugs in alias analysis. The alias analyzer does
1840 not handle &PTR->FIELD very well. Will fix after the
1841 branch is merged into mainline (dnovillo 2004-05-03). */
1842 if (!is_gimple_min_invariant (TREE_OPERAND (t, 1)))
1843 {
1844 tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
1845 is_gimple_formal_tmp_reg, fb_rvalue);
1846 ret = MIN (ret, tret);
1847 }
1848 }
1849 else if (TREE_CODE (t) == BIT_FIELD_REF)
1850 {
1851 tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
1852 is_gimple_val, fb_rvalue);
1853 ret = MIN (ret, tret);
1854 tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
1855 is_gimple_val, fb_rvalue);
1856 ret = MIN (ret, tret);
1857 }
1858
1859 STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t, 0));
1860
1861 /* The innermost expression P may have originally had TREE_SIDE_EFFECTS
1862 set which would have caused all the outer expressions in EXPR_P
1863 leading to P to also have had TREE_SIDE_EFFECTS set. */
1864 recalculate_side_effects (t);
1865 }
1866
1867 tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval, fallback);
1868 ret = MIN (ret, tret);
1869
1870 /* If the outermost expression is a COMPONENT_REF, canonicalize its type. */
1871 if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF)
1872 {
1873 canonicalize_component_ref (expr_p);
1874 ret = MIN (ret, GS_OK);
1875 }
1876
1877 VEC_free (tree, heap, stack);
1878
1879 return ret;
1880 }
1881
1882 /* Gimplify the self modifying expression pointed to by EXPR_P
1883 (++, --, +=, -=).
1884
1885 PRE_P points to the list where side effects that must happen before
1886 *EXPR_P should be stored.
1887
1888 POST_P points to the list where side effects that must happen after
1889 *EXPR_P should be stored.
1890
1891 WANT_VALUE is nonzero iff we want to use the value of this expression
1892 in another expression. */
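
 A rough sketch of the lowering (temporary names are hypothetical):
 when the value of a postfix increment is used,

     y = x++;

 becomes approximately

     x.0 = x;
     x = x.0 + 1;
     y = x.0;

 while "x++;" with an unused result is handled like the prefix form
 and simply becomes "x = x + 1;".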
1893
1894 static enum gimplify_status
1895 gimplify_self_mod_expr (tree *expr_p, tree *pre_p, tree *post_p,
1896 bool want_value)
1897 {
1898 enum tree_code code;
1899 tree lhs, lvalue, rhs, t1, post = NULL, *orig_post_p = post_p;
1900 bool postfix;
1901 enum tree_code arith_code;
1902 enum gimplify_status ret;
1903
1904 code = TREE_CODE (*expr_p);
1905
1906 gcc_assert (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR
1907 || code == PREINCREMENT_EXPR || code == PREDECREMENT_EXPR);
1908
1909 /* Prefix or postfix? */
1910 if (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR)
1911 /* Faster to treat as prefix if result is not used. */
1912 postfix = want_value;
1913 else
1914 postfix = false;
1915
1916 /* For postfix, make sure the inner expression's post side effects
1917 are executed after side effects from this expression. */
1918 if (postfix)
1919 post_p = &post;
1920
1921 /* Add or subtract? */
1922 if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
1923 arith_code = PLUS_EXPR;
1924 else
1925 arith_code = MINUS_EXPR;
1926
1927 /* Gimplify the LHS into a GIMPLE lvalue. */
1928 lvalue = TREE_OPERAND (*expr_p, 0);
1929 ret = gimplify_expr (&lvalue, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
1930 if (ret == GS_ERROR)
1931 return ret;
1932
1933 /* Extract the operands to the arithmetic operation. */
1934 lhs = lvalue;
1935 rhs = TREE_OPERAND (*expr_p, 1);
1936
1937 /* For postfix operator, we evaluate the LHS to an rvalue and then use
1938 that as the result value and in the postqueue operation. */
1939 if (postfix)
1940 {
1941 ret = gimplify_expr (&lhs, pre_p, post_p, is_gimple_val, fb_rvalue);
1942 if (ret == GS_ERROR)
1943 return ret;
1944 }
1945
1946 t1 = build2 (arith_code, TREE_TYPE (*expr_p), lhs, rhs);
1947 t1 = build2 (MODIFY_EXPR, TREE_TYPE (lvalue), lvalue, t1);
1948
1949 if (postfix)
1950 {
1951 gimplify_and_add (t1, orig_post_p);
1952 append_to_statement_list (post, orig_post_p);
1953 *expr_p = lhs;
1954 return GS_ALL_DONE;
1955 }
1956 else
1957 {
1958 *expr_p = t1;
1959 return GS_OK;
1960 }
1961 }
1962
1963 /* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR. */
1964
1965 static void
1966 maybe_with_size_expr (tree *expr_p)
1967 {
1968 tree expr = *expr_p;
1969 tree type = TREE_TYPE (expr);
1970 tree size;
1971
1972 /* If we've already wrapped this or the type is error_mark_node, we can't do
1973 anything. */
1974 if (TREE_CODE (expr) == WITH_SIZE_EXPR
1975 || type == error_mark_node)
1976 return;
1977
1978 /* If the size isn't known or is a constant, we have nothing to do. */
1979 size = TYPE_SIZE_UNIT (type);
1980 if (!size || TREE_CODE (size) == INTEGER_CST)
1981 return;
1982
1983 /* Otherwise, make a WITH_SIZE_EXPR. */
1984 size = unshare_expr (size);
1985 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, expr);
1986 *expr_p = build2 (WITH_SIZE_EXPR, type, expr, size);
1987 }
1988
1989 /* Subroutine of gimplify_call_expr: Gimplify a single argument. */
1990
1991 static enum gimplify_status
1992 gimplify_arg (tree *expr_p, tree *pre_p)
1993 {
1994 bool (*test) (tree);
1995 fallback_t fb;
1996
1997 /* In general, we allow lvalues for function arguments to avoid
1998 extra overhead of copying large aggregates out of even larger
1999 aggregates into temporaries only to copy the temporaries to
2000 the argument list. Make optimizers happy by pulling out to
2001 temporaries those types that fit in registers. */
2002 if (is_gimple_reg_type (TREE_TYPE (*expr_p)))
2003 test = is_gimple_val, fb = fb_rvalue;
2004 else
2005 test = is_gimple_lvalue, fb = fb_either;
2006
2007 /* If this is a variable sized type, we must remember the size. */
2008 maybe_with_size_expr (expr_p);
2009
2010 /* There is a sequence point before a function call. Side effects in
2011 the argument list must occur before the actual call. So, when
2012 gimplifying arguments, force gimplify_expr to use an internal
2013 post queue which is then appended to the end of PRE_P. */
2014 return gimplify_expr (expr_p, pre_p, NULL, test, fb);
2015 }
2016
2017 /* Gimplify the CALL_EXPR node pointed to by EXPR_P. PRE_P points to the
2018 list where side effects that must happen before *EXPR_P should be stored.
2019 WANT_VALUE is true if the result of the call is desired. */
2020
2021 static enum gimplify_status
2022 gimplify_call_expr (tree *expr_p, tree *pre_p, bool want_value)
2023 {
2024 tree decl;
2025 tree arglist;
2026 enum gimplify_status ret;
2027
2028 gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR);
2029
2030 /* For reliable diagnostics during inlining, it is necessary that
2031 every call_expr be annotated with file and line. */
2032 if (! EXPR_HAS_LOCATION (*expr_p))
2033 SET_EXPR_LOCATION (*expr_p, input_location);
2034
2035 /* This may be a call to a builtin function.
2036
2037 Builtin function calls may be transformed into different
2038 (and more efficient) builtin function calls under certain
2039 circumstances. Unfortunately, gimplification can muck things
2040 up enough that the builtin expanders are not aware that certain
2041 transformations are still valid.
2042
2043 So we attempt transformation/gimplification of the call before
2044 we gimplify the CALL_EXPR. At this time we do not manage to
2045 transform all calls in the same manner as the expanders do, but
2046 we do transform most of them. */
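 /* For instance (illustrative only; the exact set of folds lives in
 fold_builtin), a call such as strlen ("abc") may come back as the
 constant 3, in which case *EXPR_P is replaced and we return GS_OK
 so the simplified form is gimplified again from scratch. */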
2047 decl = get_callee_fndecl (*expr_p);
2048 if (decl && DECL_BUILT_IN (decl))
2049 {
2050 tree arglist = TREE_OPERAND (*expr_p, 1);
2051 tree new = fold_builtin (decl, arglist, !want_value);
2052
2053 if (new && new != *expr_p)
2054 {
2055 /* There was a transformation of this call which computes the
2056 same value, but in a more efficient way. Return and try
2057 again. */
2058 *expr_p = new;
2059 return GS_OK;
2060 }
2061
2062 if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
2063 && DECL_FUNCTION_CODE (decl) == BUILT_IN_VA_START)
2064 {
2065 if (!arglist || !TREE_CHAIN (arglist))
2066 {
2067 error ("too few arguments to function %<va_start%>");
2068 *expr_p = build_empty_stmt ();
2069 return GS_OK;
2070 }
2071
2072 if (fold_builtin_next_arg (TREE_CHAIN (arglist)))
2073 {
2074 *expr_p = build_empty_stmt ();
2075 return GS_OK;
2076 }
2077 /* Avoid gimplifying the second argument to va_start, which needs
2078 to be the plain PARM_DECL. */
2079 return gimplify_arg (&TREE_VALUE (TREE_OPERAND (*expr_p, 1)), pre_p);
2080 }
2081 }
2082
2083 /* There is a sequence point before the call, so any side effects in
2084 the calling expression must occur before the actual call. Force
2085 gimplify_expr to use an internal post queue. */
2086 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, NULL,
2087 is_gimple_call_addr, fb_rvalue);
2088
2089 if (PUSH_ARGS_REVERSED)
2090 TREE_OPERAND (*expr_p, 1) = nreverse (TREE_OPERAND (*expr_p, 1));
2091 for (arglist = TREE_OPERAND (*expr_p, 1); arglist;
2092 arglist = TREE_CHAIN (arglist))
2093 {
2094 enum gimplify_status t;
2095
2096 t = gimplify_arg (&TREE_VALUE (arglist), pre_p);
2097
2098 if (t == GS_ERROR)
2099 ret = GS_ERROR;
2100 }
2101 if (PUSH_ARGS_REVERSED)
2102 TREE_OPERAND (*expr_p, 1) = nreverse (TREE_OPERAND (*expr_p, 1));
2103
2104 /* Try this again in case gimplification exposed something. */
2105 if (ret != GS_ERROR)
2106 {
2107 decl = get_callee_fndecl (*expr_p);
2108 if (decl && DECL_BUILT_IN (decl))
2109 {
2110 tree arglist = TREE_OPERAND (*expr_p, 1);
2111 tree new = fold_builtin (decl, arglist, !want_value);
2112
2113 if (new && new != *expr_p)
2114 {
2115 /* There was a transformation of this call which computes the
2116 same value, but in a more efficient way. Return and try
2117 again. */
2118 *expr_p = new;
2119 return GS_OK;
2120 }
2121 }
2122 }
2123
2124 /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
2125 decl. This allows us to eliminate redundant or useless
2126 calls to "const" functions. */
2127 if (TREE_CODE (*expr_p) == CALL_EXPR
2128 && (call_expr_flags (*expr_p) & (ECF_CONST | ECF_PURE)))
2129 TREE_SIDE_EFFECTS (*expr_p) = 0;
2130
2131 return ret;
2132 }
2133
2134 /* Handle shortcut semantics in the predicate operand of a COND_EXPR by
2135 rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.
2136
2137 TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
2138 condition is true or false, respectively. If null, we should generate
2139 our own to skip over the evaluation of this specific expression.
2140
2141 This function is the tree equivalent of do_jump.
2142
2143 shortcut_cond_r should only be called by shortcut_cond_expr. */
2144
2145 static tree
2146 shortcut_cond_r (tree pred, tree *true_label_p, tree *false_label_p)
2147 {
2148 tree local_label = NULL_TREE;
2149 tree t, expr = NULL;
2150
2151 /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
2152 retain the shortcut semantics. Just insert the gotos here;
2153 shortcut_cond_expr will append the real blocks later. */
2154 if (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
2155 {
2156 /* Turn if (a && b) into
2157
2158 if (a); else goto no;
2159 if (b) goto yes; else goto no;
2160 (no:) */
2161
2162 if (false_label_p == NULL)
2163 false_label_p = &local_label;
2164
2165 t = shortcut_cond_r (TREE_OPERAND (pred, 0), NULL, false_label_p);
2166 append_to_statement_list (t, &expr);
2167
2168 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
2169 false_label_p);
2170 append_to_statement_list (t, &expr);
2171 }
2172 else if (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
2173 {
2174 /* Turn if (a || b) into
2175
2176 if (a) goto yes;
2177 if (b) goto yes; else goto no;
2178 (yes:) */
2179
2180 if (true_label_p == NULL)
2181 true_label_p = &local_label;
2182
2183 t = shortcut_cond_r (TREE_OPERAND (pred, 0), true_label_p, NULL);
2184 append_to_statement_list (t, &expr);
2185
2186 t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
2187 false_label_p);
2188 append_to_statement_list (t, &expr);
2189 }
2190 else if (TREE_CODE (pred) == COND_EXPR)
2191 {
2192 /* As long as we're messing with gotos, turn if (a ? b : c) into
2193 if (a)
2194 if (b) goto yes; else goto no;
2195 else
2196 if (c) goto yes; else goto no; */
2197 expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (pred, 0),
2198 shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
2199 false_label_p),
2200 shortcut_cond_r (TREE_OPERAND (pred, 2), true_label_p,
2201 false_label_p));
2202 }
2203 else
2204 {
2205 expr = build3 (COND_EXPR, void_type_node, pred,
2206 build_and_jump (true_label_p),
2207 build_and_jump (false_label_p));
2208 }
2209
2210 if (local_label)
2211 {
2212 t = build1 (LABEL_EXPR, void_type_node, local_label);
2213 append_to_statement_list (t, &expr);
2214 }
2215
2216 return expr;
2217 }
2218
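 /* Rewrite COND_EXPR EXPR so that its condition no longer contains
 TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, using gotos and labels to
 preserve short-circuit semantics, and return the result. As a
 sketch,

     if (a && b) c; else d;

 is rewritten along the lines of

     if (a); else goto no;
     if (b); else goto no;
     c; goto end;
     no: d;
     end:  */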
2219 static tree
2220 shortcut_cond_expr (tree expr)
2221 {
2222 tree pred = TREE_OPERAND (expr, 0);
2223 tree then_ = TREE_OPERAND (expr, 1);
2224 tree else_ = TREE_OPERAND (expr, 2);
2225 tree true_label, false_label, end_label, t;
2226 tree *true_label_p;
2227 tree *false_label_p;
2228 bool emit_end, emit_false, jump_over_else;
2229 bool then_se = then_ && TREE_SIDE_EFFECTS (then_);
2230 bool else_se = else_ && TREE_SIDE_EFFECTS (else_);
2231
2232 /* First do simple transformations. */
2233 if (!else_se)
2234 {
2235 /* If there is no 'else', turn (a && b) into if (a) if (b). */
2236 while (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
2237 {
2238 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
2239 then_ = shortcut_cond_expr (expr);
2240 then_se = then_ && TREE_SIDE_EFFECTS (then_);
2241 pred = TREE_OPERAND (pred, 0);
2242 expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE);
2243 }
2244 }
2245 if (!then_se)
2246 {
2247 /* If there is no 'then', turn
2248 if (a || b); else d
2249 into
2250 if (a); else if (b); else d. */
2251 while (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
2252 {
2253 TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
2254 else_ = shortcut_cond_expr (expr);
2255 else_se = else_ && TREE_SIDE_EFFECTS (else_);
2256 pred = TREE_OPERAND (pred, 0);
2257 expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_);
2258 }
2259 }
2260
2261 /* If we're done, great. */
2262 if (TREE_CODE (pred) != TRUTH_ANDIF_EXPR
2263 && TREE_CODE (pred) != TRUTH_ORIF_EXPR)
2264 return expr;
2265
2266 /* Otherwise we need to mess with gotos. Change
2267 if (a) c; else d;
2268 to
2269 if (a); else goto no;
2270 c; goto end;
2271 no: d; end:
2272 and recursively gimplify the condition. */
2273
2274 true_label = false_label = end_label = NULL_TREE;
2275
2276 /* If our arms just jump somewhere, hijack those labels so we don't
2277 generate jumps to jumps. */
2278
2279 if (then_
2280 && TREE_CODE (then_) == GOTO_EXPR
2281 && TREE_CODE (GOTO_DESTINATION (then_)) == LABEL_DECL)
2282 {
2283 true_label = GOTO_DESTINATION (then_);
2284 then_ = NULL;
2285 then_se = false;
2286 }
2287
2288 if (else_
2289 && TREE_CODE (else_) == GOTO_EXPR
2290 && TREE_CODE (GOTO_DESTINATION (else_)) == LABEL_DECL)
2291 {
2292 false_label = GOTO_DESTINATION (else_);
2293 else_ = NULL;
2294 else_se = false;
2295 }
2296
2297 /* If we aren't hijacking a label for the 'then' branch, it falls through. */
2298 if (true_label)
2299 true_label_p = &true_label;
2300 else
2301 true_label_p = NULL;
2302
2303 /* The 'else' branch also needs a label if it contains interesting code. */
2304 if (false_label || else_se)
2305 false_label_p = &false_label;
2306 else
2307 false_label_p = NULL;
2308
2309 /* If there was nothing else in our arms, just forward the label(s). */
2310 if (!then_se && !else_se)
2311 return shortcut_cond_r (pred, true_label_p, false_label_p);
2312
2313 /* If our last subexpression already has a terminal label, reuse it. */
2314 if (else_se)
2315 expr = expr_last (else_);
2316 else if (then_se)
2317 expr = expr_last (then_);
2318 else
2319 expr = NULL;
2320 if (expr && TREE_CODE (expr) == LABEL_EXPR)
2321 end_label = LABEL_EXPR_LABEL (expr);
2322
2323 /* If we don't care about jumping to the 'else' branch, jump to the end
2324 if the condition is false. */
2325 if (!false_label_p)
2326 false_label_p = &end_label;
2327
2328 /* We only want to emit these labels if we aren't hijacking them. */
2329 emit_end = (end_label == NULL_TREE);
2330 emit_false = (false_label == NULL_TREE);
2331
2332 /* We only emit the jump over the else clause if we have to--if the
2333 then clause may fall through. Otherwise we can wind up with a
2334 useless jump and a useless label at the end of gimplified code,
2335 which will cause us to think that this conditional as a whole
2336 falls through even if it doesn't. If we then inline a function
2337 which ends with such a condition, that can cause us to issue an
2338 inappropriate warning about control reaching the end of a
2339 non-void function. */
2340 jump_over_else = block_may_fallthru (then_);
2341
2342 pred = shortcut_cond_r (pred, true_label_p, false_label_p);
2343
2344 expr = NULL;
2345 append_to_statement_list (pred, &expr);
2346
2347 append_to_statement_list (then_, &expr);
2348 if (else_se)
2349 {
2350 if (jump_over_else)
2351 {
2352 t = build_and_jump (&end_label);
2353 append_to_statement_list (t, &expr);
2354 }
2355 if (emit_false)
2356 {
2357 t = build1 (LABEL_EXPR, void_type_node, false_label);
2358 append_to_statement_list (t, &expr);
2359 }
2360 append_to_statement_list (else_, &expr);
2361 }
2362 if (emit_end && end_label)
2363 {
2364 t = build1 (LABEL_EXPR, void_type_node, end_label);
2365 append_to_statement_list (t, &expr);
2366 }
2367
2368 return expr;
2369 }
2370
2371 /* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE. */
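
 E.g. in C the comparison (a < b) has type int; boolification retypes
 the LT_EXPR to boolean_type_node, while an opaque operand such as a
 plain int variable used as a truth value is instead wrapped in a
 conversion, roughly (_Bool) flag. (Sketch only; "flag" is a
 hypothetical name.)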
2372
2373 tree
2374 gimple_boolify (tree expr)
2375 {
2376 tree type = TREE_TYPE (expr);
2377
2378 if (TREE_CODE (type) == BOOLEAN_TYPE)
2379 return expr;
2380
2381 switch (TREE_CODE (expr))
2382 {
2383 case TRUTH_AND_EXPR:
2384 case TRUTH_OR_EXPR:
2385 case TRUTH_XOR_EXPR:
2386 case TRUTH_ANDIF_EXPR:
2387 case TRUTH_ORIF_EXPR:
2388 /* Also boolify the arguments of truth exprs. */
2389 TREE_OPERAND (expr, 1) = gimple_boolify (TREE_OPERAND (expr, 1));
2390 /* FALLTHRU */
2391
2392 case TRUTH_NOT_EXPR:
2393 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
2394 /* FALLTHRU */
2395
2396 case EQ_EXPR: case NE_EXPR:
2397 case LE_EXPR: case GE_EXPR: case LT_EXPR: case GT_EXPR:
2398 /* These expressions always produce boolean results. */
2399 TREE_TYPE (expr) = boolean_type_node;
2400 return expr;
2401
2402 default:
2403 /* Other expressions that get here must have boolean values, but
2404 might need to be converted to the appropriate mode. */
2405 return fold_convert (boolean_type_node, expr);
2406 }
2407 }
2408
2409 /* Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
2410 into
2411
2412      if (p)                      if (p)
2413        t1 = a;                     a;
2414      else              or        else
2415        t1 = b;                     b;
2416      t1;
2417
2418 The second form is used when *EXPR_P is of type void.
2419
2420 TARGET is the tree for T1 above.
2421
2422 PRE_P points to the list where side effects that must happen before
2423 *EXPR_P should be stored. */
2424
2425 static enum gimplify_status
2426 gimplify_cond_expr (tree *expr_p, tree *pre_p, fallback_t fallback)
2427 {
2428 tree expr = *expr_p;
2429 tree tmp, tmp2, type;
2430 enum gimplify_status ret;
2431
2432 type = TREE_TYPE (expr);
2433
2434 /* If this COND_EXPR has a value, copy the values into a temporary within
2435 the arms. */
2436 if (! VOID_TYPE_P (type))
2437 {
2438 tree result;
2439
2440 if ((fallback & fb_lvalue) == 0)
2441 {
2442 result = tmp2 = tmp = create_tmp_var (TREE_TYPE (expr), "iftmp");
2443 ret = GS_ALL_DONE;
2444 }
2445 else
2446 {
2447 tree type = build_pointer_type (TREE_TYPE (expr));
2448
2449 if (TREE_TYPE (TREE_OPERAND (expr, 1)) != void_type_node)
2450 TREE_OPERAND (expr, 1) =
2451 build_fold_addr_expr (TREE_OPERAND (expr, 1));
2452
2453 if (TREE_TYPE (TREE_OPERAND (expr, 2)) != void_type_node)
2454 TREE_OPERAND (expr, 2) =
2455 build_fold_addr_expr (TREE_OPERAND (expr, 2));
2456
2457 tmp2 = tmp = create_tmp_var (type, "iftmp");
2458
2459 expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (expr, 0),
2460 TREE_OPERAND (expr, 1), TREE_OPERAND (expr, 2));
2461
2462 result = build_fold_indirect_ref (tmp);
2463 ret = GS_ALL_DONE;
2464 }
2465
2466 /* Build the then clause, 't1 = a;'. But don't build an assignment
2467 if this branch is void; in C++ it can be, if it's a throw. */
2468 if (TREE_TYPE (TREE_OPERAND (expr, 1)) != void_type_node)
2469 TREE_OPERAND (expr, 1)
2470 = build2 (MODIFY_EXPR, void_type_node, tmp, TREE_OPERAND (expr, 1));
2471
2472 /* Build the else clause, 't1 = b;'. */
2473 if (TREE_TYPE (TREE_OPERAND (expr, 2)) != void_type_node)
2474 TREE_OPERAND (expr, 2)
2475 = build2 (MODIFY_EXPR, void_type_node, tmp2, TREE_OPERAND (expr, 2));
2476
2477 TREE_TYPE (expr) = void_type_node;
2478 recalculate_side_effects (expr);
2479
2480 /* Move the COND_EXPR to the prequeue. */
2481 gimplify_and_add (expr, pre_p);
2482
2483 *expr_p = result;
2484 return ret;
2485 }
2486
2487 /* Make sure the condition has BOOLEAN_TYPE. */
2488 TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
2489
2490 /* Break apart && and || conditions. */
2491 if (TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ANDIF_EXPR
2492 || TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ORIF_EXPR)
2493 {
2494 expr = shortcut_cond_expr (expr);
2495
2496 if (expr != *expr_p)
2497 {
2498 *expr_p = expr;
2499
2500 /* We can't rely on gimplify_expr to re-gimplify the expanded
2501 form properly, as cleanups might cause the target labels to be
2502 wrapped in a TRY_FINALLY_EXPR. To prevent that, we need to
2503 set up a conditional context. */
2504 gimple_push_condition ();
2505 gimplify_stmt (expr_p);
2506 gimple_pop_condition (pre_p);
2507
2508 return GS_ALL_DONE;
2509 }
2510 }
2511
2512 /* Now do the normal gimplification. */
2513 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL,
2514 is_gimple_condexpr, fb_rvalue);
2515
2516 gimple_push_condition ();
2517
2518 gimplify_to_stmt_list (&TREE_OPERAND (expr, 1));
2519 gimplify_to_stmt_list (&TREE_OPERAND (expr, 2));
2520 recalculate_side_effects (expr);
2521
2522 gimple_pop_condition (pre_p);
2523
2524 if (ret == GS_ERROR)
2525 ;
2526 else if (TREE_SIDE_EFFECTS (TREE_OPERAND (expr, 1)))
2527 ret = GS_ALL_DONE;
2528 else if (TREE_SIDE_EFFECTS (TREE_OPERAND (expr, 2)))
2529 /* Rewrite "if (a); else b" to "if (!a) b" */
2530 {
2531 TREE_OPERAND (expr, 0) = invert_truthvalue (TREE_OPERAND (expr, 0));
2532 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL,
2533 is_gimple_condexpr, fb_rvalue);
2534
2535 tmp = TREE_OPERAND (expr, 1);
2536 TREE_OPERAND (expr, 1) = TREE_OPERAND (expr, 2);
2537 TREE_OPERAND (expr, 2) = tmp;
2538 }
2539 else
2540 /* Both arms are empty; replace the COND_EXPR with its predicate. */
2541 expr = TREE_OPERAND (expr, 0);
2542
2543 *expr_p = expr;
2544 return ret;
2545 }
2546
2547 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
2548 a call to __builtin_memcpy. */
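
 That is, a block copy such as "*top = *fromp" whose size is not a
 compile-time constant becomes, roughly,

     __builtin_memcpy (&*top, &*fromp, size);

 and, when the value of the assignment is wanted, the call's result
 is cast back to a pointer to the destination type and dereferenced.
 (Illustrative sketch; operand names are hypothetical.)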
2549
2550 static enum gimplify_status
2551 gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value)
2552 {
2553 tree args, t, to, to_ptr, from;
2554
2555 to = TREE_OPERAND (*expr_p, 0);
2556 from = TREE_OPERAND (*expr_p, 1);
2557
2558 args = tree_cons (NULL, size, NULL);
2559
2560 t = build_fold_addr_expr (from);
2561 args = tree_cons (NULL, t, args);
2562
2563 to_ptr = build_fold_addr_expr (to);
2564 args = tree_cons (NULL, to_ptr, args);
2565 t = implicit_built_in_decls[BUILT_IN_MEMCPY];
2566 t = build_function_call_expr (t, args);
2567
2568 if (want_value)
2569 {
2570 t = build1 (NOP_EXPR, TREE_TYPE (to_ptr), t);
2571 t = build1 (INDIRECT_REF, TREE_TYPE (to), t);
2572 }
2573
2574 *expr_p = t;
2575 return GS_OK;
2576 }
2577
2578 /* A subroutine of gimplify_modify_expr. Replace a MODIFY_EXPR with
2579 a call to __builtin_memset. In this case we know that the RHS is
2580 a CONSTRUCTOR with an empty element list. */
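
 I.e. an aggregate assignment from an empty CONSTRUCTOR, "x = {}",
 becomes roughly __builtin_memset (&x, 0, size), again with the
 result re-dereferenced if the value is wanted. (Sketch only.)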
2581
2582 static enum gimplify_status
2583 gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value)
2584 {
2585 tree args, t, to, to_ptr;
2586
2587 to = TREE_OPERAND (*expr_p, 0);
2588
2589 args = tree_cons (NULL, size, NULL);
2590
2591 args = tree_cons (NULL, integer_zero_node, args);
2592
2593 to_ptr = build_fold_addr_expr (to);
2594 args = tree_cons (NULL, to_ptr, args);
2595 t = implicit_built_in_decls[BUILT_IN_MEMSET];
2596 t = build_function_call_expr (t, args);
2597
2598 if (want_value)
2599 {
2600 t = build1 (NOP_EXPR, TREE_TYPE (to_ptr), t);
2601 t = build1 (INDIRECT_REF, TREE_TYPE (to), t);
2602 }
2603
2604 *expr_p = t;
2605 return GS_OK;
2606 }
2607
2608 /* A subroutine of gimplify_init_ctor_preeval. Called via walk_tree,
2609 determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
2610 assignment. Returns non-null if we detect a potential overlap. */
2611
2612 struct gimplify_init_ctor_preeval_data
2613 {
2614 /* The base decl of the lhs object. May be NULL, in which case we
2615 have to assume the lhs is indirect. */
2616 tree lhs_base_decl;
2617
2618 /* The alias set of the lhs object. */
2619 int lhs_alias_set;
2620 };
2621
2622 static tree
2623 gimplify_init_ctor_preeval_1 (tree *tp, int *walk_subtrees, void *xdata)
2624 {
2625 struct gimplify_init_ctor_preeval_data *data
2626 = (struct gimplify_init_ctor_preeval_data *) xdata;
2627 tree t = *tp;
2628
2629 /* If we find the base object, obviously we have overlap. */
2630 if (data->lhs_base_decl == t)
2631 return t;
2632
2633 /* If the constructor component is indirect, determine if we have a
2634 potential overlap with the lhs. The only bits of information we
2635 have to go on at this point are addressability and alias sets. */
2636 if (TREE_CODE (t) == INDIRECT_REF
2637 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
2638 && alias_sets_conflict_p (data->lhs_alias_set, get_alias_set (t)))
2639 return t;
2640
2641 /* If the constructor component is a call, determine if it can hide a
2642 potential overlap with the lhs through an INDIRECT_REF like above. */
2643 if (TREE_CODE (t) == CALL_EXPR)
2644 {
2645 tree type, fntype = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0)));
2646
2647 for (type = TYPE_ARG_TYPES (fntype); type; type = TREE_CHAIN (type))
2648 if (POINTER_TYPE_P (TREE_VALUE (type))
2649 && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
2650 && alias_sets_conflict_p (data->lhs_alias_set,
2651 get_alias_set
2652 (TREE_TYPE (TREE_VALUE (type)))))
2653 return t;
2654 }
2655
2656 if (IS_TYPE_OR_DECL_P (t))
2657 *walk_subtrees = 0;
2658 return NULL;
2659 }
2660
2661 /* A subroutine of gimplify_init_constructor. Pre-evaluate *EXPR_P,
2662 force values that overlap with the lhs (as described by *DATA)
2663 into temporaries. */
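
 For example (a sketch), in

     x = (struct pair){ x.b, x.a };

 both element values read from the object being assigned, so each is
 forced into a temporary before any part of X is overwritten.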
2664
2665 static void
2666 gimplify_init_ctor_preeval (tree *expr_p, tree *pre_p, tree *post_p,
2667 struct gimplify_init_ctor_preeval_data *data)
2668 {
2669 enum gimplify_status one;
2670
2671 /* If the value is invariant, then there's nothing to pre-evaluate.
2672 But ensure it doesn't have any side-effects since a SAVE_EXPR is
2673 invariant but has side effects and might contain a reference to
2674 the object we're initializing. */
2675 if (TREE_INVARIANT (*expr_p) && !TREE_SIDE_EFFECTS (*expr_p))
2676 return;
2677
2678 /* If the type has non-trivial constructors, we can't pre-evaluate. */
2679 if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p)))
2680 return;
2681
2682 /* Recurse for nested constructors. */
2683 if (TREE_CODE (*expr_p) == CONSTRUCTOR)
2684 {
2685 unsigned HOST_WIDE_INT ix;
2686 constructor_elt *ce;
2687 VEC(constructor_elt,gc) *v = CONSTRUCTOR_ELTS (*expr_p);
2688
2689 for (ix = 0; VEC_iterate (constructor_elt, v, ix, ce); ix++)
2690 gimplify_init_ctor_preeval (&ce->value, pre_p, post_p, data);
2691 return;
2692 }
2693
2694 /* If this is a variable sized type, we must remember the size. */
2695 maybe_with_size_expr (expr_p);
2696
2697 /* Gimplify the constructor element to something appropriate for the rhs
2698 of a MODIFY_EXPR. Given that we know the lhs is an aggregate, we know
2699 the gimplifier will consider this a store to memory. Doing this
2700 gimplification now means that we won't have to deal with complicated
2701 language-specific trees, nor trees like SAVE_EXPR that can induce
2702 exponential search behavior. */
2703 one = gimplify_expr (expr_p, pre_p, post_p, is_gimple_mem_rhs, fb_rvalue);
2704 if (one == GS_ERROR)
2705 {
2706 *expr_p = NULL;
2707 return;
2708 }
2709
2710 /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
2711 with the lhs, since "a = { .x=a }" doesn't make sense. This will
2712 always be true for all scalars, since is_gimple_mem_rhs insists on a
2713 temporary variable for them. */
2714 if (DECL_P (*expr_p))
2715 return;
2716
2717 /* If this is of variable size, we have no choice but to assume it doesn't
2718 overlap since we can't make a temporary for it. */
2719 if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p))) != INTEGER_CST)
2720 return;
2721
2722 /* Otherwise, we must search for overlap ... */
2723 if (!walk_tree (expr_p, gimplify_init_ctor_preeval_1, data, NULL))
2724 return;
2725
2726 /* ... and if found, force the value into a temporary. */
2727 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
2728 }
2729
2730 /* A subroutine of gimplify_init_ctor_eval. Create a loop for
2731 a RANGE_EXPR in a CONSTRUCTOR for an array.
2732
2733 var = lower;
2734 loop_entry:
2735 object[var] = value;
2736 if (var == upper)
2737 goto loop_exit;
2738 var = var + 1;
2739 goto loop_entry;
2740 loop_exit:
2741
2742 We increment var _after_ the loop exit check because we might otherwise
2743 fail if upper == TYPE_MAX_VALUE (type for upper).
2744
2745 Note that we never have to deal with SAVE_EXPRs here, because this has
2746 already been taken care of for us, in gimplify_init_ctor_preeval(). */
2747
2748 static void gimplify_init_ctor_eval (tree, VEC(constructor_elt,gc) *,
2749 tree *, bool);
2750
2751 static void
2752 gimplify_init_ctor_eval_range (tree object, tree lower, tree upper,
2753 tree value, tree array_elt_type,
2754 tree *pre_p, bool cleared)
2755 {
2756 tree loop_entry_label, loop_exit_label;
2757 tree var, var_type, cref;
2758
2759 loop_entry_label = create_artificial_label ();
2760 loop_exit_label = create_artificial_label ();
2761
2762 /* Create and initialize the index variable. */
2763 var_type = TREE_TYPE (upper);
2764 var = create_tmp_var (var_type, NULL);
2765 append_to_statement_list (build2 (MODIFY_EXPR, var_type, var, lower), pre_p);
2766
2767 /* Add the loop entry label. */
2768 append_to_statement_list (build1 (LABEL_EXPR,
2769 void_type_node,
2770 loop_entry_label),
2771 pre_p);
2772
2773 /* Build the reference. */
2774 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
2775 var, NULL_TREE, NULL_TREE);
2776
2777 /* If we are a constructor, just call gimplify_init_ctor_eval to do
2778 the store. Otherwise just assign value to the reference. */
2779
2780 if (TREE_CODE (value) == CONSTRUCTOR)
2781 /* NB we might have to call ourself recursively through
2782 gimplify_init_ctor_eval if the value is a constructor. */
2783 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
2784 pre_p, cleared);
2785 else
2786 append_to_statement_list (build2 (MODIFY_EXPR, TREE_TYPE (cref),
2787 cref, value),
2788 pre_p);
2789
2790 /* We exit the loop when the index var is equal to the upper bound. */
2791 gimplify_and_add (build3 (COND_EXPR, void_type_node,
2792 build2 (EQ_EXPR, boolean_type_node,
2793 var, upper),
2794 build1 (GOTO_EXPR,
2795 void_type_node,
2796 loop_exit_label),
2797 NULL_TREE),
2798 pre_p);
2799
2800 /* Otherwise, increment the index var... */
2801 append_to_statement_list (build2 (MODIFY_EXPR, var_type, var,
2802 build2 (PLUS_EXPR, var_type, var,
2803 fold_convert (var_type,
2804 integer_one_node))),
2805 pre_p);
2806
2807 /* ...and jump back to the loop entry. */
2808 append_to_statement_list (build1 (GOTO_EXPR,
2809 void_type_node,
2810 loop_entry_label),
2811 pre_p);
2812
2813 /* Add the loop exit label. */
2814 append_to_statement_list (build1 (LABEL_EXPR,
2815 void_type_node,
2816 loop_exit_label),
2817 pre_p);
2818 }
2819
2820 /* Return true if FDECL is accessing a field that is zero sized. */
2821
2822 static bool
2823 zero_sized_field_decl (tree fdecl)
2824 {
2825 if (TREE_CODE (fdecl) == FIELD_DECL && DECL_SIZE (fdecl)
2826 && integer_zerop (DECL_SIZE (fdecl)))
2827 return true;
2828 return false;
2829 }
2830
2831 /* Return true if TYPE is zero sized. */
2832
2833 static bool
2834 zero_sized_type (tree type)
2835 {
2836 if (AGGREGATE_TYPE_P (type) && TYPE_SIZE (type)
2837 && integer_zerop (TYPE_SIZE (type)))
2838 return true;
2839 return false;
2840 }
2841
2842 /* A subroutine of gimplify_init_constructor. Generate individual
2843 MODIFY_EXPRs for a CONSTRUCTOR. OBJECT is the LHS against which the
2844 assignments should happen. ELTS is the CONSTRUCTOR_ELTS of the
2845 CONSTRUCTOR. CLEARED is true if the entire LHS object has been
2846 zeroed first. */
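
 E.g. (sketch, with hypothetical field names) when CLEARED is false,
 "s = (struct S){ 1, f () };" is broken into the element stores
 "s.a = 1; s.b = f ();".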
2847
2848 static void
2849 gimplify_init_ctor_eval (tree object, VEC(constructor_elt,gc) *elts,
2850 tree *pre_p, bool cleared)
2851 {
2852 tree array_elt_type = NULL;
2853 unsigned HOST_WIDE_INT ix;
2854 tree purpose, value;
2855
2856 if (TREE_CODE (TREE_TYPE (object)) == ARRAY_TYPE)
2857 array_elt_type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object)));
2858
2859 FOR_EACH_CONSTRUCTOR_ELT (elts, ix, purpose, value)
2860 {
2861 tree cref, init;
2862
2863 /* NULL values are created above for gimplification errors. */
2864 if (value == NULL)
2865 continue;
2866
2867 if (cleared && initializer_zerop (value))
2868 continue;
2869
2870 /* ??? Here's to hoping the front end fills in all of the indices,
2871 so we don't have to figure out what's missing ourselves. */
2872 gcc_assert (purpose);
2873
2874 /* Skip zero-sized fields, unless value has side-effects. This can
2875 happen with calls to functions returning a zero-sized type, which
2876 we shouldn't discard. As a number of downstream passes don't
2877 expect sets of zero-sized fields, we rely on the gimplification of
2878 the MODIFY_EXPR we make below to drop the assignment statement. */
2879 if (! TREE_SIDE_EFFECTS (value) && zero_sized_field_decl (purpose))
2880 continue;
2881
2882 /* If we have a RANGE_EXPR, we have to build a loop to assign the
2883 whole range. */
2884 if (TREE_CODE (purpose) == RANGE_EXPR)
2885 {
2886 tree lower = TREE_OPERAND (purpose, 0);
2887 tree upper = TREE_OPERAND (purpose, 1);
2888
2889 /* If the lower bound is equal to upper, just treat it as if
2890 upper was the index. */
2891 if (simple_cst_equal (lower, upper))
2892 purpose = upper;
2893 else
2894 {
2895 gimplify_init_ctor_eval_range (object, lower, upper, value,
2896 array_elt_type, pre_p, cleared);
2897 continue;
2898 }
2899 }
2900
2901 if (array_elt_type)
2902 {
2903 cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
2904 purpose, NULL_TREE, NULL_TREE);
2905 }
2906 else
2907 {
2908 gcc_assert (TREE_CODE (purpose) == FIELD_DECL);
2909 cref = build3 (COMPONENT_REF, TREE_TYPE (purpose),
2910 unshare_expr (object), purpose, NULL_TREE);
2911 }
2912
2913 if (TREE_CODE (value) == CONSTRUCTOR
2914 && TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE)
2915 gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
2916 pre_p, cleared);
2917 else
2918 {
2919 init = build2 (INIT_EXPR, TREE_TYPE (cref), cref, value);
2920 gimplify_and_add (init, pre_p);
2921 }
2922 }
2923 }
2924
2925 /* A subroutine of gimplify_modify_expr. Break out elements of a
2926 CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.
2927
2928 Note that we still need to clear any elements that don't have explicit
2929 initializers, so if not all elements are initialized we keep the
2930 original MODIFY_EXPR, we just remove all of the constructor elements. */
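
 As a sketch, "int a[100] = { [3] = 5 };" keeps the (now emptied)
 CONSTRUCTOR assignment to zero the whole array and then emits the
 single element store "a[3] = 5;"; a fully-constant read-only
 initializer, by contrast, may instead be promoted to static storage
 or block-copied in.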
2931
2932 static enum gimplify_status
2933 gimplify_init_constructor (tree *expr_p, tree *pre_p,
2934 tree *post_p, bool want_value)
2935 {
2936 tree object;
2937 tree ctor = TREE_OPERAND (*expr_p, 1);
2938 tree type = TREE_TYPE (ctor);
2939 enum gimplify_status ret;
2940 VEC(constructor_elt,gc) *elts;
2941
2942 if (TREE_CODE (ctor) != CONSTRUCTOR)
2943 return GS_UNHANDLED;
2944
2945 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
2946 is_gimple_lvalue, fb_lvalue);
2947 if (ret == GS_ERROR)
2948 return ret;
2949 object = TREE_OPERAND (*expr_p, 0);
2950
2951 elts = CONSTRUCTOR_ELTS (ctor);
2952
2953 ret = GS_ALL_DONE;
2954 switch (TREE_CODE (type))
2955 {
2956 case RECORD_TYPE:
2957 case UNION_TYPE:
2958 case QUAL_UNION_TYPE:
2959 case ARRAY_TYPE:
2960 {
2961 struct gimplify_init_ctor_preeval_data preeval_data;
2962 HOST_WIDE_INT num_type_elements, num_ctor_elements;
2963 HOST_WIDE_INT num_nonzero_elements;
2964 bool cleared, valid_const_initializer;
2965
2966 /* Aggregate types must lower constructors to initialization of
2967 individual elements. The exception is that a CONSTRUCTOR node
2968 with no elements indicates zero-initialization of the whole. */
2969 if (VEC_empty (constructor_elt, elts))
2970 break;
2971
2972 /* Fetch information about the constructor to direct later processing.
2973 We might want to make static versions of it in various cases, and
2974 can only do so if it is known to be a valid constant initializer. */
2975 valid_const_initializer
2976 = categorize_ctor_elements (ctor, &num_nonzero_elements,
2977 &num_ctor_elements, &cleared);
2978
2979 /* If a const aggregate variable is being initialized, then it
2980 should never be a loss to promote the variable to be static. */
2981 if (valid_const_initializer
2982 && num_nonzero_elements > 1
2983 && TREE_READONLY (object)
2984 && TREE_CODE (object) == VAR_DECL)
2985 {
2986 DECL_INITIAL (object) = ctor;
2987 TREE_STATIC (object) = 1;
2988 if (!DECL_NAME (object))
2989 DECL_NAME (object) = create_tmp_var_name ("C");
2990 walk_tree (&DECL_INITIAL (object), force_labels_r, NULL, NULL);
2991
2992 /* ??? C++ doesn't automatically append a .<number> to the
2993 assembler name, and even when it does, it looks at FE private
2994 data structures to figure out what that number should be,
2995 which are not set for this variable. I suppose this is
2996 important for local statics for inline functions, which aren't
2997 "local" in the object file sense. So in order to get a unique
2998 TU-local symbol, we must invoke the lhd version now. */
2999 lhd_set_decl_assembler_name (object);
3000
3001 *expr_p = NULL_TREE;
3002 break;
3003 }
3004
3005 /* If there are "lots" of initialized elements, even discounting
3006 those that are not address constants (and thus *must* be
3007 computed at runtime), then partition the constructor into
3008 constant and non-constant parts. Block copy the constant
3009 parts in, then generate code for the non-constant parts. */
3010 /* TODO. There's code in cp/typeck.c to do this. */
3011
3012 num_type_elements = count_type_elements (type, true);
3013
3014 /* If count_type_elements could not determine number of type elements
3015 for a constant-sized object, assume clearing is needed.
3016 Don't do this for variable-sized objects, as store_constructor
3017 will ignore the clearing of variable-sized objects. */
3018 if (num_type_elements < 0 && int_size_in_bytes (type) >= 0)
3019 cleared = true;
3020 /* If there are "lots" of zeros, then block clear the object first. */
3021 else if (num_type_elements - num_nonzero_elements > CLEAR_RATIO
3022 && num_nonzero_elements < num_type_elements/4)
3023 cleared = true;
3024 /* ??? This bit ought not be needed. For any element not present
3025 in the initializer, we should simply set it to zero. Except
3026 we'd need to *find* the elements that are not present, and that
3027 requires trickery to avoid quadratic compile-time behavior in
3028 large cases or excessive memory use in small cases. */
3029 else if (num_ctor_elements < num_type_elements)
3030 cleared = true;
3031
3032 /* If there are "lots" of initialized elements, and all of them
3033 are valid address constants, then the entire initializer can
3034 be dropped to memory, and then memcpy'd out. Don't do this
3035 for sparse arrays, though, as it's more efficient to follow
3036 the standard CONSTRUCTOR behavior of memset followed by
3037 individual element initialization. */
3038 if (valid_const_initializer && !cleared)
3039 {
3040 HOST_WIDE_INT size = int_size_in_bytes (type);
3041 unsigned int align;
3042
3043 /* ??? We can still get unbounded array types, at least
3044 from the C++ front end. This seems wrong, but attempt
3045 to work around it for now. */
3046 if (size < 0)
3047 {
3048 size = int_size_in_bytes (TREE_TYPE (object));
3049 if (size >= 0)
3050 TREE_TYPE (ctor) = type = TREE_TYPE (object);
3051 }
3052
3053 /* Find the maximum alignment we can assume for the object. */
3054 /* ??? Make use of DECL_OFFSET_ALIGN. */
3055 if (DECL_P (object))
3056 align = DECL_ALIGN (object);
3057 else
3058 align = TYPE_ALIGN (type);
3059
3060 if (size > 0 && !can_move_by_pieces (size, align))
3061 {
3062 tree new = create_tmp_var_raw (type, "C");
3063
3064 gimple_add_tmp_var (new);
3065 TREE_STATIC (new) = 1;
3066 TREE_READONLY (new) = 1;
3067 DECL_INITIAL (new) = ctor;
3068 if (align > DECL_ALIGN (new))
3069 {
3070 DECL_ALIGN (new) = align;
3071 DECL_USER_ALIGN (new) = 1;
3072 }
3073 walk_tree (&DECL_INITIAL (new), force_labels_r, NULL, NULL);
3074
3075 TREE_OPERAND (*expr_p, 1) = new;
3076
3077 /* This is no longer an assignment of a CONSTRUCTOR, but
3078 we still may have processing to do on the LHS. So
3079 pretend we didn't do anything here to let that happen. */
3080 return GS_UNHANDLED;
3081 }
3082 }
3083
3084 /* If there are nonzero elements, pre-evaluate to capture elements
3085 overlapping with the lhs into temporaries. We must do this before
3086 clearing to fetch the values before they are zeroed-out. */
3087 if (num_nonzero_elements > 0)
3088 {
3089 preeval_data.lhs_base_decl = get_base_address (object);
3090 if (!DECL_P (preeval_data.lhs_base_decl))
3091 preeval_data.lhs_base_decl = NULL;
3092 preeval_data.lhs_alias_set = get_alias_set (object);
3093
3094 gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p, 1),
3095 pre_p, post_p, &preeval_data);
3096 }
3097
3098 if (cleared)
3099 {
3100 /* Zap the CONSTRUCTOR element list, which simplifies this case.
3101 Note that we still have to gimplify, in order to handle the
3102 case of variable sized types. Avoid shared tree structures. */
3103 CONSTRUCTOR_ELTS (ctor) = NULL;
3104 object = unshare_expr (object);
3105 gimplify_stmt (expr_p);
3106 append_to_statement_list (*expr_p, pre_p);
3107 }
3108
3109 /* If we have not block cleared the object, or if there are nonzero
3110 elements in the constructor, add assignments to the individual
3111 scalar fields of the object. */
3112 if (!cleared || num_nonzero_elements > 0)
3113 gimplify_init_ctor_eval (object, elts, pre_p, cleared);
3114
3115 *expr_p = NULL_TREE;
3116 }
3117 break;
3118
3119 case COMPLEX_TYPE:
3120 {
3121 tree r, i;
3122
3123 /* Extract the real and imaginary parts out of the ctor. */
3124 gcc_assert (VEC_length (constructor_elt, elts) == 2);
3125 r = VEC_index (constructor_elt, elts, 0)->value;
3126 i = VEC_index (constructor_elt, elts, 1)->value;
3127 if (r == NULL || i == NULL)
3128 {
3129 tree zero = fold_convert (TREE_TYPE (type), integer_zero_node);
3130 if (r == NULL)
3131 r = zero;
3132 if (i == NULL)
3133 i = zero;
3134 }
3135
3136 /* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
3137 represent creation of a complex value. */
3138 if (TREE_CONSTANT (r) && TREE_CONSTANT (i))
3139 {
3140 ctor = build_complex (type, r, i);
3141 TREE_OPERAND (*expr_p, 1) = ctor;
3142 }
3143 else
3144 {
3145 ctor = build2 (COMPLEX_EXPR, type, r, i);
3146 TREE_OPERAND (*expr_p, 1) = ctor;
3147 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
3148 rhs_predicate_for (TREE_OPERAND (*expr_p, 0)),
3149 fb_rvalue);
3150 }
3151 }
3152 break;
3153
3154 case VECTOR_TYPE:
3155 {
3156 unsigned HOST_WIDE_INT ix;
3157 constructor_elt *ce;
3158
3159 /* Go ahead and simplify constant constructors to VECTOR_CST. */
3160 if (TREE_CONSTANT (ctor))
3161 {
3162 bool constant_p = true;
3163 tree value;
3164
3165 /* Even when ctor is constant, it might contain non-*_CST
3166 elements (e.g. { 1.0/0.0 - 1.0/0.0, 0.0 }) and those don't
3167 belong into VECTOR_CST nodes. */
3168 FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
3169 if (!CONSTANT_CLASS_P (value))
3170 {
3171 constant_p = false;
3172 break;
3173 }
3174
3175 if (constant_p)
3176 {
3177 TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts);
3178 break;
3179 }
3180
3181 /* Don't reduce a TREE_CONSTANT vector ctor even if we can't
3182 make a VECTOR_CST. It won't do anything for us, and it'll
3183 prevent us from representing it as a single constant. */
3184 break;
3185 }
3186
3187 /* Vector types use CONSTRUCTOR all the way through gimple
3188 compilation as a general initializer. */
3189 for (ix = 0; VEC_iterate (constructor_elt, elts, ix, ce); ix++)
3190 {
3191 enum gimplify_status tret;
3192 tret = gimplify_expr (&ce->value, pre_p, post_p,
3193 is_gimple_val, fb_rvalue);
3194 if (tret == GS_ERROR)
3195 ret = GS_ERROR;
3196 }
3197 }
3198 break;
3199
3200 default:
3201 /* So how did we get a CONSTRUCTOR for a scalar type? */
3202 gcc_unreachable ();
3203 }
3204
3205 if (ret == GS_ERROR)
3206 return GS_ERROR;
3207 else if (want_value)
3208 {
3209 append_to_statement_list (*expr_p, pre_p);
3210 *expr_p = object;
3211 return GS_OK;
3212 }
3213 else
3214 return GS_ALL_DONE;
3215 }
3216
3217 /* Given a pointer value OP0, return a simplified version of an
3218 indirection through OP0, or NULL_TREE if no simplification is
3219 possible. This may only be applied to a rhs of an expression.
3220 Note that the resulting type may differ from the type pointed to,
3221 but it will still be compatible with it from the langhooks point
3222 of view. */
3223
3224 static tree
3225 fold_indirect_ref_rhs (tree t)
3226 {
3227 tree type = TREE_TYPE (TREE_TYPE (t));
3228 tree sub = t;
3229 tree subtype;
3230
3231 STRIP_USELESS_TYPE_CONVERSION (sub);
3232 subtype = TREE_TYPE (sub);
3233 if (!POINTER_TYPE_P (subtype))
3234 return NULL_TREE;
3235
3236 if (TREE_CODE (sub) == ADDR_EXPR)
3237 {
3238 tree op = TREE_OPERAND (sub, 0);
3239 tree optype = TREE_TYPE (op);
3240 /* *&p => p */
3241 if (lang_hooks.types_compatible_p (type, optype))
3242 return op;
3243 /* *(foo *)&fooarray => fooarray[0] */
3244 else if (TREE_CODE (optype) == ARRAY_TYPE
3245 && lang_hooks.types_compatible_p (type, TREE_TYPE (optype)))
3246 {
3247 tree type_domain = TYPE_DOMAIN (optype);
3248 tree min_val = size_zero_node;
3249 if (type_domain && TYPE_MIN_VALUE (type_domain))
3250 min_val = TYPE_MIN_VALUE (type_domain);
3251 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
3252 }
3253 }
3254
3255 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
3256 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
3257 && lang_hooks.types_compatible_p (type, TREE_TYPE (TREE_TYPE (subtype))))
3258 {
3259 tree type_domain;
3260 tree min_val = size_zero_node;
3261 tree osub = sub;
3262 sub = fold_indirect_ref_rhs (sub);
3263 if (! sub)
3264 sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
3265 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
3266 if (type_domain && TYPE_MIN_VALUE (type_domain))
3267 min_val = TYPE_MIN_VALUE (type_domain);
3268 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
3269 }
3270
3271 return NULL_TREE;
3272 }
3273
3274 /* Subroutine of gimplify_modify_expr to do simplifications of MODIFY_EXPRs
3275 based on the code of the RHS. We loop for as long as something changes. */
3276
3277 static enum gimplify_status
3278 gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p, tree *pre_p,
3279 tree *post_p, bool want_value)
3280 {
3281 enum gimplify_status ret = GS_OK;
3282
3283 while (ret != GS_UNHANDLED)
3284 switch (TREE_CODE (*from_p))
3285 {
3286 case INDIRECT_REF:
3287 {
3288 /* If we have code like
3289
3290 *(const A*)(A*)&x
3291
3292 where the type of "x" is a (possibly cv-qualified variant
3293 of "A"), treat the entire expression as identical to "x".
3294 This kind of code arises in C++ when an object is bound
3295 to a const reference, and if "x" is a TARGET_EXPR we want
3296 to take advantage of the optimization below. */
3297 tree t = fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0));
3298 if (t)
3299 {
3300 *from_p = t;
3301 ret = GS_OK;
3302 }
3303 else
3304 ret = GS_UNHANDLED;
3305 break;
3306 }
3307
3308 case TARGET_EXPR:
3309 {
3310 /* If we are initializing something from a TARGET_EXPR, strip the
3311 TARGET_EXPR and initialize it directly, if possible. This can't
3312 be done if the initializer is void, since that implies that the
3313 temporary is set in some non-trivial way.
3314
3315 ??? What about code that pulls out the temp and uses it
3316 elsewhere? I think that such code never uses the TARGET_EXPR as
3317 an initializer. If I'm wrong, we'll die because the temp won't
3318 have any RTL. In that case, I guess we'll need to replace
3319 references somehow. */
3320 tree init = TARGET_EXPR_INITIAL (*from_p);
3321
3322 if (!VOID_TYPE_P (TREE_TYPE (init)))
3323 {
3324 *from_p = init;
3325 ret = GS_OK;
3326 }
3327 else
3328 ret = GS_UNHANDLED;
3329 }
3330 break;
3331
3332 case COMPOUND_EXPR:
3333 /* Remove any COMPOUND_EXPR in the RHS so the following cases will be
3334 caught. */
3335 gimplify_compound_expr (from_p, pre_p, true);
3336 ret = GS_OK;
3337 break;
3338
3339 case CONSTRUCTOR:
3340 /* If we're initializing from a CONSTRUCTOR, break this into
3341 individual MODIFY_EXPRs. */
3342 return gimplify_init_constructor (expr_p, pre_p, post_p, want_value);
3343
3344 case COND_EXPR:
3345 /* If we're assigning to a non-register type, push the assignment
3346 down into the branches. This is mandatory for ADDRESSABLE types,
3347 since we cannot generate temporaries for such, but it saves a
3348 copy in other cases as well. */
3349 if (!is_gimple_reg_type (TREE_TYPE (*from_p)))
3350 {
3351 /* This code should mirror the code in gimplify_cond_expr. */
3352 enum tree_code code = TREE_CODE (*expr_p);
3353 tree cond = *from_p;
3354 tree result = *to_p;
3355
3356 ret = gimplify_expr (&result, pre_p, post_p,
3357 is_gimple_min_lval, fb_lvalue);
3358 if (ret != GS_ERROR)
3359 ret = GS_OK;
3360
3361 if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node)
3362 TREE_OPERAND (cond, 1)
3363 = build2 (code, void_type_node, result,
3364 TREE_OPERAND (cond, 1));
3365 if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
3366 TREE_OPERAND (cond, 2)
3367 = build2 (code, void_type_node, unshare_expr (result),
3368 TREE_OPERAND (cond, 2));
3369
3370 TREE_TYPE (cond) = void_type_node;
3371 recalculate_side_effects (cond);
3372
3373 if (want_value)
3374 {
3375 gimplify_and_add (cond, pre_p);
3376 *expr_p = unshare_expr (result);
3377 }
3378 else
3379 *expr_p = cond;
3380 return ret;
3381 }
3382 else
3383 ret = GS_UNHANDLED;
3384 break;
3385
3386 case CALL_EXPR:
3387 /* For calls that return in memory, give *to_p as the CALL_EXPR's
3388 return slot so that we don't generate a temporary. */
3389 if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p)
3390 && aggregate_value_p (*from_p, *from_p))
3391 {
3392 bool use_target;
3393
3394 if (!(rhs_predicate_for (*to_p))(*from_p))
3395 /* If we need a temporary, *to_p isn't accurate. */
3396 use_target = false;
3397 else if (TREE_CODE (*to_p) == RESULT_DECL
3398 && DECL_NAME (*to_p) == NULL_TREE
3399 && needs_to_live_in_memory (*to_p))
3400 /* It's OK to use the return slot directly unless it's an NRV. */
3401 use_target = true;
3402 else if (is_gimple_reg_type (TREE_TYPE (*to_p))
3403 || (DECL_P (*to_p) && DECL_REGISTER (*to_p)))
3404 /* Don't force regs into memory. */
3405 use_target = false;
3406 else if (TREE_CODE (*to_p) == VAR_DECL
3407 && DECL_GIMPLE_FORMAL_TEMP_P (*to_p))
3408 /* Don't use the original target if it's a formal temp; we
3409 don't want to take its address. */
3410 use_target = false;
3411 else if (TREE_CODE (*expr_p) == INIT_EXPR)
3412 /* It's OK to use the target directly if it's being
3413 initialized. */
3414 use_target = true;
3415 else if (!is_gimple_non_addressable (*to_p))
3416 /* Don't use the original target if it's already addressable;
3417 if its address escapes, and the called function uses the
3418 NRV optimization, a conforming program could see *to_p
3419 change before the called function returns; see c++/19317.
3420 When optimizing, the return_slot pass marks more functions
3421 as safe after we have escape info. */
3422 use_target = false;
3423 else
3424 use_target = true;
3425
3426 if (use_target)
3427 {
3428 CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1;
3429 lang_hooks.mark_addressable (*to_p);
3430 }
3431 }
3432
3433 ret = GS_UNHANDLED;
3434 break;
3435
3436 /* If we're initializing from a container, push the initialization
3437 inside it. */
3438 case CLEANUP_POINT_EXPR:
3439 case BIND_EXPR:
3440 case STATEMENT_LIST:
3441 {
3442 tree wrap = *from_p;
3443 tree t;
3444
3445 ret = gimplify_expr (to_p, pre_p, post_p,
3446 is_gimple_min_lval, fb_lvalue);
3447 if (ret != GS_ERROR)
3448 ret = GS_OK;
3449
3450 t = voidify_wrapper_expr (wrap, *expr_p);
3451 gcc_assert (t == *expr_p);
3452
3453 if (want_value)
3454 {
3455 gimplify_and_add (wrap, pre_p);
3456 *expr_p = unshare_expr (*to_p);
3457 }
3458 else
3459 *expr_p = wrap;
3460 return GS_OK;
3461 }
3462
3463 default:
3464 ret = GS_UNHANDLED;
3465 break;
3466 }
3467
3468 return ret;
3469 }
3470
3471 /* Promote partial stores to COMPLEX variables to total stores. *EXPR_P is
3472 a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a variable with
3473 DECL_COMPLEX_GIMPLE_REG_P set. */
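
 E.g. (sketch) for such a variable c, the partial store
 "__real__ c = r;" is rewritten as the total store
 "c = COMPLEX_EXPR <r, __imag__ c>;", with the old imaginary part
 read out through a temporary first.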
3474
3475 static enum gimplify_status
3476 gimplify_modify_expr_complex_part (tree *expr_p, tree *pre_p, bool want_value)
3477 {
3478 enum tree_code code, ocode;
3479 tree lhs, rhs, new_rhs, other, realpart, imagpart;
3480
3481 lhs = TREE_OPERAND (*expr_p, 0);
3482 rhs = TREE_OPERAND (*expr_p, 1);
3483 code = TREE_CODE (lhs);
3484 lhs = TREE_OPERAND (lhs, 0);
3485
3486 ocode = code == REALPART_EXPR ? IMAGPART_EXPR : REALPART_EXPR;
3487 other = build1 (ocode, TREE_TYPE (rhs), lhs);
3488 other = get_formal_tmp_var (other, pre_p);
3489
3490 realpart = code == REALPART_EXPR ? rhs : other;
3491 imagpart = code == REALPART_EXPR ? other : rhs;
3492
3493 if (TREE_CONSTANT (realpart) && TREE_CONSTANT (imagpart))
3494 new_rhs = build_complex (TREE_TYPE (lhs), realpart, imagpart);
3495 else
3496 new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart);
3497
3498 TREE_OPERAND (*expr_p, 0) = lhs;
3499 TREE_OPERAND (*expr_p, 1) = new_rhs;
3500
3501 if (want_value)
3502 {
3503 append_to_statement_list (*expr_p, pre_p);
3504 *expr_p = rhs;
3505 }
3506
3507 return GS_ALL_DONE;
3508 }
3509
3510 /* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.
3511
3512 modify_expr
3513 : varname '=' rhs
3514 | '*' ID '=' rhs
3515
3516 PRE_P points to the list where side effects that must happen before
3517 *EXPR_P should be stored.
3518
3519 POST_P points to the list where side effects that must happen after
3520 *EXPR_P should be stored.
3521
3522 WANT_VALUE is nonzero iff we want to use the value of this expression
3523 in another expression. */
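/* For instance (a sketch, names hypothetical), when WANT_VALUE is set the
   assignment is used inside another expression, e.g.

     use (x = foo ());

   and is gimplified to roughly

     x = foo ();
     use (x);

   i.e. the MODIFY_EXPR is emitted to PRE_P and *EXPR_P is replaced by the
   LHS so its value can be reused.  */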
3524
3525 static enum gimplify_status
3526 gimplify_modify_expr (tree *expr_p, tree *pre_p, tree *post_p, bool want_value)
3527 {
3528 tree *from_p = &TREE_OPERAND (*expr_p, 1);
3529 tree *to_p = &TREE_OPERAND (*expr_p, 0);
3530 enum gimplify_status ret = GS_UNHANDLED;
3531
3532 gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR
3533 || TREE_CODE (*expr_p) == INIT_EXPR);
3534
3535   /* For zero-sized types, only gimplify the left hand side and right hand side
3536 as statements and throw away the assignment. */
3537 if (zero_sized_type (TREE_TYPE (*from_p)))
3538 {
3539 gimplify_stmt (from_p);
3540 gimplify_stmt (to_p);
3541 append_to_statement_list (*from_p, pre_p);
3542 append_to_statement_list (*to_p, pre_p);
3543 *expr_p = NULL_TREE;
3544 return GS_ALL_DONE;
3545 }
3546
3547 /* See if any simplifications can be done based on what the RHS is. */
3548 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
3549 want_value);
3550 if (ret != GS_UNHANDLED)
3551 return ret;
3552
3553 /* If the value being copied is of variable width, compute the length
3554 of the copy into a WITH_SIZE_EXPR. Note that we need to do this
3555 before gimplifying any of the operands so that we can resolve any
3556 PLACEHOLDER_EXPRs in the size. Also note that the RTL expander uses
3557 the size of the expression to be copied, not of the destination, so
3558      that is what we must use here.  */
3559 maybe_with_size_expr (from_p);
3560
3561 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
3562 if (ret == GS_ERROR)
3563 return ret;
3564
3565 ret = gimplify_expr (from_p, pre_p, post_p,
3566 rhs_predicate_for (*to_p), fb_rvalue);
3567 if (ret == GS_ERROR)
3568 return ret;
3569
3570 /* Now see if the above changed *from_p to something we handle specially. */
3571 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
3572 want_value);
3573 if (ret != GS_UNHANDLED)
3574 return ret;
3575
3576   /* If we've got a variable-sized assignment between two lvalues (i.e. one that
3577      does not involve a call), then we can make things a bit more straightforward
3578 by converting the assignment to memcpy or memset. */
3579 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
3580 {
3581 tree from = TREE_OPERAND (*from_p, 0);
3582 tree size = TREE_OPERAND (*from_p, 1);
3583
3584 if (TREE_CODE (from) == CONSTRUCTOR)
3585 return gimplify_modify_expr_to_memset (expr_p, size, want_value);
3586 if (is_gimple_addressable (from))
3587 {
3588 *from_p = from;
3589 return gimplify_modify_expr_to_memcpy (expr_p, size, want_value);
3590 }
3591 }
3592
3593 /* Transform partial stores to non-addressable complex variables into
3594 total stores. This allows us to use real instead of virtual operands
3595 for these variables, which improves optimization. */
3596 if ((TREE_CODE (*to_p) == REALPART_EXPR
3597 || TREE_CODE (*to_p) == IMAGPART_EXPR)
3598 && is_gimple_reg (TREE_OPERAND (*to_p, 0)))
3599 return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value);
3600
3601 if (gimplify_ctxp->into_ssa && is_gimple_reg (*to_p))
3602 {
3603 /* If we've somehow already got an SSA_NAME on the LHS, then
3604 	 we've probably modified it twice.  Not good.  */
3605 gcc_assert (TREE_CODE (*to_p) != SSA_NAME);
3606 *to_p = make_ssa_name (*to_p, *expr_p);
3607 }
3608
3609 if (want_value)
3610 {
3611 append_to_statement_list (*expr_p, pre_p);
3612 *expr_p = *to_p;
3613 return GS_OK;
3614 }
3615
3616 return GS_ALL_DONE;
3617 }
3618
3619 /* Gimplify a comparison between two variable-sized objects. Do this
3620 with a call to BUILT_IN_MEMCMP. */
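/* Sketch of the transformation (names hypothetical): for two variable-length
   arrays a and b,

     a == b

   becomes approximately

     __builtin_memcmp (&a, &b, size_expr) == 0

   where size_expr is the TYPE_SIZE_UNIT of a's type with any
   PLACEHOLDER_EXPRs substituted from a.  */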
3621
3622 static enum gimplify_status
3623 gimplify_variable_sized_compare (tree *expr_p)
3624 {
3625 tree op0 = TREE_OPERAND (*expr_p, 0);
3626 tree op1 = TREE_OPERAND (*expr_p, 1);
3627 tree args, t, dest;
3628
3629 t = TYPE_SIZE_UNIT (TREE_TYPE (op0));
3630 t = unshare_expr (t);
3631 t = SUBSTITUTE_PLACEHOLDER_IN_EXPR (t, op0);
3632 args = tree_cons (NULL, t, NULL);
3633 t = build_fold_addr_expr (op1);
3634 args = tree_cons (NULL, t, args);
3635 dest = build_fold_addr_expr (op0);
3636 args = tree_cons (NULL, dest, args);
3637 t = implicit_built_in_decls[BUILT_IN_MEMCMP];
3638 t = build_function_call_expr (t, args);
3639 *expr_p
3640 = build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node);
3641
3642 return GS_OK;
3643 }
3644
3645 /* Gimplify a comparison between two aggregate objects of integral scalar
3646 mode as a comparison between the bitwise equivalent scalar values. */
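/* For example (illustrative only): comparing two 8-byte structs whose type
   has DImode is rewritten as roughly

     VIEW_CONVERT_EXPR<uint64> (x) == VIEW_CONVERT_EXPR<uint64> (y)

   i.e. a single scalar comparison of the bitwise images, where the integer
   type comes from type_for_mode on the aggregate's mode.  */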
3647
3648 static enum gimplify_status
3649 gimplify_scalar_mode_aggregate_compare (tree *expr_p)
3650 {
3651 tree op0 = TREE_OPERAND (*expr_p, 0);
3652 tree op1 = TREE_OPERAND (*expr_p, 1);
3653
3654 tree type = TREE_TYPE (op0);
3655 tree scalar_type = lang_hooks.types.type_for_mode (TYPE_MODE (type), 1);
3656
3657 op0 = fold_build1 (VIEW_CONVERT_EXPR, scalar_type, op0);
3658 op1 = fold_build1 (VIEW_CONVERT_EXPR, scalar_type, op1);
3659
3660 *expr_p
3661 = fold_build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), op0, op1);
3662
3663 return GS_OK;
3664 }
3665
3666 /* Gimplify TRUTH_ANDIF_EXPR and TRUTH_ORIF_EXPR expressions. EXPR_P
3667 points to the expression to gimplify.
3668
3669 Expressions of the form 'a && b' are gimplified to:
3670
3671 a && b ? true : false
3672
3673 gimplify_cond_expr will do the rest.
3674
3675 PRE_P points to the list where side effects that must happen before
3676 *EXPR_P should be stored. */
3677
3678 static enum gimplify_status
3679 gimplify_boolean_expr (tree *expr_p)
3680 {
3681 /* Preserve the original type of the expression. */
3682 tree type = TREE_TYPE (*expr_p);
3683
3684 *expr_p = build3 (COND_EXPR, type, *expr_p,
3685 fold_convert (type, boolean_true_node),
3686 fold_convert (type, boolean_false_node));
3687
3688 return GS_OK;
3689 }
3690
3691 /* Gimplifies an expression sequence. This function gimplifies each
3692 expression and re-writes the original expression with the last
3693 expression of the sequence in GIMPLE form.
3694
3695 PRE_P points to the list where the side effects for all the
3696 expressions in the sequence will be emitted.
3697
3698 WANT_VALUE is true when the result of the last COMPOUND_EXPR is used. */
3699 /* ??? Should rearrange to share the pre-queue with all the indirect
3700 invocations of gimplify_expr. Would probably save on creations
3701 of statement_list nodes. */
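/* For example: gimplifying the sequence (a = f (), b = g (), c) emits

     a = f ();
     b = g ();

   to PRE_P and leaves *EXPR_P pointing at c, which is then gimplified as a
   statement itself when WANT_VALUE is false.  */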
3702
3703 static enum gimplify_status
3704 gimplify_compound_expr (tree *expr_p, tree *pre_p, bool want_value)
3705 {
3706 tree t = *expr_p;
3707
3708 do
3709 {
3710 tree *sub_p = &TREE_OPERAND (t, 0);
3711
3712 if (TREE_CODE (*sub_p) == COMPOUND_EXPR)
3713 gimplify_compound_expr (sub_p, pre_p, false);
3714 else
3715 gimplify_stmt (sub_p);
3716 append_to_statement_list (*sub_p, pre_p);
3717
3718 t = TREE_OPERAND (t, 1);
3719 }
3720 while (TREE_CODE (t) == COMPOUND_EXPR);
3721
3722 *expr_p = t;
3723 if (want_value)
3724 return GS_OK;
3725 else
3726 {
3727 gimplify_stmt (expr_p);
3728 return GS_ALL_DONE;
3729 }
3730 }
3731
3732 /* Gimplifies a statement list. These may be created either by an
3733 enlightened front-end, or by shortcut_cond_expr. */
3734
3735 static enum gimplify_status
3736 gimplify_statement_list (tree *expr_p, tree *pre_p)
3737 {
3738 tree temp = voidify_wrapper_expr (*expr_p, NULL);
3739
3740 tree_stmt_iterator i = tsi_start (*expr_p);
3741
3742 while (!tsi_end_p (i))
3743 {
3744 tree t;
3745
3746 gimplify_stmt (tsi_stmt_ptr (i));
3747
3748 t = tsi_stmt (i);
3749 if (t == NULL)
3750 tsi_delink (&i);
3751 else if (TREE_CODE (t) == STATEMENT_LIST)
3752 {
3753 tsi_link_before (&i, t, TSI_SAME_STMT);
3754 tsi_delink (&i);
3755 }
3756 else
3757 tsi_next (&i);
3758 }
3759
3760 if (temp)
3761 {
3762 append_to_statement_list (*expr_p, pre_p);
3763 *expr_p = temp;
3764 return GS_OK;
3765 }
3766
3767 return GS_ALL_DONE;
3768 }
3769
3770 /* Gimplify a SAVE_EXPR node. EXPR_P points to the expression to
3771 gimplify. After gimplification, EXPR_P will point to a new temporary
3772 that holds the original value of the SAVE_EXPR node.
3773
3774 PRE_P points to the list where side effects that must happen before
3775 *EXPR_P should be stored. */
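/* Roughly (temporary name hypothetical): the first gimplification of
   SAVE_EXPR <e> emits

     D.1234 = e;

   marks the node resolved, and every later occurrence of the same SAVE_EXPR
   then simply reads D.1234, so e is evaluated exactly once.  */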
3776
3777 static enum gimplify_status
3778 gimplify_save_expr (tree *expr_p, tree *pre_p, tree *post_p)
3779 {
3780 enum gimplify_status ret = GS_ALL_DONE;
3781 tree val;
3782
3783 gcc_assert (TREE_CODE (*expr_p) == SAVE_EXPR);
3784 val = TREE_OPERAND (*expr_p, 0);
3785
3786 /* If the SAVE_EXPR has not been resolved, then evaluate it once. */
3787 if (!SAVE_EXPR_RESOLVED_P (*expr_p))
3788 {
3789 /* The operand may be a void-valued expression such as SAVE_EXPRs
3790 generated by the Java frontend for class initialization. It is
3791 being executed only for its side-effects. */
3792 if (TREE_TYPE (val) == void_type_node)
3793 {
3794 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
3795 is_gimple_stmt, fb_none);
3796 append_to_statement_list (TREE_OPERAND (*expr_p, 0), pre_p);
3797 val = NULL;
3798 }
3799 else
3800 val = get_initialized_tmp_var (val, pre_p, post_p);
3801
3802 TREE_OPERAND (*expr_p, 0) = val;
3803 SAVE_EXPR_RESOLVED_P (*expr_p) = 1;
3804 }
3805
3806 *expr_p = val;
3807
3808 return ret;
3809 }
3810
3811 /* Re-write the ADDR_EXPR node pointed to by EXPR_P
3812
3813 unary_expr
3814 : ...
3815 | '&' varname
3816 ...
3817
3818 PRE_P points to the list where side effects that must happen before
3819 *EXPR_P should be stored.
3820
3821 POST_P points to the list where side effects that must happen after
3822 *EXPR_P should be stored. */
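/* Two common cases handled below, as a sketch:

     &*p                       becomes  p  (modulo a conversion that restores
                                            any dropped cv-qualifiers)
     &VIEW_CONVERT_EXPR<T>(x)  becomes  &x converted to the ADDR_EXPR's type

   Anything else has its operand gimplified to something addressable and is
   then marked addressable.  */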
3823
3824 static enum gimplify_status
3825 gimplify_addr_expr (tree *expr_p, tree *pre_p, tree *post_p)
3826 {
3827 tree expr = *expr_p;
3828 tree op0 = TREE_OPERAND (expr, 0);
3829 enum gimplify_status ret;
3830
3831 switch (TREE_CODE (op0))
3832 {
3833 case INDIRECT_REF:
3834 case MISALIGNED_INDIRECT_REF:
3835 do_indirect_ref:
3836 /* Check if we are dealing with an expression of the form '&*ptr'.
3837 While the front end folds away '&*ptr' into 'ptr', these
3838 expressions may be generated internally by the compiler (e.g.,
3839 builtins like __builtin_va_end). */
3840 /* Caution: the silent array decomposition semantics we allow for
3841 ADDR_EXPR means we can't always discard the pair. */
3842 /* Gimplification of the ADDR_EXPR operand may drop
3843 cv-qualification conversions, so make sure we add them if
3844 needed. */
3845 {
3846 tree op00 = TREE_OPERAND (op0, 0);
3847 tree t_expr = TREE_TYPE (expr);
3848 tree t_op00 = TREE_TYPE (op00);
3849
3850 if (!lang_hooks.types_compatible_p (t_expr, t_op00))
3851 {
3852 #ifdef ENABLE_CHECKING
3853 tree t_op0 = TREE_TYPE (op0);
3854 gcc_assert (POINTER_TYPE_P (t_expr)
3855 && cpt_same_type (TREE_CODE (t_op0) == ARRAY_TYPE
3856 ? TREE_TYPE (t_op0) : t_op0,
3857 TREE_TYPE (t_expr))
3858 && POINTER_TYPE_P (t_op00)
3859 && cpt_same_type (t_op0, TREE_TYPE (t_op00)));
3860 #endif
3861 op00 = fold_convert (TREE_TYPE (expr), op00);
3862 }
3863 *expr_p = op00;
3864 ret = GS_OK;
3865 }
3866 break;
3867
3868 case VIEW_CONVERT_EXPR:
3869 /* Take the address of our operand and then convert it to the type of
3870 this ADDR_EXPR.
3871
3872 ??? The interactions of VIEW_CONVERT_EXPR and aliasing is not at
3873 all clear. The impact of this transformation is even less clear. */
3874
3875 /* If the operand is a useless conversion, look through it. Doing so
3876 guarantees that the ADDR_EXPR and its operand will remain of the
3877 same type. */
3878 if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0, 0)))
3879 op0 = TREE_OPERAND (op0, 0);
3880
3881 *expr_p = fold_convert (TREE_TYPE (expr),
3882 build_fold_addr_expr (TREE_OPERAND (op0, 0)));
3883 ret = GS_OK;
3884 break;
3885
3886 default:
3887 /* We use fb_either here because the C frontend sometimes takes
3888 the address of a call that returns a struct; see
3889 gcc.dg/c99-array-lval-1.c. The gimplifier will correctly make
3890 the implied temporary explicit. */
3891 ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, post_p,
3892 is_gimple_addressable, fb_either);
3893 if (ret != GS_ERROR)
3894 {
3895 op0 = TREE_OPERAND (expr, 0);
3896
3897 /* For various reasons, the gimplification of the expression
3898 may have made a new INDIRECT_REF. */
3899 if (TREE_CODE (op0) == INDIRECT_REF)
3900 goto do_indirect_ref;
3901
3902 /* Make sure TREE_INVARIANT, TREE_CONSTANT, and TREE_SIDE_EFFECTS
3903 is set properly. */
3904 recompute_tree_invariant_for_addr_expr (expr);
3905
3906 /* Mark the RHS addressable. */
3907 lang_hooks.mark_addressable (TREE_OPERAND (expr, 0));
3908 }
3909 break;
3910 }
3911
3912 return ret;
3913 }
3914
3915 /* Gimplify the operands of an ASM_EXPR. Input operands should be a gimple
3916 value; output operands should be a gimple lvalue. */
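/* As an illustration of the in/out splitting done below (hypothetical
   operand): an operand written as

     "+r" (x)

   is rewritten as the output "=r" (x) plus a new matching input whose
   constraint is the output's operand number (e.g. "0" (x) for the first
   operand), giving the optimizers separate input and output operands.  */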
3917
3918 static enum gimplify_status
3919 gimplify_asm_expr (tree *expr_p, tree *pre_p, tree *post_p)
3920 {
3921 tree expr = *expr_p;
3922 int noutputs = list_length (ASM_OUTPUTS (expr));
3923 const char **oconstraints
3924 = (const char **) alloca ((noutputs) * sizeof (const char *));
3925 int i;
3926 tree link;
3927 const char *constraint;
3928 bool allows_mem, allows_reg, is_inout;
3929 enum gimplify_status ret, tret;
3930
3931 ret = GS_ALL_DONE;
3932 for (i = 0, link = ASM_OUTPUTS (expr); link; ++i, link = TREE_CHAIN (link))
3933 {
3934 size_t constraint_len;
3935 oconstraints[i] = constraint
3936 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
3937 constraint_len = strlen (constraint);
3938 if (constraint_len == 0)
3939 continue;
3940
3941 parse_output_constraint (&constraint, i, 0, 0,
3942 &allows_mem, &allows_reg, &is_inout);
3943
3944 if (!allows_reg && allows_mem)
3945 lang_hooks.mark_addressable (TREE_VALUE (link));
3946
3947 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
3948 is_inout ? is_gimple_min_lval : is_gimple_lvalue,
3949 fb_lvalue | fb_mayfail);
3950 if (tret == GS_ERROR)
3951 {
3952 error ("invalid lvalue in asm output %d", i);
3953 ret = tret;
3954 }
3955
3956 if (is_inout)
3957 {
3958 /* An input/output operand. To give the optimizers more
3959 flexibility, split it into separate input and output
3960 operands. */
3961 tree input;
3962 char buf[10];
3963
3964 /* Turn the in/out constraint into an output constraint. */
3965 char *p = xstrdup (constraint);
3966 p[0] = '=';
3967 TREE_VALUE (TREE_PURPOSE (link)) = build_string (constraint_len, p);
3968
3969 /* And add a matching input constraint. */
3970 if (allows_reg)
3971 {
3972 sprintf (buf, "%d", i);
3973
3974 /* If there are multiple alternatives in the constraint,
3975 		 handle each of them individually.  Those that allow a register
3976 		 will be replaced with the operand number; the others will stay
3977 		 unchanged.  */
3978 if (strchr (p, ',') != NULL)
3979 {
3980 size_t len = 0, buflen = strlen (buf);
3981 char *beg, *end, *str, *dst;
3982
3983 for (beg = p + 1;;)
3984 {
3985 end = strchr (beg, ',');
3986 if (end == NULL)
3987 end = strchr (beg, '\0');
3988 if ((size_t) (end - beg) < buflen)
3989 len += buflen + 1;
3990 else
3991 len += end - beg + 1;
3992 if (*end)
3993 beg = end + 1;
3994 else
3995 break;
3996 }
3997
3998 str = (char *) alloca (len);
3999 for (beg = p + 1, dst = str;;)
4000 {
4001 const char *tem;
4002 bool mem_p, reg_p, inout_p;
4003
4004 end = strchr (beg, ',');
4005 if (end)
4006 *end = '\0';
4007 beg[-1] = '=';
4008 tem = beg - 1;
4009 parse_output_constraint (&tem, i, 0, 0,
4010 					   &mem_p, &reg_p, &inout_p);
4011 if (dst != str)
4012 *dst++ = ',';
4013 if (reg_p)
4014 {
4015 memcpy (dst, buf, buflen);
4016 dst += buflen;
4017 }
4018 else
4019 {
4020 if (end)
4021 len = end - beg;
4022 else
4023 len = strlen (beg);
4024 memcpy (dst, beg, len);
4025 dst += len;
4026 }
4027 if (end)
4028 beg = end + 1;
4029 else
4030 break;
4031 }
4032 *dst = '\0';
4033 input = build_string (dst - str, str);
4034 }
4035 else
4036 input = build_string (strlen (buf), buf);
4037 }
4038 else
4039 input = build_string (constraint_len - 1, constraint + 1);
4040
4041 free (p);
4042
4043 input = build_tree_list (build_tree_list (NULL_TREE, input),
4044 unshare_expr (TREE_VALUE (link)));
4045 ASM_INPUTS (expr) = chainon (ASM_INPUTS (expr), input);
4046 }
4047 }
4048
4049 for (link = ASM_INPUTS (expr); link; ++i, link = TREE_CHAIN (link))
4050 {
4051 constraint
4052 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
4053 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
4054 oconstraints, &allows_mem, &allows_reg);
4055
4056 /* If we can't make copies, we can only accept memory. */
4057 if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (link))))
4058 {
4059 if (allows_mem)
4060 allows_reg = 0;
4061 else
4062 {
4063 error ("impossible constraint in %<asm%>");
4064 error ("non-memory input %d must stay in memory", i);
4065 return GS_ERROR;
4066 }
4067 }
4068
4069 /* If the operand is a memory input, it should be an lvalue. */
4070 if (!allows_reg && allows_mem)
4071 {
4072 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
4073 is_gimple_lvalue, fb_lvalue | fb_mayfail);
4074 lang_hooks.mark_addressable (TREE_VALUE (link));
4075 if (tret == GS_ERROR)
4076 {
4077 error ("memory input %d is not directly addressable", i);
4078 ret = tret;
4079 }
4080 }
4081 else
4082 {
4083 tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
4084 is_gimple_asm_val, fb_rvalue);
4085 if (tret == GS_ERROR)
4086 ret = tret;
4087 }
4088 }
4089
4090 return ret;
4091 }
4092
4093 /* Gimplify a CLEANUP_POINT_EXPR. Currently this works by adding
4094 WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
4095 gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
4096 return to this function.
4097
4098 FIXME should we complexify the prequeue handling instead? Or use flags
4099 for all the cleanups and let the optimizer tighten them up? The current
4100 code seems pretty fragile; it will break on a cleanup within any
4101 non-conditional nesting. But any such nesting would be broken, anyway;
4102 we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
4103 and continues out of it. We can do that at the RTL level, though, so
4104 having an optimizer to tighten up try/finally regions would be a Good
4105 Thing. */
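/* The basic rewrite, sketched: a body gimplified to

     stmt1;
     WITH_CLEANUP_EXPR <cleanup>;
     stmt2;

   becomes

     stmt1;
     try { stmt2; } finally { cleanup; }

   (or a TRY_CATCH_EXPR when CLEANUP_EH_ONLY is set).  A cleanup marker that
   is the last statement is simply replaced by the cleanup action itself.  */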
4106
4107 static enum gimplify_status
4108 gimplify_cleanup_point_expr (tree *expr_p, tree *pre_p)
4109 {
4110 tree_stmt_iterator iter;
4111 tree body;
4112
4113 tree temp = voidify_wrapper_expr (*expr_p, NULL);
4114
4115 /* We only care about the number of conditions between the innermost
4116 CLEANUP_POINT_EXPR and the cleanup. So save and reset the count and
4117 any cleanups collected outside the CLEANUP_POINT_EXPR. */
4118 int old_conds = gimplify_ctxp->conditions;
4119 tree old_cleanups = gimplify_ctxp->conditional_cleanups;
4120 gimplify_ctxp->conditions = 0;
4121 gimplify_ctxp->conditional_cleanups = NULL_TREE;
4122
4123 body = TREE_OPERAND (*expr_p, 0);
4124 gimplify_to_stmt_list (&body);
4125
4126 gimplify_ctxp->conditions = old_conds;
4127 gimplify_ctxp->conditional_cleanups = old_cleanups;
4128
4129 for (iter = tsi_start (body); !tsi_end_p (iter); )
4130 {
4131 tree *wce_p = tsi_stmt_ptr (iter);
4132 tree wce = *wce_p;
4133
4134 if (TREE_CODE (wce) == WITH_CLEANUP_EXPR)
4135 {
4136 if (tsi_one_before_end_p (iter))
4137 {
4138 tsi_link_before (&iter, TREE_OPERAND (wce, 0), TSI_SAME_STMT);
4139 tsi_delink (&iter);
4140 break;
4141 }
4142 else
4143 {
4144 tree sl, tfe;
4145 enum tree_code code;
4146
4147 if (CLEANUP_EH_ONLY (wce))
4148 code = TRY_CATCH_EXPR;
4149 else
4150 code = TRY_FINALLY_EXPR;
4151
4152 sl = tsi_split_statement_list_after (&iter);
4153 tfe = build2 (code, void_type_node, sl, NULL_TREE);
4154 append_to_statement_list (TREE_OPERAND (wce, 0),
4155 &TREE_OPERAND (tfe, 1));
4156 *wce_p = tfe;
4157 iter = tsi_start (sl);
4158 }
4159 }
4160 else
4161 tsi_next (&iter);
4162 }
4163
4164 if (temp)
4165 {
4166 *expr_p = temp;
4167 append_to_statement_list (body, pre_p);
4168 return GS_OK;
4169 }
4170 else
4171 {
4172 *expr_p = body;
4173 return GS_ALL_DONE;
4174 }
4175 }
4176
4177 /* Insert a cleanup marker for gimplify_cleanup_point_expr. CLEANUP
4178 is the cleanup action required. */
4179
4180 static void
4181 gimple_push_cleanup (tree var, tree cleanup, bool eh_only, tree *pre_p)
4182 {
4183 tree wce;
4184
4185   /* Errors can result in improperly nested cleanups, which results in
4186 confusion when trying to resolve the WITH_CLEANUP_EXPR. */
4187 if (errorcount || sorrycount)
4188 return;
4189
4190 if (gimple_conditional_context ())
4191 {
4192 /* If we're in a conditional context, this is more complex. We only
4193 want to run the cleanup if we actually ran the initialization that
4194 necessitates it, but we want to run it after the end of the
4195 conditional context. So we wrap the try/finally around the
4196 condition and use a flag to determine whether or not to actually
4197 run the destructor. Thus
4198
4199 test ? f(A()) : 0
4200
4201 becomes (approximately)
4202
4203 flag = 0;
4204 try {
4205 if (test) { A::A(temp); flag = 1; val = f(temp); }
4206 else { val = 0; }
4207 } finally {
4208 if (flag) A::~A(temp);
4209 }
4210 val
4211 */
4212
4213 tree flag = create_tmp_var (boolean_type_node, "cleanup");
4214 tree ffalse = build2 (MODIFY_EXPR, void_type_node, flag,
4215 boolean_false_node);
4216 tree ftrue = build2 (MODIFY_EXPR, void_type_node, flag,
4217 boolean_true_node);
4218 cleanup = build3 (COND_EXPR, void_type_node, flag, cleanup, NULL);
4219 wce = build1 (WITH_CLEANUP_EXPR, void_type_node, cleanup);
4220 append_to_statement_list (ffalse, &gimplify_ctxp->conditional_cleanups);
4221 append_to_statement_list (wce, &gimplify_ctxp->conditional_cleanups);
4222 append_to_statement_list (ftrue, pre_p);
4223
4224 /* Because of this manipulation, and the EH edges that jump
4225 threading cannot redirect, the temporary (VAR) will appear
4226 to be used uninitialized. Don't warn. */
4227 TREE_NO_WARNING (var) = 1;
4228 }
4229 else
4230 {
4231 wce = build1 (WITH_CLEANUP_EXPR, void_type_node, cleanup);
4232 CLEANUP_EH_ONLY (wce) = eh_only;
4233 append_to_statement_list (wce, pre_p);
4234 }
4235
4236 gimplify_stmt (&TREE_OPERAND (wce, 0));
4237 }
4238
4239 /* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR. */
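/* A sketch of the rewrite (slot name hypothetical): the expression
   TARGET_EXPR <D.1234, init> appearing in statement context becomes

     D.1234 = init;    (an INIT_EXPR; or just evaluating init if it is void)

   emitted to PRE_P, with a cleanup pushed for D.1234 if TARGET_EXPR_CLEANUP
   is set, and *EXPR_P is replaced by the slot D.1234.  */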
4240
4241 static enum gimplify_status
4242 gimplify_target_expr (tree *expr_p, tree *pre_p, tree *post_p)
4243 {
4244 tree targ = *expr_p;
4245 tree temp = TARGET_EXPR_SLOT (targ);
4246 tree init = TARGET_EXPR_INITIAL (targ);
4247 enum gimplify_status ret;
4248
4249 if (init)
4250 {
4251       /* TARGET_EXPR temps aren't part of the enclosing block, so add this one
4252 to the temps list. */
4253 gimple_add_tmp_var (temp);
4254
4255 /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
4256 expression is supposed to initialize the slot. */
4257 if (VOID_TYPE_P (TREE_TYPE (init)))
4258 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
4259 else
4260 {
4261 init = build2 (INIT_EXPR, void_type_node, temp, init);
4262 ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt,
4263 fb_none);
4264 }
4265 if (ret == GS_ERROR)
4266 {
4267 /* PR c++/28266 Make sure this is expanded only once. */
4268 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
4269 return GS_ERROR;
4270 }
4271 append_to_statement_list (init, pre_p);
4272
4273 /* If needed, push the cleanup for the temp. */
4274 if (TARGET_EXPR_CLEANUP (targ))
4275 {
4276 gimplify_stmt (&TARGET_EXPR_CLEANUP (targ));
4277 gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ),
4278 CLEANUP_EH_ONLY (targ), pre_p);
4279 }
4280
4281 /* Only expand this once. */
4282 TREE_OPERAND (targ, 3) = init;
4283 TARGET_EXPR_INITIAL (targ) = NULL_TREE;
4284 }
4285 else
4286 /* We should have expanded this before. */
4287 gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp));
4288
4289 *expr_p = temp;
4290 return GS_OK;
4291 }
4292
4293 /* Gimplification of expression trees. */
4294
4295 /* Gimplify an expression which appears at statement context; usually, this
4296 means replacing it with a suitably gimple STATEMENT_LIST. */
4297
4298 void
4299 gimplify_stmt (tree *stmt_p)
4300 {
4301 gimplify_expr (stmt_p, NULL, NULL, is_gimple_stmt, fb_none);
4302 }
4303
4304 /* Similarly, but force the result to be a STATEMENT_LIST. */
4305
4306 void
4307 gimplify_to_stmt_list (tree *stmt_p)
4308 {
4309 gimplify_stmt (stmt_p);
4310 if (!*stmt_p)
4311 *stmt_p = alloc_stmt_list ();
4312 else if (TREE_CODE (*stmt_p) != STATEMENT_LIST)
4313 {
4314 tree t = *stmt_p;
4315 *stmt_p = alloc_stmt_list ();
4316 append_to_statement_list (t, stmt_p);
4317 }
4318 }
4319
4320
4321 /* Add FIRSTPRIVATE entries for DECL to CTX and to the OpenMP parallel
4322    contexts that enclose it.  If entries already exist, force them to be some flavor of private.
4323 If there is no enclosing parallel, do nothing. */
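/* Typical use, sketched: for a variable-length array

     int vla[n];

   inside "#pragma omp parallel", the size expression references n, so n is
   entered as firstprivate in the enclosing parallel contexts (unless it
   already has an explicit, non-shared data-sharing class there).  */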
4324
4325 void
4326 omp_firstprivatize_variable (struct gimplify_omp_ctx *ctx, tree decl)
4327 {
4328 splay_tree_node n;
4329
4330 if (decl == NULL || !DECL_P (decl))
4331 return;
4332
4333 do
4334 {
4335 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
4336 if (n != NULL)
4337 {
4338 if (n->value & GOVD_SHARED)
4339 n->value = GOVD_FIRSTPRIVATE | (n->value & GOVD_SEEN);
4340 else
4341 return;
4342 }
4343 else if (ctx->is_parallel)
4344 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
4345
4346 ctx = ctx->outer_context;
4347 }
4348 while (ctx);
4349 }
4350
4351 /* Similarly for each of the type sizes of TYPE. */
4352
4353 static void
4354 omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
4355 {
4356 if (type == NULL || type == error_mark_node)
4357 return;
4358 type = TYPE_MAIN_VARIANT (type);
4359
4360 if (pointer_set_insert (ctx->privatized_types, type))
4361 return;
4362
4363 switch (TREE_CODE (type))
4364 {
4365 case INTEGER_TYPE:
4366 case ENUMERAL_TYPE:
4367 case BOOLEAN_TYPE:
4368 case REAL_TYPE:
4369 omp_firstprivatize_variable (ctx, TYPE_MIN_VALUE (type));
4370 omp_firstprivatize_variable (ctx, TYPE_MAX_VALUE (type));
4371 break;
4372
4373 case ARRAY_TYPE:
4374 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
4375 omp_firstprivatize_type_sizes (ctx, TYPE_DOMAIN (type));
4376 break;
4377
4378 case RECORD_TYPE:
4379 case UNION_TYPE:
4380 case QUAL_UNION_TYPE:
4381 {
4382 tree field;
4383 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
4384 if (TREE_CODE (field) == FIELD_DECL)
4385 {
4386 omp_firstprivatize_variable (ctx, DECL_FIELD_OFFSET (field));
4387 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (field));
4388 }
4389 }
4390 break;
4391
4392 case POINTER_TYPE:
4393 case REFERENCE_TYPE:
4394 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
4395 break;
4396
4397 default:
4398 break;
4399 }
4400
4401 omp_firstprivatize_variable (ctx, TYPE_SIZE (type));
4402 omp_firstprivatize_variable (ctx, TYPE_SIZE_UNIT (type));
4403 lang_hooks.types.omp_firstprivatize_type_sizes (ctx, type);
4404 }
4405
4406 /* Add an entry for DECL in the OpenMP context CTX with FLAGS. */
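/* For a variable-sized DECL, a sketch of what happens below: the decl's
   DECL_VALUE_EXPR has the form *ptr, so the pointer ptr is mapped as well
   (PRIVATE or FIRSTPRIVATE), the size expressions are firstprivatized, and
   the decl itself is forced to some flavor of PRIVATE, since any sharing
   happens through ptr.  */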
4407
4408 static void
4409 omp_add_variable (struct gimplify_omp_ctx *ctx, tree decl, unsigned int flags)
4410 {
4411 splay_tree_node n;
4412 unsigned int nflags;
4413 tree t;
4414
4415 if (decl == error_mark_node || TREE_TYPE (decl) == error_mark_node)
4416 return;
4417
4418 /* Never elide decls whose type has TREE_ADDRESSABLE set. This means
4419 there are constructors involved somewhere. */
4420 if (TREE_ADDRESSABLE (TREE_TYPE (decl))
4421 || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl)))
4422 flags |= GOVD_SEEN;
4423
4424 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
4425 if (n != NULL)
4426 {
4427 /* We shouldn't be re-adding the decl with the same data
4428 sharing class. */
4429 gcc_assert ((n->value & GOVD_DATA_SHARE_CLASS & flags) == 0);
4430 /* The only combination of data sharing classes we should see is
4431 FIRSTPRIVATE and LASTPRIVATE. */
4432 nflags = n->value | flags;
4433 gcc_assert ((nflags & GOVD_DATA_SHARE_CLASS)
4434 == (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE));
4435 n->value = nflags;
4436 return;
4437 }
4438
4439 /* When adding a variable-sized variable, we have to handle all sorts
4440 of additional bits of data: the pointer replacement variable, and
4441 the parameters of the type. */
4442 if (DECL_SIZE (decl) && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
4443 {
4444 /* Add the pointer replacement variable as PRIVATE if the variable
4445 replacement is private, else FIRSTPRIVATE since we'll need the
4446 address of the original variable either for SHARED, or for the
4447 copy into or out of the context. */
4448 if (!(flags & GOVD_LOCAL))
4449 {
4450 nflags = flags & GOVD_PRIVATE ? GOVD_PRIVATE : GOVD_FIRSTPRIVATE;
4451 nflags |= flags & GOVD_SEEN;
4452 t = DECL_VALUE_EXPR (decl);
4453 gcc_assert (TREE_CODE (t) == INDIRECT_REF);
4454 t = TREE_OPERAND (t, 0);
4455 gcc_assert (DECL_P (t));
4456 omp_add_variable (ctx, t, nflags);
4457 }
4458
4459 /* Add all of the variable and type parameters (which should have
4460 been gimplified to a formal temporary) as FIRSTPRIVATE. */
4461 omp_firstprivatize_variable (ctx, DECL_SIZE_UNIT (decl));
4462 omp_firstprivatize_variable (ctx, DECL_SIZE (decl));
4463 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
4464
4465 /* The variable-sized variable itself is never SHARED, only some form
4466 of PRIVATE. The sharing would take place via the pointer variable
4467 which we remapped above. */
4468 if (flags & GOVD_SHARED)
4469 flags = GOVD_PRIVATE | GOVD_DEBUG_PRIVATE
4470 | (flags & (GOVD_SEEN | GOVD_EXPLICIT));
4471
4472 /* We're going to make use of the TYPE_SIZE_UNIT at least in the
4473 alloca statement we generate for the variable, so make sure it
4474 is available. This isn't automatically needed for the SHARED
4475 case, since we won't be allocating local storage then.
4476 	 For local variables TYPE_SIZE_UNIT might not be gimplified yet;
4477 in this case omp_notice_variable will be called later
4478 on when it is gimplified. */
4479 else if (! (flags & GOVD_LOCAL))
4480 omp_notice_variable (ctx, TYPE_SIZE_UNIT (TREE_TYPE (decl)), true);
4481 }
4482 else if (lang_hooks.decls.omp_privatize_by_reference (decl))
4483 {
4484 gcc_assert ((flags & GOVD_LOCAL) == 0);
4485 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));
4486
4487 /* Similar to the direct variable sized case above, we'll need the
4488 size of references being privatized. */
4489 if ((flags & GOVD_SHARED) == 0)
4490 {
4491 t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
4492 if (TREE_CODE (t) != INTEGER_CST)
4493 omp_notice_variable (ctx, t, true);
4494 }
4495 }
4496
4497 splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags);
4498 }
4499
4500 /* Record the fact that DECL was used within the OpenMP context CTX.
4501 IN_CODE is true when real code uses DECL, and false when we should
4502 merely emit default(none) errors. Return true if DECL is going to
4503 be remapped and thus DECL shouldn't be gimplified into its
4504 DECL_VALUE_EXPR (if any). */
4505
4506 static bool
4507 omp_notice_variable (struct gimplify_omp_ctx *ctx, tree decl, bool in_code)
4508 {
4509 splay_tree_node n;
4510 unsigned flags = in_code ? GOVD_SEEN : 0;
4511 bool ret = false, shared;
4512
4513 if (decl == error_mark_node || TREE_TYPE (decl) == error_mark_node)
4514 return false;
4515
4516 /* Threadprivate variables are predetermined. */
4517 if (is_global_var (decl))
4518 {
4519 if (DECL_THREAD_LOCAL_P (decl))
4520 return false;
4521
4522 if (DECL_HAS_VALUE_EXPR_P (decl))
4523 {
4524 tree value = get_base_address (DECL_VALUE_EXPR (decl));
4525
4526 if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value))
4527 return false;
4528 }
4529 }
4530
4531 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
4532 if (n == NULL)
4533 {
4534 enum omp_clause_default_kind default_kind, kind;
4535
4536 if (!ctx->is_parallel)
4537 goto do_outer;
4538
4539 /* ??? Some compiler-generated variables (like SAVE_EXPRs) could be
4540 remapped firstprivate instead of shared. To some extent this is
4541 addressed in omp_firstprivatize_type_sizes, but not effectively. */
4542 default_kind = ctx->default_kind;
4543 kind = lang_hooks.decls.omp_predetermined_sharing (decl);
4544 if (kind != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
4545 default_kind = kind;
4546
4547 switch (default_kind)
4548 {
4549 case OMP_CLAUSE_DEFAULT_NONE:
4550 error ("%qs not specified in enclosing parallel",
4551 IDENTIFIER_POINTER (DECL_NAME (decl)));
4552 error ("%Henclosing parallel", &ctx->location);
4553 /* FALLTHRU */
4554 case OMP_CLAUSE_DEFAULT_SHARED:
4555 flags |= GOVD_SHARED;
4556 break;
4557 case OMP_CLAUSE_DEFAULT_PRIVATE:
4558 flags |= GOVD_PRIVATE;
4559 break;
4560 default:
4561 gcc_unreachable ();
4562 }
4563
4564 omp_add_variable (ctx, decl, flags);
4565
4566 shared = (flags & GOVD_SHARED) != 0;
4567 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
4568 goto do_outer;
4569 }
4570
4571 shared = ((flags | n->value) & GOVD_SHARED) != 0;
4572 ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
4573
4574 /* If nothing changed, there's nothing left to do. */
4575 if ((n->value & flags) == flags)
4576 return ret;
4577 flags |= n->value;
4578 n->value = flags;
4579
4580 do_outer:
4581 /* If the variable is private in the current context, then we don't
4582 need to propagate anything to an outer context. */
4583 if (flags & GOVD_PRIVATE)
4584 return ret;
4585 if (ctx->outer_context
4586 && omp_notice_variable (ctx->outer_context, decl, in_code))
4587 return true;
4588 return ret;
4589 }
4590
4591 /* Verify that DECL is private within CTX. If there's specific information
4592 to the contrary in the innermost scope, generate an error. */
4593
4594 static bool
4595 omp_is_private (struct gimplify_omp_ctx *ctx, tree decl)
4596 {
4597 splay_tree_node n;
4598
4599 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
4600 if (n != NULL)
4601 {
4602 if (n->value & GOVD_SHARED)
4603 {
4604 if (ctx == gimplify_omp_ctxp)
4605 {
4606 error ("iteration variable %qs should be private",
4607 IDENTIFIER_POINTER (DECL_NAME (decl)));
4608 n->value = GOVD_PRIVATE;
4609 return true;
4610 }
4611 else
4612 return false;
4613 }
4614 else if ((n->value & GOVD_EXPLICIT) != 0
4615 && (ctx == gimplify_omp_ctxp
4616 || (ctx->is_combined_parallel
4617 && gimplify_omp_ctxp->outer_context == ctx)))
4618 {
4619 if ((n->value & GOVD_FIRSTPRIVATE) != 0)
4620 error ("iteration variable %qs should not be firstprivate",
4621 IDENTIFIER_POINTER (DECL_NAME (decl)));
4622 else if ((n->value & GOVD_REDUCTION) != 0)
4623 error ("iteration variable %qs should not be reduction",
4624 IDENTIFIER_POINTER (DECL_NAME (decl)));
4625 }
4626 return true;
4627 }
4628
4629 if (ctx->is_parallel)
4630 return false;
4631 else if (ctx->outer_context)
4632 return omp_is_private (ctx->outer_context, decl);
4633 else
4634 return !is_global_var (decl);
4635 }
4636
4637 /* Return true if DECL is private within a parallel region
4638    that binds to the current construct's context, or appears in that
4639    parallel region's REDUCTION clause.  */
4640
4641 static bool
4642 omp_check_private (struct gimplify_omp_ctx *ctx, tree decl)
4643 {
4644 splay_tree_node n;
4645
4646 do
4647 {
4648 ctx = ctx->outer_context;
4649 if (ctx == NULL)
4650 return !(is_global_var (decl)
4651 /* References might be private, but might be shared too. */
4652 || lang_hooks.decls.omp_privatize_by_reference (decl));
4653
4654 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
4655 if (n != NULL)
4656 return (n->value & GOVD_SHARED) == 0;
4657 }
4658 while (!ctx->is_parallel);
4659 return false;
4660 }
4661
4662 /* Scan the OpenMP clauses in *LIST_P, installing mappings into a newly
4663    created omp context and into the enclosing omp contexts.  */
4664
4665 static void
4666 gimplify_scan_omp_clauses (tree *list_p, tree *pre_p, bool in_parallel,
4667 bool in_combined_parallel)
4668 {
4669 struct gimplify_omp_ctx *ctx, *outer_ctx;
4670 tree c;
4671
4672 ctx = new_omp_context (in_parallel, in_combined_parallel);
4673 outer_ctx = ctx->outer_context;
4674
4675 while ((c = *list_p) != NULL)
4676 {
4677 enum gimplify_status gs;
4678 bool remove = false;
4679 bool notice_outer = true;
4680 const char *check_non_private = NULL;
4681 unsigned int flags;
4682 tree decl;
4683
4684 switch (OMP_CLAUSE_CODE (c))
4685 {
4686 case OMP_CLAUSE_PRIVATE:
4687 flags = GOVD_PRIVATE | GOVD_EXPLICIT;
4688 notice_outer = false;
4689 goto do_add;
4690 case OMP_CLAUSE_SHARED:
4691 flags = GOVD_SHARED | GOVD_EXPLICIT;
4692 goto do_add;
4693 case OMP_CLAUSE_FIRSTPRIVATE:
4694 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
4695 check_non_private = "firstprivate";
4696 goto do_add;
4697 case OMP_CLAUSE_LASTPRIVATE:
4698 flags = GOVD_LASTPRIVATE | GOVD_SEEN | GOVD_EXPLICIT;
4699 check_non_private = "lastprivate";
4700 goto do_add;
4701 case OMP_CLAUSE_REDUCTION:
4702 flags = GOVD_REDUCTION | GOVD_SEEN | GOVD_EXPLICIT;
4703 check_non_private = "reduction";
4704 goto do_add;
4705
4706 do_add:
4707 decl = OMP_CLAUSE_DECL (c);
4708 if (decl == error_mark_node || TREE_TYPE (decl) == error_mark_node)
4709 {
4710 remove = true;
4711 break;
4712 }
4713 omp_add_variable (ctx, decl, flags);
4714 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
4715 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
4716 {
4717 omp_add_variable (ctx, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
4718 GOVD_LOCAL | GOVD_SEEN);
4719 gimplify_omp_ctxp = ctx;
4720 push_gimplify_context ();
4721 gimplify_stmt (&OMP_CLAUSE_REDUCTION_INIT (c));
4722 pop_gimplify_context (OMP_CLAUSE_REDUCTION_INIT (c));
4723 push_gimplify_context ();
4724 gimplify_stmt (&OMP_CLAUSE_REDUCTION_MERGE (c));
4725 pop_gimplify_context (OMP_CLAUSE_REDUCTION_MERGE (c));
4726 gimplify_omp_ctxp = outer_ctx;
4727 }
4728 if (notice_outer)
4729 goto do_notice;
4730 break;
4731
4732 case OMP_CLAUSE_COPYIN:
4733 case OMP_CLAUSE_COPYPRIVATE:
4734 decl = OMP_CLAUSE_DECL (c);
4735 if (decl == error_mark_node || TREE_TYPE (decl) == error_mark_node)
4736 {
4737 remove = true;
4738 break;
4739 }
4740 do_notice:
4741 if (outer_ctx)
4742 omp_notice_variable (outer_ctx, decl, true);
4743 if (check_non_private
4744 && !in_parallel
4745 && omp_check_private (ctx, decl))
4746 {
4747 error ("%s variable %qs is private in outer context",
4748 check_non_private, IDENTIFIER_POINTER (DECL_NAME (decl)));
4749 remove = true;
4750 }
4751 break;
4752
4753 case OMP_CLAUSE_IF:
4754 OMP_CLAUSE_OPERAND (c, 0)
4755 = gimple_boolify (OMP_CLAUSE_OPERAND (c, 0));
4756 /* Fall through. */
4757
4758 case OMP_CLAUSE_SCHEDULE:
4759 case OMP_CLAUSE_NUM_THREADS:
4760 gs = gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
4761 is_gimple_val, fb_rvalue);
4762 if (gs == GS_ERROR)
4763 remove = true;
4764 break;
4765
4766 case OMP_CLAUSE_NOWAIT:
4767 case OMP_CLAUSE_ORDERED:
4768 break;
4769
4770 case OMP_CLAUSE_DEFAULT:
4771 ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
4772 break;
4773
4774 default:
4775 gcc_unreachable ();
4776 }
4777
4778 if (remove)
4779 *list_p = OMP_CLAUSE_CHAIN (c);
4780 else
4781 list_p = &OMP_CLAUSE_CHAIN (c);
4782 }
4783
4784 gimplify_omp_ctxp = ctx;
4785 }
4786
4787 /* For all variables that were not actually used within the context,
4788 remove PRIVATE, SHARED, and FIRSTPRIVATE clauses. */
4789
4790 static int
4791 gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data)
4792 {
4793 tree *list_p = (tree *) data;
4794 tree decl = (tree) n->key;
4795 unsigned flags = n->value;
4796 enum omp_clause_code code;
4797 tree clause;
4798 bool private_debug;
4799
4800 if (flags & (GOVD_EXPLICIT | GOVD_LOCAL))
4801 return 0;
4802 if ((flags & GOVD_SEEN) == 0)
4803 return 0;
4804 if (flags & GOVD_DEBUG_PRIVATE)
4805 {
4806 gcc_assert ((flags & GOVD_DATA_SHARE_CLASS) == GOVD_PRIVATE);
4807 private_debug = true;
4808 }
4809 else
4810 private_debug
4811 = lang_hooks.decls.omp_private_debug_clause (decl,
4812 !!(flags & GOVD_SHARED));
4813 if (private_debug)
4814 code = OMP_CLAUSE_PRIVATE;
4815 else if (flags & GOVD_SHARED)
4816 {
4817 if (is_global_var (decl))
4818 {
4819 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
4820 while (ctx != NULL)
4821 {
4822 splay_tree_node on
4823 = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
4824 if (on && (on->value & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
4825 | GOVD_PRIVATE | GOVD_REDUCTION)) != 0)
4826 break;
4827 ctx = ctx->outer_context;
4828 }
4829 if (ctx == NULL)
4830 return 0;
4831 }
4832 code = OMP_CLAUSE_SHARED;
4833 }
4834 else if (flags & GOVD_PRIVATE)
4835 code = OMP_CLAUSE_PRIVATE;
4836 else if (flags & GOVD_FIRSTPRIVATE)
4837 code = OMP_CLAUSE_FIRSTPRIVATE;
4838 else
4839 gcc_unreachable ();
4840
4841 clause = build_omp_clause (code);
4842 OMP_CLAUSE_DECL (clause) = decl;
4843 OMP_CLAUSE_CHAIN (clause) = *list_p;
4844 if (private_debug)
4845 OMP_CLAUSE_PRIVATE_DEBUG (clause) = 1;
4846 *list_p = clause;
4847
4848 return 0;
4849 }
4850
4851 static void
4852 gimplify_adjust_omp_clauses (tree *list_p)
4853 {
4854 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
4855 tree c, decl;
4856
4857 while ((c = *list_p) != NULL)
4858 {
4859 splay_tree_node n;
4860 bool remove = false;
4861
4862 switch (OMP_CLAUSE_CODE (c))
4863 {
4864 case OMP_CLAUSE_PRIVATE:
4865 case OMP_CLAUSE_SHARED:
4866 case OMP_CLAUSE_FIRSTPRIVATE:
4867 decl = OMP_CLAUSE_DECL (c);
4868 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
4869 remove = !(n->value & GOVD_SEEN);
4870 if (! remove)
4871 {
4872 bool shared = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED;
4873 if ((n->value & GOVD_DEBUG_PRIVATE)
4874 || lang_hooks.decls.omp_private_debug_clause (decl, shared))
4875 {
4876 gcc_assert ((n->value & GOVD_DEBUG_PRIVATE) == 0
4877 || ((n->value & GOVD_DATA_SHARE_CLASS)
4878 == GOVD_PRIVATE));
4879 OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_PRIVATE);
4880 OMP_CLAUSE_PRIVATE_DEBUG (c) = 1;
4881 }
4882 }
4883 break;
4884
4885 case OMP_CLAUSE_LASTPRIVATE:
4886 /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
4887 accurately reflect the presence of a FIRSTPRIVATE clause. */
4888 decl = OMP_CLAUSE_DECL (c);
4889 n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
4890 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
4891 = (n->value & GOVD_FIRSTPRIVATE) != 0;
4892 break;
4893
4894 case OMP_CLAUSE_REDUCTION:
4895 case OMP_CLAUSE_COPYIN:
4896 case OMP_CLAUSE_COPYPRIVATE:
4897 case OMP_CLAUSE_IF:
4898 case OMP_CLAUSE_NUM_THREADS:
4899 case OMP_CLAUSE_SCHEDULE:
4900 case OMP_CLAUSE_NOWAIT:
4901 case OMP_CLAUSE_ORDERED:
4902 case OMP_CLAUSE_DEFAULT:
4903 break;
4904
4905 default:
4906 gcc_unreachable ();
4907 }
4908
4909 if (remove)
4910 *list_p = OMP_CLAUSE_CHAIN (c);
4911 else
4912 list_p = &OMP_CLAUSE_CHAIN (c);
4913 }
4914
4915 /* Add in any implicit data sharing. */
4916 splay_tree_foreach (ctx->variables, gimplify_adjust_omp_clauses_1, list_p);
4917
4918 gimplify_omp_ctxp = ctx->outer_context;
4919 delete_omp_context (ctx);
4920 }
4921
4922 /* Gimplify the contents of an OMP_PARALLEL statement. This involves
4923 gimplification of the body, as well as scanning the body for used
4924 variables. We need to do this scan now, because variable-sized
4925 decls will be decomposed during gimplification. */
4926
4927 static enum gimplify_status
4928 gimplify_omp_parallel (tree *expr_p, tree *pre_p)
4929 {
4930 tree expr = *expr_p;
4931
4932 gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p, true,
4933 OMP_PARALLEL_COMBINED (expr));
4934
4935 push_gimplify_context ();
4936
4937 gimplify_stmt (&OMP_PARALLEL_BODY (expr));
4938
4939 if (TREE_CODE (OMP_PARALLEL_BODY (expr)) == BIND_EXPR)
4940 pop_gimplify_context (OMP_PARALLEL_BODY (expr));
4941 else
4942 pop_gimplify_context (NULL_TREE);
4943
4944 gimplify_adjust_omp_clauses (&OMP_PARALLEL_CLAUSES (expr));
4945
4946 return GS_ALL_DONE;
4947 }
4948
4949 /* Gimplify the gross structure of an OMP_FOR statement. */
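/* As a sketch (function names hypothetical), for

     #pragma omp for
     for (i = lb (); i < ub (); i++)

   the iteration variable i is made private, the bound expressions lb () and
   ub () are gimplified into OMP_FOR_PRE_BODY as gimple values, and the
   increment is canonicalized to "i = i + 1".  */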
4950
4951 static enum gimplify_status
4952 gimplify_omp_for (tree *expr_p, tree *pre_p)
4953 {
4954 tree for_stmt, decl, t;
4955 enum gimplify_status ret = 0;
4956
4957 for_stmt = *expr_p;
4958
4959 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt), pre_p, false, false);
4960
4961 t = OMP_FOR_INIT (for_stmt);
4962 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
4963 decl = TREE_OPERAND (t, 0);
4964 gcc_assert (DECL_P (decl));
4965 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl)));
4966
4967 /* Make sure the iteration variable is private. */
4968 if (omp_is_private (gimplify_omp_ctxp, decl))
4969 omp_notice_variable (gimplify_omp_ctxp, decl, true);
4970 else
4971 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_PRIVATE | GOVD_SEEN);
4972
4973 ret |= gimplify_expr (&TREE_OPERAND (t, 1), &OMP_FOR_PRE_BODY (for_stmt),
4974 NULL, is_gimple_val, fb_rvalue);
4975
4976 t = OMP_FOR_COND (for_stmt);
4977 gcc_assert (COMPARISON_CLASS_P (t));
4978 gcc_assert (TREE_OPERAND (t, 0) == decl);
4979
4980 ret |= gimplify_expr (&TREE_OPERAND (t, 1), &OMP_FOR_PRE_BODY (for_stmt),
4981 NULL, is_gimple_val, fb_rvalue);
4982
4983 t = OMP_FOR_INCR (for_stmt);
4984 switch (TREE_CODE (t))
4985 {
4986 case PREINCREMENT_EXPR:
4987 case POSTINCREMENT_EXPR:
4988 t = build_int_cst (TREE_TYPE (decl), 1);
4989 goto build_modify;
4990 case PREDECREMENT_EXPR:
4991 case POSTDECREMENT_EXPR:
4992 t = build_int_cst (TREE_TYPE (decl), -1);
4993 goto build_modify;
4994 build_modify:
4995 t = build2 (PLUS_EXPR, TREE_TYPE (decl), decl, t);
4996 t = build2 (MODIFY_EXPR, void_type_node, decl, t);
4997 OMP_FOR_INCR (for_stmt) = t;
4998 break;
4999
5000 case MODIFY_EXPR:
5001 gcc_assert (TREE_OPERAND (t, 0) == decl);
5002 t = TREE_OPERAND (t, 1);
5003 switch (TREE_CODE (t))
5004 {
5005 case PLUS_EXPR:
5006 if (TREE_OPERAND (t, 1) == decl)
5007 {
5008 TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0);
5009 TREE_OPERAND (t, 0) = decl;
5010 break;
5011 }
5012 case MINUS_EXPR:
5013 gcc_assert (TREE_OPERAND (t, 0) == decl);
5014 break;
5015 default:
5016 gcc_unreachable ();
5017 }
5018
5019 ret |= gimplify_expr (&TREE_OPERAND (t, 1), &OMP_FOR_PRE_BODY (for_stmt),
5020 NULL, is_gimple_val, fb_rvalue);
5021 break;
5022
5023 default:
5024 gcc_unreachable ();
5025 }
5026
5027 gimplify_to_stmt_list (&OMP_FOR_BODY (for_stmt));
5028 gimplify_adjust_omp_clauses (&OMP_FOR_CLAUSES (for_stmt));
5029
5030 return ret == GS_ALL_DONE ? GS_ALL_DONE : GS_ERROR;
5031 }
5032
5033 /* Gimplify the gross structure of other OpenMP worksharing constructs.
5034 In particular, OMP_SECTIONS and OMP_SINGLE. */
5035
5036 static enum gimplify_status
5037 gimplify_omp_workshare (tree *expr_p, tree *pre_p)
5038 {
5039 tree stmt = *expr_p;
5040
5041 gimplify_scan_omp_clauses (&OMP_CLAUSES (stmt), pre_p, false, false);
5042 gimplify_to_stmt_list (&OMP_BODY (stmt));
5043 gimplify_adjust_omp_clauses (&OMP_CLAUSES (stmt));
5044
5045 return GS_ALL_DONE;
5046 }
5047
5048 /* A subroutine of gimplify_omp_atomic. The front end is supposed to have
5049 stabilized the lhs of the atomic operation as *ADDR. Return true if
5050 EXPR is this stabilized form. */
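/* Example of what matches (hypothetical variable): for
   "#pragma omp atomic x += 1" the front end passes ADDR == &x, and both
   "*(&x)" and "x" itself, possibly wrapped in casts to a type variant
   (e.g. for a volatile x), are recognized as the stabilized lhs.  */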
5051
5052 static bool
5053 goa_lhs_expr_p (tree expr, tree addr)
5054 {
5055 /* Also include casts to other type variants. The C front end is fond
5056 of adding these for e.g. volatile variables. This is like
5057 STRIP_TYPE_NOPS but includes the main variant lookup. */
5058 while ((TREE_CODE (expr) == NOP_EXPR
5059 || TREE_CODE (expr) == CONVERT_EXPR
5060 || TREE_CODE (expr) == NON_LVALUE_EXPR)
5061 && TREE_OPERAND (expr, 0) != error_mark_node
5062 && (TYPE_MAIN_VARIANT (TREE_TYPE (expr))
5063 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (expr, 0)))))
5064 expr = TREE_OPERAND (expr, 0);
5065
5066 if (TREE_CODE (expr) == INDIRECT_REF && TREE_OPERAND (expr, 0) == addr)
5067 return true;
5068 if (TREE_CODE (addr) == ADDR_EXPR && expr == TREE_OPERAND (addr, 0))
5069 return true;
5070 return false;
5071 }
5072
5073 /* A subroutine of gimplify_omp_atomic. Attempt to implement the atomic
5074 operation as a __sync_fetch_and_op builtin. INDEX is log2 of the
5075 size of the data type, and thus usable to find the index of the builtin
5076 decl. Returns GS_UNHANDLED if the expression is not of the proper form. */
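/* A sketch of the result (builtin availability depends on the target): for
   a 4-byte int x,

     #pragma omp atomic
     x = x + 5;

   with INDEX == 2 becomes a call to __sync_fetch_and_add_4 (&x, 5).  */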
5077
5078 static enum gimplify_status
5079 gimplify_omp_atomic_fetch_op (tree *expr_p, tree addr, tree rhs, int index)
5080 {
5081 enum built_in_function base;
5082 tree decl, args, itype;
5083 enum insn_code *optab;
5084
5085 /* Check for one of the supported fetch-op operations. */
5086 switch (TREE_CODE (rhs))
5087 {
5088 case PLUS_EXPR:
5089 base = BUILT_IN_FETCH_AND_ADD_N;
5090 optab = sync_add_optab;
5091 break;
5092 case MINUS_EXPR:
5093 base = BUILT_IN_FETCH_AND_SUB_N;
5094 optab = sync_add_optab;
5095 break;
5096 case BIT_AND_EXPR:
5097 base = BUILT_IN_FETCH_AND_AND_N;
5098 optab = sync_and_optab;
5099 break;
5100 case BIT_IOR_EXPR:
5101 base = BUILT_IN_FETCH_AND_OR_N;
5102 optab = sync_ior_optab;
5103 break;
5104 case BIT_XOR_EXPR:
5105 base = BUILT_IN_FETCH_AND_XOR_N;
5106 optab = sync_xor_optab;
5107 break;
5108 default:
5109 return GS_UNHANDLED;
5110 }
5111
5112 /* Make sure the expression is of the proper form. */
5113 if (goa_lhs_expr_p (TREE_OPERAND (rhs, 0), addr))
5114 rhs = TREE_OPERAND (rhs, 1);
5115 else if (commutative_tree_code (TREE_CODE (rhs))
5116 && goa_lhs_expr_p (TREE_OPERAND (rhs, 1), addr))
5117 rhs = TREE_OPERAND (rhs, 0);
5118 else
5119 return GS_UNHANDLED;
5120
5121 decl = built_in_decls[base + index + 1];
5122 itype = TREE_TYPE (TREE_TYPE (decl));
5123
5124 if (optab[TYPE_MODE (itype)] == CODE_FOR_nothing)
5125 return GS_UNHANDLED;
5126
5127 args = tree_cons (NULL, fold_convert (itype, rhs), NULL);
5128 args = tree_cons (NULL, addr, args);
5129 *expr_p = build_function_call_expr (decl, args);
5130 return GS_OK;
5131 }
5132
5133 /* A subroutine of gimplify_omp_atomic_pipeline. Walk *EXPR_P and replace
5134 appearances of *LHS_ADDR with LHS_VAR. If an expression does not involve
5135 the lhs, evaluate it into a temporary. Return 1 if the lhs appeared as
5136 a subexpression, 0 if it did not, or -1 if an error was encountered. */
5137
5138 static int
5139 goa_stabilize_expr (tree *expr_p, tree *pre_p, tree lhs_addr, tree lhs_var)
5140 {
5141 tree expr = *expr_p;
5142 int saw_lhs;
5143
5144 if (goa_lhs_expr_p (expr, lhs_addr))
5145 {
5146 *expr_p = lhs_var;
5147 return 1;
5148 }
5149 if (is_gimple_val (expr))
5150 return 0;
5151
5152 saw_lhs = 0;
5153 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
5154 {
5155 case tcc_binary:
5156 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
5157 lhs_addr, lhs_var);
5158 case tcc_unary:
5159 saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
5160 lhs_addr, lhs_var);
5161 break;
5162 default:
5163 break;
5164 }
5165
5166 if (saw_lhs == 0)
5167 {
5168 enum gimplify_status gs;
5169 gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_val, fb_rvalue);
5170 if (gs != GS_ALL_DONE)
5171 saw_lhs = -1;
5172 }
5173
5174 return saw_lhs;
5175 }
5176
5177 /* A subroutine of gimplify_omp_atomic. Implement the atomic operation as:
5178
5179 oldval = *addr;
5180 repeat:
5181 newval = rhs; // with oldval replacing *addr in rhs
5182 oldval = __sync_val_compare_and_swap (addr, oldval, newval);
5183 if (oldval != newval)
5184 goto repeat;
5185
5186 INDEX is log2 of the size of the data type, and thus usable to find the
5187 index of the builtin decl. */
5188
5189 static enum gimplify_status
5190 gimplify_omp_atomic_pipeline (tree *expr_p, tree *pre_p, tree addr,
5191 tree rhs, int index)
5192 {
5193 tree oldval, oldival, oldival2, newval, newival, label;
5194 tree type, itype, cmpxchg, args, x, iaddr;
5195
5196 cmpxchg = built_in_decls[BUILT_IN_VAL_COMPARE_AND_SWAP_N + index + 1];
5197 type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
5198 itype = TREE_TYPE (TREE_TYPE (cmpxchg));
5199
5200 if (sync_compare_and_swap[TYPE_MODE (itype)] == CODE_FOR_nothing)
5201 return GS_UNHANDLED;
5202
5203 oldval = create_tmp_var (type, NULL);
5204 newval = create_tmp_var (type, NULL);
5205
5206 /* Precompute as much of RHS as possible. In the same walk, replace
5207 occurrences of the lhs value with our temporary. */
5208 if (goa_stabilize_expr (&rhs, pre_p, addr, oldval) < 0)
5209 return GS_ERROR;
5210
5211 x = build_fold_indirect_ref (addr);
5212 x = build2 (MODIFY_EXPR, void_type_node, oldval, x);
5213 gimplify_and_add (x, pre_p);
5214
5215 /* For floating-point values, we'll need to view-convert them to integers
5216 so that we can perform the atomic compare and swap. Simplify the
5217 following code by always setting up the "i"ntegral variables. */
5218 if (INTEGRAL_TYPE_P (type) || POINTER_TYPE_P (type))
5219 {
5220 oldival = oldval;
5221 newival = newval;
5222 iaddr = addr;
5223 }
5224 else
5225 {
5226 oldival = create_tmp_var (itype, NULL);
5227 newival = create_tmp_var (itype, NULL);
5228
5229 x = build1 (VIEW_CONVERT_EXPR, itype, oldval);
5230 x = build2 (MODIFY_EXPR, void_type_node, oldival, x);
5231 gimplify_and_add (x, pre_p);
5232 iaddr = fold_convert (build_pointer_type (itype), addr);
5233 }
5234
5235 oldival2 = create_tmp_var (itype, NULL);
5236
5237 label = create_artificial_label ();
5238 x = build1 (LABEL_EXPR, void_type_node, label);
5239 gimplify_and_add (x, pre_p);
5240
5241 x = build2 (MODIFY_EXPR, void_type_node, newval, rhs);
5242 gimplify_and_add (x, pre_p);
5243
5244 if (newval != newival)
5245 {
5246 x = build1 (VIEW_CONVERT_EXPR, itype, newval);
5247 x = build2 (MODIFY_EXPR, void_type_node, newival, x);
5248 gimplify_and_add (x, pre_p);
5249 }
5250
5251 x = build2 (MODIFY_EXPR, void_type_node, oldival2,
5252 fold_convert (itype, oldival));
5253 gimplify_and_add (x, pre_p);
5254
5255 args = tree_cons (NULL, fold_convert (itype, newival), NULL);
5256 args = tree_cons (NULL, fold_convert (itype, oldival), args);
5257 args = tree_cons (NULL, iaddr, args);
5258 x = build_function_call_expr (cmpxchg, args);
5259 if (oldval == oldival)
5260 x = fold_convert (type, x);
5261 x = build2 (MODIFY_EXPR, void_type_node, oldival, x);
5262 gimplify_and_add (x, pre_p);
5263
5264 /* For floating point, be prepared for the loop backedge. */
5265 if (oldval != oldival)
5266 {
5267 x = build1 (VIEW_CONVERT_EXPR, type, oldival);
5268 x = build2 (MODIFY_EXPR, void_type_node, oldval, x);
5269 gimplify_and_add (x, pre_p);
5270 }
5271
5272 /* Note that we always perform the comparison as an integer, even for
5273 floating point. This allows the atomic operation to properly
5274 succeed even with NaNs and -0.0. */
5275 x = build3 (COND_EXPR, void_type_node,
5276 build2 (NE_EXPR, boolean_type_node, oldival, oldival2),
5277 build1 (GOTO_EXPR, void_type_node, label), NULL);
5278 gimplify_and_add (x, pre_p);
5279
5280 *expr_p = NULL;
5281 return GS_ALL_DONE;
5282 }
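/* As an illustration (a sketch, not literal output of this function): for

     #pragma omp atomic
     f += 1.0f;		(with F of type float)

   the code built above amounts to

     oldval = *addr;
     oldival = VIEW_CONVERT_EXPR<int> (oldval);
   repeat:
     newval = oldval + 1.0f;
     newival = VIEW_CONVERT_EXPR<int> (newval);
     oldival2 = oldival;
     oldival = __sync_val_compare_and_swap ((int *) addr, oldival, newival);
     oldval = VIEW_CONVERT_EXPR<float> (oldival);
     if (oldival != oldival2)
       goto repeat;

   i.e. the float bits are reinterpreted as a same-sized integer so that
   the compare-and-swap and the loop test operate on integer values, which
   behaves correctly even in the presence of NaNs and -0.0.  The exact
   builtin used depends on the size of the type.  */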
5283
5284 /* A subroutine of gimplify_omp_atomic. Implement the atomic operation as:
5285
5286 GOMP_atomic_start ();
5287 *addr = rhs;
5288 GOMP_atomic_end ();
5289
5290 The result is not globally atomic, but works so long as all parallel
5291 references are within #pragma omp atomic directives. According to
5292    responses received from omp@openmp.org, this appears to be within spec,
5293    which makes sense, since that's how several other compilers handle
5294 this situation as well. */
5295
5296 static enum gimplify_status
5297 gimplify_omp_atomic_mutex (tree *expr_p, tree *pre_p, tree addr, tree rhs)
5298 {
5299 tree t;
5300
5301 t = built_in_decls[BUILT_IN_GOMP_ATOMIC_START];
5302 t = build_function_call_expr (t, NULL);
5303 gimplify_and_add (t, pre_p);
5304
5305 t = build_fold_indirect_ref (addr);
5306 t = build2 (MODIFY_EXPR, void_type_node, t, rhs);
5307 gimplify_and_add (t, pre_p);
5308
5309 t = built_in_decls[BUILT_IN_GOMP_ATOMIC_END];
5310 t = build_function_call_expr (t, NULL);
5311 gimplify_and_add (t, pre_p);
5312
5313 *expr_p = NULL;
5314 return GS_ALL_DONE;
5315 }
5316
5317 /* Gimplify an OMP_ATOMIC statement. */
5318
5319 static enum gimplify_status
5320 gimplify_omp_atomic (tree *expr_p, tree *pre_p)
5321 {
5322 tree addr = TREE_OPERAND (*expr_p, 0);
5323 tree rhs = TREE_OPERAND (*expr_p, 1);
5324 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
5325 HOST_WIDE_INT index;
5326
5327 /* Make sure the type is one of the supported sizes. */
5328 index = tree_low_cst (TYPE_SIZE_UNIT (type), 1);
5329 index = exact_log2 (index);
5330 if (index >= 0 && index <= 4)
5331 {
5332 enum gimplify_status gs;
5333 unsigned int align;
5334
5335 if (DECL_P (TREE_OPERAND (addr, 0)))
5336 align = DECL_ALIGN_UNIT (TREE_OPERAND (addr, 0));
5337 else if (TREE_CODE (TREE_OPERAND (addr, 0)) == COMPONENT_REF
5338 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (addr, 0), 1))
5339 == FIELD_DECL)
5340 align = DECL_ALIGN_UNIT (TREE_OPERAND (TREE_OPERAND (addr, 0), 1));
5341 else
5342 align = TYPE_ALIGN_UNIT (type);
5343
5344 /* __sync builtins require strict data alignment. */
5345 if (exact_log2 (align) >= index)
5346 {
5347 /* When possible, use specialized atomic update functions. */
5348 if (INTEGRAL_TYPE_P (type) || POINTER_TYPE_P (type))
5349 {
5350 gs = gimplify_omp_atomic_fetch_op (expr_p, addr, rhs, index);
5351 if (gs != GS_UNHANDLED)
5352 return gs;
5353 }
5354
5355 	  /* If we don't have specialized __sync builtins, try to implement
5356 	     it as a compare-and-swap loop.  */
5357 gs = gimplify_omp_atomic_pipeline (expr_p, pre_p, addr, rhs, index);
5358 if (gs != GS_UNHANDLED)
5359 return gs;
5360 }
5361 }
5362
5363 /* The ultimate fallback is wrapping the operation in a mutex. */
5364 return gimplify_omp_atomic_mutex (expr_p, pre_p, addr, rhs);
5365 }
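/* To summarize the dispatch above with an example (sketch only):

     #pragma omp atomic
     x += 1;

   with X of type int and available __sync support is lowered through
   gimplify_omp_atomic_fetch_op to a fetch-and-add style __sync builtin;
   with X of type float it is lowered to the compare-and-swap loop of
   gimplify_omp_atomic_pipeline; and if the size of X's type is not
   1, 2, 4, 8 or 16 bytes, or the object is insufficiently aligned, it
   falls back to the GOMP_atomic_start/GOMP_atomic_end mutex form.  */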
5366
5367 /* Gimplifies the expression tree pointed to by EXPR_P. Return 0 if
5368 gimplification failed.
5369
5370 PRE_P points to the list where side effects that must happen before
5371 EXPR should be stored.
5372
5373 POST_P points to the list where side effects that must happen after
5374 EXPR should be stored, or NULL if there is no suitable list. In
5375 that case, we copy the result to a temporary, emit the
5376 post-effects, and then return the temporary.
5377
5378 GIMPLE_TEST_F points to a function that takes a tree T and
5379 returns nonzero if T is in the GIMPLE form requested by the
5380 caller. The GIMPLE predicates are in tree-gimple.c.
5381
5382 This test is used twice. Before gimplification, the test is
5383 invoked to determine whether *EXPR_P is already gimple enough. If
5384 that fails, *EXPR_P is gimplified according to its code and
5385 GIMPLE_TEST_F is called again. If the test still fails, then a new
5386 temporary variable is created and assigned the value of the
5387 gimplified expression.
5388
5389 FALLBACK tells the function what sort of a temporary we want. If the 1
5390 bit is set, an rvalue is OK. If the 2 bit is set, an lvalue is OK.
5391 If both are set, either is OK, but an lvalue is preferable.
5392
5393 The return value is either GS_ERROR or GS_ALL_DONE, since this function
5394    iterates until a solution is found.  */
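/* A typical call from elsewhere in this file (for illustration only):

     enum gimplify_status gs;
     gs = gimplify_expr (&TREE_OPERAND (expr, 1), pre_p, post_p,
			 is_gimple_val, fb_rvalue);

   forces the operand into a GIMPLE value, queuing any required side
   effects on PRE_P and POST_P, and reports success or failure in GS.  */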
5395
5396 enum gimplify_status
5397 gimplify_expr (tree *expr_p, tree *pre_p, tree *post_p,
5398 bool (* gimple_test_f) (tree), fallback_t fallback)
5399 {
5400 tree tmp;
5401 tree internal_pre = NULL_TREE;
5402 tree internal_post = NULL_TREE;
5403 tree save_expr;
5404 int is_statement = (pre_p == NULL);
5405 location_t saved_location;
5406 enum gimplify_status ret;
5407
5408 save_expr = *expr_p;
5409 if (save_expr == NULL_TREE)
5410 return GS_ALL_DONE;
5411
5412 /* We used to check the predicate here and return immediately if it
5413 succeeds. This is wrong; the design is for gimplification to be
5414 idempotent, and for the predicates to only test for valid forms, not
5415 whether they are fully simplified. */
5416
5417 /* Set up our internal queues if needed. */
5418 if (pre_p == NULL)
5419 pre_p = &internal_pre;
5420 if (post_p == NULL)
5421 post_p = &internal_post;
5422
5423 saved_location = input_location;
5424 if (save_expr != error_mark_node
5425 && EXPR_HAS_LOCATION (*expr_p))
5426 input_location = EXPR_LOCATION (*expr_p);
5427
5428 /* Loop over the specific gimplifiers until the toplevel node
5429 remains the same. */
5430 do
5431 {
5432 /* Strip away as many useless type conversions as possible
5433 at the toplevel. */
5434 STRIP_USELESS_TYPE_CONVERSION (*expr_p);
5435
5436 /* Remember the expr. */
5437 save_expr = *expr_p;
5438
5439 /* Die, die, die, my darling. */
5440 if (save_expr == error_mark_node
5441 || (TREE_TYPE (save_expr)
5442 && TREE_TYPE (save_expr) == error_mark_node))
5443 {
5444 ret = GS_ERROR;
5445 break;
5446 }
5447
5448 /* Do any language-specific gimplification. */
5449 ret = lang_hooks.gimplify_expr (expr_p, pre_p, post_p);
5450 if (ret == GS_OK)
5451 {
5452 if (*expr_p == NULL_TREE)
5453 break;
5454 if (*expr_p != save_expr)
5455 continue;
5456 }
5457 else if (ret != GS_UNHANDLED)
5458 break;
5459
5460 ret = GS_OK;
5461 switch (TREE_CODE (*expr_p))
5462 {
5463 /* First deal with the special cases. */
5464
5465 case POSTINCREMENT_EXPR:
5466 case POSTDECREMENT_EXPR:
5467 case PREINCREMENT_EXPR:
5468 case PREDECREMENT_EXPR:
5469 ret = gimplify_self_mod_expr (expr_p, pre_p, post_p,
5470 fallback != fb_none);
5471 break;
5472
5473 case ARRAY_REF:
5474 case ARRAY_RANGE_REF:
5475 case REALPART_EXPR:
5476 case IMAGPART_EXPR:
5477 case COMPONENT_REF:
5478 case VIEW_CONVERT_EXPR:
5479 ret = gimplify_compound_lval (expr_p, pre_p, post_p,
5480 fallback ? fallback : fb_rvalue);
5481 break;
5482
5483 case COND_EXPR:
5484 ret = gimplify_cond_expr (expr_p, pre_p, fallback);
5485 /* C99 code may assign to an array in a structure value of a
5486 conditional expression, and this has undefined behavior
5487 only on execution, so create a temporary if an lvalue is
5488 required. */
5489 if (fallback == fb_lvalue)
5490 {
5491 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
5492 lang_hooks.mark_addressable (*expr_p);
5493 }
5494 break;
5495
5496 case CALL_EXPR:
5497 ret = gimplify_call_expr (expr_p, pre_p, fallback != fb_none);
5498 /* C99 code may assign to an array in a structure returned
5499 from a function, and this has undefined behavior only on
5500 execution, so create a temporary if an lvalue is
5501 required. */
5502 if (fallback == fb_lvalue)
5503 {
5504 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
5505 lang_hooks.mark_addressable (*expr_p);
5506 }
5507 break;
5508
5509 case TREE_LIST:
5510 gcc_unreachable ();
5511
5512 case COMPOUND_EXPR:
5513 ret = gimplify_compound_expr (expr_p, pre_p, fallback != fb_none);
5514 break;
5515
5516 case MODIFY_EXPR:
5517 case INIT_EXPR:
5518 ret = gimplify_modify_expr (expr_p, pre_p, post_p,
5519 fallback != fb_none);
5520
5521 /* The distinction between MODIFY_EXPR and INIT_EXPR is no longer
5522 useful. */
5523 if (*expr_p && TREE_CODE (*expr_p) == INIT_EXPR)
5524 TREE_SET_CODE (*expr_p, MODIFY_EXPR);
5525 break;
5526
5527 case TRUTH_ANDIF_EXPR:
5528 case TRUTH_ORIF_EXPR:
5529 ret = gimplify_boolean_expr (expr_p);
5530 break;
5531
5532 case TRUTH_NOT_EXPR:
5533 TREE_OPERAND (*expr_p, 0)
5534 = gimple_boolify (TREE_OPERAND (*expr_p, 0));
5535 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
5536 is_gimple_val, fb_rvalue);
5537 recalculate_side_effects (*expr_p);
5538 break;
5539
5540 case ADDR_EXPR:
5541 ret = gimplify_addr_expr (expr_p, pre_p, post_p);
5542 break;
5543
5544 case VA_ARG_EXPR:
5545 ret = gimplify_va_arg_expr (expr_p, pre_p, post_p);
5546 break;
5547
5548 case CONVERT_EXPR:
5549 case NOP_EXPR:
5550 if (IS_EMPTY_STMT (*expr_p))
5551 {
5552 ret = GS_ALL_DONE;
5553 break;
5554 }
5555
5556 if (VOID_TYPE_P (TREE_TYPE (*expr_p))
5557 || fallback == fb_none)
5558 {
5559 /* Just strip a conversion to void (or in void context) and
5560 try again. */
5561 *expr_p = TREE_OPERAND (*expr_p, 0);
5562 break;
5563 }
5564
5565 ret = gimplify_conversion (expr_p);
5566 if (ret == GS_ERROR)
5567 break;
5568 if (*expr_p != save_expr)
5569 break;
5570 /* FALLTHRU */
5571
5572 case FIX_TRUNC_EXPR:
5573 case FIX_CEIL_EXPR:
5574 case FIX_FLOOR_EXPR:
5575 case FIX_ROUND_EXPR:
5576 /* unary_expr: ... | '(' cast ')' val | ... */
5577 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
5578 is_gimple_val, fb_rvalue);
5579 recalculate_side_effects (*expr_p);
5580 break;
5581
5582 case INDIRECT_REF:
5583 *expr_p = fold_indirect_ref (*expr_p);
5584 if (*expr_p != save_expr)
5585 break;
5586 /* else fall through. */
5587 case ALIGN_INDIRECT_REF:
5588 case MISALIGNED_INDIRECT_REF:
5589 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
5590 is_gimple_reg, fb_rvalue);
5591 recalculate_side_effects (*expr_p);
5592 break;
5593
5594 /* Constants need not be gimplified. */
5595 case INTEGER_CST:
5596 case REAL_CST:
5597 case STRING_CST:
5598 case COMPLEX_CST:
5599 case VECTOR_CST:
5600 ret = GS_ALL_DONE;
5601 break;
5602
5603 case CONST_DECL:
5604 /* If we require an lvalue, such as for ADDR_EXPR, retain the
5605 CONST_DECL node. Otherwise the decl is replaceable by its
5606 value. */
5607 /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either. */
5608 if (fallback & fb_lvalue)
5609 ret = GS_ALL_DONE;
5610 else
5611 *expr_p = DECL_INITIAL (*expr_p);
5612 break;
5613
5614 case DECL_EXPR:
5615 ret = gimplify_decl_expr (expr_p);
5616 break;
5617
5618 case EXC_PTR_EXPR:
5619 /* FIXME make this a decl. */
5620 ret = GS_ALL_DONE;
5621 break;
5622
5623 case BIND_EXPR:
5624 ret = gimplify_bind_expr (expr_p, pre_p);
5625 break;
5626
5627 case LOOP_EXPR:
5628 ret = gimplify_loop_expr (expr_p, pre_p);
5629 break;
5630
5631 case SWITCH_EXPR:
5632 ret = gimplify_switch_expr (expr_p, pre_p);
5633 break;
5634
5635 case EXIT_EXPR:
5636 ret = gimplify_exit_expr (expr_p);
5637 break;
5638
5639 case GOTO_EXPR:
5640 	  /* If the target is not a LABEL_DECL, then it is a computed jump
5641 and the target needs to be gimplified. */
5642 if (TREE_CODE (GOTO_DESTINATION (*expr_p)) != LABEL_DECL)
5643 ret = gimplify_expr (&GOTO_DESTINATION (*expr_p), pre_p,
5644 NULL, is_gimple_val, fb_rvalue);
5645 break;
5646
5647 case LABEL_EXPR:
5648 ret = GS_ALL_DONE;
5649 gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p))
5650 == current_function_decl);
5651 break;
5652
5653 case CASE_LABEL_EXPR:
5654 ret = gimplify_case_label_expr (expr_p);
5655 break;
5656
5657 case RETURN_EXPR:
5658 ret = gimplify_return_expr (*expr_p, pre_p);
5659 break;
5660
5661 case CONSTRUCTOR:
5662 /* Don't reduce this in place; let gimplify_init_constructor work its
5663 	     magic.  But if we're just elaborating this for side effects, just
5664 gimplify any element that has side-effects. */
5665 if (fallback == fb_none)
5666 {
5667 unsigned HOST_WIDE_INT ix;
5668 constructor_elt *ce;
5669 tree temp = NULL_TREE;
5670 for (ix = 0;
5671 VEC_iterate (constructor_elt, CONSTRUCTOR_ELTS (*expr_p),
5672 ix, ce);
5673 ix++)
5674 if (TREE_SIDE_EFFECTS (ce->value))
5675 append_to_statement_list (ce->value, &temp);
5676
5677 *expr_p = temp;
5678 ret = GS_OK;
5679 }
5680 /* C99 code may assign to an array in a constructed
5681 structure or union, and this has undefined behavior only
5682 on execution, so create a temporary if an lvalue is
5683 required. */
5684 else if (fallback == fb_lvalue)
5685 {
5686 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
5687 lang_hooks.mark_addressable (*expr_p);
5688 }
5689 else
5690 ret = GS_ALL_DONE;
5691 break;
5692
5693 /* The following are special cases that are not handled by the
5694 original GIMPLE grammar. */
5695
5696 /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
5697 eliminated. */
5698 case SAVE_EXPR:
5699 ret = gimplify_save_expr (expr_p, pre_p, post_p);
5700 break;
5701
5702 case BIT_FIELD_REF:
5703 {
5704 enum gimplify_status r0, r1, r2;
5705
5706 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
5707 is_gimple_lvalue, fb_either);
5708 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
5709 is_gimple_val, fb_rvalue);
5710 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p, post_p,
5711 is_gimple_val, fb_rvalue);
5712 recalculate_side_effects (*expr_p);
5713
5714 ret = MIN (r0, MIN (r1, r2));
5715 }
5716 break;
5717
5718 case NON_LVALUE_EXPR:
5719 /* This should have been stripped above. */
5720 gcc_unreachable ();
5721
5722 case ASM_EXPR:
5723 ret = gimplify_asm_expr (expr_p, pre_p, post_p);
5724 break;
5725
5726 case TRY_FINALLY_EXPR:
5727 case TRY_CATCH_EXPR:
5728 gimplify_to_stmt_list (&TREE_OPERAND (*expr_p, 0));
5729 gimplify_to_stmt_list (&TREE_OPERAND (*expr_p, 1));
5730 ret = GS_ALL_DONE;
5731 break;
5732
5733 case CLEANUP_POINT_EXPR:
5734 ret = gimplify_cleanup_point_expr (expr_p, pre_p);
5735 break;
5736
5737 case TARGET_EXPR:
5738 ret = gimplify_target_expr (expr_p, pre_p, post_p);
5739 break;
5740
5741 case CATCH_EXPR:
5742 gimplify_to_stmt_list (&CATCH_BODY (*expr_p));
5743 ret = GS_ALL_DONE;
5744 break;
5745
5746 case EH_FILTER_EXPR:
5747 gimplify_to_stmt_list (&EH_FILTER_FAILURE (*expr_p));
5748 ret = GS_ALL_DONE;
5749 break;
5750
5751 case OBJ_TYPE_REF:
5752 {
5753 enum gimplify_status r0, r1;
5754 r0 = gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p), pre_p, post_p,
5755 is_gimple_val, fb_rvalue);
5756 r1 = gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p), pre_p, post_p,
5757 is_gimple_val, fb_rvalue);
5758 ret = MIN (r0, r1);
5759 }
5760 break;
5761
5762 case LABEL_DECL:
5763 /* We get here when taking the address of a label. We mark
5764 	     the label as "forced", meaning it can never be removed and
5765 it is a potential target for any computed goto. */
5766 FORCED_LABEL (*expr_p) = 1;
5767 ret = GS_ALL_DONE;
5768 break;
5769
5770 case STATEMENT_LIST:
5771 ret = gimplify_statement_list (expr_p, pre_p);
5772 break;
5773
5774 case WITH_SIZE_EXPR:
5775 {
5776 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
5777 post_p == &internal_post ? NULL : post_p,
5778 gimple_test_f, fallback);
5779 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
5780 is_gimple_val, fb_rvalue);
5781 }
5782 break;
5783
5784 case VAR_DECL:
5785 case PARM_DECL:
5786 ret = gimplify_var_or_parm_decl (expr_p);
5787 break;
5788
5789 case RESULT_DECL:
5790 /* When within an OpenMP context, notice uses of variables. */
5791 if (gimplify_omp_ctxp)
5792 omp_notice_variable (gimplify_omp_ctxp, *expr_p, true);
5793 ret = GS_ALL_DONE;
5794 break;
5795
5796 case SSA_NAME:
5797 /* Allow callbacks into the gimplifier during optimization. */
5798 ret = GS_ALL_DONE;
5799 break;
5800
5801 case OMP_PARALLEL:
5802 ret = gimplify_omp_parallel (expr_p, pre_p);
5803 break;
5804
5805 case OMP_FOR:
5806 ret = gimplify_omp_for (expr_p, pre_p);
5807 break;
5808
5809 case OMP_SECTIONS:
5810 case OMP_SINGLE:
5811 ret = gimplify_omp_workshare (expr_p, pre_p);
5812 break;
5813
5814 case OMP_SECTION:
5815 case OMP_MASTER:
5816 case OMP_ORDERED:
5817 case OMP_CRITICAL:
5818 gimplify_to_stmt_list (&OMP_BODY (*expr_p));
5819 break;
5820
5821 case OMP_ATOMIC:
5822 ret = gimplify_omp_atomic (expr_p, pre_p);
5823 break;
5824
5825 case OMP_RETURN:
5826 case OMP_CONTINUE:
5827 ret = GS_ALL_DONE;
5828 break;
5829
5830 default:
5831 switch (TREE_CODE_CLASS (TREE_CODE (*expr_p)))
5832 {
5833 case tcc_comparison:
5834 	    /* Handle comparison of non-scalar-mode aggregate objects
5835 with a call to memcmp. It would be nice to only have to do
5836 this for variable-sized objects, but then we'd have to allow
5837 the same nest of reference nodes we allow for MODIFY_EXPR and
5838 that's too complex.
5839
5840 Compare scalar mode aggregates as scalar mode values. Using
5841 memcmp for them would be very inefficient at best, and is
5842 plain wrong if bitfields are involved. */
5843
5844 {
5845 tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1));
5846
5847 if (!AGGREGATE_TYPE_P (type))
5848 goto expr_2;
5849 else if (TYPE_MODE (type) != BLKmode)
5850 ret = gimplify_scalar_mode_aggregate_compare (expr_p);
5851 else
5852 ret = gimplify_variable_sized_compare (expr_p);
5853
5854 break;
5855 }
5856
5857 /* If *EXPR_P does not need to be special-cased, handle it
5858 according to its class. */
5859 case tcc_unary:
5860 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
5861 post_p, is_gimple_val, fb_rvalue);
5862 break;
5863
5864 case tcc_binary:
5865 expr_2:
5866 {
5867 enum gimplify_status r0, r1;
5868
5869 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
5870 post_p, is_gimple_val, fb_rvalue);
5871 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
5872 post_p, is_gimple_val, fb_rvalue);
5873
5874 ret = MIN (r0, r1);
5875 break;
5876 }
5877
5878 case tcc_declaration:
5879 case tcc_constant:
5880 ret = GS_ALL_DONE;
5881 goto dont_recalculate;
5882
5883 default:
5884 gcc_assert (TREE_CODE (*expr_p) == TRUTH_AND_EXPR
5885 || TREE_CODE (*expr_p) == TRUTH_OR_EXPR
5886 || TREE_CODE (*expr_p) == TRUTH_XOR_EXPR);
5887 goto expr_2;
5888 }
5889
5890 recalculate_side_effects (*expr_p);
5891 dont_recalculate:
5892 break;
5893 }
5894
5895 /* If we replaced *expr_p, gimplify again. */
5896 if (ret == GS_OK && (*expr_p == NULL || *expr_p == save_expr))
5897 ret = GS_ALL_DONE;
5898 }
5899 while (ret == GS_OK);
5900
5901 /* If we encountered an error_mark somewhere nested inside, either
5902 stub out the statement or propagate the error back out. */
5903 if (ret == GS_ERROR)
5904 {
5905 if (is_statement)
5906 *expr_p = NULL;
5907 goto out;
5908 }
5909
5910 /* This was only valid as a return value from the langhook, which
5911 we handled. Make sure it doesn't escape from any other context. */
5912 gcc_assert (ret != GS_UNHANDLED);
5913
5914 if (fallback == fb_none && *expr_p && !is_gimple_stmt (*expr_p))
5915 {
5916 /* We aren't looking for a value, and we don't have a valid
5917 statement. If it doesn't have side-effects, throw it away. */
5918 if (!TREE_SIDE_EFFECTS (*expr_p))
5919 *expr_p = NULL;
5920 else if (!TREE_THIS_VOLATILE (*expr_p))
5921 {
5922 /* This is probably a _REF that contains something nested that
5923 has side effects. Recurse through the operands to find it. */
5924 enum tree_code code = TREE_CODE (*expr_p);
5925
5926 switch (code)
5927 {
5928 case COMPONENT_REF:
5929 case REALPART_EXPR:
5930 case IMAGPART_EXPR:
5931 case VIEW_CONVERT_EXPR:
5932 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
5933 gimple_test_f, fallback);
5934 break;
5935
5936 case ARRAY_REF:
5937 case ARRAY_RANGE_REF:
5938 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
5939 gimple_test_f, fallback);
5940 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
5941 gimple_test_f, fallback);
5942 break;
5943
5944 default:
5945 /* Anything else with side-effects must be converted to
5946 a valid statement before we get here. */
5947 gcc_unreachable ();
5948 }
5949
5950 *expr_p = NULL;
5951 }
5952 else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p))
5953 && TYPE_MODE (TREE_TYPE (*expr_p)) != BLKmode)
5954 {
5955 /* Historically, the compiler has treated a bare reference
5956 to a non-BLKmode volatile lvalue as forcing a load. */
5957 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p));
5958 /* Normally, we do not want to create a temporary for a
5959 TREE_ADDRESSABLE type because such a type should not be
5960 copied by bitwise-assignment. However, we make an
5961 exception here, as all we are doing here is ensuring that
5962 we read the bytes that make up the type. We use
5963 create_tmp_var_raw because create_tmp_var will abort when
5964 given a TREE_ADDRESSABLE type. */
5965 tree tmp = create_tmp_var_raw (type, "vol");
5966 gimple_add_tmp_var (tmp);
5967 *expr_p = build2 (MODIFY_EXPR, type, tmp, *expr_p);
5968 }
5969 else
5970 /* We can't do anything useful with a volatile reference to
5971 an incomplete type, so just throw it away. Likewise for
5972 a BLKmode type, since any implicit inner load should
5973 already have been turned into an explicit one by the
5974 gimplification process. */
5975 *expr_p = NULL;
5976 }
5977
5978 /* If we are gimplifying at the statement level, we're done. Tack
5979 everything together and replace the original statement with the
5980 gimplified form. */
5981 if (fallback == fb_none || is_statement)
5982 {
5983 if (internal_pre || internal_post)
5984 {
5985 append_to_statement_list (*expr_p, &internal_pre);
5986 append_to_statement_list (internal_post, &internal_pre);
5987 annotate_all_with_locus (&internal_pre, input_location);
5988 *expr_p = internal_pre;
5989 }
5990 else if (!*expr_p)
5991 ;
5992 else if (TREE_CODE (*expr_p) == STATEMENT_LIST)
5993 annotate_all_with_locus (expr_p, input_location);
5994 else
5995 annotate_one_with_locus (*expr_p, input_location);
5996 goto out;
5997 }
5998
5999 /* Otherwise we're gimplifying a subexpression, so the resulting value is
6000 interesting. */
6001
6002 /* If it's sufficiently simple already, we're done. Unless we are
6003 handling some post-effects internally; if that's the case, we need to
6004 copy into a temp before adding the post-effects to the tree. */
6005 if (!internal_post && (*gimple_test_f) (*expr_p))
6006 goto out;
6007
6008 /* Otherwise, we need to create a new temporary for the gimplified
6009 expression. */
6010
6011 /* We can't return an lvalue if we have an internal postqueue. The
6012 object the lvalue refers to would (probably) be modified by the
6013 postqueue; we need to copy the value out first, which means an
6014 rvalue. */
6015 if ((fallback & fb_lvalue) && !internal_post
6016 && is_gimple_addressable (*expr_p))
6017 {
6018 /* An lvalue will do. Take the address of the expression, store it
6019 in a temporary, and replace the expression with an INDIRECT_REF of
6020 that temporary. */
6021 tmp = build_fold_addr_expr (*expr_p);
6022 gimplify_expr (&tmp, pre_p, post_p, is_gimple_reg, fb_rvalue);
6023 *expr_p = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (tmp)), tmp);
6024 }
6025 else if ((fallback & fb_rvalue) && is_gimple_formal_tmp_rhs (*expr_p))
6026 {
6027 gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p)));
6028
6029 /* An rvalue will do. Assign the gimplified expression into a new
6030 temporary TMP and replace the original expression with TMP. */
6031
6032 if (internal_post || (fallback & fb_lvalue))
6033 /* The postqueue might change the value of the expression between
6034 the initialization and use of the temporary, so we can't use a
6035 formal temp. FIXME do we care? */
6036 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p);
6037 else
6038 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
6039
6040 if (TREE_CODE (*expr_p) != SSA_NAME)
6041 DECL_GIMPLE_FORMAL_TEMP_P (*expr_p) = 1;
6042 }
6043 else
6044 {
6045 #ifdef ENABLE_CHECKING
6046 if (!(fallback & fb_mayfail))
6047 {
6048 fprintf (stderr, "gimplification failed:\n");
6049 print_generic_expr (stderr, *expr_p, 0);
6050 debug_tree (*expr_p);
6051 internal_error ("gimplification failed");
6052 }
6053 #endif
6054 gcc_assert (fallback & fb_mayfail);
6055 /* If this is an asm statement, and the user asked for the
6056 impossible, don't die. Fail and let gimplify_asm_expr
6057 issue an error. */
6058 ret = GS_ERROR;
6059 goto out;
6060 }
6061
6062 /* Make sure the temporary matches our predicate. */
6063 gcc_assert ((*gimple_test_f) (*expr_p));
6064
6065 if (internal_post)
6066 {
6067 annotate_all_with_locus (&internal_post, input_location);
6068 append_to_statement_list (internal_post, pre_p);
6069 }
6070
6071 out:
6072 input_location = saved_location;
6073 return ret;
6074 }
6075
6076 /* Look through TYPE for variable-sized objects and gimplify each such
6077 size that we find. Add to LIST_P any statements generated. */
6078
6079 void
6080 gimplify_type_sizes (tree type, tree *list_p)
6081 {
6082 tree field, t;
6083
6084 if (type == NULL || type == error_mark_node)
6085 return;
6086
6087 /* We first do the main variant, then copy into any other variants. */
6088 type = TYPE_MAIN_VARIANT (type);
6089
6090 /* Avoid infinite recursion. */
6091 if (TYPE_SIZES_GIMPLIFIED (type))
6092 return;
6093
6094 TYPE_SIZES_GIMPLIFIED (type) = 1;
6095
6096 switch (TREE_CODE (type))
6097 {
6098 case INTEGER_TYPE:
6099 case ENUMERAL_TYPE:
6100 case BOOLEAN_TYPE:
6101 case REAL_TYPE:
6102 gimplify_one_sizepos (&TYPE_MIN_VALUE (type), list_p);
6103 gimplify_one_sizepos (&TYPE_MAX_VALUE (type), list_p);
6104
6105 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
6106 {
6107 TYPE_MIN_VALUE (t) = TYPE_MIN_VALUE (type);
6108 TYPE_MAX_VALUE (t) = TYPE_MAX_VALUE (type);
6109 }
6110 break;
6111
6112 case ARRAY_TYPE:
6113 /* These types may not have declarations, so handle them here. */
6114 gimplify_type_sizes (TREE_TYPE (type), list_p);
6115 gimplify_type_sizes (TYPE_DOMAIN (type), list_p);
6116 break;
6117
6118 case RECORD_TYPE:
6119 case UNION_TYPE:
6120 case QUAL_UNION_TYPE:
6121 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
6122 if (TREE_CODE (field) == FIELD_DECL)
6123 {
6124 gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), list_p);
6125 gimplify_type_sizes (TREE_TYPE (field), list_p);
6126 }
6127 break;
6128
6129 case POINTER_TYPE:
6130 case REFERENCE_TYPE:
6131 /* We used to recurse on the pointed-to type here, which turned out to
6132 be incorrect because its definition might refer to variables not
6133 yet initialized at this point if a forward declaration is involved.
6134
6135 It was actually useful for anonymous pointed-to types to ensure
6136 that the sizes evaluation dominates every possible later use of the
6137 values. Restricting to such types here would be safe since there
6138 is no possible forward declaration around, but would introduce an
6139 undesirable middle-end semantic to anonymity. We then defer to
6140 front-ends the responsibility of ensuring that the sizes are
6141 evaluated both early and late enough, e.g. by attaching artificial
6142 type declarations to the tree. */
6143 break;
6144
6145 default:
6146 break;
6147 }
6148
6149 gimplify_one_sizepos (&TYPE_SIZE (type), list_p);
6150 gimplify_one_sizepos (&TYPE_SIZE_UNIT (type), list_p);
6151
6152 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
6153 {
6154 TYPE_SIZE (t) = TYPE_SIZE (type);
6155 TYPE_SIZE_UNIT (t) = TYPE_SIZE_UNIT (type);
6156 TYPE_SIZES_GIMPLIFIED (t) = 1;
6157 }
6158 }
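/* For example (a sketch): for a C99 variable-length array declaration

     int a[n + 1];

   the array type's TYPE_DOMAIN bound and TYPE_SIZE involve the expression
   N + 1; they are gimplified here into temporaries appended to LIST_P, so
   that later uses of the type's size are plain GIMPLE values, and the
   results are copied into all variants of the type.  */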
6159
6160 /* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
6161 a size or position, has had all of its SAVE_EXPRs evaluated.
6162 We add any required statements to STMT_P. */
6163
6164 void
6165 gimplify_one_sizepos (tree *expr_p, tree *stmt_p)
6166 {
6167 tree type, expr = *expr_p;
6168
6169 /* We don't do anything if the value isn't there, is constant, or contains
6170    a PLACEHOLDER_EXPR.  We also don't want to do anything if it's already
6171 a VAR_DECL. If it's a VAR_DECL from another function, the gimplifier
6172 will want to replace it with a new variable, but that will cause problems
6173 if this type is from outside the function. It's OK to have that here. */
6174 if (expr == NULL_TREE || TREE_CONSTANT (expr)
6175 || TREE_CODE (expr) == VAR_DECL
6176 || CONTAINS_PLACEHOLDER_P (expr))
6177 return;
6178
6179 type = TREE_TYPE (expr);
6180 *expr_p = unshare_expr (expr);
6181
6182 gimplify_expr (expr_p, stmt_p, NULL, is_gimple_val, fb_rvalue);
6183 expr = *expr_p;
6184
6185 /* Verify that we've an exact type match with the original expression.
6186 In particular, we do not wish to drop a "sizetype" in favour of a
6187 type of similar dimensions. We don't want to pollute the generic
6188 type-stripping code with this knowledge because it doesn't matter
6189 for the bulk of GENERIC/GIMPLE. It only matters that TYPE_SIZE_UNIT
6190 and friends retain their "sizetype-ness". */
6191 if (TREE_TYPE (expr) != type
6192 && TREE_CODE (type) == INTEGER_TYPE
6193 && TYPE_IS_SIZETYPE (type))
6194 {
6195 tree tmp;
6196
6197 *expr_p = create_tmp_var (type, NULL);
6198 tmp = build1 (NOP_EXPR, type, expr);
6199 tmp = build2 (MODIFY_EXPR, type, *expr_p, tmp);
6200 if (EXPR_HAS_LOCATION (expr))
6201 SET_EXPR_LOCUS (tmp, EXPR_LOCUS (expr));
6202 else
6203 SET_EXPR_LOCATION (tmp, input_location);
6204
6205 gimplify_and_add (tmp, stmt_p);
6206 }
6207 }
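/* E.g. if *EXPR_P is the byte-size computation (N + 1) * 4 for such an
   array, it is gimplified into something like D.1234 = (N + 1) * 4 and
   *EXPR_P becomes D.1234 (the name D.1234 is illustrative).  The NOP_EXPR
   dance above only kicks in when the gimplified value ended up with a type
   different from the original sizetype, in which case the value is copied
   into a fresh temporary of the original type.  */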
6208
6209 #ifdef ENABLE_CHECKING
6210 /* Compare types A and B for a "close enough" match. */
6211
6212 static bool
6213 cpt_same_type (tree a, tree b)
6214 {
6215 if (lang_hooks.types_compatible_p (a, b))
6216 return true;
6217
6218 /* ??? The C++ FE decomposes METHOD_TYPES to FUNCTION_TYPES and doesn't
6219 link them together. This routine is intended to catch type errors
6220 that will affect the optimizers, and the optimizers don't add new
6221 dereferences of function pointers, so ignore it. */
6222 if ((TREE_CODE (a) == FUNCTION_TYPE || TREE_CODE (a) == METHOD_TYPE)
6223 && (TREE_CODE (b) == FUNCTION_TYPE || TREE_CODE (b) == METHOD_TYPE))
6224 return true;
6225
6226 /* ??? The C FE pushes type qualifiers after the fact into the type of
6227 the element from the type of the array. See build_unary_op's handling
6228 of ADDR_EXPR. This seems wrong -- if we were going to do this, we
6229 should have done it when creating the variable in the first place.
6230 Alternately, why aren't the two array types made variants? */
6231 if (TREE_CODE (a) == ARRAY_TYPE && TREE_CODE (b) == ARRAY_TYPE)
6232 return cpt_same_type (TREE_TYPE (a), TREE_TYPE (b));
6233
6234 /* And because of those, we have to recurse down through pointers. */
6235 if (POINTER_TYPE_P (a) && POINTER_TYPE_P (b))
6236 return cpt_same_type (TREE_TYPE (a), TREE_TYPE (b));
6237
6238 return false;
6239 }
6240
6241 /* Check for some cases of the front end missing cast expressions.
6242 The type of a dereference should correspond to the pointer type;
6243 similarly the type of an address should match its object. */
6244
6245 static tree
6246 check_pointer_types_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
6247 void *data ATTRIBUTE_UNUSED)
6248 {
6249 tree t = *tp;
6250 tree ptype, otype, dtype;
6251
6252 switch (TREE_CODE (t))
6253 {
6254 case INDIRECT_REF:
6255 case ARRAY_REF:
6256 otype = TREE_TYPE (t);
6257 ptype = TREE_TYPE (TREE_OPERAND (t, 0));
6258 dtype = TREE_TYPE (ptype);
6259 gcc_assert (cpt_same_type (otype, dtype));
6260 break;
6261
6262 case ADDR_EXPR:
6263 ptype = TREE_TYPE (t);
6264 otype = TREE_TYPE (TREE_OPERAND (t, 0));
6265 dtype = TREE_TYPE (ptype);
6266 if (!cpt_same_type (otype, dtype))
6267 {
6268 /* &array is allowed to produce a pointer to the element, rather than
6269 a pointer to the array type. We must allow this in order to
6270 		 properly represent assigning the address of an array in C to a
6271 		 pointer to the element type.  */
6272 gcc_assert (TREE_CODE (otype) == ARRAY_TYPE
6273 && POINTER_TYPE_P (ptype)
6274 && cpt_same_type (TREE_TYPE (otype), dtype));
6275 break;
6276 }
6277 break;
6278
6279 default:
6280 return NULL_TREE;
6281 }
6282
6283
6284 return NULL_TREE;
6285 }
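/* For instance (illustration only), given

     int i, *p = &i;
     int x = *p;

   the ADDR_EXPR &i must have a pointer type whose pointed-to type matches
   the type of I, and the INDIRECT_REF *p must have the pointed-to type of
   P's type; cpt_same_type is deliberately lenient about the
   method/function and array-decay cases noted above.  */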
6286 #endif
6287
6288 /* Gimplify the body of statements pointed to by BODY_P. FNDECL is the
6289 function decl containing BODY. */
6290
6291 void
6292 gimplify_body (tree *body_p, tree fndecl, bool do_parms)
6293 {
6294 location_t saved_location = input_location;
6295 tree body, parm_stmts;
6296
6297 timevar_push (TV_TREE_GIMPLIFY);
6298
6299 gcc_assert (gimplify_ctxp == NULL);
6300 push_gimplify_context ();
6301
6302 /* Unshare most shared trees in the body and in that of any nested functions.
6303 It would seem we don't have to do this for nested functions because
6304 they are supposed to be output and then the outer function gimplified
6305 first, but the g++ front end doesn't always do it that way. */
6306 unshare_body (body_p, fndecl);
6307 unvisit_body (body_p, fndecl);
6308
6309   /* Make sure input_location isn't set to something weird.  */
6310 input_location = DECL_SOURCE_LOCATION (fndecl);
6311
6312 /* Resolve callee-copies. This has to be done before processing
6313 the body so that DECL_VALUE_EXPR gets processed correctly. */
6314 parm_stmts = do_parms ? gimplify_parameters () : NULL;
6315
6316 /* Gimplify the function's body. */
6317 gimplify_stmt (body_p);
6318 body = *body_p;
6319
6320 if (!body)
6321 body = alloc_stmt_list ();
6322 else if (TREE_CODE (body) == STATEMENT_LIST)
6323 {
6324 tree t = expr_only (*body_p);
6325 if (t)
6326 body = t;
6327 }
6328
6329 /* If there isn't an outer BIND_EXPR, add one. */
6330 if (TREE_CODE (body) != BIND_EXPR)
6331 {
6332 tree b = build3 (BIND_EXPR, void_type_node, NULL_TREE,
6333 NULL_TREE, NULL_TREE);
6334 TREE_SIDE_EFFECTS (b) = 1;
6335 append_to_statement_list_force (body, &BIND_EXPR_BODY (b));
6336 body = b;
6337 }
6338
6339 /* If we had callee-copies statements, insert them at the beginning
6340 of the function. */
6341 if (parm_stmts)
6342 {
6343 append_to_statement_list_force (BIND_EXPR_BODY (body), &parm_stmts);
6344 BIND_EXPR_BODY (body) = parm_stmts;
6345 }
6346
6347 /* Unshare again, in case gimplification was sloppy. */
6348 unshare_all_trees (body);
6349
6350 *body_p = body;
6351
6352 pop_gimplify_context (body);
6353 gcc_assert (gimplify_ctxp == NULL);
6354
6355 #ifdef ENABLE_CHECKING
6356 walk_tree (body_p, check_pointer_types_r, NULL, NULL);
6357 #endif
6358
6359 timevar_pop (TV_TREE_GIMPLIFY);
6360 input_location = saved_location;
6361 }
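/* As a small illustration of the overall effect (a sketch, not exact dump
   syntax; temporary names are illustrative): a body such as

     return a + b * c;

   leaves gimplify_body as roughly

     {
       D.1 = b * c;
       D.2 = a + D.1;
       return D.2;
     }

   wrapped in an outer BIND_EXPR that declares the temporaries.  */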
6362
6363 /* Entry point to the gimplification pass. FNDECL is the FUNCTION_DECL
6364 node for the function we want to gimplify. */
6365
6366 void
6367 gimplify_function_tree (tree fndecl)
6368 {
6369 tree oldfn, parm, ret;
6370
6371 oldfn = current_function_decl;
6372 current_function_decl = fndecl;
6373 cfun = DECL_STRUCT_FUNCTION (fndecl);
6374 if (cfun == NULL)
6375 allocate_struct_function (fndecl);
6376
6377 for (parm = DECL_ARGUMENTS (fndecl); parm ; parm = TREE_CHAIN (parm))
6378 {
6379 /* Preliminarily mark non-addressed complex variables as eligible
6380 for promotion to gimple registers. We'll transform their uses
6381 as we find them. */
6382 if (TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
6383 && !TREE_THIS_VOLATILE (parm)
6384 && !needs_to_live_in_memory (parm))
6385 DECL_COMPLEX_GIMPLE_REG_P (parm) = 1;
6386 }
6387
6388 ret = DECL_RESULT (fndecl);
6389 if (TREE_CODE (TREE_TYPE (ret)) == COMPLEX_TYPE
6390 && !needs_to_live_in_memory (ret))
6391 DECL_COMPLEX_GIMPLE_REG_P (ret) = 1;
6392
6393 gimplify_body (&DECL_SAVED_TREE (fndecl), fndecl, true);
6394
6395 /* If we're instrumenting function entry/exit, then prepend the call to
6396 the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
6397 catch the exit hook. */
6398 /* ??? Add some way to ignore exceptions for this TFE. */
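  /* The wrapping built below corresponds roughly to (a sketch):

       {
	 __builtin_profile_func_enter ();
	 try { <original body> } finally { __builtin_profile_func_exit (); }
       }

     where the hooks correspond to __cyg_profile_func_enter/_exit at the
     source level; their actual arguments are supplied later when the
     builtins are expanded.  */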
6399 if (flag_instrument_function_entry_exit
6400 && ! DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl))
6401 {
6402 tree tf, x, bind;
6403
6404 tf = build2 (TRY_FINALLY_EXPR, void_type_node, NULL, NULL);
6405 TREE_SIDE_EFFECTS (tf) = 1;
6406 x = DECL_SAVED_TREE (fndecl);
6407 append_to_statement_list (x, &TREE_OPERAND (tf, 0));
6408 x = implicit_built_in_decls[BUILT_IN_PROFILE_FUNC_EXIT];
6409 x = build_function_call_expr (x, NULL);
6410 append_to_statement_list (x, &TREE_OPERAND (tf, 1));
6411
6412 bind = build3 (BIND_EXPR, void_type_node, NULL, NULL, NULL);
6413 TREE_SIDE_EFFECTS (bind) = 1;
6414 x = implicit_built_in_decls[BUILT_IN_PROFILE_FUNC_ENTER];
6415 x = build_function_call_expr (x, NULL);
6416 append_to_statement_list (x, &BIND_EXPR_BODY (bind));
6417 append_to_statement_list (tf, &BIND_EXPR_BODY (bind));
6418
6419 DECL_SAVED_TREE (fndecl) = bind;
6420 }
6421
6422 current_function_decl = oldfn;
6423 cfun = oldfn ? DECL_STRUCT_FUNCTION (oldfn) : NULL;
6424 }
6425
6426
6427 /* Expands EXPR to list of gimple statements STMTS. If SIMPLE is true,
6428 force the result to be either ssa_name or an invariant, otherwise
6429 just force it to be a rhs expression. If VAR is not NULL, make the
6430 base variable of the final destination be VAR if suitable. */
6431
6432 tree
6433 force_gimple_operand (tree expr, tree *stmts, bool simple, tree var)
6434 {
6435 tree t;
6436 enum gimplify_status ret;
6437 gimple_predicate gimple_test_f;
6438
6439 *stmts = NULL_TREE;
6440
6441 if (is_gimple_val (expr))
6442 return expr;
6443
6444 gimple_test_f = simple ? is_gimple_val : is_gimple_reg_rhs;
6445
6446 push_gimplify_context ();
6447 gimplify_ctxp->into_ssa = in_ssa_p;
6448
6449 if (var)
6450 expr = build2 (MODIFY_EXPR, TREE_TYPE (var), var, expr);
6451
6452 ret = gimplify_expr (&expr, stmts, NULL,
6453 gimple_test_f, fb_rvalue);
6454 gcc_assert (ret != GS_ERROR);
6455
6456 if (referenced_vars)
6457 {
6458 for (t = gimplify_ctxp->temps; t ; t = TREE_CHAIN (t))
6459 add_referenced_var (t);
6460 }
6461
6462 pop_gimplify_context (NULL);
6463
6464 return expr;
6465 }
6466
6467 /* Invokes force_gimple_operand for EXPR with parameters SIMPLE_P and VAR. If
6468 some statements are produced, emits them before BSI. */
6469
6470 tree
6471 force_gimple_operand_bsi (block_stmt_iterator *bsi, tree expr,
6472 bool simple_p, tree var)
6473 {
6474 tree stmts;
6475
6476 expr = force_gimple_operand (expr, &stmts, simple_p, var);
6477 if (stmts)
6478 bsi_insert_before (bsi, stmts, BSI_SAME_STMT);
6479
6480 return expr;
6481 }
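/* Typical uses from optimization passes (illustrative only):

     tree stmts, val;
     val = force_gimple_operand (expr, &stmts, true, NULL_TREE);
     if (stmts)
       bsi_insert_on_edge (e, stmts);

   or, when a statement iterator is already positioned,

     val = force_gimple_operand_bsi (&bsi, expr, true, NULL_TREE);

   The bsi_insert_on_edge call is just one possible destination for the
   generated statements; the _bsi variant above inserts them itself.  */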
6482
6483 #include "gt-gimplify.h"
6484