1 /* Tree lowering pass. This pass converts the GENERIC functions-as-trees
2 tree representation into the GIMPLE form.
3 Copyright (C) 2002-2018 Free Software Foundation, Inc.
4 Major work done by Sebastian Pop <s.pop@laposte.net>,
5 Diego Novillo <dnovillo@redhat.com> and Jason Merrill <jason@redhat.com>.
6
7 This file is part of GCC.
8
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
13
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
18
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "backend.h"
27 #include "target.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "memmodel.h"
31 #include "tm_p.h"
32 #include "gimple.h"
33 #include "gimple-predict.h"
34 #include "tree-pass.h" /* FIXME: only for PROP_gimple_any */
35 #include "ssa.h"
36 #include "cgraph.h"
37 #include "tree-pretty-print.h"
38 #include "diagnostic-core.h"
39 #include "alias.h"
40 #include "fold-const.h"
41 #include "calls.h"
42 #include "varasm.h"
43 #include "stmt.h"
44 #include "expr.h"
45 #include "gimple-fold.h"
46 #include "tree-eh.h"
47 #include "gimplify.h"
48 #include "gimple-iterator.h"
49 #include "stor-layout.h"
50 #include "print-tree.h"
51 #include "tree-iterator.h"
52 #include "tree-inline.h"
53 #include "langhooks.h"
54 #include "tree-cfg.h"
55 #include "tree-ssa.h"
56 #include "omp-general.h"
57 #include "omp-low.h"
58 #include "gimple-low.h"
59 #include "gomp-constants.h"
60 #include "splay-tree.h"
61 #include "gimple-walk.h"
62 #include "langhooks-def.h" /* FIXME: for lhd_set_decl_assembler_name */
63 #include "builtins.h"
64 #include "stringpool.h"
65 #include "attribs.h"
66 #include "asan.h"
67 #include "dbgcnt.h"
68
/* Hash set of poisoned variables in a bind expr, used by the ASan
   instrumentation during gimplification.  NULL when not collecting.  */
static hash_set<tree> *asan_poisoned_variables = NULL;
71
/* Per-variable flag bits recorded in a gimplify_omp_ctx's VARIABLES
   splay tree.  Values are distinct powers of two so they can be OR'ed
   together (see e.g. the GOVD_LOCAL | GOVD_SEEN use in
   gimple_add_tmp_var).  */
enum gimplify_omp_var_data
{
  GOVD_SEEN = 1,
  GOVD_EXPLICIT = 2,
  GOVD_SHARED = 4,
  GOVD_PRIVATE = 8,
  GOVD_FIRSTPRIVATE = 16,
  GOVD_LASTPRIVATE = 32,
  GOVD_REDUCTION = 64,
  GOVD_LOCAL = 128,
  GOVD_MAP = 256,
  GOVD_DEBUG_PRIVATE = 512,
  GOVD_PRIVATE_OUTER_REF = 1024,
  GOVD_LINEAR = 2048,
  GOVD_ALIGNED = 4096,

  /* Flag for GOVD_MAP: don't copy back.  */
  GOVD_MAP_TO_ONLY = 8192,

  /* Flag for GOVD_LINEAR or GOVD_LASTPRIVATE: no outer reference.  */
  GOVD_LINEAR_LASTPRIVATE_NO_OUTER = 16384,

  GOVD_MAP_0LEN_ARRAY = 32768,

  /* Flag for GOVD_MAP, if it is always, to or always, tofrom mapping.  */
  GOVD_MAP_ALWAYS_TO = 65536,

  /* Flag for shared vars that are or might be stored to in the region.  */
  GOVD_WRITTEN = 131072,

  /* Flag for GOVD_MAP, if it is a forced mapping.  */
  GOVD_MAP_FORCE = 262144,

  /* Flag for GOVD_MAP: must be present already.  */
  GOVD_MAP_FORCE_PRESENT = 524288,

  /* Mask selecting the data-sharing class bits out of a flag word.  */
  GOVD_DATA_SHARE_CLASS = (GOVD_SHARED | GOVD_PRIVATE | GOVD_FIRSTPRIVATE
			   | GOVD_LASTPRIVATE | GOVD_REDUCTION | GOVD_LINEAR
			   | GOVD_LOCAL)
};
112
113
/* Kind of OpenMP/OpenACC region a gimplify_omp_ctx describes.  Values
   are bit-encoded: the ORT_COMBINED_* variants are the base value with
   bit 0 set, and OpenACC regions OR in ORT_ACC.  */
enum omp_region_type
{
  ORT_WORKSHARE = 0x00,
  ORT_SIMD = 0x01,

  ORT_PARALLEL = 0x02,
  ORT_COMBINED_PARALLEL = 0x03,

  ORT_TASK = 0x04,
  ORT_UNTIED_TASK = 0x05,

  ORT_TEAMS = 0x08,
  ORT_COMBINED_TEAMS = 0x09,

  /* Data region.  */
  ORT_TARGET_DATA = 0x10,

  /* Data region with offloading.  */
  ORT_TARGET = 0x20,
  ORT_COMBINED_TARGET = 0x21,

  /* OpenACC variants.  */
  ORT_ACC = 0x40,  /* A generic OpenACC region.  */
  ORT_ACC_DATA = ORT_ACC | ORT_TARGET_DATA,  /* Data construct.  */
  ORT_ACC_PARALLEL = ORT_ACC | ORT_TARGET,  /* Parallel construct.  */
  ORT_ACC_KERNELS = ORT_ACC | ORT_TARGET | 0x80,  /* Kernels construct.  */
  ORT_ACC_HOST_DATA = ORT_ACC | ORT_TARGET_DATA | 0x80,  /* Host data.  */

  /* Dummy OpenMP region, used to disable expansion of
     DECL_VALUE_EXPRs in taskloop pre body.  */
  ORT_NONE = 0x100
};
146
/* Gimplify hashtable helper: hashing/equality traits for the formal
   temporary table (elt_t entries) used by lookup_tmp_var.  */

struct gimplify_hasher : free_ptr_hash <elt_t>
{
  static inline hashval_t hash (const elt_t *);
  static inline bool equal (const elt_t *, const elt_t *);
};
154
/* One level of gimplification state.  Contexts form a stack through
   PREV_CONTEXT, managed by push_gimplify_context/pop_gimplify_context
   and recycled via ctx_alloc/ctx_free.  */
struct gimplify_ctx
{
  struct gimplify_ctx *prev_context;	/* Enclosing context, or NULL.  */

  /* Stack of GIMPLE_BINDs, maintained by gimple_push_bind_expr and
     gimple_pop_bind_expr.  */
  vec<gbind *> bind_expr_stack;
  /* Chain (via DECL_CHAIN) of temporaries created while gimplifying;
     emptied into the function by pop_gimplify_context.  */
  tree temps;
  /* Cleanups seen while inside a COND_EXPR; flushed to the prequeue by
     gimple_pop_condition when back at unconditional scope.  */
  gimple_seq conditional_cleanups;
  tree exit_label;
  tree return_temp;

  vec<tree> case_labels;
  hash_set<tree> *live_switch_vars;
  /* The formal temporary table.  Should this be persistent?  */
  hash_table<gimplify_hasher> *temp_htab;

  /* Depth of nested COND_EXPRs; see gimple_push_condition.  */
  int conditions;
  unsigned into_ssa : 1;		/* Create SSA names for temporaries.  */
  unsigned allow_rhs_cond_expr : 1;
  unsigned in_cleanup_point_expr : 1;
  unsigned keep_stack : 1;
  unsigned save_stack : 1;
  unsigned in_switch_expr : 1;
};
178
/* State for one OpenMP/OpenACC region being gimplified.  Regions nest
   through OUTER_CONTEXT; see new_omp_context/delete_omp_context.  */
struct gimplify_omp_ctx
{
  struct gimplify_omp_ctx *outer_context;	/* Enclosing region, or NULL.  */
  /* Map from variable DECL to gimplify_omp_var_data flags, keyed by
     DECL_UID (see splay_tree_compare_decl_uid).  */
  splay_tree variables;
  hash_set<tree> *privatized_types;
  /* Iteration variables in an OMP_FOR.  */
  vec<tree> loop_iter_var;
  location_t location;		/* input_location when the region was entered.  */
  enum omp_clause_default_kind default_kind;
  enum omp_region_type region_type;
  bool combined_loop;
  bool distribute;
  bool target_map_scalars_firstprivate;
  bool target_map_pointers_as_0len_arrays;
  bool target_firstprivatize_array_bases;
  bool add_safelen1;
};
196
/* The innermost gimplification context and the innermost OMP region
   context; both act as stacks through their prev/outer pointers.  */
static struct gimplify_ctx *gimplify_ctxp;
static struct gimplify_omp_ctx *gimplify_omp_ctxp;

/* Forward declarations.  */
static enum gimplify_status gimplify_compound_expr (tree *, gimple_seq *, bool);
static hash_map<tree, tree> *oacc_declare_returns;
static enum gimplify_status gimplify_expr (tree *, gimple_seq *, gimple_seq *,
					   bool (*) (tree), fallback_t, bool);
205
/* Shorter alias name for gimple_seq_add_stmt_without_update, for use in
   gimplify.c only: append statement GS to sequence *SEQ_P without
   updating def/use information.  */

static inline void
gimplify_seq_add_stmt (gimple_seq *seq_p, gimple *gs)
{
  gimple_seq_add_stmt_without_update (seq_p, gs);
}
214
215 /* Append sequence SRC to the end of sequence *DST_P. If *DST_P is
216 NULL, a new sequence is allocated. This function is
217 similar to gimple_seq_add_seq, but does not scan the operands.
218 During gimplification, we need to manipulate statement sequences
219 before the def/use vectors have been constructed. */
220
221 static void
gimplify_seq_add_seq(gimple_seq * dst_p,gimple_seq src)222 gimplify_seq_add_seq (gimple_seq *dst_p, gimple_seq src)
223 {
224 gimple_stmt_iterator si;
225
226 if (src == NULL)
227 return;
228
229 si = gsi_last (*dst_p);
230 gsi_insert_seq_after_without_update (&si, src, GSI_NEW_STMT);
231 }
232
233
/* Free list of gimplify_ctx structs, chained through prev_context, used
   by ctx_alloc/ctx_free when pushing and popping gimplify contexts.  */

static struct gimplify_ctx *ctx_pool = NULL;
238
239 /* Return a gimplify context struct from the pool. */
240
241 static inline struct gimplify_ctx *
ctx_alloc(void)242 ctx_alloc (void)
243 {
244 struct gimplify_ctx * c = ctx_pool;
245
246 if (c)
247 ctx_pool = c->prev_context;
248 else
249 c = XNEW (struct gimplify_ctx);
250
251 memset (c, '\0', sizeof (*c));
252 return c;
253 }
254
/* Put gimplify context C back onto the ctx_pool free list; the memory
   is not released until free_gimplify_stack.  */

static inline void
ctx_free (struct gimplify_ctx *c)
{
  c->prev_context = ctx_pool;
  ctx_pool = c;
}
263
264 /* Free allocated ctx stack memory. */
265
266 void
free_gimplify_stack(void)267 free_gimplify_stack (void)
268 {
269 struct gimplify_ctx *c;
270
271 while ((c = ctx_pool))
272 {
273 ctx_pool = c->prev_context;
274 free (c);
275 }
276 }
277
278
279 /* Set up a context for the gimplifier. */
280
281 void
push_gimplify_context(bool in_ssa,bool rhs_cond_ok)282 push_gimplify_context (bool in_ssa, bool rhs_cond_ok)
283 {
284 struct gimplify_ctx *c = ctx_alloc ();
285
286 c->prev_context = gimplify_ctxp;
287 gimplify_ctxp = c;
288 gimplify_ctxp->into_ssa = in_ssa;
289 gimplify_ctxp->allow_rhs_cond_expr = rhs_cond_ok;
290 }
291
292 /* Tear down a context for the gimplifier. If BODY is non-null, then
293 put the temporaries into the outer BIND_EXPR. Otherwise, put them
294 in the local_decls.
295
296 BODY is not a sequence, but the first tuple in a sequence. */
297
298 void
pop_gimplify_context(gimple * body)299 pop_gimplify_context (gimple *body)
300 {
301 struct gimplify_ctx *c = gimplify_ctxp;
302
303 gcc_assert (c
304 && (!c->bind_expr_stack.exists ()
305 || c->bind_expr_stack.is_empty ()));
306 c->bind_expr_stack.release ();
307 gimplify_ctxp = c->prev_context;
308
309 if (body)
310 declare_vars (c->temps, body, false);
311 else
312 record_vars (c->temps);
313
314 delete c->temp_htab;
315 c->temp_htab = NULL;
316 ctx_free (c);
317 }
318
/* Push a GIMPLE_BIND tuple onto the stack of bindings in the current
   gimplify context.  */

static void
gimple_push_bind_expr (gbind *bind_stmt)
{
  /* Pre-reserve a few slots to avoid repeated reallocation.  */
  gimplify_ctxp->bind_expr_stack.reserve (8);
  gimplify_ctxp->bind_expr_stack.safe_push (bind_stmt);
}
327
/* Pop the innermost element off the stack of bindings.  */

static void
gimple_pop_bind_expr (void)
{
  gimplify_ctxp->bind_expr_stack.pop ();
}
335
/* Return the innermost element of the stack of bindings (the most
   recently pushed GIMPLE_BIND).  */

gbind *
gimple_current_bind_expr (void)
{
  return gimplify_ctxp->bind_expr_stack.last ();
}
343
/* Return the stack of bindings created during gimplification.  Note
   the vec is returned by value (shallow copy of the vec header).  */

vec<gbind *>
gimple_bind_expr_stack (void)
{
  return gimplify_ctxp->bind_expr_stack;
}
351
/* Return true iff there is a COND_EXPR between us and the innermost
   CLEANUP_POINT_EXPR.  This info is used by gimple_push_cleanup.  */

static bool
gimple_conditional_context (void)
{
  return gimplify_ctxp->conditions > 0;
}
360
/* Note that we've entered a COND_EXPR by bumping the nesting count.  */

static void
gimple_push_condition (void)
{
#ifdef ENABLE_GIMPLE_CHECKING
  /* On entry to the outermost conditional, no conditional cleanups may
     be pending.  */
  if (gimplify_ctxp->conditions == 0)
    gcc_assert (gimple_seq_empty_p (gimplify_ctxp->conditional_cleanups));
#endif
  ++(gimplify_ctxp->conditions);
}
372
373 /* Note that we've left a COND_EXPR. If we're back at unconditional scope
374 now, add any conditional cleanups we've seen to the prequeue. */
375
376 static void
gimple_pop_condition(gimple_seq * pre_p)377 gimple_pop_condition (gimple_seq *pre_p)
378 {
379 int conds = --(gimplify_ctxp->conditions);
380
381 gcc_assert (conds >= 0);
382 if (conds == 0)
383 {
384 gimplify_seq_add_seq (pre_p, gimplify_ctxp->conditional_cleanups);
385 gimplify_ctxp->conditional_cleanups = NULL;
386 }
387 }
388
389 /* A stable comparison routine for use with splay trees and DECLs. */
390
391 static int
splay_tree_compare_decl_uid(splay_tree_key xa,splay_tree_key xb)392 splay_tree_compare_decl_uid (splay_tree_key xa, splay_tree_key xb)
393 {
394 tree a = (tree) xa;
395 tree b = (tree) xb;
396
397 return DECL_UID (a) - DECL_UID (b);
398 }
399
400 /* Create a new omp construct that deals with variable remapping. */
401
402 static struct gimplify_omp_ctx *
new_omp_context(enum omp_region_type region_type)403 new_omp_context (enum omp_region_type region_type)
404 {
405 struct gimplify_omp_ctx *c;
406
407 c = XCNEW (struct gimplify_omp_ctx);
408 c->outer_context = gimplify_omp_ctxp;
409 c->variables = splay_tree_new (splay_tree_compare_decl_uid, 0, 0);
410 c->privatized_types = new hash_set<tree>;
411 c->location = input_location;
412 c->region_type = region_type;
413 if ((region_type & ORT_TASK) == 0)
414 c->default_kind = OMP_CLAUSE_DEFAULT_SHARED;
415 else
416 c->default_kind = OMP_CLAUSE_DEFAULT_UNSPECIFIED;
417
418 return c;
419 }
420
/* Destroy an omp construct context C, releasing its variable map,
   privatized-type set and loop iteration variable vector.  */

static void
delete_omp_context (struct gimplify_omp_ctx *c)
{
  splay_tree_delete (c->variables);
  delete c->privatized_types;
  c->loop_iter_var.release ();
  XDELETE (c);
}
431
/* Forward declarations for the OMP variable-tracking helpers defined
   later in this file.  */
static void omp_add_variable (struct gimplify_omp_ctx *, tree, unsigned int);
static bool omp_notice_variable (struct gimplify_omp_ctx *, tree, bool);
434
/* Both gimplify the statement T and append it to *SEQ_P.  This function
   behaves exactly as gimplify_stmt, but you don't have to pass T as a
   reference.  */

void
gimplify_and_add (tree t, gimple_seq *seq_p)
{
  gimplify_stmt (&t, seq_p);
}
444
445 /* Gimplify statement T into sequence *SEQ_P, and return the first
446 tuple in the sequence of generated tuples for this statement.
447 Return NULL if gimplifying T produced no tuples. */
448
449 static gimple *
gimplify_and_return_first(tree t,gimple_seq * seq_p)450 gimplify_and_return_first (tree t, gimple_seq *seq_p)
451 {
452 gimple_stmt_iterator last = gsi_last (*seq_p);
453
454 gimplify_and_add (t, seq_p);
455
456 if (!gsi_end_p (last))
457 {
458 gsi_next (&last);
459 return gsi_stmt (last);
460 }
461 else
462 return gimple_seq_first_stmt (*seq_p);
463 }
464
465 /* Returns true iff T is a valid RHS for an assignment to an un-renamed
466 LHS, or for a call argument. */
467
468 static bool
is_gimple_mem_rhs(tree t)469 is_gimple_mem_rhs (tree t)
470 {
471 /* If we're dealing with a renamable type, either source or dest must be
472 a renamed variable. */
473 if (is_gimple_reg_type (TREE_TYPE (t)))
474 return is_gimple_val (t);
475 else
476 return is_gimple_val (t) || is_gimple_lvalue (t);
477 }
478
479 /* Return true if T is a CALL_EXPR or an expression that can be
480 assigned to a temporary. Note that this predicate should only be
481 used during gimplification. See the rationale for this in
482 gimplify_modify_expr. */
483
484 static bool
is_gimple_reg_rhs_or_call(tree t)485 is_gimple_reg_rhs_or_call (tree t)
486 {
487 return (get_gimple_rhs_class (TREE_CODE (t)) != GIMPLE_INVALID_RHS
488 || TREE_CODE (t) == CALL_EXPR);
489 }
490
491 /* Return true if T is a valid memory RHS or a CALL_EXPR. Note that
492 this predicate should only be used during gimplification. See the
493 rationale for this in gimplify_modify_expr. */
494
495 static bool
is_gimple_mem_rhs_or_call(tree t)496 is_gimple_mem_rhs_or_call (tree t)
497 {
498 /* If we're dealing with a renamable type, either source or dest must be
499 a renamed variable. */
500 if (is_gimple_reg_type (TREE_TYPE (t)))
501 return is_gimple_val (t);
502 else
503 return (is_gimple_val (t)
504 || is_gimple_lvalue (t)
505 || TREE_CLOBBER_P (t)
506 || TREE_CODE (t) == CALL_EXPR);
507 }
508
509 /* Create a temporary with a name derived from VAL. Subroutine of
510 lookup_tmp_var; nobody else should call this function. */
511
512 static inline tree
create_tmp_from_val(tree val)513 create_tmp_from_val (tree val)
514 {
515 /* Drop all qualifiers and address-space information from the value type. */
516 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (val));
517 tree var = create_tmp_var (type, get_name (val));
518 if (TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE
519 || TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE)
520 DECL_GIMPLE_REG_P (var) = 1;
521 return var;
522 }
523
524 /* Create a temporary to hold the value of VAL. If IS_FORMAL, try to reuse
525 an existing expression temporary. */
526
527 static tree
lookup_tmp_var(tree val,bool is_formal)528 lookup_tmp_var (tree val, bool is_formal)
529 {
530 tree ret;
531
532 /* If not optimizing, never really reuse a temporary. local-alloc
533 won't allocate any variable that is used in more than one basic
534 block, which means it will go into memory, causing much extra
535 work in reload and final and poorer code generation, outweighing
536 the extra memory allocation here. */
537 if (!optimize || !is_formal || TREE_SIDE_EFFECTS (val))
538 ret = create_tmp_from_val (val);
539 else
540 {
541 elt_t elt, *elt_p;
542 elt_t **slot;
543
544 elt.val = val;
545 if (!gimplify_ctxp->temp_htab)
546 gimplify_ctxp->temp_htab = new hash_table<gimplify_hasher> (1000);
547 slot = gimplify_ctxp->temp_htab->find_slot (&elt, INSERT);
548 if (*slot == NULL)
549 {
550 elt_p = XNEW (elt_t);
551 elt_p->val = val;
552 elt_p->temp = ret = create_tmp_from_val (val);
553 *slot = elt_p;
554 }
555 else
556 {
557 elt_p = *slot;
558 ret = elt_p->temp;
559 }
560 }
561
562 return ret;
563 }
564
/* Helper for get_formal_tmp_var and get_initialized_tmp_var: gimplify
   VAL into *PRE_P/*POST_P, create a temporary (an SSA name when
   ALLOW_SSA and the context permits, otherwise a VAR_DECL via
   lookup_tmp_var honoring IS_FORMAL), emit the initializing assignment
   into *PRE_P, and return the temporary.  */

static tree
internal_get_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
		      bool is_formal, bool allow_ssa)
{
  tree t, mod;

  /* Notice that we explicitly allow VAL to be a CALL_EXPR so that we
     can create an INIT_EXPR and convert it into a GIMPLE_CALL below.
     NOTE(review): this resolves to a 5-argument gimplify_expr overload,
     not the 6-argument static one declared above — confirm against
     gimplify.h.  */
  gimplify_expr (&val, pre_p, post_p, is_gimple_reg_rhs_or_call,
		 fb_rvalue);

  if (allow_ssa
      && gimplify_ctxp->into_ssa
      && is_gimple_reg_type (TREE_TYPE (val)))
    {
      t = make_ssa_name (TYPE_MAIN_VARIANT (TREE_TYPE (val)));
      if (! gimple_in_ssa_p (cfun))
	{
	  /* Not in SSA form yet: attach a debug-friendly identifier
	     derived from VAL's name, if it has one.  */
	  const char *name = get_name (val);
	  if (name)
	    SET_SSA_NAME_VAR_OR_IDENTIFIER (t, create_tmp_var_name (name));
	}
    }
  else
    t = lookup_tmp_var (val, is_formal);

  mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val));

  SET_EXPR_LOCATION (mod, EXPR_LOC_OR_LOC (val, input_location));

  /* gimplify_modify_expr might want to reduce this further.  */
  gimplify_and_add (mod, pre_p);
  ggc_free (mod);

  return t;
}
603
/* Return a formal temporary variable initialized with VAL.  PRE_P is as
   in gimplify_expr.  Only use this function if:

   1) The value of the unfactored expression represented by VAL will not
      change between the initialization and use of the temporary, and
   2) The temporary will not be otherwise modified.

   For instance, #1 means that this is inappropriate for SAVE_EXPR temps,
   and #2 means it is inappropriate for && temps.

   For other cases, use get_initialized_tmp_var instead.  */

tree
get_formal_tmp_var (tree val, gimple_seq *pre_p)
{
  return internal_get_tmp_var (val, pre_p, NULL, true, true);
}
621
/* Return a temporary variable initialized with VAL.  PRE_P and POST_P
   are as in gimplify_expr.  ALLOW_SSA permits creating an SSA name for
   the temporary when the context is in SSA form.  */

tree
get_initialized_tmp_var (tree val, gimple_seq *pre_p, gimple_seq *post_p,
			 bool allow_ssa)
{
  return internal_get_tmp_var (val, pre_p, post_p, false, allow_ssa);
}
631
/* Declare all the variables in VARS in SCOPE.  If DEBUG_INFO is true,
   generate debug info for them; otherwise don't.  */

void
declare_vars (tree vars, gimple *gs, bool debug_info)
{
  tree last = vars;
  if (last)
    {
      tree temps, block;

      gbind *scope = as_a <gbind *> (gs);

      /* VARS arrives most-recent-first; reverse it into declaration
	 order.  After nreverse, LAST still points at the original head,
	 which is now the tail of TEMPS.  */
      temps = nreverse (last);

      block = gimple_bind_block (scope);
      gcc_assert (!block || TREE_CODE (block) == BLOCK);
      if (!block || !debug_info)
	{
	  /* Prepend TEMPS to the bind's variable chain by linking the
	     tail (LAST) to the existing vars.  */
	  DECL_CHAIN (last) = gimple_bind_vars (scope);
	  gimple_bind_set_vars (scope, temps);
	}
      else
	{
	  /* We need to attach the nodes both to the BIND_EXPR and to its
	     associated BLOCK for debugging purposes.  The key point here
	     is that the BLOCK_VARS of the BIND_EXPR_BLOCK of a BIND_EXPR
	     is a subchain of the BIND_EXPR_VARS of the BIND_EXPR.  */
	  if (BLOCK_VARS (block))
	    BLOCK_VARS (block) = chainon (BLOCK_VARS (block), temps);
	  else
	    {
	      gimple_bind_set_vars (scope,
				    chainon (gimple_bind_vars (scope), temps));
	      BLOCK_VARS (block) = temps;
	    }
	}
    }
}
671
672 /* For VAR a VAR_DECL of variable size, try to find a constant upper bound
673 for the size and adjust DECL_SIZE/DECL_SIZE_UNIT accordingly. Abort if
674 no such upper bound can be obtained. */
675
676 static void
force_constant_size(tree var)677 force_constant_size (tree var)
678 {
679 /* The only attempt we make is by querying the maximum size of objects
680 of the variable's type. */
681
682 HOST_WIDE_INT max_size;
683
684 gcc_assert (VAR_P (var));
685
686 max_size = max_int_size_in_bytes (TREE_TYPE (var));
687
688 gcc_assert (max_size >= 0);
689
690 DECL_SIZE_UNIT (var)
691 = build_int_cst (TREE_TYPE (DECL_SIZE_UNIT (var)), max_size);
692 DECL_SIZE (var)
693 = build_int_cst (TREE_TYPE (DECL_SIZE (var)), max_size * BITS_PER_UNIT);
694 }
695
/* Record the temporary variable TMP among FN's local variables, fixing
   up a non-constant size first.  TMP must not already be chained or
   seen in a bind expression.  */

void
gimple_add_tmp_var_fn (struct function *fn, tree tmp)
{
  gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));

  /* Later processing assumes that the object size is constant, which might
     not be true at this point.  Force the use of a constant upper bound in
     this case.  */
  if (!tree_fits_poly_uint64_p (DECL_SIZE_UNIT (tmp)))
    force_constant_size (tmp);

  DECL_CONTEXT (tmp) = fn->decl;
  DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;

  record_vars_into (tmp, fn->decl);
}
714
/* Push the temporary variable TMP into the current binding: onto the
   gimplify context's temps chain when gimplifying, into cfun's locals
   otherwise, or into the function body's outermost bind for nested
   functions.  */

void
gimple_add_tmp_var (tree tmp)
{
  gcc_assert (!DECL_CHAIN (tmp) && !DECL_SEEN_IN_BIND_EXPR_P (tmp));

  /* Later processing assumes that the object size is constant, which might
     not be true at this point.  Force the use of a constant upper bound in
     this case.  */
  if (!tree_fits_poly_uint64_p (DECL_SIZE_UNIT (tmp)))
    force_constant_size (tmp);

  DECL_CONTEXT (tmp) = current_function_decl;
  DECL_SEEN_IN_BIND_EXPR_P (tmp) = 1;

  if (gimplify_ctxp)
    {
      DECL_CHAIN (tmp) = gimplify_ctxp->temps;
      gimplify_ctxp->temps = tmp;

      /* Mark temporaries local within the nearest enclosing parallel.  */
      if (gimplify_omp_ctxp)
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
	  /* Skip outward past regions that don't own the temporary.  */
	  while (ctx
		 && (ctx->region_type == ORT_WORKSHARE
		     || ctx->region_type == ORT_SIMD
		     || ctx->region_type == ORT_ACC))
	    ctx = ctx->outer_context;
	  if (ctx)
	    omp_add_variable (ctx, tmp, GOVD_LOCAL | GOVD_SEEN);
	}
    }
  else if (cfun)
    record_vars (tmp);
  else
    {
      gimple_seq body_seq;

      /* This case is for nested functions.  We need to expose the locals
	 they create.  */
      body_seq = gimple_body (current_function_decl);
      declare_vars (tmp, gimple_seq_first_stmt (body_seq), false);
    }
}
761
762
763
764 /* This page contains routines to unshare tree nodes, i.e. to duplicate tree
765 nodes that are referenced more than once in GENERIC functions. This is
766 necessary because gimplification (translation into GIMPLE) is performed
767 by modifying tree nodes in-place, so gimplication of a shared node in a
768 first context could generate an invalid GIMPLE form in a second context.
769
770 This is achieved with a simple mark/copy/unmark algorithm that walks the
771 GENERIC representation top-down, marks nodes with TREE_VISITED the first
772 time it encounters them, duplicates them if they already have TREE_VISITED
773 set, and finally removes the TREE_VISITED marks it has set.
774
775 The algorithm works only at the function level, i.e. it generates a GENERIC
776 representation of a function with no nodes shared within the function when
777 passed a GENERIC function (except for nodes that are allowed to be shared).
778
779 At the global level, it is also necessary to unshare tree nodes that are
780 referenced in more than one function, for the same aforementioned reason.
781 This requires some cooperation from the front-end. There are 2 strategies:
782
783 1. Manual unsharing. The front-end needs to call unshare_expr on every
784 expression that might end up being shared across functions.
785
786 2. Deep unsharing. This is an extension of regular unsharing. Instead
787 of calling unshare_expr on expressions that might be shared across
788 functions, the front-end pre-marks them with TREE_VISITED. This will
789 ensure that they are unshared on the first reference within functions
790 when the regular unsharing algorithm runs. The counterpart is that
791 this algorithm must look deeper than for manual unsharing, which is
792 specified by LANG_HOOKS_DEEP_UNSHARING.
793
794 If there are only few specific cases of node sharing across functions, it is
795 probably easier for a front-end to unshare the expressions manually. On the
796 contrary, if the expressions generated at the global level are as widespread
797 as expressions generated within functions, deep unsharing is very likely the
798 way to go. */
799
/* Similar to copy_tree_r but do not copy SAVE_EXPR or TARGET_EXPR nodes.
   These nodes model computations that must be done once.  If we were to
   unshare something like SAVE_EXPR(i++), the gimplification process would
   create wrong code.  However, if DATA is non-null, it must hold a pointer
   set that is used to unshare the subtrees of these nodes.  */

static tree
mostly_copy_tree_r (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;
  enum tree_code code = TREE_CODE (t);

  /* Do not copy SAVE_EXPR, TARGET_EXPR or BIND_EXPR nodes themselves, but
     copy their subtrees if we can make sure to do it only once.  */
  if (code == SAVE_EXPR || code == TARGET_EXPR || code == BIND_EXPR)
    {
      /* hash_set::add returns true when T was already present; only
	 descend into subtrees the first time we meet T.  */
      if (data && !((hash_set<tree> *)data)->add (t))
	;
      else
	*walk_subtrees = 0;
    }

  /* Stop at types, decls, constants like copy_tree_r.  */
  else if (TREE_CODE_CLASS (code) == tcc_type
	   || TREE_CODE_CLASS (code) == tcc_declaration
	   || TREE_CODE_CLASS (code) == tcc_constant)
    *walk_subtrees = 0;

  /* Cope with the statement expression extension.  */
  else if (code == STATEMENT_LIST)
    ;

  /* Leave the bulk of the work to copy_tree_r itself.  */
  else
    copy_tree_r (tp, walk_subtrees, NULL);

  return NULL_TREE;
}
838
/* Callback for walk_tree to unshare most of the shared trees rooted at *TP.
   If *TP has been visited already, then *TP is deeply copied by calling
   mostly_copy_tree_r.  DATA is passed to mostly_copy_tree_r unmodified.  */

static tree
copy_if_shared_r (tree *tp, int *walk_subtrees, void *data)
{
  tree t = *tp;
  enum tree_code code = TREE_CODE (t);

  /* Skip types, decls, and constants.  But we do want to look at their
     types and the bounds of types.  Mark them as visited so we properly
     unmark their subtrees on the unmark pass.  If we've already seen them,
     don't look down further.  */
  if (TREE_CODE_CLASS (code) == tcc_type
      || TREE_CODE_CLASS (code) == tcc_declaration
      || TREE_CODE_CLASS (code) == tcc_constant)
    {
      if (TREE_VISITED (t))
	*walk_subtrees = 0;
      else
	TREE_VISITED (t) = 1;
    }

  /* If this node has been visited already, unshare it and don't look
     any deeper.  */
  else if (TREE_VISITED (t))
    {
      /* The copy itself recurses as needed, so stop this walk here.  */
      walk_tree (tp, mostly_copy_tree_r, data, NULL);
      *walk_subtrees = 0;
    }

  /* Otherwise, mark the node as visited and keep looking.  */
  else
    TREE_VISITED (t) = 1;

  return NULL_TREE;
}
877
/* Unshare most of the shared trees rooted at *TP.  DATA is passed to the
   copy_if_shared_r callback unmodified.  */

static inline void
copy_if_shared (tree *tp, void *data)
{
  walk_tree (tp, copy_if_shared_r, data, NULL);
}
886
/* Unshare all the trees in the body of FNDECL, as well as in the bodies of
   any nested functions (found via the cgraph).  */

static void
unshare_body (tree fndecl)
{
  struct cgraph_node *cgn = cgraph_node::get (fndecl);
  /* If the language requires deep unsharing, we need a pointer set to make
     sure we don't repeatedly unshare subtrees of unshareable nodes.  */
  hash_set<tree> *visited
    = lang_hooks.deep_unsharing ? new hash_set<tree> : NULL;

  /* Unshare the saved body plus the (possibly non-constant) size
     expressions of the result decl.  */
  copy_if_shared (&DECL_SAVED_TREE (fndecl), visited);
  copy_if_shared (&DECL_SIZE (DECL_RESULT (fndecl)), visited);
  copy_if_shared (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)), visited);

  delete visited;

  if (cgn)
    for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
      unshare_body (cgn->decl);
}
909
910 /* Callback for walk_tree to unmark the visited trees rooted at *TP.
911 Subtrees are walked until the first unvisited node is encountered. */
912
913 static tree
unmark_visited_r(tree * tp,int * walk_subtrees,void * data ATTRIBUTE_UNUSED)914 unmark_visited_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
915 {
916 tree t = *tp;
917
918 /* If this node has been visited, unmark it and keep looking. */
919 if (TREE_VISITED (t))
920 TREE_VISITED (t) = 0;
921
922 /* Otherwise, don't look any deeper. */
923 else
924 *walk_subtrees = 0;
925
926 return NULL_TREE;
927 }
928
/* Unmark the visited trees rooted at *TP (undoes copy_if_shared's
   TREE_VISITED marking).  */

static inline void
unmark_visited (tree *tp)
{
  walk_tree (tp, unmark_visited_r, NULL, NULL);
}
936
/* Likewise, but mark all trees as not visited: clear TREE_VISITED on
   FNDECL's body and result sizes, and recurse into nested functions.  */

static void
unvisit_body (tree fndecl)
{
  struct cgraph_node *cgn = cgraph_node::get (fndecl);

  unmark_visited (&DECL_SAVED_TREE (fndecl));
  unmark_visited (&DECL_SIZE (DECL_RESULT (fndecl)));
  unmark_visited (&DECL_SIZE_UNIT (DECL_RESULT (fndecl)));

  if (cgn)
    for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
      unvisit_body (cgn->decl);
}
952
/* Unconditionally make an unshared copy of EXPR.  This is used when using
   stored expressions which span multiple functions, such as BINFO_VTABLE,
   as the normal unsharing process can't tell that they're shared.  */

tree
unshare_expr (tree expr)
{
  walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
  return expr;
}
963
964 /* Worker for unshare_expr_without_location. */
965
966 static tree
prune_expr_location(tree * tp,int * walk_subtrees,void *)967 prune_expr_location (tree *tp, int *walk_subtrees, void *)
968 {
969 if (EXPR_P (*tp))
970 SET_EXPR_LOCATION (*tp, UNKNOWN_LOCATION);
971 else
972 *walk_subtrees = 0;
973 return NULL_TREE;
974 }
975
/* Similar to unshare_expr but also prune all expression locations
   from EXPR.  */

tree
unshare_expr_without_location (tree expr)
{
  walk_tree (&expr, mostly_copy_tree_r, NULL, NULL);
  if (EXPR_P (expr))
    walk_tree (&expr, prune_expr_location, NULL, NULL);
  return expr;
}
987
988 /* Return the EXPR_LOCATION of EXPR, if it (maybe recursively) has
989 one, OR_ELSE otherwise. The location of a STATEMENT_LISTs
990 comprising at least one DEBUG_BEGIN_STMT followed by exactly one
991 EXPR is the location of the EXPR. */
992
993 static location_t
994 rexpr_location (tree expr, location_t or_else = UNKNOWN_LOCATION)
995 {
996 if (!expr)
997 return or_else;
998
999 if (EXPR_HAS_LOCATION (expr))
1000 return EXPR_LOCATION (expr);
1001
1002 if (TREE_CODE (expr) != STATEMENT_LIST)
1003 return or_else;
1004
1005 tree_stmt_iterator i = tsi_start (expr);
1006
1007 bool found = false;
1008 while (!tsi_end_p (i) && TREE_CODE (tsi_stmt (i)) == DEBUG_BEGIN_STMT)
1009 {
1010 found = true;
1011 tsi_next (&i);
1012 }
1013
1014 if (!found || !tsi_one_before_end_p (i))
1015 return or_else;
1016
1017 return rexpr_location (tsi_stmt (i), or_else);
1018 }
1019
1020 /* Return TRUE iff EXPR (maybe recursively) has a location; see
1021 rexpr_location for the potential recursion. */
1022
1023 static inline bool
rexpr_has_location(tree expr)1024 rexpr_has_location (tree expr)
1025 {
1026 return rexpr_location (expr) != UNKNOWN_LOCATION;
1027 }
1028
1029
1030 /* WRAPPER is a code such as BIND_EXPR or CLEANUP_POINT_EXPR which can both
1031 contain statements and have a value. Assign its value to a temporary
1032 and give it void_type_node. Return the temporary, or NULL_TREE if
1033 WRAPPER was already void. */
1034
tree
voidify_wrapper_expr (tree wrapper, tree temp)
{
  tree type = TREE_TYPE (wrapper);
  if (type && !VOID_TYPE_P (type))
    {
      tree *p;

      /* Set p to point to the body of the wrapper.  Loop until we find
	 something that isn't a wrapper.  Every wrapper visited is given
	 void type and marked as having side effects on the way down.  */
      for (p = &wrapper; p && *p; )
	{
	  switch (TREE_CODE (*p))
	    {
	    case BIND_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      /* For a BIND_EXPR, the body is operand 1.  */
	      p = &BIND_EXPR_BODY (*p);
	      break;

	    case CLEANUP_POINT_EXPR:
	    case TRY_FINALLY_EXPR:
	    case TRY_CATCH_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      p = &TREE_OPERAND (*p, 0);
	      break;

	    case STATEMENT_LIST:
	      {
		tree_stmt_iterator i = tsi_last (*p);
		TREE_SIDE_EFFECTS (*p) = 1;
		TREE_TYPE (*p) = void_type_node;
		/* The value of a statement list is its last statement;
		   an empty list yields no value, so stop the walk.  */
		p = tsi_end_p (i) ? NULL : tsi_stmt_ptr (i);
	      }
	      break;

	    case COMPOUND_EXPR:
	      /* Advance to the last statement.  Set all container types to
		 void.  */
	      for (; TREE_CODE (*p) == COMPOUND_EXPR; p = &TREE_OPERAND (*p, 1))
		{
		  TREE_SIDE_EFFECTS (*p) = 1;
		  TREE_TYPE (*p) = void_type_node;
		}
	      break;

	    case TRANSACTION_EXPR:
	      TREE_SIDE_EFFECTS (*p) = 1;
	      TREE_TYPE (*p) = void_type_node;
	      p = &TRANSACTION_EXPR_BODY (*p);
	      break;

	    default:
	      /* Assume that any tree upon which voidify_wrapper_expr is
		 directly called is a wrapper, and that its body is op0.  */
	      if (p == &wrapper)
		{
		  TREE_SIDE_EFFECTS (*p) = 1;
		  TREE_TYPE (*p) = void_type_node;
		  p = &TREE_OPERAND (*p, 0);
		  break;
		}
	      /* Not a wrapper: P now points at the value-producing
		 expression.  */
	      goto out;
	    }
	}

    out:
      if (p == NULL || IS_EMPTY_STMT (*p))
	/* No value to capture; tell the caller so.  */
	temp = NULL_TREE;
      else if (temp)
	{
	  /* The wrapper is on the RHS of an assignment that we're pushing
	     down.  */
	  gcc_assert (TREE_CODE (temp) == INIT_EXPR
		      || TREE_CODE (temp) == MODIFY_EXPR);
	  TREE_OPERAND (temp, 1) = *p;
	  *p = temp;
	}
      else
	{
	  /* Otherwise create a temporary and capture the value-producing
	     expression into it in place.  */
	  temp = create_tmp_var (type, "retval");
	  *p = build2 (INIT_EXPR, type, temp, *p);
	}

      return temp;
    }

  return NULL_TREE;
}
1126
1127 /* Prepare calls to builtins to SAVE and RESTORE the stack as well as
1128 a temporary through which they communicate. */
1129
1130 static void
build_stack_save_restore(gcall ** save,gcall ** restore)1131 build_stack_save_restore (gcall **save, gcall **restore)
1132 {
1133 tree tmp_var;
1134
1135 *save = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_SAVE), 0);
1136 tmp_var = create_tmp_var (ptr_type_node, "saved_stack");
1137 gimple_call_set_lhs (*save, tmp_var);
1138
1139 *restore
1140 = gimple_build_call (builtin_decl_implicit (BUILT_IN_STACK_RESTORE),
1141 1, tmp_var);
1142 }
1143
1144 /* Generate IFN_ASAN_MARK call that poisons shadow of a for DECL variable. */
1145
1146 static tree
build_asan_poison_call_expr(tree decl)1147 build_asan_poison_call_expr (tree decl)
1148 {
1149 /* Do not poison variables that have size equal to zero. */
1150 tree unit_size = DECL_SIZE_UNIT (decl);
1151 if (zerop (unit_size))
1152 return NULL_TREE;
1153
1154 tree base = build_fold_addr_expr (decl);
1155
1156 return build_call_expr_internal_loc (UNKNOWN_LOCATION, IFN_ASAN_MARK,
1157 void_type_node, 3,
1158 build_int_cst (integer_type_node,
1159 ASAN_MARK_POISON),
1160 base, unit_size);
1161 }
1162
1163 /* Generate IFN_ASAN_MARK call that would poison or unpoison, depending
1164 on POISON flag, shadow memory of a DECL variable. The call will be
1165 put on location identified by IT iterator, where BEFORE flag drives
1166 position where the stmt will be put. */
1167
1168 static void
asan_poison_variable(tree decl,bool poison,gimple_stmt_iterator * it,bool before)1169 asan_poison_variable (tree decl, bool poison, gimple_stmt_iterator *it,
1170 bool before)
1171 {
1172 tree unit_size = DECL_SIZE_UNIT (decl);
1173 tree base = build_fold_addr_expr (decl);
1174
1175 /* Do not poison variables that have size equal to zero. */
1176 if (zerop (unit_size))
1177 return;
1178
1179 /* It's necessary to have all stack variables aligned to ASAN granularity
1180 bytes. */
1181 if (DECL_ALIGN_UNIT (decl) <= ASAN_SHADOW_GRANULARITY)
1182 SET_DECL_ALIGN (decl, BITS_PER_UNIT * ASAN_SHADOW_GRANULARITY);
1183
1184 HOST_WIDE_INT flags = poison ? ASAN_MARK_POISON : ASAN_MARK_UNPOISON;
1185
1186 gimple *g
1187 = gimple_build_call_internal (IFN_ASAN_MARK, 3,
1188 build_int_cst (integer_type_node, flags),
1189 base, unit_size);
1190
1191 if (before)
1192 gsi_insert_before (it, g, GSI_NEW_STMT);
1193 else
1194 gsi_insert_after (it, g, GSI_NEW_STMT);
1195 }
1196
1197 /* Generate IFN_ASAN_MARK internal call that depending on POISON flag
1198 either poisons or unpoisons a DECL. Created statement is appended
1199 to SEQ_P gimple sequence. */
1200
1201 static void
asan_poison_variable(tree decl,bool poison,gimple_seq * seq_p)1202 asan_poison_variable (tree decl, bool poison, gimple_seq *seq_p)
1203 {
1204 gimple_stmt_iterator it = gsi_last (*seq_p);
1205 bool before = false;
1206
1207 if (gsi_end_p (it))
1208 before = true;
1209
1210 asan_poison_variable (decl, poison, &it, before);
1211 }
1212
1213 /* Sort pair of VAR_DECLs A and B by DECL_UID. */
1214
1215 static int
sort_by_decl_uid(const void * a,const void * b)1216 sort_by_decl_uid (const void *a, const void *b)
1217 {
1218 const tree *t1 = (const tree *)a;
1219 const tree *t2 = (const tree *)b;
1220
1221 int uid1 = DECL_UID (*t1);
1222 int uid2 = DECL_UID (*t2);
1223
1224 if (uid1 < uid2)
1225 return -1;
1226 else if (uid1 > uid2)
1227 return 1;
1228 else
1229 return 0;
1230 }
1231
1232 /* Generate IFN_ASAN_MARK internal call for all VARIABLES
1233 depending on POISON flag. Created statement is appended
1234 to SEQ_P gimple sequence. */
1235
1236 static void
asan_poison_variables(hash_set<tree> * variables,bool poison,gimple_seq * seq_p)1237 asan_poison_variables (hash_set<tree> *variables, bool poison, gimple_seq *seq_p)
1238 {
1239 unsigned c = variables->elements ();
1240 if (c == 0)
1241 return;
1242
1243 auto_vec<tree> sorted_variables (c);
1244
1245 for (hash_set<tree>::iterator it = variables->begin ();
1246 it != variables->end (); ++it)
1247 sorted_variables.safe_push (*it);
1248
1249 sorted_variables.qsort (sort_by_decl_uid);
1250
1251 unsigned i;
1252 tree var;
1253 FOR_EACH_VEC_ELT (sorted_variables, i, var)
1254 {
1255 asan_poison_variable (var, poison, seq_p);
1256
1257 /* Add use_after_scope_memory attribute for the variable in order
1258 to prevent re-written into SSA. */
1259 if (!lookup_attribute (ASAN_USE_AFTER_SCOPE_ATTRIBUTE,
1260 DECL_ATTRIBUTES (var)))
1261 DECL_ATTRIBUTES (var)
1262 = tree_cons (get_identifier (ASAN_USE_AFTER_SCOPE_ATTRIBUTE),
1263 integer_one_node,
1264 DECL_ATTRIBUTES (var));
1265 }
1266 }
1267
1268 /* Gimplify a BIND_EXPR. Just voidify and recurse. */
1269
static enum gimplify_status
gimplify_bind_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree bind_expr = *expr_p;
  /* Save the enclosing context's stack flags; restored near the end.  */
  bool old_keep_stack = gimplify_ctxp->keep_stack;
  bool old_save_stack = gimplify_ctxp->save_stack;
  tree t;
  gbind *bind_stmt;
  gimple_seq body, cleanup;
  gcall *stack_save;
  location_t start_locus = 0, end_locus = 0;
  tree ret_clauses = NULL;

  /* If the BIND_EXPR has a value, capture it into a temporary; TEMP is
     returned in place of the BIND_EXPR at the end.  */
  tree temp = voidify_wrapper_expr (bind_expr, NULL);

  /* Mark variables seen in this bind expr.  */
  for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
    {
      if (VAR_P (t))
	{
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;

	  /* Mark variable as local.  */
	  if (ctx && ctx->region_type != ORT_NONE && !DECL_EXTERNAL (t)
	      && (! DECL_SEEN_IN_BIND_EXPR_P (t)
		  || splay_tree_lookup (ctx->variables,
					(splay_tree_key) t) == NULL))
	    {
	      int flag = GOVD_LOCAL;
	      if (ctx->region_type == ORT_SIMD
		  && TREE_ADDRESSABLE (t)
		  && !TREE_STATIC (t))
		{
		  if (TREE_CODE (DECL_SIZE_UNIT (t)) != INTEGER_CST)
		    ctx->add_safelen1 = true;
		  else
		    flag = GOVD_PRIVATE;
		}
	      omp_add_variable (ctx, t, flag | GOVD_SEEN);
	    }

	  DECL_SEEN_IN_BIND_EXPR_P (t) = 1;

	  if (DECL_HARD_REGISTER (t) && !is_global_var (t) && cfun)
	    cfun->has_local_explicit_reg_vars = true;
	}

      /* Preliminarily mark non-addressed complex variables as eligible
	 for promotion to gimple registers.  We'll transform their uses
	 as we find them.  */
      if ((TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
	   || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
	  && !TREE_THIS_VOLATILE (t)
	  && (VAR_P (t) && !DECL_HARD_REGISTER (t))
	  && !needs_to_live_in_memory (t))
	DECL_GIMPLE_REG_P (t) = 1;
    }

  bind_stmt = gimple_build_bind (BIND_EXPR_VARS (bind_expr), NULL,
				 BIND_EXPR_BLOCK (bind_expr));
  gimple_push_bind_expr (bind_stmt);

  /* Reset for this level; gimplifying the body may set them again.  */
  gimplify_ctxp->keep_stack = false;
  gimplify_ctxp->save_stack = false;

  /* Gimplify the body into the GIMPLE_BIND tuple's body.  */
  body = NULL;
  gimplify_stmt (&BIND_EXPR_BODY (bind_expr), &body);
  gimple_bind_set_body (bind_stmt, body);

  /* Source location wise, the cleanup code (stack_restore and clobbers)
     belongs to the end of the block, so propagate what we have.  The
     stack_save operation belongs to the beginning of block, which we can
     infer from the bind_expr directly if the block has no explicit
     assignment.  */
  if (BIND_EXPR_BLOCK (bind_expr))
    {
      end_locus = BLOCK_SOURCE_END_LOCATION (BIND_EXPR_BLOCK (bind_expr));
      start_locus = BLOCK_SOURCE_LOCATION (BIND_EXPR_BLOCK (bind_expr));
    }
  if (start_locus == 0)
    start_locus = EXPR_LOCATION (bind_expr);

  cleanup = NULL;
  stack_save = NULL;

  /* If the code both contains VLAs and calls alloca, then we cannot reclaim
     the stack space allocated to the VLAs.  */
  if (gimplify_ctxp->save_stack && !gimplify_ctxp->keep_stack)
    {
      gcall *stack_restore;

      /* Save stack on entry and restore it on exit.  Add a try_finally
	 block to achieve this.  */
      build_stack_save_restore (&stack_save, &stack_restore);

      gimple_set_location (stack_save, start_locus);
      gimple_set_location (stack_restore, end_locus);

      gimplify_seq_add_stmt (&cleanup, stack_restore);
    }

  /* Add clobbers for all variables that go out of scope.  */
  for (t = BIND_EXPR_VARS (bind_expr); t ; t = DECL_CHAIN (t))
    {
      if (VAR_P (t)
	  && !is_global_var (t)
	  && DECL_CONTEXT (t) == current_function_decl)
	{
	  if (!DECL_HARD_REGISTER (t)
	      && !TREE_THIS_VOLATILE (t)
	      && !DECL_HAS_VALUE_EXPR_P (t)
	      /* Only care for variables that have to be in memory.  Others
		 will be rewritten into SSA names, hence moved to the
		 top-level.  */
	      && !is_gimple_reg (t)
	      && flag_stack_reuse != SR_NONE)
	    {
	      /* A clobber is an empty CONSTRUCTOR assignment marked
		 volatile; it tells later passes the storage is dead.  */
	      tree clobber = build_constructor (TREE_TYPE (t), NULL);
	      gimple *clobber_stmt;
	      TREE_THIS_VOLATILE (clobber) = 1;
	      clobber_stmt = gimple_build_assign (t, clobber);
	      gimple_set_location (clobber_stmt, end_locus);
	      gimplify_seq_add_stmt (&cleanup, clobber_stmt);
	    }

	  if (flag_openacc && oacc_declare_returns != NULL)
	    {
	      /* Collect any OpenACC "declare" return clauses recorded for
		 this variable, chaining them onto RET_CLAUSES.  */
	      tree *c = oacc_declare_returns->get (t);
	      if (c != NULL)
		{
		  if (ret_clauses)
		    OMP_CLAUSE_CHAIN (*c) = ret_clauses;

		  ret_clauses = *c;

		  oacc_declare_returns->remove (t);

		  if (oacc_declare_returns->elements () == 0)
		    {
		      delete oacc_declare_returns;
		      oacc_declare_returns = NULL;
		    }
		}
	    }
	}

      /* Variables poisoned on entry get unpoisoned... actually re-poisoned
	 here as they leave scope; see asan_poison_variable.  */
      if (asan_poisoned_variables != NULL
	  && asan_poisoned_variables->contains (t))
	{
	  asan_poisoned_variables->remove (t);
	  asan_poison_variable (t, true, &cleanup);
	}

      if (gimplify_ctxp->live_switch_vars != NULL
	  && gimplify_ctxp->live_switch_vars->contains (t))
	gimplify_ctxp->live_switch_vars->remove (t);
    }

  if (ret_clauses)
    {
      /* Emit an OACC_DECLARE target construct ahead of the cleanup.  */
      gomp_target *stmt;
      gimple_stmt_iterator si = gsi_start (cleanup);

      stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
				      ret_clauses);
      gsi_insert_seq_before_without_update (&si, stmt, GSI_NEW_STMT);
    }

  if (cleanup)
    {
      /* Wrap the body in a GIMPLE_TRY_FINALLY so the cleanup always runs.  */
      gtry *gs;
      gimple_seq new_body;

      new_body = NULL;
      gs = gimple_build_try (gimple_bind_body (bind_stmt), cleanup,
			     GIMPLE_TRY_FINALLY);

      if (stack_save)
	gimplify_seq_add_stmt (&new_body, stack_save);
      gimplify_seq_add_stmt (&new_body, gs);
      gimple_bind_set_body (bind_stmt, new_body);
    }

  /* keep_stack propagates all the way up to the outermost BIND_EXPR.  */
  if (!gimplify_ctxp->keep_stack)
    gimplify_ctxp->keep_stack = old_keep_stack;
  gimplify_ctxp->save_stack = old_save_stack;

  gimple_pop_bind_expr ();

  gimplify_seq_add_stmt (pre_p, bind_stmt);

  if (temp)
    {
      *expr_p = temp;
      return GS_OK;
    }

  *expr_p = NULL_TREE;
  return GS_ALL_DONE;
}
1472
1473 /* Maybe add early return predict statement to PRE_P sequence. */
1474
static void
maybe_add_early_return_predict_stmt (gimple_seq *pre_p)
{
  /* If we are in a conditional context, the return is an "early" return
     and is predicted not to be taken; add a PREDICT statement saying so.
     (At the top level of the function the return is unconditional and no
     hint is needed.)  */
  if (gimple_conditional_context ())
    {
      gimple *predict = gimple_build_predict (PRED_TREE_EARLY_RETURN,
					      NOT_TAKEN);
      gimplify_seq_add_stmt (pre_p, predict);
    }
}
1486
1487 /* Gimplify a RETURN_EXPR. If the expression to be returned is not a
1488 GIMPLE value, it is assigned to a new temporary and the statement is
1489 re-written to return the temporary.
1490
1491 PRE_P points to the sequence where side effects that must happen before
1492 STMT should be stored. */
1493
1494 static enum gimplify_status
gimplify_return_expr(tree stmt,gimple_seq * pre_p)1495 gimplify_return_expr (tree stmt, gimple_seq *pre_p)
1496 {
1497 greturn *ret;
1498 tree ret_expr = TREE_OPERAND (stmt, 0);
1499 tree result_decl, result;
1500
1501 if (ret_expr == error_mark_node)
1502 return GS_ERROR;
1503
1504 if (!ret_expr
1505 || TREE_CODE (ret_expr) == RESULT_DECL)
1506 {
1507 maybe_add_early_return_predict_stmt (pre_p);
1508 greturn *ret = gimple_build_return (ret_expr);
1509 gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
1510 gimplify_seq_add_stmt (pre_p, ret);
1511 return GS_ALL_DONE;
1512 }
1513
1514 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl))))
1515 result_decl = NULL_TREE;
1516 else
1517 {
1518 result_decl = TREE_OPERAND (ret_expr, 0);
1519
1520 /* See through a return by reference. */
1521 if (TREE_CODE (result_decl) == INDIRECT_REF)
1522 result_decl = TREE_OPERAND (result_decl, 0);
1523
1524 gcc_assert ((TREE_CODE (ret_expr) == MODIFY_EXPR
1525 || TREE_CODE (ret_expr) == INIT_EXPR)
1526 && TREE_CODE (result_decl) == RESULT_DECL);
1527 }
1528
1529 /* If aggregate_value_p is true, then we can return the bare RESULT_DECL.
1530 Recall that aggregate_value_p is FALSE for any aggregate type that is
1531 returned in registers. If we're returning values in registers, then
1532 we don't want to extend the lifetime of the RESULT_DECL, particularly
1533 across another call. In addition, for those aggregates for which
1534 hard_function_value generates a PARALLEL, we'll die during normal
1535 expansion of structure assignments; there's special code in expand_return
1536 to handle this case that does not exist in expand_expr. */
1537 if (!result_decl)
1538 result = NULL_TREE;
1539 else if (aggregate_value_p (result_decl, TREE_TYPE (current_function_decl)))
1540 {
1541 if (TREE_CODE (DECL_SIZE (result_decl)) != INTEGER_CST)
1542 {
1543 if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (result_decl)))
1544 gimplify_type_sizes (TREE_TYPE (result_decl), pre_p);
1545 /* Note that we don't use gimplify_vla_decl because the RESULT_DECL
1546 should be effectively allocated by the caller, i.e. all calls to
1547 this function must be subject to the Return Slot Optimization. */
1548 gimplify_one_sizepos (&DECL_SIZE (result_decl), pre_p);
1549 gimplify_one_sizepos (&DECL_SIZE_UNIT (result_decl), pre_p);
1550 }
1551 result = result_decl;
1552 }
1553 else if (gimplify_ctxp->return_temp)
1554 result = gimplify_ctxp->return_temp;
1555 else
1556 {
1557 result = create_tmp_reg (TREE_TYPE (result_decl));
1558
1559 /* ??? With complex control flow (usually involving abnormal edges),
1560 we can wind up warning about an uninitialized value for this. Due
1561 to how this variable is constructed and initialized, this is never
1562 true. Give up and never warn. */
1563 TREE_NO_WARNING (result) = 1;
1564
1565 gimplify_ctxp->return_temp = result;
1566 }
1567
1568 /* Smash the lhs of the MODIFY_EXPR to the temporary we plan to use.
1569 Then gimplify the whole thing. */
1570 if (result != result_decl)
1571 TREE_OPERAND (ret_expr, 0) = result;
1572
1573 gimplify_and_add (TREE_OPERAND (stmt, 0), pre_p);
1574
1575 maybe_add_early_return_predict_stmt (pre_p);
1576 ret = gimple_build_return (result);
1577 gimple_set_no_warning (ret, TREE_NO_WARNING (stmt));
1578 gimplify_seq_add_stmt (pre_p, ret);
1579
1580 return GS_ALL_DONE;
1581 }
1582
1583 /* Gimplify a variable-length array DECL. */
1584
static void
gimplify_vla_decl (tree decl, gimple_seq *seq_p)
{
  /* This is a variable-sized decl.  Simplify its size and mark it
     for deferred expansion.  */
  tree t, addr, ptr_type;

  gimplify_one_sizepos (&DECL_SIZE (decl), seq_p);
  gimplify_one_sizepos (&DECL_SIZE_UNIT (decl), seq_p);

  /* Don't mess with a DECL_VALUE_EXPR set by the front-end.  */
  if (DECL_HAS_VALUE_EXPR_P (decl))
    return;

  /* All occurrences of this decl in final gimplified code will be
     replaced by indirection.  Setting DECL_VALUE_EXPR does two
     things: First, it lets the rest of the gimplifier know what
     replacement to use.  Second, it lets the debug info know
     where to find the value.  */
  ptr_type = build_pointer_type (TREE_TYPE (decl));
  addr = create_tmp_var (ptr_type, get_name (decl));
  DECL_IGNORED_P (addr) = 0;
  t = build_fold_indirect_ref (addr);
  /* The pointer is always valid once the alloca below has run, so the
     dereference cannot trap.  */
  TREE_THIS_NOTRAP (t) = 1;
  SET_DECL_VALUE_EXPR (decl, t);
  DECL_HAS_VALUE_EXPR_P (decl) = 1;

  /* Allocate the actual storage with alloca and store its address.  */
  t = build_alloca_call_expr (DECL_SIZE_UNIT (decl), DECL_ALIGN (decl),
			      max_int_size_in_bytes (TREE_TYPE (decl)));
  /* The call has been built for a variable-sized object.  */
  CALL_ALLOCA_FOR_VAR_P (t) = 1;
  t = fold_convert (ptr_type, t);
  t = build2 (MODIFY_EXPR, TREE_TYPE (addr), addr, t);

  gimplify_and_add (t, seq_p);
}
1621
1622 /* A helper function to be called via walk_tree. Mark all labels under *TP
1623 as being forced. To be called for DECL_INITIAL of static variables. */
1624
1625 static tree
force_labels_r(tree * tp,int * walk_subtrees,void * data ATTRIBUTE_UNUSED)1626 force_labels_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
1627 {
1628 if (TYPE_P (*tp))
1629 *walk_subtrees = 0;
1630 if (TREE_CODE (*tp) == LABEL_DECL)
1631 {
1632 FORCED_LABEL (*tp) = 1;
1633 cfun->has_forced_label_in_static = 1;
1634 }
1635
1636 return NULL_TREE;
1637 }
1638
1639 /* Gimplify a DECL_EXPR node *STMT_P by making any necessary allocation
1640 and initialization explicit. */
1641
static enum gimplify_status
gimplify_decl_expr (tree *stmt_p, gimple_seq *seq_p)
{
  tree stmt = *stmt_p;
  tree decl = DECL_EXPR_DECL (stmt);

  /* The DECL_EXPR itself is consumed here; its effects are emitted
     into SEQ_P.  */
  *stmt_p = NULL_TREE;

  if (TREE_TYPE (decl) == error_mark_node)
    return GS_ERROR;

  /* Gimplify any size expressions embedded in the declared type.  */
  if ((TREE_CODE (decl) == TYPE_DECL
       || VAR_P (decl))
      && !TYPE_SIZES_GIMPLIFIED (TREE_TYPE (decl)))
    {
      gimplify_type_sizes (TREE_TYPE (decl), seq_p);
      if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE)
	gimplify_type_sizes (TREE_TYPE (TREE_TYPE (decl)), seq_p);
    }

  /* ??? DECL_ORIGINAL_TYPE is streamed for LTO so it needs to be gimplified
     in case its size expressions contain problematic nodes like CALL_EXPR.  */
  if (TREE_CODE (decl) == TYPE_DECL
      && DECL_ORIGINAL_TYPE (decl)
      && !TYPE_SIZES_GIMPLIFIED (DECL_ORIGINAL_TYPE (decl)))
    {
      gimplify_type_sizes (DECL_ORIGINAL_TYPE (decl), seq_p);
      if (TREE_CODE (DECL_ORIGINAL_TYPE (decl)) == REFERENCE_TYPE)
	gimplify_type_sizes (TREE_TYPE (DECL_ORIGINAL_TYPE (decl)), seq_p);
    }

  if (VAR_P (decl) && !DECL_EXTERNAL (decl))
    {
      tree init = DECL_INITIAL (decl);
      bool is_vla = false;

      /* Variable-sized decls, and large decls under generic stack
	 checking, get explicit alloca-based allocation.  */
      if (TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
	  || (!TREE_STATIC (decl)
	      && flag_stack_check == GENERIC_STACK_CHECK
	      && compare_tree_int (DECL_SIZE_UNIT (decl),
				   STACK_CHECK_MAX_VAR_SIZE) > 0))
	{
	  gimplify_vla_decl (decl, seq_p);
	  is_vla = true;
	}

      /* For ASan use-after-scope checking, unpoison the variable here at
	 its point of declaration; it is re-poisoned when its scope ends
	 (see gimplify_bind_expr).  */
      if (asan_poisoned_variables
	  && !is_vla
	  && TREE_ADDRESSABLE (decl)
	  && !TREE_STATIC (decl)
	  && !DECL_HAS_VALUE_EXPR_P (decl)
	  && DECL_ALIGN (decl) <= MAX_SUPPORTED_STACK_ALIGNMENT
	  && dbg_cnt (asan_use_after_scope)
	  && !gimplify_omp_ctxp)
	{
	  asan_poisoned_variables->add (decl);
	  asan_poison_variable (decl, false, seq_p);
	  if (!DECL_ARTIFICIAL (decl) && gimplify_ctxp->live_switch_vars)
	    gimplify_ctxp->live_switch_vars->add (decl);
	}

      /* Some front ends do not explicitly declare all anonymous
	 artificial variables.  We compensate here by declaring the
	 variables, though it would be better if the front ends would
	 explicitly declare them.  */
      if (!DECL_SEEN_IN_BIND_EXPR_P (decl)
	  && DECL_ARTIFICIAL (decl) && DECL_NAME (decl) == NULL_TREE)
	gimple_add_tmp_var (decl);

      if (init && init != error_mark_node)
	{
	  if (!TREE_STATIC (decl))
	    {
	      /* Turn the initializer into an explicit INIT_EXPR statement;
		 the original tree is no longer needed afterwards.  */
	      DECL_INITIAL (decl) = NULL_TREE;
	      init = build2 (INIT_EXPR, void_type_node, decl, init);
	      gimplify_and_add (init, seq_p);
	      ggc_free (init);
	    }
	  else
	    /* We must still examine initializers for static variables
	       as they may contain a label address.  */
	    walk_tree (&init, force_labels_r, NULL, NULL);
	}
    }

  return GS_ALL_DONE;
}
1729
1730 /* Gimplify a LOOP_EXPR. Normally this just involves gimplifying the body
1731 and replacing the LOOP_EXPR with goto, but if the loop contains an
1732 EXIT_EXPR, we need to append a label for it to jump to. */
1733
1734 static enum gimplify_status
gimplify_loop_expr(tree * expr_p,gimple_seq * pre_p)1735 gimplify_loop_expr (tree *expr_p, gimple_seq *pre_p)
1736 {
1737 tree saved_label = gimplify_ctxp->exit_label;
1738 tree start_label = create_artificial_label (UNKNOWN_LOCATION);
1739
1740 gimplify_seq_add_stmt (pre_p, gimple_build_label (start_label));
1741
1742 gimplify_ctxp->exit_label = NULL_TREE;
1743
1744 gimplify_and_add (LOOP_EXPR_BODY (*expr_p), pre_p);
1745
1746 gimplify_seq_add_stmt (pre_p, gimple_build_goto (start_label));
1747
1748 if (gimplify_ctxp->exit_label)
1749 gimplify_seq_add_stmt (pre_p,
1750 gimple_build_label (gimplify_ctxp->exit_label));
1751
1752 gimplify_ctxp->exit_label = saved_label;
1753
1754 *expr_p = NULL;
1755 return GS_ALL_DONE;
1756 }
1757
1758 /* Gimplify a statement list onto a sequence. These may be created either
1759 by an enlightened front-end, or by shortcut_cond_expr. */
1760
1761 static enum gimplify_status
gimplify_statement_list(tree * expr_p,gimple_seq * pre_p)1762 gimplify_statement_list (tree *expr_p, gimple_seq *pre_p)
1763 {
1764 tree temp = voidify_wrapper_expr (*expr_p, NULL);
1765
1766 tree_stmt_iterator i = tsi_start (*expr_p);
1767
1768 while (!tsi_end_p (i))
1769 {
1770 gimplify_stmt (tsi_stmt_ptr (i), pre_p);
1771 tsi_delink (&i);
1772 }
1773
1774 if (temp)
1775 {
1776 *expr_p = temp;
1777 return GS_OK;
1778 }
1779
1780 return GS_ALL_DONE;
1781 }
1782
1783 /* Callback for walk_gimple_seq. */
1784
static tree
warn_switch_unreachable_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
			   struct walk_stmt_info *wi)
{
  gimple *stmt = gsi_stmt (*gsi_p);

  /* Record the first "real" statement in WI->INFO and stop the walk by
     returning non-NULL; otherwise keep scanning.  */
  *handled_ops_p = true;
  switch (gimple_code (stmt))
    {
    case GIMPLE_TRY:
      /* A compiler-generated cleanup or a user-written try block.
	 If it's empty, don't dive into it--that would result in
	 worse location info.  */
      if (gimple_try_eval (stmt) == NULL)
	{
	  wi->info = stmt;
	  return integer_zero_node;
	}
      /* Fall through.  */
    case GIMPLE_BIND:
    case GIMPLE_CATCH:
    case GIMPLE_EH_FILTER:
    case GIMPLE_TRANSACTION:
      /* Walk the sub-statements.  */
      *handled_ops_p = false;
      break;

    case GIMPLE_DEBUG:
      /* Ignore these.  We may generate them before declarations that
	 are never executed.  If there's something to warn about,
	 there will be non-debug stmts too, and we'll catch those.  */
      break;

    case GIMPLE_CALL:
      /* ASan instrumentation calls are artificial; look inside instead
	 of reporting them.  */
      if (gimple_call_internal_p (stmt, IFN_ASAN_MARK))
	{
	  *handled_ops_p = false;
	  break;
	}
      /* Fall through.  */
    default:
      /* Save the first "real" statement (not a decl/lexical scope/...).  */
      wi->info = stmt;
      return integer_zero_node;
    }
  return NULL_TREE;
}
1832
1833 /* Possibly warn about unreachable statements between switch's controlling
1834 expression and the first case. SEQ is the body of a switch expression. */
1835
1836 static void
maybe_warn_switch_unreachable(gimple_seq seq)1837 maybe_warn_switch_unreachable (gimple_seq seq)
1838 {
1839 if (!warn_switch_unreachable
1840 /* This warning doesn't play well with Fortran when optimizations
1841 are on. */
1842 || lang_GNU_Fortran ()
1843 || seq == NULL)
1844 return;
1845
1846 struct walk_stmt_info wi;
1847 memset (&wi, 0, sizeof (wi));
1848 walk_gimple_seq (seq, warn_switch_unreachable_r, NULL, &wi);
1849 gimple *stmt = (gimple *) wi.info;
1850
1851 if (stmt && gimple_code (stmt) != GIMPLE_LABEL)
1852 {
1853 if (gimple_code (stmt) == GIMPLE_GOTO
1854 && TREE_CODE (gimple_goto_dest (stmt)) == LABEL_DECL
1855 && DECL_ARTIFICIAL (gimple_goto_dest (stmt)))
1856 /* Don't warn for compiler-generated gotos. These occur
1857 in Duff's devices, for example. */;
1858 else
1859 warning_at (gimple_location (stmt), OPT_Wswitch_unreachable,
1860 "statement will never be executed");
1861 }
1862 }
1863
1864
1865 /* A label entry that pairs label and a location. */
struct label_entry
{
  /* The LABEL_DECL this entry tracks.  */
  tree label;
  /* A location associated with the label (set by the code that records
     the entry, e.g. the location of the branch leading to it).  */
  location_t loc;
};
1871
1872 /* Find LABEL in vector of label entries VEC. */
1873
1874 static struct label_entry *
find_label_entry(const auto_vec<struct label_entry> * vec,tree label)1875 find_label_entry (const auto_vec<struct label_entry> *vec, tree label)
1876 {
1877 unsigned int i;
1878 struct label_entry *l;
1879
1880 FOR_EACH_VEC_ELT (*vec, i, l)
1881 if (l->label == label)
1882 return l;
1883 return NULL;
1884 }
1885
1886 /* Return true if LABEL, a LABEL_DECL, represents a case label
1887 in a vector of labels CASES. */
1888
1889 static bool
case_label_p(const vec<tree> * cases,tree label)1890 case_label_p (const vec<tree> *cases, tree label)
1891 {
1892 unsigned int i;
1893 tree l;
1894
1895 FOR_EACH_VEC_ELT (*cases, i, l)
1896 if (CASE_LABEL (l) == label)
1897 return true;
1898 return false;
1899 }
1900
1901 /* Find the last nondebug statement in a scope STMT. */
1902
static gimple *
last_stmt_in_scope (gimple *stmt)
{
  if (!stmt)
    return NULL;

  switch (gimple_code (stmt))
    {
    case GIMPLE_BIND:
      {
	/* Descend into the bind body's last non-debug statement.  */
	gbind *bind = as_a <gbind *> (stmt);
	stmt = gimple_seq_last_nondebug_stmt (gimple_bind_body (bind));
	return last_stmt_in_scope (stmt);
      }

    case GIMPLE_TRY:
      {
	gtry *try_stmt = as_a <gtry *> (stmt);
	stmt = gimple_seq_last_nondebug_stmt (gimple_try_eval (try_stmt));
	gimple *last_eval = last_stmt_in_scope (stmt);
	/* If the protected body can fall through (and isn't a fallthrough
	   marker), a TRY_FINALLY executes the cleanup last, so the answer
	   comes from the cleanup sequence instead.  */
	if (gimple_stmt_may_fallthru (last_eval)
	    && (last_eval == NULL
		|| !gimple_call_internal_p (last_eval, IFN_FALLTHROUGH))
	    && gimple_try_kind (try_stmt) == GIMPLE_TRY_FINALLY)
	  {
	    stmt = gimple_seq_last_nondebug_stmt (gimple_try_cleanup (try_stmt));
	    return last_stmt_in_scope (stmt);
	  }
	else
	  return last_eval;
      }

    case GIMPLE_DEBUG:
      /* Debug statements were filtered out above; seeing one here is a
	 caller bug.  */
      gcc_unreachable ();

    default:
      return stmt;
    }
}
1942
1943 /* Collect interesting labels in LABELS and return the statement preceding
1944 another case label, or a user-defined label. */
1945
static gimple *
collect_fallthrough_labels (gimple_stmt_iterator *gsi_p,
			    auto_vec <struct label_entry> *labels)
{
  /* The last statement of interest seen so far; returned to the caller.  */
  gimple *prev = NULL;

  do
    {
      if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_BIND)
	{
	  /* Recognize the special GIMPLE_BIND added by gimplify_switch_expr,
	     which starts on a GIMPLE_SWITCH and ends with a break label.
	     Handle that as a single statement that can fall through.  */
	  gbind *bind = as_a <gbind *> (gsi_stmt (*gsi_p));
	  gimple *first = gimple_seq_first_stmt (gimple_bind_body (bind));
	  gimple *last = gimple_seq_last_stmt (gimple_bind_body (bind));
	  if (last
	      && gimple_code (first) == GIMPLE_SWITCH
	      && gimple_code (last) == GIMPLE_LABEL)
	    {
	      tree label = gimple_label_label (as_a <glabel *> (last));
	      if (SWITCH_BREAK_LABEL_P (label))
		{
		  prev = bind;
		  gsi_next (gsi_p);
		  continue;
		}
	    }
	}
      if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_BIND
	  || gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_TRY)
	{
	  /* Nested scope.  Only look at the last statement of
	     the innermost scope.  */
	  location_t bind_loc = gimple_location (gsi_stmt (*gsi_p));
	  gimple *last = last_stmt_in_scope (gsi_stmt (*gsi_p));
	  if (last)
	    {
	      prev = last;
	      /* It might be a label without a location.  Use the
		 location of the scope then.  */
	      if (!gimple_has_location (prev))
		gimple_set_location (prev, bind_loc);
	    }
	  gsi_next (gsi_p);
	  continue;
	}

      /* Ifs are tricky.  */
      if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_COND)
	{
	  gcond *cond_stmt = as_a <gcond *> (gsi_stmt (*gsi_p));
	  tree false_lab = gimple_cond_false_label (cond_stmt);
	  location_t if_loc = gimple_location (cond_stmt);

	  /* If we have e.g.
	     if (i > 1) goto <D.2259>; else goto D;
	     we can't do much with the else-branch.  */
	  if (!DECL_ARTIFICIAL (false_lab))
	    break;

	  /* Go on until the false label, then one step back.  */
	  for (; !gsi_end_p (*gsi_p); gsi_next (gsi_p))
	    {
	      gimple *stmt = gsi_stmt (*gsi_p);
	      if (gimple_code (stmt) == GIMPLE_LABEL
		  && gimple_label_label (as_a <glabel *> (stmt)) == false_lab)
		break;
	    }

	  /* Not found?  Oops.  */
	  if (gsi_end_p (*gsi_p))
	    break;

	  /* The false label is a potential fall-through target; record it
	     with the location of the IF.  */
	  struct label_entry l = { false_lab, if_loc };
	  labels->safe_push (l);

	  /* Go to the last statement of the then branch.  */
	  gsi_prev (gsi_p);

	  /* if (i != 0) goto <D.1759>; else goto <D.1760>;
	     <D.1759>:
	     <stmt>;
	     goto <D.1761>;
	     <D.1760>:
	   */
	  if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_GOTO
	      && !gimple_has_location (gsi_stmt (*gsi_p)))
	    {
	      /* Look at the statement before, it might be
		 attribute fallthrough, in which case don't warn.  */
	      gsi_prev (gsi_p);
	      bool fallthru_before_dest
		= gimple_call_internal_p (gsi_stmt (*gsi_p), IFN_FALLTHROUGH);
	      gsi_next (gsi_p);
	      tree goto_dest = gimple_goto_dest (gsi_stmt (*gsi_p));
	      if (!fallthru_before_dest)
		{
		  struct label_entry l = { goto_dest, if_loc };
		  labels->safe_push (l);
		}
	    }
	  /* And move back.  */
	  gsi_next (gsi_p);
	}

      /* Remember the last statement.  Skip labels that are of no interest
	 to us.  */
      if (gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_LABEL)
	{
	  tree label = gimple_label_label (as_a <glabel *> (gsi_stmt (*gsi_p)));
	  if (find_label_entry (labels, label))
	    prev = gsi_stmt (*gsi_p);
	}
      else if (gimple_call_internal_p (gsi_stmt (*gsi_p), IFN_ASAN_MARK))
	;
      else if (!is_gimple_debug (gsi_stmt (*gsi_p)))
	prev = gsi_stmt (*gsi_p);
      gsi_next (gsi_p);
    }
  while (!gsi_end_p (*gsi_p)
	 /* Stop if we find a case or a user-defined label.  */
	 && (gimple_code (gsi_stmt (*gsi_p)) != GIMPLE_LABEL
	     || !gimple_has_location (gsi_stmt (*gsi_p))));

  return prev;
}
2073
/* Return true if the switch fallthrough warning should occur.  LABEL is
   the label statement that we're falling through to.  */
2076
static bool
should_warn_for_implicit_fallthrough (gimple_stmt_iterator *gsi_p, tree label)
{
  /* Work on a copy; the caller's iterator must not be advanced here.  */
  gimple_stmt_iterator gsi = *gsi_p;

  /* Don't warn if the label is marked with a "falls through" comment.  */
  if (FALLTHROUGH_LABEL_P (label))
    return false;

  /* Don't warn for non-case labels followed by a statement:
       case 0:
	 foo ();
       label:
	 bar ();
     as these are likely intentional.  */
  if (!case_label_p (&gimplify_ctxp->case_labels, label))
    {
      tree l;
      /* Skip the run of further non-case labels; if what follows is not
	 another label, a real statement follows and we stay quiet.  */
      while (!gsi_end_p (gsi)
	     && gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL
	     && (l = gimple_label_label (as_a <glabel *> (gsi_stmt (gsi))))
	     && !case_label_p (&gimplify_ctxp->case_labels, l))
	gsi_next_nondebug (&gsi);
      if (gsi_end_p (gsi) || gimple_code (gsi_stmt (gsi)) != GIMPLE_LABEL)
	return false;
    }

  /* Don't warn for terminated branches, i.e. when the subsequent case labels
     immediately breaks.  */
  gsi = *gsi_p;

  /* Skip all immediately following labels (and branch predictors).  */
  while (!gsi_end_p (gsi)
	 && (gimple_code (gsi_stmt (gsi)) == GIMPLE_LABEL
	     || gimple_code (gsi_stmt (gsi)) == GIMPLE_PREDICT))
    gsi_next_nondebug (&gsi);

  /* { ... something; default:; } */
  if (gsi_end_p (gsi)
      /* { ... something; default: break; } or
	 { ... something; default: goto L; } */
      || gimple_code (gsi_stmt (gsi)) == GIMPLE_GOTO
      /* { ... something; default: return; } */
      || gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
    return false;

  return true;
}
2125
2126 /* Callback for walk_gimple_seq. */
2127
static tree
warn_implicit_fallthrough_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
			     struct walk_stmt_info *)
{
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;
  switch (gimple_code (stmt))
    {
    case GIMPLE_TRY:
    case GIMPLE_BIND:
    case GIMPLE_CATCH:
    case GIMPLE_EH_FILTER:
    case GIMPLE_TRANSACTION:
      /* Walk the sub-statements.  */
      *handled_ops_p = false;
      break;

    /* Find a sequence of form:

       GIMPLE_LABEL
       [...]
       <may fallthru stmt>
       GIMPLE_LABEL

       and possibly warn.  */
    case GIMPLE_LABEL:
      {
	/* Found a label.  Skip all immediately following labels.  */
	while (!gsi_end_p (*gsi_p)
	       && gimple_code (gsi_stmt (*gsi_p)) == GIMPLE_LABEL)
	  gsi_next_nondebug (gsi_p);

	/* There might be no more statements.  A non-NULL return value
	   terminates the walk.  */
	if (gsi_end_p (*gsi_p))
	  return integer_zero_node;

	/* Vector of labels that fall through.  */
	auto_vec <struct label_entry> labels;
	/* Advance *gsi_p to the last potentially-falling-through statement
	   before the next interesting label.  */
	gimple *prev = collect_fallthrough_labels (gsi_p, &labels);

	/* There might be no more statements.  */
	if (gsi_end_p (*gsi_p))
	  return integer_zero_node;

	gimple *next = gsi_stmt (*gsi_p);
	tree label;
	/* If what follows is a label, then we may have a fallthrough.  */
	if (gimple_code (next) == GIMPLE_LABEL
	    && gimple_has_location (next)
	    && (label = gimple_label_label (as_a <glabel *> (next)))
	    && prev != NULL)
	  {
	    struct label_entry *l;
	    bool warned_p = false;
	    if (!should_warn_for_implicit_fallthrough (gsi_p, label))
	      /* Quiet.  */;
	    else if (gimple_code (prev) == GIMPLE_LABEL
		     && (label = gimple_label_label (as_a <glabel *> (prev)))
		     && (l = find_label_entry (&labels, label)))
	      warned_p = warning_at (l->loc, OPT_Wimplicit_fallthrough_,
				     "this statement may fall through");
	    else if (!gimple_call_internal_p (prev, IFN_FALLTHROUGH)
		     /* Try to be clever and don't warn when the statement
			can't actually fall through.  */
		     && gimple_stmt_may_fallthru (prev)
		     && gimple_has_location (prev))
	      warned_p = warning_at (gimple_location (prev),
				     OPT_Wimplicit_fallthrough_,
				     "this statement may fall through");
	    if (warned_p)
	      inform (gimple_location (next), "here");

	    /* Mark this label as processed so as to prevent multiple
	       warnings in nested switches.  */
	    FALLTHROUGH_LABEL_P (label) = true;

	    /* So that next warn_implicit_fallthrough_r will start looking for
	       a new sequence starting with this label.  */
	    gsi_prev (gsi_p);
	  }
      }
      break;
   default:
      break;
    }
  return NULL_TREE;
}
2216
2217 /* Warn when a switch case falls through. */
2218
2219 static void
maybe_warn_implicit_fallthrough(gimple_seq seq)2220 maybe_warn_implicit_fallthrough (gimple_seq seq)
2221 {
2222 if (!warn_implicit_fallthrough)
2223 return;
2224
2225 /* This warning is meant for C/C++/ObjC/ObjC++ only. */
2226 if (!(lang_GNU_C ()
2227 || lang_GNU_CXX ()
2228 || lang_GNU_OBJC ()))
2229 return;
2230
2231 struct walk_stmt_info wi;
2232 memset (&wi, 0, sizeof (wi));
2233 walk_gimple_seq (seq, warn_implicit_fallthrough_r, NULL, &wi);
2234 }
2235
2236 /* Callback for walk_gimple_seq. */
2237
static tree
expand_FALLTHROUGH_r (gimple_stmt_iterator *gsi_p, bool *handled_ops_p,
		      struct walk_stmt_info *)
{
  gimple *stmt = gsi_stmt (*gsi_p);

  *handled_ops_p = true;
  switch (gimple_code (stmt))
    {
    case GIMPLE_TRY:
    case GIMPLE_BIND:
    case GIMPLE_CATCH:
    case GIMPLE_EH_FILTER:
    case GIMPLE_TRANSACTION:
      /* Walk the sub-statements.  */
      *handled_ops_p = false;
      break;
    case GIMPLE_CALL:
      if (gimple_call_internal_p (stmt, IFN_FALLTHROUGH))
	{
	  /* Delete the marker call itself; then verify that a case or
	     default label really follows, warning otherwise.  */
	  gsi_remove (gsi_p, true);
	  /* A non-NULL return value terminates the walk.  */
	  if (gsi_end_p (*gsi_p))
	    return integer_zero_node;

	  bool found = false;
	  location_t loc = gimple_location (stmt);

	  gimple_stmt_iterator gsi2 = *gsi_p;
	  stmt = gsi_stmt (gsi2);
	  if (gimple_code (stmt) == GIMPLE_GOTO && !gimple_has_location (stmt))
	    {
	      /* An artificial jump: follow it to its target label.
		 Go on until the artificial label.  */
	      tree goto_dest = gimple_goto_dest (stmt);
	      for (; !gsi_end_p (gsi2); gsi_next (&gsi2))
		{
		  if (gimple_code (gsi_stmt (gsi2)) == GIMPLE_LABEL
		      && gimple_label_label (as_a <glabel *> (gsi_stmt (gsi2)))
			 == goto_dest)
		    break;
		}

	      /* Not found?  Stop.  */
	      if (gsi_end_p (gsi2))
		break;

	      /* Look one past it.  */
	      gsi_next (&gsi2);
	    }

	  /* We're looking for a case label or default label here.  */
	  while (!gsi_end_p (gsi2))
	    {
	      stmt = gsi_stmt (gsi2);
	      if (gimple_code (stmt) == GIMPLE_LABEL)
		{
		  tree label = gimple_label_label (as_a <glabel *> (stmt));
		  if (gimple_has_location (stmt) && DECL_ARTIFICIAL (label))
		    {
		      found = true;
		      break;
		    }
		}
	      else if (gimple_call_internal_p (stmt, IFN_ASAN_MARK))
		/* ASAN markers are transparent to this search.  */;
	      else if (!is_gimple_debug (stmt))
		/* Anything else is not expected.  */
		break;
	      gsi_next (&gsi2);
	    }
	  if (!found)
	    warning_at (loc, 0, "attribute %<fallthrough%> not preceding "
			"a case label or default label");
	}
      break;
    default:
      break;
    }
  return NULL_TREE;
}
2317
2318 /* Expand all FALLTHROUGH () calls in SEQ. */
2319
2320 static void
expand_FALLTHROUGH(gimple_seq * seq_p)2321 expand_FALLTHROUGH (gimple_seq *seq_p)
2322 {
2323 struct walk_stmt_info wi;
2324 memset (&wi, 0, sizeof (wi));
2325 walk_gimple_seq_mod (seq_p, expand_FALLTHROUGH_r, NULL, &wi);
2326 }
2327
2328
2329 /* Gimplify a SWITCH_EXPR, and collect the vector of labels it can
2330 branch to. */
2331
static enum gimplify_status
gimplify_switch_expr (tree *expr_p, gimple_seq *pre_p)
{
  tree switch_expr = *expr_p;
  gimple_seq switch_body_seq = NULL;
  enum gimplify_status ret;
  /* The type used to compare case values; fall back to the type of the
     controlling expression when the SWITCH_EXPR itself has none.  */
  tree index_type = TREE_TYPE (switch_expr);
  if (index_type == NULL_TREE)
    index_type = TREE_TYPE (SWITCH_COND (switch_expr));

  ret = gimplify_expr (&SWITCH_COND (switch_expr), pre_p, NULL, is_gimple_val,
		       fb_rvalue);
  if (ret == GS_ERROR || ret == GS_UNHANDLED)
    return ret;

  if (SWITCH_BODY (switch_expr))
    {
      vec<tree> labels;
      vec<tree> saved_labels;
      hash_set<tree> *saved_live_switch_vars = NULL;
      tree default_case = NULL_TREE;
      gswitch *switch_stmt;

      /* Save old labels, get new ones from body, then restore the old
	 labels.  Save all the things from the switch body to append after.  */
      saved_labels = gimplify_ctxp->case_labels;
      gimplify_ctxp->case_labels.create (8);

      /* Do not create live_switch_vars if SWITCH_BODY is not a BIND_EXPR.  */
      saved_live_switch_vars = gimplify_ctxp->live_switch_vars;
      tree_code body_type = TREE_CODE (SWITCH_BODY (switch_expr));
      if (body_type == BIND_EXPR || body_type == STATEMENT_LIST)
	gimplify_ctxp->live_switch_vars = new hash_set<tree> (4);
      else
	gimplify_ctxp->live_switch_vars = NULL;

      bool old_in_switch_expr = gimplify_ctxp->in_switch_expr;
      gimplify_ctxp->in_switch_expr = true;

      gimplify_stmt (&SWITCH_BODY (switch_expr), &switch_body_seq);

      gimplify_ctxp->in_switch_expr = old_in_switch_expr;
      maybe_warn_switch_unreachable (switch_body_seq);
      maybe_warn_implicit_fallthrough (switch_body_seq);
      /* Only do this for the outermost GIMPLE_SWITCH.  */
      if (!gimplify_ctxp->in_switch_expr)
	expand_FALLTHROUGH (&switch_body_seq);

      labels = gimplify_ctxp->case_labels;
      gimplify_ctxp->case_labels = saved_labels;

      if (gimplify_ctxp->live_switch_vars)
	{
	  /* All switch-local variables must have been popped by now.  */
	  gcc_assert (gimplify_ctxp->live_switch_vars->elements () == 0);
	  delete gimplify_ctxp->live_switch_vars;
	}
      gimplify_ctxp->live_switch_vars = saved_live_switch_vars;

      preprocess_case_label_vec_for_gimple (labels, index_type,
					    &default_case);

      bool add_bind = false;
      if (!default_case)
	{
	  glabel *new_default;

	  /* No default label found: synthesize an artificial one at the
	     end of the body.  */
	  default_case
	    = build_case_label (NULL_TREE, NULL_TREE,
				create_artificial_label (UNKNOWN_LOCATION));
	  if (old_in_switch_expr)
	    {
	      SWITCH_BREAK_LABEL_P (CASE_LABEL (default_case)) = 1;
	      add_bind = true;
	    }
	  new_default = gimple_build_label (CASE_LABEL (default_case));
	  gimplify_seq_add_stmt (&switch_body_seq, new_default);
	}
      else if (old_in_switch_expr)
	{
	  gimple *last = gimple_seq_last_stmt (switch_body_seq);
	  if (last && gimple_code (last) == GIMPLE_LABEL)
	    {
	      tree label = gimple_label_label (as_a <glabel *> (last));
	      if (SWITCH_BREAK_LABEL_P (label))
		add_bind = true;
	    }
	}

      switch_stmt = gimple_build_switch (SWITCH_COND (switch_expr),
					 default_case, labels);
      /* For the benefit of -Wimplicit-fallthrough, if switch_body_seq
	 ends with a GIMPLE_LABEL holding SWITCH_BREAK_LABEL_P LABEL_DECL,
	 wrap the GIMPLE_SWITCH up to that GIMPLE_LABEL into a GIMPLE_BIND,
	 so that we can easily find the start and end of the switch
	 statement.  */
      if (add_bind)
	{
	  gimple_seq bind_body = NULL;
	  gimplify_seq_add_stmt (&bind_body, switch_stmt);
	  gimple_seq_add_seq (&bind_body, switch_body_seq);
	  gbind *bind = gimple_build_bind (NULL_TREE, bind_body, NULL_TREE);
	  gimple_set_location (bind, EXPR_LOCATION (switch_expr));
	  gimplify_seq_add_stmt (pre_p, bind);
	}
      else
	{
	  gimplify_seq_add_stmt (pre_p, switch_stmt);
	  gimplify_seq_add_seq (pre_p, switch_body_seq);
	}
      labels.release ();
    }
  else
    gcc_unreachable ();

  return GS_ALL_DONE;
}
2448
2449 /* Gimplify the LABEL_EXPR pointed to by EXPR_P. */
2450
2451 static enum gimplify_status
gimplify_label_expr(tree * expr_p,gimple_seq * pre_p)2452 gimplify_label_expr (tree *expr_p, gimple_seq *pre_p)
2453 {
2454 gcc_assert (decl_function_context (LABEL_EXPR_LABEL (*expr_p))
2455 == current_function_decl);
2456
2457 tree label = LABEL_EXPR_LABEL (*expr_p);
2458 glabel *label_stmt = gimple_build_label (label);
2459 gimple_set_location (label_stmt, EXPR_LOCATION (*expr_p));
2460 gimplify_seq_add_stmt (pre_p, label_stmt);
2461
2462 if (lookup_attribute ("cold", DECL_ATTRIBUTES (label)))
2463 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_COLD_LABEL,
2464 NOT_TAKEN));
2465 else if (lookup_attribute ("hot", DECL_ATTRIBUTES (label)))
2466 gimple_seq_add_stmt (pre_p, gimple_build_predict (PRED_HOT_LABEL,
2467 TAKEN));
2468
2469 return GS_ALL_DONE;
2470 }
2471
2472 /* Gimplify the CASE_LABEL_EXPR pointed to by EXPR_P. */
2473
2474 static enum gimplify_status
gimplify_case_label_expr(tree * expr_p,gimple_seq * pre_p)2475 gimplify_case_label_expr (tree *expr_p, gimple_seq *pre_p)
2476 {
2477 struct gimplify_ctx *ctxp;
2478 glabel *label_stmt;
2479
2480 /* Invalid programs can play Duff's Device type games with, for example,
2481 #pragma omp parallel. At least in the C front end, we don't
2482 detect such invalid branches until after gimplification, in the
2483 diagnose_omp_blocks pass. */
2484 for (ctxp = gimplify_ctxp; ; ctxp = ctxp->prev_context)
2485 if (ctxp->case_labels.exists ())
2486 break;
2487
2488 label_stmt = gimple_build_label (CASE_LABEL (*expr_p));
2489 gimple_set_location (label_stmt, EXPR_LOCATION (*expr_p));
2490 ctxp->case_labels.safe_push (*expr_p);
2491 gimplify_seq_add_stmt (pre_p, label_stmt);
2492
2493 return GS_ALL_DONE;
2494 }
2495
2496 /* Build a GOTO to the LABEL_DECL pointed to by LABEL_P, building it first
2497 if necessary. */
2498
2499 tree
build_and_jump(tree * label_p)2500 build_and_jump (tree *label_p)
2501 {
2502 if (label_p == NULL)
2503 /* If there's nowhere to jump, just fall through. */
2504 return NULL_TREE;
2505
2506 if (*label_p == NULL_TREE)
2507 {
2508 tree label = create_artificial_label (UNKNOWN_LOCATION);
2509 *label_p = label;
2510 }
2511
2512 return build1 (GOTO_EXPR, void_type_node, *label_p);
2513 }
2514
2515 /* Gimplify an EXIT_EXPR by converting to a GOTO_EXPR inside a COND_EXPR.
2516 This also involves building a label to jump to and communicating it to
2517 gimplify_loop_expr through gimplify_ctxp->exit_label. */
2518
2519 static enum gimplify_status
gimplify_exit_expr(tree * expr_p)2520 gimplify_exit_expr (tree *expr_p)
2521 {
2522 tree cond = TREE_OPERAND (*expr_p, 0);
2523 tree expr;
2524
2525 expr = build_and_jump (&gimplify_ctxp->exit_label);
2526 expr = build3 (COND_EXPR, void_type_node, cond, expr, NULL_TREE);
2527 *expr_p = expr;
2528
2529 return GS_OK;
2530 }
2531
2532 /* *EXPR_P is a COMPONENT_REF being used as an rvalue. If its type is
2533 different from its canonical type, wrap the whole thing inside a
2534 NOP_EXPR and force the type of the COMPONENT_REF to be the canonical
2535 type.
2536
2537 The canonical type of a COMPONENT_REF is the type of the field being
2538 referenced--unless the field is a bit-field which can be read directly
2539 in a smaller mode, in which case the canonical type is the
2540 sign-appropriate type corresponding to that mode. */
2541
2542 static void
canonicalize_component_ref(tree * expr_p)2543 canonicalize_component_ref (tree *expr_p)
2544 {
2545 tree expr = *expr_p;
2546 tree type;
2547
2548 gcc_assert (TREE_CODE (expr) == COMPONENT_REF);
2549
2550 if (INTEGRAL_TYPE_P (TREE_TYPE (expr)))
2551 type = TREE_TYPE (get_unwidened (expr, NULL_TREE));
2552 else
2553 type = TREE_TYPE (TREE_OPERAND (expr, 1));
2554
2555 /* One could argue that all the stuff below is not necessary for
2556 the non-bitfield case and declare it a FE error if type
2557 adjustment would be needed. */
2558 if (TREE_TYPE (expr) != type)
2559 {
2560 #ifdef ENABLE_TYPES_CHECKING
2561 tree old_type = TREE_TYPE (expr);
2562 #endif
2563 int type_quals;
2564
2565 /* We need to preserve qualifiers and propagate them from
2566 operand 0. */
2567 type_quals = TYPE_QUALS (type)
2568 | TYPE_QUALS (TREE_TYPE (TREE_OPERAND (expr, 0)));
2569 if (TYPE_QUALS (type) != type_quals)
2570 type = build_qualified_type (TYPE_MAIN_VARIANT (type), type_quals);
2571
2572 /* Set the type of the COMPONENT_REF to the underlying type. */
2573 TREE_TYPE (expr) = type;
2574
2575 #ifdef ENABLE_TYPES_CHECKING
2576 /* It is now a FE error, if the conversion from the canonical
2577 type to the original expression type is not useless. */
2578 gcc_assert (useless_type_conversion_p (old_type, type));
2579 #endif
2580 }
2581 }
2582
2583 /* If a NOP conversion is changing a pointer to array of foo to a pointer
2584 to foo, embed that change in the ADDR_EXPR by converting
2585 T array[U];
2586 (T *)&array
2587 ==>
2588 &array[L]
2589 where L is the lower bound. For simplicity, only do this for constant
2590 lower bound.
2591 The constraint is that the type of &array[L] is trivially convertible
2592 to T *. */
2593
2594 static void
canonicalize_addr_expr(tree * expr_p)2595 canonicalize_addr_expr (tree *expr_p)
2596 {
2597 tree expr = *expr_p;
2598 tree addr_expr = TREE_OPERAND (expr, 0);
2599 tree datype, ddatype, pddatype;
2600
2601 /* We simplify only conversions from an ADDR_EXPR to a pointer type. */
2602 if (!POINTER_TYPE_P (TREE_TYPE (expr))
2603 || TREE_CODE (addr_expr) != ADDR_EXPR)
2604 return;
2605
2606 /* The addr_expr type should be a pointer to an array. */
2607 datype = TREE_TYPE (TREE_TYPE (addr_expr));
2608 if (TREE_CODE (datype) != ARRAY_TYPE)
2609 return;
2610
2611 /* The pointer to element type shall be trivially convertible to
2612 the expression pointer type. */
2613 ddatype = TREE_TYPE (datype);
2614 pddatype = build_pointer_type (ddatype);
2615 if (!useless_type_conversion_p (TYPE_MAIN_VARIANT (TREE_TYPE (expr)),
2616 pddatype))
2617 return;
2618
2619 /* The lower bound and element sizes must be constant. */
2620 if (!TYPE_SIZE_UNIT (ddatype)
2621 || TREE_CODE (TYPE_SIZE_UNIT (ddatype)) != INTEGER_CST
2622 || !TYPE_DOMAIN (datype) || !TYPE_MIN_VALUE (TYPE_DOMAIN (datype))
2623 || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (datype))) != INTEGER_CST)
2624 return;
2625
2626 /* All checks succeeded. Build a new node to merge the cast. */
2627 *expr_p = build4 (ARRAY_REF, ddatype, TREE_OPERAND (addr_expr, 0),
2628 TYPE_MIN_VALUE (TYPE_DOMAIN (datype)),
2629 NULL_TREE, NULL_TREE);
2630 *expr_p = build1 (ADDR_EXPR, pddatype, *expr_p);
2631
2632 /* We can have stripped a required restrict qualifier above. */
2633 if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
2634 *expr_p = fold_convert (TREE_TYPE (expr), *expr_p);
2635 }
2636
2637 /* *EXPR_P is a NOP_EXPR or CONVERT_EXPR. Remove it and/or other conversions
2638 underneath as appropriate. */
2639
2640 static enum gimplify_status
gimplify_conversion(tree * expr_p)2641 gimplify_conversion (tree *expr_p)
2642 {
2643 location_t loc = EXPR_LOCATION (*expr_p);
2644 gcc_assert (CONVERT_EXPR_P (*expr_p));
2645
2646 /* Then strip away all but the outermost conversion. */
2647 STRIP_SIGN_NOPS (TREE_OPERAND (*expr_p, 0));
2648
2649 /* And remove the outermost conversion if it's useless. */
2650 if (tree_ssa_useless_type_conversion (*expr_p))
2651 *expr_p = TREE_OPERAND (*expr_p, 0);
2652
2653 /* If we still have a conversion at the toplevel,
2654 then canonicalize some constructs. */
2655 if (CONVERT_EXPR_P (*expr_p))
2656 {
2657 tree sub = TREE_OPERAND (*expr_p, 0);
2658
2659 /* If a NOP conversion is changing the type of a COMPONENT_REF
2660 expression, then canonicalize its type now in order to expose more
2661 redundant conversions. */
2662 if (TREE_CODE (sub) == COMPONENT_REF)
2663 canonicalize_component_ref (&TREE_OPERAND (*expr_p, 0));
2664
2665 /* If a NOP conversion is changing a pointer to array of foo
2666 to a pointer to foo, embed that change in the ADDR_EXPR. */
2667 else if (TREE_CODE (sub) == ADDR_EXPR)
2668 canonicalize_addr_expr (expr_p);
2669 }
2670
2671 /* If we have a conversion to a non-register type force the
2672 use of a VIEW_CONVERT_EXPR instead. */
2673 if (CONVERT_EXPR_P (*expr_p) && !is_gimple_reg_type (TREE_TYPE (*expr_p)))
2674 *expr_p = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (*expr_p),
2675 TREE_OPERAND (*expr_p, 0));
2676
2677 /* Canonicalize CONVERT_EXPR to NOP_EXPR. */
2678 if (TREE_CODE (*expr_p) == CONVERT_EXPR)
2679 TREE_SET_CODE (*expr_p, NOP_EXPR);
2680
2681 return GS_OK;
2682 }
2683
/* Nonlocal VLAs seen in the current function; populated and consumed by
   gimplify_var_or_parm_decl below.  */
static hash_set<tree> *nonlocal_vlas;

/* The VAR_DECLs created for nonlocal VLAs for debug info purposes,
   chained through DECL_CHAIN.  */
static tree nonlocal_vla_vars;
2689
2690 /* Gimplify a VAR_DECL or PARM_DECL. Return GS_OK if we expanded a
2691 DECL_VALUE_EXPR, and it's worth re-examining things. */
2692
static enum gimplify_status
gimplify_var_or_parm_decl (tree *expr_p)
{
  tree decl = *expr_p;

  /* ??? If this is a local variable, and it has not been seen in any
     outer BIND_EXPR, then it's probably the result of a duplicate
     declaration, for which we've already issued an error.  It would
     be really nice if the front end wouldn't leak these at all.
     Currently the only known culprit is C++ destructors, as seen
     in g++.old-deja/g++.jason/binding.C.  */
  if (VAR_P (decl)
      && !DECL_SEEN_IN_BIND_EXPR_P (decl)
      && !TREE_STATIC (decl) && !DECL_EXTERNAL (decl)
      && decl_function_context (decl) == current_function_decl)
    {
      gcc_assert (seen_error ());
      return GS_ERROR;
    }

  /* When within an OMP context, notice uses of variables.  */
  if (gimplify_omp_ctxp && omp_notice_variable (gimplify_omp_ctxp, decl, true))
    return GS_ALL_DONE;

  /* If the decl is an alias for another expression, substitute it now.  */
  if (DECL_HAS_VALUE_EXPR_P (decl))
    {
      tree value_expr = DECL_VALUE_EXPR (decl);

      /* For referenced nonlocal VLAs add a decl for debugging purposes
	 to the current function.  */
      if (VAR_P (decl)
	  && TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST
	  && nonlocal_vlas != NULL
	  && TREE_CODE (value_expr) == INDIRECT_REF
	  && TREE_CODE (TREE_OPERAND (value_expr, 0)) == VAR_DECL
	  && decl_function_context (decl) != current_function_decl)
	{
	  /* Skip enclosing workshare/simd/accelerator OMP regions; only
	     act when no other enclosing region remains.  */
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
	  while (ctx
		 && (ctx->region_type == ORT_WORKSHARE
		     || ctx->region_type == ORT_SIMD
		     || ctx->region_type == ORT_ACC))
	    ctx = ctx->outer_context;
	  /* hash_set::add returns true if DECL was already present, so
	     the debug copy is created only once per VLA.  */
	  if (!ctx && !nonlocal_vlas->add (decl))
	    {
	      tree copy = copy_node (decl);

	      lang_hooks.dup_lang_specific_decl (copy);
	      SET_DECL_RTL (copy, 0);
	      TREE_USED (copy) = 1;
	      /* Chain the copy onto the list of debug-only VLA decls.  */
	      DECL_CHAIN (copy) = nonlocal_vla_vars;
	      nonlocal_vla_vars = copy;
	      SET_DECL_VALUE_EXPR (copy, unshare_expr (value_expr));
	      DECL_HAS_VALUE_EXPR_P (copy) = 1;
	    }
	}

      /* Return GS_OK so the caller re-examines the substituted form.  */
      *expr_p = unshare_expr (value_expr);
      return GS_OK;
    }

  return GS_ALL_DONE;
}
2757
2758 /* Recalculate the value of the TREE_SIDE_EFFECTS flag for T. */
2759
2760 static void
recalculate_side_effects(tree t)2761 recalculate_side_effects (tree t)
2762 {
2763 enum tree_code code = TREE_CODE (t);
2764 int len = TREE_OPERAND_LENGTH (t);
2765 int i;
2766
2767 switch (TREE_CODE_CLASS (code))
2768 {
2769 case tcc_expression:
2770 switch (code)
2771 {
2772 case INIT_EXPR:
2773 case MODIFY_EXPR:
2774 case VA_ARG_EXPR:
2775 case PREDECREMENT_EXPR:
2776 case PREINCREMENT_EXPR:
2777 case POSTDECREMENT_EXPR:
2778 case POSTINCREMENT_EXPR:
2779 /* All of these have side-effects, no matter what their
2780 operands are. */
2781 return;
2782
2783 default:
2784 break;
2785 }
2786 /* Fall through. */
2787
2788 case tcc_comparison: /* a comparison expression */
2789 case tcc_unary: /* a unary arithmetic expression */
2790 case tcc_binary: /* a binary arithmetic expression */
2791 case tcc_reference: /* a reference */
2792 case tcc_vl_exp: /* a function call */
2793 TREE_SIDE_EFFECTS (t) = TREE_THIS_VOLATILE (t);
2794 for (i = 0; i < len; ++i)
2795 {
2796 tree op = TREE_OPERAND (t, i);
2797 if (op && TREE_SIDE_EFFECTS (op))
2798 TREE_SIDE_EFFECTS (t) = 1;
2799 }
2800 break;
2801
2802 case tcc_constant:
2803 /* No side-effects. */
2804 return;
2805
2806 default:
2807 gcc_unreachable ();
2808 }
2809 }
2810
2811 /* Gimplify the COMPONENT_REF, ARRAY_REF, REALPART_EXPR or IMAGPART_EXPR
2812 node *EXPR_P.
2813
2814 compound_lval
2815 : min_lval '[' val ']'
2816 | min_lval '.' ID
2817 | compound_lval '[' val ']'
2818 | compound_lval '.' ID
2819
2820 This is not part of the original SIMPLE definition, which separates
2821 array and member references, but it seems reasonable to handle them
2822 together. Also, this way we don't run into problems with union
2823 aliasing; gcc requires that for accesses through a union to alias, the
2824 union reference must be explicit, which was not always the case when we
2825 were splitting up array and member refs.
2826
2827 PRE_P points to the sequence where side effects that must happen before
2828 *EXPR_P should be stored.
2829
2830 POST_P points to the sequence where side effects that must happen after
2831 *EXPR_P should be stored. */
2832
static enum gimplify_status
gimplify_compound_lval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
			fallback_t fallback)
{
  tree *p;
  /* Overall status: the MIN of all sub-gimplifications (GS_ERROR is the
     smallest enumerator, so an error anywhere wins).  */
  enum gimplify_status ret = GS_ALL_DONE, tret;
  int i;
  location_t loc = EXPR_LOCATION (*expr_p);
  tree expr = *expr_p;

  /* Create a stack of the subexpressions so later we can walk them in
     order from inner to outer.  */
  auto_vec<tree, 10> expr_stack;

  /* We can handle anything that get_inner_reference can deal with.  */
  for (p = expr_p; ; p = &TREE_OPERAND (*p, 0))
    {
    restart:
      /* Fold INDIRECT_REFs now to turn them into ARRAY_REFs.  */
      if (TREE_CODE (*p) == INDIRECT_REF)
	*p = fold_indirect_ref_loc (loc, *p);

      if (handled_component_p (*p))
	;
      /* Expand DECL_VALUE_EXPR now.  In some cases that may expose
	 additional COMPONENT_REFs.  */
      else if ((VAR_P (*p) || TREE_CODE (*p) == PARM_DECL)
	       && gimplify_var_or_parm_decl (p) == GS_OK)
	goto restart;
      else
	break;

      expr_stack.safe_push (*p);
    }

  gcc_assert (expr_stack.length ());

  /* Now EXPR_STACK is a stack of pointers to all the refs we've
     walked through and P points to the innermost expression.

     Java requires that we elaborated nodes in source order.  That
     means we must gimplify the inner expression followed by each of
     the indices, in order.  But we can't gimplify the inner
     expression until we deal with any variable bounds, sizes, or
     positions in order to deal with PLACEHOLDER_EXPRs.

     So we do this in three steps.  First we deal with the annotations
     for any variables in the components, then we gimplify the base,
     then we gimplify any indices, from left to right.  */
  for (i = expr_stack.length () - 1; i >= 0; i--)
    {
      tree t = expr_stack[i];

      if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
	{
	  /* Gimplify the low bound and element type size and put them into
	     the ARRAY_REF.  If these values are set, they have already been
	     gimplified.  */
	  if (TREE_OPERAND (t, 2) == NULL_TREE)
	    {
	      tree low = unshare_expr (array_ref_low_bound (t));
	      if (!is_gimple_min_invariant (low))
		{
		  TREE_OPERAND (t, 2) = low;
		  tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
					post_p, is_gimple_reg,
					fb_rvalue);
		  ret = MIN (ret, tret);
		}
	    }
	  else
	    {
	      tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
				    is_gimple_reg, fb_rvalue);
	      ret = MIN (ret, tret);
	    }

	  if (TREE_OPERAND (t, 3) == NULL_TREE)
	    {
	      tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 0)));
	      tree elmt_size = unshare_expr (array_ref_element_size (t));
	      tree factor = size_int (TYPE_ALIGN_UNIT (elmt_type));

	      /* Divide the element size by the alignment of the element
		 type (above).  */
	      elmt_size
		= size_binop_loc (loc, EXACT_DIV_EXPR, elmt_size, factor);

	      if (!is_gimple_min_invariant (elmt_size))
		{
		  TREE_OPERAND (t, 3) = elmt_size;
		  tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p,
					post_p, is_gimple_reg,
					fb_rvalue);
		  ret = MIN (ret, tret);
		}
	    }
	  else
	    {
	      tret = gimplify_expr (&TREE_OPERAND (t, 3), pre_p, post_p,
				    is_gimple_reg, fb_rvalue);
	      ret = MIN (ret, tret);
	    }
	}
      else if (TREE_CODE (t) == COMPONENT_REF)
	{
	  /* Set the field offset into T and gimplify it.  */
	  if (TREE_OPERAND (t, 2) == NULL_TREE)
	    {
	      tree offset = unshare_expr (component_ref_field_offset (t));
	      tree field = TREE_OPERAND (t, 1);
	      tree factor
		= size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT);

	      /* Divide the offset by its alignment.  */
	      offset = size_binop_loc (loc, EXACT_DIV_EXPR, offset, factor);

	      if (!is_gimple_min_invariant (offset))
		{
		  TREE_OPERAND (t, 2) = offset;
		  tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p,
					post_p, is_gimple_reg,
					fb_rvalue);
		  ret = MIN (ret, tret);
		}
	    }
	  else
	    {
	      tret = gimplify_expr (&TREE_OPERAND (t, 2), pre_p, post_p,
				    is_gimple_reg, fb_rvalue);
	      ret = MIN (ret, tret);
	    }
	}
    }

  /* Step 2 is to gimplify the base expression.  Make sure lvalue is set
     so as to match the min_lval predicate.  Failure to do so may result
     in the creation of large aggregate temporaries.  */
  tret = gimplify_expr (p, pre_p, post_p, is_gimple_min_lval,
			fallback | fb_lvalue);
  ret = MIN (ret, tret);

  /* And finally, the indices and operands of ARRAY_REF.  During this
     loop we also remove any useless conversions.  */
  for (; expr_stack.length () > 0; )
    {
      tree t = expr_stack.pop ();

      if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
	{
	  /* Gimplify the dimension.  */
	  if (!is_gimple_min_invariant (TREE_OPERAND (t, 1)))
	    {
	      tret = gimplify_expr (&TREE_OPERAND (t, 1), pre_p, post_p,
				    is_gimple_val, fb_rvalue);
	      ret = MIN (ret, tret);
	    }
	}

      STRIP_USELESS_TYPE_CONVERSION (TREE_OPERAND (t, 0));

      /* The innermost expression P may have originally had
	 TREE_SIDE_EFFECTS set which would have caused all the outer
	 expressions in *EXPR_P leading to P to also have had
	 TREE_SIDE_EFFECTS set.  */
      recalculate_side_effects (t);
    }

  /* If the outermost expression is a COMPONENT_REF, canonicalize its type.  */
  if ((fallback & fb_rvalue) && TREE_CODE (*expr_p) == COMPONENT_REF)
    {
      canonicalize_component_ref (expr_p);
    }

  /* expr_stack is an auto_vec; releasing early is merely tidy here.  */
  expr_stack.release ();

  /* If nothing changed, the caller-visible tree must be unmodified.  */
  gcc_assert (*expr_p == expr || ret != GS_ALL_DONE);

  return ret;
}
3013
/* Gimplify the self modifying expression pointed to by EXPR_P
   (++, --, +=, -=).

   PRE_P points to the list where side effects that must happen before
   *EXPR_P should be stored.

   POST_P points to the list where side effects that must happen after
   *EXPR_P should be stored.

   WANT_VALUE is nonzero iff we want to use the value of this expression
   in another expression.

   ARITH_TYPE is the type the computation should be performed in.

   Returns GS_ALL_DONE in the postfix case (the rewritten expression is
   already fully gimplified), GS_OK in the prefix case (a MODIFY_EXPR is
   left for the caller to gimplify), or GS_ERROR on failure.  */

enum gimplify_status
gimplify_self_mod_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
			bool want_value, tree arith_type)
{
  enum tree_code code;
  tree lhs, lvalue, rhs, t1;
  gimple_seq post = NULL, *orig_post_p = post_p;
  bool postfix;
  enum tree_code arith_code;
  enum gimplify_status ret;
  location_t loc = EXPR_LOCATION (*expr_p);

  code = TREE_CODE (*expr_p);

  gcc_assert (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR
	      || code == PREINCREMENT_EXPR || code == PREDECREMENT_EXPR);

  /* Prefix or postfix?  */
  if (code == POSTINCREMENT_EXPR || code == POSTDECREMENT_EXPR)
    /* Faster to treat as prefix if result is not used.  */
    postfix = want_value;
  else
    postfix = false;

  /* For postfix, make sure the inner expression's post side effects
     are executed after side effects from this expression.  */
  if (postfix)
    post_p = &post;

  /* Add or subtract?  */
  if (code == PREINCREMENT_EXPR || code == POSTINCREMENT_EXPR)
    arith_code = PLUS_EXPR;
  else
    arith_code = MINUS_EXPR;

  /* Gimplify the LHS into a GIMPLE lvalue.  */
  lvalue = TREE_OPERAND (*expr_p, 0);
  ret = gimplify_expr (&lvalue, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
  if (ret == GS_ERROR)
    return ret;

  /* Extract the operands to the arithmetic operation.  */
  lhs = lvalue;
  rhs = TREE_OPERAND (*expr_p, 1);

  /* For postfix operator, we evaluate the LHS to an rvalue and then use
     that as the result value and in the postqueue operation.  */
  if (postfix)
    {
      ret = gimplify_expr (&lhs, pre_p, post_p, is_gimple_val, fb_rvalue);
      if (ret == GS_ERROR)
	return ret;

      /* Snapshot the pre-update value into a temporary; it becomes the
	 value of the whole postfix expression.  */
      lhs = get_initialized_tmp_var (lhs, pre_p, NULL);
    }

  /* For POINTERs increment, use POINTER_PLUS_EXPR.  */
  if (POINTER_TYPE_P (TREE_TYPE (lhs)))
    {
      rhs = convert_to_ptrofftype_loc (loc, rhs);
      /* There is no POINTER_MINUS_EXPR; a decrement is expressed by
	 adding the negated offset.  */
      if (arith_code == MINUS_EXPR)
	rhs = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (rhs), rhs);
      t1 = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (*expr_p), lhs, rhs);
    }
  else
    t1 = fold_convert (TREE_TYPE (*expr_p),
		       fold_build2 (arith_code, arith_type,
				    fold_convert (arith_type, lhs),
				    fold_convert (arith_type, rhs)));

  if (postfix)
    {
      /* Emit the store, then the saved inner post side effects, and
	 return the snapshot of the original value.  */
      gimplify_assign (lvalue, t1, pre_p);
      gimplify_seq_add_seq (orig_post_p, post);
      *expr_p = lhs;
      return GS_ALL_DONE;
    }
  else
    {
      *expr_p = build2 (MODIFY_EXPR, TREE_TYPE (lvalue), lvalue, t1);
      return GS_OK;
    }
}
3111
3112 /* If *EXPR_P has a variable sized type, wrap it in a WITH_SIZE_EXPR. */
3113
3114 static void
maybe_with_size_expr(tree * expr_p)3115 maybe_with_size_expr (tree *expr_p)
3116 {
3117 tree expr = *expr_p;
3118 tree type = TREE_TYPE (expr);
3119 tree size;
3120
3121 /* If we've already wrapped this or the type is error_mark_node, we can't do
3122 anything. */
3123 if (TREE_CODE (expr) == WITH_SIZE_EXPR
3124 || type == error_mark_node)
3125 return;
3126
3127 /* If the size isn't known or is a constant, we have nothing to do. */
3128 size = TYPE_SIZE_UNIT (type);
3129 if (!size || poly_int_tree_p (size))
3130 return;
3131
3132 /* Otherwise, make a WITH_SIZE_EXPR. */
3133 size = unshare_expr (size);
3134 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, expr);
3135 *expr_p = build2 (WITH_SIZE_EXPR, type, expr, size);
3136 }
3137
3138 /* Helper for gimplify_call_expr. Gimplify a single argument *ARG_P
3139 Store any side-effects in PRE_P. CALL_LOCATION is the location of
3140 the CALL_EXPR. If ALLOW_SSA is set the actual parameter may be
3141 gimplified to an SSA name. */
3142
3143 enum gimplify_status
gimplify_arg(tree * arg_p,gimple_seq * pre_p,location_t call_location,bool allow_ssa)3144 gimplify_arg (tree *arg_p, gimple_seq *pre_p, location_t call_location,
3145 bool allow_ssa)
3146 {
3147 bool (*test) (tree);
3148 fallback_t fb;
3149
3150 /* In general, we allow lvalues for function arguments to avoid
3151 extra overhead of copying large aggregates out of even larger
3152 aggregates into temporaries only to copy the temporaries to
3153 the argument list. Make optimizers happy by pulling out to
3154 temporaries those types that fit in registers. */
3155 if (is_gimple_reg_type (TREE_TYPE (*arg_p)))
3156 test = is_gimple_val, fb = fb_rvalue;
3157 else
3158 {
3159 test = is_gimple_lvalue, fb = fb_either;
3160 /* Also strip a TARGET_EXPR that would force an extra copy. */
3161 if (TREE_CODE (*arg_p) == TARGET_EXPR)
3162 {
3163 tree init = TARGET_EXPR_INITIAL (*arg_p);
3164 if (init
3165 && !VOID_TYPE_P (TREE_TYPE (init)))
3166 *arg_p = init;
3167 }
3168 }
3169
3170 /* If this is a variable sized type, we must remember the size. */
3171 maybe_with_size_expr (arg_p);
3172
3173 /* FIXME diagnostics: This will mess up gcc.dg/Warray-bounds.c. */
3174 /* Make sure arguments have the same location as the function call
3175 itself. */
3176 protected_set_expr_location (*arg_p, call_location);
3177
3178 /* There is a sequence point before a function call. Side effects in
3179 the argument list must occur before the actual call. So, when
3180 gimplifying arguments, force gimplify_expr to use an internal
3181 post queue which is then appended to the end of PRE_P. */
3182 return gimplify_expr (arg_p, pre_p, NULL, test, fb, allow_ssa);
3183 }
3184
3185 /* Don't fold inside offloading or taskreg regions: it can break code by
3186 adding decl references that weren't in the source. We'll do it during
3187 omplower pass instead. */
3188
3189 static bool
maybe_fold_stmt(gimple_stmt_iterator * gsi)3190 maybe_fold_stmt (gimple_stmt_iterator *gsi)
3191 {
3192 struct gimplify_omp_ctx *ctx;
3193 for (ctx = gimplify_omp_ctxp; ctx; ctx = ctx->outer_context)
3194 if ((ctx->region_type & (ORT_TARGET | ORT_PARALLEL | ORT_TASK)) != 0)
3195 return false;
3196 return fold_stmt (gsi);
3197 }
3198
3199 /* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P.
3200 WANT_VALUE is true if the result of the call is desired. */
3201
3202 static enum gimplify_status
gimplify_call_expr(tree * expr_p,gimple_seq * pre_p,bool want_value)3203 gimplify_call_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
3204 {
3205 tree fndecl, parms, p, fnptrtype;
3206 enum gimplify_status ret;
3207 int i, nargs;
3208 gcall *call;
3209 bool builtin_va_start_p = false;
3210 location_t loc = EXPR_LOCATION (*expr_p);
3211
3212 gcc_assert (TREE_CODE (*expr_p) == CALL_EXPR);
3213
3214 /* For reliable diagnostics during inlining, it is necessary that
3215 every call_expr be annotated with file and line. */
3216 if (! EXPR_HAS_LOCATION (*expr_p))
3217 SET_EXPR_LOCATION (*expr_p, input_location);
3218
3219 /* Gimplify internal functions created in the FEs. */
3220 if (CALL_EXPR_FN (*expr_p) == NULL_TREE)
3221 {
3222 if (want_value)
3223 return GS_ALL_DONE;
3224
3225 nargs = call_expr_nargs (*expr_p);
3226 enum internal_fn ifn = CALL_EXPR_IFN (*expr_p);
3227 auto_vec<tree> vargs (nargs);
3228
3229 for (i = 0; i < nargs; i++)
3230 {
3231 gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
3232 EXPR_LOCATION (*expr_p));
3233 vargs.quick_push (CALL_EXPR_ARG (*expr_p, i));
3234 }
3235
3236 gcall *call = gimple_build_call_internal_vec (ifn, vargs);
3237 gimple_call_set_nothrow (call, TREE_NOTHROW (*expr_p));
3238 gimplify_seq_add_stmt (pre_p, call);
3239 return GS_ALL_DONE;
3240 }
3241
3242 /* This may be a call to a builtin function.
3243
3244 Builtin function calls may be transformed into different
3245 (and more efficient) builtin function calls under certain
3246 circumstances. Unfortunately, gimplification can muck things
3247 up enough that the builtin expanders are not aware that certain
3248 transformations are still valid.
3249
3250 So we attempt transformation/gimplification of the call before
3251 we gimplify the CALL_EXPR. At this time we do not manage to
3252 transform all calls in the same manner as the expanders do, but
3253 we do transform most of them. */
3254 fndecl = get_callee_fndecl (*expr_p);
3255 if (fndecl
3256 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3257 switch (DECL_FUNCTION_CODE (fndecl))
3258 {
3259 CASE_BUILT_IN_ALLOCA:
3260 /* If the call has been built for a variable-sized object, then we
3261 want to restore the stack level when the enclosing BIND_EXPR is
3262 exited to reclaim the allocated space; otherwise, we precisely
3263 need to do the opposite and preserve the latest stack level. */
3264 if (CALL_ALLOCA_FOR_VAR_P (*expr_p))
3265 gimplify_ctxp->save_stack = true;
3266 else
3267 gimplify_ctxp->keep_stack = true;
3268 break;
3269
3270 case BUILT_IN_VA_START:
3271 {
3272 builtin_va_start_p = TRUE;
3273 if (call_expr_nargs (*expr_p) < 2)
3274 {
3275 error ("too few arguments to function %<va_start%>");
3276 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
3277 return GS_OK;
3278 }
3279
3280 if (fold_builtin_next_arg (*expr_p, true))
3281 {
3282 *expr_p = build_empty_stmt (EXPR_LOCATION (*expr_p));
3283 return GS_OK;
3284 }
3285 break;
3286 }
3287
3288 default:
3289 ;
3290 }
3291 if (fndecl && DECL_BUILT_IN (fndecl))
3292 {
3293 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
3294 if (new_tree && new_tree != *expr_p)
3295 {
3296 /* There was a transformation of this call which computes the
3297 same value, but in a more efficient way. Return and try
3298 again. */
3299 *expr_p = new_tree;
3300 return GS_OK;
3301 }
3302 }
3303
3304 /* Remember the original function pointer type. */
3305 fnptrtype = TREE_TYPE (CALL_EXPR_FN (*expr_p));
3306
3307 /* There is a sequence point before the call, so any side effects in
3308 the calling expression must occur before the actual call. Force
3309 gimplify_expr to use an internal post queue. */
3310 ret = gimplify_expr (&CALL_EXPR_FN (*expr_p), pre_p, NULL,
3311 is_gimple_call_addr, fb_rvalue);
3312
3313 nargs = call_expr_nargs (*expr_p);
3314
3315 /* Get argument types for verification. */
3316 fndecl = get_callee_fndecl (*expr_p);
3317 parms = NULL_TREE;
3318 if (fndecl)
3319 parms = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
3320 else
3321 parms = TYPE_ARG_TYPES (TREE_TYPE (fnptrtype));
3322
3323 if (fndecl && DECL_ARGUMENTS (fndecl))
3324 p = DECL_ARGUMENTS (fndecl);
3325 else if (parms)
3326 p = parms;
3327 else
3328 p = NULL_TREE;
3329 for (i = 0; i < nargs && p; i++, p = TREE_CHAIN (p))
3330 ;
3331
3332 /* If the last argument is __builtin_va_arg_pack () and it is not
3333 passed as a named argument, decrease the number of CALL_EXPR
3334 arguments and set instead the CALL_EXPR_VA_ARG_PACK flag. */
3335 if (!p
3336 && i < nargs
3337 && TREE_CODE (CALL_EXPR_ARG (*expr_p, nargs - 1)) == CALL_EXPR)
3338 {
3339 tree last_arg = CALL_EXPR_ARG (*expr_p, nargs - 1);
3340 tree last_arg_fndecl = get_callee_fndecl (last_arg);
3341
3342 if (last_arg_fndecl
3343 && TREE_CODE (last_arg_fndecl) == FUNCTION_DECL
3344 && DECL_BUILT_IN_CLASS (last_arg_fndecl) == BUILT_IN_NORMAL
3345 && DECL_FUNCTION_CODE (last_arg_fndecl) == BUILT_IN_VA_ARG_PACK)
3346 {
3347 tree call = *expr_p;
3348
3349 --nargs;
3350 *expr_p = build_call_array_loc (loc, TREE_TYPE (call),
3351 CALL_EXPR_FN (call),
3352 nargs, CALL_EXPR_ARGP (call));
3353
3354 /* Copy all CALL_EXPR flags, location and block, except
3355 CALL_EXPR_VA_ARG_PACK flag. */
3356 CALL_EXPR_STATIC_CHAIN (*expr_p) = CALL_EXPR_STATIC_CHAIN (call);
3357 CALL_EXPR_TAILCALL (*expr_p) = CALL_EXPR_TAILCALL (call);
3358 CALL_EXPR_RETURN_SLOT_OPT (*expr_p)
3359 = CALL_EXPR_RETURN_SLOT_OPT (call);
3360 CALL_FROM_THUNK_P (*expr_p) = CALL_FROM_THUNK_P (call);
3361 SET_EXPR_LOCATION (*expr_p, EXPR_LOCATION (call));
3362
3363 /* Set CALL_EXPR_VA_ARG_PACK. */
3364 CALL_EXPR_VA_ARG_PACK (*expr_p) = 1;
3365 }
3366 }
3367
3368 /* If the call returns twice then after building the CFG the call
3369 argument computations will no longer dominate the call because
3370 we add an abnormal incoming edge to the call. So do not use SSA
3371 vars there. */
3372 bool returns_twice = call_expr_flags (*expr_p) & ECF_RETURNS_TWICE;
3373
3374 /* Gimplify the function arguments. */
3375 if (nargs > 0)
3376 {
3377 for (i = (PUSH_ARGS_REVERSED ? nargs - 1 : 0);
3378 PUSH_ARGS_REVERSED ? i >= 0 : i < nargs;
3379 PUSH_ARGS_REVERSED ? i-- : i++)
3380 {
3381 enum gimplify_status t;
3382
3383 /* Avoid gimplifying the second argument to va_start, which needs to
3384 be the plain PARM_DECL. */
3385 if ((i != 1) || !builtin_va_start_p)
3386 {
3387 t = gimplify_arg (&CALL_EXPR_ARG (*expr_p, i), pre_p,
3388 EXPR_LOCATION (*expr_p), ! returns_twice);
3389
3390 if (t == GS_ERROR)
3391 ret = GS_ERROR;
3392 }
3393 }
3394 }
3395
3396 /* Gimplify the static chain. */
3397 if (CALL_EXPR_STATIC_CHAIN (*expr_p))
3398 {
3399 if (fndecl && !DECL_STATIC_CHAIN (fndecl))
3400 CALL_EXPR_STATIC_CHAIN (*expr_p) = NULL;
3401 else
3402 {
3403 enum gimplify_status t;
3404 t = gimplify_arg (&CALL_EXPR_STATIC_CHAIN (*expr_p), pre_p,
3405 EXPR_LOCATION (*expr_p), ! returns_twice);
3406 if (t == GS_ERROR)
3407 ret = GS_ERROR;
3408 }
3409 }
3410
3411 /* Verify the function result. */
3412 if (want_value && fndecl
3413 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fnptrtype))))
3414 {
3415 error_at (loc, "using result of function returning %<void%>");
3416 ret = GS_ERROR;
3417 }
3418
3419 /* Try this again in case gimplification exposed something. */
3420 if (ret != GS_ERROR)
3421 {
3422 tree new_tree = fold_call_expr (input_location, *expr_p, !want_value);
3423
3424 if (new_tree && new_tree != *expr_p)
3425 {
3426 /* There was a transformation of this call which computes the
3427 same value, but in a more efficient way. Return and try
3428 again. */
3429 *expr_p = new_tree;
3430 return GS_OK;
3431 }
3432 }
3433 else
3434 {
3435 *expr_p = error_mark_node;
3436 return GS_ERROR;
3437 }
3438
3439 /* If the function is "const" or "pure", then clear TREE_SIDE_EFFECTS on its
3440 decl. This allows us to eliminate redundant or useless
3441 calls to "const" functions. */
3442 if (TREE_CODE (*expr_p) == CALL_EXPR)
3443 {
3444 int flags = call_expr_flags (*expr_p);
3445 if (flags & (ECF_CONST | ECF_PURE)
3446 /* An infinite loop is considered a side effect. */
3447 && !(flags & (ECF_LOOPING_CONST_OR_PURE)))
3448 TREE_SIDE_EFFECTS (*expr_p) = 0;
3449 }
3450
3451 /* If the value is not needed by the caller, emit a new GIMPLE_CALL
3452 and clear *EXPR_P. Otherwise, leave *EXPR_P in its gimplified
3453 form and delegate the creation of a GIMPLE_CALL to
3454 gimplify_modify_expr. This is always possible because when
3455 WANT_VALUE is true, the caller wants the result of this call into
3456 a temporary, which means that we will emit an INIT_EXPR in
3457 internal_get_tmp_var which will then be handled by
3458 gimplify_modify_expr. */
3459 if (!want_value)
3460 {
3461 /* The CALL_EXPR in *EXPR_P is already in GIMPLE form, so all we
3462 have to do is replicate it as a GIMPLE_CALL tuple. */
3463 gimple_stmt_iterator gsi;
3464 call = gimple_build_call_from_tree (*expr_p, fnptrtype);
3465 notice_special_calls (call);
3466 gimplify_seq_add_stmt (pre_p, call);
3467 gsi = gsi_last (*pre_p);
3468 maybe_fold_stmt (&gsi);
3469 *expr_p = NULL_TREE;
3470 }
3471 else
3472 /* Remember the original function type. */
3473 CALL_EXPR_FN (*expr_p) = build1 (NOP_EXPR, fnptrtype,
3474 CALL_EXPR_FN (*expr_p));
3475
3476 return ret;
3477 }
3478
/* Handle shortcut semantics in the predicate operand of a COND_EXPR by
   rewriting it into multiple COND_EXPRs, and possibly GOTO_EXPRs.

   TRUE_LABEL_P and FALSE_LABEL_P point to the labels to jump to if the
   condition is true or false, respectively.  If null, we should generate
   our own to skip over the evaluation of this specific expression.

   LOCUS is the source location of the COND_EXPR.

   This function is the tree equivalent of do_jump.

   shortcut_cond_r should only be called by shortcut_cond_expr.  */

static tree
shortcut_cond_r (tree pred, tree *true_label_p, tree *false_label_p,
		 location_t locus)
{
  tree local_label = NULL_TREE;
  tree t, expr = NULL;

  /* OK, it's not a simple case; we need to pull apart the COND_EXPR to
     retain the shortcut semantics.  Just insert the gotos here;
     shortcut_cond_expr will append the real blocks later.  */
  if (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
    {
      location_t new_locus;

      /* Turn if (a && b) into

	 if (a); else goto no;
	 if (b) goto yes; else goto no;
	 (no:) */

      /* Both operands share the false target; if the caller did not
	 supply one, create a local label emitted at the end below.  */
      if (false_label_p == NULL)
	false_label_p = &local_label;

      /* Keep the original source location on the first 'if'.  */
      t = shortcut_cond_r (TREE_OPERAND (pred, 0), NULL, false_label_p, locus);
      append_to_statement_list (t, &expr);

      /* Set the source location of the && on the second 'if'.  */
      new_locus = rexpr_location (pred, locus);
      t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
			   new_locus);
      append_to_statement_list (t, &expr);
    }
  else if (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
    {
      location_t new_locus;

      /* Turn if (a || b) into

	 if (a) goto yes;
	 if (b) goto yes; else goto no;
	 (yes:) */

      /* Both operands share the true target; create a local label for
	 it if the caller did not supply one.  */
      if (true_label_p == NULL)
	true_label_p = &local_label;

      /* Keep the original source location on the first 'if'.  */
      t = shortcut_cond_r (TREE_OPERAND (pred, 0), true_label_p, NULL, locus);
      append_to_statement_list (t, &expr);

      /* Set the source location of the || on the second 'if'.  */
      new_locus = rexpr_location (pred, locus);
      t = shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p, false_label_p,
			   new_locus);
      append_to_statement_list (t, &expr);
    }
  else if (TREE_CODE (pred) == COND_EXPR
	   && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 1)))
	   && !VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (pred, 2))))
    {
      location_t new_locus;

      /* As long as we're messing with gotos, turn if (a ? b : c) into
	 if (a)
	   if (b) goto yes; else goto no;
	 else
	   if (c) goto yes; else goto no;

	 Don't do this if one of the arms has void type, which can happen
	 in C++ when the arm is throw.  */

      /* Keep the original source location on the first 'if'.  Set the source
	 location of the ? on the second 'if'.  */
      new_locus = rexpr_location (pred, locus);
      expr = build3 (COND_EXPR, void_type_node, TREE_OPERAND (pred, 0),
		     shortcut_cond_r (TREE_OPERAND (pred, 1), true_label_p,
				      false_label_p, locus),
		     shortcut_cond_r (TREE_OPERAND (pred, 2), true_label_p,
				      false_label_p, new_locus));
    }
  else
    {
      /* Base case: a simple predicate.  Emit a conditional jump to the
	 true or false label; build_and_jump creates the label decl on
	 demand when given a pointer to a NULL_TREE label.  */
      expr = build3 (COND_EXPR, void_type_node, pred,
		     build_and_jump (true_label_p),
		     build_and_jump (false_label_p));
      SET_EXPR_LOCATION (expr, locus);
    }

  /* If we created a local fall-through label above, place it at the end
     of the generated sequence.  */
  if (local_label)
    {
      t = build1 (LABEL_EXPR, void_type_node, local_label);
      append_to_statement_list (t, &expr);
    }

  return expr;
}
3588
3589 /* If EXPR is a GOTO_EXPR, return it. If it is a STATEMENT_LIST, skip
3590 any of its leading DEBUG_BEGIN_STMTS and recurse on the subsequent
3591 statement, if it is the last one. Otherwise, return NULL. */
3592
3593 static tree
find_goto(tree expr)3594 find_goto (tree expr)
3595 {
3596 if (!expr)
3597 return NULL_TREE;
3598
3599 if (TREE_CODE (expr) == GOTO_EXPR)
3600 return expr;
3601
3602 if (TREE_CODE (expr) != STATEMENT_LIST)
3603 return NULL_TREE;
3604
3605 tree_stmt_iterator i = tsi_start (expr);
3606
3607 while (!tsi_end_p (i) && TREE_CODE (tsi_stmt (i)) == DEBUG_BEGIN_STMT)
3608 tsi_next (&i);
3609
3610 if (!tsi_one_before_end_p (i))
3611 return NULL_TREE;
3612
3613 return find_goto (tsi_stmt (i));
3614 }
3615
3616 /* Same as find_goto, except that it returns NULL if the destination
3617 is not a LABEL_DECL. */
3618
3619 static inline tree
find_goto_label(tree expr)3620 find_goto_label (tree expr)
3621 {
3622 tree dest = find_goto (expr);
3623 if (dest && TREE_CODE (GOTO_DESTINATION (dest)) == LABEL_DECL)
3624 return dest;
3625 return NULL_TREE;
3626 }
3627
/* Given a conditional expression EXPR with short-circuit boolean
   predicates using TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR, break the
   predicate apart into the equivalent sequence of conditionals.
   Returns the rewritten expression (possibly EXPR unchanged when the
   predicate contains no short-circuit operators).  */

static tree
shortcut_cond_expr (tree expr)
{
  tree pred = TREE_OPERAND (expr, 0);
  tree then_ = TREE_OPERAND (expr, 1);
  tree else_ = TREE_OPERAND (expr, 2);
  tree true_label, false_label, end_label, t;
  tree *true_label_p;
  tree *false_label_p;
  bool emit_end, emit_false, jump_over_else;
  bool then_se = then_ && TREE_SIDE_EFFECTS (then_);
  bool else_se = else_ && TREE_SIDE_EFFECTS (else_);

  /* First do simple transformations.  */
  if (!else_se)
    {
      /* If there is no 'else', turn
	   if (a && b) then c
	 into
	   if (a) if (b) then c.  */
      while (TREE_CODE (pred) == TRUTH_ANDIF_EXPR)
	{
	  /* Keep the original source location on the first 'if'.  */
	  location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
	  TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
	  /* Set the source location of the && on the second 'if'.  */
	  if (rexpr_has_location (pred))
	    SET_EXPR_LOCATION (expr, rexpr_location (pred));
	  then_ = shortcut_cond_expr (expr);
	  then_se = then_ && TREE_SIDE_EFFECTS (then_);
	  pred = TREE_OPERAND (pred, 0);
	  expr = build3 (COND_EXPR, void_type_node, pred, then_, NULL_TREE);
	  SET_EXPR_LOCATION (expr, locus);
	}
    }

  if (!then_se)
    {
      /* If there is no 'then', turn
	   if (a || b); else d
	 into
	   if (a); else if (b); else d.  */
      while (TREE_CODE (pred) == TRUTH_ORIF_EXPR)
	{
	  /* Keep the original source location on the first 'if'.  */
	  location_t locus = EXPR_LOC_OR_LOC (expr, input_location);
	  TREE_OPERAND (expr, 0) = TREE_OPERAND (pred, 1);
	  /* Set the source location of the || on the second 'if'.  */
	  if (rexpr_has_location (pred))
	    SET_EXPR_LOCATION (expr, rexpr_location (pred));
	  else_ = shortcut_cond_expr (expr);
	  else_se = else_ && TREE_SIDE_EFFECTS (else_);
	  pred = TREE_OPERAND (pred, 0);
	  expr = build3 (COND_EXPR, void_type_node, pred, NULL_TREE, else_);
	  SET_EXPR_LOCATION (expr, locus);
	}
    }

  /* If we're done, great.  */
  if (TREE_CODE (pred) != TRUTH_ANDIF_EXPR
      && TREE_CODE (pred) != TRUTH_ORIF_EXPR)
    return expr;

  /* Otherwise we need to mess with gotos.  Change
       if (a) c; else d;
     to
       if (a); else goto no;
       c; goto end;
       no: d; end:
     and recursively gimplify the condition.  */

  true_label = false_label = end_label = NULL_TREE;

  /* If our arms just jump somewhere, hijack those labels so we don't
     generate jumps to jumps.  */

  if (tree then_goto = find_goto_label (then_))
    {
      true_label = GOTO_DESTINATION (then_goto);
      then_ = NULL;
      then_se = false;
    }

  if (tree else_goto = find_goto_label (else_))
    {
      false_label = GOTO_DESTINATION (else_goto);
      else_ = NULL;
      else_se = false;
    }

  /* If we aren't hijacking a label for the 'then' branch, it falls through.  */
  if (true_label)
    true_label_p = &true_label;
  else
    true_label_p = NULL;

  /* The 'else' branch also needs a label if it contains interesting code.  */
  if (false_label || else_se)
    false_label_p = &false_label;
  else
    false_label_p = NULL;

  /* If there was nothing else in our arms, just forward the label(s).  */
  if (!then_se && !else_se)
    return shortcut_cond_r (pred, true_label_p, false_label_p,
			    EXPR_LOC_OR_LOC (expr, input_location));

  /* If our last subexpression already has a terminal label, reuse it.  */
  if (else_se)
    t = expr_last (else_);
  else if (then_se)
    t = expr_last (then_);
  else
    t = NULL;
  if (t && TREE_CODE (t) == LABEL_EXPR)
    end_label = LABEL_EXPR_LABEL (t);

  /* If we don't care about jumping to the 'else' branch, jump to the end
     if the condition is false.  */
  if (!false_label_p)
    false_label_p = &end_label;

  /* We only want to emit these labels if we aren't hijacking them.  */
  emit_end = (end_label == NULL_TREE);
  emit_false = (false_label == NULL_TREE);

  /* We only emit the jump over the else clause if we have to--if the
     then clause may fall through.  Otherwise we can wind up with a
     useless jump and a useless label at the end of gimplified code,
     which will cause us to think that this conditional as a whole
     falls through even if it doesn't.  If we then inline a function
     which ends with such a condition, that can cause us to issue an
     inappropriate warning about control reaching the end of a
     non-void function.  */
  jump_over_else = block_may_fallthru (then_);

  /* Expand the predicate into conditional jumps; build_and_jump inside
     shortcut_cond_r fills in any labels still NULL_TREE here.  */
  pred = shortcut_cond_r (pred, true_label_p, false_label_p,
			  EXPR_LOC_OR_LOC (expr, input_location));

  expr = NULL;
  append_to_statement_list (pred, &expr);

  append_to_statement_list (then_, &expr);
  if (else_se)
    {
      if (jump_over_else)
	{
	  tree last = expr_last (expr);
	  t = build_and_jump (&end_label);
	  if (rexpr_has_location (last))
	    SET_EXPR_LOCATION (t, rexpr_location (last));
	  append_to_statement_list (t, &expr);
	}
      if (emit_false)
	{
	  t = build1 (LABEL_EXPR, void_type_node, false_label);
	  append_to_statement_list (t, &expr);
	}
      append_to_statement_list (else_, &expr);
    }
  if (emit_end && end_label)
    {
      t = build1 (LABEL_EXPR, void_type_node, end_label);
      append_to_statement_list (t, &expr);
    }

  return expr;
}
3800
/* EXPR is used in a boolean context; make sure it has BOOLEAN_TYPE.
   Returns EXPR itself (possibly with its type and operands adjusted
   in place) or EXPR converted to boolean_type_node.  */

tree
gimple_boolify (tree expr)
{
  tree type = TREE_TYPE (expr);
  location_t loc = EXPR_LOCATION (expr);

  if (TREE_CODE (expr) == NE_EXPR
      && TREE_CODE (TREE_OPERAND (expr, 0)) == CALL_EXPR
      && integer_zerop (TREE_OPERAND (expr, 1)))
    {
      tree call = TREE_OPERAND (expr, 0);
      tree fn = get_callee_fndecl (call);

      /* For __builtin_expect ((long) (x), y) recurse into x as well
	 if x is truth_value_p.  */
      if (fn
	  && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL
	  && DECL_FUNCTION_CODE (fn) == BUILT_IN_EXPECT
	  && call_expr_nargs (call) == 2)
	{
	  tree arg = CALL_EXPR_ARG (call, 0);
	  if (arg)
	    {
	      /* Look through a widening NOP_EXPR around the truth
		 value before boolifying it.  */
	      if (TREE_CODE (arg) == NOP_EXPR
		  && TREE_TYPE (arg) == TREE_TYPE (call))
		arg = TREE_OPERAND (arg, 0);
	      if (truth_value_p (TREE_CODE (arg)))
		{
		  arg = gimple_boolify (arg);
		  CALL_EXPR_ARG (call, 0)
		    = fold_convert_loc (loc, TREE_TYPE (call), arg);
		}
	    }
	}
    }

  switch (TREE_CODE (expr))
    {
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
      /* Also boolify the arguments of truth exprs.  */
      TREE_OPERAND (expr, 1) = gimple_boolify (TREE_OPERAND (expr, 1));
      /* FALLTHRU */

    case TRUTH_NOT_EXPR:
      TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));

      /* These expressions always produce boolean results.  */
      if (TREE_CODE (type) != BOOLEAN_TYPE)
	TREE_TYPE (expr) = boolean_type_node;
      return expr;

    case ANNOTATE_EXPR:
      switch ((enum annot_expr_kind) TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)))
	{
	case annot_expr_ivdep_kind:
	case annot_expr_unroll_kind:
	case annot_expr_no_vector_kind:
	case annot_expr_vector_kind:
	case annot_expr_parallel_kind:
	  /* The annotation applies to a condition, so boolify the
	     wrapped expression and the annotation itself.  */
	  TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));
	  if (TREE_CODE (type) != BOOLEAN_TYPE)
	    TREE_TYPE (expr) = boolean_type_node;
	  return expr;
	default:
	  gcc_unreachable ();
	}

    default:
      if (COMPARISON_CLASS_P (expr))
	{
	  /* These expressions always produce boolean results.  */
	  if (TREE_CODE (type) != BOOLEAN_TYPE)
	    TREE_TYPE (expr) = boolean_type_node;
	  return expr;
	}
      /* Other expressions that get here must have boolean values, but
	 might need to be converted to the appropriate mode.  */
      if (TREE_CODE (type) == BOOLEAN_TYPE)
	return expr;
      return fold_convert_loc (loc, boolean_type_node, expr);
    }
}
3889
3890 /* Given a conditional expression *EXPR_P without side effects, gimplify
3891 its operands. New statements are inserted to PRE_P. */
3892
3893 static enum gimplify_status
gimplify_pure_cond_expr(tree * expr_p,gimple_seq * pre_p)3894 gimplify_pure_cond_expr (tree *expr_p, gimple_seq *pre_p)
3895 {
3896 tree expr = *expr_p, cond;
3897 enum gimplify_status ret, tret;
3898 enum tree_code code;
3899
3900 cond = gimple_boolify (COND_EXPR_COND (expr));
3901
3902 /* We need to handle && and || specially, as their gimplification
3903 creates pure cond_expr, thus leading to an infinite cycle otherwise. */
3904 code = TREE_CODE (cond);
3905 if (code == TRUTH_ANDIF_EXPR)
3906 TREE_SET_CODE (cond, TRUTH_AND_EXPR);
3907 else if (code == TRUTH_ORIF_EXPR)
3908 TREE_SET_CODE (cond, TRUTH_OR_EXPR);
3909 ret = gimplify_expr (&cond, pre_p, NULL, is_gimple_condexpr, fb_rvalue);
3910 COND_EXPR_COND (*expr_p) = cond;
3911
3912 tret = gimplify_expr (&COND_EXPR_THEN (expr), pre_p, NULL,
3913 is_gimple_val, fb_rvalue);
3914 ret = MIN (ret, tret);
3915 tret = gimplify_expr (&COND_EXPR_ELSE (expr), pre_p, NULL,
3916 is_gimple_val, fb_rvalue);
3917
3918 return MIN (ret, tret);
3919 }
3920
3921 /* Return true if evaluating EXPR could trap.
3922 EXPR is GENERIC, while tree_could_trap_p can be called
3923 only on GIMPLE. */
3924
3925 static bool
generic_expr_could_trap_p(tree expr)3926 generic_expr_could_trap_p (tree expr)
3927 {
3928 unsigned i, n;
3929
3930 if (!expr || is_gimple_val (expr))
3931 return false;
3932
3933 if (!EXPR_P (expr) || tree_could_trap_p (expr))
3934 return true;
3935
3936 n = TREE_OPERAND_LENGTH (expr);
3937 for (i = 0; i < n; i++)
3938 if (generic_expr_could_trap_p (TREE_OPERAND (expr, i)))
3939 return true;
3940
3941 return false;
3942 }
3943
/* Convert the conditional expression pointed to by EXPR_P '(p) ? a : b;'
   into

   if (p)			if (p)
     t1 = a;			  a;
   else		or		else
     t1 = b;			  b;
   t1;

   The second form is used when *EXPR_P is of type void.

   PRE_P points to the list where side effects that must happen before
   *EXPR_P should be stored.

   FALLBACK tells which kinds of result (rvalue/lvalue) the caller can
   accept; it decides whether the value case uses a plain temporary or
   a pointer to the selected arm.  Returns the gimplification status.  */

static enum gimplify_status
gimplify_cond_expr (tree *expr_p, gimple_seq *pre_p, fallback_t fallback)
{
  tree expr = *expr_p;
  tree type = TREE_TYPE (expr);
  location_t loc = EXPR_LOCATION (expr);
  tree tmp, arm1, arm2;
  enum gimplify_status ret;
  tree label_true, label_false, label_cont;
  bool have_then_clause_p, have_else_clause_p;
  gcond *cond_stmt;
  enum tree_code pred_code;
  gimple_seq seq = NULL;

  /* If this COND_EXPR has a value, copy the values into a temporary within
     the arms.  */
  if (!VOID_TYPE_P (type))
    {
      tree then_ = TREE_OPERAND (expr, 1), else_ = TREE_OPERAND (expr, 2);
      tree result;

      /* If either an rvalue is ok or we do not require an lvalue, create the
	 temporary.  But we cannot do that if the type is addressable.  */
      if (((fallback & fb_rvalue) || !(fallback & fb_lvalue))
	  && !TREE_ADDRESSABLE (type))
	{
	  if (gimplify_ctxp->allow_rhs_cond_expr
	      /* If either branch has side effects or could trap, it can't be
		 evaluated unconditionally.  */
	      && !TREE_SIDE_EFFECTS (then_)
	      && !generic_expr_could_trap_p (then_)
	      && !TREE_SIDE_EFFECTS (else_)
	      && !generic_expr_could_trap_p (else_))
	    /* Both arms are pure: keep the COND_EXPR as an rvalue and just
	       gimplify its operands.  */
	    return gimplify_pure_cond_expr (expr_p, pre_p);

	  tmp = create_tmp_var (type, "iftmp");
	  result = tmp;
	}

      /* Otherwise, only create and copy references to the values.  */
      else
	{
	  type = build_pointer_type (type);

	  if (!VOID_TYPE_P (TREE_TYPE (then_)))
	    then_ = build_fold_addr_expr_loc (loc, then_);

	  if (!VOID_TYPE_P (TREE_TYPE (else_)))
	    else_ = build_fold_addr_expr_loc (loc, else_);

	  expr
	    = build3 (COND_EXPR, type, TREE_OPERAND (expr, 0), then_, else_);

	  /* The temporary holds a pointer; the result is what it points
	     to, which is an lvalue.  */
	  tmp = create_tmp_var (type, "iftmp");
	  result = build_simple_mem_ref_loc (loc, tmp);
	}

      /* Build the new then clause, `tmp = then_;'.  But don't build the
	 assignment if the value is void; in C++ it can be if it's a throw.  */
      if (!VOID_TYPE_P (TREE_TYPE (then_)))
	TREE_OPERAND (expr, 1) = build2 (MODIFY_EXPR, type, tmp, then_);

      /* Similarly, build the new else clause, `tmp = else_;'.  */
      if (!VOID_TYPE_P (TREE_TYPE (else_)))
	TREE_OPERAND (expr, 2) = build2 (MODIFY_EXPR, type, tmp, else_);

      TREE_TYPE (expr) = void_type_node;
      recalculate_side_effects (expr);

      /* Move the COND_EXPR to the prequeue.  */
      gimplify_stmt (&expr, pre_p);

      *expr_p = result;
      return GS_ALL_DONE;
    }

  /* Remove any COMPOUND_EXPR so the following cases will be caught.  */
  STRIP_TYPE_NOPS (TREE_OPERAND (expr, 0));
  if (TREE_CODE (TREE_OPERAND (expr, 0)) == COMPOUND_EXPR)
    gimplify_compound_expr (&TREE_OPERAND (expr, 0), pre_p, true);

  /* Make sure the condition has BOOLEAN_TYPE.  */
  TREE_OPERAND (expr, 0) = gimple_boolify (TREE_OPERAND (expr, 0));

  /* Break apart && and || conditions.  */
  if (TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ANDIF_EXPR
      || TREE_CODE (TREE_OPERAND (expr, 0)) == TRUTH_ORIF_EXPR)
    {
      expr = shortcut_cond_expr (expr);

      if (expr != *expr_p)
	{
	  *expr_p = expr;

	  /* We can't rely on gimplify_expr to re-gimplify the expanded
	     form properly, as cleanups might cause the target labels to be
	     wrapped in a TRY_FINALLY_EXPR.  To prevent that, we need to
	     set up a conditional context.  */
	  gimple_push_condition ();
	  gimplify_stmt (expr_p, &seq);
	  gimple_pop_condition (pre_p);
	  gimple_seq_add_seq (pre_p, seq);

	  return GS_ALL_DONE;
	}
    }

  /* Now do the normal gimplification.  */

  /* Gimplify condition.  */
  ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, NULL, is_gimple_condexpr,
		       fb_rvalue);
  if (ret == GS_ERROR)
    return GS_ERROR;
  gcc_assert (TREE_OPERAND (expr, 0) != NULL_TREE);

  gimple_push_condition ();

  /* If an arm is just a goto to a label in this function, branch to
     that label directly from the GIMPLE_COND instead of emitting an
     artificial label followed by a goto.  */
  have_then_clause_p = have_else_clause_p = false;
  label_true = find_goto_label (TREE_OPERAND (expr, 1));
  if (label_true
      && DECL_CONTEXT (GOTO_DESTINATION (label_true)) == current_function_decl
      /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
	 have different locations, otherwise we end up with incorrect
	 location information on the branches.  */
      && (optimize
	  || !EXPR_HAS_LOCATION (expr)
	  || !rexpr_has_location (label_true)
	  || EXPR_LOCATION (expr) == rexpr_location (label_true)))
    {
      have_then_clause_p = true;
      label_true = GOTO_DESTINATION (label_true);
    }
  else
    label_true = create_artificial_label (UNKNOWN_LOCATION);
  label_false = find_goto_label (TREE_OPERAND (expr, 2));
  if (label_false
      && DECL_CONTEXT (GOTO_DESTINATION (label_false)) == current_function_decl
      /* For -O0 avoid this optimization if the COND_EXPR and GOTO_EXPR
	 have different locations, otherwise we end up with incorrect
	 location information on the branches.  */
      && (optimize
	  || !EXPR_HAS_LOCATION (expr)
	  || !rexpr_has_location (label_false)
	  || EXPR_LOCATION (expr) == rexpr_location (label_false)))
    {
      have_else_clause_p = true;
      label_false = GOTO_DESTINATION (label_false);
    }
  else
    label_false = create_artificial_label (UNKNOWN_LOCATION);

  /* Emit the two-way branch itself and try to fold it immediately.  */
  gimple_cond_get_ops_from_tree (COND_EXPR_COND (expr), &pred_code, &arm1,
				 &arm2);
  cond_stmt = gimple_build_cond (pred_code, arm1, arm2, label_true,
				 label_false);
  gimple_set_no_warning (cond_stmt, TREE_NO_WARNING (COND_EXPR_COND (expr)));
  gimplify_seq_add_stmt (&seq, cond_stmt);
  gimple_stmt_iterator gsi = gsi_last (seq);
  maybe_fold_stmt (&gsi);

  label_cont = NULL_TREE;
  if (!have_then_clause_p)
    {
      /* For if (...) {} else { code; } put label_true after
	 the else block.  */
      if (TREE_OPERAND (expr, 1) == NULL_TREE
	  && !have_else_clause_p
	  && TREE_OPERAND (expr, 2) != NULL_TREE)
	label_cont = label_true;
      else
	{
	  gimplify_seq_add_stmt (&seq, gimple_build_label (label_true));
	  have_then_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 1), &seq);
	  /* For if (...) { code; } else {} or
	     if (...) { code; } else goto label; or
	     if (...) { code; return; } else { ... }
	     label_cont isn't needed.  */
	  if (!have_else_clause_p
	      && TREE_OPERAND (expr, 2) != NULL_TREE
	      && gimple_seq_may_fallthru (seq))
	    {
	      gimple *g;
	      label_cont = create_artificial_label (UNKNOWN_LOCATION);

	      g = gimple_build_goto (label_cont);

	      /* GIMPLE_COND's are very low level; they have embedded
		 gotos.  This particular embedded goto should not be marked
		 with the location of the original COND_EXPR, as it would
		 correspond to the COND_EXPR's condition, not the ELSE or the
		 THEN arms.  To avoid marking it with the wrong location, flag
		 it as "no location".  */
	      gimple_set_do_not_emit_location (g);

	      gimplify_seq_add_stmt (&seq, g);
	    }
	}
    }
  if (!have_else_clause_p)
    {
      gimplify_seq_add_stmt (&seq, gimple_build_label (label_false));
      have_else_clause_p = gimplify_stmt (&TREE_OPERAND (expr, 2), &seq);
    }
  if (label_cont)
    gimplify_seq_add_stmt (&seq, gimple_build_label (label_cont));

  gimple_pop_condition (pre_p);
  gimple_seq_add_seq (pre_p, seq);

  if (ret == GS_ERROR)
    ; /* Do nothing.  */
  else if (have_then_clause_p || have_else_clause_p)
    ret = GS_ALL_DONE;
  else
    {
      /* Both arms are empty; replace the COND_EXPR with its predicate.  */
      expr = TREE_OPERAND (expr, 0);
      gimplify_stmt (&expr, pre_p);
    }

  /* The COND_EXPR itself was emitted into PRE_P; nothing remains to be
     evaluated in place.  */
  *expr_p = NULL;
  return ret;
}
4182
4183 /* Prepare the node pointed to by EXPR_P, an is_gimple_addressable expression,
4184 to be marked addressable.
4185
4186 We cannot rely on such an expression being directly markable if a temporary
4187 has been created by the gimplification. In this case, we create another
4188 temporary and initialize it with a copy, which will become a store after we
4189 mark it addressable. This can happen if the front-end passed us something
4190 that it could not mark addressable yet, like a Fortran pass-by-reference
4191 parameter (int) floatvar. */
4192
4193 static void
prepare_gimple_addressable(tree * expr_p,gimple_seq * seq_p)4194 prepare_gimple_addressable (tree *expr_p, gimple_seq *seq_p)
4195 {
4196 while (handled_component_p (*expr_p))
4197 expr_p = &TREE_OPERAND (*expr_p, 0);
4198 if (is_gimple_reg (*expr_p))
4199 {
4200 /* Do not allow an SSA name as the temporary. */
4201 tree var = get_initialized_tmp_var (*expr_p, seq_p, NULL, false);
4202 DECL_GIMPLE_REG_P (var) = 0;
4203 *expr_p = var;
4204 }
4205 }
4206
/* A subroutine of gimplify_modify_expr.  Replace a MODIFY_EXPR with
   a call to __builtin_memcpy.

   SIZE is the number of bytes to copy.  If WANT_VALUE is true, the
   memcpy return value (the destination pointer) is captured in a
   temporary and *EXPR_P is replaced by a dereference of it; otherwise
   *EXPR_P is cleared.  Statements are appended to SEQ_P.  */

static enum gimplify_status
gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value,
				gimple_seq *seq_p)
{
  tree t, to, to_ptr, from, from_ptr;
  gcall *gs;
  location_t loc = EXPR_LOCATION (*expr_p);

  to = TREE_OPERAND (*expr_p, 0);
  from = TREE_OPERAND (*expr_p, 1);

  /* Mark the RHS addressable.  Beware that it may not be possible to do so
     directly if a temporary has been created by the gimplification.  */
  prepare_gimple_addressable (&from, seq_p);

  mark_addressable (from);
  from_ptr = build_fold_addr_expr_loc (loc, from);
  gimplify_arg (&from_ptr, seq_p, loc);

  mark_addressable (to);
  to_ptr = build_fold_addr_expr_loc (loc, to);
  gimplify_arg (&to_ptr, seq_p, loc);

  t = builtin_decl_implicit (BUILT_IN_MEMCPY);

  gs = gimple_build_call (t, 3, to_ptr, from_ptr, size);

  if (want_value)
    {
      /* tmp = memcpy() */
      t = create_tmp_var (TREE_TYPE (to_ptr));
      gimple_call_set_lhs (gs, t);
      gimplify_seq_add_stmt (seq_p, gs);

      /* memcpy returns the destination pointer, so *tmp is the stored
	 value.  */
      *expr_p = build_simple_mem_ref (t);
      return GS_ALL_DONE;
    }

  gimplify_seq_add_stmt (seq_p, gs);
  *expr_p = NULL;
  return GS_ALL_DONE;
}
4252
/* A subroutine of gimplify_modify_expr.  Replace a MODIFY_EXPR with
   a call to __builtin_memset.  In this case we know that the RHS is
   a CONSTRUCTOR with an empty element list.

   SIZE is the number of bytes to clear.  If WANT_VALUE is true, the
   memset return value (the destination pointer) is captured in a
   temporary and *EXPR_P is replaced by a dereference of it; otherwise
   *EXPR_P is cleared.  Statements are appended to SEQ_P.  */

static enum gimplify_status
gimplify_modify_expr_to_memset (tree *expr_p, tree size, bool want_value,
				gimple_seq *seq_p)
{
  tree t, from, to, to_ptr;
  gcall *gs;
  location_t loc = EXPR_LOCATION (*expr_p);

  /* Assert our assumptions, to abort instead of producing wrong code
     silently if they are not met.  Beware that the RHS CONSTRUCTOR might
     not be immediately exposed.  */
  from = TREE_OPERAND (*expr_p, 1);
  if (TREE_CODE (from) == WITH_SIZE_EXPR)
    from = TREE_OPERAND (from, 0);

  gcc_assert (TREE_CODE (from) == CONSTRUCTOR
	      && vec_safe_is_empty (CONSTRUCTOR_ELTS (from)));

  /* Now proceed.  */
  to = TREE_OPERAND (*expr_p, 0);

  to_ptr = build_fold_addr_expr_loc (loc, to);
  gimplify_arg (&to_ptr, seq_p, loc);
  t = builtin_decl_implicit (BUILT_IN_MEMSET);

  gs = gimple_build_call (t, 3, to_ptr, integer_zero_node, size);

  if (want_value)
    {
      /* tmp = memset() */
      t = create_tmp_var (TREE_TYPE (to_ptr));
      gimple_call_set_lhs (gs, t);
      gimplify_seq_add_stmt (seq_p, gs);

      /* memset returns the destination pointer, so *tmp is the cleared
	 object.  */
      *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t);
      return GS_ALL_DONE;
    }

  gimplify_seq_add_stmt (seq_p, gs);
  *expr_p = NULL;
  return GS_ALL_DONE;
}
4299
/* A subroutine of gimplify_init_ctor_preeval.  Called via walk_tree,
   determine, cautiously, if a CONSTRUCTOR overlaps the lhs of an
   assignment.  Return non-null if we detect a potential overlap.  */

/* Context describing the LHS of the assignment, shared between
   gimplify_init_ctor_preeval and its walk_tree callback below.  */
struct gimplify_init_ctor_preeval_data
{
  /* The base decl of the lhs object.  May be NULL, in which case we
     have to assume the lhs is indirect.  */
  tree lhs_base_decl;

  /* The alias set of the lhs object.  */
  alias_set_type lhs_alias_set;
};
4313
/* walk_tree callback for gimplify_init_ctor_preeval: returning a
   non-NULL tree stops the walk and signals a potential overlap with
   the LHS described by XDATA.  */

static tree
gimplify_init_ctor_preeval_1 (tree *tp, int *walk_subtrees, void *xdata)
{
  struct gimplify_init_ctor_preeval_data *data
    = (struct gimplify_init_ctor_preeval_data *) xdata;
  tree t = *tp;

  /* If we find the base object, obviously we have overlap.  */
  if (data->lhs_base_decl == t)
    return t;

  /* If the constructor component is indirect, determine if we have a
     potential overlap with the lhs.  The only bits of information we
     have to go on at this point are addressability and alias sets.  */
  if ((INDIRECT_REF_P (t)
       || TREE_CODE (t) == MEM_REF)
      && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
      && alias_sets_conflict_p (data->lhs_alias_set, get_alias_set (t)))
    return t;

  /* If the constructor component is a call, determine if it can hide a
     potential overlap with the lhs through an INDIRECT_REF like above.
     ??? Ugh - this is completely broken.  In fact this whole analysis
     doesn't look conservative.  */
  if (TREE_CODE (t) == CALL_EXPR)
    {
      tree type, fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t)));

      /* Check each pointer-typed parameter of the callee against the
	 LHS alias set.  */
      for (type = TYPE_ARG_TYPES (fntype); type; type = TREE_CHAIN (type))
	if (POINTER_TYPE_P (TREE_VALUE (type))
	    && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
	    && alias_sets_conflict_p (data->lhs_alias_set,
				      get_alias_set
				        (TREE_TYPE (TREE_VALUE (type)))))
	  return t;
    }

  /* Types and decls have no interesting subtrees for this analysis.  */
  if (IS_TYPE_OR_DECL_P (t))
    *walk_subtrees = 0;
  return NULL;
}
4355
/* A subroutine of gimplify_init_constructor.  Pre-evaluate EXPR,
   force values that overlap with the lhs (as described by *DATA)
   into temporaries.

   New statements go to PRE_P/POST_P.  On gimplification failure
   *EXPR_P is set to NULL (the caller skips NULL values).  */

static void
gimplify_init_ctor_preeval (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
			    struct gimplify_init_ctor_preeval_data *data)
{
  enum gimplify_status one;

  /* If the value is constant, then there's nothing to pre-evaluate.  */
  if (TREE_CONSTANT (*expr_p))
    {
      /* Ensure it does not have side effects, it might contain a reference to
	 the object we're initializing.  */
      gcc_assert (!TREE_SIDE_EFFECTS (*expr_p));
      return;
    }

  /* If the type has non-trivial constructors, we can't pre-evaluate.  */
  if (TREE_ADDRESSABLE (TREE_TYPE (*expr_p)))
    return;

  /* Recurse for nested constructors.  */
  if (TREE_CODE (*expr_p) == CONSTRUCTOR)
    {
      unsigned HOST_WIDE_INT ix;
      constructor_elt *ce;
      vec<constructor_elt, va_gc> *v = CONSTRUCTOR_ELTS (*expr_p);

      FOR_EACH_VEC_SAFE_ELT (v, ix, ce)
	gimplify_init_ctor_preeval (&ce->value, pre_p, post_p, data);

      return;
    }

  /* If this is a variable sized type, we must remember the size.  */
  maybe_with_size_expr (expr_p);

  /* Gimplify the constructor element to something appropriate for the rhs
     of a MODIFY_EXPR.  Given that we know the LHS is an aggregate, we know
     the gimplifier will consider this a store to memory.  Doing this
     gimplification now means that we won't have to deal with complicated
     language-specific trees, nor trees like SAVE_EXPR that can induce
     exponential search behavior.  */
  one = gimplify_expr (expr_p, pre_p, post_p, is_gimple_mem_rhs, fb_rvalue);
  if (one == GS_ERROR)
    {
      *expr_p = NULL;
      return;
    }

  /* If we gimplified to a bare decl, we can be sure that it doesn't overlap
     with the lhs, since "a = { .x=a }" doesn't make sense.  This will
     always be true for all scalars, since is_gimple_mem_rhs insists on a
     temporary variable for them.  */
  if (DECL_P (*expr_p))
    return;

  /* If this is of variable size, we have no choice but to assume it doesn't
     overlap since we can't make a temporary for it.  */
  if (TREE_CODE (TYPE_SIZE (TREE_TYPE (*expr_p))) != INTEGER_CST)
    return;

  /* Otherwise, we must search for overlap ...  */
  if (!walk_tree (expr_p, gimplify_init_ctor_preeval_1, data, NULL))
    return;

  /* ... and if found, force the value into a temporary.  */
  *expr_p = get_formal_tmp_var (*expr_p, pre_p);
}
4427
/* A subroutine of gimplify_init_ctor_eval.  Create a loop for
   a RANGE_EXPR in a CONSTRUCTOR for an array.

      var = lower;
    loop_entry:
      object[var] = value;
      if (var == upper)
	goto loop_exit;
      var = var + 1;
      goto loop_entry;
    loop_exit:

   We increment var _after_ the loop exit check because we might otherwise
   fail if upper == TYPE_MAX_VALUE (type for upper).

   Note that we never have to deal with SAVE_EXPRs here, because this has
   already been taken care of for us, in gimplify_init_ctor_preeval().  */

static void gimplify_init_ctor_eval (tree, vec<constructor_elt, va_gc> *,
				     gimple_seq *, bool);

static void
gimplify_init_ctor_eval_range (tree object, tree lower, tree upper,
			       tree value, tree array_elt_type,
			       gimple_seq *pre_p, bool cleared)
{
  tree loop_entry_label, loop_exit_label, fall_thru_label;
  tree var, var_type, cref, tmp;

  loop_entry_label = create_artificial_label (UNKNOWN_LOCATION);
  loop_exit_label = create_artificial_label (UNKNOWN_LOCATION);
  fall_thru_label = create_artificial_label (UNKNOWN_LOCATION);

  /* Create and initialize the index variable.  */
  var_type = TREE_TYPE (upper);
  var = create_tmp_var (var_type);
  gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, lower));

  /* Add the loop entry label.  */
  gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_entry_label));

  /* Build the reference.  */
  cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
		 var, NULL_TREE, NULL_TREE);

  /* If we are a constructor, just call gimplify_init_ctor_eval to do
     the store.  Otherwise just assign value to the reference.  */

  if (TREE_CODE (value) == CONSTRUCTOR)
    /* NB we might have to call ourself recursively through
       gimplify_init_ctor_eval if the value is a constructor.  */
    gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
			     pre_p, cleared);
  else
    gimplify_seq_add_stmt (pre_p, gimple_build_assign (cref, value));

  /* We exit the loop when the index var is equal to the upper bound.  */
  gimplify_seq_add_stmt (pre_p,
			 gimple_build_cond (EQ_EXPR, var, upper,
					    loop_exit_label, fall_thru_label));

  gimplify_seq_add_stmt (pre_p, gimple_build_label (fall_thru_label));

  /* Otherwise, increment the index var...  */
  tmp = build2 (PLUS_EXPR, var_type, var,
		fold_convert (var_type, integer_one_node));
  gimplify_seq_add_stmt (pre_p, gimple_build_assign (var, tmp));

  /* ...and jump back to the loop entry.  */
  gimplify_seq_add_stmt (pre_p, gimple_build_goto (loop_entry_label));

  /* Add the loop exit label.  */
  gimplify_seq_add_stmt (pre_p, gimple_build_label (loop_exit_label));
}
4502
4503 /* Return true if FDECL is accessing a field that is zero sized. */
4504
4505 static bool
zero_sized_field_decl(const_tree fdecl)4506 zero_sized_field_decl (const_tree fdecl)
4507 {
4508 if (TREE_CODE (fdecl) == FIELD_DECL && DECL_SIZE (fdecl)
4509 && integer_zerop (DECL_SIZE (fdecl)))
4510 return true;
4511 return false;
4512 }
4513
4514 /* Return true if TYPE is zero sized. */
4515
4516 static bool
zero_sized_type(const_tree type)4517 zero_sized_type (const_tree type)
4518 {
4519 if (AGGREGATE_TYPE_P (type) && TYPE_SIZE (type)
4520 && integer_zerop (TYPE_SIZE (type)))
4521 return true;
4522 return false;
4523 }
4524
/* A subroutine of gimplify_init_constructor.  Generate individual
   MODIFY_EXPRs for a CONSTRUCTOR.  OBJECT is the LHS against which the
   assignments should happen.  ELTS is the CONSTRUCTOR_ELTS of the
   CONSTRUCTOR.  CLEARED is true if the entire LHS object has been
   zeroed first.  Statements are appended to PRE_P.  */

static void
gimplify_init_ctor_eval (tree object, vec<constructor_elt, va_gc> *elts,
			 gimple_seq *pre_p, bool cleared)
{
  tree array_elt_type = NULL;
  unsigned HOST_WIDE_INT ix;
  tree purpose, value;

  if (TREE_CODE (TREE_TYPE (object)) == ARRAY_TYPE)
    array_elt_type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (object)));

  FOR_EACH_CONSTRUCTOR_ELT (elts, ix, purpose, value)
    {
      tree cref;

      /* NULL values are created above for gimplification errors.  */
      if (value == NULL)
	continue;

      /* If the whole object was already zeroed, skip elements that
	 would only store zero again.  */
      if (cleared && initializer_zerop (value))
	continue;

      /* ??? Here's to hoping the front end fills in all of the indices,
	 so we don't have to figure out what's missing ourselves.  */
      gcc_assert (purpose);

      /* Skip zero-sized fields, unless value has side-effects.  This can
	 happen with calls to functions returning a zero-sized type, which
	 we shouldn't discard.  As a number of downstream passes don't
	 expect sets of zero-sized fields, we rely on the gimplification of
	 the MODIFY_EXPR we make below to drop the assignment statement.  */
      if (! TREE_SIDE_EFFECTS (value) && zero_sized_field_decl (purpose))
	continue;

      /* If we have a RANGE_EXPR, we have to build a loop to assign the
	 whole range.  */
      if (TREE_CODE (purpose) == RANGE_EXPR)
	{
	  tree lower = TREE_OPERAND (purpose, 0);
	  tree upper = TREE_OPERAND (purpose, 1);

	  /* If the lower bound is equal to upper, just treat it as if
	     upper was the index.  */
	  if (simple_cst_equal (lower, upper))
	    purpose = upper;
	  else
	    {
	      gimplify_init_ctor_eval_range (object, lower, upper, value,
					     array_elt_type, pre_p, cleared);
	      continue;
	    }
	}

      /* Build the reference to the element being initialized: an
	 ARRAY_REF for arrays, a COMPONENT_REF otherwise.  */
      if (array_elt_type)
	{
	  /* Do not use bitsizetype for ARRAY_REF indices.  */
	  if (TYPE_DOMAIN (TREE_TYPE (object)))
	    purpose
	      = fold_convert (TREE_TYPE (TYPE_DOMAIN (TREE_TYPE (object))),
			      purpose);
	  cref = build4 (ARRAY_REF, array_elt_type, unshare_expr (object),
			 purpose, NULL_TREE, NULL_TREE);
	}
      else
	{
	  gcc_assert (TREE_CODE (purpose) == FIELD_DECL);
	  cref = build3 (COMPONENT_REF, TREE_TYPE (purpose),
			 unshare_expr (object), purpose, NULL_TREE);
	}

      if (TREE_CODE (value) == CONSTRUCTOR
	  && TREE_CODE (TREE_TYPE (value)) != VECTOR_TYPE)
	gimplify_init_ctor_eval (cref, CONSTRUCTOR_ELTS (value),
				 pre_p, cleared);
      else
	{
	  tree init = build2 (INIT_EXPR, TREE_TYPE (cref), cref, value);
	  gimplify_and_add (init, pre_p);
	  ggc_free (init);
	}
    }
}
4613
4614 /* Return the appropriate RHS predicate for this LHS. */
4615
4616 gimple_predicate
rhs_predicate_for(tree lhs)4617 rhs_predicate_for (tree lhs)
4618 {
4619 if (is_gimple_reg (lhs))
4620 return is_gimple_reg_rhs_or_call;
4621 else
4622 return is_gimple_mem_rhs_or_call;
4623 }
4624
4625 /* Return the initial guess for an appropriate RHS predicate for this LHS,
4626 before the LHS has been gimplified. */
4627
4628 static gimple_predicate
initial_rhs_predicate_for(tree lhs)4629 initial_rhs_predicate_for (tree lhs)
4630 {
4631 if (is_gimple_reg_type (TREE_TYPE (lhs)))
4632 return is_gimple_reg_rhs_or_call;
4633 else
4634 return is_gimple_mem_rhs_or_call;
4635 }
4636
/* Gimplify a C99 compound literal expression.  This just means adding
   the DECL_EXPR before the current statement and using its anonymous
   decl instead.

   GIMPLE_TEST_F and FALLBACK describe what the caller can accept, and
   decide whether the literal's initializer can be substituted
   directly.  */

static enum gimplify_status
gimplify_compound_literal_expr (tree *expr_p, gimple_seq *pre_p,
				bool (*gimple_test_f) (tree),
				fallback_t fallback)
{
  tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (*expr_p);
  tree decl = DECL_EXPR_DECL (decl_s);
  tree init = DECL_INITIAL (decl);
  /* Mark the decl as addressable if the compound literal
     expression is addressable now, otherwise it is marked too late
     after we gimplify the initialization expression.  */
  if (TREE_ADDRESSABLE (*expr_p))
    TREE_ADDRESSABLE (decl) = 1;
  /* Otherwise, if we don't need an lvalue and have a literal directly
     substitute it.  Check if it matches the gimple predicate, as
     otherwise we'd generate a new temporary, and we can as well just
     use the decl we already have.  */
  else if (!TREE_ADDRESSABLE (decl)
	   && !TREE_THIS_VOLATILE (decl)
	   && init
	   && (fallback & fb_lvalue) == 0
	   && gimple_test_f (init))
    {
      *expr_p = init;
      return GS_OK;
    }

  /* Preliminarily mark non-addressed complex variables as eligible
     for promotion to gimple registers.  We'll transform their uses
     as we find them.  */
  if ((TREE_CODE (TREE_TYPE (decl)) == COMPLEX_TYPE
       || TREE_CODE (TREE_TYPE (decl)) == VECTOR_TYPE)
      && !TREE_THIS_VOLATILE (decl)
      && !needs_to_live_in_memory (decl))
    DECL_GIMPLE_REG_P (decl) = 1;

  /* If the decl is not addressable, then it is being used in some
     expression or on the right hand side of a statement, and it can
     be put into a readonly data section.  */
  if (!TREE_ADDRESSABLE (decl) && (fallback & fb_lvalue) == 0)
    TREE_READONLY (decl) = 1;

  /* This decl isn't mentioned in the enclosing block, so add it to the
     list of temps.  FIXME it seems a bit of a kludge to say that
     anonymous artificial vars aren't pushed, but everything else is.  */
  if (DECL_NAME (decl) == NULL_TREE && !DECL_SEEN_IN_BIND_EXPR_P (decl))
    gimple_add_tmp_var (decl);

  /* Emit the DECL_EXPR (and thus the initialization) into the
     prequeue, then use the decl in place of the literal.  */
  gimplify_and_add (decl_s, pre_p);
  *expr_p = decl;
  return GS_OK;
}
4693
4694 /* Optimize embedded COMPOUND_LITERAL_EXPRs within a CONSTRUCTOR,
4695 return a new CONSTRUCTOR if something changed. */
4696
4697 static tree
optimize_compound_literals_in_ctor(tree orig_ctor)4698 optimize_compound_literals_in_ctor (tree orig_ctor)
4699 {
4700 tree ctor = orig_ctor;
4701 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (ctor);
4702 unsigned int idx, num = vec_safe_length (elts);
4703
4704 for (idx = 0; idx < num; idx++)
4705 {
4706 tree value = (*elts)[idx].value;
4707 tree newval = value;
4708 if (TREE_CODE (value) == CONSTRUCTOR)
4709 newval = optimize_compound_literals_in_ctor (value);
4710 else if (TREE_CODE (value) == COMPOUND_LITERAL_EXPR)
4711 {
4712 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (value);
4713 tree decl = DECL_EXPR_DECL (decl_s);
4714 tree init = DECL_INITIAL (decl);
4715
4716 if (!TREE_ADDRESSABLE (value)
4717 && !TREE_ADDRESSABLE (decl)
4718 && init
4719 && TREE_CODE (init) == CONSTRUCTOR)
4720 newval = optimize_compound_literals_in_ctor (init);
4721 }
4722 if (newval == value)
4723 continue;
4724
4725 if (ctor == orig_ctor)
4726 {
4727 ctor = copy_node (orig_ctor);
4728 CONSTRUCTOR_ELTS (ctor) = vec_safe_copy (elts);
4729 elts = CONSTRUCTOR_ELTS (ctor);
4730 }
4731 (*elts)[idx].value = newval;
4732 }
4733 return ctor;
4734 }
4735
4736 /* A subroutine of gimplify_modify_expr. Break out elements of a
4737 CONSTRUCTOR used as an initializer into separate MODIFY_EXPRs.
4738
4739 Note that we still need to clear any elements that don't have explicit
4740 initializers, so if not all elements are initialized we keep the
4741 original MODIFY_EXPR, we just remove all of the constructor elements.
4742
4743 If NOTIFY_TEMP_CREATION is true, do not gimplify, just return
4744 GS_ERROR if we would have to create a temporary when gimplifying
4745 this constructor. Otherwise, return GS_OK.
4746
4747 If NOTIFY_TEMP_CREATION is false, just do the gimplification. */
4748
4749 static enum gimplify_status
gimplify_init_constructor(tree * expr_p,gimple_seq * pre_p,gimple_seq * post_p,bool want_value,bool notify_temp_creation)4750 gimplify_init_constructor (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
4751 bool want_value, bool notify_temp_creation)
4752 {
4753 tree object, ctor, type;
4754 enum gimplify_status ret;
4755 vec<constructor_elt, va_gc> *elts;
4756
4757 gcc_assert (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == CONSTRUCTOR);
4758
4759 if (!notify_temp_creation)
4760 {
4761 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
4762 is_gimple_lvalue, fb_lvalue);
4763 if (ret == GS_ERROR)
4764 return ret;
4765 }
4766
4767 object = TREE_OPERAND (*expr_p, 0);
4768 ctor = TREE_OPERAND (*expr_p, 1)
4769 = optimize_compound_literals_in_ctor (TREE_OPERAND (*expr_p, 1));
4770 type = TREE_TYPE (ctor);
4771 elts = CONSTRUCTOR_ELTS (ctor);
4772 ret = GS_ALL_DONE;
4773
4774 switch (TREE_CODE (type))
4775 {
4776 case RECORD_TYPE:
4777 case UNION_TYPE:
4778 case QUAL_UNION_TYPE:
4779 case ARRAY_TYPE:
4780 {
4781 struct gimplify_init_ctor_preeval_data preeval_data;
4782 HOST_WIDE_INT num_ctor_elements, num_nonzero_elements;
4783 HOST_WIDE_INT num_unique_nonzero_elements;
4784 bool cleared, complete_p, valid_const_initializer;
4785 /* Use readonly data for initializers of this or smaller size
4786 regardless of the num_nonzero_elements / num_unique_nonzero_elements
4787 ratio. */
4788 const HOST_WIDE_INT min_unique_size = 64;
4789 /* If num_nonzero_elements / num_unique_nonzero_elements ratio
4790 is smaller than this, use readonly data. */
4791 const int unique_nonzero_ratio = 8;
4792
4793 /* Aggregate types must lower constructors to initialization of
4794 individual elements. The exception is that a CONSTRUCTOR node
4795 with no elements indicates zero-initialization of the whole. */
4796 if (vec_safe_is_empty (elts))
4797 {
4798 if (notify_temp_creation)
4799 return GS_OK;
4800 break;
4801 }
4802
4803 /* Fetch information about the constructor to direct later processing.
4804 We might want to make static versions of it in various cases, and
4805 can only do so if it known to be a valid constant initializer. */
4806 valid_const_initializer
4807 = categorize_ctor_elements (ctor, &num_nonzero_elements,
4808 &num_unique_nonzero_elements,
4809 &num_ctor_elements, &complete_p);
4810
4811 /* If a const aggregate variable is being initialized, then it
4812 should never be a lose to promote the variable to be static. */
4813 if (valid_const_initializer
4814 && num_nonzero_elements > 1
4815 && TREE_READONLY (object)
4816 && VAR_P (object)
4817 && (flag_merge_constants >= 2 || !TREE_ADDRESSABLE (object))
4818 /* For ctors that have many repeated nonzero elements
4819 represented through RANGE_EXPRs, prefer initializing
4820 those through runtime loops over copies of large amounts
4821 of data from readonly data section. */
4822 && (num_unique_nonzero_elements
4823 > num_nonzero_elements / unique_nonzero_ratio
4824 || ((unsigned HOST_WIDE_INT) int_size_in_bytes (type)
4825 <= (unsigned HOST_WIDE_INT) min_unique_size)))
4826 {
4827 if (notify_temp_creation)
4828 return GS_ERROR;
4829 DECL_INITIAL (object) = ctor;
4830 TREE_STATIC (object) = 1;
4831 if (!DECL_NAME (object))
4832 DECL_NAME (object) = create_tmp_var_name ("C");
4833 walk_tree (&DECL_INITIAL (object), force_labels_r, NULL, NULL);
4834
4835 /* ??? C++ doesn't automatically append a .<number> to the
4836 assembler name, and even when it does, it looks at FE private
4837 data structures to figure out what that number should be,
4838 which are not set for this variable. I suppose this is
4839 important for local statics for inline functions, which aren't
4840 "local" in the object file sense. So in order to get a unique
4841 TU-local symbol, we must invoke the lhd version now. */
4842 lhd_set_decl_assembler_name (object);
4843
4844 *expr_p = NULL_TREE;
4845 break;
4846 }
4847
4848 /* If there are "lots" of initialized elements, even discounting
4849 those that are not address constants (and thus *must* be
4850 computed at runtime), then partition the constructor into
4851 constant and non-constant parts. Block copy the constant
4852 parts in, then generate code for the non-constant parts. */
4853 /* TODO. There's code in cp/typeck.c to do this. */
4854
4855 if (int_size_in_bytes (TREE_TYPE (ctor)) < 0)
4856 /* store_constructor will ignore the clearing of variable-sized
4857 objects. Initializers for such objects must explicitly set
4858 every field that needs to be set. */
4859 cleared = false;
4860 else if (!complete_p)
4861 /* If the constructor isn't complete, clear the whole object
4862 beforehand, unless CONSTRUCTOR_NO_CLEARING is set on it.
4863
4864 ??? This ought not to be needed. For any element not present
4865 in the initializer, we should simply set them to zero. Except
4866 we'd need to *find* the elements that are not present, and that
4867 requires trickery to avoid quadratic compile-time behavior in
4868 large cases or excessive memory use in small cases. */
4869 cleared = !CONSTRUCTOR_NO_CLEARING (ctor);
4870 else if (num_ctor_elements - num_nonzero_elements
4871 > CLEAR_RATIO (optimize_function_for_speed_p (cfun))
4872 && num_nonzero_elements < num_ctor_elements / 4)
4873 /* If there are "lots" of zeros, it's more efficient to clear
4874 the memory and then set the nonzero elements. */
4875 cleared = true;
4876 else
4877 cleared = false;
4878
4879 /* If there are "lots" of initialized elements, and all of them
4880 are valid address constants, then the entire initializer can
4881 be dropped to memory, and then memcpy'd out. Don't do this
4882 for sparse arrays, though, as it's more efficient to follow
4883 the standard CONSTRUCTOR behavior of memset followed by
4884 individual element initialization. Also don't do this for small
4885 all-zero initializers (which aren't big enough to merit
4886 clearing), and don't try to make bitwise copies of
4887 TREE_ADDRESSABLE types.
4888
4889 We cannot apply such transformation when compiling chkp static
4890 initializer because creation of initializer image in the memory
4891 will require static initialization of bounds for it. It should
4892 result in another gimplification of similar initializer and we
4893 may fall into infinite loop. */
4894 if (valid_const_initializer
4895 && !(cleared || num_nonzero_elements == 0)
4896 && !TREE_ADDRESSABLE (type)
4897 && (!current_function_decl
4898 || !lookup_attribute ("chkp ctor",
4899 DECL_ATTRIBUTES (current_function_decl))))
4900 {
4901 HOST_WIDE_INT size = int_size_in_bytes (type);
4902 unsigned int align;
4903
4904 /* ??? We can still get unbounded array types, at least
4905 from the C++ front end. This seems wrong, but attempt
4906 to work around it for now. */
4907 if (size < 0)
4908 {
4909 size = int_size_in_bytes (TREE_TYPE (object));
4910 if (size >= 0)
4911 TREE_TYPE (ctor) = type = TREE_TYPE (object);
4912 }
4913
4914 /* Find the maximum alignment we can assume for the object. */
4915 /* ??? Make use of DECL_OFFSET_ALIGN. */
4916 if (DECL_P (object))
4917 align = DECL_ALIGN (object);
4918 else
4919 align = TYPE_ALIGN (type);
4920
4921 /* Do a block move either if the size is so small as to make
4922 each individual move a sub-unit move on average, or if it
4923 is so large as to make individual moves inefficient. */
4924 if (size > 0
4925 && num_nonzero_elements > 1
4926 /* For ctors that have many repeated nonzero elements
4927 represented through RANGE_EXPRs, prefer initializing
4928 those through runtime loops over copies of large amounts
4929 of data from readonly data section. */
4930 && (num_unique_nonzero_elements
4931 > num_nonzero_elements / unique_nonzero_ratio
4932 || size <= min_unique_size)
4933 && (size < num_nonzero_elements
4934 || !can_move_by_pieces (size, align)))
4935 {
4936 if (notify_temp_creation)
4937 return GS_ERROR;
4938
4939 walk_tree (&ctor, force_labels_r, NULL, NULL);
4940 ctor = tree_output_constant_def (ctor);
4941 if (!useless_type_conversion_p (type, TREE_TYPE (ctor)))
4942 ctor = build1 (VIEW_CONVERT_EXPR, type, ctor);
4943 TREE_OPERAND (*expr_p, 1) = ctor;
4944
4945 /* This is no longer an assignment of a CONSTRUCTOR, but
4946 we still may have processing to do on the LHS. So
4947 pretend we didn't do anything here to let that happen. */
4948 return GS_UNHANDLED;
4949 }
4950 }
4951
4952 /* If the target is volatile, we have non-zero elements and more than
4953 one field to assign, initialize the target from a temporary. */
4954 if (TREE_THIS_VOLATILE (object)
4955 && !TREE_ADDRESSABLE (type)
4956 && (num_nonzero_elements > 0 || !cleared)
4957 && vec_safe_length (elts) > 1)
4958 {
4959 tree temp = create_tmp_var (TYPE_MAIN_VARIANT (type));
4960 TREE_OPERAND (*expr_p, 0) = temp;
4961 *expr_p = build2 (COMPOUND_EXPR, TREE_TYPE (*expr_p),
4962 *expr_p,
4963 build2 (MODIFY_EXPR, void_type_node,
4964 object, temp));
4965 return GS_OK;
4966 }
4967
4968 if (notify_temp_creation)
4969 return GS_OK;
4970
4971 /* If there are nonzero elements and if needed, pre-evaluate to capture
4972 elements overlapping with the lhs into temporaries. We must do this
4973 before clearing to fetch the values before they are zeroed-out. */
4974 if (num_nonzero_elements > 0 && TREE_CODE (*expr_p) != INIT_EXPR)
4975 {
4976 preeval_data.lhs_base_decl = get_base_address (object);
4977 if (!DECL_P (preeval_data.lhs_base_decl))
4978 preeval_data.lhs_base_decl = NULL;
4979 preeval_data.lhs_alias_set = get_alias_set (object);
4980
4981 gimplify_init_ctor_preeval (&TREE_OPERAND (*expr_p, 1),
4982 pre_p, post_p, &preeval_data);
4983 }
4984
4985 bool ctor_has_side_effects_p
4986 = TREE_SIDE_EFFECTS (TREE_OPERAND (*expr_p, 1));
4987
4988 if (cleared)
4989 {
4990 /* Zap the CONSTRUCTOR element list, which simplifies this case.
4991 Note that we still have to gimplify, in order to handle the
4992 case of variable sized types. Avoid shared tree structures. */
4993 CONSTRUCTOR_ELTS (ctor) = NULL;
4994 TREE_SIDE_EFFECTS (ctor) = 0;
4995 object = unshare_expr (object);
4996 gimplify_stmt (expr_p, pre_p);
4997 }
4998
4999 /* If we have not block cleared the object, or if there are nonzero
5000 elements in the constructor, or if the constructor has side effects,
5001 add assignments to the individual scalar fields of the object. */
5002 if (!cleared
5003 || num_nonzero_elements > 0
5004 || ctor_has_side_effects_p)
5005 gimplify_init_ctor_eval (object, elts, pre_p, cleared);
5006
5007 *expr_p = NULL_TREE;
5008 }
5009 break;
5010
5011 case COMPLEX_TYPE:
5012 {
5013 tree r, i;
5014
5015 if (notify_temp_creation)
5016 return GS_OK;
5017
5018 /* Extract the real and imaginary parts out of the ctor. */
5019 gcc_assert (elts->length () == 2);
5020 r = (*elts)[0].value;
5021 i = (*elts)[1].value;
5022 if (r == NULL || i == NULL)
5023 {
5024 tree zero = build_zero_cst (TREE_TYPE (type));
5025 if (r == NULL)
5026 r = zero;
5027 if (i == NULL)
5028 i = zero;
5029 }
5030
5031 /* Complex types have either COMPLEX_CST or COMPLEX_EXPR to
5032 represent creation of a complex value. */
5033 if (TREE_CONSTANT (r) && TREE_CONSTANT (i))
5034 {
5035 ctor = build_complex (type, r, i);
5036 TREE_OPERAND (*expr_p, 1) = ctor;
5037 }
5038 else
5039 {
5040 ctor = build2 (COMPLEX_EXPR, type, r, i);
5041 TREE_OPERAND (*expr_p, 1) = ctor;
5042 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1),
5043 pre_p,
5044 post_p,
5045 rhs_predicate_for (TREE_OPERAND (*expr_p, 0)),
5046 fb_rvalue);
5047 }
5048 }
5049 break;
5050
5051 case VECTOR_TYPE:
5052 {
5053 unsigned HOST_WIDE_INT ix;
5054 constructor_elt *ce;
5055
5056 if (notify_temp_creation)
5057 return GS_OK;
5058
5059 /* Go ahead and simplify constant constructors to VECTOR_CST. */
5060 if (TREE_CONSTANT (ctor))
5061 {
5062 bool constant_p = true;
5063 tree value;
5064
5065 /* Even when ctor is constant, it might contain non-*_CST
5066 elements, such as addresses or trapping values like
5067 1.0/0.0 - 1.0/0.0. Such expressions don't belong
5068 in VECTOR_CST nodes. */
5069 FOR_EACH_CONSTRUCTOR_VALUE (elts, ix, value)
5070 if (!CONSTANT_CLASS_P (value))
5071 {
5072 constant_p = false;
5073 break;
5074 }
5075
5076 if (constant_p)
5077 {
5078 TREE_OPERAND (*expr_p, 1) = build_vector_from_ctor (type, elts);
5079 break;
5080 }
5081
5082 TREE_CONSTANT (ctor) = 0;
5083 }
5084
5085 /* Vector types use CONSTRUCTOR all the way through gimple
5086 compilation as a general initializer. */
5087 FOR_EACH_VEC_SAFE_ELT (elts, ix, ce)
5088 {
5089 enum gimplify_status tret;
5090 tret = gimplify_expr (&ce->value, pre_p, post_p, is_gimple_val,
5091 fb_rvalue);
5092 if (tret == GS_ERROR)
5093 ret = GS_ERROR;
5094 else if (TREE_STATIC (ctor)
5095 && !initializer_constant_valid_p (ce->value,
5096 TREE_TYPE (ce->value)))
5097 TREE_STATIC (ctor) = 0;
5098 }
5099 if (!is_gimple_reg (TREE_OPERAND (*expr_p, 0)))
5100 TREE_OPERAND (*expr_p, 1) = get_formal_tmp_var (ctor, pre_p);
5101 }
5102 break;
5103
5104 default:
5105 /* So how did we get a CONSTRUCTOR for a scalar type? */
5106 gcc_unreachable ();
5107 }
5108
5109 if (ret == GS_ERROR)
5110 return GS_ERROR;
5111 /* If we have gimplified both sides of the initializer but have
5112 not emitted an assignment, do so now. */
5113 if (*expr_p)
5114 {
5115 tree lhs = TREE_OPERAND (*expr_p, 0);
5116 tree rhs = TREE_OPERAND (*expr_p, 1);
5117 if (want_value && object == lhs)
5118 lhs = unshare_expr (lhs);
5119 gassign *init = gimple_build_assign (lhs, rhs);
5120 gimplify_seq_add_stmt (pre_p, init);
5121 }
5122 if (want_value)
5123 {
5124 *expr_p = object;
5125 return GS_OK;
5126 }
5127 else
5128 {
5129 *expr_p = NULL;
5130 return GS_ALL_DONE;
5131 }
5132 }
5133
5134 /* Given a pointer value OP0, return a simplified version of an
5135 indirection through OP0, or NULL_TREE if no simplification is
5136 possible. This may only be applied to a rhs of an expression.
5137 Note that the resulting type may be different from the type pointed
5138 to in the sense that it is still compatible from the langhooks
5139 point of view. */
5140
5141 static tree
gimple_fold_indirect_ref_rhs(tree t)5142 gimple_fold_indirect_ref_rhs (tree t)
5143 {
5144 return gimple_fold_indirect_ref (t);
5145 }
5146
5147 /* Subroutine of gimplify_modify_expr to do simplifications of
5148 MODIFY_EXPRs based on the code of the RHS. We loop for as long as
5149 something changes. */
5150
5151 static enum gimplify_status
gimplify_modify_expr_rhs(tree * expr_p,tree * from_p,tree * to_p,gimple_seq * pre_p,gimple_seq * post_p,bool want_value)5152 gimplify_modify_expr_rhs (tree *expr_p, tree *from_p, tree *to_p,
5153 gimple_seq *pre_p, gimple_seq *post_p,
5154 bool want_value)
5155 {
5156 enum gimplify_status ret = GS_UNHANDLED;
5157 bool changed;
5158
5159 do
5160 {
5161 changed = false;
5162 switch (TREE_CODE (*from_p))
5163 {
5164 case VAR_DECL:
5165 /* If we're assigning from a read-only variable initialized with
5166 a constructor, do the direct assignment from the constructor,
5167 but only if neither source nor target are volatile since this
5168 latter assignment might end up being done on a per-field basis. */
5169 if (DECL_INITIAL (*from_p)
5170 && TREE_READONLY (*from_p)
5171 && !TREE_THIS_VOLATILE (*from_p)
5172 && !TREE_THIS_VOLATILE (*to_p)
5173 && TREE_CODE (DECL_INITIAL (*from_p)) == CONSTRUCTOR)
5174 {
5175 tree old_from = *from_p;
5176 enum gimplify_status subret;
5177
5178 /* Move the constructor into the RHS. */
5179 *from_p = unshare_expr (DECL_INITIAL (*from_p));
5180
5181 /* Let's see if gimplify_init_constructor will need to put
5182 it in memory. */
5183 subret = gimplify_init_constructor (expr_p, NULL, NULL,
5184 false, true);
5185 if (subret == GS_ERROR)
5186 {
5187 /* If so, revert the change. */
5188 *from_p = old_from;
5189 }
5190 else
5191 {
5192 ret = GS_OK;
5193 changed = true;
5194 }
5195 }
5196 break;
5197 case INDIRECT_REF:
5198 {
5199 /* If we have code like
5200
5201 *(const A*)(A*)&x
5202
5203 where the type of "x" is a (possibly cv-qualified variant
5204 of "A"), treat the entire expression as identical to "x".
5205 This kind of code arises in C++ when an object is bound
5206 to a const reference, and if "x" is a TARGET_EXPR we want
5207 to take advantage of the optimization below. */
5208 bool volatile_p = TREE_THIS_VOLATILE (*from_p);
5209 tree t = gimple_fold_indirect_ref_rhs (TREE_OPERAND (*from_p, 0));
5210 if (t)
5211 {
5212 if (TREE_THIS_VOLATILE (t) != volatile_p)
5213 {
5214 if (DECL_P (t))
5215 t = build_simple_mem_ref_loc (EXPR_LOCATION (*from_p),
5216 build_fold_addr_expr (t));
5217 if (REFERENCE_CLASS_P (t))
5218 TREE_THIS_VOLATILE (t) = volatile_p;
5219 }
5220 *from_p = t;
5221 ret = GS_OK;
5222 changed = true;
5223 }
5224 break;
5225 }
5226
5227 case TARGET_EXPR:
5228 {
5229 /* If we are initializing something from a TARGET_EXPR, strip the
5230 TARGET_EXPR and initialize it directly, if possible. This can't
5231 be done if the initializer is void, since that implies that the
5232 temporary is set in some non-trivial way.
5233
5234 ??? What about code that pulls out the temp and uses it
5235 elsewhere? I think that such code never uses the TARGET_EXPR as
5236 an initializer. If I'm wrong, we'll die because the temp won't
5237 have any RTL. In that case, I guess we'll need to replace
5238 references somehow. */
5239 tree init = TARGET_EXPR_INITIAL (*from_p);
5240
5241 if (init
5242 && (TREE_CODE (*expr_p) != MODIFY_EXPR
5243 || !TARGET_EXPR_NO_ELIDE (*from_p))
5244 && !VOID_TYPE_P (TREE_TYPE (init)))
5245 {
5246 *from_p = init;
5247 ret = GS_OK;
5248 changed = true;
5249 }
5250 }
5251 break;
5252
5253 case COMPOUND_EXPR:
5254 /* Remove any COMPOUND_EXPR in the RHS so the following cases will be
5255 caught. */
5256 gimplify_compound_expr (from_p, pre_p, true);
5257 ret = GS_OK;
5258 changed = true;
5259 break;
5260
5261 case CONSTRUCTOR:
5262 /* If we already made some changes, let the front end have a
5263 crack at this before we break it down. */
5264 if (ret != GS_UNHANDLED)
5265 break;
5266 /* If we're initializing from a CONSTRUCTOR, break this into
5267 individual MODIFY_EXPRs. */
5268 return gimplify_init_constructor (expr_p, pre_p, post_p, want_value,
5269 false);
5270
5271 case COND_EXPR:
5272 /* If we're assigning to a non-register type, push the assignment
5273 down into the branches. This is mandatory for ADDRESSABLE types,
5274 since we cannot generate temporaries for such, but it saves a
5275 copy in other cases as well. */
5276 if (!is_gimple_reg_type (TREE_TYPE (*from_p)))
5277 {
5278 /* This code should mirror the code in gimplify_cond_expr. */
5279 enum tree_code code = TREE_CODE (*expr_p);
5280 tree cond = *from_p;
5281 tree result = *to_p;
5282
5283 ret = gimplify_expr (&result, pre_p, post_p,
5284 is_gimple_lvalue, fb_lvalue);
5285 if (ret != GS_ERROR)
5286 ret = GS_OK;
5287
5288 /* If we are going to write RESULT more than once, clear
5289 TREE_READONLY flag, otherwise we might incorrectly promote
5290 the variable to static const and initialize it at compile
5291 time in one of the branches. */
5292 if (VAR_P (result)
5293 && TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node
5294 && TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
5295 TREE_READONLY (result) = 0;
5296 if (TREE_TYPE (TREE_OPERAND (cond, 1)) != void_type_node)
5297 TREE_OPERAND (cond, 1)
5298 = build2 (code, void_type_node, result,
5299 TREE_OPERAND (cond, 1));
5300 if (TREE_TYPE (TREE_OPERAND (cond, 2)) != void_type_node)
5301 TREE_OPERAND (cond, 2)
5302 = build2 (code, void_type_node, unshare_expr (result),
5303 TREE_OPERAND (cond, 2));
5304
5305 TREE_TYPE (cond) = void_type_node;
5306 recalculate_side_effects (cond);
5307
5308 if (want_value)
5309 {
5310 gimplify_and_add (cond, pre_p);
5311 *expr_p = unshare_expr (result);
5312 }
5313 else
5314 *expr_p = cond;
5315 return ret;
5316 }
5317 break;
5318
5319 case CALL_EXPR:
5320 /* For calls that return in memory, give *to_p as the CALL_EXPR's
5321 return slot so that we don't generate a temporary. */
5322 if (!CALL_EXPR_RETURN_SLOT_OPT (*from_p)
5323 && aggregate_value_p (*from_p, *from_p))
5324 {
5325 bool use_target;
5326
5327 if (!(rhs_predicate_for (*to_p))(*from_p))
5328 /* If we need a temporary, *to_p isn't accurate. */
5329 use_target = false;
5330 /* It's OK to use the return slot directly unless it's an NRV. */
5331 else if (TREE_CODE (*to_p) == RESULT_DECL
5332 && DECL_NAME (*to_p) == NULL_TREE
5333 && needs_to_live_in_memory (*to_p))
5334 use_target = true;
5335 else if (is_gimple_reg_type (TREE_TYPE (*to_p))
5336 || (DECL_P (*to_p) && DECL_REGISTER (*to_p)))
5337 /* Don't force regs into memory. */
5338 use_target = false;
5339 else if (TREE_CODE (*expr_p) == INIT_EXPR)
5340 /* It's OK to use the target directly if it's being
5341 initialized. */
5342 use_target = true;
5343 else if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (*to_p)))
5344 != INTEGER_CST)
5345 /* Always use the target and thus RSO for variable-sized types.
5346 GIMPLE cannot deal with a variable-sized assignment
5347 embedded in a call statement. */
5348 use_target = true;
5349 else if (TREE_CODE (*to_p) != SSA_NAME
5350 && (!is_gimple_variable (*to_p)
5351 || needs_to_live_in_memory (*to_p)))
5352 /* Don't use the original target if it's already addressable;
5353 if its address escapes, and the called function uses the
5354 NRV optimization, a conforming program could see *to_p
5355 change before the called function returns; see c++/19317.
5356 When optimizing, the return_slot pass marks more functions
5357 as safe after we have escape info. */
5358 use_target = false;
5359 else
5360 use_target = true;
5361
5362 if (use_target)
5363 {
5364 CALL_EXPR_RETURN_SLOT_OPT (*from_p) = 1;
5365 mark_addressable (*to_p);
5366 }
5367 }
5368 break;
5369
5370 case WITH_SIZE_EXPR:
5371 /* Likewise for calls that return an aggregate of non-constant size,
5372 since we would not be able to generate a temporary at all. */
5373 if (TREE_CODE (TREE_OPERAND (*from_p, 0)) == CALL_EXPR)
5374 {
5375 *from_p = TREE_OPERAND (*from_p, 0);
5376 /* We don't change ret in this case because the
5377 WITH_SIZE_EXPR might have been added in
5378 gimplify_modify_expr, so returning GS_OK would lead to an
5379 infinite loop. */
5380 changed = true;
5381 }
5382 break;
5383
5384 /* If we're initializing from a container, push the initialization
5385 inside it. */
5386 case CLEANUP_POINT_EXPR:
5387 case BIND_EXPR:
5388 case STATEMENT_LIST:
5389 {
5390 tree wrap = *from_p;
5391 tree t;
5392
5393 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_min_lval,
5394 fb_lvalue);
5395 if (ret != GS_ERROR)
5396 ret = GS_OK;
5397
5398 t = voidify_wrapper_expr (wrap, *expr_p);
5399 gcc_assert (t == *expr_p);
5400
5401 if (want_value)
5402 {
5403 gimplify_and_add (wrap, pre_p);
5404 *expr_p = unshare_expr (*to_p);
5405 }
5406 else
5407 *expr_p = wrap;
5408 return GS_OK;
5409 }
5410
5411 case COMPOUND_LITERAL_EXPR:
5412 {
5413 tree complit = TREE_OPERAND (*expr_p, 1);
5414 tree decl_s = COMPOUND_LITERAL_EXPR_DECL_EXPR (complit);
5415 tree decl = DECL_EXPR_DECL (decl_s);
5416 tree init = DECL_INITIAL (decl);
5417
5418 /* struct T x = (struct T) { 0, 1, 2 } can be optimized
5419 into struct T x = { 0, 1, 2 } if the address of the
5420 compound literal has never been taken. */
5421 if (!TREE_ADDRESSABLE (complit)
5422 && !TREE_ADDRESSABLE (decl)
5423 && init)
5424 {
5425 *expr_p = copy_node (*expr_p);
5426 TREE_OPERAND (*expr_p, 1) = init;
5427 return GS_OK;
5428 }
5429 }
5430
5431 default:
5432 break;
5433 }
5434 }
5435 while (changed);
5436
5437 return ret;
5438 }
5439
5440
5441 /* Return true if T looks like a valid GIMPLE statement. */
5442
5443 static bool
is_gimple_stmt(tree t)5444 is_gimple_stmt (tree t)
5445 {
5446 const enum tree_code code = TREE_CODE (t);
5447
5448 switch (code)
5449 {
5450 case NOP_EXPR:
5451 /* The only valid NOP_EXPR is the empty statement. */
5452 return IS_EMPTY_STMT (t);
5453
5454 case BIND_EXPR:
5455 case COND_EXPR:
5456 /* These are only valid if they're void. */
5457 return TREE_TYPE (t) == NULL || VOID_TYPE_P (TREE_TYPE (t));
5458
5459 case SWITCH_EXPR:
5460 case GOTO_EXPR:
5461 case RETURN_EXPR:
5462 case LABEL_EXPR:
5463 case CASE_LABEL_EXPR:
5464 case TRY_CATCH_EXPR:
5465 case TRY_FINALLY_EXPR:
5466 case EH_FILTER_EXPR:
5467 case CATCH_EXPR:
5468 case ASM_EXPR:
5469 case STATEMENT_LIST:
5470 case OACC_PARALLEL:
5471 case OACC_KERNELS:
5472 case OACC_DATA:
5473 case OACC_HOST_DATA:
5474 case OACC_DECLARE:
5475 case OACC_UPDATE:
5476 case OACC_ENTER_DATA:
5477 case OACC_EXIT_DATA:
5478 case OACC_CACHE:
5479 case OMP_PARALLEL:
5480 case OMP_FOR:
5481 case OMP_SIMD:
5482 case OMP_DISTRIBUTE:
5483 case OACC_LOOP:
5484 case OMP_SECTIONS:
5485 case OMP_SECTION:
5486 case OMP_SINGLE:
5487 case OMP_MASTER:
5488 case OMP_TASKGROUP:
5489 case OMP_ORDERED:
5490 case OMP_CRITICAL:
5491 case OMP_TASK:
5492 case OMP_TARGET:
5493 case OMP_TARGET_DATA:
5494 case OMP_TARGET_UPDATE:
5495 case OMP_TARGET_ENTER_DATA:
5496 case OMP_TARGET_EXIT_DATA:
5497 case OMP_TASKLOOP:
5498 case OMP_TEAMS:
5499 /* These are always void. */
5500 return true;
5501
5502 case CALL_EXPR:
5503 case MODIFY_EXPR:
5504 case PREDICT_EXPR:
5505 /* These are valid regardless of their type. */
5506 return true;
5507
5508 default:
5509 return false;
5510 }
5511 }
5512
5513
5514 /* Promote partial stores to COMPLEX variables to total stores. *EXPR_P is
5515 a MODIFY_EXPR with a lhs of a REAL/IMAGPART_EXPR of a variable with
5516 DECL_GIMPLE_REG_P set.
5517
5518 IMPORTANT NOTE: This promotion is performed by introducing a load of the
5519 other, unmodified part of the complex object just before the total store.
5520 As a consequence, if the object is still uninitialized, an undefined value
5521 will be loaded into a register, which may result in a spurious exception
5522 if the register is floating-point and the value happens to be a signaling
5523 NaN for example. Then the fully-fledged complex operations lowering pass
5524 followed by a DCE pass are necessary in order to fix things up. */
5525
5526 static enum gimplify_status
gimplify_modify_expr_complex_part(tree * expr_p,gimple_seq * pre_p,bool want_value)5527 gimplify_modify_expr_complex_part (tree *expr_p, gimple_seq *pre_p,
5528 bool want_value)
5529 {
5530 enum tree_code code, ocode;
5531 tree lhs, rhs, new_rhs, other, realpart, imagpart;
5532
5533 lhs = TREE_OPERAND (*expr_p, 0);
5534 rhs = TREE_OPERAND (*expr_p, 1);
5535 code = TREE_CODE (lhs);
5536 lhs = TREE_OPERAND (lhs, 0);
5537
5538 ocode = code == REALPART_EXPR ? IMAGPART_EXPR : REALPART_EXPR;
5539 other = build1 (ocode, TREE_TYPE (rhs), lhs);
5540 TREE_NO_WARNING (other) = 1;
5541 other = get_formal_tmp_var (other, pre_p);
5542
5543 realpart = code == REALPART_EXPR ? rhs : other;
5544 imagpart = code == REALPART_EXPR ? other : rhs;
5545
5546 if (TREE_CONSTANT (realpart) && TREE_CONSTANT (imagpart))
5547 new_rhs = build_complex (TREE_TYPE (lhs), realpart, imagpart);
5548 else
5549 new_rhs = build2 (COMPLEX_EXPR, TREE_TYPE (lhs), realpart, imagpart);
5550
5551 gimplify_seq_add_stmt (pre_p, gimple_build_assign (lhs, new_rhs));
5552 *expr_p = (want_value) ? rhs : NULL_TREE;
5553
5554 return GS_ALL_DONE;
5555 }
5556
5557 /* Gimplify the MODIFY_EXPR node pointed to by EXPR_P.
5558
5559 modify_expr
5560 : varname '=' rhs
5561 | '*' ID '=' rhs
5562
5563 PRE_P points to the list where side effects that must happen before
5564 *EXPR_P should be stored.
5565
5566 POST_P points to the list where side effects that must happen after
5567 *EXPR_P should be stored.
5568
5569 WANT_VALUE is nonzero iff we want to use the value of this expression
5570 in another expression. */
5571
5572 static enum gimplify_status
gimplify_modify_expr(tree * expr_p,gimple_seq * pre_p,gimple_seq * post_p,bool want_value)5573 gimplify_modify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
5574 bool want_value)
5575 {
5576 tree *from_p = &TREE_OPERAND (*expr_p, 1);
5577 tree *to_p = &TREE_OPERAND (*expr_p, 0);
5578 enum gimplify_status ret = GS_UNHANDLED;
5579 gimple *assign;
5580 location_t loc = EXPR_LOCATION (*expr_p);
5581 gimple_stmt_iterator gsi;
5582
5583 gcc_assert (TREE_CODE (*expr_p) == MODIFY_EXPR
5584 || TREE_CODE (*expr_p) == INIT_EXPR);
5585
5586 /* Trying to simplify a clobber using normal logic doesn't work,
5587 so handle it here. */
5588 if (TREE_CLOBBER_P (*from_p))
5589 {
5590 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
5591 if (ret == GS_ERROR)
5592 return ret;
5593 gcc_assert (!want_value
5594 && (VAR_P (*to_p) || TREE_CODE (*to_p) == MEM_REF));
5595 gimplify_seq_add_stmt (pre_p, gimple_build_assign (*to_p, *from_p));
5596 *expr_p = NULL;
5597 return GS_ALL_DONE;
5598 }
5599
5600 /* Insert pointer conversions required by the middle-end that are not
5601 required by the frontend. This fixes middle-end type checking for
5602 for example gcc.dg/redecl-6.c. */
5603 if (POINTER_TYPE_P (TREE_TYPE (*to_p)))
5604 {
5605 STRIP_USELESS_TYPE_CONVERSION (*from_p);
5606 if (!useless_type_conversion_p (TREE_TYPE (*to_p), TREE_TYPE (*from_p)))
5607 *from_p = fold_convert_loc (loc, TREE_TYPE (*to_p), *from_p);
5608 }
5609
5610 /* See if any simplifications can be done based on what the RHS is. */
5611 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
5612 want_value);
5613 if (ret != GS_UNHANDLED)
5614 return ret;
5615
5616 /* For zero sized types only gimplify the left hand side and right hand
5617 side as statements and throw away the assignment. Do this after
5618 gimplify_modify_expr_rhs so we handle TARGET_EXPRs of addressable
5619 types properly. */
5620 if (zero_sized_type (TREE_TYPE (*from_p))
5621 && !want_value
5622 /* Don't do this for calls that return addressable types, expand_call
5623 relies on those having a lhs. */
5624 && !(TREE_ADDRESSABLE (TREE_TYPE (*from_p))
5625 && TREE_CODE (*from_p) == CALL_EXPR))
5626 {
5627 gimplify_stmt (from_p, pre_p);
5628 gimplify_stmt (to_p, pre_p);
5629 *expr_p = NULL_TREE;
5630 return GS_ALL_DONE;
5631 }
5632
5633 /* If the value being copied is of variable width, compute the length
5634 of the copy into a WITH_SIZE_EXPR. Note that we need to do this
5635 before gimplifying any of the operands so that we can resolve any
5636 PLACEHOLDER_EXPRs in the size. Also note that the RTL expander uses
5637 the size of the expression to be copied, not of the destination, so
5638 that is what we must do here. */
5639 maybe_with_size_expr (from_p);
5640
5641 /* As a special case, we have to temporarily allow for assignments
5642 with a CALL_EXPR on the RHS. Since in GIMPLE a function call is
5643 a toplevel statement, when gimplifying the GENERIC expression
5644 MODIFY_EXPR <a, CALL_EXPR <foo>>, we cannot create the tuple
5645 GIMPLE_ASSIGN <a, GIMPLE_CALL <foo>>.
5646
5647 Instead, we need to create the tuple GIMPLE_CALL <a, foo>. To
5648 prevent gimplify_expr from trying to create a new temporary for
5649 foo's LHS, we tell it that it should only gimplify until it
5650 reaches the CALL_EXPR. On return from gimplify_expr, the newly
5651 created GIMPLE_CALL <foo> will be the last statement in *PRE_P
5652 and all we need to do here is set 'a' to be its LHS. */
5653
5654 /* Gimplify the RHS first for C++17 and bug 71104. */
5655 gimple_predicate initial_pred = initial_rhs_predicate_for (*to_p);
5656 ret = gimplify_expr (from_p, pre_p, post_p, initial_pred, fb_rvalue);
5657 if (ret == GS_ERROR)
5658 return ret;
5659
5660 /* Then gimplify the LHS. */
5661 /* If we gimplified the RHS to a CALL_EXPR and that call may return
5662 twice we have to make sure to gimplify into non-SSA as otherwise
5663 the abnormal edge added later will make those defs not dominate
5664 their uses.
5665 ??? Technically this applies only to the registers used in the
5666 resulting non-register *TO_P. */
5667 bool saved_into_ssa = gimplify_ctxp->into_ssa;
5668 if (saved_into_ssa
5669 && TREE_CODE (*from_p) == CALL_EXPR
5670 && call_expr_flags (*from_p) & ECF_RETURNS_TWICE)
5671 gimplify_ctxp->into_ssa = false;
5672 ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
5673 gimplify_ctxp->into_ssa = saved_into_ssa;
5674 if (ret == GS_ERROR)
5675 return ret;
5676
5677 /* Now that the LHS is gimplified, re-gimplify the RHS if our initial
5678 guess for the predicate was wrong. */
5679 gimple_predicate final_pred = rhs_predicate_for (*to_p);
5680 if (final_pred != initial_pred)
5681 {
5682 ret = gimplify_expr (from_p, pre_p, post_p, final_pred, fb_rvalue);
5683 if (ret == GS_ERROR)
5684 return ret;
5685 }
5686
5687 /* In case of va_arg internal fn wrappped in a WITH_SIZE_EXPR, add the type
5688 size as argument to the call. */
5689 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
5690 {
5691 tree call = TREE_OPERAND (*from_p, 0);
5692 tree vlasize = TREE_OPERAND (*from_p, 1);
5693
5694 if (TREE_CODE (call) == CALL_EXPR
5695 && CALL_EXPR_IFN (call) == IFN_VA_ARG)
5696 {
5697 int nargs = call_expr_nargs (call);
5698 tree type = TREE_TYPE (call);
5699 tree ap = CALL_EXPR_ARG (call, 0);
5700 tree tag = CALL_EXPR_ARG (call, 1);
5701 tree aptag = CALL_EXPR_ARG (call, 2);
5702 tree newcall = build_call_expr_internal_loc (EXPR_LOCATION (call),
5703 IFN_VA_ARG, type,
5704 nargs + 1, ap, tag,
5705 aptag, vlasize);
5706 TREE_OPERAND (*from_p, 0) = newcall;
5707 }
5708 }
5709
5710 /* Now see if the above changed *from_p to something we handle specially. */
5711 ret = gimplify_modify_expr_rhs (expr_p, from_p, to_p, pre_p, post_p,
5712 want_value);
5713 if (ret != GS_UNHANDLED)
5714 return ret;
5715
5716 /* If we've got a variable sized assignment between two lvalues (i.e. does
5717 not involve a call), then we can make things a bit more straightforward
5718 by converting the assignment to memcpy or memset. */
5719 if (TREE_CODE (*from_p) == WITH_SIZE_EXPR)
5720 {
5721 tree from = TREE_OPERAND (*from_p, 0);
5722 tree size = TREE_OPERAND (*from_p, 1);
5723
5724 if (TREE_CODE (from) == CONSTRUCTOR)
5725 return gimplify_modify_expr_to_memset (expr_p, size, want_value, pre_p);
5726
5727 if (is_gimple_addressable (from))
5728 {
5729 *from_p = from;
5730 return gimplify_modify_expr_to_memcpy (expr_p, size, want_value,
5731 pre_p);
5732 }
5733 }
5734
5735 /* Transform partial stores to non-addressable complex variables into
5736 total stores. This allows us to use real instead of virtual operands
5737 for these variables, which improves optimization. */
5738 if ((TREE_CODE (*to_p) == REALPART_EXPR
5739 || TREE_CODE (*to_p) == IMAGPART_EXPR)
5740 && is_gimple_reg (TREE_OPERAND (*to_p, 0)))
5741 return gimplify_modify_expr_complex_part (expr_p, pre_p, want_value);
5742
5743 /* Try to alleviate the effects of the gimplification creating artificial
5744 temporaries (see for example is_gimple_reg_rhs) on the debug info, but
5745 make sure not to create DECL_DEBUG_EXPR links across functions. */
5746 if (!gimplify_ctxp->into_ssa
5747 && VAR_P (*from_p)
5748 && DECL_IGNORED_P (*from_p)
5749 && DECL_P (*to_p)
5750 && !DECL_IGNORED_P (*to_p)
5751 && decl_function_context (*to_p) == current_function_decl
5752 && decl_function_context (*from_p) == current_function_decl)
5753 {
5754 if (!DECL_NAME (*from_p) && DECL_NAME (*to_p))
5755 DECL_NAME (*from_p)
5756 = create_tmp_var_name (IDENTIFIER_POINTER (DECL_NAME (*to_p)));
5757 DECL_HAS_DEBUG_EXPR_P (*from_p) = 1;
5758 SET_DECL_DEBUG_EXPR (*from_p, *to_p);
5759 }
5760
5761 if (want_value && TREE_THIS_VOLATILE (*to_p))
5762 *from_p = get_initialized_tmp_var (*from_p, pre_p, post_p);
5763
5764 if (TREE_CODE (*from_p) == CALL_EXPR)
5765 {
5766 /* Since the RHS is a CALL_EXPR, we need to create a GIMPLE_CALL
5767 instead of a GIMPLE_ASSIGN. */
5768 gcall *call_stmt;
5769 if (CALL_EXPR_FN (*from_p) == NULL_TREE)
5770 {
5771 /* Gimplify internal functions created in the FEs. */
5772 int nargs = call_expr_nargs (*from_p), i;
5773 enum internal_fn ifn = CALL_EXPR_IFN (*from_p);
5774 auto_vec<tree> vargs (nargs);
5775
5776 for (i = 0; i < nargs; i++)
5777 {
5778 gimplify_arg (&CALL_EXPR_ARG (*from_p, i), pre_p,
5779 EXPR_LOCATION (*from_p));
5780 vargs.quick_push (CALL_EXPR_ARG (*from_p, i));
5781 }
5782 call_stmt = gimple_build_call_internal_vec (ifn, vargs);
5783 gimple_call_set_nothrow (call_stmt, TREE_NOTHROW (*from_p));
5784 gimple_set_location (call_stmt, EXPR_LOCATION (*expr_p));
5785 }
5786 else
5787 {
5788 tree fnptrtype = TREE_TYPE (CALL_EXPR_FN (*from_p));
5789 CALL_EXPR_FN (*from_p) = TREE_OPERAND (CALL_EXPR_FN (*from_p), 0);
5790 STRIP_USELESS_TYPE_CONVERSION (CALL_EXPR_FN (*from_p));
5791 tree fndecl = get_callee_fndecl (*from_p);
5792 if (fndecl
5793 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
5794 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
5795 && call_expr_nargs (*from_p) == 3)
5796 call_stmt = gimple_build_call_internal (IFN_BUILTIN_EXPECT, 3,
5797 CALL_EXPR_ARG (*from_p, 0),
5798 CALL_EXPR_ARG (*from_p, 1),
5799 CALL_EXPR_ARG (*from_p, 2));
5800 else
5801 {
5802 call_stmt = gimple_build_call_from_tree (*from_p, fnptrtype);
5803 }
5804 }
5805 notice_special_calls (call_stmt);
5806 if (!gimple_call_noreturn_p (call_stmt) || !should_remove_lhs_p (*to_p))
5807 gimple_call_set_lhs (call_stmt, *to_p);
5808 else if (TREE_CODE (*to_p) == SSA_NAME)
5809 /* The above is somewhat premature, avoid ICEing later for a
5810 SSA name w/o a definition. We may have uses in the GIMPLE IL.
5811 ??? This doesn't make it a default-def. */
5812 SSA_NAME_DEF_STMT (*to_p) = gimple_build_nop ();
5813
5814 assign = call_stmt;
5815 }
5816 else
5817 {
5818 assign = gimple_build_assign (*to_p, *from_p);
5819 gimple_set_location (assign, EXPR_LOCATION (*expr_p));
5820 if (COMPARISON_CLASS_P (*from_p))
5821 gimple_set_no_warning (assign, TREE_NO_WARNING (*from_p));
5822 }
5823
5824 if (gimplify_ctxp->into_ssa && is_gimple_reg (*to_p))
5825 {
5826 /* We should have got an SSA name from the start. */
5827 gcc_assert (TREE_CODE (*to_p) == SSA_NAME
5828 || ! gimple_in_ssa_p (cfun));
5829 }
5830
5831 gimplify_seq_add_stmt (pre_p, assign);
5832 gsi = gsi_last (*pre_p);
5833 maybe_fold_stmt (&gsi);
5834
5835 if (want_value)
5836 {
5837 *expr_p = TREE_THIS_VOLATILE (*to_p) ? *from_p : unshare_expr (*to_p);
5838 return GS_OK;
5839 }
5840 else
5841 *expr_p = NULL;
5842
5843 return GS_ALL_DONE;
5844 }
5845
5846 /* Gimplify a comparison between two variable-sized objects. Do this
5847 with a call to BUILT_IN_MEMCMP. */
5848
5849 static enum gimplify_status
gimplify_variable_sized_compare(tree * expr_p)5850 gimplify_variable_sized_compare (tree *expr_p)
5851 {
5852 location_t loc = EXPR_LOCATION (*expr_p);
5853 tree op0 = TREE_OPERAND (*expr_p, 0);
5854 tree op1 = TREE_OPERAND (*expr_p, 1);
5855 tree t, arg, dest, src, expr;
5856
5857 arg = TYPE_SIZE_UNIT (TREE_TYPE (op0));
5858 arg = unshare_expr (arg);
5859 arg = SUBSTITUTE_PLACEHOLDER_IN_EXPR (arg, op0);
5860 src = build_fold_addr_expr_loc (loc, op1);
5861 dest = build_fold_addr_expr_loc (loc, op0);
5862 t = builtin_decl_implicit (BUILT_IN_MEMCMP);
5863 t = build_call_expr_loc (loc, t, 3, dest, src, arg);
5864
5865 expr
5866 = build2 (TREE_CODE (*expr_p), TREE_TYPE (*expr_p), t, integer_zero_node);
5867 SET_EXPR_LOCATION (expr, loc);
5868 *expr_p = expr;
5869
5870 return GS_OK;
5871 }
5872
5873 /* Gimplify a comparison between two aggregate objects of integral scalar
5874 mode as a comparison between the bitwise equivalent scalar values. */
5875
5876 static enum gimplify_status
gimplify_scalar_mode_aggregate_compare(tree * expr_p)5877 gimplify_scalar_mode_aggregate_compare (tree *expr_p)
5878 {
5879 location_t loc = EXPR_LOCATION (*expr_p);
5880 tree op0 = TREE_OPERAND (*expr_p, 0);
5881 tree op1 = TREE_OPERAND (*expr_p, 1);
5882
5883 tree type = TREE_TYPE (op0);
5884 tree scalar_type = lang_hooks.types.type_for_mode (TYPE_MODE (type), 1);
5885
5886 op0 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op0);
5887 op1 = fold_build1_loc (loc, VIEW_CONVERT_EXPR, scalar_type, op1);
5888
5889 *expr_p
5890 = fold_build2_loc (loc, TREE_CODE (*expr_p), TREE_TYPE (*expr_p), op0, op1);
5891
5892 return GS_OK;
5893 }
5894
5895 /* Gimplify an expression sequence. This function gimplifies each
5896 expression and rewrites the original expression with the last
5897 expression of the sequence in GIMPLE form.
5898
5899 PRE_P points to the list where the side effects for all the
5900 expressions in the sequence will be emitted.
5901
5902 WANT_VALUE is true when the result of the last COMPOUND_EXPR is used. */
5903
5904 static enum gimplify_status
gimplify_compound_expr(tree * expr_p,gimple_seq * pre_p,bool want_value)5905 gimplify_compound_expr (tree *expr_p, gimple_seq *pre_p, bool want_value)
5906 {
5907 tree t = *expr_p;
5908
5909 do
5910 {
5911 tree *sub_p = &TREE_OPERAND (t, 0);
5912
5913 if (TREE_CODE (*sub_p) == COMPOUND_EXPR)
5914 gimplify_compound_expr (sub_p, pre_p, false);
5915 else
5916 gimplify_stmt (sub_p, pre_p);
5917
5918 t = TREE_OPERAND (t, 1);
5919 }
5920 while (TREE_CODE (t) == COMPOUND_EXPR);
5921
5922 *expr_p = t;
5923 if (want_value)
5924 return GS_OK;
5925 else
5926 {
5927 gimplify_stmt (expr_p, pre_p);
5928 return GS_ALL_DONE;
5929 }
5930 }
5931
5932 /* Gimplify a SAVE_EXPR node. EXPR_P points to the expression to
5933 gimplify. After gimplification, EXPR_P will point to a new temporary
5934 that holds the original value of the SAVE_EXPR node.
5935
5936 PRE_P points to the list where side effects that must happen before
5937 *EXPR_P should be stored. */
5938
5939 static enum gimplify_status
gimplify_save_expr(tree * expr_p,gimple_seq * pre_p,gimple_seq * post_p)5940 gimplify_save_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
5941 {
5942 enum gimplify_status ret = GS_ALL_DONE;
5943 tree val;
5944
5945 gcc_assert (TREE_CODE (*expr_p) == SAVE_EXPR);
5946 val = TREE_OPERAND (*expr_p, 0);
5947
5948 /* If the SAVE_EXPR has not been resolved, then evaluate it once. */
5949 if (!SAVE_EXPR_RESOLVED_P (*expr_p))
5950 {
5951 /* The operand may be a void-valued expression. It is
5952 being executed only for its side-effects. */
5953 if (TREE_TYPE (val) == void_type_node)
5954 {
5955 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
5956 is_gimple_stmt, fb_none);
5957 val = NULL;
5958 }
5959 else
5960 /* The temporary may not be an SSA name as later abnormal and EH
5961 control flow may invalidate use/def domination. When in SSA
5962 form then assume there are no such issues and SAVE_EXPRs only
5963 appear via GENERIC foldings. */
5964 val = get_initialized_tmp_var (val, pre_p, post_p,
5965 gimple_in_ssa_p (cfun));
5966
5967 TREE_OPERAND (*expr_p, 0) = val;
5968 SAVE_EXPR_RESOLVED_P (*expr_p) = 1;
5969 }
5970
5971 *expr_p = val;
5972
5973 return ret;
5974 }
5975
/* Rewrite the ADDR_EXPR node pointed to by EXPR_P

	unary_expr
		: ...
		| '&' varname
		...

    PRE_P points to the list where side effects that must happen before
	*EXPR_P should be stored.

    POST_P points to the list where side effects that must happen after
	*EXPR_P should be stored.

    Returns GS_OK when the expression was rewritten and must be looked at
    again, or GS_ERROR if gimplifying the operand failed.  */

static enum gimplify_status
gimplify_addr_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  tree expr = *expr_p;
  tree op0 = TREE_OPERAND (expr, 0);
  enum gimplify_status ret;
  location_t loc = EXPR_LOCATION (*expr_p);

  switch (TREE_CODE (op0))
    {
    case INDIRECT_REF:
    do_indirect_ref:
      /* Check if we are dealing with an expression of the form '&*ptr'.
	 While the front end folds away '&*ptr' into 'ptr', these
	 expressions may be generated internally by the compiler (e.g.,
	 builtins like __builtin_va_end).  */
      /* Caution: the silent array decomposition semantics we allow for
	 ADDR_EXPR means we can't always discard the pair.  */
      /* Gimplification of the ADDR_EXPR operand may drop
	 cv-qualification conversions, so make sure we add them if
	 needed.  */
      {
	tree op00 = TREE_OPERAND (op0, 0);
	tree t_expr = TREE_TYPE (expr);
	tree t_op00 = TREE_TYPE (op00);

	/* '&*ptr' collapses to 'ptr', with a conversion if the pointer
	   types differ in a way that matters.  */
	if (!useless_type_conversion_p (t_expr, t_op00))
	  op00 = fold_convert_loc (loc, TREE_TYPE (expr), op00);
	*expr_p = op00;
	ret = GS_OK;
      }
      break;

    case VIEW_CONVERT_EXPR:
      /* Take the address of our operand and then convert it to the type of
	 this ADDR_EXPR.

	 ??? The interactions of VIEW_CONVERT_EXPR and aliasing is not at
	 all clear.  The impact of this transformation is even less clear.  */

      /* If the operand is a useless conversion, look through it.  Doing so
	 guarantees that the ADDR_EXPR and its operand will remain of the
	 same type.  */
      if (tree_ssa_useless_type_conversion (TREE_OPERAND (op0, 0)))
	op0 = TREE_OPERAND (op0, 0);

      /* &VIEW_CONVERT_EXPR<T>(x) becomes (T *) &x.  */
      *expr_p = fold_convert_loc (loc, TREE_TYPE (expr),
				  build_fold_addr_expr_loc (loc,
							TREE_OPERAND (op0, 0)));
      ret = GS_OK;
      break;

    case MEM_REF:
      /* A MEM_REF with a zero offset is equivalent to an INDIRECT_REF
	 for the purposes of '&*ptr' folding above.  */
      if (integer_zerop (TREE_OPERAND (op0, 1)))
	goto do_indirect_ref;

      /* fall through */

    default:
      /* If we see a call to a declared builtin or see its address
	 being taken (we can unify those cases here) then we can mark
	 the builtin for implicit generation by GCC.  */
      if (TREE_CODE (op0) == FUNCTION_DECL
	  && DECL_BUILT_IN_CLASS (op0) == BUILT_IN_NORMAL
	  && builtin_decl_declared_p (DECL_FUNCTION_CODE (op0)))
	set_builtin_decl_implicit_p (DECL_FUNCTION_CODE (op0), true);

      /* We use fb_either here because the C frontend sometimes takes
	 the address of a call that returns a struct; see
	 gcc.dg/c99-array-lval-1.c.  The gimplifier will correctly make
	 the implied temporary explicit.  */

      /* Make the operand addressable.  */
      ret = gimplify_expr (&TREE_OPERAND (expr, 0), pre_p, post_p,
			   is_gimple_addressable, fb_either);
      if (ret == GS_ERROR)
	break;

      /* Then mark it.  Beware that it may not be possible to do so directly
	 if a temporary has been created by the gimplification.  */
      prepare_gimple_addressable (&TREE_OPERAND (expr, 0), pre_p);

      op0 = TREE_OPERAND (expr, 0);

      /* For various reasons, the gimplification of the expression
	 may have made a new INDIRECT_REF.  */
      if (TREE_CODE (op0) == INDIRECT_REF)
	goto do_indirect_ref;

      mark_addressable (TREE_OPERAND (expr, 0));

      /* The FEs may end up building ADDR_EXPRs early on a decl with
	 an incomplete type.  Re-build ADDR_EXPRs in canonical form
	 here.  */
      if (!types_compatible_p (TREE_TYPE (op0), TREE_TYPE (TREE_TYPE (expr))))
	*expr_p = build_fold_addr_expr (op0);

      /* Make sure TREE_CONSTANT and TREE_SIDE_EFFECTS are set properly.  */
      recompute_tree_invariant_for_addr_expr (*expr_p);

      /* If we re-built the ADDR_EXPR add a conversion to the original type
	 if required.  */
      if (!useless_type_conversion_p (TREE_TYPE (expr), TREE_TYPE (*expr_p)))
	*expr_p = fold_convert (TREE_TYPE (expr), *expr_p);

      break;
    }

  return ret;
}
6099
/* Gimplify the operands of an ASM_EXPR.  Input operands should be a gimple
   value; output operands should be a gimple lvalue.  On success a
   GIMPLE_ASM is appended to PRE_P; on any operand error GS_ERROR is
   returned and no statement is emitted.  */

static enum gimplify_status
gimplify_asm_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  tree expr;
  int noutputs;
  const char **oconstraints;
  int i;
  tree link;
  const char *constraint;
  bool allows_mem, allows_reg, is_inout;
  enum gimplify_status ret, tret;
  gasm *stmt;
  vec<tree, va_gc> *inputs;
  vec<tree, va_gc> *outputs;
  vec<tree, va_gc> *clobbers;
  vec<tree, va_gc> *labels;
  tree link_next;

  expr = *expr_p;
  noutputs = list_length (ASM_OUTPUTS (expr));
  oconstraints = (const char **) alloca ((noutputs) * sizeof (const char *));

  inputs = NULL;
  outputs = NULL;
  clobbers = NULL;
  labels = NULL;

  ret = GS_ALL_DONE;
  link_next = NULL_TREE;
  /* Process output operands: validate constraints, gimplify each operand
     to an lvalue, and split "+" (in/out) operands into separate output
     and matching input operands.  I keeps counting across all operand
     lists so diagnostics use the overall operand number.  */
  for (i = 0, link = ASM_OUTPUTS (expr); link; ++i, link = link_next)
    {
      bool ok;
      size_t constraint_len;

      link_next = TREE_CHAIN (link);

      oconstraints[i]
	= constraint
	= TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      constraint_len = strlen (constraint);
      if (constraint_len == 0)
	continue;

      ok = parse_output_constraint (&constraint, i, 0, 0,
				    &allows_mem, &allows_reg, &is_inout);
      if (!ok)
	{
	  ret = GS_ERROR;
	  is_inout = false;
	}

      /* If we can't make copies, we can only accept memory.  */
      if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (link))))
	{
	  if (allows_mem)
	    allows_reg = 0;
	  else
	    {
	      error ("impossible constraint in %<asm%>");
	      error ("non-memory output %d must stay in memory", i);
	      return GS_ERROR;
	    }
	}

      if (!allows_reg && allows_mem)
	mark_addressable (TREE_VALUE (link));

      tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
			    is_inout ? is_gimple_min_lval : is_gimple_lvalue,
			    fb_lvalue | fb_mayfail);
      if (tret == GS_ERROR)
	{
	  error ("invalid lvalue in asm output %d", i);
	  ret = tret;
	}

      /* If the constraint does not allow memory make sure we gimplify
         it to a register if it is not already but its base is.  This
	 happens for complex and vector components.  */
      if (!allows_mem)
	{
	  tree op = TREE_VALUE (link);
	  if (! is_gimple_val (op)
	      && is_gimple_reg_type (TREE_TYPE (op))
	      && is_gimple_reg (get_base_address (op)))
	    {
	      tree tem = create_tmp_reg (TREE_TYPE (op));
	      tree ass;
	      /* For an in/out operand, preload the temporary with the
		 current value before the asm; always copy it back after.  */
	      if (is_inout)
		{
		  ass = build2 (MODIFY_EXPR, TREE_TYPE (tem),
				tem, unshare_expr (op));
		  gimplify_and_add (ass, pre_p);
		}
	      ass = build2 (MODIFY_EXPR, TREE_TYPE (tem), op, tem);
	      gimplify_and_add (ass, post_p);

	      TREE_VALUE (link) = tem;
	      tret = GS_OK;
	    }
	}

      vec_safe_push (outputs, link);
      TREE_CHAIN (link) = NULL_TREE;

      if (is_inout)
	{
	  /* An input/output operand.  To give the optimizers more
	     flexibility, split it into separate input and output
	     operands.  */
	  tree input;
	  /* Buffer big enough to format a 32-bit UINT_MAX into.  */
	  char buf[11];

	  /* Turn the in/out constraint into an output constraint.  */
	  char *p = xstrdup (constraint);
	  p[0] = '=';
	  TREE_VALUE (TREE_PURPOSE (link)) = build_string (constraint_len, p);

	  /* And add a matching input constraint.  */
	  if (allows_reg)
	    {
	      sprintf (buf, "%u", i);

	      /* If there are multiple alternatives in the constraint,
		 handle each of them individually.  Those that allow register
		 will be replaced with operand number, the others will stay
		 unchanged.  */
	      if (strchr (p, ',') != NULL)
		{
		  size_t len = 0, buflen = strlen (buf);
		  char *beg, *end, *str, *dst;

		  /* First pass: compute an upper bound on the length of
		     the rewritten constraint string.  */
		  for (beg = p + 1;;)
		    {
		      end = strchr (beg, ',');
		      if (end == NULL)
			end = strchr (beg, '\0');
		      if ((size_t) (end - beg) < buflen)
			len += buflen + 1;
		      else
			len += end - beg + 1;
		      if (*end)
			beg = end + 1;
		      else
			break;
		    }

		  /* Second pass: build the new constraint, substituting
		     the operand number for register alternatives.  */
		  str = (char *) alloca (len);
		  for (beg = p + 1, dst = str;;)
		    {
		      const char *tem;
		      bool mem_p, reg_p, inout_p;

		      end = strchr (beg, ',');
		      if (end)
			*end = '\0';
		      beg[-1] = '=';
		      tem = beg - 1;
		      parse_output_constraint (&tem, i, 0, 0,
					       &mem_p, &reg_p, &inout_p);
		      if (dst != str)
			*dst++ = ',';
		      if (reg_p)
			{
			  memcpy (dst, buf, buflen);
			  dst += buflen;
			}
		      else
			{
			  if (end)
			    len = end - beg;
			  else
			    len = strlen (beg);
			  memcpy (dst, beg, len);
			  dst += len;
			}
		      if (end)
			beg = end + 1;
		      else
			break;
		    }
		  *dst = '\0';
		  input = build_string (dst - str, str);
		}
	      else
		input = build_string (strlen (buf), buf);
	    }
	  else
	    /* Memory-only in/out: the input constraint is the output
	       constraint minus the leading '='.  */
	    input = build_string (constraint_len - 1, constraint + 1);

	  free (p);

	  input = build_tree_list (build_tree_list (NULL_TREE, input),
				   unshare_expr (TREE_VALUE (link)));
	  ASM_INPUTS (expr) = chainon (ASM_INPUTS (expr), input);
	}
    }

  link_next = NULL_TREE;
  /* Process input operands, including any matching inputs added above.  */
  for (link = ASM_INPUTS (expr); link; ++i, link = link_next)
    {
      link_next = TREE_CHAIN (link);
      constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
      parse_input_constraint (&constraint, 0, 0, noutputs, 0,
			      oconstraints, &allows_mem, &allows_reg);

      /* If we can't make copies, we can only accept memory.  */
      if (TREE_ADDRESSABLE (TREE_TYPE (TREE_VALUE (link))))
	{
	  if (allows_mem)
	    allows_reg = 0;
	  else
	    {
	      error ("impossible constraint in %<asm%>");
	      error ("non-memory input %d must stay in memory", i);
	      return GS_ERROR;
	    }
	}

      /* If the operand is a memory input, it should be an lvalue.  */
      if (!allows_reg && allows_mem)
	{
	  tree inputv = TREE_VALUE (link);
	  STRIP_NOPS (inputv);
	  /* Side-effecting rvalues cannot serve as memory operands.  */
	  if (TREE_CODE (inputv) == PREDECREMENT_EXPR
	      || TREE_CODE (inputv) == PREINCREMENT_EXPR
	      || TREE_CODE (inputv) == POSTDECREMENT_EXPR
	      || TREE_CODE (inputv) == POSTINCREMENT_EXPR
	      || TREE_CODE (inputv) == MODIFY_EXPR)
	    TREE_VALUE (link) = error_mark_node;
	  tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
				is_gimple_lvalue, fb_lvalue | fb_mayfail);
	  if (tret != GS_ERROR)
	    {
	      /* Unlike output operands, memory inputs are not guaranteed
		 to be lvalues by the FE, and while the expressions are
		 marked addressable there, if it is e.g. a statement
		 expression, temporaries in it might not end up being
		 addressable.  They might be already used in the IL and thus
		 it is too late to make them addressable now though.  */
	      tree x = TREE_VALUE (link);
	      while (handled_component_p (x))
		x = TREE_OPERAND (x, 0);
	      if (TREE_CODE (x) == MEM_REF
		  && TREE_CODE (TREE_OPERAND (x, 0)) == ADDR_EXPR)
		x = TREE_OPERAND (TREE_OPERAND (x, 0), 0);
	      if ((VAR_P (x)
		   || TREE_CODE (x) == PARM_DECL
		   || TREE_CODE (x) == RESULT_DECL)
		  && !TREE_ADDRESSABLE (x)
		  && is_gimple_reg (x))
		{
		  warning_at (EXPR_LOC_OR_LOC (TREE_VALUE (link),
					       input_location), 0,
			      "memory input %d is not directly addressable",
			      i);
		  prepare_gimple_addressable (&TREE_VALUE (link), pre_p);
		}
	    }
	  mark_addressable (TREE_VALUE (link));
	  if (tret == GS_ERROR)
	    {
	      error_at (EXPR_LOC_OR_LOC (TREE_VALUE (link), input_location),
			"memory input %d is not directly addressable", i);
	      ret = tret;
	    }
	}
      else
	{
	  tret = gimplify_expr (&TREE_VALUE (link), pre_p, post_p,
				is_gimple_asm_val, fb_rvalue);
	  if (tret == GS_ERROR)
	    ret = tret;
	}

      TREE_CHAIN (link) = NULL_TREE;
      vec_safe_push (inputs, link);
    }

  /* Clobbers and labels need no gimplification; just collect them.  */
  link_next = NULL_TREE;
  for (link = ASM_CLOBBERS (expr); link; ++i, link = link_next)
    {
      link_next = TREE_CHAIN (link);
      TREE_CHAIN (link) = NULL_TREE;
      vec_safe_push (clobbers, link);
    }

  link_next = NULL_TREE;
  for (link = ASM_LABELS (expr); link; ++i, link = link_next)
    {
      link_next = TREE_CHAIN (link);
      TREE_CHAIN (link) = NULL_TREE;
      vec_safe_push (labels, link);
    }

  /* Do not add ASMs with errors to the gimple IL stream.  */
  if (ret != GS_ERROR)
    {
      stmt = gimple_build_asm_vec (TREE_STRING_POINTER (ASM_STRING (expr)),
				   inputs, outputs, clobbers, labels);

      /* An asm without outputs is implicitly volatile.  */
      gimple_asm_set_volatile (stmt, ASM_VOLATILE_P (expr) || noutputs == 0);
      gimple_asm_set_input (stmt, ASM_INPUT_P (expr));
      gimple_asm_set_inline (stmt, ASM_INLINE_P (expr));

      gimplify_seq_add_stmt (pre_p, stmt);
    }

  return ret;
}
6414
/* Gimplify a CLEANUP_POINT_EXPR.  Currently this works by adding
   GIMPLE_WITH_CLEANUP_EXPRs to the prequeue as we encounter cleanups while
   gimplifying the body, and converting them to TRY_FINALLY_EXPRs when we
   return to this function.

   FIXME should we complexify the prequeue handling instead?  Or use flags
   for all the cleanups and let the optimizer tighten them up?  The current
   code seems pretty fragile; it will break on a cleanup within any
   non-conditional nesting.  But any such nesting would be broken, anyway;
   we can't write a TRY_FINALLY_EXPR that starts inside a nesting construct
   and continues out of it.  We can do that at the RTL level, though, so
   having an optimizer to tighten up try/finally regions would be a Good
   Thing.  */

static enum gimplify_status
gimplify_cleanup_point_expr (tree *expr_p, gimple_seq *pre_p)
{
  gimple_stmt_iterator iter;
  gimple_seq body_sequence = NULL;

  /* If the wrapped expression produces a value, TEMP is the variable
     that receives it; the wrapper body is rewritten to assign into it.  */
  tree temp = voidify_wrapper_expr (*expr_p, NULL);

  /* We only care about the number of conditions between the innermost
     CLEANUP_POINT_EXPR and the cleanup.  So save and reset the count and
     any cleanups collected outside the CLEANUP_POINT_EXPR.  */
  int old_conds = gimplify_ctxp->conditions;
  gimple_seq old_cleanups = gimplify_ctxp->conditional_cleanups;
  bool old_in_cleanup_point_expr = gimplify_ctxp->in_cleanup_point_expr;
  gimplify_ctxp->conditions = 0;
  gimplify_ctxp->conditional_cleanups = NULL;
  gimplify_ctxp->in_cleanup_point_expr = true;

  gimplify_stmt (&TREE_OPERAND (*expr_p, 0), &body_sequence);

  /* Restore the saved gimplification-context state.  */
  gimplify_ctxp->conditions = old_conds;
  gimplify_ctxp->conditional_cleanups = old_cleanups;
  gimplify_ctxp->in_cleanup_point_expr = old_in_cleanup_point_expr;

  /* Convert each GIMPLE_WITH_CLEANUP_EXPR in BODY_SEQUENCE into a
     GIMPLE_TRY protecting all the statements that follow it.  */
  for (iter = gsi_start (body_sequence); !gsi_end_p (iter); )
    {
      gimple *wce = gsi_stmt (iter);

      if (gimple_code (wce) == GIMPLE_WITH_CLEANUP_EXPR)
	{
	  if (gsi_one_before_end_p (iter))
	    {
	      /* The cleanup marker is the last statement: there is
		 nothing left to protect, so inline the cleanup sequence
		 directly (or drop it entirely if it is EH-only).
		 Note that gsi_insert_seq_before and gsi_remove do not
		 scan operands, unlike some other sequence mutators.  */
	      if (!gimple_wce_cleanup_eh_only (wce))
		gsi_insert_seq_before_without_update (&iter,
						      gimple_wce_cleanup (wce),
						      GSI_SAME_STMT);
	      gsi_remove (&iter, true);
	      break;
	    }
	  else
	    {
	      gtry *gtry;
	      gimple_seq seq;
	      enum gimple_try_flags kind;

	      /* EH-only cleanups run only on the exception path.  */
	      if (gimple_wce_cleanup_eh_only (wce))
		kind = GIMPLE_TRY_CATCH;
	      else
		kind = GIMPLE_TRY_FINALLY;
	      seq = gsi_split_seq_after (iter);

	      gtry = gimple_build_try (seq, gimple_wce_cleanup (wce), kind);
	      /* Do not use gsi_replace here, as it may scan operands.
		 We want to do a simple structural modification only.  */
	      gsi_set_stmt (&iter, gtry);
	      iter = gsi_start (gtry->eval);
	    }
	}
      else
	gsi_next (&iter);
    }

  gimplify_seq_add_seq (pre_p, body_sequence);
  if (temp)
    {
      /* Hand the value temporary back for further gimplification.  */
      *expr_p = temp;
      return GS_OK;
    }
  else
    {
      *expr_p = NULL;
      return GS_ALL_DONE;
    }
}
6505
/* Insert a cleanup marker for gimplify_cleanup_point_expr.  CLEANUP
   is the cleanup action required.  EH_ONLY is true if the cleanup should
   only be executed if an exception is thrown, not on normal exit.
   If FORCE_UNCOND is true perform the cleanup unconditionally;  this is
   only valid for clobbers.  VAR is the variable the cleanup protects and
   is only used to suppress uninitialized-use warnings on it.  */

static void
gimple_push_cleanup (tree var, tree cleanup, bool eh_only, gimple_seq *pre_p,
		     bool force_uncond = false)
{
  gimple *wce;
  gimple_seq cleanup_stmts = NULL;

  /* Errors can result in improperly nested cleanups.  Which results in
     confusion when trying to resolve the GIMPLE_WITH_CLEANUP_EXPR.  */
  if (seen_error ())
    return;

  if (gimple_conditional_context ())
    {
      /* If we're in a conditional context, this is more complex.  We only
	 want to run the cleanup if we actually ran the initialization that
	 necessitates it, but we want to run it after the end of the
	 conditional context.  So we wrap the try/finally around the
	 condition and use a flag to determine whether or not to actually
	 run the destructor.  Thus

	   test ? f(A()) : 0

	 becomes (approximately)

	   flag = 0;
	   try {
	     if (test) { A::A(temp); flag = 1; val = f(temp); }
	     else { val = 0; }
	   } finally {
	     if (flag) A::~A(temp);
	   }
	   val
      */
      if (force_uncond)
	{
	  /* A clobber is harmless even if the initialization never ran,
	     so no guard flag is needed.  */
	  gimplify_stmt (&cleanup, &cleanup_stmts);
	  wce = gimple_build_wce (cleanup_stmts);
	  gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
	}
      else
	{
	  tree flag = create_tmp_var (boolean_type_node, "cleanup");
	  gassign *ffalse = gimple_build_assign (flag, boolean_false_node);
	  gassign *ftrue = gimple_build_assign (flag, boolean_true_node);

	  cleanup = build3 (COND_EXPR, void_type_node, flag, cleanup, NULL);
	  gimplify_stmt (&cleanup, &cleanup_stmts);
	  wce = gimple_build_wce (cleanup_stmts);

	  /* FLAG starts out false in the outer (unconditional) sequence
	     and is set to true in PRE_P, i.e. only on the path that
	     actually performed the initialization.  */
	  gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, ffalse);
	  gimplify_seq_add_stmt (&gimplify_ctxp->conditional_cleanups, wce);
	  gimplify_seq_add_stmt (pre_p, ftrue);

	  /* Because of this manipulation, and the EH edges that jump
	     threading cannot redirect, the temporary (VAR) will appear
	     to be used uninitialized.  Don't warn.  */
	  TREE_NO_WARNING (var) = 1;
	}
    }
  else
    {
      /* Unconditional context: emit the marker straight into PRE_P;
	 gimplify_cleanup_point_expr will later turn it into a
	 GIMPLE_TRY region.  */
      gimplify_stmt (&cleanup, &cleanup_stmts);
      wce = gimple_build_wce (cleanup_stmts);
      gimple_wce_set_cleanup_eh_only (wce, eh_only);
      gimplify_seq_add_stmt (pre_p, wce);
    }
}
6580
/* Gimplify a TARGET_EXPR which doesn't appear on the rhs of an INIT_EXPR.
   The initializer is emitted into PRE_P/POST_P, cleanups are registered
   via gimple_push_cleanup, and *EXPR_P is replaced by the slot variable.  */

static enum gimplify_status
gimplify_target_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
{
  tree targ = *expr_p;
  tree temp = TARGET_EXPR_SLOT (targ);
  tree init = TARGET_EXPR_INITIAL (targ);
  enum gimplify_status ret;

  /* Where to insert the asan unpoisoning call, remembered before the
     initializer is gimplified into *PRE_P.  */
  bool unpoison_empty_seq = false;
  gimple_stmt_iterator unpoison_it;

  if (init)
    {
      tree cleanup = NULL_TREE;

      /* TARGET_EXPR temps aren't part of the enclosing block, so add it
	 to the temps list.  Handle also variable length TARGET_EXPRs.  */
      if (TREE_CODE (DECL_SIZE (temp)) != INTEGER_CST)
	{
	  if (!TYPE_SIZES_GIMPLIFIED (TREE_TYPE (temp)))
	    gimplify_type_sizes (TREE_TYPE (temp), pre_p);
	  gimplify_vla_decl (temp, pre_p);
	}
      else
	{
	  /* Save location where we need to place unpoisoning.  It's possible
	     that a variable will be converted to needs_to_live_in_memory.  */
	  unpoison_it = gsi_last (*pre_p);
	  unpoison_empty_seq = gsi_end_p (unpoison_it);

	  gimple_add_tmp_var (temp);
	}

      /* If TARGET_EXPR_INITIAL is void, then the mere evaluation of the
	 expression is supposed to initialize the slot.  */
      if (VOID_TYPE_P (TREE_TYPE (init)))
	ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
      else
	{
	  /* Otherwise wrap it in TEMP = INIT and gimplify that; the
	     temporary INIT_EXPR node can be freed immediately.  */
	  tree init_expr = build2 (INIT_EXPR, void_type_node, temp, init);
	  init = init_expr;
	  ret = gimplify_expr (&init, pre_p, post_p, is_gimple_stmt, fb_none);
	  init = NULL;
	  ggc_free (init_expr);
	}
      if (ret == GS_ERROR)
	{
	  /* PR c++/28266 Make sure this is expanded only once. */
	  TARGET_EXPR_INITIAL (targ) = NULL_TREE;
	  return GS_ERROR;
	}
      if (init)
	gimplify_and_add (init, pre_p);

      /* If needed, push the cleanup for the temp.  */
      if (TARGET_EXPR_CLEANUP (targ))
	{
	  if (CLEANUP_EH_ONLY (targ))
	    gimple_push_cleanup (temp, TARGET_EXPR_CLEANUP (targ),
				 CLEANUP_EH_ONLY (targ), pre_p);
	  else
	    /* Defer pushing so it ends up after any clobber/asan
	       cleanups registered below.  */
	    cleanup = TARGET_EXPR_CLEANUP (targ);
	}

      /* Add a clobber for the temporary going out of scope, like
	 gimplify_bind_expr.  */
      if (gimplify_ctxp->in_cleanup_point_expr
	  && needs_to_live_in_memory (temp))
	{
	  if (flag_stack_reuse == SR_ALL)
	    {
	      /* An empty CONSTRUCTOR with TREE_THIS_VOLATILE set is the
		 GENERIC representation of a clobber.  */
	      tree clobber = build_constructor (TREE_TYPE (temp),
						NULL);
	      TREE_THIS_VOLATILE (clobber) = true;
	      clobber = build2 (MODIFY_EXPR, TREE_TYPE (temp), temp, clobber);
	      gimple_push_cleanup (temp, clobber, false, pre_p, true);
	    }
	  if (asan_poisoned_variables
	      && DECL_ALIGN (temp) <= MAX_SUPPORTED_STACK_ALIGNMENT
	      && !TREE_STATIC (temp)
	      && dbg_cnt (asan_use_after_scope)
	      && !gimplify_omp_ctxp)
	    {
	      tree asan_cleanup = build_asan_poison_call_expr (temp);
	      if (asan_cleanup)
		{
		  /* Unpoison before the initializer runs; poison again
		     via the pushed cleanup when TEMP goes out of scope.  */
		  if (unpoison_empty_seq)
		    unpoison_it = gsi_start (*pre_p);

		  asan_poison_variable (temp, false, &unpoison_it,
					unpoison_empty_seq);
		  gimple_push_cleanup (temp, asan_cleanup, false, pre_p);
		}
	    }
	}
      if (cleanup)
	gimple_push_cleanup (temp, cleanup, false, pre_p);

      /* Only expand this once.  */
      TREE_OPERAND (targ, 3) = init;
      TARGET_EXPR_INITIAL (targ) = NULL_TREE;
    }
  else
    /* We should have expanded this before.  */
    gcc_assert (DECL_SEEN_IN_BIND_EXPR_P (temp));

  *expr_p = temp;
  return GS_OK;
}
6692
6693 /* Gimplification of expression trees. */
6694
6695 /* Gimplify an expression which appears at statement context. The
6696 corresponding GIMPLE statements are added to *SEQ_P. If *SEQ_P is
6697 NULL, a new sequence is allocated.
6698
6699 Return true if we actually added a statement to the queue. */
6700
6701 bool
gimplify_stmt(tree * stmt_p,gimple_seq * seq_p)6702 gimplify_stmt (tree *stmt_p, gimple_seq *seq_p)
6703 {
6704 gimple_seq_node last;
6705
6706 last = gimple_seq_last (*seq_p);
6707 gimplify_expr (stmt_p, seq_p, NULL, is_gimple_stmt, fb_none);
6708 return last != gimple_seq_last (*seq_p);
6709 }
6710
6711 /* Add FIRSTPRIVATE entries for DECL in the OpenMP the surrounding parallels
6712 to CTX. If entries already exist, force them to be some flavor of private.
6713 If there is no enclosing parallel, do nothing. */
6714
6715 void
omp_firstprivatize_variable(struct gimplify_omp_ctx * ctx,tree decl)6716 omp_firstprivatize_variable (struct gimplify_omp_ctx *ctx, tree decl)
6717 {
6718 splay_tree_node n;
6719
6720 if (decl == NULL || !DECL_P (decl) || ctx->region_type == ORT_NONE)
6721 return;
6722
6723 do
6724 {
6725 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
6726 if (n != NULL)
6727 {
6728 if (n->value & GOVD_SHARED)
6729 n->value = GOVD_FIRSTPRIVATE | (n->value & GOVD_SEEN);
6730 else if (n->value & GOVD_MAP)
6731 n->value |= GOVD_MAP_TO_ONLY;
6732 else
6733 return;
6734 }
6735 else if ((ctx->region_type & ORT_TARGET) != 0)
6736 {
6737 if (ctx->target_map_scalars_firstprivate)
6738 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
6739 else
6740 omp_add_variable (ctx, decl, GOVD_MAP | GOVD_MAP_TO_ONLY);
6741 }
6742 else if (ctx->region_type != ORT_WORKSHARE
6743 && ctx->region_type != ORT_SIMD
6744 && ctx->region_type != ORT_ACC
6745 && !(ctx->region_type & ORT_TARGET_DATA))
6746 omp_add_variable (ctx, decl, GOVD_FIRSTPRIVATE);
6747
6748 ctx = ctx->outer_context;
6749 }
6750 while (ctx);
6751 }
6752
6753 /* Similarly for each of the type sizes of TYPE. */
6754
6755 static void
omp_firstprivatize_type_sizes(struct gimplify_omp_ctx * ctx,tree type)6756 omp_firstprivatize_type_sizes (struct gimplify_omp_ctx *ctx, tree type)
6757 {
6758 if (type == NULL || type == error_mark_node)
6759 return;
6760 type = TYPE_MAIN_VARIANT (type);
6761
6762 if (ctx->privatized_types->add (type))
6763 return;
6764
6765 switch (TREE_CODE (type))
6766 {
6767 case INTEGER_TYPE:
6768 case ENUMERAL_TYPE:
6769 case BOOLEAN_TYPE:
6770 case REAL_TYPE:
6771 case FIXED_POINT_TYPE:
6772 omp_firstprivatize_variable (ctx, TYPE_MIN_VALUE (type));
6773 omp_firstprivatize_variable (ctx, TYPE_MAX_VALUE (type));
6774 break;
6775
6776 case ARRAY_TYPE:
6777 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
6778 omp_firstprivatize_type_sizes (ctx, TYPE_DOMAIN (type));
6779 break;
6780
6781 case RECORD_TYPE:
6782 case UNION_TYPE:
6783 case QUAL_UNION_TYPE:
6784 {
6785 tree field;
6786 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
6787 if (TREE_CODE (field) == FIELD_DECL)
6788 {
6789 omp_firstprivatize_variable (ctx, DECL_FIELD_OFFSET (field));
6790 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (field));
6791 }
6792 }
6793 break;
6794
6795 case POINTER_TYPE:
6796 case REFERENCE_TYPE:
6797 omp_firstprivatize_type_sizes (ctx, TREE_TYPE (type));
6798 break;
6799
6800 default:
6801 break;
6802 }
6803
6804 omp_firstprivatize_variable (ctx, TYPE_SIZE (type));
6805 omp_firstprivatize_variable (ctx, TYPE_SIZE_UNIT (type));
6806 lang_hooks.types.omp_firstprivatize_type_sizes (ctx, type);
6807 }
6808
6809 /* Add an entry for DECL in the OMP context CTX with FLAGS. */
6810
static void
omp_add_variable (struct gimplify_omp_ctx *ctx, tree decl, unsigned int flags)
{
  splay_tree_node n;
  unsigned int nflags;
  tree t;

  if (error_operand_p (decl) || ctx->region_type == ORT_NONE)
    return;

  /* Never elide decls whose type has TREE_ADDRESSABLE set.  This means
     there are constructors involved somewhere.  Exception is a shared clause,
     there is nothing privatized in that case.  */
  if ((flags & GOVD_SHARED) == 0
      && (TREE_ADDRESSABLE (TREE_TYPE (decl))
	  || TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (decl))))
    flags |= GOVD_SEEN;

  n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
  if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
    {
      /* We shouldn't be re-adding the decl with the same data
	 sharing class.  */
      gcc_assert ((n->value & GOVD_DATA_SHARE_CLASS & flags) == 0);
      nflags = n->value | flags;
      /* The only combination of data sharing classes we should see is
	 FIRSTPRIVATE and LASTPRIVATE.  However, OpenACC permits
	 reduction variables to be used in data sharing clauses.  */
      gcc_assert ((ctx->region_type & ORT_ACC) != 0
		  || ((nflags & GOVD_DATA_SHARE_CLASS)
		      == (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE))
		  || (flags & GOVD_DATA_SHARE_CLASS) == 0);
      n->value = nflags;
      return;
    }

  /* When adding a variable-sized variable, we have to handle all sorts
     of additional bits of data: the pointer replacement variable, and
     the parameters of the type.  */
  if (DECL_SIZE (decl) && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
    {
      /* Add the pointer replacement variable as PRIVATE if the variable
	 replacement is private, else FIRSTPRIVATE since we'll need the
	 address of the original variable either for SHARED, or for the
	 copy into or out of the context.  */
      if (!(flags & GOVD_LOCAL))
	{
	  if (flags & GOVD_MAP)
	    nflags = GOVD_MAP | GOVD_MAP_TO_ONLY | GOVD_EXPLICIT;
	  else if (flags & GOVD_PRIVATE)
	    nflags = GOVD_PRIVATE;
	  else if ((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
		   && (flags & GOVD_FIRSTPRIVATE))
	    nflags = GOVD_PRIVATE | GOVD_EXPLICIT;
	  else
	    nflags = GOVD_FIRSTPRIVATE;
	  nflags |= flags & GOVD_SEEN;
	  /* The DECL_VALUE_EXPR of a variable-sized decl is expected to be
	     an INDIRECT_REF of the pointer replacement variable; recurse
	     to register that pointer with the flags computed above.  */
	  t = DECL_VALUE_EXPR (decl);
	  gcc_assert (TREE_CODE (t) == INDIRECT_REF);
	  t = TREE_OPERAND (t, 0);
	  gcc_assert (DECL_P (t));
	  omp_add_variable (ctx, t, nflags);
	}

      /* Add all of the variable and type parameters (which should have
	 been gimplified to a formal temporary) as FIRSTPRIVATE.  */
      omp_firstprivatize_variable (ctx, DECL_SIZE_UNIT (decl));
      omp_firstprivatize_variable (ctx, DECL_SIZE (decl));
      omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));

      /* The variable-sized variable itself is never SHARED, only some form
	 of PRIVATE.  The sharing would take place via the pointer variable
	 which we remapped above.  */
      if (flags & GOVD_SHARED)
	flags = GOVD_SHARED | GOVD_DEBUG_PRIVATE
		| (flags & (GOVD_SEEN | GOVD_EXPLICIT));

      /* We're going to make use of the TYPE_SIZE_UNIT at least in the
	 alloca statement we generate for the variable, so make sure it
	 is available.  This isn't automatically needed for the SHARED
	 case, since we won't be allocating local storage then.
	 For local variables TYPE_SIZE_UNIT might not be gimplified yet,
	 in this case omp_notice_variable will be called later
	 on when it is gimplified.  */
      else if (! (flags & (GOVD_LOCAL | GOVD_MAP))
	       && DECL_P (TYPE_SIZE_UNIT (TREE_TYPE (decl))))
	omp_notice_variable (ctx, TYPE_SIZE_UNIT (TREE_TYPE (decl)), true);
    }
  else if ((flags & (GOVD_MAP | GOVD_LOCAL)) == 0
	   && lang_hooks.decls.omp_privatize_by_reference (decl))
    {
      omp_firstprivatize_type_sizes (ctx, TREE_TYPE (decl));

      /* Similar to the direct variable sized case above, we'll need the
	 size of references being privatized.  */
      if ((flags & GOVD_SHARED) == 0)
	{
	  t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
	  if (DECL_P (t))
	    omp_notice_variable (ctx, t, true);
	}
    }

  /* Record (or update) the flags for DECL in this context.  */
  if (n != NULL)
    n->value |= flags;
  else
    splay_tree_insert (ctx->variables, (splay_tree_key)decl, flags);

  /* For reductions clauses in OpenACC loop directives, by default create a
     copy clause on the enclosing parallel construct for carrying back the
     results.  */
  if (ctx->region_type == ORT_ACC && (flags & GOVD_REDUCTION))
    {
      struct gimplify_omp_ctx *outer_ctx = ctx->outer_context;
      while (outer_ctx)
	{
	  n = splay_tree_lookup (outer_ctx->variables, (splay_tree_key)decl);
	  if (n != NULL)
	    {
	      /* Ignore local variables and explicitly declared clauses.  */
	      if (n->value & (GOVD_LOCAL | GOVD_EXPLICIT))
		break;
	      else if (outer_ctx->region_type == ORT_ACC_KERNELS)
		{
		  /* According to the OpenACC spec, such a reduction variable
		     should already have a copy map on a kernels construct,
		     verify that here.  */
		  gcc_assert (!(n->value & GOVD_FIRSTPRIVATE)
			      && (n->value & GOVD_MAP));
		}
	      else if (outer_ctx->region_type == ORT_ACC_PARALLEL)
		{
		  /* Remove firstprivate and make it a copy map.  */
		  n->value &= ~GOVD_FIRSTPRIVATE;
		  n->value |= GOVD_MAP;
		}
	    }
	  else if (outer_ctx->region_type == ORT_ACC_PARALLEL)
	    {
	      splay_tree_insert (outer_ctx->variables, (splay_tree_key)decl,
				 GOVD_MAP | GOVD_SEEN);
	      break;
	    }
	  outer_ctx = outer_ctx->outer_context;
	}
    }
}
6958
6959 /* Notice a threadprivate variable DECL used in OMP context CTX.
6960 This just prints out diagnostics about threadprivate variable uses
6961 in untied tasks. If DECL2 is non-NULL, prevent this warning
6962 on that variable. */
6963
6964 static bool
omp_notice_threadprivate_variable(struct gimplify_omp_ctx * ctx,tree decl,tree decl2)6965 omp_notice_threadprivate_variable (struct gimplify_omp_ctx *ctx, tree decl,
6966 tree decl2)
6967 {
6968 splay_tree_node n;
6969 struct gimplify_omp_ctx *octx;
6970
6971 for (octx = ctx; octx; octx = octx->outer_context)
6972 if ((octx->region_type & ORT_TARGET) != 0)
6973 {
6974 n = splay_tree_lookup (octx->variables, (splay_tree_key)decl);
6975 if (n == NULL)
6976 {
6977 error ("threadprivate variable %qE used in target region",
6978 DECL_NAME (decl));
6979 error_at (octx->location, "enclosing target region");
6980 splay_tree_insert (octx->variables, (splay_tree_key)decl, 0);
6981 }
6982 if (decl2)
6983 splay_tree_insert (octx->variables, (splay_tree_key)decl2, 0);
6984 }
6985
6986 if (ctx->region_type != ORT_UNTIED_TASK)
6987 return false;
6988 n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
6989 if (n == NULL)
6990 {
6991 error ("threadprivate variable %qE used in untied task",
6992 DECL_NAME (decl));
6993 error_at (ctx->location, "enclosing task");
6994 splay_tree_insert (ctx->variables, (splay_tree_key)decl, 0);
6995 }
6996 if (decl2)
6997 splay_tree_insert (ctx->variables, (splay_tree_key)decl2, 0);
6998 return false;
6999 }
7000
7001 /* Return true if global var DECL is device resident. */
7002
7003 static bool
device_resident_p(tree decl)7004 device_resident_p (tree decl)
7005 {
7006 tree attr = lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (decl));
7007
7008 if (!attr)
7009 return false;
7010
7011 for (tree t = TREE_VALUE (attr); t; t = TREE_PURPOSE (t))
7012 {
7013 tree c = TREE_VALUE (t);
7014 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_DEVICE_RESIDENT)
7015 return true;
7016 }
7017
7018 return false;
7019 }
7020
7021 /* Return true if DECL has an ACC DECLARE attribute. */
7022
7023 static bool
is_oacc_declared(tree decl)7024 is_oacc_declared (tree decl)
7025 {
7026 tree t = TREE_CODE (decl) == MEM_REF ? TREE_OPERAND (decl, 0) : decl;
7027 tree declared = lookup_attribute ("oacc declare target", DECL_ATTRIBUTES (t));
7028 return declared != NULL_TREE;
7029 }
7030
7031 /* Determine outer default flags for DECL mentioned in an OMP region
7032 but not declared in an enclosing clause.
7033
7034 ??? Some compiler-generated variables (like SAVE_EXPRs) could be
7035 remapped firstprivate instead of shared. To some extent this is
7036 addressed in omp_firstprivatize_type_sizes, but not
7037 effectively. */
7038
static unsigned
omp_default_clause (struct gimplify_omp_ctx *ctx, tree decl,
		    bool in_code, unsigned flags)
{
  enum omp_clause_default_kind default_kind = ctx->default_kind;
  enum omp_clause_default_kind kind;

  /* A front-end-predetermined sharing overrides the region's default
     clause.  */
  kind = lang_hooks.decls.omp_predetermined_sharing (decl);
  if (kind != OMP_CLAUSE_DEFAULT_UNSPECIFIED)
    default_kind = kind;

  switch (default_kind)
    {
    case OMP_CLAUSE_DEFAULT_NONE:
      {
	const char *rtype;

	if (ctx->region_type & ORT_PARALLEL)
	  rtype = "parallel";
	else if (ctx->region_type & ORT_TASK)
	  rtype = "task";
	else if (ctx->region_type & ORT_TEAMS)
	  rtype = "teams";
	else
	  gcc_unreachable ();

	error ("%qE not specified in enclosing %qs",
	       DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rtype);
	error_at (ctx->location, "enclosing %qs", rtype);
      }
      /* After diagnosing, fall through and treat the decl as shared so
	 compilation can continue.  */
      /* FALLTHRU */
    case OMP_CLAUSE_DEFAULT_SHARED:
      flags |= GOVD_SHARED;
      break;
    case OMP_CLAUSE_DEFAULT_PRIVATE:
      flags |= GOVD_PRIVATE;
      break;
    case OMP_CLAUSE_DEFAULT_FIRSTPRIVATE:
      flags |= GOVD_FIRSTPRIVATE;
      break;
    case OMP_CLAUSE_DEFAULT_UNSPECIFIED:
      /* decl will be either GOVD_FIRSTPRIVATE or GOVD_SHARED.  */
      gcc_assert ((ctx->region_type & ORT_TASK) != 0);
      if (struct gimplify_omp_ctx *octx = ctx->outer_context)
	{
	  /* Scan the enclosing contexts: a non-shared data-sharing entry
	     makes the decl firstprivate, an enclosing parallel/teams makes
	     it shared; target (data) contexts without a data-sharing class
	     are skipped.  */
	  omp_notice_variable (octx, decl, in_code);
	  for (; octx; octx = octx->outer_context)
	    {
	      splay_tree_node n2;

	      n2 = splay_tree_lookup (octx->variables, (splay_tree_key) decl);
	      if ((octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)) != 0
		  && (n2 == NULL || (n2->value & GOVD_DATA_SHARE_CLASS) == 0))
		continue;
	      if (n2 && (n2->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED)
		{
		  flags |= GOVD_FIRSTPRIVATE;
		  goto found_outer;
		}
	      if ((octx->region_type & (ORT_PARALLEL | ORT_TEAMS)) != 0)
		{
		  flags |= GOVD_SHARED;
		  goto found_outer;
		}
	    }
	}

      /* No outer context decided: parameters and function-local variables
	 become firstprivate, everything else shared.  */
      if (TREE_CODE (decl) == PARM_DECL
	  || (!is_global_var (decl)
	      && DECL_CONTEXT (decl) == current_function_decl))
	flags |= GOVD_FIRSTPRIVATE;
      else
	flags |= GOVD_SHARED;
    found_outer:
      break;

    default:
      gcc_unreachable ();
    }

  return flags;
}
7121
7122
7123 /* Determine outer default flags for DECL mentioned in an OACC region
7124 but not declared in an enclosing clause. */
7125
static unsigned
oacc_default_clause (struct gimplify_omp_ctx *ctx, tree decl, unsigned flags)
{
  const char *rkind;
  bool on_device = false;
  bool declared = is_oacc_declared (decl);
  tree type = TREE_TYPE (decl);

  /* For by-reference privatization, classify based on the referenced
     type.  */
  if (lang_hooks.decls.omp_privatize_by_reference (decl))
    type = TREE_TYPE (type);

  /* Globals marked device_resident inside compute constructs only need
     a "to" mapping.  */
  if ((ctx->region_type & (ORT_ACC_PARALLEL | ORT_ACC_KERNELS)) != 0
      && is_global_var (decl)
      && device_resident_p (decl))
    {
      on_device = true;
      flags |= GOVD_MAP_TO_ONLY;
    }

  switch (ctx->region_type)
    {
    case ORT_ACC_KERNELS:
      rkind = "kernels";

      if (AGGREGATE_TYPE_P (type))
	{
	  /* Aggregates default to 'present_or_copy', or 'present'.  */
	  if (ctx->default_kind != OMP_CLAUSE_DEFAULT_PRESENT)
	    flags |= GOVD_MAP;
	  else
	    flags |= GOVD_MAP | GOVD_MAP_FORCE_PRESENT;
	}
      else
	/* Scalars default to 'copy'.  */
	flags |= GOVD_MAP | GOVD_MAP_FORCE;

      break;

    case ORT_ACC_PARALLEL:
      rkind = "parallel";

      if (on_device || declared)
	flags |= GOVD_MAP;
      else if (AGGREGATE_TYPE_P (type))
	{
	  /* Aggregates default to 'present_or_copy', or 'present'.  */
	  if (ctx->default_kind != OMP_CLAUSE_DEFAULT_PRESENT)
	    flags |= GOVD_MAP;
	  else
	    flags |= GOVD_MAP | GOVD_MAP_FORCE_PRESENT;
	}
      else
	/* Scalars default to 'firstprivate'.  */
	flags |= GOVD_FIRSTPRIVATE;

      break;

    default:
      gcc_unreachable ();
    }

  /* Under default(none), complain about user-visible decls that had no
     explicit clause; compiler temporaries are exempt.  */
  if (DECL_ARTIFICIAL (decl))
    ; /* We can get compiler-generated decls, and should not complain
	 about them.  */
  else if (ctx->default_kind == OMP_CLAUSE_DEFAULT_NONE)
    {
      error ("%qE not specified in enclosing OpenACC %qs construct",
	     DECL_NAME (lang_hooks.decls.omp_report_decl (decl)), rkind);
      inform (ctx->location, "enclosing OpenACC %qs construct", rkind);
    }
  else if (ctx->default_kind == OMP_CLAUSE_DEFAULT_PRESENT)
    ; /* Handled above.  */
  else
    gcc_checking_assert (ctx->default_kind == OMP_CLAUSE_DEFAULT_SHARED);

  return flags;
}
7203
7204 /* Record the fact that DECL was used within the OMP context CTX.
7205 IN_CODE is true when real code uses DECL, and false when we should
7206 merely emit default(none) errors. Return true if DECL is going to
7207 be remapped and thus DECL shouldn't be gimplified into its
7208 DECL_VALUE_EXPR (if any). */
7209
static bool
omp_notice_variable (struct gimplify_omp_ctx *ctx, tree decl, bool in_code)
{
  splay_tree_node n;
  unsigned flags = in_code ? GOVD_SEEN : 0;
  bool ret = false, shared;

  if (error_operand_p (decl))
    return false;

  if (ctx->region_type == ORT_NONE)
    return lang_hooks.decls.omp_disregard_value_expr (decl, false);

  if (is_global_var (decl))
    {
      /* Threadprivate variables are predetermined.  */
      if (DECL_THREAD_LOCAL_P (decl))
	return omp_notice_threadprivate_variable (ctx, decl, NULL_TREE);

      if (DECL_HAS_VALUE_EXPR_P (decl))
	{
	  tree value = get_base_address (DECL_VALUE_EXPR (decl));

	  if (value && DECL_P (value) && DECL_THREAD_LOCAL_P (value))
	    return omp_notice_threadprivate_variable (ctx, decl, value);
	}

      /* Inside an OpenACC "routine" function, globals must carry an
	 appropriate "declare" directive; a "link" clause is not usable
	 there.  */
      if (gimplify_omp_ctxp->outer_context == NULL
	  && VAR_P (decl)
	  && oacc_get_fn_attrib (current_function_decl))
	{
	  location_t loc = DECL_SOURCE_LOCATION (decl);

	  if (lookup_attribute ("omp declare target link",
				DECL_ATTRIBUTES (decl)))
	    {
	      error_at (loc,
			"%qE with %<link%> clause used in %<routine%> function",
			DECL_NAME (decl));
	      return false;
	    }
	  else if (!lookup_attribute ("omp declare target",
				      DECL_ATTRIBUTES (decl)))
	    {
	      error_at (loc,
			"%qE requires a %<declare%> directive for use "
			"in a %<routine%> function", DECL_NAME (decl));
	      return false;
	    }
	}
    }

  n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
  /* In target regions, every referenced variable needs some kind of
     mapping or firstprivatization; compute the appropriate flags.  */
  if ((ctx->region_type & ORT_TARGET) != 0)
    {
      ret = lang_hooks.decls.omp_disregard_value_expr (decl, true);
      if (n == NULL)
	{
	  unsigned nflags = flags;
	  if (ctx->target_map_pointers_as_0len_arrays
	      || ctx->target_map_scalars_firstprivate)
	    {
	      bool is_declare_target = false;
	      bool is_scalar = false;
	      /* An offloadable global with no overriding data-sharing in
		 any enclosing context counts as "declare target" and
		 needs no mapping here.  */
	      if (is_global_var (decl)
		  && varpool_node::get_create (decl)->offloadable)
		{
		  struct gimplify_omp_ctx *octx;
		  for (octx = ctx->outer_context;
		       octx; octx = octx->outer_context)
		    {
		      n = splay_tree_lookup (octx->variables,
					     (splay_tree_key)decl);
		      if (n
			  && (n->value & GOVD_DATA_SHARE_CLASS) != GOVD_SHARED
			  && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
			break;
		    }
		  is_declare_target = octx == NULL;
		}
	      if (!is_declare_target && ctx->target_map_scalars_firstprivate)
		is_scalar = lang_hooks.decls.omp_scalar_p (decl);
	      if (is_declare_target)
		;
	      else if (ctx->target_map_pointers_as_0len_arrays
		       && (TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE
			   || (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
			       && TREE_CODE (TREE_TYPE (TREE_TYPE (decl)))
			       == POINTER_TYPE)))
		nflags |= GOVD_MAP | GOVD_MAP_0LEN_ARRAY;
	      else if (is_scalar)
		nflags |= GOVD_FIRSTPRIVATE;
	    }

	  struct gimplify_omp_ctx *octx = ctx->outer_context;
	  if ((ctx->region_type & ORT_ACC) && octx)
	    {
	      /* Look in outer OpenACC contexts, to see if there's a
		 data attribute for this variable.  */
	      omp_notice_variable (octx, decl, in_code);

	      for (; octx; octx = octx->outer_context)
		{
		  if (!(octx->region_type & (ORT_TARGET_DATA | ORT_TARGET)))
		    break;
		  splay_tree_node n2
		    = splay_tree_lookup (octx->variables,
					 (splay_tree_key) decl);
		  if (n2)
		    {
		      if (octx->region_type == ORT_ACC_HOST_DATA)
			error ("variable %qE declared in enclosing "
			       "%<host_data%> region", DECL_NAME (decl));
		      nflags |= GOVD_MAP;
		      if (octx->region_type == ORT_ACC_DATA
			  && (n2->value & GOVD_MAP_0LEN_ARRAY))
			nflags |= GOVD_MAP_0LEN_ARRAY;
		      goto found_outer;
		    }
		}
	    }

	  /* If no special handling applied above (nflags is still equal to
	     flags), fall back to the default mapping; diagnose unmappable
	     types.  */
	  {
	    tree type = TREE_TYPE (decl);

	    if (nflags == flags
		&& gimplify_omp_ctxp->target_firstprivatize_array_bases
		&& lang_hooks.decls.omp_privatize_by_reference (decl))
	      type = TREE_TYPE (type);
	    if (nflags == flags
		&& !lang_hooks.types.omp_mappable_type (type))
	      {
		error ("%qD referenced in target region does not have "
		       "a mappable type", decl);
		nflags |= GOVD_MAP | GOVD_EXPLICIT;
	      }
	    else if (nflags == flags)
	      {
		if ((ctx->region_type & ORT_ACC) != 0)
		  nflags = oacc_default_clause (ctx, decl, flags);
		else
		  nflags |= GOVD_MAP;
	      }
	  }
	found_outer:
	  omp_add_variable (ctx, decl, nflags);
	}
      else
	{
	  /* If nothing changed, there's nothing left to do.  */
	  if ((n->value & flags) == flags)
	    return ret;
	  flags |= n->value;
	  n->value = flags;
	}
      goto do_outer;
    }

  /* Not a target region: if the variable is unknown here, apply the
     region's default clause (or simply propagate outwards for regions
     without data-sharing defaults).  */
  if (n == NULL)
    {
      if (ctx->region_type == ORT_WORKSHARE
	  || ctx->region_type == ORT_SIMD
	  || ctx->region_type == ORT_ACC
	  || (ctx->region_type & ORT_TARGET_DATA) != 0)
	goto do_outer;

      flags = omp_default_clause (ctx, decl, in_code, flags);

      if ((flags & GOVD_PRIVATE)
	  && lang_hooks.decls.omp_private_outer_ref (decl))
	flags |= GOVD_PRIVATE_OUTER_REF;

      omp_add_variable (ctx, decl, flags);

      shared = (flags & GOVD_SHARED) != 0;
      ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);
      goto do_outer;
    }

  /* First real use of a variable-sized decl (or a by-reference decl with
     variable-sized pointee): mark its replacement pointer / size decl as
     seen too.  */
  if ((n->value & (GOVD_SEEN | GOVD_LOCAL)) == 0
      && (flags & (GOVD_SEEN | GOVD_LOCAL)) == GOVD_SEEN
      && DECL_SIZE (decl))
    {
      if (TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
	{
	  splay_tree_node n2;
	  tree t = DECL_VALUE_EXPR (decl);
	  gcc_assert (TREE_CODE (t) == INDIRECT_REF);
	  t = TREE_OPERAND (t, 0);
	  gcc_assert (DECL_P (t));
	  n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
	  n2->value |= GOVD_SEEN;
	}
      else if (lang_hooks.decls.omp_privatize_by_reference (decl)
	       && TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)))
	       && (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))))
		   != INTEGER_CST))
	{
	  splay_tree_node n2;
	  tree t = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl)));
	  gcc_assert (DECL_P (t));
	  n2 = splay_tree_lookup (ctx->variables, (splay_tree_key) t);
	  if (n2)
	    omp_notice_variable (ctx, t, true);
	}
    }

  shared = ((flags | n->value) & GOVD_SHARED) != 0;
  ret = lang_hooks.decls.omp_disregard_value_expr (decl, shared);

  /* If nothing changed, there's nothing left to do.  */
  if ((n->value & flags) == flags)
    return ret;
  flags |= n->value;
  n->value = flags;

 do_outer:
  /* If the variable is private in the current context, then we don't
     need to propagate anything to an outer context.  */
  if ((flags & GOVD_PRIVATE) && !(flags & GOVD_PRIVATE_OUTER_REF))
    return ret;
  if ((flags & (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
      == (GOVD_LINEAR | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
    return ret;
  if ((flags & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
		| GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
      == (GOVD_LASTPRIVATE | GOVD_LINEAR_LASTPRIVATE_NO_OUTER))
    return ret;
  /* Otherwise let the enclosing context notice the use as well.  */
  if (ctx->outer_context
      && omp_notice_variable (ctx->outer_context, decl, in_code))
    return true;
  return ret;
}
7443
7444 /* Verify that DECL is private within CTX. If there's specific information
7445 to the contrary in the innermost scope, generate an error. */
7446
static bool
omp_is_private (struct gimplify_omp_ctx *ctx, tree decl, int simd)
{
  /* SIMD encodes the construct flavor: 0 for non-simd loops, 1 and 2 for
     simd variants — it selects which conflicting clauses get diagnosed.  */
  splay_tree_node n;

  n = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
  if (n != NULL)
    {
      if (n->value & GOVD_SHARED)
	{
	  if (ctx == gimplify_omp_ctxp)
	    {
	      if (simd)
		error ("iteration variable %qE is predetermined linear",
		       DECL_NAME (decl));
	      else
		error ("iteration variable %qE should be private",
		       DECL_NAME (decl));
	      /* Force the variable private so compilation can proceed.  */
	      n->value = GOVD_PRIVATE;
	      return true;
	    }
	  else
	    return false;
	}
      else if ((n->value & GOVD_EXPLICIT) != 0
	       && (ctx == gimplify_omp_ctxp
		   || (ctx->region_type == ORT_COMBINED_PARALLEL
		       && gimplify_omp_ctxp->outer_context == ctx)))
	{
	  /* Diagnose explicit clauses that conflict with the iteration
	     variable's predetermined sharing for this construct.  */
	  if ((n->value & GOVD_FIRSTPRIVATE) != 0)
	    error ("iteration variable %qE should not be firstprivate",
		   DECL_NAME (decl));
	  else if ((n->value & GOVD_REDUCTION) != 0)
	    error ("iteration variable %qE should not be reduction",
		   DECL_NAME (decl));
	  else if (simd == 0 && (n->value & GOVD_LINEAR) != 0)
	    error ("iteration variable %qE should not be linear",
		   DECL_NAME (decl));
	  else if (simd == 1 && (n->value & GOVD_LASTPRIVATE) != 0)
	    error ("iteration variable %qE should not be lastprivate",
		   DECL_NAME (decl));
	  else if (simd && (n->value & GOVD_PRIVATE) != 0)
	    error ("iteration variable %qE should not be private",
		   DECL_NAME (decl));
	  else if (simd == 2 && (n->value & GOVD_LINEAR) != 0)
	    error ("iteration variable %qE is predetermined linear",
		   DECL_NAME (decl));
	}
      return (ctx == gimplify_omp_ctxp
	      || (ctx->region_type == ORT_COMBINED_PARALLEL
		  && gimplify_omp_ctxp->outer_context == ctx));
    }

  /* No entry here: only recurse outwards through constructs that do not
     introduce their own data-sharing defaults.  */
  if (ctx->region_type != ORT_WORKSHARE
      && ctx->region_type != ORT_SIMD
      && ctx->region_type != ORT_ACC)
    return false;
  else if (ctx->outer_context)
    return omp_is_private (ctx->outer_context, decl, simd);
  return false;
}
7508
7509 /* Return true if DECL is private within a parallel region
7510 that binds to the current construct's context or in parallel
7511 region's REDUCTION clause. */
7512
static bool
omp_check_private (struct gimplify_omp_ctx *ctx, tree decl, bool copyprivate)
{
  splay_tree_node n;

  /* Walk outwards from CTX looking for a context that determines DECL's
     data-sharing.  */
  do
    {
      ctx = ctx->outer_context;
      if (ctx == NULL)
	{
	  if (is_global_var (decl))
	    return false;

	  /* References might be private, but might be shared too,
	     when checking for copyprivate, assume they might be
	     private, otherwise assume they might be shared.  */
	  if (copyprivate)
	    return true;

	  if (lang_hooks.decls.omp_privatize_by_reference (decl))
	    return false;

	  /* Treat C++ privatized non-static data members outside
	     of the privatization the same.  */
	  if (omp_member_access_dummy_var (decl))
	    return false;

	  return true;
	}

      n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);

      /* NOTE: this "continue" re-evaluates the do-while condition against
	 the target context just examined, so target contexts without a
	 data-sharing entry end the walk (returning false) rather than
	 skipping to the next outer context.  */
      if ((ctx->region_type & (ORT_TARGET | ORT_TARGET_DATA)) != 0
	  && (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0))
	continue;

      if (n != NULL)
	{
	  if ((n->value & GOVD_LOCAL) != 0
	      && omp_member_access_dummy_var (decl))
	    return false;
	  return (n->value & GOVD_SHARED) == 0;
	}
    }
  while (ctx->region_type == ORT_WORKSHARE
	 || ctx->region_type == ORT_SIMD
	 || ctx->region_type == ORT_ACC);
  return false;
}
7562
7563 /* Callback for walk_tree to find a DECL_EXPR for the given DECL. */
7564
7565 static tree
find_decl_expr(tree * tp,int * walk_subtrees,void * data)7566 find_decl_expr (tree *tp, int *walk_subtrees, void *data)
7567 {
7568 tree t = *tp;
7569
7570 /* If this node has been visited, unmark it and keep looking. */
7571 if (TREE_CODE (t) == DECL_EXPR && DECL_EXPR_DECL (t) == (tree) data)
7572 return t;
7573
7574 if (IS_TYPE_OR_DECL_P (t))
7575 *walk_subtrees = 0;
7576 return NULL_TREE;
7577 }
7578
7579 /* Scan the OMP clauses in *LIST_P, installing mappings into a new
7580 and previous omp contexts. */
7581
7582 static void
gimplify_scan_omp_clauses(tree * list_p,gimple_seq * pre_p,enum omp_region_type region_type,enum tree_code code)7583 gimplify_scan_omp_clauses (tree *list_p, gimple_seq *pre_p,
7584 enum omp_region_type region_type,
7585 enum tree_code code)
7586 {
7587 struct gimplify_omp_ctx *ctx, *outer_ctx;
7588 tree c;
7589 hash_map<tree, tree> *struct_map_to_clause = NULL;
7590 tree *prev_list_p = NULL;
7591
7592 ctx = new_omp_context (region_type);
7593 outer_ctx = ctx->outer_context;
7594 if (code == OMP_TARGET)
7595 {
7596 if (!lang_GNU_Fortran ())
7597 ctx->target_map_pointers_as_0len_arrays = true;
7598 ctx->target_map_scalars_firstprivate = true;
7599 }
7600 if (!lang_GNU_Fortran ())
7601 switch (code)
7602 {
7603 case OMP_TARGET:
7604 case OMP_TARGET_DATA:
7605 case OMP_TARGET_ENTER_DATA:
7606 case OMP_TARGET_EXIT_DATA:
7607 case OACC_DECLARE:
7608 case OACC_HOST_DATA:
7609 ctx->target_firstprivatize_array_bases = true;
7610 default:
7611 break;
7612 }
7613
7614 while ((c = *list_p) != NULL)
7615 {
7616 bool remove = false;
7617 bool notice_outer = true;
7618 const char *check_non_private = NULL;
7619 unsigned int flags;
7620 tree decl;
7621
7622 switch (OMP_CLAUSE_CODE (c))
7623 {
7624 case OMP_CLAUSE_PRIVATE:
7625 flags = GOVD_PRIVATE | GOVD_EXPLICIT;
7626 if (lang_hooks.decls.omp_private_outer_ref (OMP_CLAUSE_DECL (c)))
7627 {
7628 flags |= GOVD_PRIVATE_OUTER_REF;
7629 OMP_CLAUSE_PRIVATE_OUTER_REF (c) = 1;
7630 }
7631 else
7632 notice_outer = false;
7633 goto do_add;
7634 case OMP_CLAUSE_SHARED:
7635 flags = GOVD_SHARED | GOVD_EXPLICIT;
7636 goto do_add;
7637 case OMP_CLAUSE_FIRSTPRIVATE:
7638 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
7639 check_non_private = "firstprivate";
7640 goto do_add;
7641 case OMP_CLAUSE_LASTPRIVATE:
7642 flags = GOVD_LASTPRIVATE | GOVD_SEEN | GOVD_EXPLICIT;
7643 check_non_private = "lastprivate";
7644 decl = OMP_CLAUSE_DECL (c);
7645 if (error_operand_p (decl))
7646 goto do_add;
7647 else if (outer_ctx
7648 && (outer_ctx->region_type == ORT_COMBINED_PARALLEL
7649 || outer_ctx->region_type == ORT_COMBINED_TEAMS)
7650 && splay_tree_lookup (outer_ctx->variables,
7651 (splay_tree_key) decl) == NULL)
7652 {
7653 omp_add_variable (outer_ctx, decl, GOVD_SHARED | GOVD_SEEN);
7654 if (outer_ctx->outer_context)
7655 omp_notice_variable (outer_ctx->outer_context, decl, true);
7656 }
7657 else if (outer_ctx
7658 && (outer_ctx->region_type & ORT_TASK) != 0
7659 && outer_ctx->combined_loop
7660 && splay_tree_lookup (outer_ctx->variables,
7661 (splay_tree_key) decl) == NULL)
7662 {
7663 omp_add_variable (outer_ctx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
7664 if (outer_ctx->outer_context)
7665 omp_notice_variable (outer_ctx->outer_context, decl, true);
7666 }
7667 else if (outer_ctx
7668 && (outer_ctx->region_type == ORT_WORKSHARE
7669 || outer_ctx->region_type == ORT_ACC)
7670 && outer_ctx->combined_loop
7671 && splay_tree_lookup (outer_ctx->variables,
7672 (splay_tree_key) decl) == NULL
7673 && !omp_check_private (outer_ctx, decl, false))
7674 {
7675 omp_add_variable (outer_ctx, decl, GOVD_LASTPRIVATE | GOVD_SEEN);
7676 if (outer_ctx->outer_context
7677 && (outer_ctx->outer_context->region_type
7678 == ORT_COMBINED_PARALLEL)
7679 && splay_tree_lookup (outer_ctx->outer_context->variables,
7680 (splay_tree_key) decl) == NULL)
7681 {
7682 struct gimplify_omp_ctx *octx = outer_ctx->outer_context;
7683 omp_add_variable (octx, decl, GOVD_SHARED | GOVD_SEEN);
7684 if (octx->outer_context)
7685 {
7686 octx = octx->outer_context;
7687 if (octx->region_type == ORT_WORKSHARE
7688 && octx->combined_loop
7689 && splay_tree_lookup (octx->variables,
7690 (splay_tree_key) decl) == NULL
7691 && !omp_check_private (octx, decl, false))
7692 {
7693 omp_add_variable (octx, decl,
7694 GOVD_LASTPRIVATE | GOVD_SEEN);
7695 octx = octx->outer_context;
7696 if (octx
7697 && octx->region_type == ORT_COMBINED_TEAMS
7698 && (splay_tree_lookup (octx->variables,
7699 (splay_tree_key) decl)
7700 == NULL))
7701 {
7702 omp_add_variable (octx, decl,
7703 GOVD_SHARED | GOVD_SEEN);
7704 octx = octx->outer_context;
7705 }
7706 }
7707 if (octx)
7708 omp_notice_variable (octx, decl, true);
7709 }
7710 }
7711 else if (outer_ctx->outer_context)
7712 omp_notice_variable (outer_ctx->outer_context, decl, true);
7713 }
7714 goto do_add;
7715 case OMP_CLAUSE_REDUCTION:
7716 flags = GOVD_REDUCTION | GOVD_SEEN | GOVD_EXPLICIT;
7717 /* OpenACC permits reductions on private variables. */
7718 if (!(region_type & ORT_ACC))
7719 check_non_private = "reduction";
7720 decl = OMP_CLAUSE_DECL (c);
7721 if (TREE_CODE (decl) == MEM_REF)
7722 {
7723 tree type = TREE_TYPE (decl);
7724 if (gimplify_expr (&TYPE_MAX_VALUE (TYPE_DOMAIN (type)), pre_p,
7725 NULL, is_gimple_val, fb_rvalue, false)
7726 == GS_ERROR)
7727 {
7728 remove = true;
7729 break;
7730 }
7731 tree v = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
7732 if (DECL_P (v))
7733 {
7734 omp_firstprivatize_variable (ctx, v);
7735 omp_notice_variable (ctx, v, true);
7736 }
7737 decl = TREE_OPERAND (decl, 0);
7738 if (TREE_CODE (decl) == POINTER_PLUS_EXPR)
7739 {
7740 if (gimplify_expr (&TREE_OPERAND (decl, 1), pre_p,
7741 NULL, is_gimple_val, fb_rvalue, false)
7742 == GS_ERROR)
7743 {
7744 remove = true;
7745 break;
7746 }
7747 v = TREE_OPERAND (decl, 1);
7748 if (DECL_P (v))
7749 {
7750 omp_firstprivatize_variable (ctx, v);
7751 omp_notice_variable (ctx, v, true);
7752 }
7753 decl = TREE_OPERAND (decl, 0);
7754 }
7755 if (TREE_CODE (decl) == ADDR_EXPR
7756 || TREE_CODE (decl) == INDIRECT_REF)
7757 decl = TREE_OPERAND (decl, 0);
7758 }
7759 goto do_add_decl;
7760 case OMP_CLAUSE_LINEAR:
7761 if (gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c), pre_p, NULL,
7762 is_gimple_val, fb_rvalue) == GS_ERROR)
7763 {
7764 remove = true;
7765 break;
7766 }
7767 else
7768 {
7769 if (code == OMP_SIMD
7770 && !OMP_CLAUSE_LINEAR_NO_COPYIN (c))
7771 {
7772 struct gimplify_omp_ctx *octx = outer_ctx;
7773 if (octx
7774 && octx->region_type == ORT_WORKSHARE
7775 && octx->combined_loop
7776 && !octx->distribute)
7777 {
7778 if (octx->outer_context
7779 && (octx->outer_context->region_type
7780 == ORT_COMBINED_PARALLEL))
7781 octx = octx->outer_context->outer_context;
7782 else
7783 octx = octx->outer_context;
7784 }
7785 if (octx
7786 && octx->region_type == ORT_WORKSHARE
7787 && octx->combined_loop
7788 && octx->distribute)
7789 {
7790 error_at (OMP_CLAUSE_LOCATION (c),
7791 "%<linear%> clause for variable other than "
7792 "loop iterator specified on construct "
7793 "combined with %<distribute%>");
7794 remove = true;
7795 break;
7796 }
7797 }
7798 /* For combined #pragma omp parallel for simd, need to put
7799 lastprivate and perhaps firstprivate too on the
7800 parallel. Similarly for #pragma omp for simd. */
7801 struct gimplify_omp_ctx *octx = outer_ctx;
7802 decl = NULL_TREE;
7803 do
7804 {
7805 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
7806 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
7807 break;
7808 decl = OMP_CLAUSE_DECL (c);
7809 if (error_operand_p (decl))
7810 {
7811 decl = NULL_TREE;
7812 break;
7813 }
7814 flags = GOVD_SEEN;
7815 if (!OMP_CLAUSE_LINEAR_NO_COPYIN (c))
7816 flags |= GOVD_FIRSTPRIVATE;
7817 if (!OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
7818 flags |= GOVD_LASTPRIVATE;
7819 if (octx
7820 && octx->region_type == ORT_WORKSHARE
7821 && octx->combined_loop)
7822 {
7823 if (octx->outer_context
7824 && (octx->outer_context->region_type
7825 == ORT_COMBINED_PARALLEL))
7826 octx = octx->outer_context;
7827 else if (omp_check_private (octx, decl, false))
7828 break;
7829 }
7830 else if (octx
7831 && (octx->region_type & ORT_TASK) != 0
7832 && octx->combined_loop)
7833 ;
7834 else if (octx
7835 && octx->region_type == ORT_COMBINED_PARALLEL
7836 && ctx->region_type == ORT_WORKSHARE
7837 && octx == outer_ctx)
7838 flags = GOVD_SEEN | GOVD_SHARED;
7839 else if (octx
7840 && octx->region_type == ORT_COMBINED_TEAMS)
7841 flags = GOVD_SEEN | GOVD_SHARED;
7842 else if (octx
7843 && octx->region_type == ORT_COMBINED_TARGET)
7844 {
7845 flags &= ~GOVD_LASTPRIVATE;
7846 if (flags == GOVD_SEEN)
7847 break;
7848 }
7849 else
7850 break;
7851 splay_tree_node on
7852 = splay_tree_lookup (octx->variables,
7853 (splay_tree_key) decl);
7854 if (on && (on->value & GOVD_DATA_SHARE_CLASS) != 0)
7855 {
7856 octx = NULL;
7857 break;
7858 }
7859 omp_add_variable (octx, decl, flags);
7860 if (octx->outer_context == NULL)
7861 break;
7862 octx = octx->outer_context;
7863 }
7864 while (1);
7865 if (octx
7866 && decl
7867 && (!OMP_CLAUSE_LINEAR_NO_COPYIN (c)
7868 || !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
7869 omp_notice_variable (octx, decl, true);
7870 }
7871 flags = GOVD_LINEAR | GOVD_EXPLICIT;
7872 if (OMP_CLAUSE_LINEAR_NO_COPYIN (c)
7873 && OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
7874 {
7875 notice_outer = false;
7876 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
7877 }
7878 goto do_add;
7879
7880 case OMP_CLAUSE_MAP:
7881 decl = OMP_CLAUSE_DECL (c);
7882 if (error_operand_p (decl))
7883 remove = true;
7884 switch (code)
7885 {
7886 case OMP_TARGET:
7887 break;
7888 case OACC_DATA:
7889 if (TREE_CODE (TREE_TYPE (decl)) != ARRAY_TYPE)
7890 break;
7891 /* FALLTHRU */
7892 case OMP_TARGET_DATA:
7893 case OMP_TARGET_ENTER_DATA:
7894 case OMP_TARGET_EXIT_DATA:
7895 case OACC_ENTER_DATA:
7896 case OACC_EXIT_DATA:
7897 case OACC_HOST_DATA:
7898 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
7899 || (OMP_CLAUSE_MAP_KIND (c)
7900 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
7901 /* For target {,enter ,exit }data only the array slice is
7902 mapped, but not the pointer to it. */
7903 remove = true;
7904 break;
7905 default:
7906 break;
7907 }
7908 if (remove)
7909 break;
7910 if (DECL_P (decl) && outer_ctx && (region_type & ORT_ACC))
7911 {
7912 struct gimplify_omp_ctx *octx;
7913 for (octx = outer_ctx; octx; octx = octx->outer_context)
7914 {
7915 if (octx->region_type != ORT_ACC_HOST_DATA)
7916 break;
7917 splay_tree_node n2
7918 = splay_tree_lookup (octx->variables,
7919 (splay_tree_key) decl);
7920 if (n2)
7921 error_at (OMP_CLAUSE_LOCATION (c), "variable %qE "
7922 "declared in enclosing %<host_data%> region",
7923 DECL_NAME (decl));
7924 }
7925 }
7926 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
7927 OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
7928 : TYPE_SIZE_UNIT (TREE_TYPE (decl));
7929 if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
7930 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
7931 {
7932 remove = true;
7933 break;
7934 }
7935 else if ((OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER
7936 || (OMP_CLAUSE_MAP_KIND (c)
7937 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
7938 && TREE_CODE (OMP_CLAUSE_SIZE (c)) != INTEGER_CST)
7939 {
7940 OMP_CLAUSE_SIZE (c)
7941 = get_initialized_tmp_var (OMP_CLAUSE_SIZE (c), pre_p, NULL,
7942 false);
7943 omp_add_variable (ctx, OMP_CLAUSE_SIZE (c),
7944 GOVD_FIRSTPRIVATE | GOVD_SEEN);
7945 }
7946 if (!DECL_P (decl))
7947 {
7948 tree d = decl, *pd;
7949 if (TREE_CODE (d) == ARRAY_REF)
7950 {
7951 while (TREE_CODE (d) == ARRAY_REF)
7952 d = TREE_OPERAND (d, 0);
7953 if (TREE_CODE (d) == COMPONENT_REF
7954 && TREE_CODE (TREE_TYPE (d)) == ARRAY_TYPE)
7955 decl = d;
7956 }
7957 pd = &OMP_CLAUSE_DECL (c);
7958 if (d == decl
7959 && TREE_CODE (decl) == INDIRECT_REF
7960 && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
7961 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
7962 == REFERENCE_TYPE))
7963 {
7964 pd = &TREE_OPERAND (decl, 0);
7965 decl = TREE_OPERAND (decl, 0);
7966 }
7967 if (TREE_CODE (decl) == COMPONENT_REF)
7968 {
7969 while (TREE_CODE (decl) == COMPONENT_REF)
7970 decl = TREE_OPERAND (decl, 0);
7971 if (TREE_CODE (decl) == INDIRECT_REF
7972 && DECL_P (TREE_OPERAND (decl, 0))
7973 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
7974 == REFERENCE_TYPE))
7975 decl = TREE_OPERAND (decl, 0);
7976 }
7977 if (gimplify_expr (pd, pre_p, NULL, is_gimple_lvalue, fb_lvalue)
7978 == GS_ERROR)
7979 {
7980 remove = true;
7981 break;
7982 }
7983 if (DECL_P (decl))
7984 {
7985 if (error_operand_p (decl))
7986 {
7987 remove = true;
7988 break;
7989 }
7990
7991 tree stype = TREE_TYPE (decl);
7992 if (TREE_CODE (stype) == REFERENCE_TYPE)
7993 stype = TREE_TYPE (stype);
7994 if (TYPE_SIZE_UNIT (stype) == NULL
7995 || TREE_CODE (TYPE_SIZE_UNIT (stype)) != INTEGER_CST)
7996 {
7997 error_at (OMP_CLAUSE_LOCATION (c),
7998 "mapping field %qE of variable length "
7999 "structure", OMP_CLAUSE_DECL (c));
8000 remove = true;
8001 break;
8002 }
8003
8004 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_POINTER)
8005 {
8006 /* Error recovery. */
8007 if (prev_list_p == NULL)
8008 {
8009 remove = true;
8010 break;
8011 }
8012 if (OMP_CLAUSE_CHAIN (*prev_list_p) != c)
8013 {
8014 tree ch = OMP_CLAUSE_CHAIN (*prev_list_p);
8015 if (ch == NULL_TREE || OMP_CLAUSE_CHAIN (ch) != c)
8016 {
8017 remove = true;
8018 break;
8019 }
8020 }
8021 }
8022
8023 tree offset;
8024 poly_int64 bitsize, bitpos;
8025 machine_mode mode;
8026 int unsignedp, reversep, volatilep = 0;
8027 tree base = OMP_CLAUSE_DECL (c);
8028 while (TREE_CODE (base) == ARRAY_REF)
8029 base = TREE_OPERAND (base, 0);
8030 if (TREE_CODE (base) == INDIRECT_REF)
8031 base = TREE_OPERAND (base, 0);
8032 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
8033 &mode, &unsignedp, &reversep,
8034 &volatilep);
8035 tree orig_base = base;
8036 if ((TREE_CODE (base) == INDIRECT_REF
8037 || (TREE_CODE (base) == MEM_REF
8038 && integer_zerop (TREE_OPERAND (base, 1))))
8039 && DECL_P (TREE_OPERAND (base, 0))
8040 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (base, 0)))
8041 == REFERENCE_TYPE))
8042 base = TREE_OPERAND (base, 0);
8043 gcc_assert (base == decl
8044 && (offset == NULL_TREE
8045 || poly_int_tree_p (offset)));
8046
8047 splay_tree_node n
8048 = splay_tree_lookup (ctx->variables, (splay_tree_key)decl);
8049 bool ptr = (OMP_CLAUSE_MAP_KIND (c)
8050 == GOMP_MAP_ALWAYS_POINTER);
8051 if (n == NULL || (n->value & GOVD_MAP) == 0)
8052 {
8053 tree l = build_omp_clause (OMP_CLAUSE_LOCATION (c),
8054 OMP_CLAUSE_MAP);
8055 OMP_CLAUSE_SET_MAP_KIND (l, GOMP_MAP_STRUCT);
8056 if (orig_base != base)
8057 OMP_CLAUSE_DECL (l) = unshare_expr (orig_base);
8058 else
8059 OMP_CLAUSE_DECL (l) = decl;
8060 OMP_CLAUSE_SIZE (l) = size_int (1);
8061 if (struct_map_to_clause == NULL)
8062 struct_map_to_clause = new hash_map<tree, tree>;
8063 struct_map_to_clause->put (decl, l);
8064 if (ptr)
8065 {
8066 enum gomp_map_kind mkind
8067 = code == OMP_TARGET_EXIT_DATA
8068 ? GOMP_MAP_RELEASE : GOMP_MAP_ALLOC;
8069 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
8070 OMP_CLAUSE_MAP);
8071 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
8072 OMP_CLAUSE_DECL (c2)
8073 = unshare_expr (OMP_CLAUSE_DECL (c));
8074 OMP_CLAUSE_CHAIN (c2) = *prev_list_p;
8075 OMP_CLAUSE_SIZE (c2)
8076 = TYPE_SIZE_UNIT (ptr_type_node);
8077 OMP_CLAUSE_CHAIN (l) = c2;
8078 if (OMP_CLAUSE_CHAIN (*prev_list_p) != c)
8079 {
8080 tree c4 = OMP_CLAUSE_CHAIN (*prev_list_p);
8081 tree c3
8082 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
8083 OMP_CLAUSE_MAP);
8084 OMP_CLAUSE_SET_MAP_KIND (c3, mkind);
8085 OMP_CLAUSE_DECL (c3)
8086 = unshare_expr (OMP_CLAUSE_DECL (c4));
8087 OMP_CLAUSE_SIZE (c3)
8088 = TYPE_SIZE_UNIT (ptr_type_node);
8089 OMP_CLAUSE_CHAIN (c3) = *prev_list_p;
8090 OMP_CLAUSE_CHAIN (c2) = c3;
8091 }
8092 *prev_list_p = l;
8093 prev_list_p = NULL;
8094 }
8095 else
8096 {
8097 OMP_CLAUSE_CHAIN (l) = c;
8098 *list_p = l;
8099 list_p = &OMP_CLAUSE_CHAIN (l);
8100 }
8101 if (orig_base != base && code == OMP_TARGET)
8102 {
8103 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
8104 OMP_CLAUSE_MAP);
8105 enum gomp_map_kind mkind
8106 = GOMP_MAP_FIRSTPRIVATE_REFERENCE;
8107 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
8108 OMP_CLAUSE_DECL (c2) = decl;
8109 OMP_CLAUSE_SIZE (c2) = size_zero_node;
8110 OMP_CLAUSE_CHAIN (c2) = OMP_CLAUSE_CHAIN (l);
8111 OMP_CLAUSE_CHAIN (l) = c2;
8112 }
8113 flags = GOVD_MAP | GOVD_EXPLICIT;
8114 if (GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) || ptr)
8115 flags |= GOVD_SEEN;
8116 goto do_add_decl;
8117 }
8118 else
8119 {
8120 tree *osc = struct_map_to_clause->get (decl);
8121 tree *sc = NULL, *scp = NULL;
8122 if (GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) || ptr)
8123 n->value |= GOVD_SEEN;
8124 poly_offset_int o1, o2;
8125 if (offset)
8126 o1 = wi::to_poly_offset (offset);
8127 else
8128 o1 = 0;
8129 if (maybe_ne (bitpos, 0))
8130 o1 += bits_to_bytes_round_down (bitpos);
8131 sc = &OMP_CLAUSE_CHAIN (*osc);
8132 if (*sc != c
8133 && (OMP_CLAUSE_MAP_KIND (*sc)
8134 == GOMP_MAP_FIRSTPRIVATE_REFERENCE))
8135 sc = &OMP_CLAUSE_CHAIN (*sc);
8136 for (; *sc != c; sc = &OMP_CLAUSE_CHAIN (*sc))
8137 if (ptr && sc == prev_list_p)
8138 break;
8139 else if (TREE_CODE (OMP_CLAUSE_DECL (*sc))
8140 != COMPONENT_REF
8141 && (TREE_CODE (OMP_CLAUSE_DECL (*sc))
8142 != INDIRECT_REF)
8143 && (TREE_CODE (OMP_CLAUSE_DECL (*sc))
8144 != ARRAY_REF))
8145 break;
8146 else
8147 {
8148 tree offset2;
8149 poly_int64 bitsize2, bitpos2;
8150 base = OMP_CLAUSE_DECL (*sc);
8151 if (TREE_CODE (base) == ARRAY_REF)
8152 {
8153 while (TREE_CODE (base) == ARRAY_REF)
8154 base = TREE_OPERAND (base, 0);
8155 if (TREE_CODE (base) != COMPONENT_REF
8156 || (TREE_CODE (TREE_TYPE (base))
8157 != ARRAY_TYPE))
8158 break;
8159 }
8160 else if (TREE_CODE (base) == INDIRECT_REF
8161 && (TREE_CODE (TREE_OPERAND (base, 0))
8162 == COMPONENT_REF)
8163 && (TREE_CODE (TREE_TYPE
8164 (TREE_OPERAND (base, 0)))
8165 == REFERENCE_TYPE))
8166 base = TREE_OPERAND (base, 0);
8167 base = get_inner_reference (base, &bitsize2,
8168 &bitpos2, &offset2,
8169 &mode, &unsignedp,
8170 &reversep, &volatilep);
8171 if ((TREE_CODE (base) == INDIRECT_REF
8172 || (TREE_CODE (base) == MEM_REF
8173 && integer_zerop (TREE_OPERAND (base,
8174 1))))
8175 && DECL_P (TREE_OPERAND (base, 0))
8176 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (base,
8177 0)))
8178 == REFERENCE_TYPE))
8179 base = TREE_OPERAND (base, 0);
8180 if (base != decl)
8181 break;
8182 if (scp)
8183 continue;
8184 gcc_assert (offset2 == NULL_TREE
8185 || poly_int_tree_p (offset2));
8186 tree d1 = OMP_CLAUSE_DECL (*sc);
8187 tree d2 = OMP_CLAUSE_DECL (c);
8188 while (TREE_CODE (d1) == ARRAY_REF)
8189 d1 = TREE_OPERAND (d1, 0);
8190 while (TREE_CODE (d2) == ARRAY_REF)
8191 d2 = TREE_OPERAND (d2, 0);
8192 if (TREE_CODE (d1) == INDIRECT_REF)
8193 d1 = TREE_OPERAND (d1, 0);
8194 if (TREE_CODE (d2) == INDIRECT_REF)
8195 d2 = TREE_OPERAND (d2, 0);
8196 while (TREE_CODE (d1) == COMPONENT_REF)
8197 if (TREE_CODE (d2) == COMPONENT_REF
8198 && TREE_OPERAND (d1, 1)
8199 == TREE_OPERAND (d2, 1))
8200 {
8201 d1 = TREE_OPERAND (d1, 0);
8202 d2 = TREE_OPERAND (d2, 0);
8203 }
8204 else
8205 break;
8206 if (d1 == d2)
8207 {
8208 error_at (OMP_CLAUSE_LOCATION (c),
8209 "%qE appears more than once in map "
8210 "clauses", OMP_CLAUSE_DECL (c));
8211 remove = true;
8212 break;
8213 }
8214 if (offset2)
8215 o2 = wi::to_poly_offset (offset2);
8216 else
8217 o2 = 0;
8218 o2 += bits_to_bytes_round_down (bitpos2);
8219 if (maybe_lt (o1, o2)
8220 || (known_eq (o1, o2)
8221 && maybe_lt (bitpos, bitpos2)))
8222 {
8223 if (ptr)
8224 scp = sc;
8225 else
8226 break;
8227 }
8228 }
8229 if (remove)
8230 break;
8231 OMP_CLAUSE_SIZE (*osc)
8232 = size_binop (PLUS_EXPR, OMP_CLAUSE_SIZE (*osc),
8233 size_one_node);
8234 if (ptr)
8235 {
8236 tree c2 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
8237 OMP_CLAUSE_MAP);
8238 tree cl = NULL_TREE;
8239 enum gomp_map_kind mkind
8240 = code == OMP_TARGET_EXIT_DATA
8241 ? GOMP_MAP_RELEASE : GOMP_MAP_ALLOC;
8242 OMP_CLAUSE_SET_MAP_KIND (c2, mkind);
8243 OMP_CLAUSE_DECL (c2)
8244 = unshare_expr (OMP_CLAUSE_DECL (c));
8245 OMP_CLAUSE_CHAIN (c2) = scp ? *scp : *prev_list_p;
8246 OMP_CLAUSE_SIZE (c2)
8247 = TYPE_SIZE_UNIT (ptr_type_node);
8248 cl = scp ? *prev_list_p : c2;
8249 if (OMP_CLAUSE_CHAIN (*prev_list_p) != c)
8250 {
8251 tree c4 = OMP_CLAUSE_CHAIN (*prev_list_p);
8252 tree c3
8253 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
8254 OMP_CLAUSE_MAP);
8255 OMP_CLAUSE_SET_MAP_KIND (c3, mkind);
8256 OMP_CLAUSE_DECL (c3)
8257 = unshare_expr (OMP_CLAUSE_DECL (c4));
8258 OMP_CLAUSE_SIZE (c3)
8259 = TYPE_SIZE_UNIT (ptr_type_node);
8260 OMP_CLAUSE_CHAIN (c3) = *prev_list_p;
8261 if (!scp)
8262 OMP_CLAUSE_CHAIN (c2) = c3;
8263 else
8264 cl = c3;
8265 }
8266 if (scp)
8267 *scp = c2;
8268 if (sc == prev_list_p)
8269 {
8270 *sc = cl;
8271 prev_list_p = NULL;
8272 }
8273 else
8274 {
8275 *prev_list_p = OMP_CLAUSE_CHAIN (c);
8276 list_p = prev_list_p;
8277 prev_list_p = NULL;
8278 OMP_CLAUSE_CHAIN (c) = *sc;
8279 *sc = cl;
8280 continue;
8281 }
8282 }
8283 else if (*sc != c)
8284 {
8285 *list_p = OMP_CLAUSE_CHAIN (c);
8286 OMP_CLAUSE_CHAIN (c) = *sc;
8287 *sc = c;
8288 continue;
8289 }
8290 }
8291 }
8292 if (!remove
8293 && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_ALWAYS_POINTER
8294 && OMP_CLAUSE_CHAIN (c)
8295 && OMP_CLAUSE_CODE (OMP_CLAUSE_CHAIN (c)) == OMP_CLAUSE_MAP
8296 && (OMP_CLAUSE_MAP_KIND (OMP_CLAUSE_CHAIN (c))
8297 == GOMP_MAP_ALWAYS_POINTER))
8298 prev_list_p = list_p;
8299 break;
8300 }
8301 flags = GOVD_MAP | GOVD_EXPLICIT;
8302 if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TO
8303 || OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_TOFROM)
8304 flags |= GOVD_MAP_ALWAYS_TO;
8305 goto do_add;
8306
8307 case OMP_CLAUSE_DEPEND:
8308 if (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
8309 {
8310 tree deps = OMP_CLAUSE_DECL (c);
8311 while (deps && TREE_CODE (deps) == TREE_LIST)
8312 {
8313 if (TREE_CODE (TREE_PURPOSE (deps)) == TRUNC_DIV_EXPR
8314 && DECL_P (TREE_OPERAND (TREE_PURPOSE (deps), 1)))
8315 gimplify_expr (&TREE_OPERAND (TREE_PURPOSE (deps), 1),
8316 pre_p, NULL, is_gimple_val, fb_rvalue);
8317 deps = TREE_CHAIN (deps);
8318 }
8319 break;
8320 }
8321 else if (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE)
8322 break;
8323 if (TREE_CODE (OMP_CLAUSE_DECL (c)) == COMPOUND_EXPR)
8324 {
8325 gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (c), 0), pre_p,
8326 NULL, is_gimple_val, fb_rvalue);
8327 OMP_CLAUSE_DECL (c) = TREE_OPERAND (OMP_CLAUSE_DECL (c), 1);
8328 }
8329 if (error_operand_p (OMP_CLAUSE_DECL (c)))
8330 {
8331 remove = true;
8332 break;
8333 }
8334 OMP_CLAUSE_DECL (c) = build_fold_addr_expr (OMP_CLAUSE_DECL (c));
8335 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p, NULL,
8336 is_gimple_val, fb_rvalue) == GS_ERROR)
8337 {
8338 remove = true;
8339 break;
8340 }
8341 break;
8342
8343 case OMP_CLAUSE_TO:
8344 case OMP_CLAUSE_FROM:
8345 case OMP_CLAUSE__CACHE_:
8346 decl = OMP_CLAUSE_DECL (c);
8347 if (error_operand_p (decl))
8348 {
8349 remove = true;
8350 break;
8351 }
8352 if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
8353 OMP_CLAUSE_SIZE (c) = DECL_P (decl) ? DECL_SIZE_UNIT (decl)
8354 : TYPE_SIZE_UNIT (TREE_TYPE (decl));
8355 if (gimplify_expr (&OMP_CLAUSE_SIZE (c), pre_p,
8356 NULL, is_gimple_val, fb_rvalue) == GS_ERROR)
8357 {
8358 remove = true;
8359 break;
8360 }
8361 if (!DECL_P (decl))
8362 {
8363 if (gimplify_expr (&OMP_CLAUSE_DECL (c), pre_p,
8364 NULL, is_gimple_lvalue, fb_lvalue)
8365 == GS_ERROR)
8366 {
8367 remove = true;
8368 break;
8369 }
8370 break;
8371 }
8372 goto do_notice;
8373
8374 case OMP_CLAUSE_USE_DEVICE_PTR:
8375 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
8376 goto do_add;
8377 case OMP_CLAUSE_IS_DEVICE_PTR:
8378 flags = GOVD_FIRSTPRIVATE | GOVD_EXPLICIT;
8379 goto do_add;
8380
8381 do_add:
8382 decl = OMP_CLAUSE_DECL (c);
8383 do_add_decl:
8384 if (error_operand_p (decl))
8385 {
8386 remove = true;
8387 break;
8388 }
8389 if (DECL_NAME (decl) == NULL_TREE && (flags & GOVD_SHARED) == 0)
8390 {
8391 tree t = omp_member_access_dummy_var (decl);
8392 if (t)
8393 {
8394 tree v = DECL_VALUE_EXPR (decl);
8395 DECL_NAME (decl) = DECL_NAME (TREE_OPERAND (v, 1));
8396 if (outer_ctx)
8397 omp_notice_variable (outer_ctx, t, true);
8398 }
8399 }
8400 if (code == OACC_DATA
8401 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
8402 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
8403 flags |= GOVD_MAP_0LEN_ARRAY;
8404 omp_add_variable (ctx, decl, flags);
8405 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_REDUCTION
8406 && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
8407 {
8408 omp_add_variable (ctx, OMP_CLAUSE_REDUCTION_PLACEHOLDER (c),
8409 GOVD_LOCAL | GOVD_SEEN);
8410 if (OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c)
8411 && walk_tree (&OMP_CLAUSE_REDUCTION_INIT (c),
8412 find_decl_expr,
8413 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
8414 NULL) == NULL_TREE)
8415 omp_add_variable (ctx,
8416 OMP_CLAUSE_REDUCTION_DECL_PLACEHOLDER (c),
8417 GOVD_LOCAL | GOVD_SEEN);
8418 gimplify_omp_ctxp = ctx;
8419 push_gimplify_context ();
8420
8421 OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c) = NULL;
8422 OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c) = NULL;
8423
8424 gimplify_and_add (OMP_CLAUSE_REDUCTION_INIT (c),
8425 &OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c));
8426 pop_gimplify_context
8427 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_INIT (c)));
8428 push_gimplify_context ();
8429 gimplify_and_add (OMP_CLAUSE_REDUCTION_MERGE (c),
8430 &OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c));
8431 pop_gimplify_context
8432 (gimple_seq_first_stmt (OMP_CLAUSE_REDUCTION_GIMPLE_MERGE (c)));
8433 OMP_CLAUSE_REDUCTION_INIT (c) = NULL_TREE;
8434 OMP_CLAUSE_REDUCTION_MERGE (c) = NULL_TREE;
8435
8436 gimplify_omp_ctxp = outer_ctx;
8437 }
8438 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
8439 && OMP_CLAUSE_LASTPRIVATE_STMT (c))
8440 {
8441 gimplify_omp_ctxp = ctx;
8442 push_gimplify_context ();
8443 if (TREE_CODE (OMP_CLAUSE_LASTPRIVATE_STMT (c)) != BIND_EXPR)
8444 {
8445 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
8446 NULL, NULL);
8447 TREE_SIDE_EFFECTS (bind) = 1;
8448 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LASTPRIVATE_STMT (c);
8449 OMP_CLAUSE_LASTPRIVATE_STMT (c) = bind;
8450 }
8451 gimplify_and_add (OMP_CLAUSE_LASTPRIVATE_STMT (c),
8452 &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c));
8453 pop_gimplify_context
8454 (gimple_seq_first_stmt (OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c)));
8455 OMP_CLAUSE_LASTPRIVATE_STMT (c) = NULL_TREE;
8456
8457 gimplify_omp_ctxp = outer_ctx;
8458 }
8459 else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
8460 && OMP_CLAUSE_LINEAR_STMT (c))
8461 {
8462 gimplify_omp_ctxp = ctx;
8463 push_gimplify_context ();
8464 if (TREE_CODE (OMP_CLAUSE_LINEAR_STMT (c)) != BIND_EXPR)
8465 {
8466 tree bind = build3 (BIND_EXPR, void_type_node, NULL,
8467 NULL, NULL);
8468 TREE_SIDE_EFFECTS (bind) = 1;
8469 BIND_EXPR_BODY (bind) = OMP_CLAUSE_LINEAR_STMT (c);
8470 OMP_CLAUSE_LINEAR_STMT (c) = bind;
8471 }
8472 gimplify_and_add (OMP_CLAUSE_LINEAR_STMT (c),
8473 &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c));
8474 pop_gimplify_context
8475 (gimple_seq_first_stmt (OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c)));
8476 OMP_CLAUSE_LINEAR_STMT (c) = NULL_TREE;
8477
8478 gimplify_omp_ctxp = outer_ctx;
8479 }
8480 if (notice_outer)
8481 goto do_notice;
8482 break;
8483
8484 case OMP_CLAUSE_COPYIN:
8485 case OMP_CLAUSE_COPYPRIVATE:
8486 decl = OMP_CLAUSE_DECL (c);
8487 if (error_operand_p (decl))
8488 {
8489 remove = true;
8490 break;
8491 }
8492 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_COPYPRIVATE
8493 && !remove
8494 && !omp_check_private (ctx, decl, true))
8495 {
8496 remove = true;
8497 if (is_global_var (decl))
8498 {
8499 if (DECL_THREAD_LOCAL_P (decl))
8500 remove = false;
8501 else if (DECL_HAS_VALUE_EXPR_P (decl))
8502 {
8503 tree value = get_base_address (DECL_VALUE_EXPR (decl));
8504
8505 if (value
8506 && DECL_P (value)
8507 && DECL_THREAD_LOCAL_P (value))
8508 remove = false;
8509 }
8510 }
8511 if (remove)
8512 error_at (OMP_CLAUSE_LOCATION (c),
8513 "copyprivate variable %qE is not threadprivate"
8514 " or private in outer context", DECL_NAME (decl));
8515 }
8516 do_notice:
8517 if (outer_ctx)
8518 omp_notice_variable (outer_ctx, decl, true);
8519 if (check_non_private
8520 && region_type == ORT_WORKSHARE
8521 && (OMP_CLAUSE_CODE (c) != OMP_CLAUSE_REDUCTION
8522 || decl == OMP_CLAUSE_DECL (c)
8523 || (TREE_CODE (OMP_CLAUSE_DECL (c)) == MEM_REF
8524 && (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
8525 == ADDR_EXPR
8526 || (TREE_CODE (TREE_OPERAND (OMP_CLAUSE_DECL (c), 0))
8527 == POINTER_PLUS_EXPR
8528 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND
8529 (OMP_CLAUSE_DECL (c), 0), 0))
8530 == ADDR_EXPR)))))
8531 && omp_check_private (ctx, decl, false))
8532 {
8533 error ("%s variable %qE is private in outer context",
8534 check_non_private, DECL_NAME (decl));
8535 remove = true;
8536 }
8537 break;
8538
8539 case OMP_CLAUSE_IF:
8540 if (OMP_CLAUSE_IF_MODIFIER (c) != ERROR_MARK
8541 && OMP_CLAUSE_IF_MODIFIER (c) != code)
8542 {
8543 const char *p[2];
8544 for (int i = 0; i < 2; i++)
8545 switch (i ? OMP_CLAUSE_IF_MODIFIER (c) : code)
8546 {
8547 case OMP_PARALLEL: p[i] = "parallel"; break;
8548 case OMP_TASK: p[i] = "task"; break;
8549 case OMP_TASKLOOP: p[i] = "taskloop"; break;
8550 case OMP_TARGET_DATA: p[i] = "target data"; break;
8551 case OMP_TARGET: p[i] = "target"; break;
8552 case OMP_TARGET_UPDATE: p[i] = "target update"; break;
8553 case OMP_TARGET_ENTER_DATA:
8554 p[i] = "target enter data"; break;
8555 case OMP_TARGET_EXIT_DATA: p[i] = "target exit data"; break;
8556 default: gcc_unreachable ();
8557 }
8558 error_at (OMP_CLAUSE_LOCATION (c),
8559 "expected %qs %<if%> clause modifier rather than %qs",
8560 p[0], p[1]);
8561 remove = true;
8562 }
8563 /* Fall through. */
8564
8565 case OMP_CLAUSE_FINAL:
8566 OMP_CLAUSE_OPERAND (c, 0)
8567 = gimple_boolify (OMP_CLAUSE_OPERAND (c, 0));
8568 /* Fall through. */
8569
8570 case OMP_CLAUSE_SCHEDULE:
8571 case OMP_CLAUSE_NUM_THREADS:
8572 case OMP_CLAUSE_NUM_TEAMS:
8573 case OMP_CLAUSE_THREAD_LIMIT:
8574 case OMP_CLAUSE_DIST_SCHEDULE:
8575 case OMP_CLAUSE_DEVICE:
8576 case OMP_CLAUSE_PRIORITY:
8577 case OMP_CLAUSE_GRAINSIZE:
8578 case OMP_CLAUSE_NUM_TASKS:
8579 case OMP_CLAUSE_HINT:
8580 case OMP_CLAUSE_ASYNC:
8581 case OMP_CLAUSE_WAIT:
8582 case OMP_CLAUSE_NUM_GANGS:
8583 case OMP_CLAUSE_NUM_WORKERS:
8584 case OMP_CLAUSE_VECTOR_LENGTH:
8585 case OMP_CLAUSE_WORKER:
8586 case OMP_CLAUSE_VECTOR:
8587 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
8588 is_gimple_val, fb_rvalue) == GS_ERROR)
8589 remove = true;
8590 break;
8591
8592 case OMP_CLAUSE_GANG:
8593 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 0), pre_p, NULL,
8594 is_gimple_val, fb_rvalue) == GS_ERROR)
8595 remove = true;
8596 if (gimplify_expr (&OMP_CLAUSE_OPERAND (c, 1), pre_p, NULL,
8597 is_gimple_val, fb_rvalue) == GS_ERROR)
8598 remove = true;
8599 break;
8600
8601 case OMP_CLAUSE_NOWAIT:
8602 case OMP_CLAUSE_ORDERED:
8603 case OMP_CLAUSE_UNTIED:
8604 case OMP_CLAUSE_COLLAPSE:
8605 case OMP_CLAUSE_TILE:
8606 case OMP_CLAUSE_AUTO:
8607 case OMP_CLAUSE_SEQ:
8608 case OMP_CLAUSE_INDEPENDENT:
8609 case OMP_CLAUSE_MERGEABLE:
8610 case OMP_CLAUSE_PROC_BIND:
8611 case OMP_CLAUSE_SAFELEN:
8612 case OMP_CLAUSE_SIMDLEN:
8613 case OMP_CLAUSE_NOGROUP:
8614 case OMP_CLAUSE_THREADS:
8615 case OMP_CLAUSE_SIMD:
8616 break;
8617
8618 case OMP_CLAUSE_DEFAULTMAP:
8619 ctx->target_map_scalars_firstprivate = false;
8620 break;
8621
8622 case OMP_CLAUSE_ALIGNED:
8623 decl = OMP_CLAUSE_DECL (c);
8624 if (error_operand_p (decl))
8625 {
8626 remove = true;
8627 break;
8628 }
8629 if (gimplify_expr (&OMP_CLAUSE_ALIGNED_ALIGNMENT (c), pre_p, NULL,
8630 is_gimple_val, fb_rvalue) == GS_ERROR)
8631 {
8632 remove = true;
8633 break;
8634 }
8635 if (!is_global_var (decl)
8636 && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
8637 omp_add_variable (ctx, decl, GOVD_ALIGNED);
8638 break;
8639
8640 case OMP_CLAUSE_DEFAULT:
8641 ctx->default_kind = OMP_CLAUSE_DEFAULT_KIND (c);
8642 break;
8643
8644 default:
8645 gcc_unreachable ();
8646 }
8647
8648 if (code == OACC_DATA
8649 && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_MAP
8650 && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
8651 remove = true;
8652 if (remove)
8653 *list_p = OMP_CLAUSE_CHAIN (c);
8654 else
8655 list_p = &OMP_CLAUSE_CHAIN (c);
8656 }
8657
8658 gimplify_omp_ctxp = ctx;
8659 if (struct_map_to_clause)
8660 delete struct_map_to_clause;
8661 }
8662
8663 /* Return true if DECL is a candidate for shared to firstprivate
8664 optimization. We only consider non-addressable scalars, not
8665 too big, and not references. */
8666
8667 static bool
omp_shared_to_firstprivate_optimizable_decl_p(tree decl)8668 omp_shared_to_firstprivate_optimizable_decl_p (tree decl)
8669 {
8670 if (TREE_ADDRESSABLE (decl))
8671 return false;
8672 tree type = TREE_TYPE (decl);
8673 if (!is_gimple_reg_type (type)
8674 || TREE_CODE (type) == REFERENCE_TYPE
8675 || TREE_ADDRESSABLE (type))
8676 return false;
8677 /* Don't optimize too large decls, as each thread/task will have
8678 its own. */
8679 HOST_WIDE_INT len = int_size_in_bytes (type);
8680 if (len == -1 || len > 4 * POINTER_SIZE / BITS_PER_UNIT)
8681 return false;
8682 if (lang_hooks.decls.omp_privatize_by_reference (decl))
8683 return false;
8684 return true;
8685 }
8686
8687 /* Helper function of omp_find_stores_op and gimplify_adjust_omp_clauses*.
8688 For omp_shared_to_firstprivate_optimizable_decl_p decl mark it as
8689 GOVD_WRITTEN in outer contexts. */
8690
8691 static void
omp_mark_stores(struct gimplify_omp_ctx * ctx,tree decl)8692 omp_mark_stores (struct gimplify_omp_ctx *ctx, tree decl)
8693 {
8694 for (; ctx; ctx = ctx->outer_context)
8695 {
8696 splay_tree_node n = splay_tree_lookup (ctx->variables,
8697 (splay_tree_key) decl);
8698 if (n == NULL)
8699 continue;
8700 else if (n->value & GOVD_SHARED)
8701 {
8702 n->value |= GOVD_WRITTEN;
8703 return;
8704 }
8705 else if (n->value & GOVD_DATA_SHARE_CLASS)
8706 return;
8707 }
8708 }
8709
8710 /* Helper callback for walk_gimple_seq to discover possible stores
8711 to omp_shared_to_firstprivate_optimizable_decl_p decls and set
8712 GOVD_WRITTEN if they are GOVD_SHARED in some outer context
8713 for those. */
8714
8715 static tree
omp_find_stores_op(tree * tp,int * walk_subtrees,void * data)8716 omp_find_stores_op (tree *tp, int *walk_subtrees, void *data)
8717 {
8718 struct walk_stmt_info *wi = (struct walk_stmt_info *) data;
8719
8720 *walk_subtrees = 0;
8721 if (!wi->is_lhs)
8722 return NULL_TREE;
8723
8724 tree op = *tp;
8725 do
8726 {
8727 if (handled_component_p (op))
8728 op = TREE_OPERAND (op, 0);
8729 else if ((TREE_CODE (op) == MEM_REF || TREE_CODE (op) == TARGET_MEM_REF)
8730 && TREE_CODE (TREE_OPERAND (op, 0)) == ADDR_EXPR)
8731 op = TREE_OPERAND (TREE_OPERAND (op, 0), 0);
8732 else
8733 break;
8734 }
8735 while (1);
8736 if (!DECL_P (op) || !omp_shared_to_firstprivate_optimizable_decl_p (op))
8737 return NULL_TREE;
8738
8739 omp_mark_stores (gimplify_omp_ctxp, op);
8740 return NULL_TREE;
8741 }
8742
8743 /* Helper callback for walk_gimple_seq to discover possible stores
8744 to omp_shared_to_firstprivate_optimizable_decl_p decls and set
8745 GOVD_WRITTEN if they are GOVD_SHARED in some outer context
8746 for those. */
8747
8748 static tree
omp_find_stores_stmt(gimple_stmt_iterator * gsi_p,bool * handled_ops_p,struct walk_stmt_info * wi)8749 omp_find_stores_stmt (gimple_stmt_iterator *gsi_p,
8750 bool *handled_ops_p,
8751 struct walk_stmt_info *wi)
8752 {
8753 gimple *stmt = gsi_stmt (*gsi_p);
8754 switch (gimple_code (stmt))
8755 {
8756 /* Don't recurse on OpenMP constructs for which
8757 gimplify_adjust_omp_clauses already handled the bodies,
8758 except handle gimple_omp_for_pre_body. */
8759 case GIMPLE_OMP_FOR:
8760 *handled_ops_p = true;
8761 if (gimple_omp_for_pre_body (stmt))
8762 walk_gimple_seq (gimple_omp_for_pre_body (stmt),
8763 omp_find_stores_stmt, omp_find_stores_op, wi);
8764 break;
8765 case GIMPLE_OMP_PARALLEL:
8766 case GIMPLE_OMP_TASK:
8767 case GIMPLE_OMP_SECTIONS:
8768 case GIMPLE_OMP_SINGLE:
8769 case GIMPLE_OMP_TARGET:
8770 case GIMPLE_OMP_TEAMS:
8771 case GIMPLE_OMP_CRITICAL:
8772 *handled_ops_p = true;
8773 break;
8774 default:
8775 break;
8776 }
8777 return NULL_TREE;
8778 }
8779
/* Arguments threaded through the DATA pointer to
   gimplify_adjust_omp_clauses_1.  */

struct gimplify_adjust_omp_clauses_data
{
  /* Head of the clause list being adjusted.  */
  tree *list_p;
  /* Statement sequence to emit any needed gimplification into.  */
  gimple_seq *pre_p;
};
8785
8786 /* For all variables that were not actually used within the context,
8787 remove PRIVATE, SHARED, and FIRSTPRIVATE clauses. */
8788
static int
gimplify_adjust_omp_clauses_1 (splay_tree_node n, void *data)
{
  tree *list_p = ((struct gimplify_adjust_omp_clauses_data *) data)->list_p;
  gimple_seq *pre_p
    = ((struct gimplify_adjust_omp_clauses_data *) data)->pre_p;
  tree decl = (tree) n->key;
  unsigned flags = n->value;
  enum omp_clause_code code;
  tree clause;
  bool private_debug;

  /* Variables with an explicit clause, or local to the region, need no
     implicit clause.  */
  if (flags & (GOVD_EXPLICIT | GOVD_LOCAL))
    return 0;
  /* Variables never referenced in the region get no clause at all.  */
  if ((flags & GOVD_SEEN) == 0)
    return 0;
  if (flags & GOVD_DEBUG_PRIVATE)
    {
      gcc_assert ((flags & GOVD_DATA_SHARE_CLASS) == GOVD_SHARED);
      private_debug = true;
    }
  else if (flags & GOVD_MAP)
    private_debug = false;
  else
    /* Let the frontend decide whether a shared variable should be
       downgraded to private for debug-info purposes.  */
    private_debug
      = lang_hooks.decls.omp_private_debug_clause (decl,
						   !!(flags & GOVD_SHARED));
  /* Translate the GOVD_* sharing class into a clause code.  */
  if (private_debug)
    code = OMP_CLAUSE_PRIVATE;
  else if (flags & GOVD_MAP)
    {
      code = OMP_CLAUSE_MAP;
      if ((gimplify_omp_ctxp->region_type & ORT_ACC) == 0
	  && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl))))
	{
	  error ("%<_Atomic%> %qD in implicit %<map%> clause", decl);
	  return 0;
	}
    }
  else if (flags & GOVD_SHARED)
    {
      if (is_global_var (decl))
	{
	  /* A global is only worth an explicit shared clause if some
	     enclosing context privatizes (or maps) it; otherwise it is
	     shared by default anyway.  */
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
	  while (ctx != NULL)
	    {
	      splay_tree_node on
		= splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	      if (on && (on->value & (GOVD_FIRSTPRIVATE | GOVD_LASTPRIVATE
				      | GOVD_PRIVATE | GOVD_REDUCTION
				      | GOVD_LINEAR | GOVD_MAP)) != 0)
		break;
	      ctx = ctx->outer_context;
	    }
	  if (ctx == NULL)
	    return 0;
	}
      code = OMP_CLAUSE_SHARED;
    }
  else if (flags & GOVD_PRIVATE)
    code = OMP_CLAUSE_PRIVATE;
  else if (flags & GOVD_FIRSTPRIVATE)
    {
      code = OMP_CLAUSE_FIRSTPRIVATE;
      if ((gimplify_omp_ctxp->region_type & ORT_TARGET)
	  && (gimplify_omp_ctxp->region_type & ORT_ACC) == 0
	  && TYPE_ATOMIC (strip_array_types (TREE_TYPE (decl))))
	{
	  error ("%<_Atomic%> %qD in implicit %<firstprivate%> clause on "
		 "%<target%> construct", decl);
	  return 0;
	}
    }
  else if (flags & GOVD_LASTPRIVATE)
    code = OMP_CLAUSE_LASTPRIVATE;
  else if (flags & GOVD_ALIGNED)
    return 0;
  else
    gcc_unreachable ();

  /* A lastprivate, or a written shared variable, implies a store visible
     in outer contexts; record it for the shared->firstprivate
     optimization.  */
  if (((flags & GOVD_LASTPRIVATE)
       || (code == OMP_CLAUSE_SHARED && (flags & GOVD_WRITTEN)))
      && omp_shared_to_firstprivate_optimizable_decl_p (decl))
    omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);

  /* Build the new clause and prepend it to *list_p; CHAIN remembers the
     old head so we can tell below which clauses are newly added.  */
  tree chain = *list_p;
  clause = build_omp_clause (input_location, code);
  OMP_CLAUSE_DECL (clause) = decl;
  OMP_CLAUSE_CHAIN (clause) = chain;
  if (private_debug)
    OMP_CLAUSE_PRIVATE_DEBUG (clause) = 1;
  else if (code == OMP_CLAUSE_PRIVATE && (flags & GOVD_PRIVATE_OUTER_REF))
    OMP_CLAUSE_PRIVATE_OUTER_REF (clause) = 1;
  else if (code == OMP_CLAUSE_SHARED
	   && (flags & GOVD_WRITTEN) == 0
	   && omp_shared_to_firstprivate_optimizable_decl_p (decl))
    OMP_CLAUSE_SHARED_READONLY (clause) = 1;
  else if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_EXPLICIT) == 0)
    OMP_CLAUSE_FIRSTPRIVATE_IMPLICIT (clause) = 1;
  else if (code == OMP_CLAUSE_MAP && (flags & GOVD_MAP_0LEN_ARRAY) != 0)
    {
      /* Possibly zero-length array section: emit an alloc map of the
	 dereferenced pointer plus a firstprivate-pointer map of the
	 pointer itself.  */
      tree nc = build_omp_clause (input_location, OMP_CLAUSE_MAP);
      OMP_CLAUSE_DECL (nc) = decl;
      if (TREE_CODE (TREE_TYPE (decl)) == REFERENCE_TYPE
	  && TREE_CODE (TREE_TYPE (TREE_TYPE (decl))) == POINTER_TYPE)
	OMP_CLAUSE_DECL (clause)
	  = build_simple_mem_ref_loc (input_location, decl);
      OMP_CLAUSE_DECL (clause)
	= build2 (MEM_REF, char_type_node, OMP_CLAUSE_DECL (clause),
		  build_int_cst (build_pointer_type (char_type_node), 0));
      OMP_CLAUSE_SIZE (clause) = size_zero_node;
      OMP_CLAUSE_SIZE (nc) = size_zero_node;
      OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_ALLOC);
      OMP_CLAUSE_MAP_MAYBE_ZERO_LENGTH_ARRAY_SECTION (clause) = 1;
      OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
      OMP_CLAUSE_CHAIN (nc) = chain;
      OMP_CLAUSE_CHAIN (clause) = nc;
      /* Gimplify the address in the enclosing context, not this one.  */
      struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
      gimplify_omp_ctxp = ctx->outer_context;
      gimplify_expr (&TREE_OPERAND (OMP_CLAUSE_DECL (clause), 0),
		     pre_p, NULL, is_gimple_val, fb_rvalue);
      gimplify_omp_ctxp = ctx;
    }
  else if (code == OMP_CLAUSE_MAP)
    {
      int kind;
      /* Not all combinations of these GOVD_MAP flags are actually valid.  */
      switch (flags & (GOVD_MAP_TO_ONLY
		       | GOVD_MAP_FORCE
		       | GOVD_MAP_FORCE_PRESENT))
	{
	case 0:
	  kind = GOMP_MAP_TOFROM;
	  break;
	case GOVD_MAP_FORCE:
	  kind = GOMP_MAP_TOFROM | GOMP_MAP_FLAG_FORCE;
	  break;
	case GOVD_MAP_TO_ONLY:
	  kind = GOMP_MAP_TO;
	  break;
	case GOVD_MAP_TO_ONLY | GOVD_MAP_FORCE:
	  kind = GOMP_MAP_TO | GOMP_MAP_FLAG_FORCE;
	  break;
	case GOVD_MAP_FORCE_PRESENT:
	  kind = GOMP_MAP_FORCE_PRESENT;
	  break;
	default:
	  gcc_unreachable ();
	}
      OMP_CLAUSE_SET_MAP_KIND (clause, kind);
      if (DECL_SIZE (decl)
	  && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
	{
	  /* Variable-sized decl: it lives behind a DECL_VALUE_EXPR
	     indirection; map the pointed-to storage and add a pointer
	     map for the base.  */
	  tree decl2 = DECL_VALUE_EXPR (decl);
	  gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
	  decl2 = TREE_OPERAND (decl2, 0);
	  gcc_assert (DECL_P (decl2));
	  tree mem = build_simple_mem_ref (decl2);
	  OMP_CLAUSE_DECL (clause) = mem;
	  OMP_CLAUSE_SIZE (clause) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
	  if (gimplify_omp_ctxp->outer_context)
	    {
	      struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp->outer_context;
	      omp_notice_variable (ctx, decl2, true);
	      omp_notice_variable (ctx, OMP_CLAUSE_SIZE (clause), true);
	    }
	  tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
				      OMP_CLAUSE_MAP);
	  OMP_CLAUSE_DECL (nc) = decl;
	  OMP_CLAUSE_SIZE (nc) = size_zero_node;
	  if (gimplify_omp_ctxp->target_firstprivatize_array_bases)
	    OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_POINTER);
	  else
	    OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
	  OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
	  OMP_CLAUSE_CHAIN (clause) = nc;
	}
      else if (gimplify_omp_ctxp->target_firstprivatize_array_bases
	       && lang_hooks.decls.omp_privatize_by_reference (decl))
	{
	  /* Reference-typed decl: map what it refers to, and add a
	     firstprivate-reference map for the reference itself.  */
	  OMP_CLAUSE_DECL (clause) = build_simple_mem_ref (decl);
	  OMP_CLAUSE_SIZE (clause)
	    = unshare_expr (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (decl))));
	  /* Gimplify the size in the enclosing context.  */
	  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
	  gimplify_omp_ctxp = ctx->outer_context;
	  gimplify_expr (&OMP_CLAUSE_SIZE (clause),
			 pre_p, NULL, is_gimple_val, fb_rvalue);
	  gimplify_omp_ctxp = ctx;
	  tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (clause),
				      OMP_CLAUSE_MAP);
	  OMP_CLAUSE_DECL (nc) = decl;
	  OMP_CLAUSE_SIZE (nc) = size_zero_node;
	  OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_FIRSTPRIVATE_REFERENCE);
	  OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (clause);
	  OMP_CLAUSE_CHAIN (clause) = nc;
	}
      else
	OMP_CLAUSE_SIZE (clause) = DECL_SIZE_UNIT (decl);
    }
  if (code == OMP_CLAUSE_FIRSTPRIVATE && (flags & GOVD_LASTPRIVATE) != 0)
    {
      /* firstprivate+lastprivate: add a matching lastprivate clause.  */
      tree nc = build_omp_clause (input_location, OMP_CLAUSE_LASTPRIVATE);
      OMP_CLAUSE_DECL (nc) = decl;
      OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (nc) = 1;
      OMP_CLAUSE_CHAIN (nc) = chain;
      OMP_CLAUSE_CHAIN (clause) = nc;
      struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
      gimplify_omp_ctxp = ctx->outer_context;
      lang_hooks.decls.omp_finish_clause (nc, pre_p);
      gimplify_omp_ctxp = ctx;
    }
  *list_p = clause;
  /* Run the frontend finalization hook in the enclosing context, then
     notice any DECL-valued sizes of the newly added map clauses there.  */
  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
  gimplify_omp_ctxp = ctx->outer_context;
  lang_hooks.decls.omp_finish_clause (clause, pre_p);
  if (gimplify_omp_ctxp)
    for (; clause != chain; clause = OMP_CLAUSE_CHAIN (clause))
      if (OMP_CLAUSE_CODE (clause) == OMP_CLAUSE_MAP
	  && DECL_P (OMP_CLAUSE_SIZE (clause)))
	omp_notice_variable (gimplify_omp_ctxp, OMP_CLAUSE_SIZE (clause),
			     true);
  gimplify_omp_ctxp = ctx;
  return 0;
}
9013
/* Post-process the clause list *LIST_P of the construct CODE whose
   gimplified body is BODY: prune clauses for unused variables, fix up
   map/size details, then add implicit data-sharing clauses from the
   current gimplify context, which is popped on exit.  Side-effect
   statements go to *PRE_P.  */

static void
gimplify_adjust_omp_clauses (gimple_seq *pre_p, gimple_seq body, tree *list_p,
			     enum tree_code code)
{
  struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
  tree c, decl;

  /* If enclosed in a parallel/task/teams region, scan the body for
     stores so shared variables can be marked GOVD_WRITTEN.  */
  if (body)
    {
      struct gimplify_omp_ctx *octx;
      for (octx = ctx; octx; octx = octx->outer_context)
	if ((octx->region_type & (ORT_PARALLEL | ORT_TASK | ORT_TEAMS)) != 0)
	  break;
      if (octx)
	{
	  struct walk_stmt_info wi;
	  memset (&wi, 0, sizeof (wi));
	  walk_gimple_seq (body, omp_find_stores_stmt,
			   omp_find_stores_op, &wi);
	}
    }

  if (ctx->add_safelen1)
    {
      /* If there are VLAs in the body of simd loop, prevent
	 vectorization.  */
      gcc_assert (ctx->region_type == ORT_SIMD);
      c = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_SAFELEN);
      OMP_CLAUSE_SAFELEN_EXPR (c) = integer_one_node;
      OMP_CLAUSE_CHAIN (c) = *list_p;
      *list_p = c;
      list_p = &OMP_CLAUSE_CHAIN (c);
    }

  /* Walk the clause chain, removing clauses (by unlinking from *list_p)
     or fixing them up in place.  */
  while ((c = *list_p) != NULL)
    {
      splay_tree_node n;
      bool remove = false;

      switch (OMP_CLAUSE_CODE (c))
	{
	case OMP_CLAUSE_FIRSTPRIVATE:
	  if ((ctx->region_type & ORT_TARGET)
	      && (ctx->region_type & ORT_ACC) == 0
	      && TYPE_ATOMIC (strip_array_types
					(TREE_TYPE (OMP_CLAUSE_DECL (c)))))
	    {
	      error_at (OMP_CLAUSE_LOCATION (c),
			"%<_Atomic%> %qD in %<firstprivate%> clause on "
			"%<target%> construct", OMP_CLAUSE_DECL (c));
	      remove = true;
	      break;
	    }
	  /* FALLTHRU */
	case OMP_CLAUSE_PRIVATE:
	case OMP_CLAUSE_SHARED:
	case OMP_CLAUSE_LINEAR:
	  decl = OMP_CLAUSE_DECL (c);
	  n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	  /* Drop the clause if the variable was never seen in the body.  */
	  remove = !(n->value & GOVD_SEEN);
	  if (! remove)
	    {
	      bool shared = OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED;
	      if ((n->value & GOVD_DEBUG_PRIVATE)
		  || lang_hooks.decls.omp_private_debug_clause (decl, shared))
		{
		  gcc_assert ((n->value & GOVD_DEBUG_PRIVATE) == 0
			      || ((n->value & GOVD_DATA_SHARE_CLASS)
				  == GOVD_SHARED));
		  OMP_CLAUSE_SET_CODE (c, OMP_CLAUSE_PRIVATE);
		  OMP_CLAUSE_PRIVATE_DEBUG (c) = 1;
		}
	      if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
		  && (n->value & GOVD_WRITTEN) == 0
		  && DECL_P (decl)
		  && omp_shared_to_firstprivate_optimizable_decl_p (decl))
		OMP_CLAUSE_SHARED_READONLY (c) = 1;
	      else if (DECL_P (decl)
		       && ((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_SHARED
			    && (n->value & GOVD_WRITTEN) != 0)
			   || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
			       && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)))
		       && omp_shared_to_firstprivate_optimizable_decl_p (decl))
		omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
	    }
	  break;

	case OMP_CLAUSE_LASTPRIVATE:
	  /* Make sure OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE is set to
	     accurately reflect the presence of a FIRSTPRIVATE clause.  */
	  decl = OMP_CLAUSE_DECL (c);
	  n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	  OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c)
	    = (n->value & GOVD_FIRSTPRIVATE) != 0;
	  if (code == OMP_DISTRIBUTE
	      && OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
	    {
	      remove = true;
	      error_at (OMP_CLAUSE_LOCATION (c),
			"same variable used in %<firstprivate%> and "
			"%<lastprivate%> clauses on %<distribute%> "
			"construct");
	    }
	  if (!remove
	      && OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
	      && DECL_P (decl)
	      && omp_shared_to_firstprivate_optimizable_decl_p (decl))
	    omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
	  break;

	case OMP_CLAUSE_ALIGNED:
	  decl = OMP_CLAUSE_DECL (c);
	  if (!is_global_var (decl))
	    {
	      n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	      remove = n == NULL || !(n->value & GOVD_SEEN);
	      if (!remove && TREE_CODE (TREE_TYPE (decl)) == POINTER_TYPE)
		{
		  struct gimplify_omp_ctx *octx;
		  if (n != NULL
		      && (n->value & (GOVD_DATA_SHARE_CLASS
				      & ~GOVD_FIRSTPRIVATE)))
		    remove = true;
		  else
		    for (octx = ctx->outer_context; octx;
			 octx = octx->outer_context)
		      {
			n = splay_tree_lookup (octx->variables,
					       (splay_tree_key) decl);
			if (n == NULL)
			  continue;
			if (n->value & GOVD_LOCAL)
			  break;
			/* We have to avoid assigning a shared variable
			   to itself when trying to add
			   __builtin_assume_aligned.  */
			if (n->value & GOVD_SHARED)
			  {
			    remove = true;
			    break;
			  }
		      }
		}
	    }
	  else if (TREE_CODE (TREE_TYPE (decl)) == ARRAY_TYPE)
	    {
	      n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	      if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
		remove = true;
	    }
	  break;

	case OMP_CLAUSE_MAP:
	  if (code == OMP_TARGET_EXIT_DATA
	      && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_ALWAYS_POINTER)
	    {
	      remove = true;
	      break;
	    }
	  decl = OMP_CLAUSE_DECL (c);
	  /* Data clauses associated with acc parallel reductions must be
	     compatible with present_or_copy.  Warn and adjust the clause
	     if that is not the case.  */
	  if (ctx->region_type == ORT_ACC_PARALLEL)
	    {
	      tree t = DECL_P (decl) ? decl : TREE_OPERAND (decl, 0);
	      n = NULL;

	      if (DECL_P (t))
		n = splay_tree_lookup (ctx->variables, (splay_tree_key) t);

	      if (n && (n->value & GOVD_REDUCTION))
		{
		  enum gomp_map_kind kind = OMP_CLAUSE_MAP_KIND (c);

		  OMP_CLAUSE_MAP_IN_REDUCTION (c) = 1;
		  if ((kind & GOMP_MAP_TOFROM) != GOMP_MAP_TOFROM
		      && kind != GOMP_MAP_FORCE_PRESENT
		      && kind != GOMP_MAP_POINTER)
		    {
		      warning_at (OMP_CLAUSE_LOCATION (c), 0,
				  "incompatible data clause with reduction "
				  "on %qE; promoting to present_or_copy",
				  DECL_NAME (t));
		      OMP_CLAUSE_SET_MAP_KIND (c, GOMP_MAP_TOFROM);
		    }
		}
	    }
	  if (!DECL_P (decl))
	    {
	      /* Component-ref maps: keep them unless the whole struct was
		 never referenced in the target body.  */
	      if ((ctx->region_type & ORT_TARGET) != 0
		  && OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_FIRSTPRIVATE_POINTER)
		{
		  if (TREE_CODE (decl) == INDIRECT_REF
		      && TREE_CODE (TREE_OPERAND (decl, 0)) == COMPONENT_REF
		      && (TREE_CODE (TREE_TYPE (TREE_OPERAND (decl, 0)))
			  == REFERENCE_TYPE))
		    decl = TREE_OPERAND (decl, 0);
		  if (TREE_CODE (decl) == COMPONENT_REF)
		    {
		      while (TREE_CODE (decl) == COMPONENT_REF)
			decl = TREE_OPERAND (decl, 0);
		      if (DECL_P (decl))
			{
			  n = splay_tree_lookup (ctx->variables,
						 (splay_tree_key) decl);
			  if (!(n->value & GOVD_SEEN))
			    remove = true;
			}
		    }
		}
	      break;
	    }
	  n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	  if ((ctx->region_type & ORT_TARGET) != 0
	      && !(n->value & GOVD_SEEN)
	      && GOMP_MAP_ALWAYS_P (OMP_CLAUSE_MAP_KIND (c)) == 0
	      && (!is_global_var (decl)
		  || !lookup_attribute ("omp declare target link",
					DECL_ATTRIBUTES (decl))))
	    {
	      remove = true;
	      /* For struct element mapping, if struct is never referenced
		 in target block and none of the mapping has always modifier,
		 remove all the struct element mappings, which immediately
		 follow the GOMP_MAP_STRUCT map clause.  */
	      if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT)
		{
		  HOST_WIDE_INT cnt = tree_to_shwi (OMP_CLAUSE_SIZE (c));
		  while (cnt--)
		    OMP_CLAUSE_CHAIN (c)
		      = OMP_CLAUSE_CHAIN (OMP_CLAUSE_CHAIN (c));
		}
	    }
	  else if (OMP_CLAUSE_MAP_KIND (c) == GOMP_MAP_STRUCT
		   && code == OMP_TARGET_EXIT_DATA)
	    remove = true;
	  else if (DECL_SIZE (decl)
		   && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST
		   && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_POINTER
		   && OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FIRSTPRIVATE_POINTER
		   && (OMP_CLAUSE_MAP_KIND (c)
		       != GOMP_MAP_FIRSTPRIVATE_REFERENCE))
	    {
	      /* For GOMP_MAP_FORCE_DEVICEPTR, we'll never enter here, because
		 for these, TREE_CODE (DECL_SIZE (decl)) will always be
		 INTEGER_CST.  */
	      gcc_assert (OMP_CLAUSE_MAP_KIND (c) != GOMP_MAP_FORCE_DEVICEPTR);

	      /* Variable-sized decl: map the storage behind its
		 DECL_VALUE_EXPR and append a pointer map for the base.  */
	      tree decl2 = DECL_VALUE_EXPR (decl);
	      gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
	      decl2 = TREE_OPERAND (decl2, 0);
	      gcc_assert (DECL_P (decl2));
	      tree mem = build_simple_mem_ref (decl2);
	      OMP_CLAUSE_DECL (c) = mem;
	      OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
	      if (ctx->outer_context)
		{
		  omp_notice_variable (ctx->outer_context, decl2, true);
		  omp_notice_variable (ctx->outer_context,
				       OMP_CLAUSE_SIZE (c), true);
		}
	      if (((ctx->region_type & ORT_TARGET) != 0
		   || !ctx->target_firstprivatize_array_bases)
		  && ((n->value & GOVD_SEEN) == 0
		      || (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE)) == 0))
		{
		  tree nc = build_omp_clause (OMP_CLAUSE_LOCATION (c),
					      OMP_CLAUSE_MAP);
		  OMP_CLAUSE_DECL (nc) = decl;
		  OMP_CLAUSE_SIZE (nc) = size_zero_node;
		  if (ctx->target_firstprivatize_array_bases)
		    OMP_CLAUSE_SET_MAP_KIND (nc,
					     GOMP_MAP_FIRSTPRIVATE_POINTER);
		  else
		    OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_POINTER);
		  OMP_CLAUSE_CHAIN (nc) = OMP_CLAUSE_CHAIN (c);
		  OMP_CLAUSE_CHAIN (c) = nc;
		  /* Skip the just-inserted clause on the next iteration.  */
		  c = nc;
		}
	    }
	  else
	    {
	      if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
		OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
	      gcc_assert ((n->value & GOVD_SEEN) == 0
			  || ((n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
			      == 0));
	    }
	  break;

	case OMP_CLAUSE_TO:
	case OMP_CLAUSE_FROM:
	case OMP_CLAUSE__CACHE_:
	  decl = OMP_CLAUSE_DECL (c);
	  if (!DECL_P (decl))
	    break;
	  if (DECL_SIZE (decl)
	      && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
	    {
	      /* Same variable-sized handling as for map clauses above.  */
	      tree decl2 = DECL_VALUE_EXPR (decl);
	      gcc_assert (TREE_CODE (decl2) == INDIRECT_REF);
	      decl2 = TREE_OPERAND (decl2, 0);
	      gcc_assert (DECL_P (decl2));
	      tree mem = build_simple_mem_ref (decl2);
	      OMP_CLAUSE_DECL (c) = mem;
	      OMP_CLAUSE_SIZE (c) = TYPE_SIZE_UNIT (TREE_TYPE (decl));
	      if (ctx->outer_context)
		{
		  omp_notice_variable (ctx->outer_context, decl2, true);
		  omp_notice_variable (ctx->outer_context,
				       OMP_CLAUSE_SIZE (c), true);
		}
	    }
	  else if (OMP_CLAUSE_SIZE (c) == NULL_TREE)
	    OMP_CLAUSE_SIZE (c) = DECL_SIZE_UNIT (decl);
	  break;

	case OMP_CLAUSE_REDUCTION:
	  decl = OMP_CLAUSE_DECL (c);
	  /* OpenACC reductions need a present_or_copy data clause.
	     Add one if necessary.  Error is the reduction is private.  */
	  if (ctx->region_type == ORT_ACC_PARALLEL)
	    {
	      n = splay_tree_lookup (ctx->variables, (splay_tree_key) decl);
	      if (n->value & (GOVD_PRIVATE | GOVD_FIRSTPRIVATE))
		error_at (OMP_CLAUSE_LOCATION (c), "invalid private "
			  "reduction on %qE", DECL_NAME (decl));
	      else if ((n->value & GOVD_MAP) == 0)
		{
		  tree next = OMP_CLAUSE_CHAIN (c);
		  tree nc = build_omp_clause (UNKNOWN_LOCATION, OMP_CLAUSE_MAP);
		  OMP_CLAUSE_SET_MAP_KIND (nc, GOMP_MAP_TOFROM);
		  OMP_CLAUSE_DECL (nc) = decl;
		  OMP_CLAUSE_CHAIN (c) = nc;
		  lang_hooks.decls.omp_finish_clause (nc, pre_p);
		  /* Mark every clause the finish hook may have appended.  */
		  while (1)
		    {
		      OMP_CLAUSE_MAP_IN_REDUCTION (nc) = 1;
		      if (OMP_CLAUSE_CHAIN (nc) == NULL)
			break;
		      nc = OMP_CLAUSE_CHAIN (nc);
		    }
		  OMP_CLAUSE_CHAIN (nc) = next;
		  n->value |= GOVD_MAP;
		}
	    }
	  if (DECL_P (decl)
	      && omp_shared_to_firstprivate_optimizable_decl_p (decl))
	    omp_mark_stores (gimplify_omp_ctxp->outer_context, decl);
	  break;
	/* The remaining clause kinds need no adjustment here.  */
	case OMP_CLAUSE_COPYIN:
	case OMP_CLAUSE_COPYPRIVATE:
	case OMP_CLAUSE_IF:
	case OMP_CLAUSE_NUM_THREADS:
	case OMP_CLAUSE_NUM_TEAMS:
	case OMP_CLAUSE_THREAD_LIMIT:
	case OMP_CLAUSE_DIST_SCHEDULE:
	case OMP_CLAUSE_DEVICE:
	case OMP_CLAUSE_SCHEDULE:
	case OMP_CLAUSE_NOWAIT:
	case OMP_CLAUSE_ORDERED:
	case OMP_CLAUSE_DEFAULT:
	case OMP_CLAUSE_UNTIED:
	case OMP_CLAUSE_COLLAPSE:
	case OMP_CLAUSE_FINAL:
	case OMP_CLAUSE_MERGEABLE:
	case OMP_CLAUSE_PROC_BIND:
	case OMP_CLAUSE_SAFELEN:
	case OMP_CLAUSE_SIMDLEN:
	case OMP_CLAUSE_DEPEND:
	case OMP_CLAUSE_PRIORITY:
	case OMP_CLAUSE_GRAINSIZE:
	case OMP_CLAUSE_NUM_TASKS:
	case OMP_CLAUSE_NOGROUP:
	case OMP_CLAUSE_THREADS:
	case OMP_CLAUSE_SIMD:
	case OMP_CLAUSE_HINT:
	case OMP_CLAUSE_DEFAULTMAP:
	case OMP_CLAUSE_USE_DEVICE_PTR:
	case OMP_CLAUSE_IS_DEVICE_PTR:
	case OMP_CLAUSE_ASYNC:
	case OMP_CLAUSE_WAIT:
	case OMP_CLAUSE_INDEPENDENT:
	case OMP_CLAUSE_NUM_GANGS:
	case OMP_CLAUSE_NUM_WORKERS:
	case OMP_CLAUSE_VECTOR_LENGTH:
	case OMP_CLAUSE_GANG:
	case OMP_CLAUSE_WORKER:
	case OMP_CLAUSE_VECTOR:
	case OMP_CLAUSE_AUTO:
	case OMP_CLAUSE_SEQ:
	case OMP_CLAUSE_TILE:
	  break;

	default:
	  gcc_unreachable ();
	}

      if (remove)
	*list_p = OMP_CLAUSE_CHAIN (c);
      else
	list_p = &OMP_CLAUSE_CHAIN (c);
    }

  /* Add in any implicit data sharing.  */
  struct gimplify_adjust_omp_clauses_data data;
  data.list_p = list_p;
  data.pre_p = pre_p;
  splay_tree_foreach (ctx->variables, gimplify_adjust_omp_clauses_1, &data);

  /* Pop and free this gimplify context.  */
  gimplify_omp_ctxp = ctx->outer_context;
  delete_omp_context (ctx);
}
9428
9429 /* Gimplify OACC_CACHE. */
9430
9431 static void
gimplify_oacc_cache(tree * expr_p,gimple_seq * pre_p)9432 gimplify_oacc_cache (tree *expr_p, gimple_seq *pre_p)
9433 {
9434 tree expr = *expr_p;
9435
9436 gimplify_scan_omp_clauses (&OACC_CACHE_CLAUSES (expr), pre_p, ORT_ACC,
9437 OACC_CACHE);
9438 gimplify_adjust_omp_clauses (pre_p, NULL, &OACC_CACHE_CLAUSES (expr),
9439 OACC_CACHE);
9440
9441 /* TODO: Do something sensible with this information. */
9442
9443 *expr_p = NULL_TREE;
9444 }
9445
9446 /* Helper function of gimplify_oacc_declare. The helper's purpose is to,
9447 if required, translate 'kind' in CLAUSE into an 'entry' kind and 'exit'
9448 kind. The entry kind will replace the one in CLAUSE, while the exit
9449 kind will be used in a new omp_clause and returned to the caller. */
9450
9451 static tree
gimplify_oacc_declare_1(tree clause)9452 gimplify_oacc_declare_1 (tree clause)
9453 {
9454 HOST_WIDE_INT kind, new_op;
9455 bool ret = false;
9456 tree c = NULL;
9457
9458 kind = OMP_CLAUSE_MAP_KIND (clause);
9459
9460 switch (kind)
9461 {
9462 case GOMP_MAP_ALLOC:
9463 case GOMP_MAP_FORCE_ALLOC:
9464 case GOMP_MAP_FORCE_TO:
9465 new_op = GOMP_MAP_DELETE;
9466 ret = true;
9467 break;
9468
9469 case GOMP_MAP_FORCE_FROM:
9470 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_FORCE_ALLOC);
9471 new_op = GOMP_MAP_FORCE_FROM;
9472 ret = true;
9473 break;
9474
9475 case GOMP_MAP_FORCE_TOFROM:
9476 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_FORCE_TO);
9477 new_op = GOMP_MAP_FORCE_FROM;
9478 ret = true;
9479 break;
9480
9481 case GOMP_MAP_FROM:
9482 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_FORCE_ALLOC);
9483 new_op = GOMP_MAP_FROM;
9484 ret = true;
9485 break;
9486
9487 case GOMP_MAP_TOFROM:
9488 OMP_CLAUSE_SET_MAP_KIND (clause, GOMP_MAP_TO);
9489 new_op = GOMP_MAP_FROM;
9490 ret = true;
9491 break;
9492
9493 case GOMP_MAP_DEVICE_RESIDENT:
9494 case GOMP_MAP_FORCE_DEVICEPTR:
9495 case GOMP_MAP_FORCE_PRESENT:
9496 case GOMP_MAP_LINK:
9497 case GOMP_MAP_POINTER:
9498 case GOMP_MAP_TO:
9499 break;
9500
9501 default:
9502 gcc_unreachable ();
9503 break;
9504 }
9505
9506 if (ret)
9507 {
9508 c = build_omp_clause (OMP_CLAUSE_LOCATION (clause), OMP_CLAUSE_MAP);
9509 OMP_CLAUSE_SET_MAP_KIND (c, new_op);
9510 OMP_CLAUSE_DECL (c) = OMP_CLAUSE_DECL (clause);
9511 }
9512
9513 return c;
9514 }
9515
9516 /* Gimplify OACC_DECLARE. */
9517
static void
gimplify_oacc_declare (tree *expr_p, gimple_seq *pre_p)
{
  tree expr = *expr_p;
  gomp_target *stmt;
  tree clauses, t, decl;

  clauses = OACC_DECLARE_CLAUSES (expr);

  gimplify_scan_omp_clauses (&clauses, pre_p, ORT_TARGET_DATA, OACC_DECLARE);
  gimplify_adjust_omp_clauses (pre_p, NULL, &clauses, OACC_DECLARE);

  for (t = clauses; t; t = OMP_CLAUSE_CHAIN (t))
    {
      decl = OMP_CLAUSE_DECL (t);

      /* For array-section style decls, attribute the base object.  */
      if (TREE_CODE (decl) == MEM_REF)
	decl = TREE_OPERAND (decl, 0);

      /* Tag the variable so later queries see it as OpenACC-declared.  */
      if (VAR_P (decl) && !is_oacc_declared (decl))
	{
	  tree attr = get_identifier ("oacc declare target");
	  DECL_ATTRIBUTES (decl) = tree_cons (attr, NULL_TREE,
					      DECL_ATTRIBUTES (decl));
	}

      /* For function-local variables, record the exit-time clause so it
	 can be emitted when the function returns.  */
      if (VAR_P (decl)
	  && !is_global_var (decl)
	  && DECL_CONTEXT (decl) == current_function_decl)
	{
	  tree c = gimplify_oacc_declare_1 (t);
	  if (c)
	    {
	      if (oacc_declare_returns == NULL)
		oacc_declare_returns = new hash_map<tree, tree>;

	      oacc_declare_returns->put (decl, c);
	    }
	}

      if (gimplify_omp_ctxp)
	omp_add_variable (gimplify_omp_ctxp, decl, GOVD_SEEN);
    }

  stmt = gimple_build_omp_target (NULL, GF_OMP_TARGET_KIND_OACC_DECLARE,
				  clauses);

  gimplify_seq_add_stmt (pre_p, stmt);

  *expr_p = NULL_TREE;
}
9569
9570 /* Gimplify the contents of an OMP_PARALLEL statement. This involves
9571 gimplification of the body, as well as scanning the body for used
9572 variables. We need to do this scan now, because variable-sized
9573 decls will be decomposed during gimplification. */
9574
9575 static void
gimplify_omp_parallel(tree * expr_p,gimple_seq * pre_p)9576 gimplify_omp_parallel (tree *expr_p, gimple_seq *pre_p)
9577 {
9578 tree expr = *expr_p;
9579 gimple *g;
9580 gimple_seq body = NULL;
9581
9582 gimplify_scan_omp_clauses (&OMP_PARALLEL_CLAUSES (expr), pre_p,
9583 OMP_PARALLEL_COMBINED (expr)
9584 ? ORT_COMBINED_PARALLEL
9585 : ORT_PARALLEL, OMP_PARALLEL);
9586
9587 push_gimplify_context ();
9588
9589 g = gimplify_and_return_first (OMP_PARALLEL_BODY (expr), &body);
9590 if (gimple_code (g) == GIMPLE_BIND)
9591 pop_gimplify_context (g);
9592 else
9593 pop_gimplify_context (NULL);
9594
9595 gimplify_adjust_omp_clauses (pre_p, body, &OMP_PARALLEL_CLAUSES (expr),
9596 OMP_PARALLEL);
9597
9598 g = gimple_build_omp_parallel (body,
9599 OMP_PARALLEL_CLAUSES (expr),
9600 NULL_TREE, NULL_TREE);
9601 if (OMP_PARALLEL_COMBINED (expr))
9602 gimple_omp_set_subcode (g, GF_OMP_PARALLEL_COMBINED);
9603 gimplify_seq_add_stmt (pre_p, g);
9604 *expr_p = NULL_TREE;
9605 }
9606
9607 /* Gimplify the contents of an OMP_TASK statement. This involves
9608 gimplification of the body, as well as scanning the body for used
9609 variables. We need to do this scan now, because variable-sized
9610 decls will be decomposed during gimplification. */
9611
9612 static void
gimplify_omp_task(tree * expr_p,gimple_seq * pre_p)9613 gimplify_omp_task (tree *expr_p, gimple_seq *pre_p)
9614 {
9615 tree expr = *expr_p;
9616 gimple *g;
9617 gimple_seq body = NULL;
9618
9619 gimplify_scan_omp_clauses (&OMP_TASK_CLAUSES (expr), pre_p,
9620 omp_find_clause (OMP_TASK_CLAUSES (expr),
9621 OMP_CLAUSE_UNTIED)
9622 ? ORT_UNTIED_TASK : ORT_TASK, OMP_TASK);
9623
9624 push_gimplify_context ();
9625
9626 g = gimplify_and_return_first (OMP_TASK_BODY (expr), &body);
9627 if (gimple_code (g) == GIMPLE_BIND)
9628 pop_gimplify_context (g);
9629 else
9630 pop_gimplify_context (NULL);
9631
9632 gimplify_adjust_omp_clauses (pre_p, body, &OMP_TASK_CLAUSES (expr),
9633 OMP_TASK);
9634
9635 g = gimple_build_omp_task (body,
9636 OMP_TASK_CLAUSES (expr),
9637 NULL_TREE, NULL_TREE,
9638 NULL_TREE, NULL_TREE, NULL_TREE);
9639 gimplify_seq_add_stmt (pre_p, g);
9640 *expr_p = NULL_TREE;
9641 }
9642
9643 /* Helper function of gimplify_omp_for, find OMP_FOR resp. OMP_SIMD
9644 with non-NULL OMP_FOR_INIT. */
9645
9646 static tree
find_combined_omp_for(tree * tp,int * walk_subtrees,void *)9647 find_combined_omp_for (tree *tp, int *walk_subtrees, void *)
9648 {
9649 *walk_subtrees = 0;
9650 switch (TREE_CODE (*tp))
9651 {
9652 case OMP_FOR:
9653 *walk_subtrees = 1;
9654 /* FALLTHRU */
9655 case OMP_SIMD:
9656 if (OMP_FOR_INIT (*tp) != NULL_TREE)
9657 return *tp;
9658 break;
9659 case BIND_EXPR:
9660 case STATEMENT_LIST:
9661 case OMP_PARALLEL:
9662 *walk_subtrees = 1;
9663 break;
9664 default:
9665 break;
9666 }
9667 return NULL_TREE;
9668 }
9669
9670 /* Gimplify the gross structure of an OMP_FOR statement. */
9671
9672 static enum gimplify_status
gimplify_omp_for(tree * expr_p,gimple_seq * pre_p)9673 gimplify_omp_for (tree *expr_p, gimple_seq *pre_p)
9674 {
9675 tree for_stmt, orig_for_stmt, inner_for_stmt = NULL_TREE, decl, var, t;
9676 enum gimplify_status ret = GS_ALL_DONE;
9677 enum gimplify_status tret;
9678 gomp_for *gfor;
9679 gimple_seq for_body, for_pre_body;
9680 int i;
9681 bitmap has_decl_expr = NULL;
9682 enum omp_region_type ort = ORT_WORKSHARE;
9683
9684 orig_for_stmt = for_stmt = *expr_p;
9685
9686 switch (TREE_CODE (for_stmt))
9687 {
9688 case OMP_FOR:
9689 case OMP_DISTRIBUTE:
9690 break;
9691 case OACC_LOOP:
9692 ort = ORT_ACC;
9693 break;
9694 case OMP_TASKLOOP:
9695 if (omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_UNTIED))
9696 ort = ORT_UNTIED_TASK;
9697 else
9698 ort = ORT_TASK;
9699 break;
9700 case OMP_SIMD:
9701 ort = ORT_SIMD;
9702 break;
9703 default:
9704 gcc_unreachable ();
9705 }
9706
9707 /* Set OMP_CLAUSE_LINEAR_NO_COPYIN flag on explicit linear
9708 clause for the IV. */
9709 if (ort == ORT_SIMD && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
9710 {
9711 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), 0);
9712 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
9713 decl = TREE_OPERAND (t, 0);
9714 for (tree c = OMP_FOR_CLAUSES (for_stmt); c; c = OMP_CLAUSE_CHAIN (c))
9715 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
9716 && OMP_CLAUSE_DECL (c) == decl)
9717 {
9718 OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
9719 break;
9720 }
9721 }
9722
9723 if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
9724 {
9725 gcc_assert (TREE_CODE (for_stmt) != OACC_LOOP);
9726 inner_for_stmt = walk_tree (&OMP_FOR_BODY (for_stmt),
9727 find_combined_omp_for, NULL, NULL);
9728 if (inner_for_stmt == NULL_TREE)
9729 {
9730 gcc_assert (seen_error ());
9731 *expr_p = NULL_TREE;
9732 return GS_ERROR;
9733 }
9734 }
9735
9736 if (TREE_CODE (for_stmt) != OMP_TASKLOOP)
9737 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (for_stmt), pre_p, ort,
9738 TREE_CODE (for_stmt));
9739
9740 if (TREE_CODE (for_stmt) == OMP_DISTRIBUTE)
9741 gimplify_omp_ctxp->distribute = true;
9742
9743 /* Handle OMP_FOR_INIT. */
9744 for_pre_body = NULL;
9745 if (ort == ORT_SIMD && OMP_FOR_PRE_BODY (for_stmt))
9746 {
9747 has_decl_expr = BITMAP_ALLOC (NULL);
9748 if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == DECL_EXPR
9749 && TREE_CODE (DECL_EXPR_DECL (OMP_FOR_PRE_BODY (for_stmt)))
9750 == VAR_DECL)
9751 {
9752 t = OMP_FOR_PRE_BODY (for_stmt);
9753 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
9754 }
9755 else if (TREE_CODE (OMP_FOR_PRE_BODY (for_stmt)) == STATEMENT_LIST)
9756 {
9757 tree_stmt_iterator si;
9758 for (si = tsi_start (OMP_FOR_PRE_BODY (for_stmt)); !tsi_end_p (si);
9759 tsi_next (&si))
9760 {
9761 t = tsi_stmt (si);
9762 if (TREE_CODE (t) == DECL_EXPR
9763 && TREE_CODE (DECL_EXPR_DECL (t)) == VAR_DECL)
9764 bitmap_set_bit (has_decl_expr, DECL_UID (DECL_EXPR_DECL (t)));
9765 }
9766 }
9767 }
9768 if (OMP_FOR_PRE_BODY (for_stmt))
9769 {
9770 if (TREE_CODE (for_stmt) != OMP_TASKLOOP || gimplify_omp_ctxp)
9771 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
9772 else
9773 {
9774 struct gimplify_omp_ctx ctx;
9775 memset (&ctx, 0, sizeof (ctx));
9776 ctx.region_type = ORT_NONE;
9777 gimplify_omp_ctxp = &ctx;
9778 gimplify_and_add (OMP_FOR_PRE_BODY (for_stmt), &for_pre_body);
9779 gimplify_omp_ctxp = NULL;
9780 }
9781 }
9782 OMP_FOR_PRE_BODY (for_stmt) = NULL_TREE;
9783
9784 if (OMP_FOR_INIT (for_stmt) == NULL_TREE)
9785 for_stmt = inner_for_stmt;
9786
9787 /* For taskloop, need to gimplify the start, end and step before the
9788 taskloop, outside of the taskloop omp context. */
9789 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
9790 {
9791 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
9792 {
9793 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
9794 if (!is_gimple_constant (TREE_OPERAND (t, 1)))
9795 {
9796 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
9797 TREE_OPERAND (t, 1)
9798 = get_initialized_tmp_var (TREE_OPERAND (t, 1),
9799 gimple_seq_empty_p (for_pre_body)
9800 ? pre_p : &for_pre_body, NULL,
9801 false);
9802 /* Reference to pointer conversion is considered useless,
9803 but is significant for firstprivate clause. Force it
9804 here. */
9805 if (TREE_CODE (type) == POINTER_TYPE
9806 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (t, 1)))
9807 == REFERENCE_TYPE))
9808 {
9809 tree v = create_tmp_var (TYPE_MAIN_VARIANT (type));
9810 tree m = build2 (INIT_EXPR, TREE_TYPE (v), v,
9811 TREE_OPERAND (t, 1));
9812 gimplify_and_add (m, gimple_seq_empty_p (for_pre_body)
9813 ? pre_p : &for_pre_body);
9814 TREE_OPERAND (t, 1) = v;
9815 }
9816 tree c = build_omp_clause (input_location,
9817 OMP_CLAUSE_FIRSTPRIVATE);
9818 OMP_CLAUSE_DECL (c) = TREE_OPERAND (t, 1);
9819 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
9820 OMP_FOR_CLAUSES (orig_for_stmt) = c;
9821 }
9822
9823 /* Handle OMP_FOR_COND. */
9824 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
9825 if (!is_gimple_constant (TREE_OPERAND (t, 1)))
9826 {
9827 tree type = TREE_TYPE (TREE_OPERAND (t, 0));
9828 TREE_OPERAND (t, 1)
9829 = get_initialized_tmp_var (TREE_OPERAND (t, 1),
9830 gimple_seq_empty_p (for_pre_body)
9831 ? pre_p : &for_pre_body, NULL,
9832 false);
9833 /* Reference to pointer conversion is considered useless,
9834 but is significant for firstprivate clause. Force it
9835 here. */
9836 if (TREE_CODE (type) == POINTER_TYPE
9837 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (t, 1)))
9838 == REFERENCE_TYPE))
9839 {
9840 tree v = create_tmp_var (TYPE_MAIN_VARIANT (type));
9841 tree m = build2 (INIT_EXPR, TREE_TYPE (v), v,
9842 TREE_OPERAND (t, 1));
9843 gimplify_and_add (m, gimple_seq_empty_p (for_pre_body)
9844 ? pre_p : &for_pre_body);
9845 TREE_OPERAND (t, 1) = v;
9846 }
9847 tree c = build_omp_clause (input_location,
9848 OMP_CLAUSE_FIRSTPRIVATE);
9849 OMP_CLAUSE_DECL (c) = TREE_OPERAND (t, 1);
9850 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
9851 OMP_FOR_CLAUSES (orig_for_stmt) = c;
9852 }
9853
9854 /* Handle OMP_FOR_INCR. */
9855 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
9856 if (TREE_CODE (t) == MODIFY_EXPR)
9857 {
9858 decl = TREE_OPERAND (t, 0);
9859 t = TREE_OPERAND (t, 1);
9860 tree *tp = &TREE_OPERAND (t, 1);
9861 if (TREE_CODE (t) == PLUS_EXPR && *tp == decl)
9862 tp = &TREE_OPERAND (t, 0);
9863
9864 if (!is_gimple_constant (*tp))
9865 {
9866 gimple_seq *seq = gimple_seq_empty_p (for_pre_body)
9867 ? pre_p : &for_pre_body;
9868 *tp = get_initialized_tmp_var (*tp, seq, NULL, false);
9869 tree c = build_omp_clause (input_location,
9870 OMP_CLAUSE_FIRSTPRIVATE);
9871 OMP_CLAUSE_DECL (c) = *tp;
9872 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (orig_for_stmt);
9873 OMP_FOR_CLAUSES (orig_for_stmt) = c;
9874 }
9875 }
9876 }
9877
9878 gimplify_scan_omp_clauses (&OMP_FOR_CLAUSES (orig_for_stmt), pre_p, ort,
9879 OMP_TASKLOOP);
9880 }
9881
9882 if (orig_for_stmt != for_stmt)
9883 gimplify_omp_ctxp->combined_loop = true;
9884
9885 for_body = NULL;
9886 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
9887 == TREE_VEC_LENGTH (OMP_FOR_COND (for_stmt)));
9888 gcc_assert (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
9889 == TREE_VEC_LENGTH (OMP_FOR_INCR (for_stmt)));
9890
9891 tree c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_ORDERED);
9892 bool is_doacross = false;
9893 if (c && OMP_CLAUSE_ORDERED_EXPR (c))
9894 {
9895 is_doacross = true;
9896 gimplify_omp_ctxp->loop_iter_var.create (TREE_VEC_LENGTH
9897 (OMP_FOR_INIT (for_stmt))
9898 * 2);
9899 }
9900 int collapse = 1, tile = 0;
9901 c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_COLLAPSE);
9902 if (c)
9903 collapse = tree_to_shwi (OMP_CLAUSE_COLLAPSE_EXPR (c));
9904 c = omp_find_clause (OMP_FOR_CLAUSES (for_stmt), OMP_CLAUSE_TILE);
9905 if (c)
9906 tile = list_length (OMP_CLAUSE_TILE_LIST (c));
9907 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
9908 {
9909 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
9910 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
9911 decl = TREE_OPERAND (t, 0);
9912 gcc_assert (DECL_P (decl));
9913 gcc_assert (INTEGRAL_TYPE_P (TREE_TYPE (decl))
9914 || POINTER_TYPE_P (TREE_TYPE (decl)));
9915 if (is_doacross)
9916 {
9917 if (TREE_CODE (for_stmt) == OMP_FOR && OMP_FOR_ORIG_DECLS (for_stmt))
9918 gimplify_omp_ctxp->loop_iter_var.quick_push
9919 (TREE_VEC_ELT (OMP_FOR_ORIG_DECLS (for_stmt), i));
9920 else
9921 gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
9922 gimplify_omp_ctxp->loop_iter_var.quick_push (decl);
9923 }
9924
9925 /* Make sure the iteration variable is private. */
9926 tree c = NULL_TREE;
9927 tree c2 = NULL_TREE;
9928 if (orig_for_stmt != for_stmt)
9929 /* Do this only on innermost construct for combined ones. */;
9930 else if (ort == ORT_SIMD)
9931 {
9932 splay_tree_node n = splay_tree_lookup (gimplify_omp_ctxp->variables,
9933 (splay_tree_key) decl);
9934 omp_is_private (gimplify_omp_ctxp, decl,
9935 1 + (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt))
9936 != 1));
9937 if (n != NULL && (n->value & GOVD_DATA_SHARE_CLASS) != 0)
9938 omp_notice_variable (gimplify_omp_ctxp, decl, true);
9939 else if (TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
9940 {
9941 c = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
9942 OMP_CLAUSE_LINEAR_NO_COPYIN (c) = 1;
9943 unsigned int flags = GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN;
9944 if (has_decl_expr
9945 && bitmap_bit_p (has_decl_expr, DECL_UID (decl)))
9946 {
9947 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
9948 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
9949 }
9950 struct gimplify_omp_ctx *outer
9951 = gimplify_omp_ctxp->outer_context;
9952 if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
9953 {
9954 if (outer->region_type == ORT_WORKSHARE
9955 && outer->combined_loop)
9956 {
9957 n = splay_tree_lookup (outer->variables,
9958 (splay_tree_key)decl);
9959 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
9960 {
9961 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
9962 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
9963 }
9964 else
9965 {
9966 struct gimplify_omp_ctx *octx = outer->outer_context;
9967 if (octx
9968 && octx->region_type == ORT_COMBINED_PARALLEL
9969 && octx->outer_context
9970 && (octx->outer_context->region_type
9971 == ORT_WORKSHARE)
9972 && octx->outer_context->combined_loop)
9973 {
9974 octx = octx->outer_context;
9975 n = splay_tree_lookup (octx->variables,
9976 (splay_tree_key)decl);
9977 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
9978 {
9979 OMP_CLAUSE_LINEAR_NO_COPYOUT (c) = 1;
9980 flags |= GOVD_LINEAR_LASTPRIVATE_NO_OUTER;
9981 }
9982 }
9983 }
9984 }
9985 }
9986
9987 OMP_CLAUSE_DECL (c) = decl;
9988 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
9989 OMP_FOR_CLAUSES (for_stmt) = c;
9990 omp_add_variable (gimplify_omp_ctxp, decl, flags);
9991 if (outer && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c))
9992 {
9993 if (outer->region_type == ORT_WORKSHARE
9994 && outer->combined_loop)
9995 {
9996 if (outer->outer_context
9997 && (outer->outer_context->region_type
9998 == ORT_COMBINED_PARALLEL))
9999 outer = outer->outer_context;
10000 else if (omp_check_private (outer, decl, false))
10001 outer = NULL;
10002 }
10003 else if (((outer->region_type & ORT_TASK) != 0)
10004 && outer->combined_loop
10005 && !omp_check_private (gimplify_omp_ctxp,
10006 decl, false))
10007 ;
10008 else if (outer->region_type != ORT_COMBINED_PARALLEL)
10009 {
10010 omp_notice_variable (outer, decl, true);
10011 outer = NULL;
10012 }
10013 if (outer)
10014 {
10015 n = splay_tree_lookup (outer->variables,
10016 (splay_tree_key)decl);
10017 if (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
10018 {
10019 omp_add_variable (outer, decl,
10020 GOVD_LASTPRIVATE | GOVD_SEEN);
10021 if (outer->region_type == ORT_COMBINED_PARALLEL
10022 && outer->outer_context
10023 && (outer->outer_context->region_type
10024 == ORT_WORKSHARE)
10025 && outer->outer_context->combined_loop)
10026 {
10027 outer = outer->outer_context;
10028 n = splay_tree_lookup (outer->variables,
10029 (splay_tree_key)decl);
10030 if (omp_check_private (outer, decl, false))
10031 outer = NULL;
10032 else if (n == NULL
10033 || ((n->value & GOVD_DATA_SHARE_CLASS)
10034 == 0))
10035 omp_add_variable (outer, decl,
10036 GOVD_LASTPRIVATE
10037 | GOVD_SEEN);
10038 else
10039 outer = NULL;
10040 }
10041 if (outer && outer->outer_context
10042 && (outer->outer_context->region_type
10043 == ORT_COMBINED_TEAMS))
10044 {
10045 outer = outer->outer_context;
10046 n = splay_tree_lookup (outer->variables,
10047 (splay_tree_key)decl);
10048 if (n == NULL
10049 || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
10050 omp_add_variable (outer, decl,
10051 GOVD_SHARED | GOVD_SEEN);
10052 else
10053 outer = NULL;
10054 }
10055 if (outer && outer->outer_context)
10056 omp_notice_variable (outer->outer_context, decl,
10057 true);
10058 }
10059 }
10060 }
10061 }
10062 else
10063 {
10064 bool lastprivate
10065 = (!has_decl_expr
10066 || !bitmap_bit_p (has_decl_expr, DECL_UID (decl)));
10067 struct gimplify_omp_ctx *outer
10068 = gimplify_omp_ctxp->outer_context;
10069 if (outer && lastprivate)
10070 {
10071 if (outer->region_type == ORT_WORKSHARE
10072 && outer->combined_loop)
10073 {
10074 n = splay_tree_lookup (outer->variables,
10075 (splay_tree_key)decl);
10076 if (n != NULL && (n->value & GOVD_LOCAL) != 0)
10077 {
10078 lastprivate = false;
10079 outer = NULL;
10080 }
10081 else if (outer->outer_context
10082 && (outer->outer_context->region_type
10083 == ORT_COMBINED_PARALLEL))
10084 outer = outer->outer_context;
10085 else if (omp_check_private (outer, decl, false))
10086 outer = NULL;
10087 }
10088 else if (((outer->region_type & ORT_TASK) != 0)
10089 && outer->combined_loop
10090 && !omp_check_private (gimplify_omp_ctxp,
10091 decl, false))
10092 ;
10093 else if (outer->region_type != ORT_COMBINED_PARALLEL)
10094 {
10095 omp_notice_variable (outer, decl, true);
10096 outer = NULL;
10097 }
10098 if (outer)
10099 {
10100 n = splay_tree_lookup (outer->variables,
10101 (splay_tree_key)decl);
10102 if (n == NULL || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
10103 {
10104 omp_add_variable (outer, decl,
10105 GOVD_LASTPRIVATE | GOVD_SEEN);
10106 if (outer->region_type == ORT_COMBINED_PARALLEL
10107 && outer->outer_context
10108 && (outer->outer_context->region_type
10109 == ORT_WORKSHARE)
10110 && outer->outer_context->combined_loop)
10111 {
10112 outer = outer->outer_context;
10113 n = splay_tree_lookup (outer->variables,
10114 (splay_tree_key)decl);
10115 if (omp_check_private (outer, decl, false))
10116 outer = NULL;
10117 else if (n == NULL
10118 || ((n->value & GOVD_DATA_SHARE_CLASS)
10119 == 0))
10120 omp_add_variable (outer, decl,
10121 GOVD_LASTPRIVATE
10122 | GOVD_SEEN);
10123 else
10124 outer = NULL;
10125 }
10126 if (outer && outer->outer_context
10127 && (outer->outer_context->region_type
10128 == ORT_COMBINED_TEAMS))
10129 {
10130 outer = outer->outer_context;
10131 n = splay_tree_lookup (outer->variables,
10132 (splay_tree_key)decl);
10133 if (n == NULL
10134 || (n->value & GOVD_DATA_SHARE_CLASS) == 0)
10135 omp_add_variable (outer, decl,
10136 GOVD_SHARED | GOVD_SEEN);
10137 else
10138 outer = NULL;
10139 }
10140 if (outer && outer->outer_context)
10141 omp_notice_variable (outer->outer_context, decl,
10142 true);
10143 }
10144 }
10145 }
10146
10147 c = build_omp_clause (input_location,
10148 lastprivate ? OMP_CLAUSE_LASTPRIVATE
10149 : OMP_CLAUSE_PRIVATE);
10150 OMP_CLAUSE_DECL (c) = decl;
10151 OMP_CLAUSE_CHAIN (c) = OMP_FOR_CLAUSES (for_stmt);
10152 OMP_FOR_CLAUSES (for_stmt) = c;
10153 omp_add_variable (gimplify_omp_ctxp, decl,
10154 (lastprivate ? GOVD_LASTPRIVATE : GOVD_PRIVATE)
10155 | GOVD_EXPLICIT | GOVD_SEEN);
10156 c = NULL_TREE;
10157 }
10158 }
10159 else if (omp_is_private (gimplify_omp_ctxp, decl, 0))
10160 omp_notice_variable (gimplify_omp_ctxp, decl, true);
10161 else
10162 omp_add_variable (gimplify_omp_ctxp, decl, GOVD_PRIVATE | GOVD_SEEN);
10163
10164 /* If DECL is not a gimple register, create a temporary variable to act
10165 as an iteration counter. This is valid, since DECL cannot be
10166 modified in the body of the loop. Similarly for any iteration vars
10167 in simd with collapse > 1 where the iterator vars must be
10168 lastprivate. */
10169 if (orig_for_stmt != for_stmt)
10170 var = decl;
10171 else if (!is_gimple_reg (decl)
10172 || (ort == ORT_SIMD
10173 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) > 1))
10174 {
10175 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
10176 /* Make sure omp_add_variable is not called on it prematurely.
10177 We call it ourselves a few lines later. */
10178 gimplify_omp_ctxp = NULL;
10179 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
10180 gimplify_omp_ctxp = ctx;
10181 TREE_OPERAND (t, 0) = var;
10182
10183 gimplify_seq_add_stmt (&for_body, gimple_build_assign (decl, var));
10184
10185 if (ort == ORT_SIMD
10186 && TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)) == 1)
10187 {
10188 c2 = build_omp_clause (input_location, OMP_CLAUSE_LINEAR);
10189 OMP_CLAUSE_LINEAR_NO_COPYIN (c2) = 1;
10190 OMP_CLAUSE_LINEAR_NO_COPYOUT (c2) = 1;
10191 OMP_CLAUSE_DECL (c2) = var;
10192 OMP_CLAUSE_CHAIN (c2) = OMP_FOR_CLAUSES (for_stmt);
10193 OMP_FOR_CLAUSES (for_stmt) = c2;
10194 omp_add_variable (gimplify_omp_ctxp, var,
10195 GOVD_LINEAR | GOVD_EXPLICIT | GOVD_SEEN);
10196 if (c == NULL_TREE)
10197 {
10198 c = c2;
10199 c2 = NULL_TREE;
10200 }
10201 }
10202 else
10203 omp_add_variable (gimplify_omp_ctxp, var,
10204 GOVD_PRIVATE | GOVD_SEEN);
10205 }
10206 else
10207 var = decl;
10208
10209 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
10210 is_gimple_val, fb_rvalue, false);
10211 ret = MIN (ret, tret);
10212 if (ret == GS_ERROR)
10213 return ret;
10214
10215 /* Handle OMP_FOR_COND. */
10216 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
10217 gcc_assert (COMPARISON_CLASS_P (t));
10218 gcc_assert (TREE_OPERAND (t, 0) == decl);
10219
10220 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
10221 is_gimple_val, fb_rvalue, false);
10222 ret = MIN (ret, tret);
10223
10224 /* Handle OMP_FOR_INCR. */
10225 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
10226 switch (TREE_CODE (t))
10227 {
10228 case PREINCREMENT_EXPR:
10229 case POSTINCREMENT_EXPR:
10230 {
10231 tree decl = TREE_OPERAND (t, 0);
10232 /* c_omp_for_incr_canonicalize_ptr() should have been
10233 called to massage things appropriately. */
10234 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
10235
10236 if (orig_for_stmt != for_stmt)
10237 break;
10238 t = build_int_cst (TREE_TYPE (decl), 1);
10239 if (c)
10240 OMP_CLAUSE_LINEAR_STEP (c) = t;
10241 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
10242 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
10243 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
10244 break;
10245 }
10246
10247 case PREDECREMENT_EXPR:
10248 case POSTDECREMENT_EXPR:
10249 /* c_omp_for_incr_canonicalize_ptr() should have been
10250 called to massage things appropriately. */
10251 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (decl)));
10252 if (orig_for_stmt != for_stmt)
10253 break;
10254 t = build_int_cst (TREE_TYPE (decl), -1);
10255 if (c)
10256 OMP_CLAUSE_LINEAR_STEP (c) = t;
10257 t = build2 (PLUS_EXPR, TREE_TYPE (decl), var, t);
10258 t = build2 (MODIFY_EXPR, TREE_TYPE (var), var, t);
10259 TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i) = t;
10260 break;
10261
10262 case MODIFY_EXPR:
10263 gcc_assert (TREE_OPERAND (t, 0) == decl);
10264 TREE_OPERAND (t, 0) = var;
10265
10266 t = TREE_OPERAND (t, 1);
10267 switch (TREE_CODE (t))
10268 {
10269 case PLUS_EXPR:
10270 if (TREE_OPERAND (t, 1) == decl)
10271 {
10272 TREE_OPERAND (t, 1) = TREE_OPERAND (t, 0);
10273 TREE_OPERAND (t, 0) = var;
10274 break;
10275 }
10276
10277 /* Fallthru. */
10278 case MINUS_EXPR:
10279 case POINTER_PLUS_EXPR:
10280 gcc_assert (TREE_OPERAND (t, 0) == decl);
10281 TREE_OPERAND (t, 0) = var;
10282 break;
10283 default:
10284 gcc_unreachable ();
10285 }
10286
10287 tret = gimplify_expr (&TREE_OPERAND (t, 1), &for_pre_body, NULL,
10288 is_gimple_val, fb_rvalue, false);
10289 ret = MIN (ret, tret);
10290 if (c)
10291 {
10292 tree step = TREE_OPERAND (t, 1);
10293 tree stept = TREE_TYPE (decl);
10294 if (POINTER_TYPE_P (stept))
10295 stept = sizetype;
10296 step = fold_convert (stept, step);
10297 if (TREE_CODE (t) == MINUS_EXPR)
10298 step = fold_build1 (NEGATE_EXPR, stept, step);
10299 OMP_CLAUSE_LINEAR_STEP (c) = step;
10300 if (step != TREE_OPERAND (t, 1))
10301 {
10302 tret = gimplify_expr (&OMP_CLAUSE_LINEAR_STEP (c),
10303 &for_pre_body, NULL,
10304 is_gimple_val, fb_rvalue, false);
10305 ret = MIN (ret, tret);
10306 }
10307 }
10308 break;
10309
10310 default:
10311 gcc_unreachable ();
10312 }
10313
10314 if (c2)
10315 {
10316 gcc_assert (c);
10317 OMP_CLAUSE_LINEAR_STEP (c2) = OMP_CLAUSE_LINEAR_STEP (c);
10318 }
10319
10320 if ((var != decl || collapse > 1 || tile) && orig_for_stmt == for_stmt)
10321 {
10322 for (c = OMP_FOR_CLAUSES (for_stmt); c ; c = OMP_CLAUSE_CHAIN (c))
10323 if (((OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE
10324 && OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c) == NULL)
10325 || (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LINEAR
10326 && !OMP_CLAUSE_LINEAR_NO_COPYOUT (c)
10327 && OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c) == NULL))
10328 && OMP_CLAUSE_DECL (c) == decl)
10329 {
10330 if (is_doacross && (collapse == 1 || i >= collapse))
10331 t = var;
10332 else
10333 {
10334 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
10335 gcc_assert (TREE_CODE (t) == MODIFY_EXPR);
10336 gcc_assert (TREE_OPERAND (t, 0) == var);
10337 t = TREE_OPERAND (t, 1);
10338 gcc_assert (TREE_CODE (t) == PLUS_EXPR
10339 || TREE_CODE (t) == MINUS_EXPR
10340 || TREE_CODE (t) == POINTER_PLUS_EXPR);
10341 gcc_assert (TREE_OPERAND (t, 0) == var);
10342 t = build2 (TREE_CODE (t), TREE_TYPE (decl),
10343 is_doacross ? var : decl,
10344 TREE_OPERAND (t, 1));
10345 }
10346 gimple_seq *seq;
10347 if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_LASTPRIVATE)
10348 seq = &OMP_CLAUSE_LASTPRIVATE_GIMPLE_SEQ (c);
10349 else
10350 seq = &OMP_CLAUSE_LINEAR_GIMPLE_SEQ (c);
10351 push_gimplify_context ();
10352 gimplify_assign (decl, t, seq);
10353 gimple *bind = NULL;
10354 if (gimplify_ctxp->temps)
10355 {
10356 bind = gimple_build_bind (NULL_TREE, *seq, NULL_TREE);
10357 *seq = NULL;
10358 gimplify_seq_add_stmt (seq, bind);
10359 }
10360 pop_gimplify_context (bind);
10361 }
10362 }
10363 }
10364
10365 BITMAP_FREE (has_decl_expr);
10366
10367 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
10368 {
10369 push_gimplify_context ();
10370 if (TREE_CODE (OMP_FOR_BODY (orig_for_stmt)) != BIND_EXPR)
10371 {
10372 OMP_FOR_BODY (orig_for_stmt)
10373 = build3 (BIND_EXPR, void_type_node, NULL,
10374 OMP_FOR_BODY (orig_for_stmt), NULL);
10375 TREE_SIDE_EFFECTS (OMP_FOR_BODY (orig_for_stmt)) = 1;
10376 }
10377 }
10378
10379 gimple *g = gimplify_and_return_first (OMP_FOR_BODY (orig_for_stmt),
10380 &for_body);
10381
10382 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
10383 {
10384 if (gimple_code (g) == GIMPLE_BIND)
10385 pop_gimplify_context (g);
10386 else
10387 pop_gimplify_context (NULL);
10388 }
10389
10390 if (orig_for_stmt != for_stmt)
10391 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
10392 {
10393 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
10394 decl = TREE_OPERAND (t, 0);
10395 struct gimplify_omp_ctx *ctx = gimplify_omp_ctxp;
10396 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
10397 gimplify_omp_ctxp = ctx->outer_context;
10398 var = create_tmp_var (TREE_TYPE (decl), get_name (decl));
10399 gimplify_omp_ctxp = ctx;
10400 omp_add_variable (gimplify_omp_ctxp, var, GOVD_PRIVATE | GOVD_SEEN);
10401 TREE_OPERAND (t, 0) = var;
10402 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
10403 TREE_OPERAND (t, 1) = copy_node (TREE_OPERAND (t, 1));
10404 TREE_OPERAND (TREE_OPERAND (t, 1), 0) = var;
10405 }
10406
10407 gimplify_adjust_omp_clauses (pre_p, for_body,
10408 &OMP_FOR_CLAUSES (orig_for_stmt),
10409 TREE_CODE (orig_for_stmt));
10410
10411 int kind;
10412 switch (TREE_CODE (orig_for_stmt))
10413 {
10414 case OMP_FOR: kind = GF_OMP_FOR_KIND_FOR; break;
10415 case OMP_SIMD: kind = GF_OMP_FOR_KIND_SIMD; break;
10416 case OMP_DISTRIBUTE: kind = GF_OMP_FOR_KIND_DISTRIBUTE; break;
10417 case OMP_TASKLOOP: kind = GF_OMP_FOR_KIND_TASKLOOP; break;
10418 case OACC_LOOP: kind = GF_OMP_FOR_KIND_OACC_LOOP; break;
10419 default:
10420 gcc_unreachable ();
10421 }
10422 gfor = gimple_build_omp_for (for_body, kind, OMP_FOR_CLAUSES (orig_for_stmt),
10423 TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)),
10424 for_pre_body);
10425 if (orig_for_stmt != for_stmt)
10426 gimple_omp_for_set_combined_p (gfor, true);
10427 if (gimplify_omp_ctxp
10428 && (gimplify_omp_ctxp->combined_loop
10429 || (gimplify_omp_ctxp->region_type == ORT_COMBINED_PARALLEL
10430 && gimplify_omp_ctxp->outer_context
10431 && gimplify_omp_ctxp->outer_context->combined_loop)))
10432 {
10433 gimple_omp_for_set_combined_into_p (gfor, true);
10434 if (gimplify_omp_ctxp->combined_loop)
10435 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_SIMD);
10436 else
10437 gcc_assert (TREE_CODE (orig_for_stmt) == OMP_FOR);
10438 }
10439
10440 for (i = 0; i < TREE_VEC_LENGTH (OMP_FOR_INIT (for_stmt)); i++)
10441 {
10442 t = TREE_VEC_ELT (OMP_FOR_INIT (for_stmt), i);
10443 gimple_omp_for_set_index (gfor, i, TREE_OPERAND (t, 0));
10444 gimple_omp_for_set_initial (gfor, i, TREE_OPERAND (t, 1));
10445 t = TREE_VEC_ELT (OMP_FOR_COND (for_stmt), i);
10446 gimple_omp_for_set_cond (gfor, i, TREE_CODE (t));
10447 gimple_omp_for_set_final (gfor, i, TREE_OPERAND (t, 1));
10448 t = TREE_VEC_ELT (OMP_FOR_INCR (for_stmt), i);
10449 gimple_omp_for_set_incr (gfor, i, TREE_OPERAND (t, 1));
10450 }
10451
10452 /* OMP_TASKLOOP is gimplified as two GIMPLE_OMP_FOR taskloop
10453 constructs with GIMPLE_OMP_TASK sandwiched in between them.
10454 The outer taskloop stands for computing the number of iterations,
10455 counts for collapsed loops and holding taskloop specific clauses.
10456 The task construct stands for the effect of data sharing on the
10457 explicit task it creates and the inner taskloop stands for expansion
10458 of the static loop inside of the explicit task construct. */
10459 if (TREE_CODE (orig_for_stmt) == OMP_TASKLOOP)
10460 {
10461 tree *gfor_clauses_ptr = gimple_omp_for_clauses_ptr (gfor);
10462 tree task_clauses = NULL_TREE;
10463 tree c = *gfor_clauses_ptr;
10464 tree *gtask_clauses_ptr = &task_clauses;
10465 tree outer_for_clauses = NULL_TREE;
10466 tree *gforo_clauses_ptr = &outer_for_clauses;
10467 for (; c; c = OMP_CLAUSE_CHAIN (c))
10468 switch (OMP_CLAUSE_CODE (c))
10469 {
10470 /* These clauses are allowed on task, move them there. */
10471 case OMP_CLAUSE_SHARED:
10472 case OMP_CLAUSE_FIRSTPRIVATE:
10473 case OMP_CLAUSE_DEFAULT:
10474 case OMP_CLAUSE_IF:
10475 case OMP_CLAUSE_UNTIED:
10476 case OMP_CLAUSE_FINAL:
10477 case OMP_CLAUSE_MERGEABLE:
10478 case OMP_CLAUSE_PRIORITY:
10479 *gtask_clauses_ptr = c;
10480 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
10481 break;
10482 case OMP_CLAUSE_PRIVATE:
10483 if (OMP_CLAUSE_PRIVATE_TASKLOOP_IV (c))
10484 {
10485 /* We want private on outer for and firstprivate
10486 on task. */
10487 *gtask_clauses_ptr
10488 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
10489 OMP_CLAUSE_FIRSTPRIVATE);
10490 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
10491 lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL);
10492 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
10493 *gforo_clauses_ptr = c;
10494 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
10495 }
10496 else
10497 {
10498 *gtask_clauses_ptr = c;
10499 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
10500 }
10501 break;
10502 /* These clauses go into outer taskloop clauses. */
10503 case OMP_CLAUSE_GRAINSIZE:
10504 case OMP_CLAUSE_NUM_TASKS:
10505 case OMP_CLAUSE_NOGROUP:
10506 *gforo_clauses_ptr = c;
10507 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
10508 break;
10509 /* Taskloop clause we duplicate on both taskloops. */
10510 case OMP_CLAUSE_COLLAPSE:
10511 *gfor_clauses_ptr = c;
10512 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
10513 *gforo_clauses_ptr = copy_node (c);
10514 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
10515 break;
10516 /* For lastprivate, keep the clause on inner taskloop, and add
10517 a shared clause on task. If the same decl is also firstprivate,
10518 add also firstprivate clause on the inner taskloop. */
10519 case OMP_CLAUSE_LASTPRIVATE:
10520 if (OMP_CLAUSE_LASTPRIVATE_TASKLOOP_IV (c))
10521 {
10522 /* For taskloop C++ lastprivate IVs, we want:
10523 1) private on outer taskloop
10524 2) firstprivate and shared on task
10525 3) lastprivate on inner taskloop */
10526 *gtask_clauses_ptr
10527 = build_omp_clause (OMP_CLAUSE_LOCATION (c),
10528 OMP_CLAUSE_FIRSTPRIVATE);
10529 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
10530 lang_hooks.decls.omp_finish_clause (*gtask_clauses_ptr, NULL);
10531 gtask_clauses_ptr = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
10532 OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c) = 1;
10533 *gforo_clauses_ptr = build_omp_clause (OMP_CLAUSE_LOCATION (c),
10534 OMP_CLAUSE_PRIVATE);
10535 OMP_CLAUSE_DECL (*gforo_clauses_ptr) = OMP_CLAUSE_DECL (c);
10536 OMP_CLAUSE_PRIVATE_TASKLOOP_IV (*gforo_clauses_ptr) = 1;
10537 TREE_TYPE (*gforo_clauses_ptr) = TREE_TYPE (c);
10538 gforo_clauses_ptr = &OMP_CLAUSE_CHAIN (*gforo_clauses_ptr);
10539 }
10540 *gfor_clauses_ptr = c;
10541 gfor_clauses_ptr = &OMP_CLAUSE_CHAIN (c);
10542 *gtask_clauses_ptr
10543 = build_omp_clause (OMP_CLAUSE_LOCATION (c), OMP_CLAUSE_SHARED);
10544 OMP_CLAUSE_DECL (*gtask_clauses_ptr) = OMP_CLAUSE_DECL (c);
10545 if (OMP_CLAUSE_LASTPRIVATE_FIRSTPRIVATE (c))
10546 OMP_CLAUSE_SHARED_FIRSTPRIVATE (*gtask_clauses_ptr) = 1;
10547 gtask_clauses_ptr
10548 = &OMP_CLAUSE_CHAIN (*gtask_clauses_ptr);
10549 break;
10550 default:
10551 gcc_unreachable ();
10552 }
10553 *gfor_clauses_ptr = NULL_TREE;
10554 *gtask_clauses_ptr = NULL_TREE;
10555 *gforo_clauses_ptr = NULL_TREE;
10556 g = gimple_build_bind (NULL_TREE, gfor, NULL_TREE);
10557 g = gimple_build_omp_task (g, task_clauses, NULL_TREE, NULL_TREE,
10558 NULL_TREE, NULL_TREE, NULL_TREE);
10559 gimple_omp_task_set_taskloop_p (g, true);
10560 g = gimple_build_bind (NULL_TREE, g, NULL_TREE);
10561 gomp_for *gforo
10562 = gimple_build_omp_for (g, GF_OMP_FOR_KIND_TASKLOOP, outer_for_clauses,
10563 gimple_omp_for_collapse (gfor),
10564 gimple_omp_for_pre_body (gfor));
10565 gimple_omp_for_set_pre_body (gfor, NULL);
10566 gimple_omp_for_set_combined_p (gforo, true);
10567 gimple_omp_for_set_combined_into_p (gfor, true);
10568 for (i = 0; i < (int) gimple_omp_for_collapse (gfor); i++)
10569 {
10570 tree type = TREE_TYPE (gimple_omp_for_index (gfor, i));
10571 tree v = create_tmp_var (type);
10572 gimple_omp_for_set_index (gforo, i, v);
10573 t = unshare_expr (gimple_omp_for_initial (gfor, i));
10574 gimple_omp_for_set_initial (gforo, i, t);
10575 gimple_omp_for_set_cond (gforo, i,
10576 gimple_omp_for_cond (gfor, i));
10577 t = unshare_expr (gimple_omp_for_final (gfor, i));
10578 gimple_omp_for_set_final (gforo, i, t);
10579 t = unshare_expr (gimple_omp_for_incr (gfor, i));
10580 gcc_assert (TREE_OPERAND (t, 0) == gimple_omp_for_index (gfor, i));
10581 TREE_OPERAND (t, 0) = v;
10582 gimple_omp_for_set_incr (gforo, i, t);
10583 t = build_omp_clause (input_location, OMP_CLAUSE_PRIVATE);
10584 OMP_CLAUSE_DECL (t) = v;
10585 OMP_CLAUSE_CHAIN (t) = gimple_omp_for_clauses (gforo);
10586 gimple_omp_for_set_clauses (gforo, t);
10587 }
10588 gimplify_seq_add_stmt (pre_p, gforo);
10589 }
10590 else
10591 gimplify_seq_add_stmt (pre_p, gfor);
10592 if (ret != GS_ALL_DONE)
10593 return GS_ERROR;
10594 *expr_p = NULL_TREE;
10595 return GS_ALL_DONE;
10596 }
10597
10598 /* Helper function of optimize_target_teams, find OMP_TEAMS inside
10599 of OMP_TARGET's body. */
10600
10601 static tree
find_omp_teams(tree * tp,int * walk_subtrees,void *)10602 find_omp_teams (tree *tp, int *walk_subtrees, void *)
10603 {
10604 *walk_subtrees = 0;
10605 switch (TREE_CODE (*tp))
10606 {
10607 case OMP_TEAMS:
10608 return *tp;
10609 case BIND_EXPR:
10610 case STATEMENT_LIST:
10611 *walk_subtrees = 1;
10612 break;
10613 default:
10614 break;
10615 }
10616 return NULL_TREE;
10617 }
10618
10619 /* Helper function of optimize_target_teams, determine if the expression
10620 can be computed safely before the target construct on the host. */
10621
static tree
computable_teams_clause (tree *tp, int *walk_subtrees, void *)
{
  splay_tree_node n;

  /* Types themselves involve no runtime computation; skip them.  */
  if (TYPE_P (*tp))
    {
      *walk_subtrees = 0;
      return NULL_TREE;
    }
  switch (TREE_CODE (*tp))
    {
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      *walk_subtrees = 0;
      /* Reject decls whose host value cannot be read safely up front:
	 erroneous, non-integral, carrying a value expression,
	 thread-local, or with side effects / volatile semantics.  */
      if (error_operand_p (*tp)
	  || !INTEGRAL_TYPE_P (TREE_TYPE (*tp))
	  || DECL_HAS_VALUE_EXPR_P (*tp)
	  || DECL_THREAD_LOCAL_P (*tp)
	  || TREE_SIDE_EFFECTS (*tp)
	  || TREE_THIS_VOLATILE (*tp))
	return *tp;
      /* "omp declare target" globals (also) live on the device, so the
	 host-side value need not match what the region would see.  */
      if (is_global_var (*tp)
	  && (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (*tp))
	      || lookup_attribute ("omp declare target link",
				   DECL_ATTRIBUTES (*tp))))
	return *tp;
      /* A function-local variable not yet seen in any BIND_EXPR hasn't
	 been processed by gimplification; don't evaluate it early.  */
      if (VAR_P (*tp)
	  && !DECL_SEEN_IN_BIND_EXPR_P (*tp)
	  && !is_global_var (*tp)
	  && decl_function_context (*tp) == current_function_decl)
	return *tp;
      n = splay_tree_lookup (gimplify_omp_ctxp->variables,
			     (splay_tree_key) *tp);
      if (n == NULL)
	{
	  /* Not mentioned on the target construct yet: acceptable only
	     if scalars are implicitly firstprivatized there.  */
	  if (gimplify_omp_ctxp->target_map_scalars_firstprivate)
	    return NULL_TREE;
	  return *tp;
	}
      else if (n->value & GOVD_LOCAL)
	return *tp;
      else if (n->value & GOVD_FIRSTPRIVATE)
	/* Firstprivate: the region sees exactly the host value.  */
	return NULL_TREE;
      else if ((n->value & (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
	       == (GOVD_MAP | GOVD_MAP_ALWAYS_TO))
	/* map(always, to:) / map(always, tofrom:) copy the host value
	   in on region entry, so it is usable too.  */
	return NULL_TREE;
      return *tp;
    case INTEGER_CST:
      if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
	return *tp;
      return NULL_TREE;
    case TARGET_EXPR:
      /* Only a bare TARGET_EXPR whose slot is a VAR_DECL is allowed;
	 vet the slot itself with the decl rules above.  */
      if (TARGET_EXPR_INITIAL (*tp)
	  || TREE_CODE (TARGET_EXPR_SLOT (*tp)) != VAR_DECL)
	return *tp;
      return computable_teams_clause (&TARGET_EXPR_SLOT (*tp),
				      walk_subtrees, NULL);
    /* Allow some reasonable subset of integral arithmetics.  */
    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case RDIV_EXPR:
    case EXACT_DIV_EXPR:
    case MIN_EXPR:
    case MAX_EXPR:
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case BIT_AND_EXPR:
    case NEGATE_EXPR:
    case ABS_EXPR:
    case BIT_NOT_EXPR:
    case NON_LVALUE_EXPR:
    CASE_CONVERT:
      if (!INTEGRAL_TYPE_P (TREE_TYPE (*tp)))
	return *tp;
      return NULL_TREE;
    /* And disallow anything else, except for comparisons.  */
    default:
      if (COMPARISON_CLASS_P (*tp))
	return NULL_TREE;
      return *tp;
    }
}
10717
10718 /* Try to determine if the num_teams and/or thread_limit expressions
10719 can have their values determined already before entering the
10720 target construct.
10721 INTEGER_CSTs trivially are,
10722 integral decls that are firstprivate (explicitly or implicitly)
10723 or explicitly map(always, to:) or map(always, tofrom:) on the target
10724 region too, and expressions involving simple arithmetics on those
10725 too, function calls are not ok, dereferencing something neither etc.
10726 Add NUM_TEAMS and THREAD_LIMIT clauses to the OMP_CLAUSES of
10727 EXPR based on what we find:
10728 0 stands for clause not specified at all, use implementation default
10729 -1 stands for value that can't be determined easily before entering
10730 the target construct.
   If teams construct is not present at all, use 1 for num_teams
   and 0 for thread_limit (only one team is involved, and the thread
   limit is implementation defined).  */
10734
static void
optimize_target_teams (tree target, gimple_seq *pre_p)
{
  tree body = OMP_BODY (target);
  tree teams = walk_tree (&body, find_omp_teams, NULL, NULL);
  /* integer_zero_node means "clause not specified at all"; see the
     function comment above for the encoding.  */
  tree num_teams = integer_zero_node;
  tree thread_limit = integer_zero_node;
  location_t num_teams_loc = EXPR_LOCATION (target);
  location_t thread_limit_loc = EXPR_LOCATION (target);
  tree c, *p, expr;
  struct gimplify_omp_ctx *target_ctx = gimplify_omp_ctxp;

  if (teams == NULL_TREE)
    /* No teams construct: exactly one team, thread limit left at the
       implementation default (0).  */
    num_teams = integer_one_node;
  else
    for (c = OMP_TEAMS_CLAUSES (teams); c; c = OMP_CLAUSE_CHAIN (c))
      {
	/* Point P at whichever of the two values this clause sets;
	   ignore all other clause kinds.  */
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_NUM_TEAMS)
	  {
	    p = &num_teams;
	    num_teams_loc = OMP_CLAUSE_LOCATION (c);
	  }
	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_THREAD_LIMIT)
	  {
	    p = &thread_limit;
	    thread_limit_loc = OMP_CLAUSE_LOCATION (c);
	  }
	else
	  continue;
	expr = OMP_CLAUSE_OPERAND (c, 0);
	if (TREE_CODE (expr) == INTEGER_CST)
	  {
	    *p = expr;
	    continue;
	  }
	if (walk_tree (&expr, computable_teams_clause, NULL, NULL))
	  {
	    /* Not computable on the host before the target region;
	       -1 is the "can't be determined easily" sentinel.  */
	    *p = integer_minus_one_node;
	    continue;
	  }
	*p = expr;
	/* Gimplify the expression in the context enclosing the target,
	   since it is evaluated on the host before region entry.  */
	gimplify_omp_ctxp = gimplify_omp_ctxp->outer_context;
	if (gimplify_expr (p, pre_p, NULL, is_gimple_val, fb_rvalue, false)
	    == GS_ERROR)
	  {
	    gimplify_omp_ctxp = target_ctx;
	    *p = integer_minus_one_node;
	    continue;
	  }
	gimplify_omp_ctxp = target_ctx;
	/* For plain expressions, write the gimplified value back onto
	   the teams clause as well; decls/TARGET_EXPRs are left alone.  */
	if (!DECL_P (expr) && TREE_CODE (expr) != TARGET_EXPR)
	  OMP_CLAUSE_OPERAND (c, 0) = *p;
      }
  /* Record the computed values as clauses on the OMP_TARGET itself.  */
  c = build_omp_clause (thread_limit_loc, OMP_CLAUSE_THREAD_LIMIT);
  OMP_CLAUSE_THREAD_LIMIT_EXPR (c) = thread_limit;
  OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
  OMP_TARGET_CLAUSES (target) = c;
  c = build_omp_clause (num_teams_loc, OMP_CLAUSE_NUM_TEAMS);
  OMP_CLAUSE_NUM_TEAMS_EXPR (c) = num_teams;
  OMP_CLAUSE_CHAIN (c) = OMP_TARGET_CLAUSES (target);
  OMP_TARGET_CLAUSES (target) = c;
}
10797
10798 /* Gimplify the gross structure of several OMP constructs. */
10799
static void
gimplify_omp_workshare (tree *expr_p, gimple_seq *pre_p)
{
  tree expr = *expr_p;
  gimple *stmt;
  gimple_seq body = NULL;
  enum omp_region_type ort;

  /* Map the tree code of the construct to the OMP region type used
     when scanning its clauses.  */
  switch (TREE_CODE (expr))
    {
    case OMP_SECTIONS:
    case OMP_SINGLE:
      ort = ORT_WORKSHARE;
      break;
    case OMP_TARGET:
      ort = OMP_TARGET_COMBINED (expr) ? ORT_COMBINED_TARGET : ORT_TARGET;
      break;
    case OACC_KERNELS:
      ort = ORT_ACC_KERNELS;
      break;
    case OACC_PARALLEL:
      ort = ORT_ACC_PARALLEL;
      break;
    case OACC_DATA:
      ort = ORT_ACC_DATA;
      break;
    case OMP_TARGET_DATA:
      ort = ORT_TARGET_DATA;
      break;
    case OMP_TEAMS:
      ort = OMP_TEAMS_COMBINED (expr) ? ORT_COMBINED_TEAMS : ORT_TEAMS;
      break;
    case OACC_HOST_DATA:
      ort = ORT_ACC_HOST_DATA;
      break;
    default:
      gcc_unreachable ();
    }
  gimplify_scan_omp_clauses (&OMP_CLAUSES (expr), pre_p, ort,
			     TREE_CODE (expr));
  if (TREE_CODE (expr) == OMP_TARGET)
    optimize_target_teams (expr, pre_p);
  if ((ort & (ORT_TARGET | ORT_TARGET_DATA)) != 0)
    {
      /* Target and target-data regions gimplify their body in a fresh
	 gimplification context.  */
      push_gimplify_context ();
      gimple *g = gimplify_and_return_first (OMP_BODY (expr), &body);
      if (gimple_code (g) == GIMPLE_BIND)
	pop_gimplify_context (g);
      else
	pop_gimplify_context (NULL);
      if ((ort & ORT_TARGET_DATA) != 0)
	{
	  /* Data regions must run the runtime "end data" routine even on
	     abnormal exit, so wrap the body in a GIMPLE_TRY_FINALLY whose
	     cleanup calls the matching builtin.  */
	  enum built_in_function end_ix;
	  switch (TREE_CODE (expr))
	    {
	    case OACC_DATA:
	    case OACC_HOST_DATA:
	      end_ix = BUILT_IN_GOACC_DATA_END;
	      break;
	    case OMP_TARGET_DATA:
	      end_ix = BUILT_IN_GOMP_TARGET_END_DATA;
	      break;
	    default:
	      gcc_unreachable ();
	    }
	  tree fn = builtin_decl_explicit (end_ix);
	  g = gimple_build_call (fn, 0);
	  gimple_seq cleanup = NULL;
	  gimple_seq_add_stmt (&cleanup, g);
	  g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
	  body = NULL;
	  gimple_seq_add_stmt (&body, g);
	}
    }
  else
    gimplify_and_add (OMP_BODY (expr), &body);
  gimplify_adjust_omp_clauses (pre_p, body, &OMP_CLAUSES (expr),
			       TREE_CODE (expr));

  /* Build the GIMPLE statement for the construct.  */
  switch (TREE_CODE (expr))
    {
    case OACC_DATA:
      stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_DATA,
				      OMP_CLAUSES (expr));
      break;
    case OACC_KERNELS:
      stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_KERNELS,
				      OMP_CLAUSES (expr));
      break;
    case OACC_HOST_DATA:
      stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_HOST_DATA,
				      OMP_CLAUSES (expr));
      break;
    case OACC_PARALLEL:
      stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_OACC_PARALLEL,
				      OMP_CLAUSES (expr));
      break;
    case OMP_SECTIONS:
      stmt = gimple_build_omp_sections (body, OMP_CLAUSES (expr));
      break;
    case OMP_SINGLE:
      stmt = gimple_build_omp_single (body, OMP_CLAUSES (expr));
      break;
    case OMP_TARGET:
      stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_REGION,
				      OMP_CLAUSES (expr));
      break;
    case OMP_TARGET_DATA:
      stmt = gimple_build_omp_target (body, GF_OMP_TARGET_KIND_DATA,
				      OMP_CLAUSES (expr));
      break;
    case OMP_TEAMS:
      stmt = gimple_build_omp_teams (body, OMP_CLAUSES (expr));
      break;
    default:
      gcc_unreachable ();
    }

  gimplify_seq_add_stmt (pre_p, stmt);
  *expr_p = NULL_TREE;
}
10921
10922 /* Gimplify the gross structure of OpenACC enter/exit data, update, and OpenMP
10923 target update constructs. */
10924
10925 static void
gimplify_omp_target_update(tree * expr_p,gimple_seq * pre_p)10926 gimplify_omp_target_update (tree *expr_p, gimple_seq *pre_p)
10927 {
10928 tree expr = *expr_p;
10929 int kind;
10930 gomp_target *stmt;
10931 enum omp_region_type ort = ORT_WORKSHARE;
10932
10933 switch (TREE_CODE (expr))
10934 {
10935 case OACC_ENTER_DATA:
10936 case OACC_EXIT_DATA:
10937 kind = GF_OMP_TARGET_KIND_OACC_ENTER_EXIT_DATA;
10938 ort = ORT_ACC;
10939 break;
10940 case OACC_UPDATE:
10941 kind = GF_OMP_TARGET_KIND_OACC_UPDATE;
10942 ort = ORT_ACC;
10943 break;
10944 case OMP_TARGET_UPDATE:
10945 kind = GF_OMP_TARGET_KIND_UPDATE;
10946 break;
10947 case OMP_TARGET_ENTER_DATA:
10948 kind = GF_OMP_TARGET_KIND_ENTER_DATA;
10949 break;
10950 case OMP_TARGET_EXIT_DATA:
10951 kind = GF_OMP_TARGET_KIND_EXIT_DATA;
10952 break;
10953 default:
10954 gcc_unreachable ();
10955 }
10956 gimplify_scan_omp_clauses (&OMP_STANDALONE_CLAUSES (expr), pre_p,
10957 ort, TREE_CODE (expr));
10958 gimplify_adjust_omp_clauses (pre_p, NULL, &OMP_STANDALONE_CLAUSES (expr),
10959 TREE_CODE (expr));
10960 stmt = gimple_build_omp_target (NULL, kind, OMP_STANDALONE_CLAUSES (expr));
10961
10962 gimplify_seq_add_stmt (pre_p, stmt);
10963 *expr_p = NULL_TREE;
10964 }
10965
10966 /* A subroutine of gimplify_omp_atomic. The front end is supposed to have
10967 stabilized the lhs of the atomic operation as *ADDR. Return true if
10968 EXPR is this stabilized form. */
10969
10970 static bool
goa_lhs_expr_p(tree expr,tree addr)10971 goa_lhs_expr_p (tree expr, tree addr)
10972 {
10973 /* Also include casts to other type variants. The C front end is fond
10974 of adding these for e.g. volatile variables. This is like
10975 STRIP_TYPE_NOPS but includes the main variant lookup. */
10976 STRIP_USELESS_TYPE_CONVERSION (expr);
10977
10978 if (TREE_CODE (expr) == INDIRECT_REF)
10979 {
10980 expr = TREE_OPERAND (expr, 0);
10981 while (expr != addr
10982 && (CONVERT_EXPR_P (expr)
10983 || TREE_CODE (expr) == NON_LVALUE_EXPR)
10984 && TREE_CODE (expr) == TREE_CODE (addr)
10985 && types_compatible_p (TREE_TYPE (expr), TREE_TYPE (addr)))
10986 {
10987 expr = TREE_OPERAND (expr, 0);
10988 addr = TREE_OPERAND (addr, 0);
10989 }
10990 if (expr == addr)
10991 return true;
10992 return (TREE_CODE (addr) == ADDR_EXPR
10993 && TREE_CODE (expr) == ADDR_EXPR
10994 && TREE_OPERAND (addr, 0) == TREE_OPERAND (expr, 0));
10995 }
10996 if (TREE_CODE (addr) == ADDR_EXPR && expr == TREE_OPERAND (addr, 0))
10997 return true;
10998 return false;
10999 }
11000
11001 /* Walk *EXPR_P and replace appearances of *LHS_ADDR with LHS_VAR. If an
11002 expression does not involve the lhs, evaluate it into a temporary.
11003 Return 1 if the lhs appeared as a subexpression, 0 if it did not,
11004 or -1 if an error was encountered. */
11005
static int
goa_stabilize_expr (tree *expr_p, gimple_seq *pre_p, tree lhs_addr,
		    tree lhs_var)
{
  tree expr = *expr_p;
  int saw_lhs;

  if (goa_lhs_expr_p (expr, lhs_addr))
    {
      /* This is the atomic lhs itself: replace it with the load
	 temporary.  */
      *expr_p = lhs_var;
      return 1;
    }
  if (is_gimple_val (expr))
    /* Already a valid GIMPLE operand; nothing to stabilize.  */
    return 0;

  saw_lhs = 0;
  /* Recurse into operands looking for appearances of the lhs.  */
  switch (TREE_CODE_CLASS (TREE_CODE (expr)))
    {
    case tcc_binary:
    case tcc_comparison:
      saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p, lhs_addr,
				     lhs_var);
      /* FALLTHRU */
    case tcc_unary:
      saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p, lhs_addr,
				     lhs_var);
      break;
    case tcc_expression:
      switch (TREE_CODE (expr))
	{
	case TRUTH_ANDIF_EXPR:
	case TRUTH_ORIF_EXPR:
	case TRUTH_AND_EXPR:
	case TRUTH_OR_EXPR:
	case TRUTH_XOR_EXPR:
	case BIT_INSERT_EXPR:
	  saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 1), pre_p,
					 lhs_addr, lhs_var);
	  /* FALLTHRU */
	case TRUTH_NOT_EXPR:
	  saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
					 lhs_addr, lhs_var);
	  break;
	case COMPOUND_EXPR:
	  /* Break out any preevaluations from cp_build_modify_expr.  */
	  for (; TREE_CODE (expr) == COMPOUND_EXPR;
	       expr = TREE_OPERAND (expr, 1))
	    gimplify_stmt (&TREE_OPERAND (expr, 0), pre_p);
	  *expr_p = expr;
	  /* Retry on what remains after the preevaluations.  */
	  return goa_stabilize_expr (expr_p, pre_p, lhs_addr, lhs_var);
	default:
	  break;
	}
      break;
    case tcc_reference:
      if (TREE_CODE (expr) == BIT_FIELD_REF)
	saw_lhs |= goa_stabilize_expr (&TREE_OPERAND (expr, 0), pre_p,
				       lhs_addr, lhs_var);
      break;
    default:
      break;
    }

  if (saw_lhs == 0)
    {
      /* The lhs does not appear in this subexpression; evaluate it
	 into a temporary before the atomic operation.  */
      enum gimplify_status gs;
      gs = gimplify_expr (expr_p, pre_p, NULL, is_gimple_val, fb_rvalue);
      if (gs != GS_ALL_DONE)
	saw_lhs = -1;
    }

  return saw_lhs;
}
11079
11080 /* Gimplify an OMP_ATOMIC statement. */
11081
static enum gimplify_status
gimplify_omp_atomic (tree *expr_p, gimple_seq *pre_p)
{
  tree addr = TREE_OPERAND (*expr_p, 0);
  /* OMP_ATOMIC_READ has no rhs operand.  */
  tree rhs = TREE_CODE (*expr_p) == OMP_ATOMIC_READ
	     ? NULL : TREE_OPERAND (*expr_p, 1);
  tree type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (addr)));
  tree tmp_load;
  gomp_atomic_load *loadstmt;
  gomp_atomic_store *storestmt;

  /* Temporary holding the value loaded from the atomic location.  */
  tmp_load = create_tmp_reg (type);
  if (rhs && goa_stabilize_expr (&rhs, pre_p, addr, tmp_load) < 0)
    return GS_ERROR;

  if (gimplify_expr (&addr, pre_p, NULL, is_gimple_val, fb_rvalue)
      != GS_ALL_DONE)
    return GS_ERROR;

  loadstmt = gimple_build_omp_atomic_load (tmp_load, addr);
  gimplify_seq_add_stmt (pre_p, loadstmt);
  if (rhs)
    {
      /* BIT_INSERT_EXPR is not valid for non-integral bitfield
	 representatives.  Use BIT_FIELD_REF on the lhs instead.  */
      if (TREE_CODE (rhs) == BIT_INSERT_EXPR
	  && !INTEGRAL_TYPE_P (TREE_TYPE (tmp_load)))
	{
	  tree bitpos = TREE_OPERAND (rhs, 2);
	  tree op1 = TREE_OPERAND (rhs, 1);
	  tree bitsize;
	  tree tmp_store = tmp_load;
	  if (TREE_CODE (*expr_p) == OMP_ATOMIC_CAPTURE_OLD)
	    /* Capture-old needs the pre-store value preserved, so
	       store into a copy rather than into tmp_load itself.  */
	    tmp_store = get_initialized_tmp_var (tmp_load, pre_p, NULL);
	  if (INTEGRAL_TYPE_P (TREE_TYPE (op1)))
	    bitsize = bitsize_int (TYPE_PRECISION (TREE_TYPE (op1)));
	  else
	    bitsize = TYPE_SIZE (TREE_TYPE (op1));
	  gcc_assert (TREE_OPERAND (rhs, 0) == tmp_load);
	  /* Emit tmp_store<bitsize, bitpos> = op1 instead of the
	     BIT_INSERT_EXPR.  */
	  tree t = build2_loc (EXPR_LOCATION (rhs),
			       MODIFY_EXPR, void_type_node,
			       build3_loc (EXPR_LOCATION (rhs), BIT_FIELD_REF,
					   TREE_TYPE (op1), tmp_store, bitsize,
					   bitpos), op1);
	  gimplify_and_add (t, pre_p);
	  rhs = tmp_store;
	}
      if (gimplify_expr (&rhs, pre_p, NULL, is_gimple_val, fb_rvalue)
	  != GS_ALL_DONE)
	return GS_ERROR;
    }

  if (TREE_CODE (*expr_p) == OMP_ATOMIC_READ)
    /* For a pure read, "store back" the loaded value.  */
    rhs = tmp_load;
  storestmt = gimple_build_omp_atomic_store (rhs);
  gimplify_seq_add_stmt (pre_p, storestmt);
  if (OMP_ATOMIC_SEQ_CST (*expr_p))
    {
      gimple_omp_atomic_set_seq_cst (loadstmt);
      gimple_omp_atomic_set_seq_cst (storestmt);
    }
  /* For the capture forms, the whole expression yields a value: the old
     value (from the load) or the new value (from the store).  */
  switch (TREE_CODE (*expr_p))
    {
    case OMP_ATOMIC_READ:
    case OMP_ATOMIC_CAPTURE_OLD:
      *expr_p = tmp_load;
      gimple_omp_atomic_set_need_value (loadstmt);
      break;
    case OMP_ATOMIC_CAPTURE_NEW:
      *expr_p = rhs;
      gimple_omp_atomic_set_need_value (storestmt);
      break;
    default:
      *expr_p = NULL;
      break;
    }

  return GS_ALL_DONE;
}
11161
11162 /* Gimplify a TRANSACTION_EXPR. This involves gimplification of the
11163 body, and adding some EH bits. */
11164
11165 static enum gimplify_status
gimplify_transaction(tree * expr_p,gimple_seq * pre_p)11166 gimplify_transaction (tree *expr_p, gimple_seq *pre_p)
11167 {
11168 tree expr = *expr_p, temp, tbody = TRANSACTION_EXPR_BODY (expr);
11169 gimple *body_stmt;
11170 gtransaction *trans_stmt;
11171 gimple_seq body = NULL;
11172 int subcode = 0;
11173
11174 /* Wrap the transaction body in a BIND_EXPR so we have a context
11175 where to put decls for OMP. */
11176 if (TREE_CODE (tbody) != BIND_EXPR)
11177 {
11178 tree bind = build3 (BIND_EXPR, void_type_node, NULL, tbody, NULL);
11179 TREE_SIDE_EFFECTS (bind) = 1;
11180 SET_EXPR_LOCATION (bind, EXPR_LOCATION (tbody));
11181 TRANSACTION_EXPR_BODY (expr) = bind;
11182 }
11183
11184 push_gimplify_context ();
11185 temp = voidify_wrapper_expr (*expr_p, NULL);
11186
11187 body_stmt = gimplify_and_return_first (TRANSACTION_EXPR_BODY (expr), &body);
11188 pop_gimplify_context (body_stmt);
11189
11190 trans_stmt = gimple_build_transaction (body);
11191 if (TRANSACTION_EXPR_OUTER (expr))
11192 subcode = GTMA_IS_OUTER;
11193 else if (TRANSACTION_EXPR_RELAXED (expr))
11194 subcode = GTMA_IS_RELAXED;
11195 gimple_transaction_set_subcode (trans_stmt, subcode);
11196
11197 gimplify_seq_add_stmt (pre_p, trans_stmt);
11198
11199 if (temp)
11200 {
11201 *expr_p = temp;
11202 return GS_OK;
11203 }
11204
11205 *expr_p = NULL_TREE;
11206 return GS_ALL_DONE;
11207 }
11208
11209 /* Gimplify an OMP_ORDERED construct. EXPR is the tree version. BODY
11210 is the OMP_BODY of the original EXPR (which has already been
11211 gimplified so it's not present in the EXPR).
11212
11213 Return the gimplified GIMPLE_OMP_ORDERED tuple. */
11214
static gimple *
gimplify_omp_ordered (tree expr, gimple_seq body)
{
  tree c, decls;
  int failures = 0;
  unsigned int i;
  tree source_c = NULL_TREE;
  tree sink_c = NULL_TREE;

  if (gimplify_omp_ctxp)
    {
      /* Validate depend(sink:)/depend(source) clauses against the
	 iteration variables recorded on the enclosing ordered loop.  */
      for (c = OMP_ORDERED_CLAUSES (expr); c; c = OMP_CLAUSE_CHAIN (c))
	if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
	    && gimplify_omp_ctxp->loop_iter_var.is_empty ()
	    && (OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK
		|| OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE))
	  {
	    /* No recorded iteration variables means there is no
	       enclosing loop with an ordered(n) clause.  */
	    error_at (OMP_CLAUSE_LOCATION (c),
		      "%<ordered%> construct with %<depend%> clause must be "
		      "closely nested inside a loop with %<ordered%> clause "
		      "with a parameter");
	    failures++;
	  }
	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
		 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SINK)
	  {
	    bool fail = false;
	    /* loop_iter_var holds pairs; entry 2*i is the source-level
	       iteration variable of loop I, 2*i+1 its replacement.  */
	    for (decls = OMP_CLAUSE_DECL (c), i = 0;
		 decls && TREE_CODE (decls) == TREE_LIST;
		 decls = TREE_CHAIN (decls), ++i)
	      if (i >= gimplify_omp_ctxp->loop_iter_var.length () / 2)
		/* Excess entries; diagnosed via the count check below.  */
		continue;
	      else if (TREE_VALUE (decls)
		       != gimplify_omp_ctxp->loop_iter_var[2 * i])
		{
		  error_at (OMP_CLAUSE_LOCATION (c),
			    "variable %qE is not an iteration "
			    "of outermost loop %d, expected %qE",
			    TREE_VALUE (decls), i + 1,
			    gimplify_omp_ctxp->loop_iter_var[2 * i]);
		  fail = true;
		  failures++;
		}
	      else
		/* Substitute the gimplified iteration variable.  */
		TREE_VALUE (decls)
		  = gimplify_omp_ctxp->loop_iter_var[2 * i + 1];
	    if (!fail && i != gimplify_omp_ctxp->loop_iter_var.length () / 2)
	      {
		error_at (OMP_CLAUSE_LOCATION (c),
			  "number of variables in %<depend(sink)%> "
			  "clause does not match number of "
			  "iteration variables");
		failures++;
	      }
	    sink_c = c;
	  }
	else if (OMP_CLAUSE_CODE (c) == OMP_CLAUSE_DEPEND
		 && OMP_CLAUSE_DEPEND_KIND (c) == OMP_CLAUSE_DEPEND_SOURCE)
	  {
	    /* At most one depend(source) clause is allowed.  */
	    if (source_c)
	      {
		error_at (OMP_CLAUSE_LOCATION (c),
			  "more than one %<depend(source)%> clause on an "
			  "%<ordered%> construct");
		failures++;
	      }
	    else
	      source_c = c;
	  }
    }
  /* depend(source) and depend(sink:) are mutually exclusive on the
     same construct.  */
  if (source_c && sink_c)
    {
      error_at (OMP_CLAUSE_LOCATION (source_c),
		"%<depend(source)%> clause specified together with "
		"%<depend(sink:)%> clauses on the same construct");
      failures++;
    }

  if (failures)
    /* On error, emit a no-op instead of a malformed ordered region.  */
    return gimple_build_nop ();
  return gimple_build_omp_ordered (body, OMP_ORDERED_CLAUSES (expr));
}
11297
11298 /* Convert the GENERIC expression tree *EXPR_P to GIMPLE. If the
11299 expression produces a value to be used as an operand inside a GIMPLE
11300 statement, the value will be stored back in *EXPR_P. This value will
11301 be a tree of class tcc_declaration, tcc_constant, tcc_reference or
11302 an SSA_NAME. The corresponding sequence of GIMPLE statements is
11303 emitted in PRE_P and POST_P.
11304
11305 Additionally, this process may overwrite parts of the input
11306 expression during gimplification. Ideally, it should be
11307 possible to do non-destructive gimplification.
11308
11309 EXPR_P points to the GENERIC expression to convert to GIMPLE. If
11310 the expression needs to evaluate to a value to be used as
11311 an operand in a GIMPLE statement, this value will be stored in
11312 *EXPR_P on exit. This happens when the caller specifies one
11313 of fb_lvalue or fb_rvalue fallback flags.
11314
11315 PRE_P will contain the sequence of GIMPLE statements corresponding
11316 to the evaluation of EXPR and all the side-effects that must
11317 be executed before the main expression. On exit, the last
11318 statement of PRE_P is the core statement being gimplified. For
11319 instance, when gimplifying 'if (++a)' the last statement in
11320 PRE_P will be 'if (t.1)' where t.1 is the result of
11321 pre-incrementing 'a'.
11322
11323 POST_P will contain the sequence of GIMPLE statements corresponding
11324 to the evaluation of all the side-effects that must be executed
11325 after the main expression. If this is NULL, the post
11326 side-effects are stored at the end of PRE_P.
11327
11328 The reason why the output is split in two is to handle post
11329 side-effects explicitly. In some cases, an expression may have
11330 inner and outer post side-effects which need to be emitted in
11331 an order different from the one given by the recursive
11332 traversal. For instance, for the expression (*p--)++ the post
11333 side-effects of '--' must actually occur *after* the post
11334 side-effects of '++'. However, gimplification will first visit
11335 the inner expression, so if a separate POST sequence was not
11336 used, the resulting sequence would be:
11337
11338 1 t.1 = *p
11339 2 p = p - 1
11340 3 t.2 = t.1 + 1
11341 4 *p = t.2
11342
11343 However, the post-decrement operation in line #2 must not be
11344 evaluated until after the store to *p at line #4, so the
11345 correct sequence should be:
11346
11347 1 t.1 = *p
11348 2 t.2 = t.1 + 1
11349 3 *p = t.2
11350 4 p = p - 1
11351
11352 So, by specifying a separate post queue, it is possible
11353 to emit the post side-effects in the correct order.
11354 If POST_P is NULL, an internal queue will be used. Before
11355 returning to the caller, the sequence POST_P is appended to
11356 the main output sequence PRE_P.
11357
11358 GIMPLE_TEST_F points to a function that takes a tree T and
11359 returns nonzero if T is in the GIMPLE form requested by the
11360 caller. The GIMPLE predicates are in gimple.c.
11361
11362 FALLBACK tells the function what sort of a temporary we want if
11363 gimplification cannot produce an expression that complies with
11364 GIMPLE_TEST_F.
11365
11366 fb_none means that no temporary should be generated
11367 fb_rvalue means that an rvalue is OK to generate
11368 fb_lvalue means that an lvalue is OK to generate
11369 fb_either means that either is OK, but an lvalue is preferable.
11370 fb_mayfail means that gimplification may fail (in which case
11371 GS_ERROR will be returned)
11372
11373 The return value is either GS_ERROR or GS_ALL_DONE, since this
11374 function iterates until EXPR is completely gimplified or an error
11375 occurs. */
11376
11377 enum gimplify_status
gimplify_expr(tree * expr_p,gimple_seq * pre_p,gimple_seq * post_p,bool (* gimple_test_f)(tree),fallback_t fallback)11378 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
11379 bool (*gimple_test_f) (tree), fallback_t fallback)
11380 {
11381 tree tmp;
11382 gimple_seq internal_pre = NULL;
11383 gimple_seq internal_post = NULL;
11384 tree save_expr;
11385 bool is_statement;
11386 location_t saved_location;
11387 enum gimplify_status ret;
11388 gimple_stmt_iterator pre_last_gsi, post_last_gsi;
11389 tree label;
11390
11391 save_expr = *expr_p;
11392 if (save_expr == NULL_TREE)
11393 return GS_ALL_DONE;
11394
11395 /* If we are gimplifying a top-level statement, PRE_P must be valid. */
11396 is_statement = gimple_test_f == is_gimple_stmt;
11397 if (is_statement)
11398 gcc_assert (pre_p);
11399
11400 /* Consistency checks. */
11401 if (gimple_test_f == is_gimple_reg)
11402 gcc_assert (fallback & (fb_rvalue | fb_lvalue));
11403 else if (gimple_test_f == is_gimple_val
11404 || gimple_test_f == is_gimple_call_addr
11405 || gimple_test_f == is_gimple_condexpr
11406 || gimple_test_f == is_gimple_mem_rhs
11407 || gimple_test_f == is_gimple_mem_rhs_or_call
11408 || gimple_test_f == is_gimple_reg_rhs
11409 || gimple_test_f == is_gimple_reg_rhs_or_call
11410 || gimple_test_f == is_gimple_asm_val
11411 || gimple_test_f == is_gimple_mem_ref_addr)
11412 gcc_assert (fallback & fb_rvalue);
11413 else if (gimple_test_f == is_gimple_min_lval
11414 || gimple_test_f == is_gimple_lvalue)
11415 gcc_assert (fallback & fb_lvalue);
11416 else if (gimple_test_f == is_gimple_addressable)
11417 gcc_assert (fallback & fb_either);
11418 else if (gimple_test_f == is_gimple_stmt)
11419 gcc_assert (fallback == fb_none);
11420 else
11421 {
11422 /* We should have recognized the GIMPLE_TEST_F predicate to
11423 know what kind of fallback to use in case a temporary is
11424 needed to hold the value or address of *EXPR_P. */
11425 gcc_unreachable ();
11426 }
11427
11428 /* We used to check the predicate here and return immediately if it
11429 succeeds. This is wrong; the design is for gimplification to be
11430 idempotent, and for the predicates to only test for valid forms, not
11431 whether they are fully simplified. */
11432 if (pre_p == NULL)
11433 pre_p = &internal_pre;
11434
11435 if (post_p == NULL)
11436 post_p = &internal_post;
11437
11438 /* Remember the last statements added to PRE_P and POST_P. Every
11439 new statement added by the gimplification helpers needs to be
11440 annotated with location information. To centralize the
11441 responsibility, we remember the last statement that had been
11442 added to both queues before gimplifying *EXPR_P. If
11443 gimplification produces new statements in PRE_P and POST_P, those
11444 statements will be annotated with the same location information
11445 as *EXPR_P. */
11446 pre_last_gsi = gsi_last (*pre_p);
11447 post_last_gsi = gsi_last (*post_p);
11448
11449 saved_location = input_location;
11450 if (save_expr != error_mark_node
11451 && EXPR_HAS_LOCATION (*expr_p))
11452 input_location = EXPR_LOCATION (*expr_p);
11453
11454 /* Loop over the specific gimplifiers until the toplevel node
11455 remains the same. */
11456 do
11457 {
11458 /* Strip away as many useless type conversions as possible
11459 at the toplevel. */
11460 STRIP_USELESS_TYPE_CONVERSION (*expr_p);
11461
11462 /* Remember the expr. */
11463 save_expr = *expr_p;
11464
11465 /* Die, die, die, my darling. */
11466 if (error_operand_p (save_expr))
11467 {
11468 ret = GS_ERROR;
11469 break;
11470 }
11471
11472 /* Do any language-specific gimplification. */
11473 ret = ((enum gimplify_status)
11474 lang_hooks.gimplify_expr (expr_p, pre_p, post_p));
11475 if (ret == GS_OK)
11476 {
11477 if (*expr_p == NULL_TREE)
11478 break;
11479 if (*expr_p != save_expr)
11480 continue;
11481 }
11482 else if (ret != GS_UNHANDLED)
11483 break;
11484
11485 /* Make sure that all the cases set 'ret' appropriately. */
11486 ret = GS_UNHANDLED;
11487 switch (TREE_CODE (*expr_p))
11488 {
11489 /* First deal with the special cases. */
11490
11491 case POSTINCREMENT_EXPR:
11492 case POSTDECREMENT_EXPR:
11493 case PREINCREMENT_EXPR:
11494 case PREDECREMENT_EXPR:
11495 ret = gimplify_self_mod_expr (expr_p, pre_p, post_p,
11496 fallback != fb_none,
11497 TREE_TYPE (*expr_p));
11498 break;
11499
11500 case VIEW_CONVERT_EXPR:
11501 if (is_gimple_reg_type (TREE_TYPE (*expr_p))
11502 && is_gimple_reg_type (TREE_TYPE (TREE_OPERAND (*expr_p, 0))))
11503 {
11504 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
11505 post_p, is_gimple_val, fb_rvalue);
11506 recalculate_side_effects (*expr_p);
11507 break;
11508 }
11509 /* Fallthru. */
11510
11511 case ARRAY_REF:
11512 case ARRAY_RANGE_REF:
11513 case REALPART_EXPR:
11514 case IMAGPART_EXPR:
11515 case COMPONENT_REF:
11516 ret = gimplify_compound_lval (expr_p, pre_p, post_p,
11517 fallback ? fallback : fb_rvalue);
11518 break;
11519
11520 case COND_EXPR:
11521 ret = gimplify_cond_expr (expr_p, pre_p, fallback);
11522
11523 /* C99 code may assign to an array in a structure value of a
11524 conditional expression, and this has undefined behavior
11525 only on execution, so create a temporary if an lvalue is
11526 required. */
11527 if (fallback == fb_lvalue)
11528 {
11529 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
11530 mark_addressable (*expr_p);
11531 ret = GS_OK;
11532 }
11533 break;
11534
11535 case CALL_EXPR:
11536 ret = gimplify_call_expr (expr_p, pre_p, fallback != fb_none);
11537
11538 /* C99 code may assign to an array in a structure returned
11539 from a function, and this has undefined behavior only on
11540 execution, so create a temporary if an lvalue is
11541 required. */
11542 if (fallback == fb_lvalue)
11543 {
11544 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
11545 mark_addressable (*expr_p);
11546 ret = GS_OK;
11547 }
11548 break;
11549
11550 case TREE_LIST:
11551 gcc_unreachable ();
11552
11553 case COMPOUND_EXPR:
11554 ret = gimplify_compound_expr (expr_p, pre_p, fallback != fb_none);
11555 break;
11556
11557 case COMPOUND_LITERAL_EXPR:
11558 ret = gimplify_compound_literal_expr (expr_p, pre_p,
11559 gimple_test_f, fallback);
11560 break;
11561
11562 case MODIFY_EXPR:
11563 case INIT_EXPR:
11564 ret = gimplify_modify_expr (expr_p, pre_p, post_p,
11565 fallback != fb_none);
11566 break;
11567
11568 case TRUTH_ANDIF_EXPR:
11569 case TRUTH_ORIF_EXPR:
11570 {
11571 /* Preserve the original type of the expression and the
11572 source location of the outer expression. */
11573 tree org_type = TREE_TYPE (*expr_p);
11574 *expr_p = gimple_boolify (*expr_p);
11575 *expr_p = build3_loc (input_location, COND_EXPR,
11576 org_type, *expr_p,
11577 fold_convert_loc
11578 (input_location,
11579 org_type, boolean_true_node),
11580 fold_convert_loc
11581 (input_location,
11582 org_type, boolean_false_node));
11583 ret = GS_OK;
11584 break;
11585 }
11586
11587 case TRUTH_NOT_EXPR:
11588 {
11589 tree type = TREE_TYPE (*expr_p);
11590 /* The parsers are careful to generate TRUTH_NOT_EXPR
11591 only with operands that are always zero or one.
11592 We do not fold here but handle the only interesting case
11593 manually, as fold may re-introduce the TRUTH_NOT_EXPR. */
11594 *expr_p = gimple_boolify (*expr_p);
11595 if (TYPE_PRECISION (TREE_TYPE (*expr_p)) == 1)
11596 *expr_p = build1_loc (input_location, BIT_NOT_EXPR,
11597 TREE_TYPE (*expr_p),
11598 TREE_OPERAND (*expr_p, 0));
11599 else
11600 *expr_p = build2_loc (input_location, BIT_XOR_EXPR,
11601 TREE_TYPE (*expr_p),
11602 TREE_OPERAND (*expr_p, 0),
11603 build_int_cst (TREE_TYPE (*expr_p), 1));
11604 if (!useless_type_conversion_p (type, TREE_TYPE (*expr_p)))
11605 *expr_p = fold_convert_loc (input_location, type, *expr_p);
11606 ret = GS_OK;
11607 break;
11608 }
11609
11610 case ADDR_EXPR:
11611 ret = gimplify_addr_expr (expr_p, pre_p, post_p);
11612 break;
11613
11614 case ANNOTATE_EXPR:
11615 {
11616 tree cond = TREE_OPERAND (*expr_p, 0);
11617 tree kind = TREE_OPERAND (*expr_p, 1);
11618 tree data = TREE_OPERAND (*expr_p, 2);
11619 tree type = TREE_TYPE (cond);
11620 if (!INTEGRAL_TYPE_P (type))
11621 {
11622 *expr_p = cond;
11623 ret = GS_OK;
11624 break;
11625 }
11626 tree tmp = create_tmp_var (type);
11627 gimplify_arg (&cond, pre_p, EXPR_LOCATION (*expr_p));
11628 gcall *call
11629 = gimple_build_call_internal (IFN_ANNOTATE, 3, cond, kind, data);
11630 gimple_call_set_lhs (call, tmp);
11631 gimplify_seq_add_stmt (pre_p, call);
11632 *expr_p = tmp;
11633 ret = GS_ALL_DONE;
11634 break;
11635 }
11636
11637 case VA_ARG_EXPR:
11638 ret = gimplify_va_arg_expr (expr_p, pre_p, post_p);
11639 break;
11640
11641 CASE_CONVERT:
11642 if (IS_EMPTY_STMT (*expr_p))
11643 {
11644 ret = GS_ALL_DONE;
11645 break;
11646 }
11647
11648 if (VOID_TYPE_P (TREE_TYPE (*expr_p))
11649 || fallback == fb_none)
11650 {
11651 /* Just strip a conversion to void (or in void context) and
11652 try again. */
11653 *expr_p = TREE_OPERAND (*expr_p, 0);
11654 ret = GS_OK;
11655 break;
11656 }
11657
11658 ret = gimplify_conversion (expr_p);
11659 if (ret == GS_ERROR)
11660 break;
11661 if (*expr_p != save_expr)
11662 break;
11663 /* FALLTHRU */
11664
11665 case FIX_TRUNC_EXPR:
11666 /* unary_expr: ... | '(' cast ')' val | ... */
11667 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
11668 is_gimple_val, fb_rvalue);
11669 recalculate_side_effects (*expr_p);
11670 break;
11671
11672 case INDIRECT_REF:
11673 {
11674 bool volatilep = TREE_THIS_VOLATILE (*expr_p);
11675 bool notrap = TREE_THIS_NOTRAP (*expr_p);
11676 tree saved_ptr_type = TREE_TYPE (TREE_OPERAND (*expr_p, 0));
11677
11678 *expr_p = fold_indirect_ref_loc (input_location, *expr_p);
11679 if (*expr_p != save_expr)
11680 {
11681 ret = GS_OK;
11682 break;
11683 }
11684
11685 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
11686 is_gimple_reg, fb_rvalue);
11687 if (ret == GS_ERROR)
11688 break;
11689
11690 recalculate_side_effects (*expr_p);
11691 *expr_p = fold_build2_loc (input_location, MEM_REF,
11692 TREE_TYPE (*expr_p),
11693 TREE_OPERAND (*expr_p, 0),
11694 build_int_cst (saved_ptr_type, 0));
11695 TREE_THIS_VOLATILE (*expr_p) = volatilep;
11696 TREE_THIS_NOTRAP (*expr_p) = notrap;
11697 ret = GS_OK;
11698 break;
11699 }
11700
11701 /* We arrive here through the various re-gimplifcation paths. */
11702 case MEM_REF:
11703 /* First try re-folding the whole thing. */
11704 tmp = fold_binary (MEM_REF, TREE_TYPE (*expr_p),
11705 TREE_OPERAND (*expr_p, 0),
11706 TREE_OPERAND (*expr_p, 1));
11707 if (tmp)
11708 {
11709 REF_REVERSE_STORAGE_ORDER (tmp)
11710 = REF_REVERSE_STORAGE_ORDER (*expr_p);
11711 *expr_p = tmp;
11712 recalculate_side_effects (*expr_p);
11713 ret = GS_OK;
11714 break;
11715 }
11716 /* Avoid re-gimplifying the address operand if it is already
11717 in suitable form. Re-gimplifying would mark the address
11718 operand addressable. Always gimplify when not in SSA form
11719 as we still may have to gimplify decls with value-exprs. */
11720 if (!gimplify_ctxp || !gimple_in_ssa_p (cfun)
11721 || !is_gimple_mem_ref_addr (TREE_OPERAND (*expr_p, 0)))
11722 {
11723 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
11724 is_gimple_mem_ref_addr, fb_rvalue);
11725 if (ret == GS_ERROR)
11726 break;
11727 }
11728 recalculate_side_effects (*expr_p);
11729 ret = GS_ALL_DONE;
11730 break;
11731
11732 /* Constants need not be gimplified. */
11733 case INTEGER_CST:
11734 case REAL_CST:
11735 case FIXED_CST:
11736 case STRING_CST:
11737 case COMPLEX_CST:
11738 case VECTOR_CST:
11739 /* Drop the overflow flag on constants, we do not want
11740 that in the GIMPLE IL. */
11741 if (TREE_OVERFLOW_P (*expr_p))
11742 *expr_p = drop_tree_overflow (*expr_p);
11743 ret = GS_ALL_DONE;
11744 break;
11745
11746 case CONST_DECL:
11747 /* If we require an lvalue, such as for ADDR_EXPR, retain the
11748 CONST_DECL node. Otherwise the decl is replaceable by its
11749 value. */
11750 /* ??? Should be == fb_lvalue, but ADDR_EXPR passes fb_either. */
11751 if (fallback & fb_lvalue)
11752 ret = GS_ALL_DONE;
11753 else
11754 {
11755 *expr_p = DECL_INITIAL (*expr_p);
11756 ret = GS_OK;
11757 }
11758 break;
11759
11760 case DECL_EXPR:
11761 ret = gimplify_decl_expr (expr_p, pre_p);
11762 break;
11763
11764 case BIND_EXPR:
11765 ret = gimplify_bind_expr (expr_p, pre_p);
11766 break;
11767
11768 case LOOP_EXPR:
11769 ret = gimplify_loop_expr (expr_p, pre_p);
11770 break;
11771
11772 case SWITCH_EXPR:
11773 ret = gimplify_switch_expr (expr_p, pre_p);
11774 break;
11775
11776 case EXIT_EXPR:
11777 ret = gimplify_exit_expr (expr_p);
11778 break;
11779
11780 case GOTO_EXPR:
11781 /* If the target is not LABEL, then it is a computed jump
11782 and the target needs to be gimplified. */
11783 if (TREE_CODE (GOTO_DESTINATION (*expr_p)) != LABEL_DECL)
11784 {
11785 ret = gimplify_expr (&GOTO_DESTINATION (*expr_p), pre_p,
11786 NULL, is_gimple_val, fb_rvalue);
11787 if (ret == GS_ERROR)
11788 break;
11789 }
11790 gimplify_seq_add_stmt (pre_p,
11791 gimple_build_goto (GOTO_DESTINATION (*expr_p)));
11792 ret = GS_ALL_DONE;
11793 break;
11794
11795 case PREDICT_EXPR:
11796 gimplify_seq_add_stmt (pre_p,
11797 gimple_build_predict (PREDICT_EXPR_PREDICTOR (*expr_p),
11798 PREDICT_EXPR_OUTCOME (*expr_p)));
11799 ret = GS_ALL_DONE;
11800 break;
11801
11802 case LABEL_EXPR:
11803 ret = gimplify_label_expr (expr_p, pre_p);
11804 label = LABEL_EXPR_LABEL (*expr_p);
11805 gcc_assert (decl_function_context (label) == current_function_decl);
11806
11807 /* If the label is used in a goto statement, or address of the label
11808 is taken, we need to unpoison all variables that were seen so far.
11809 Doing so would prevent us from reporting false positives. */
11810 if (asan_poisoned_variables
11811 && asan_used_labels != NULL
11812 && asan_used_labels->contains (label))
11813 asan_poison_variables (asan_poisoned_variables, false, pre_p);
11814 break;
11815
11816 case CASE_LABEL_EXPR:
11817 ret = gimplify_case_label_expr (expr_p, pre_p);
11818
11819 if (gimplify_ctxp->live_switch_vars)
11820 asan_poison_variables (gimplify_ctxp->live_switch_vars, false,
11821 pre_p);
11822 break;
11823
11824 case RETURN_EXPR:
11825 ret = gimplify_return_expr (*expr_p, pre_p);
11826 break;
11827
11828 case CONSTRUCTOR:
11829 /* Don't reduce this in place; let gimplify_init_constructor work its
11830 magic. But if we're just elaborating this for side effects, just
11831 gimplify any element that has side-effects. */
11832 if (fallback == fb_none)
11833 {
11834 unsigned HOST_WIDE_INT ix;
11835 tree val;
11836 tree temp = NULL_TREE;
11837 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (*expr_p), ix, val)
11838 if (TREE_SIDE_EFFECTS (val))
11839 append_to_statement_list (val, &temp);
11840
11841 *expr_p = temp;
11842 ret = temp ? GS_OK : GS_ALL_DONE;
11843 }
11844 /* C99 code may assign to an array in a constructed
11845 structure or union, and this has undefined behavior only
11846 on execution, so create a temporary if an lvalue is
11847 required. */
11848 else if (fallback == fb_lvalue)
11849 {
11850 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, post_p, false);
11851 mark_addressable (*expr_p);
11852 ret = GS_OK;
11853 }
11854 else
11855 ret = GS_ALL_DONE;
11856 break;
11857
11858 /* The following are special cases that are not handled by the
11859 original GIMPLE grammar. */
11860
11861 /* SAVE_EXPR nodes are converted into a GIMPLE identifier and
11862 eliminated. */
11863 case SAVE_EXPR:
11864 ret = gimplify_save_expr (expr_p, pre_p, post_p);
11865 break;
11866
11867 case BIT_FIELD_REF:
11868 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
11869 post_p, is_gimple_lvalue, fb_either);
11870 recalculate_side_effects (*expr_p);
11871 break;
11872
11873 case TARGET_MEM_REF:
11874 {
11875 enum gimplify_status r0 = GS_ALL_DONE, r1 = GS_ALL_DONE;
11876
11877 if (TMR_BASE (*expr_p))
11878 r0 = gimplify_expr (&TMR_BASE (*expr_p), pre_p,
11879 post_p, is_gimple_mem_ref_addr, fb_either);
11880 if (TMR_INDEX (*expr_p))
11881 r1 = gimplify_expr (&TMR_INDEX (*expr_p), pre_p,
11882 post_p, is_gimple_val, fb_rvalue);
11883 if (TMR_INDEX2 (*expr_p))
11884 r1 = gimplify_expr (&TMR_INDEX2 (*expr_p), pre_p,
11885 post_p, is_gimple_val, fb_rvalue);
11886 /* TMR_STEP and TMR_OFFSET are always integer constants. */
11887 ret = MIN (r0, r1);
11888 }
11889 break;
11890
11891 case NON_LVALUE_EXPR:
11892 /* This should have been stripped above. */
11893 gcc_unreachable ();
11894
11895 case ASM_EXPR:
11896 ret = gimplify_asm_expr (expr_p, pre_p, post_p);
11897 break;
11898
11899 case TRY_FINALLY_EXPR:
11900 case TRY_CATCH_EXPR:
11901 {
11902 gimple_seq eval, cleanup;
11903 gtry *try_;
11904
11905 /* Calls to destructors are generated automatically in FINALLY/CATCH
11906 block. They should have location as UNKNOWN_LOCATION. However,
11907 gimplify_call_expr will reset these call stmts to input_location
11908 if it finds stmt's location is unknown. To prevent resetting for
11909 destructors, we set the input_location to unknown.
11910 Note that this only affects the destructor calls in FINALLY/CATCH
11911 block, and will automatically reset to its original value by the
11912 end of gimplify_expr. */
11913 input_location = UNKNOWN_LOCATION;
11914 eval = cleanup = NULL;
11915 gimplify_and_add (TREE_OPERAND (*expr_p, 0), &eval);
11916 gimplify_and_add (TREE_OPERAND (*expr_p, 1), &cleanup);
11917 /* Don't create bogus GIMPLE_TRY with empty cleanup. */
11918 if (gimple_seq_empty_p (cleanup))
11919 {
11920 gimple_seq_add_seq (pre_p, eval);
11921 ret = GS_ALL_DONE;
11922 break;
11923 }
11924 try_ = gimple_build_try (eval, cleanup,
11925 TREE_CODE (*expr_p) == TRY_FINALLY_EXPR
11926 ? GIMPLE_TRY_FINALLY
11927 : GIMPLE_TRY_CATCH);
11928 if (EXPR_HAS_LOCATION (save_expr))
11929 gimple_set_location (try_, EXPR_LOCATION (save_expr));
11930 else if (LOCATION_LOCUS (saved_location) != UNKNOWN_LOCATION)
11931 gimple_set_location (try_, saved_location);
11932 if (TREE_CODE (*expr_p) == TRY_CATCH_EXPR)
11933 gimple_try_set_catch_is_cleanup (try_,
11934 TRY_CATCH_IS_CLEANUP (*expr_p));
11935 gimplify_seq_add_stmt (pre_p, try_);
11936 ret = GS_ALL_DONE;
11937 break;
11938 }
11939
11940 case CLEANUP_POINT_EXPR:
11941 ret = gimplify_cleanup_point_expr (expr_p, pre_p);
11942 break;
11943
11944 case TARGET_EXPR:
11945 ret = gimplify_target_expr (expr_p, pre_p, post_p);
11946 break;
11947
11948 case CATCH_EXPR:
11949 {
11950 gimple *c;
11951 gimple_seq handler = NULL;
11952 gimplify_and_add (CATCH_BODY (*expr_p), &handler);
11953 c = gimple_build_catch (CATCH_TYPES (*expr_p), handler);
11954 gimplify_seq_add_stmt (pre_p, c);
11955 ret = GS_ALL_DONE;
11956 break;
11957 }
11958
11959 case EH_FILTER_EXPR:
11960 {
11961 gimple *ehf;
11962 gimple_seq failure = NULL;
11963
11964 gimplify_and_add (EH_FILTER_FAILURE (*expr_p), &failure);
11965 ehf = gimple_build_eh_filter (EH_FILTER_TYPES (*expr_p), failure);
11966 gimple_set_no_warning (ehf, TREE_NO_WARNING (*expr_p));
11967 gimplify_seq_add_stmt (pre_p, ehf);
11968 ret = GS_ALL_DONE;
11969 break;
11970 }
11971
11972 case OBJ_TYPE_REF:
11973 {
11974 enum gimplify_status r0, r1;
11975 r0 = gimplify_expr (&OBJ_TYPE_REF_OBJECT (*expr_p), pre_p,
11976 post_p, is_gimple_val, fb_rvalue);
11977 r1 = gimplify_expr (&OBJ_TYPE_REF_EXPR (*expr_p), pre_p,
11978 post_p, is_gimple_val, fb_rvalue);
11979 TREE_SIDE_EFFECTS (*expr_p) = 0;
11980 ret = MIN (r0, r1);
11981 }
11982 break;
11983
11984 case LABEL_DECL:
11985 /* We get here when taking the address of a label. We mark
11986 the label as "forced"; meaning it can never be removed and
11987 it is a potential target for any computed goto. */
11988 FORCED_LABEL (*expr_p) = 1;
11989 ret = GS_ALL_DONE;
11990 break;
11991
11992 case STATEMENT_LIST:
11993 ret = gimplify_statement_list (expr_p, pre_p);
11994 break;
11995
11996 case WITH_SIZE_EXPR:
11997 {
11998 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
11999 post_p == &internal_post ? NULL : post_p,
12000 gimple_test_f, fallback);
12001 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
12002 is_gimple_val, fb_rvalue);
12003 ret = GS_ALL_DONE;
12004 }
12005 break;
12006
12007 case VAR_DECL:
12008 case PARM_DECL:
12009 ret = gimplify_var_or_parm_decl (expr_p);
12010 break;
12011
12012 case RESULT_DECL:
12013 /* When within an OMP context, notice uses of variables. */
12014 if (gimplify_omp_ctxp)
12015 omp_notice_variable (gimplify_omp_ctxp, *expr_p, true);
12016 ret = GS_ALL_DONE;
12017 break;
12018
12019 case DEBUG_EXPR_DECL:
12020 gcc_unreachable ();
12021
12022 case DEBUG_BEGIN_STMT:
12023 gimplify_seq_add_stmt (pre_p,
12024 gimple_build_debug_begin_stmt
12025 (TREE_BLOCK (*expr_p),
12026 EXPR_LOCATION (*expr_p)));
12027 ret = GS_ALL_DONE;
12028 *expr_p = NULL;
12029 break;
12030
12031 case SSA_NAME:
12032 /* Allow callbacks into the gimplifier during optimization. */
12033 ret = GS_ALL_DONE;
12034 break;
12035
12036 case OMP_PARALLEL:
12037 gimplify_omp_parallel (expr_p, pre_p);
12038 ret = GS_ALL_DONE;
12039 break;
12040
12041 case OMP_TASK:
12042 gimplify_omp_task (expr_p, pre_p);
12043 ret = GS_ALL_DONE;
12044 break;
12045
12046 case OMP_FOR:
12047 case OMP_SIMD:
12048 case OMP_DISTRIBUTE:
12049 case OMP_TASKLOOP:
12050 case OACC_LOOP:
12051 ret = gimplify_omp_for (expr_p, pre_p);
12052 break;
12053
12054 case OACC_CACHE:
12055 gimplify_oacc_cache (expr_p, pre_p);
12056 ret = GS_ALL_DONE;
12057 break;
12058
12059 case OACC_DECLARE:
12060 gimplify_oacc_declare (expr_p, pre_p);
12061 ret = GS_ALL_DONE;
12062 break;
12063
12064 case OACC_HOST_DATA:
12065 case OACC_DATA:
12066 case OACC_KERNELS:
12067 case OACC_PARALLEL:
12068 case OMP_SECTIONS:
12069 case OMP_SINGLE:
12070 case OMP_TARGET:
12071 case OMP_TARGET_DATA:
12072 case OMP_TEAMS:
12073 gimplify_omp_workshare (expr_p, pre_p);
12074 ret = GS_ALL_DONE;
12075 break;
12076
12077 case OACC_ENTER_DATA:
12078 case OACC_EXIT_DATA:
12079 case OACC_UPDATE:
12080 case OMP_TARGET_UPDATE:
12081 case OMP_TARGET_ENTER_DATA:
12082 case OMP_TARGET_EXIT_DATA:
12083 gimplify_omp_target_update (expr_p, pre_p);
12084 ret = GS_ALL_DONE;
12085 break;
12086
12087 case OMP_SECTION:
12088 case OMP_MASTER:
12089 case OMP_TASKGROUP:
12090 case OMP_ORDERED:
12091 case OMP_CRITICAL:
12092 {
12093 gimple_seq body = NULL;
12094 gimple *g;
12095
12096 gimplify_and_add (OMP_BODY (*expr_p), &body);
12097 switch (TREE_CODE (*expr_p))
12098 {
12099 case OMP_SECTION:
12100 g = gimple_build_omp_section (body);
12101 break;
12102 case OMP_MASTER:
12103 g = gimple_build_omp_master (body);
12104 break;
12105 case OMP_TASKGROUP:
12106 {
12107 gimple_seq cleanup = NULL;
12108 tree fn
12109 = builtin_decl_explicit (BUILT_IN_GOMP_TASKGROUP_END);
12110 g = gimple_build_call (fn, 0);
12111 gimple_seq_add_stmt (&cleanup, g);
12112 g = gimple_build_try (body, cleanup, GIMPLE_TRY_FINALLY);
12113 body = NULL;
12114 gimple_seq_add_stmt (&body, g);
12115 g = gimple_build_omp_taskgroup (body);
12116 }
12117 break;
12118 case OMP_ORDERED:
12119 g = gimplify_omp_ordered (*expr_p, body);
12120 break;
12121 case OMP_CRITICAL:
12122 gimplify_scan_omp_clauses (&OMP_CRITICAL_CLAUSES (*expr_p),
12123 pre_p, ORT_WORKSHARE, OMP_CRITICAL);
12124 gimplify_adjust_omp_clauses (pre_p, body,
12125 &OMP_CRITICAL_CLAUSES (*expr_p),
12126 OMP_CRITICAL);
12127 g = gimple_build_omp_critical (body,
12128 OMP_CRITICAL_NAME (*expr_p),
12129 OMP_CRITICAL_CLAUSES (*expr_p));
12130 break;
12131 default:
12132 gcc_unreachable ();
12133 }
12134 gimplify_seq_add_stmt (pre_p, g);
12135 ret = GS_ALL_DONE;
12136 break;
12137 }
12138
12139 case OMP_ATOMIC:
12140 case OMP_ATOMIC_READ:
12141 case OMP_ATOMIC_CAPTURE_OLD:
12142 case OMP_ATOMIC_CAPTURE_NEW:
12143 ret = gimplify_omp_atomic (expr_p, pre_p);
12144 break;
12145
12146 case TRANSACTION_EXPR:
12147 ret = gimplify_transaction (expr_p, pre_p);
12148 break;
12149
12150 case TRUTH_AND_EXPR:
12151 case TRUTH_OR_EXPR:
12152 case TRUTH_XOR_EXPR:
12153 {
12154 tree orig_type = TREE_TYPE (*expr_p);
12155 tree new_type, xop0, xop1;
12156 *expr_p = gimple_boolify (*expr_p);
12157 new_type = TREE_TYPE (*expr_p);
12158 if (!useless_type_conversion_p (orig_type, new_type))
12159 {
12160 *expr_p = fold_convert_loc (input_location, orig_type, *expr_p);
12161 ret = GS_OK;
12162 break;
12163 }
12164
12165 /* Boolified binary truth expressions are semantically equivalent
12166 to bitwise binary expressions. Canonicalize them to the
12167 bitwise variant. */
12168 switch (TREE_CODE (*expr_p))
12169 {
12170 case TRUTH_AND_EXPR:
12171 TREE_SET_CODE (*expr_p, BIT_AND_EXPR);
12172 break;
12173 case TRUTH_OR_EXPR:
12174 TREE_SET_CODE (*expr_p, BIT_IOR_EXPR);
12175 break;
12176 case TRUTH_XOR_EXPR:
12177 TREE_SET_CODE (*expr_p, BIT_XOR_EXPR);
12178 break;
12179 default:
12180 break;
12181 }
12182 /* Now make sure that operands have compatible type to
12183 expression's new_type. */
12184 xop0 = TREE_OPERAND (*expr_p, 0);
12185 xop1 = TREE_OPERAND (*expr_p, 1);
12186 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop0)))
12187 TREE_OPERAND (*expr_p, 0) = fold_convert_loc (input_location,
12188 new_type,
12189 xop0);
12190 if (!useless_type_conversion_p (new_type, TREE_TYPE (xop1)))
12191 TREE_OPERAND (*expr_p, 1) = fold_convert_loc (input_location,
12192 new_type,
12193 xop1);
12194 /* Continue classified as tcc_binary. */
12195 goto expr_2;
12196 }
12197
12198 case VEC_COND_EXPR:
12199 {
12200 enum gimplify_status r0, r1, r2;
12201
12202 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
12203 post_p, is_gimple_condexpr, fb_rvalue);
12204 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
12205 post_p, is_gimple_val, fb_rvalue);
12206 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
12207 post_p, is_gimple_val, fb_rvalue);
12208
12209 ret = MIN (MIN (r0, r1), r2);
12210 recalculate_side_effects (*expr_p);
12211 }
12212 break;
12213
12214 case FMA_EXPR:
12215 case VEC_PERM_EXPR:
12216 /* Classified as tcc_expression. */
12217 goto expr_3;
12218
12219 case BIT_INSERT_EXPR:
12220 /* Argument 3 is a constant. */
12221 goto expr_2;
12222
12223 case POINTER_PLUS_EXPR:
12224 {
12225 enum gimplify_status r0, r1;
12226 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
12227 post_p, is_gimple_val, fb_rvalue);
12228 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
12229 post_p, is_gimple_val, fb_rvalue);
12230 recalculate_side_effects (*expr_p);
12231 ret = MIN (r0, r1);
12232 break;
12233 }
12234
12235 default:
12236 switch (TREE_CODE_CLASS (TREE_CODE (*expr_p)))
12237 {
12238 case tcc_comparison:
12239 /* Handle comparison of objects of non scalar mode aggregates
12240 with a call to memcmp. It would be nice to only have to do
12241 this for variable-sized objects, but then we'd have to allow
12242 the same nest of reference nodes we allow for MODIFY_EXPR and
12243 that's too complex.
12244
12245 Compare scalar mode aggregates as scalar mode values. Using
12246 memcmp for them would be very inefficient at best, and is
12247 plain wrong if bitfields are involved. */
12248 {
12249 tree type = TREE_TYPE (TREE_OPERAND (*expr_p, 1));
12250
12251 /* Vector comparisons need no boolification. */
12252 if (TREE_CODE (type) == VECTOR_TYPE)
12253 goto expr_2;
12254 else if (!AGGREGATE_TYPE_P (type))
12255 {
12256 tree org_type = TREE_TYPE (*expr_p);
12257 *expr_p = gimple_boolify (*expr_p);
12258 if (!useless_type_conversion_p (org_type,
12259 TREE_TYPE (*expr_p)))
12260 {
12261 *expr_p = fold_convert_loc (input_location,
12262 org_type, *expr_p);
12263 ret = GS_OK;
12264 }
12265 else
12266 goto expr_2;
12267 }
12268 else if (TYPE_MODE (type) != BLKmode)
12269 ret = gimplify_scalar_mode_aggregate_compare (expr_p);
12270 else
12271 ret = gimplify_variable_sized_compare (expr_p);
12272
12273 break;
12274 }
12275
12276 /* If *EXPR_P does not need to be special-cased, handle it
12277 according to its class. */
12278 case tcc_unary:
12279 ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
12280 post_p, is_gimple_val, fb_rvalue);
12281 break;
12282
12283 case tcc_binary:
12284 expr_2:
12285 {
12286 enum gimplify_status r0, r1;
12287
12288 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
12289 post_p, is_gimple_val, fb_rvalue);
12290 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
12291 post_p, is_gimple_val, fb_rvalue);
12292
12293 ret = MIN (r0, r1);
12294 break;
12295 }
12296
12297 expr_3:
12298 {
12299 enum gimplify_status r0, r1, r2;
12300
12301 r0 = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p,
12302 post_p, is_gimple_val, fb_rvalue);
12303 r1 = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p,
12304 post_p, is_gimple_val, fb_rvalue);
12305 r2 = gimplify_expr (&TREE_OPERAND (*expr_p, 2), pre_p,
12306 post_p, is_gimple_val, fb_rvalue);
12307
12308 ret = MIN (MIN (r0, r1), r2);
12309 break;
12310 }
12311
12312 case tcc_declaration:
12313 case tcc_constant:
12314 ret = GS_ALL_DONE;
12315 goto dont_recalculate;
12316
12317 default:
12318 gcc_unreachable ();
12319 }
12320
12321 recalculate_side_effects (*expr_p);
12322
12323 dont_recalculate:
12324 break;
12325 }
12326
12327 gcc_assert (*expr_p || ret != GS_OK);
12328 }
12329 while (ret == GS_OK);
12330
12331 /* If we encountered an error_mark somewhere nested inside, either
12332 stub out the statement or propagate the error back out. */
12333 if (ret == GS_ERROR)
12334 {
12335 if (is_statement)
12336 *expr_p = NULL;
12337 goto out;
12338 }
12339
12340 /* This was only valid as a return value from the langhook, which
12341 we handled. Make sure it doesn't escape from any other context. */
12342 gcc_assert (ret != GS_UNHANDLED);
12343
12344 if (fallback == fb_none && *expr_p && !is_gimple_stmt (*expr_p))
12345 {
12346 /* We aren't looking for a value, and we don't have a valid
12347 statement. If it doesn't have side-effects, throw it away.
12348 We can also get here with code such as "*&&L;", where L is
12349 a LABEL_DECL that is marked as FORCED_LABEL. */
12350 if (TREE_CODE (*expr_p) == LABEL_DECL
12351 || !TREE_SIDE_EFFECTS (*expr_p))
12352 *expr_p = NULL;
12353 else if (!TREE_THIS_VOLATILE (*expr_p))
12354 {
12355 /* This is probably a _REF that contains something nested that
12356 has side effects. Recurse through the operands to find it. */
12357 enum tree_code code = TREE_CODE (*expr_p);
12358
12359 switch (code)
12360 {
12361 case COMPONENT_REF:
12362 case REALPART_EXPR:
12363 case IMAGPART_EXPR:
12364 case VIEW_CONVERT_EXPR:
12365 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
12366 gimple_test_f, fallback);
12367 break;
12368
12369 case ARRAY_REF:
12370 case ARRAY_RANGE_REF:
12371 gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
12372 gimple_test_f, fallback);
12373 gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
12374 gimple_test_f, fallback);
12375 break;
12376
12377 default:
12378 /* Anything else with side-effects must be converted to
12379 a valid statement before we get here. */
12380 gcc_unreachable ();
12381 }
12382
12383 *expr_p = NULL;
12384 }
12385 else if (COMPLETE_TYPE_P (TREE_TYPE (*expr_p))
12386 && TYPE_MODE (TREE_TYPE (*expr_p)) != BLKmode)
12387 {
12388 /* Historically, the compiler has treated a bare reference
12389 to a non-BLKmode volatile lvalue as forcing a load. */
12390 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (*expr_p));
12391
12392 /* Normally, we do not want to create a temporary for a
12393 TREE_ADDRESSABLE type because such a type should not be
12394 copied by bitwise-assignment. However, we make an
12395 exception here, as all we are doing here is ensuring that
12396 we read the bytes that make up the type. We use
12397 create_tmp_var_raw because create_tmp_var will abort when
12398 given a TREE_ADDRESSABLE type. */
12399 tree tmp = create_tmp_var_raw (type, "vol");
12400 gimple_add_tmp_var (tmp);
12401 gimplify_assign (tmp, *expr_p, pre_p);
12402 *expr_p = NULL;
12403 }
12404 else
12405 /* We can't do anything useful with a volatile reference to
12406 an incomplete type, so just throw it away. Likewise for
12407 a BLKmode type, since any implicit inner load should
12408 already have been turned into an explicit one by the
12409 gimplification process. */
12410 *expr_p = NULL;
12411 }
12412
12413 /* If we are gimplifying at the statement level, we're done. Tack
12414 everything together and return. */
12415 if (fallback == fb_none || is_statement)
12416 {
12417 /* Since *EXPR_P has been converted into a GIMPLE tuple, clear
12418 it out for GC to reclaim it. */
12419 *expr_p = NULL_TREE;
12420
12421 if (!gimple_seq_empty_p (internal_pre)
12422 || !gimple_seq_empty_p (internal_post))
12423 {
12424 gimplify_seq_add_seq (&internal_pre, internal_post);
12425 gimplify_seq_add_seq (pre_p, internal_pre);
12426 }
12427
12428 /* The result of gimplifying *EXPR_P is going to be the last few
12429 statements in *PRE_P and *POST_P. Add location information
12430 to all the statements that were added by the gimplification
12431 helpers. */
12432 if (!gimple_seq_empty_p (*pre_p))
12433 annotate_all_with_location_after (*pre_p, pre_last_gsi, input_location);
12434
12435 if (!gimple_seq_empty_p (*post_p))
12436 annotate_all_with_location_after (*post_p, post_last_gsi,
12437 input_location);
12438
12439 goto out;
12440 }
12441
12442 #ifdef ENABLE_GIMPLE_CHECKING
12443 if (*expr_p)
12444 {
12445 enum tree_code code = TREE_CODE (*expr_p);
12446 /* These expressions should already be in gimple IR form. */
12447 gcc_assert (code != MODIFY_EXPR
12448 && code != ASM_EXPR
12449 && code != BIND_EXPR
12450 && code != CATCH_EXPR
12451 && (code != COND_EXPR || gimplify_ctxp->allow_rhs_cond_expr)
12452 && code != EH_FILTER_EXPR
12453 && code != GOTO_EXPR
12454 && code != LABEL_EXPR
12455 && code != LOOP_EXPR
12456 && code != SWITCH_EXPR
12457 && code != TRY_FINALLY_EXPR
12458 && code != OACC_PARALLEL
12459 && code != OACC_KERNELS
12460 && code != OACC_DATA
12461 && code != OACC_HOST_DATA
12462 && code != OACC_DECLARE
12463 && code != OACC_UPDATE
12464 && code != OACC_ENTER_DATA
12465 && code != OACC_EXIT_DATA
12466 && code != OACC_CACHE
12467 && code != OMP_CRITICAL
12468 && code != OMP_FOR
12469 && code != OACC_LOOP
12470 && code != OMP_MASTER
12471 && code != OMP_TASKGROUP
12472 && code != OMP_ORDERED
12473 && code != OMP_PARALLEL
12474 && code != OMP_SECTIONS
12475 && code != OMP_SECTION
12476 && code != OMP_SINGLE);
12477 }
12478 #endif
12479
12480 /* Otherwise we're gimplifying a subexpression, so the resulting
12481 value is interesting. If it's a valid operand that matches
12482 GIMPLE_TEST_F, we're done. Unless we are handling some
12483 post-effects internally; if that's the case, we need to copy into
12484 a temporary before adding the post-effects to POST_P. */
12485 if (gimple_seq_empty_p (internal_post) && (*gimple_test_f) (*expr_p))
12486 goto out;
12487
12488 /* Otherwise, we need to create a new temporary for the gimplified
12489 expression. */
12490
12491 /* We can't return an lvalue if we have an internal postqueue. The
12492 object the lvalue refers to would (probably) be modified by the
12493 postqueue; we need to copy the value out first, which means an
12494 rvalue. */
12495 if ((fallback & fb_lvalue)
12496 && gimple_seq_empty_p (internal_post)
12497 && is_gimple_addressable (*expr_p))
12498 {
12499 /* An lvalue will do. Take the address of the expression, store it
12500 in a temporary, and replace the expression with an INDIRECT_REF of
12501 that temporary. */
12502 tmp = build_fold_addr_expr_loc (input_location, *expr_p);
12503 gimplify_expr (&tmp, pre_p, post_p, is_gimple_reg, fb_rvalue);
12504 *expr_p = build_simple_mem_ref (tmp);
12505 }
12506 else if ((fallback & fb_rvalue) && is_gimple_reg_rhs_or_call (*expr_p))
12507 {
12508 /* An rvalue will do. Assign the gimplified expression into a
12509 new temporary TMP and replace the original expression with
12510 TMP. First, make sure that the expression has a type so that
12511 it can be assigned into a temporary. */
12512 gcc_assert (!VOID_TYPE_P (TREE_TYPE (*expr_p)));
12513 *expr_p = get_formal_tmp_var (*expr_p, pre_p);
12514 }
12515 else
12516 {
12517 #ifdef ENABLE_GIMPLE_CHECKING
12518 if (!(fallback & fb_mayfail))
12519 {
12520 fprintf (stderr, "gimplification failed:\n");
12521 print_generic_expr (stderr, *expr_p);
12522 debug_tree (*expr_p);
12523 internal_error ("gimplification failed");
12524 }
12525 #endif
12526 gcc_assert (fallback & fb_mayfail);
12527
12528 /* If this is an asm statement, and the user asked for the
12529 impossible, don't die. Fail and let gimplify_asm_expr
12530 issue an error. */
12531 ret = GS_ERROR;
12532 goto out;
12533 }
12534
12535 /* Make sure the temporary matches our predicate. */
12536 gcc_assert ((*gimple_test_f) (*expr_p));
12537
12538 if (!gimple_seq_empty_p (internal_post))
12539 {
12540 annotate_all_with_location (internal_post, input_location);
12541 gimplify_seq_add_seq (pre_p, internal_post);
12542 }
12543
12544 out:
12545 input_location = saved_location;
12546 return ret;
12547 }
12548
12549 /* Like gimplify_expr but make sure the gimplified result is not itself
12550 a SSA name (but a decl if it were). Temporaries required by
12551 evaluating *EXPR_P may be still SSA names. */
12552
12553 static enum gimplify_status
gimplify_expr(tree * expr_p,gimple_seq * pre_p,gimple_seq * post_p,bool (* gimple_test_f)(tree),fallback_t fallback,bool allow_ssa)12554 gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
12555 bool (*gimple_test_f) (tree), fallback_t fallback,
12556 bool allow_ssa)
12557 {
12558 bool was_ssa_name_p = TREE_CODE (*expr_p) == SSA_NAME;
12559 enum gimplify_status ret = gimplify_expr (expr_p, pre_p, post_p,
12560 gimple_test_f, fallback);
12561 if (! allow_ssa
12562 && TREE_CODE (*expr_p) == SSA_NAME)
12563 {
12564 tree name = *expr_p;
12565 if (was_ssa_name_p)
12566 *expr_p = get_initialized_tmp_var (*expr_p, pre_p, NULL, false);
12567 else
12568 {
12569 /* Avoid the extra copy if possible. */
12570 *expr_p = create_tmp_reg (TREE_TYPE (name));
12571 if (!gimple_nop_p (SSA_NAME_DEF_STMT (name)))
12572 gimple_set_lhs (SSA_NAME_DEF_STMT (name), *expr_p);
12573 release_ssa_name (name);
12574 }
12575 }
12576 return ret;
12577 }
12578
12579 /* Look through TYPE for variable-sized objects and gimplify each such
12580 size that we find. Add to LIST_P any statements generated. */
12581
12582 void
gimplify_type_sizes(tree type,gimple_seq * list_p)12583 gimplify_type_sizes (tree type, gimple_seq *list_p)
12584 {
12585 tree field, t;
12586
12587 if (type == NULL || type == error_mark_node)
12588 return;
12589
12590 /* We first do the main variant, then copy into any other variants. */
12591 type = TYPE_MAIN_VARIANT (type);
12592
12593 /* Avoid infinite recursion. */
12594 if (TYPE_SIZES_GIMPLIFIED (type))
12595 return;
12596
12597 TYPE_SIZES_GIMPLIFIED (type) = 1;
12598
12599 switch (TREE_CODE (type))
12600 {
12601 case INTEGER_TYPE:
12602 case ENUMERAL_TYPE:
12603 case BOOLEAN_TYPE:
12604 case REAL_TYPE:
12605 case FIXED_POINT_TYPE:
12606 gimplify_one_sizepos (&TYPE_MIN_VALUE (type), list_p);
12607 gimplify_one_sizepos (&TYPE_MAX_VALUE (type), list_p);
12608
12609 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
12610 {
12611 TYPE_MIN_VALUE (t) = TYPE_MIN_VALUE (type);
12612 TYPE_MAX_VALUE (t) = TYPE_MAX_VALUE (type);
12613 }
12614 break;
12615
12616 case ARRAY_TYPE:
12617 /* These types may not have declarations, so handle them here. */
12618 gimplify_type_sizes (TREE_TYPE (type), list_p);
12619 gimplify_type_sizes (TYPE_DOMAIN (type), list_p);
12620 /* Ensure VLA bounds aren't removed, for -O0 they should be variables
12621 with assigned stack slots, for -O1+ -g they should be tracked
12622 by VTA. */
12623 if (!(TYPE_NAME (type)
12624 && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
12625 && DECL_IGNORED_P (TYPE_NAME (type)))
12626 && TYPE_DOMAIN (type)
12627 && INTEGRAL_TYPE_P (TYPE_DOMAIN (type)))
12628 {
12629 t = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
12630 if (t && VAR_P (t) && DECL_ARTIFICIAL (t))
12631 DECL_IGNORED_P (t) = 0;
12632 t = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
12633 if (t && VAR_P (t) && DECL_ARTIFICIAL (t))
12634 DECL_IGNORED_P (t) = 0;
12635 }
12636 break;
12637
12638 case RECORD_TYPE:
12639 case UNION_TYPE:
12640 case QUAL_UNION_TYPE:
12641 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
12642 if (TREE_CODE (field) == FIELD_DECL)
12643 {
12644 gimplify_one_sizepos (&DECL_FIELD_OFFSET (field), list_p);
12645 gimplify_one_sizepos (&DECL_SIZE (field), list_p);
12646 gimplify_one_sizepos (&DECL_SIZE_UNIT (field), list_p);
12647 gimplify_type_sizes (TREE_TYPE (field), list_p);
12648 }
12649 break;
12650
12651 case POINTER_TYPE:
12652 case REFERENCE_TYPE:
12653 /* We used to recurse on the pointed-to type here, which turned out to
12654 be incorrect because its definition might refer to variables not
12655 yet initialized at this point if a forward declaration is involved.
12656
12657 It was actually useful for anonymous pointed-to types to ensure
12658 that the sizes evaluation dominates every possible later use of the
12659 values. Restricting to such types here would be safe since there
12660 is no possible forward declaration around, but would introduce an
12661 undesirable middle-end semantic to anonymity. We then defer to
12662 front-ends the responsibility of ensuring that the sizes are
12663 evaluated both early and late enough, e.g. by attaching artificial
12664 type declarations to the tree. */
12665 break;
12666
12667 default:
12668 break;
12669 }
12670
12671 gimplify_one_sizepos (&TYPE_SIZE (type), list_p);
12672 gimplify_one_sizepos (&TYPE_SIZE_UNIT (type), list_p);
12673
12674 for (t = TYPE_NEXT_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
12675 {
12676 TYPE_SIZE (t) = TYPE_SIZE (type);
12677 TYPE_SIZE_UNIT (t) = TYPE_SIZE_UNIT (type);
12678 TYPE_SIZES_GIMPLIFIED (t) = 1;
12679 }
12680 }
12681
12682 /* A subroutine of gimplify_type_sizes to make sure that *EXPR_P,
12683 a size or position, has had all of its SAVE_EXPRs evaluated.
12684 We add any required statements to *STMT_P. */
12685
12686 void
gimplify_one_sizepos(tree * expr_p,gimple_seq * stmt_p)12687 gimplify_one_sizepos (tree *expr_p, gimple_seq *stmt_p)
12688 {
12689 tree expr = *expr_p;
12690
12691 /* We don't do anything if the value isn't there, is constant, or contains
12692 A PLACEHOLDER_EXPR. We also don't want to do anything if it's already
12693 a VAR_DECL. If it's a VAR_DECL from another function, the gimplifier
12694 will want to replace it with a new variable, but that will cause problems
12695 if this type is from outside the function. It's OK to have that here. */
12696 if (expr == NULL_TREE
12697 || is_gimple_constant (expr)
12698 || TREE_CODE (expr) == VAR_DECL
12699 || CONTAINS_PLACEHOLDER_P (expr))
12700 return;
12701
12702 *expr_p = unshare_expr (expr);
12703
12704 /* SSA names in decl/type fields are a bad idea - they'll get reclaimed
12705 if the def vanishes. */
12706 gimplify_expr (expr_p, stmt_p, NULL, is_gimple_val, fb_rvalue, false);
12707
12708 /* If expr wasn't already is_gimple_sizepos or is_gimple_constant from the
12709 FE, ensure that it is a VAR_DECL, otherwise we might handle some decls
12710 as gimplify_vla_decl even when they would have all sizes INTEGER_CSTs. */
12711 if (is_gimple_constant (*expr_p))
12712 *expr_p = get_initialized_tmp_var (*expr_p, stmt_p, NULL, false);
12713 }
12714
12715 /* Gimplify the body of statements of FNDECL and return a GIMPLE_BIND node
12716 containing the sequence of corresponding GIMPLE statements. If DO_PARMS
12717 is true, also gimplify the parameters. */
12718
gbind *
gimplify_body (tree fndecl, bool do_parms)
{
  location_t saved_location = input_location;
  gimple_seq parm_stmts, parm_cleanup = NULL, seq;
  gimple *outer_stmt;
  gbind *outer_bind;
  struct cgraph_node *cgn;

  timevar_push (TV_TREE_GIMPLIFY);

  init_tree_ssa (cfun);

  /* Initialize for optimize_insn_for_s{ize,peed}_p possibly called during
     gimplification.  */
  default_rtl_profile ();

  gcc_assert (gimplify_ctxp == NULL);
  push_gimplify_context (true);

  if (flag_openacc || flag_openmp)
    {
      /* Functions marked "omp declare target" get an implicit target
	 gimplification context.  */
      gcc_assert (gimplify_omp_ctxp == NULL);
      if (lookup_attribute ("omp declare target", DECL_ATTRIBUTES (fndecl)))
	gimplify_omp_ctxp = new_omp_context (ORT_TARGET);
    }

  /* Unshare most shared trees in the body and in that of any nested functions.
     It would seem we don't have to do this for nested functions because
     they are supposed to be output and then the outer function gimplified
     first, but the g++ front end doesn't always do it that way.  */
  unshare_body (fndecl);
  unvisit_body (fndecl);

  /* For a nested function (one with a cgraph origin), set up tracking of
     VLAs referenced nonlocally; the collected nonlocal_vla_vars are
     declared in the outer bind below.  */
  cgn = cgraph_node::get (fndecl);
  if (cgn && cgn->origin)
    nonlocal_vlas = new hash_set<tree>;

  /* Make sure input_location isn't set to something weird.  */
  input_location = DECL_SOURCE_LOCATION (fndecl);

  /* Resolve callee-copies.  This has to be done before processing
     the body so that DECL_VALUE_EXPR gets processed correctly.  */
  parm_stmts = do_parms ? gimplify_parameters (&parm_cleanup) : NULL;

  /* Gimplify the function's body.  */
  seq = NULL;
  gimplify_stmt (&DECL_SAVED_TREE (fndecl), &seq);
  outer_stmt = gimple_seq_first_stmt (seq);
  if (!outer_stmt)
    {
      /* An empty body still needs at least one statement.  */
      outer_stmt = gimple_build_nop ();
      gimplify_seq_add_stmt (&seq, outer_stmt);
    }

  /* The body must contain exactly one statement, a GIMPLE_BIND.  If this is
     not the case, wrap everything in a GIMPLE_BIND to make it so.  */
  if (gimple_code (outer_stmt) == GIMPLE_BIND
      && gimple_seq_first (seq) == gimple_seq_last (seq))
    outer_bind = as_a <gbind *> (outer_stmt);
  else
    outer_bind = gimple_build_bind (NULL_TREE, seq, NULL);

  /* The GENERIC body has been consumed by gimplification.  */
  DECL_SAVED_TREE (fndecl) = NULL_TREE;

  /* If we had callee-copies statements, insert them at the beginning
     of the function and clear DECL_VALUE_EXPR_P on the parameters.  */
  if (!gimple_seq_empty_p (parm_stmts))
    {
      tree parm;

      gimplify_seq_add_seq (&parm_stmts, gimple_bind_body (outer_bind));
      if (parm_cleanup)
	{
	  /* Wrap in a try/finally so the parameter cleanups run on every
	     path out of the body.  */
	  gtry *g = gimple_build_try (parm_stmts, parm_cleanup,
				      GIMPLE_TRY_FINALLY);
	  parm_stmts = NULL;
	  gimple_seq_add_stmt (&parm_stmts, g);
	}
      gimple_bind_set_body (outer_bind, parm_stmts);

      for (parm = DECL_ARGUMENTS (current_function_decl);
	   parm; parm = DECL_CHAIN (parm))
	if (DECL_HAS_VALUE_EXPR_P (parm))
	  {
	    DECL_HAS_VALUE_EXPR_P (parm) = 0;
	    DECL_IGNORED_P (parm) = 0;
	  }
    }

  if (nonlocal_vlas)
    {
      if (nonlocal_vla_vars)
	{
	  /* tree-nested.c may later on call declare_vars (..., true);
	     which relies on BLOCK_VARS chain to be the tail of the
	     gimple_bind_vars chain.  Ensure we don't violate that
	     assumption.  */
	  if (gimple_bind_block (outer_bind)
	      == DECL_INITIAL (current_function_decl))
	    declare_vars (nonlocal_vla_vars, outer_bind, true);
	  else
	    BLOCK_VARS (DECL_INITIAL (current_function_decl))
	      = chainon (BLOCK_VARS (DECL_INITIAL (current_function_decl)),
			 nonlocal_vla_vars);
	  nonlocal_vla_vars = NULL_TREE;
	}
      delete nonlocal_vlas;
      nonlocal_vlas = NULL;
    }

  /* Tear down any OMP gimplification context created above (or during
     gimplification of the body).  */
  if ((flag_openacc || flag_openmp || flag_openmp_simd)
      && gimplify_omp_ctxp)
    {
      delete_omp_context (gimplify_omp_ctxp);
      gimplify_omp_ctxp = NULL;
    }

  pop_gimplify_context (outer_bind);
  gcc_assert (gimplify_ctxp == NULL);

  /* Self-check the produced GIMPLE when checking is enabled and no
     errors have been emitted.  */
  if (flag_checking && !seen_error ())
    verify_gimple_in_seq (gimple_bind_body (outer_bind));

  timevar_pop (TV_TREE_GIMPLIFY);
  input_location = saved_location;

  return outer_bind;
}
12848
12849 typedef char *char_p; /* For DEF_VEC_P. */
12850
12851 /* Return whether we should exclude FNDECL from instrumentation. */
12852
12853 static bool
flag_instrument_functions_exclude_p(tree fndecl)12854 flag_instrument_functions_exclude_p (tree fndecl)
12855 {
12856 vec<char_p> *v;
12857
12858 v = (vec<char_p> *) flag_instrument_functions_exclude_functions;
12859 if (v && v->length () > 0)
12860 {
12861 const char *name;
12862 int i;
12863 char *s;
12864
12865 name = lang_hooks.decl_printable_name (fndecl, 0);
12866 FOR_EACH_VEC_ELT (*v, i, s)
12867 if (strstr (name, s) != NULL)
12868 return true;
12869 }
12870
12871 v = (vec<char_p> *) flag_instrument_functions_exclude_files;
12872 if (v && v->length () > 0)
12873 {
12874 const char *name;
12875 int i;
12876 char *s;
12877
12878 name = DECL_SOURCE_FILE (fndecl);
12879 FOR_EACH_VEC_ELT (*v, i, s)
12880 if (strstr (name, s) != NULL)
12881 return true;
12882 }
12883
12884 return false;
12885 }
12886
12887 /* Entry point to the gimplification pass. FNDECL is the FUNCTION_DECL
12888 node for the function we want to gimplify.
12889
12890 Return the sequence of GIMPLE statements corresponding to the body
12891 of FNDECL. */
12892
void
gimplify_function_tree (tree fndecl)
{
  tree parm, ret;
  gimple_seq seq;
  gbind *bind;

  /* The function must not have been gimplified already.  */
  gcc_assert (!gimple_body (fndecl));

  if (DECL_STRUCT_FUNCTION (fndecl))
    push_cfun (DECL_STRUCT_FUNCTION (fndecl));
  else
    push_struct_function (fndecl);

  /* Tentatively set PROP_gimple_lva here, and reset it in gimplify_va_arg_expr
     if necessary.  */
  cfun->curr_properties |= PROP_gimple_lva;

  for (parm = DECL_ARGUMENTS (fndecl); parm ; parm = DECL_CHAIN (parm))
    {
      /* Preliminarily mark non-addressed complex variables as eligible
	 for promotion to gimple registers.  We'll transform their uses
	 as we find them.  */
      if ((TREE_CODE (TREE_TYPE (parm)) == COMPLEX_TYPE
	   || TREE_CODE (TREE_TYPE (parm)) == VECTOR_TYPE)
	  && !TREE_THIS_VOLATILE (parm)
	  && !needs_to_live_in_memory (parm))
	DECL_GIMPLE_REG_P (parm) = 1;
    }

  /* Likewise for the return value.  */
  ret = DECL_RESULT (fndecl);
  if ((TREE_CODE (TREE_TYPE (ret)) == COMPLEX_TYPE
       || TREE_CODE (TREE_TYPE (ret)) == VECTOR_TYPE)
      && !needs_to_live_in_memory (ret))
    DECL_GIMPLE_REG_P (ret) = 1;

  /* When ASan's use-after-scope checking is enabled, collect poisoned
     variables during gimplification of the body.  */
  if (asan_sanitize_use_after_scope () && sanitize_flags_p (SANITIZE_ADDRESS))
    asan_poisoned_variables = new hash_set<tree> ();
  bind = gimplify_body (fndecl, true);
  if (asan_poisoned_variables)
    {
      delete asan_poisoned_variables;
      asan_poisoned_variables = NULL;
    }

  /* The tree body of the function is no longer needed, replace it
     with the new GIMPLE body.  */
  seq = NULL;
  gimple_seq_add_stmt (&seq, bind);
  gimple_set_body (fndecl, seq);

  /* If we're instrumenting function entry/exit, then prepend the call to
     the entry hook and wrap the whole function in a TRY_FINALLY_EXPR to
     catch the exit hook.  */
  /* ??? Add some way to ignore exceptions for this TFE.  */
  if (flag_instrument_function_entry_exit
      && !DECL_NO_INSTRUMENT_FUNCTION_ENTRY_EXIT (fndecl)
      /* Do not instrument extern inline functions.  */
      && !(DECL_DECLARED_INLINE_P (fndecl)
	   && DECL_EXTERNAL (fndecl)
	   && DECL_DISREGARD_INLINE_LIMITS (fndecl))
      && !flag_instrument_functions_exclude_p (fndecl))
    {
      tree x;
      gbind *new_bind;
      gimple *tf;
      gimple_seq cleanup = NULL, body = NULL;
      tree tmp_var;
      gcall *call;

      /* Build the cleanup (finally) sequence: a call to the exit hook
	 (BUILT_IN_PROFILE_FUNC_EXIT) with the function's address and
	 __builtin_return_address (0) as arguments.  */
      x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
      call = gimple_build_call (x, 1, integer_zero_node);
      tmp_var = create_tmp_var (ptr_type_node, "return_addr");
      gimple_call_set_lhs (call, tmp_var);
      gimplify_seq_add_stmt (&cleanup, call);
      x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_EXIT);
      call = gimple_build_call (x, 2,
				build_fold_addr_expr (current_function_decl),
				tmp_var);
      gimplify_seq_add_stmt (&cleanup, call);
      tf = gimple_build_try (seq, cleanup, GIMPLE_TRY_FINALLY);

      /* Build the entry-hook call (BUILT_IN_PROFILE_FUNC_ENTER) with the
	 same arguments, executed before the original body.  */
      x = builtin_decl_implicit (BUILT_IN_RETURN_ADDRESS);
      call = gimple_build_call (x, 1, integer_zero_node);
      tmp_var = create_tmp_var (ptr_type_node, "return_addr");
      gimple_call_set_lhs (call, tmp_var);
      gimplify_seq_add_stmt (&body, call);
      x = builtin_decl_implicit (BUILT_IN_PROFILE_FUNC_ENTER);
      call = gimple_build_call (x, 2,
				build_fold_addr_expr (current_function_decl),
				tmp_var);
      gimplify_seq_add_stmt (&body, call);
      gimplify_seq_add_stmt (&body, tf);
      new_bind = gimple_build_bind (NULL, body, NULL);

      /* Replace the current function body with the body
	 wrapped in the try/finally TF.  */
      seq = NULL;
      gimple_seq_add_stmt (&seq, new_bind);
      gimple_set_body (fndecl, seq);
      bind = new_bind;
    }

  if (sanitize_flags_p (SANITIZE_THREAD))
    {
      /* Wrap the body in a try/finally calling IFN_TSAN_FUNC_EXIT so the
	 TSan exit hook runs on every path out of the function.  */
      gcall *call = gimple_build_call_internal (IFN_TSAN_FUNC_EXIT, 0);
      gimple *tf = gimple_build_try (seq, call, GIMPLE_TRY_FINALLY);
      gbind *new_bind = gimple_build_bind (NULL, tf, NULL);
      /* Replace the current function body with the body
	 wrapped in the try/finally TF.  */
      seq = NULL;
      gimple_seq_add_stmt (&seq, new_bind);
      gimple_set_body (fndecl, seq);
    }

  DECL_SAVED_TREE (fndecl) = NULL_TREE;
  cfun->curr_properties |= PROP_gimple_any;

  pop_cfun ();

  dump_function (TDI_gimple, fndecl);
}
13015
13016 /* Return a dummy expression of type TYPE in order to keep going after an
13017 error. */
13018
13019 static tree
dummy_object(tree type)13020 dummy_object (tree type)
13021 {
13022 tree t = build_int_cst (build_pointer_type (type), 0);
13023 return build2 (MEM_REF, type, t, t);
13024 }
13025
13026 /* Gimplify __builtin_va_arg, aka VA_ARG_EXPR, which is not really a
13027 builtin function, but a very special sort of operator. */
13028
enum gimplify_status
gimplify_va_arg_expr (tree *expr_p, gimple_seq *pre_p,
		      gimple_seq *post_p ATTRIBUTE_UNUSED)
{
  tree promoted_type, have_va_type;
  tree valist = TREE_OPERAND (*expr_p, 0);
  tree type = TREE_TYPE (*expr_p);
  tree t, tag, aptag;
  location_t loc = EXPR_LOCATION (*expr_p);

  /* Verify that valist is of the proper type.  */
  have_va_type = TREE_TYPE (valist);
  if (have_va_type == error_mark_node)
    return GS_ERROR;
  have_va_type = targetm.canonical_va_list_type (have_va_type);
  if (have_va_type == NULL_TREE
      && POINTER_TYPE_P (TREE_TYPE (valist)))
    /* Handle 'Case 1: Not an array type' from c-common.c/build_va_arg.  */
    have_va_type
      = targetm.canonical_va_list_type (TREE_TYPE (TREE_TYPE (valist)));
  gcc_assert (have_va_type != NULL_TREE);

  /* Generate a diagnostic for requesting data of a type that cannot
     be passed through `...' due to type promotion at the call site.  */
  if ((promoted_type = lang_hooks.types.type_promotes_to (type))
      != type)
    {
      static bool gave_help;
      bool warned;
      /* Use the expansion point to handle cases such as passing bool (defined
	 in a system header) through `...'.  */
      source_location xloc
	= expansion_point_location_if_in_system_header (loc);

      /* Unfortunately, this is merely undefined, rather than a constraint
	 violation, so we cannot make this an error.  If this call is never
	 executed, the program is still strictly conforming.  */
      warned = warning_at (xloc, 0,
			   "%qT is promoted to %qT when passed through %<...%>",
			   type, promoted_type);
      if (!gave_help && warned)
	{
	  /* Emit the follow-up hint only once per compilation.  */
	  gave_help = true;
	  inform (xloc, "(so you should pass %qT not %qT to %<va_arg%>)",
		  promoted_type, type);
	}

      /* We can, however, treat "undefined" any way we please.
	 Call abort to encourage the user to fix the program.  */
      if (warned)
	inform (xloc, "if this code is reached, the program will abort");
      /* Before the abort, allow the evaluation of the va_list
	 expression to exit or longjmp.  */
      gimplify_and_add (valist, pre_p);
      t = build_call_expr_loc (loc,
			       builtin_decl_implicit (BUILT_IN_TRAP), 0);
      gimplify_and_add (t, pre_p);

      /* This is dead code, but go ahead and finish so that the
	 mode of the result comes out right.  */
      *expr_p = dummy_object (type);
      return GS_ALL_DONE;
    }

  /* Rewrite VA_ARG_EXPR into a call to the internal function IFN_VA_ARG.
     TAG and APTAG are null pointer constants carrying, respectively, the
     requested type and the va_list type.  */
  tag = build_int_cst (build_pointer_type (type), 0);
  aptag = build_int_cst (TREE_TYPE (valist), 0);

  *expr_p = build_call_expr_internal_loc (loc, IFN_VA_ARG, type, 3,
					  valist, tag, aptag);

  /* Clear the tentatively set PROP_gimple_lva, to indicate that IFN_VA_ARG
     needs to be expanded.  */
  cfun->curr_properties &= ~PROP_gimple_lva;

  return GS_OK;
}
13105
13106 /* Build a new GIMPLE_ASSIGN tuple and append it to the end of *SEQ_P.
13107
13108 DST/SRC are the destination and source respectively. You can pass
13109 ungimplified trees in DST or SRC, in which case they will be
13110 converted to a gimple operand if necessary.
13111
13112 This function returns the newly created GIMPLE_ASSIGN tuple. */
13113
13114 gimple *
gimplify_assign(tree dst,tree src,gimple_seq * seq_p)13115 gimplify_assign (tree dst, tree src, gimple_seq *seq_p)
13116 {
13117 tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
13118 gimplify_and_add (t, seq_p);
13119 ggc_free (t);
13120 return gimple_seq_last_stmt (*seq_p);
13121 }
13122
13123 inline hashval_t
hash(const elt_t * p)13124 gimplify_hasher::hash (const elt_t *p)
13125 {
13126 tree t = p->val;
13127 return iterative_hash_expr (t, 0);
13128 }
13129
13130 inline bool
equal(const elt_t * p1,const elt_t * p2)13131 gimplify_hasher::equal (const elt_t *p1, const elt_t *p2)
13132 {
13133 tree t1 = p1->val;
13134 tree t2 = p2->val;
13135 enum tree_code code = TREE_CODE (t1);
13136
13137 if (TREE_CODE (t2) != code
13138 || TREE_TYPE (t1) != TREE_TYPE (t2))
13139 return false;
13140
13141 if (!operand_equal_p (t1, t2, 0))
13142 return false;
13143
13144 /* Only allow them to compare equal if they also hash equal; otherwise
13145 results are nondeterminate, and we fail bootstrap comparison. */
13146 gcc_checking_assert (hash (p1) == hash (p2));
13147
13148 return true;
13149 }
13150